gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
 24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
 28 /* The entry points in this file are fold, size_int_kind and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "predict.h"
62 #include "vec.h"
63 #include "hashtab.h"
64 #include "hash-set.h"
65 #include "machmode.h"
66 #include "hard-reg-set.h"
67 #include "input.h"
68 #include "function.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "tree-eh.h"
73 #include "gimple-expr.h"
74 #include "is-a.h"
75 #include "gimple.h"
76 #include "gimplify.h"
77 #include "tree-dfa.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
79 #include "builtins.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "generic-match.h"
85 #include "optabs.h"
86
87 /* Nonzero if we are folding constants inside an initializer; zero
88 otherwise. */
89 int folding_initializer = 0;
90
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code {
95 COMPCODE_FALSE = 0,
96 COMPCODE_LT = 1,
97 COMPCODE_EQ = 2,
98 COMPCODE_LE = 3,
99 COMPCODE_GT = 4,
100 COMPCODE_LTGT = 5,
101 COMPCODE_GE = 6,
102 COMPCODE_ORD = 7,
103 COMPCODE_UNORD = 8,
104 COMPCODE_UNLT = 9,
105 COMPCODE_UNEQ = 10,
106 COMPCODE_UNLE = 11,
107 COMPCODE_UNGT = 12,
108 COMPCODE_NE = 13,
109 COMPCODE_UNGE = 14,
110 COMPCODE_TRUE = 15
111 };
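
/* Editor's sketch (illustrative, not part of the original file): the
   encoding above gives each primitive outcome its own bit, LT = 1,
   EQ = 2, GT = 4, UNORD = 8, so combining two comparisons of the same
   operands under AND or OR is plain bit arithmetic on the codes.  */

static inline enum comparison_code
compcode_and_sketch (enum comparison_code a, enum comparison_code b)
{
  /* Example: COMPCODE_LE & COMPCODE_GE is 3 & 6 == 2 == COMPCODE_EQ,
     i.e. (x <= y) && (x >= y) simplifies to x == y.  */
  return (enum comparison_code) (a & b);
}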
112
113 static bool negate_mathfn_p (enum built_in_function);
114 static bool negate_expr_p (tree);
115 static tree negate_expr (tree);
116 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
117 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
118 static tree const_binop (enum tree_code, tree, tree);
119 static enum comparison_code comparison_to_compcode (enum tree_code);
120 static enum tree_code compcode_to_comparison (enum comparison_code);
121 static int operand_equal_for_comparison_p (tree, tree, tree);
122 static int twoval_comparison_p (tree, tree *, tree *, int *);
123 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
124 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
125 static tree make_bit_field_ref (location_t, tree, tree,
126 HOST_WIDE_INT, HOST_WIDE_INT, int);
127 static tree optimize_bit_field_compare (location_t, enum tree_code,
128 tree, tree, tree);
129 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
130 HOST_WIDE_INT *,
131 machine_mode *, int *, int *,
132 tree *, tree *);
133 static int simple_operand_p (const_tree);
134 static bool simple_operand_p_2 (tree);
135 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
136 static tree range_predecessor (tree);
137 static tree range_successor (tree);
138 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
139 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
140 static tree unextend (tree, int, int, tree);
141 static tree optimize_minmax_comparison (location_t, enum tree_code,
142 tree, tree, tree);
143 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
144 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
145 static tree fold_binary_op_with_conditional_arg (location_t,
146 enum tree_code, tree,
147 tree, tree,
148 tree, tree, int);
149 static tree fold_mathfn_compare (location_t,
150 enum built_in_function, enum tree_code,
151 tree, tree, tree);
152 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
153 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
154 static bool reorder_operands_p (const_tree, const_tree);
155 static tree fold_negate_const (tree, tree);
156 static tree fold_not_const (const_tree, tree);
157 static tree fold_relational_const (enum tree_code, tree, tree, tree);
158 static tree fold_convert_const (enum tree_code, tree, tree);
159
160 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
161 Otherwise, return LOC. */
162
163 static location_t
164 expr_location_or (tree t, location_t loc)
165 {
166 location_t tloc = EXPR_LOCATION (t);
167 return tloc == UNKNOWN_LOCATION ? loc : tloc;
168 }
169
 170 /* Similar to protected_set_expr_location, but never modify X in place;
 171    if the location can and needs to be set, unshare X first.  */
172
173 static inline tree
174 protected_set_expr_location_unshare (tree x, location_t loc)
175 {
176 if (CAN_HAVE_LOCATION_P (x)
177 && EXPR_LOCATION (x) != loc
178 && !(TREE_CODE (x) == SAVE_EXPR
179 || TREE_CODE (x) == TARGET_EXPR
180 || TREE_CODE (x) == BIND_EXPR))
181 {
182 x = copy_node (x);
183 SET_EXPR_LOCATION (x, loc);
184 }
185 return x;
186 }
187 \f
188 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
189 division and returns the quotient. Otherwise returns
190 NULL_TREE. */
191
192 tree
193 div_if_zero_remainder (const_tree arg1, const_tree arg2)
194 {
195 widest_int quo;
196
197 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
198 SIGNED, &quo))
199 return wide_int_to_tree (TREE_TYPE (arg1), quo);
200
201 return NULL_TREE;
202 }
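
/* Editor's usage sketch (illustrative, not part of the original file);
   the tree constants are built with the usual build_int_cst API.  */
#if 0
  tree twelve = build_int_cst (integer_type_node, 12);
  tree quo = div_if_zero_remainder (twelve,
				    build_int_cst (integer_type_node, 4));
  /* quo is the INTEGER_CST 3, since 4 divides 12 exactly.  */
  quo = div_if_zero_remainder (twelve,
			       build_int_cst (integer_type_node, 5));
  /* quo is NULL_TREE: 12 % 5 == 2, so there is no exact quotient.  */
#endif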
203 \f
204 /* This is nonzero if we should defer warnings about undefined
205 overflow. This facility exists because these warnings are a
206 special case. The code to estimate loop iterations does not want
207 to issue any warnings, since it works with expressions which do not
208 occur in user code. Various bits of cleanup code call fold(), but
209 only use the result if it has certain characteristics (e.g., is a
210 constant); that code only wants to issue a warning if the result is
211 used. */
212
213 static int fold_deferring_overflow_warnings;
214
215 /* If a warning about undefined overflow is deferred, this is the
216 warning. Note that this may cause us to turn two warnings into
217 one, but that is fine since it is sufficient to only give one
218 warning per expression. */
219
220 static const char* fold_deferred_overflow_warning;
221
222 /* If a warning about undefined overflow is deferred, this is the
223 level at which the warning should be emitted. */
224
225 static enum warn_strict_overflow_code fold_deferred_overflow_code;
226
227 /* Start deferring overflow warnings. We could use a stack here to
228 permit nested calls, but at present it is not necessary. */
229
230 void
231 fold_defer_overflow_warnings (void)
232 {
233 ++fold_deferring_overflow_warnings;
234 }
235
236 /* Stop deferring overflow warnings. If there is a pending warning,
237 and ISSUE is true, then issue the warning if appropriate. STMT is
238 the statement with which the warning should be associated (used for
239 location information); STMT may be NULL. CODE is the level of the
240 warning--a warn_strict_overflow_code value. This function will use
241 the smaller of CODE and the deferred code when deciding whether to
242 issue the warning. CODE may be zero to mean to always use the
243 deferred code. */
244
245 void
246 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
247 {
248 const char *warnmsg;
249 location_t locus;
250
251 gcc_assert (fold_deferring_overflow_warnings > 0);
252 --fold_deferring_overflow_warnings;
253 if (fold_deferring_overflow_warnings > 0)
254 {
255 if (fold_deferred_overflow_warning != NULL
256 && code != 0
257 && code < (int) fold_deferred_overflow_code)
258 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
259 return;
260 }
261
262 warnmsg = fold_deferred_overflow_warning;
263 fold_deferred_overflow_warning = NULL;
264
265 if (!issue || warnmsg == NULL)
266 return;
267
268 if (gimple_no_warning_p (stmt))
269 return;
270
271 /* Use the smallest code level when deciding to issue the
272 warning. */
273 if (code == 0 || code > (int) fold_deferred_overflow_code)
274 code = fold_deferred_overflow_code;
275
276 if (!issue_strict_overflow_warning (code))
277 return;
278
279 if (stmt == NULL)
280 locus = input_location;
281 else
282 locus = gimple_location (stmt);
283 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
284 }
285
286 /* Stop deferring overflow warnings, ignoring any deferred
287 warnings. */
288
289 void
290 fold_undefer_and_ignore_overflow_warnings (void)
291 {
292 fold_undefer_overflow_warnings (false, NULL, 0);
293 }
294
295 /* Whether we are deferring overflow warnings. */
296
297 bool
298 fold_deferring_overflow_warnings_p (void)
299 {
300 return fold_deferring_overflow_warnings > 0;
301 }
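
/* Editor's sketch of the intended calling protocol (illustrative, not
   part of the original file; result_was_used_p stands in for whatever
   test the caller actually applies).  */
#if 0
  fold_defer_overflow_warnings ();
  tree folded = fold (expr);
  if (result_was_used_p (folded))
    /* Issue the pending warning, if any, at STMT's location.  */
    fold_undefer_overflow_warnings (true, stmt, 0);
  else
    /* The speculative fold went unused; drop any pending warning.  */
    fold_undefer_and_ignore_overflow_warnings ();
#endif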
302
303 /* This is called when we fold something based on the fact that signed
304 overflow is undefined. */
305
306 static void
307 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
308 {
309 if (fold_deferring_overflow_warnings > 0)
310 {
311 if (fold_deferred_overflow_warning == NULL
312 || wc < fold_deferred_overflow_code)
313 {
314 fold_deferred_overflow_warning = gmsgid;
315 fold_deferred_overflow_code = wc;
316 }
317 }
318 else if (issue_strict_overflow_warning (wc))
319 warning (OPT_Wstrict_overflow, gmsgid);
320 }
321 \f
322 /* Return true if the built-in mathematical function specified by CODE
323 is odd, i.e. -f(x) == f(-x). */
324
325 static bool
326 negate_mathfn_p (enum built_in_function code)
327 {
328 switch (code)
329 {
330 CASE_FLT_FN (BUILT_IN_ASIN):
331 CASE_FLT_FN (BUILT_IN_ASINH):
332 CASE_FLT_FN (BUILT_IN_ATAN):
333 CASE_FLT_FN (BUILT_IN_ATANH):
334 CASE_FLT_FN (BUILT_IN_CASIN):
335 CASE_FLT_FN (BUILT_IN_CASINH):
336 CASE_FLT_FN (BUILT_IN_CATAN):
337 CASE_FLT_FN (BUILT_IN_CATANH):
338 CASE_FLT_FN (BUILT_IN_CBRT):
339 CASE_FLT_FN (BUILT_IN_CPROJ):
340 CASE_FLT_FN (BUILT_IN_CSIN):
341 CASE_FLT_FN (BUILT_IN_CSINH):
342 CASE_FLT_FN (BUILT_IN_CTAN):
343 CASE_FLT_FN (BUILT_IN_CTANH):
344 CASE_FLT_FN (BUILT_IN_ERF):
345 CASE_FLT_FN (BUILT_IN_LLROUND):
346 CASE_FLT_FN (BUILT_IN_LROUND):
347 CASE_FLT_FN (BUILT_IN_ROUND):
348 CASE_FLT_FN (BUILT_IN_SIN):
349 CASE_FLT_FN (BUILT_IN_SINH):
350 CASE_FLT_FN (BUILT_IN_TAN):
351 CASE_FLT_FN (BUILT_IN_TANH):
352 CASE_FLT_FN (BUILT_IN_TRUNC):
353 return true;
354
355 CASE_FLT_FN (BUILT_IN_LLRINT):
356 CASE_FLT_FN (BUILT_IN_LRINT):
357 CASE_FLT_FN (BUILT_IN_NEARBYINT):
358 CASE_FLT_FN (BUILT_IN_RINT):
359 return !flag_rounding_math;
360
361 default:
362 break;
363 }
364 return false;
365 }
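
/* Editor's note (illustrative, not part of the original file): "odd"
   is meant in the usual mathematical sense, e.g. sin(-x) == -sin(x),
   so a negation can be pushed into the argument.  The rint family is
   only odd for a fixed rounding mode, hence the !flag_rounding_math
   guard: under upward rounding, rint (-0.5) is -0.0 while
   -rint (0.5) is -1.0.  */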
366
367 /* Check whether we may negate an integer constant T without causing
368 overflow. */
369
370 bool
371 may_negate_without_overflow_p (const_tree t)
372 {
373 tree type;
374
375 gcc_assert (TREE_CODE (t) == INTEGER_CST);
376
377 type = TREE_TYPE (t);
378 if (TYPE_UNSIGNED (type))
379 return false;
380
381 return !wi::only_sign_bit_p (t);
382 }
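
/* Editor's example (illustrative, not part of the original file): in
   signed char the one value whose negation overflows is -128 (0x80,
   the lone sign bit), since +128 is not representable, and
   wi::only_sign_bit_p is exactly that test.  */
#if 0
  may_negate_without_overflow_p
    (build_int_cst (signed_char_type_node, -128));	/* false */
  may_negate_without_overflow_p
    (build_int_cst (signed_char_type_node, -127));	/* true */
#endif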
383
384 /* Determine whether an expression T can be cheaply negated using
385 the function negate_expr without introducing undefined overflow. */
386
387 static bool
388 negate_expr_p (tree t)
389 {
390 tree type;
391
392 if (t == 0)
393 return false;
394
395 type = TREE_TYPE (t);
396
397 STRIP_SIGN_NOPS (t);
398 switch (TREE_CODE (t))
399 {
400 case INTEGER_CST:
401 if (TYPE_OVERFLOW_WRAPS (type))
402 return true;
403
404 /* Check that -CST will not overflow type. */
405 return may_negate_without_overflow_p (t);
406 case BIT_NOT_EXPR:
407 return (INTEGRAL_TYPE_P (type)
408 && TYPE_OVERFLOW_WRAPS (type));
409
410 case FIXED_CST:
411 case NEGATE_EXPR:
412 return true;
413
414 case REAL_CST:
415 /* We want to canonicalize to positive real constants. Pretend
416 that only negative ones can be easily negated. */
417 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
418
419 case COMPLEX_CST:
420 return negate_expr_p (TREE_REALPART (t))
421 && negate_expr_p (TREE_IMAGPART (t));
422
423 case VECTOR_CST:
424 {
425 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
426 return true;
427
428 int count = TYPE_VECTOR_SUBPARTS (type), i;
429
430 for (i = 0; i < count; i++)
431 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
432 return false;
433
434 return true;
435 }
436
437 case COMPLEX_EXPR:
438 return negate_expr_p (TREE_OPERAND (t, 0))
439 && negate_expr_p (TREE_OPERAND (t, 1));
440
441 case CONJ_EXPR:
442 return negate_expr_p (TREE_OPERAND (t, 0));
443
444 case PLUS_EXPR:
445 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 || HONOR_SIGNED_ZEROS (element_mode (type)))
447 return false;
448 /* -(A + B) -> (-B) - A. */
449 if (negate_expr_p (TREE_OPERAND (t, 1))
450 && reorder_operands_p (TREE_OPERAND (t, 0),
451 TREE_OPERAND (t, 1)))
452 return true;
453 /* -(A + B) -> (-A) - B. */
454 return negate_expr_p (TREE_OPERAND (t, 0));
455
456 case MINUS_EXPR:
457 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
458 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
459 && !HONOR_SIGNED_ZEROS (element_mode (type))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1));
462
463 case MULT_EXPR:
464 if (TYPE_UNSIGNED (TREE_TYPE (t)))
465 break;
466
467 /* Fall through. */
468
469 case RDIV_EXPR:
470 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
471 return negate_expr_p (TREE_OPERAND (t, 1))
472 || negate_expr_p (TREE_OPERAND (t, 0));
473 break;
474
475 case TRUNC_DIV_EXPR:
476 case ROUND_DIV_EXPR:
477 case EXACT_DIV_EXPR:
478 /* In general we can't negate A / B, because if A is INT_MIN and
479 B is 1, we may turn this into INT_MIN / -1 which is undefined
480 and actually traps on some architectures. But if overflow is
481 undefined, we can negate, because - (INT_MIN / 1) is an
482 overflow. */
483 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
484 {
485 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
486 break;
487 /* If overflow is undefined then we have to be careful because
488 we ask whether it's ok to associate the negate with the
489 division which is not ok for example for
490 -((a - b) / c) where (-(a - b)) / c may invoke undefined
491 overflow because of negating INT_MIN. So do not use
492 negate_expr_p here but open-code the two important cases. */
493 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
494 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
495 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
496 return true;
497 }
498 else if (negate_expr_p (TREE_OPERAND (t, 0)))
499 return true;
500 return negate_expr_p (TREE_OPERAND (t, 1));
501
502 case NOP_EXPR:
503 /* Negate -((double)float) as (double)(-float). */
504 if (TREE_CODE (type) == REAL_TYPE)
505 {
506 tree tem = strip_float_extensions (t);
507 if (tem != t)
508 return negate_expr_p (tem);
509 }
510 break;
511
512 case CALL_EXPR:
513 /* Negate -f(x) as f(-x). */
514 if (negate_mathfn_p (builtin_mathfn_code (t)))
515 return negate_expr_p (CALL_EXPR_ARG (t, 0));
516 break;
517
518 case RSHIFT_EXPR:
519 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
520 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
521 {
522 tree op1 = TREE_OPERAND (t, 1);
523 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
524 return true;
525 }
526 break;
527
528 default:
529 break;
530 }
531 return false;
532 }
533
 534 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if
 535    no simplification is possible.
536 If negate_expr_p would return true for T, NULL_TREE will never be
537 returned. */
538
539 static tree
540 fold_negate_expr (location_t loc, tree t)
541 {
542 tree type = TREE_TYPE (t);
543 tree tem;
544
545 switch (TREE_CODE (t))
546 {
547 /* Convert - (~A) to A + 1. */
548 case BIT_NOT_EXPR:
549 if (INTEGRAL_TYPE_P (type))
550 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
551 build_one_cst (type));
552 break;
553
554 case INTEGER_CST:
555 tem = fold_negate_const (t, type);
556 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
557 || (!TYPE_OVERFLOW_TRAPS (type)
558 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0))
559 return tem;
560 break;
561
562 case REAL_CST:
563 tem = fold_negate_const (t, type);
564 /* Two's complement FP formats, such as c4x, may overflow. */
565 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
566 return tem;
567 break;
568
569 case FIXED_CST:
570 tem = fold_negate_const (t, type);
571 return tem;
572
573 case COMPLEX_CST:
574 {
575 tree rpart = negate_expr (TREE_REALPART (t));
576 tree ipart = negate_expr (TREE_IMAGPART (t));
577
578 if ((TREE_CODE (rpart) == REAL_CST
579 && TREE_CODE (ipart) == REAL_CST)
580 || (TREE_CODE (rpart) == INTEGER_CST
581 && TREE_CODE (ipart) == INTEGER_CST))
582 return build_complex (type, rpart, ipart);
583 }
584 break;
585
586 case VECTOR_CST:
587 {
588 int count = TYPE_VECTOR_SUBPARTS (type), i;
589 tree *elts = XALLOCAVEC (tree, count);
590
591 for (i = 0; i < count; i++)
592 {
593 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
594 if (elts[i] == NULL_TREE)
595 return NULL_TREE;
596 }
597
598 return build_vector (type, elts);
599 }
600
601 case COMPLEX_EXPR:
602 if (negate_expr_p (t))
603 return fold_build2_loc (loc, COMPLEX_EXPR, type,
604 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
605 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
606 break;
607
608 case CONJ_EXPR:
609 if (negate_expr_p (t))
610 return fold_build1_loc (loc, CONJ_EXPR, type,
611 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
612 break;
613
614 case NEGATE_EXPR:
615 if (!TYPE_OVERFLOW_SANITIZED (type))
616 return TREE_OPERAND (t, 0);
617 break;
618
619 case PLUS_EXPR:
620 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
621 && !HONOR_SIGNED_ZEROS (element_mode (type)))
622 {
623 /* -(A + B) -> (-B) - A. */
624 if (negate_expr_p (TREE_OPERAND (t, 1))
625 && reorder_operands_p (TREE_OPERAND (t, 0),
626 TREE_OPERAND (t, 1)))
627 {
628 tem = negate_expr (TREE_OPERAND (t, 1));
629 return fold_build2_loc (loc, MINUS_EXPR, type,
630 tem, TREE_OPERAND (t, 0));
631 }
632
633 /* -(A + B) -> (-A) - B. */
634 if (negate_expr_p (TREE_OPERAND (t, 0)))
635 {
636 tem = negate_expr (TREE_OPERAND (t, 0));
637 return fold_build2_loc (loc, MINUS_EXPR, type,
638 tem, TREE_OPERAND (t, 1));
639 }
640 }
641 break;
642
643 case MINUS_EXPR:
644 /* - (A - B) -> B - A */
645 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
646 && !HONOR_SIGNED_ZEROS (element_mode (type))
647 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
648 return fold_build2_loc (loc, MINUS_EXPR, type,
649 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
650 break;
651
652 case MULT_EXPR:
653 if (TYPE_UNSIGNED (type))
654 break;
655
656 /* Fall through. */
657
658 case RDIV_EXPR:
659 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
660 {
661 tem = TREE_OPERAND (t, 1);
662 if (negate_expr_p (tem))
663 return fold_build2_loc (loc, TREE_CODE (t), type,
664 TREE_OPERAND (t, 0), negate_expr (tem));
665 tem = TREE_OPERAND (t, 0);
666 if (negate_expr_p (tem))
667 return fold_build2_loc (loc, TREE_CODE (t), type,
668 negate_expr (tem), TREE_OPERAND (t, 1));
669 }
670 break;
671
672 case TRUNC_DIV_EXPR:
673 case ROUND_DIV_EXPR:
674 case EXACT_DIV_EXPR:
675 /* In general we can't negate A / B, because if A is INT_MIN and
676 B is 1, we may turn this into INT_MIN / -1 which is undefined
677 and actually traps on some architectures. But if overflow is
678 undefined, we can negate, because - (INT_MIN / 1) is an
679 overflow. */
680 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
681 {
682 const char * const warnmsg = G_("assuming signed overflow does not "
683 "occur when negating a division");
684 tem = TREE_OPERAND (t, 1);
685 if (negate_expr_p (tem))
686 {
687 if (INTEGRAL_TYPE_P (type)
688 && (TREE_CODE (tem) != INTEGER_CST
689 || integer_onep (tem)))
690 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
691 return fold_build2_loc (loc, TREE_CODE (t), type,
692 TREE_OPERAND (t, 0), negate_expr (tem));
693 }
694 /* If overflow is undefined then we have to be careful because
695 we ask whether it's ok to associate the negate with the
696 division which is not ok for example for
697 -((a - b) / c) where (-(a - b)) / c may invoke undefined
698 overflow because of negating INT_MIN. So do not use
699 negate_expr_p here but open-code the two important cases. */
700 tem = TREE_OPERAND (t, 0);
701 if ((INTEGRAL_TYPE_P (type)
702 && (TREE_CODE (tem) == NEGATE_EXPR
703 || (TREE_CODE (tem) == INTEGER_CST
704 && may_negate_without_overflow_p (tem))))
705 || !INTEGRAL_TYPE_P (type))
706 return fold_build2_loc (loc, TREE_CODE (t), type,
707 negate_expr (tem), TREE_OPERAND (t, 1));
708 }
709 break;
710
711 case NOP_EXPR:
712 /* Convert -((double)float) into (double)(-float). */
713 if (TREE_CODE (type) == REAL_TYPE)
714 {
715 tem = strip_float_extensions (t);
716 if (tem != t && negate_expr_p (tem))
717 return fold_convert_loc (loc, type, negate_expr (tem));
718 }
719 break;
720
721 case CALL_EXPR:
722 /* Negate -f(x) as f(-x). */
723 if (negate_mathfn_p (builtin_mathfn_code (t))
724 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
725 {
726 tree fndecl, arg;
727
728 fndecl = get_callee_fndecl (t);
729 arg = negate_expr (CALL_EXPR_ARG (t, 0));
730 return build_call_expr_loc (loc, fndecl, 1, arg);
731 }
732 break;
733
734 case RSHIFT_EXPR:
735 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
736 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
737 {
738 tree op1 = TREE_OPERAND (t, 1);
739 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
740 {
741 tree ntype = TYPE_UNSIGNED (type)
742 ? signed_type_for (type)
743 : unsigned_type_for (type);
744 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
745 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
746 return fold_convert_loc (loc, type, temp);
747 }
748 }
749 break;
750
751 default:
752 break;
753 }
754
755 return NULL_TREE;
756 }
757
 758 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
 759    negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
760 return NULL_TREE. */
761
762 static tree
763 negate_expr (tree t)
764 {
765 tree type, tem;
766 location_t loc;
767
768 if (t == NULL_TREE)
769 return NULL_TREE;
770
771 loc = EXPR_LOCATION (t);
772 type = TREE_TYPE (t);
773 STRIP_SIGN_NOPS (t);
774
775 tem = fold_negate_expr (loc, t);
776 if (!tem)
777 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
778 return fold_convert_loc (loc, type, tem);
779 }
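
/* Editor's examples (illustrative, not part of the original file) of
   rewrites performed by the negate machinery above:

     negate_expr (x - y)  ->  y - x    (if signed zeros and sign-
					dependent rounding do not matter)
     negate_expr (~x)     ->  x + 1    (integral types only)
     negate_expr ((int) x >> 31)  ->  (int) ((unsigned) x >> 31)

   with a fall-back to an explicit NEGATE_EXPR when fold_negate_expr
   finds nothing simpler.  */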
780 \f
 781 /* Split a tree IN into constant, literal and variable parts that could be
782 combined with CODE to make IN. "constant" means an expression with
783 TREE_CONSTANT but that isn't an actual constant. CODE must be a
784 commutative arithmetic operation. Store the constant part into *CONP,
785 the literal in *LITP and return the variable part. If a part isn't
786 present, set it to null. If the tree does not decompose in this way,
787 return the entire tree as the variable part and the other parts as null.
788
789 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
790 case, we negate an operand that was subtracted. Except if it is a
791 literal for which we use *MINUS_LITP instead.
792
793 If NEGATE_P is true, we are negating all of IN, again except a literal
794 for which we use *MINUS_LITP instead.
795
796 If IN is itself a literal or constant, return it as appropriate.
797
798 Note that we do not guarantee that any of the three values will be the
799 same type as IN, but they will have the same signedness and mode. */
800
801 static tree
802 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
803 tree *minus_litp, int negate_p)
804 {
805 tree var = 0;
806
807 *conp = 0;
808 *litp = 0;
809 *minus_litp = 0;
810
811 /* Strip any conversions that don't change the machine mode or signedness. */
812 STRIP_SIGN_NOPS (in);
813
814 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
815 || TREE_CODE (in) == FIXED_CST)
816 *litp = in;
817 else if (TREE_CODE (in) == code
818 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
819 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
820 /* We can associate addition and subtraction together (even
821 though the C standard doesn't say so) for integers because
822 the value is not affected. For reals, the value might be
823 affected, so we can't. */
824 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
825 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
826 {
827 tree op0 = TREE_OPERAND (in, 0);
828 tree op1 = TREE_OPERAND (in, 1);
829 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
830 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
831
832 /* First see if either of the operands is a literal, then a constant. */
833 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
834 || TREE_CODE (op0) == FIXED_CST)
835 *litp = op0, op0 = 0;
836 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
837 || TREE_CODE (op1) == FIXED_CST)
838 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
839
840 if (op0 != 0 && TREE_CONSTANT (op0))
841 *conp = op0, op0 = 0;
842 else if (op1 != 0 && TREE_CONSTANT (op1))
843 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
844
845 /* If we haven't dealt with either operand, this is not a case we can
846 decompose. Otherwise, VAR is either of the ones remaining, if any. */
847 if (op0 != 0 && op1 != 0)
848 var = in;
849 else if (op0 != 0)
850 var = op0;
851 else
852 var = op1, neg_var_p = neg1_p;
853
854 /* Now do any needed negations. */
855 if (neg_litp_p)
856 *minus_litp = *litp, *litp = 0;
857 if (neg_conp_p)
858 *conp = negate_expr (*conp);
859 if (neg_var_p)
860 var = negate_expr (var);
861 }
862 else if (TREE_CODE (in) == BIT_NOT_EXPR
863 && code == PLUS_EXPR)
864 {
865 /* -X - 1 is folded to ~X, undo that here. */
866 *minus_litp = build_one_cst (TREE_TYPE (in));
867 var = negate_expr (TREE_OPERAND (in, 0));
868 }
869 else if (TREE_CONSTANT (in))
870 *conp = in;
871 else
872 var = in;
873
874 if (negate_p)
875 {
876 if (*litp)
877 *minus_litp = *litp, *litp = 0;
878 else if (*minus_litp)
879 *litp = *minus_litp, *minus_litp = 0;
880 *conp = negate_expr (*conp);
881 var = negate_expr (var);
882 }
883
884 return var;
885 }
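
/* Editor's worked example (illustrative, not part of the original
   file): splitting IN = x - 3 with CODE == PLUS_EXPR yields

     return value (var) = x
     *conp		= 0
     *litp		= 0
     *minus_litp	= 3

   because the literal 3 was subtracted.  Callers can then combine the
   literal with other constants before rebuilding the expression with
   associate_trees below.  */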
886
887 /* Re-associate trees split by the above function. T1 and T2 are
888 either expressions to associate or null. Return the new
889 expression, if any. LOC is the location of the new expression. If
890 we build an operation, do it in TYPE and with CODE. */
891
892 static tree
893 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
894 {
895 if (t1 == 0)
896 return t2;
897 else if (t2 == 0)
898 return t1;
899
900 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
901 try to fold this since we will have infinite recursion. But do
902 deal with any NEGATE_EXPRs. */
903 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
904 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
905 {
906 if (code == PLUS_EXPR)
907 {
908 if (TREE_CODE (t1) == NEGATE_EXPR)
909 return build2_loc (loc, MINUS_EXPR, type,
910 fold_convert_loc (loc, type, t2),
911 fold_convert_loc (loc, type,
912 TREE_OPERAND (t1, 0)));
913 else if (TREE_CODE (t2) == NEGATE_EXPR)
914 return build2_loc (loc, MINUS_EXPR, type,
915 fold_convert_loc (loc, type, t1),
916 fold_convert_loc (loc, type,
917 TREE_OPERAND (t2, 0)));
918 else if (integer_zerop (t2))
919 return fold_convert_loc (loc, type, t1);
920 }
921 else if (code == MINUS_EXPR)
922 {
923 if (integer_zerop (t2))
924 return fold_convert_loc (loc, type, t1);
925 }
926
927 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
928 fold_convert_loc (loc, type, t2));
929 }
930
931 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
932 fold_convert_loc (loc, type, t2));
933 }
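
/* Editor's note (illustrative, not part of the original file): for
   T1 = a + b, T2 = -c and CODE == PLUS_EXPR this builds (a + b) - c
   with build2_loc rather than fold_build2_loc; folding at this point
   could split the operands right back apart and recurse forever,
   which is why the PLUS/MINUS cases above bypass fold.  */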
934 \f
935 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
936 for use in int_const_binop, size_binop and size_diffop. */
937
938 static bool
939 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
940 {
941 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
942 return false;
943 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
944 return false;
945
946 switch (code)
947 {
948 case LSHIFT_EXPR:
949 case RSHIFT_EXPR:
950 case LROTATE_EXPR:
951 case RROTATE_EXPR:
952 return true;
953
954 default:
955 break;
956 }
957
958 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
959 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
960 && TYPE_MODE (type1) == TYPE_MODE (type2);
961 }
962
963
964 /* Combine two integer constants ARG1 and ARG2 under operation CODE
965 to produce a new constant. Return NULL_TREE if we don't know how
966 to evaluate CODE at compile-time. */
967
968 static tree
969 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
970 int overflowable)
971 {
972 wide_int res;
973 tree t;
974 tree type = TREE_TYPE (arg1);
975 signop sign = TYPE_SIGN (type);
976 bool overflow = false;
977
978 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
979 TYPE_SIGN (TREE_TYPE (parg2)));
980
981 switch (code)
982 {
983 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2);
985 break;
986
987 case BIT_XOR_EXPR:
988 res = wi::bit_xor (arg1, arg2);
989 break;
990
991 case BIT_AND_EXPR:
992 res = wi::bit_and (arg1, arg2);
993 break;
994
995 case RSHIFT_EXPR:
996 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2))
998 {
999 arg2 = -arg2;
1000 if (code == RSHIFT_EXPR)
1001 code = LSHIFT_EXPR;
1002 else
1003 code = RSHIFT_EXPR;
1004 }
1005
1006 if (code == RSHIFT_EXPR)
1007 /* It's unclear from the C standard whether shifts can overflow.
1008 The following code ignores overflow; perhaps a C standard
1009 interpretation ruling is needed. */
1010 res = wi::rshift (arg1, arg2, sign);
1011 else
1012 res = wi::lshift (arg1, arg2);
1013 break;
1014
1015 case RROTATE_EXPR:
1016 case LROTATE_EXPR:
1017 if (wi::neg_p (arg2))
1018 {
1019 arg2 = -arg2;
1020 if (code == RROTATE_EXPR)
1021 code = LROTATE_EXPR;
1022 else
1023 code = RROTATE_EXPR;
1024 }
1025
1026 if (code == RROTATE_EXPR)
1027 res = wi::rrotate (arg1, arg2);
1028 else
1029 res = wi::lrotate (arg1, arg2);
1030 break;
1031
1032 case PLUS_EXPR:
1033 res = wi::add (arg1, arg2, sign, &overflow);
1034 break;
1035
1036 case MINUS_EXPR:
1037 res = wi::sub (arg1, arg2, sign, &overflow);
1038 break;
1039
1040 case MULT_EXPR:
1041 res = wi::mul (arg1, arg2, sign, &overflow);
1042 break;
1043
1044 case MULT_HIGHPART_EXPR:
1045 res = wi::mul_high (arg1, arg2, sign);
1046 break;
1047
1048 case TRUNC_DIV_EXPR:
1049 case EXACT_DIV_EXPR:
1050 if (arg2 == 0)
1051 return NULL_TREE;
1052 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case FLOOR_DIV_EXPR:
1056 if (arg2 == 0)
1057 return NULL_TREE;
1058 res = wi::div_floor (arg1, arg2, sign, &overflow);
1059 break;
1060
1061 case CEIL_DIV_EXPR:
1062 if (arg2 == 0)
1063 return NULL_TREE;
1064 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1065 break;
1066
1067 case ROUND_DIV_EXPR:
1068 if (arg2 == 0)
1069 return NULL_TREE;
1070 res = wi::div_round (arg1, arg2, sign, &overflow);
1071 break;
1072
1073 case TRUNC_MOD_EXPR:
1074 if (arg2 == 0)
1075 return NULL_TREE;
1076 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1077 break;
1078
1079 case FLOOR_MOD_EXPR:
1080 if (arg2 == 0)
1081 return NULL_TREE;
1082 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1083 break;
1084
1085 case CEIL_MOD_EXPR:
1086 if (arg2 == 0)
1087 return NULL_TREE;
1088 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1089 break;
1090
1091 case ROUND_MOD_EXPR:
1092 if (arg2 == 0)
1093 return NULL_TREE;
1094 res = wi::mod_round (arg1, arg2, sign, &overflow);
1095 break;
1096
1097 case MIN_EXPR:
1098 res = wi::min (arg1, arg2, sign);
1099 break;
1100
1101 case MAX_EXPR:
1102 res = wi::max (arg1, arg2, sign);
1103 break;
1104
1105 default:
1106 return NULL_TREE;
1107 }
1108
1109 t = force_fit_type (type, res, overflowable,
1110 (((sign == SIGNED || overflowable == -1)
1111 && overflow)
1112 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1113
1114 return t;
1115 }
1116
1117 tree
1118 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1119 {
1120 return int_const_binop_1 (code, arg1, arg2, 1);
1121 }
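
/* Editor's sketch (illustrative, not part of the original file):
   overflow is recorded rather than silently discarded, so folding
   INT_MAX + 1 in a signed type wraps to INT_MIN and flags it.  */
#if 0
  tree sum = int_const_binop (PLUS_EXPR,
			      TYPE_MAX_VALUE (integer_type_node),
			      integer_one_node);
  /* sum is INT_MIN as an INTEGER_CST, with TREE_OVERFLOW (sum) set
     because the signed addition overflowed.  */
#endif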
1122
1123 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1124 constant. We assume ARG1 and ARG2 have the same data type, or at least
1125 are the same kind of constant and the same machine mode. Return zero if
1126 combining the constants is not allowed in the current operating mode. */
1127
1128 static tree
1129 const_binop (enum tree_code code, tree arg1, tree arg2)
1130 {
1131 /* Sanity check for the recursive cases. */
1132 if (!arg1 || !arg2)
1133 return NULL_TREE;
1134
1135 STRIP_NOPS (arg1);
1136 STRIP_NOPS (arg2);
1137
1138 if (TREE_CODE (arg1) == INTEGER_CST)
1139 return int_const_binop (code, arg1, arg2);
1140
1141 if (TREE_CODE (arg1) == REAL_CST)
1142 {
1143 machine_mode mode;
1144 REAL_VALUE_TYPE d1;
1145 REAL_VALUE_TYPE d2;
1146 REAL_VALUE_TYPE value;
1147 REAL_VALUE_TYPE result;
1148 bool inexact;
1149 tree t, type;
1150
1151 /* The following codes are handled by real_arithmetic. */
1152 switch (code)
1153 {
1154 case PLUS_EXPR:
1155 case MINUS_EXPR:
1156 case MULT_EXPR:
1157 case RDIV_EXPR:
1158 case MIN_EXPR:
1159 case MAX_EXPR:
1160 break;
1161
1162 default:
1163 return NULL_TREE;
1164 }
1165
1166 d1 = TREE_REAL_CST (arg1);
1167 d2 = TREE_REAL_CST (arg2);
1168
1169 type = TREE_TYPE (arg1);
1170 mode = TYPE_MODE (type);
1171
1172 /* Don't perform operation if we honor signaling NaNs and
1173 either operand is a NaN. */
1174 if (HONOR_SNANS (mode)
1175 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1176 return NULL_TREE;
1177
1178 /* Don't perform operation if it would raise a division
1179 by zero exception. */
1180 if (code == RDIV_EXPR
1181 && REAL_VALUES_EQUAL (d2, dconst0)
1182 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1183 return NULL_TREE;
1184
1185 /* If either operand is a NaN, just return it. Otherwise, set up
1186 for floating-point trap; we return an overflow. */
1187 if (REAL_VALUE_ISNAN (d1))
1188 return arg1;
1189 else if (REAL_VALUE_ISNAN (d2))
1190 return arg2;
1191
1192 inexact = real_arithmetic (&value, code, &d1, &d2);
1193 real_convert (&result, mode, &value);
1194
1195 /* Don't constant fold this floating point operation if
1196 the result has overflowed and flag_trapping_math. */
1197 if (flag_trapping_math
1198 && MODE_HAS_INFINITIES (mode)
1199 && REAL_VALUE_ISINF (result)
1200 && !REAL_VALUE_ISINF (d1)
1201 && !REAL_VALUE_ISINF (d2))
1202 return NULL_TREE;
1203
1204 /* Don't constant fold this floating point operation if the
 1205 	 result may depend upon the run-time rounding mode and
1206 flag_rounding_math is set, or if GCC's software emulation
1207 is unable to accurately represent the result. */
1208 if ((flag_rounding_math
1209 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1210 && (inexact || !real_identical (&result, &value)))
1211 return NULL_TREE;
1212
1213 t = build_real (type, result);
1214
1215 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1216 return t;
1217 }
1218
1219 if (TREE_CODE (arg1) == FIXED_CST)
1220 {
1221 FIXED_VALUE_TYPE f1;
1222 FIXED_VALUE_TYPE f2;
1223 FIXED_VALUE_TYPE result;
1224 tree t, type;
1225 int sat_p;
1226 bool overflow_p;
1227
1228 /* The following codes are handled by fixed_arithmetic. */
1229 switch (code)
1230 {
1231 case PLUS_EXPR:
1232 case MINUS_EXPR:
1233 case MULT_EXPR:
1234 case TRUNC_DIV_EXPR:
1235 f2 = TREE_FIXED_CST (arg2);
1236 break;
1237
1238 case LSHIFT_EXPR:
1239 case RSHIFT_EXPR:
1240 {
1241 wide_int w2 = arg2;
1242 f2.data.high = w2.elt (1);
1243 f2.data.low = w2.elt (0);
1244 f2.mode = SImode;
1245 }
1246 break;
1247
1248 default:
1249 return NULL_TREE;
1250 }
1251
1252 f1 = TREE_FIXED_CST (arg1);
1253 type = TREE_TYPE (arg1);
1254 sat_p = TYPE_SATURATING (type);
1255 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1256 t = build_fixed (type, result);
1257 /* Propagate overflow flags. */
1258 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1259 TREE_OVERFLOW (t) = 1;
1260 return t;
1261 }
1262
1263 if (TREE_CODE (arg1) == COMPLEX_CST)
1264 {
1265 tree type = TREE_TYPE (arg1);
1266 tree r1 = TREE_REALPART (arg1);
1267 tree i1 = TREE_IMAGPART (arg1);
1268 tree r2 = TREE_REALPART (arg2);
1269 tree i2 = TREE_IMAGPART (arg2);
1270 tree real, imag;
1271
1272 switch (code)
1273 {
1274 case PLUS_EXPR:
1275 case MINUS_EXPR:
1276 real = const_binop (code, r1, r2);
1277 imag = const_binop (code, i1, i2);
1278 break;
1279
1280 case MULT_EXPR:
1281 if (COMPLEX_FLOAT_TYPE_P (type))
1282 return do_mpc_arg2 (arg1, arg2, type,
1283 /* do_nonfinite= */ folding_initializer,
1284 mpc_mul);
1285
1286 real = const_binop (MINUS_EXPR,
1287 const_binop (MULT_EXPR, r1, r2),
1288 const_binop (MULT_EXPR, i1, i2));
1289 imag = const_binop (PLUS_EXPR,
1290 const_binop (MULT_EXPR, r1, i2),
1291 const_binop (MULT_EXPR, i1, r2));
1292 break;
1293
1294 case RDIV_EXPR:
1295 if (COMPLEX_FLOAT_TYPE_P (type))
1296 return do_mpc_arg2 (arg1, arg2, type,
1297 /* do_nonfinite= */ folding_initializer,
1298 mpc_div);
1299 /* Fallthru ... */
1300 case TRUNC_DIV_EXPR:
1301 case CEIL_DIV_EXPR:
1302 case FLOOR_DIV_EXPR:
1303 case ROUND_DIV_EXPR:
1304 if (flag_complex_method == 0)
1305 {
1306 /* Keep this algorithm in sync with
1307 tree-complex.c:expand_complex_div_straight().
1308
1309 Expand complex division to scalars, straightforward algorithm.
1310 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1311 t = br*br + bi*bi
1312 */
1313 tree magsquared
1314 = const_binop (PLUS_EXPR,
1315 const_binop (MULT_EXPR, r2, r2),
1316 const_binop (MULT_EXPR, i2, i2));
1317 tree t1
1318 = const_binop (PLUS_EXPR,
1319 const_binop (MULT_EXPR, r1, r2),
1320 const_binop (MULT_EXPR, i1, i2));
1321 tree t2
1322 = const_binop (MINUS_EXPR,
1323 const_binop (MULT_EXPR, i1, r2),
1324 const_binop (MULT_EXPR, r1, i2));
1325
1326 real = const_binop (code, t1, magsquared);
1327 imag = const_binop (code, t2, magsquared);
1328 }
1329 else
1330 {
1331 /* Keep this algorithm in sync with
1332 tree-complex.c:expand_complex_div_wide().
1333
1334 Expand complex division to scalars, modified algorithm to minimize
1335 overflow with wide input ranges. */
1336 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1337 fold_abs_const (r2, TREE_TYPE (type)),
1338 fold_abs_const (i2, TREE_TYPE (type)));
1339
1340 if (integer_nonzerop (compare))
1341 {
1342 /* In the TRUE branch, we compute
1343 ratio = br/bi;
1344 div = (br * ratio) + bi;
1345 tr = (ar * ratio) + ai;
1346 ti = (ai * ratio) - ar;
1347 tr = tr / div;
1348 ti = ti / div; */
1349 tree ratio = const_binop (code, r2, i2);
1350 tree div = const_binop (PLUS_EXPR, i2,
1351 const_binop (MULT_EXPR, r2, ratio));
1352 real = const_binop (MULT_EXPR, r1, ratio);
1353 real = const_binop (PLUS_EXPR, real, i1);
1354 real = const_binop (code, real, div);
1355
1356 imag = const_binop (MULT_EXPR, i1, ratio);
1357 imag = const_binop (MINUS_EXPR, imag, r1);
1358 imag = const_binop (code, imag, div);
1359 }
1360 else
1361 {
 1362 		 /* In the FALSE branch, we compute
 1363 		    ratio = bi/br;
 1364 		    div = (bi * ratio) + br;
 1365 		    tr = (ai * ratio) + ar;
 1366 		    ti = ai - (ar * ratio);
 1367 		    tr = tr / div;
 1368 		    ti = ti / div; */
1369 tree ratio = const_binop (code, i2, r2);
1370 tree div = const_binop (PLUS_EXPR, r2,
1371 const_binop (MULT_EXPR, i2, ratio));
1372
1373 real = const_binop (MULT_EXPR, i1, ratio);
1374 real = const_binop (PLUS_EXPR, real, r1);
1375 real = const_binop (code, real, div);
1376
1377 imag = const_binop (MULT_EXPR, r1, ratio);
1378 imag = const_binop (MINUS_EXPR, i1, imag);
1379 imag = const_binop (code, imag, div);
1380 }
1381 }
1382 break;
1383
1384 default:
1385 return NULL_TREE;
1386 }
1387
1388 if (real && imag)
1389 return build_complex (type, real, imag);
1390 }
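
/* Editor's worked example of the wide-range formulas above, in exact
   arithmetic (illustrative, not part of the original file; the actual
   folding runs in the operand types): dividing 1 + 2i by 3 + 4i has
   |br| = 3 < |bi| = 4, so the TRUE branch applies with
   ratio = 3/4, div = 4 + 3*(3/4) = 25/4,
   tr = (1*(3/4) + 2) / div = (11/4) / (25/4) = 11/25, and
   ti = (2*(3/4) - 1) / div = (1/2) / (25/4) = 2/25,
   matching (1 + 2i)(3 - 4i) / 25 = (11 + 2i) / 25.  */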
1391
1392 if (TREE_CODE (arg1) == VECTOR_CST
1393 && TREE_CODE (arg2) == VECTOR_CST)
1394 {
1395 tree type = TREE_TYPE (arg1);
1396 int count = TYPE_VECTOR_SUBPARTS (type), i;
1397 tree *elts = XALLOCAVEC (tree, count);
1398
1399 for (i = 0; i < count; i++)
1400 {
1401 tree elem1 = VECTOR_CST_ELT (arg1, i);
1402 tree elem2 = VECTOR_CST_ELT (arg2, i);
1403
1404 elts[i] = const_binop (code, elem1, elem2);
1405
 1406 	  /* It is possible that const_binop cannot handle the given
 1407 	     code and returns NULL_TREE.  */
1408 if (elts[i] == NULL_TREE)
1409 return NULL_TREE;
1410 }
1411
1412 return build_vector (type, elts);
1413 }
1414
1415 /* Shifts allow a scalar offset for a vector. */
1416 if (TREE_CODE (arg1) == VECTOR_CST
1417 && TREE_CODE (arg2) == INTEGER_CST)
1418 {
1419 tree type = TREE_TYPE (arg1);
1420 int count = TYPE_VECTOR_SUBPARTS (type), i;
1421 tree *elts = XALLOCAVEC (tree, count);
1422
1423 for (i = 0; i < count; i++)
1424 {
1425 tree elem1 = VECTOR_CST_ELT (arg1, i);
1426
1427 elts[i] = const_binop (code, elem1, arg2);
1428
 1429 	  /* It is possible that const_binop cannot handle the given
 1430 	     code and returns NULL_TREE.  */
1431 if (elts[i] == NULL_TREE)
1432 return NULL_TREE;
1433 }
1434
1435 return build_vector (type, elts);
1436 }
1437 return NULL_TREE;
1438 }
1439
1440 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1441 indicates which particular sizetype to create. */
1442
1443 tree
1444 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1445 {
1446 return build_int_cst (sizetype_tab[(int) kind], number);
1447 }
1448 \f
1449 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1450 is a tree code. The type of the result is taken from the operands.
1451 Both must be equivalent integer types, ala int_binop_types_match_p.
1452 If the operands are constant, so is the result. */
1453
1454 tree
1455 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1456 {
1457 tree type = TREE_TYPE (arg0);
1458
1459 if (arg0 == error_mark_node || arg1 == error_mark_node)
1460 return error_mark_node;
1461
1462 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1463 TREE_TYPE (arg1)));
1464
1465 /* Handle the special case of two integer constants faster. */
1466 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1467 {
1468 /* And some specific cases even faster than that. */
1469 if (code == PLUS_EXPR)
1470 {
1471 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1472 return arg1;
1473 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1474 return arg0;
1475 }
1476 else if (code == MINUS_EXPR)
1477 {
1478 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1479 return arg0;
1480 }
1481 else if (code == MULT_EXPR)
1482 {
1483 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1484 return arg1;
1485 }
1486
1487 /* Handle general case of two integer constants. For sizetype
1488 constant calculations we always want to know about overflow,
1489 even in the unsigned case. */
1490 return int_const_binop_1 (code, arg0, arg1, -1);
1491 }
1492
1493 return fold_build2_loc (loc, code, type, arg0, arg1);
1494 }
1495
1496 /* Given two values, either both of sizetype or both of bitsizetype,
1497 compute the difference between the two values. Return the value
 1498    in a signed type corresponding to the type of the operands.  */
1499
1500 tree
1501 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1502 {
1503 tree type = TREE_TYPE (arg0);
1504 tree ctype;
1505
1506 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1507 TREE_TYPE (arg1)));
1508
1509 /* If the type is already signed, just do the simple thing. */
1510 if (!TYPE_UNSIGNED (type))
1511 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1512
1513 if (type == sizetype)
1514 ctype = ssizetype;
1515 else if (type == bitsizetype)
1516 ctype = sbitsizetype;
1517 else
1518 ctype = signed_type_for (type);
1519
1520 /* If either operand is not a constant, do the conversions to the signed
1521 type and subtract. The hardware will do the right thing with any
1522 overflow in the subtraction. */
1523 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1524 return size_binop_loc (loc, MINUS_EXPR,
1525 fold_convert_loc (loc, ctype, arg0),
1526 fold_convert_loc (loc, ctype, arg1));
1527
1528 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1529 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1530 overflow) and negate (which can't either). Special-case a result
1531 of zero while we're here. */
1532 if (tree_int_cst_equal (arg0, arg1))
1533 return build_int_cst (ctype, 0);
1534 else if (tree_int_cst_lt (arg1, arg0))
1535 return fold_convert_loc (loc, ctype,
1536 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1537 else
1538 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1539 fold_convert_loc (loc, ctype,
1540 size_binop_loc (loc,
1541 MINUS_EXPR,
1542 arg1, arg0)));
1543 }
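
/* Editor's example (illustrative, not part of the original file):
   sizetype is unsigned, so size_diffop_loc on the sizetype constants
   2 and 5 converts to the signed ssizetype and computes
   -(5 - 2) = -3 rather than letting 2 - 5 wrap to a huge unsigned
   value.  */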
1544 \f
1545 /* A subroutine of fold_convert_const handling conversions of an
1546 INTEGER_CST to another integer type. */
1547
1548 static tree
1549 fold_convert_const_int_from_int (tree type, const_tree arg1)
1550 {
1551 /* Given an integer constant, make new constant with new type,
1552 appropriately sign-extended or truncated. Use widest_int
 1553    so that any extension is done according to ARG1's type.  */
1554 return force_fit_type (type, wi::to_widest (arg1),
1555 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1556 TREE_OVERFLOW (arg1));
1557 }
1558
 1559 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1560 to an integer type. */
1561
1562 static tree
1563 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1564 {
1565 bool overflow = false;
1566 tree t;
1567
1568 /* The following code implements the floating point to integer
 1569    conversion rules required by the Java Language Specification:
 1570    IEEE NaNs are mapped to zero and values that overflow
1571 the target precision saturate, i.e. values greater than
1572 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1573 are mapped to INT_MIN. These semantics are allowed by the
1574 C and C++ standards that simply state that the behavior of
1575 FP-to-integer conversion is unspecified upon overflow. */
1576
1577 wide_int val;
1578 REAL_VALUE_TYPE r;
1579 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1580
1581 switch (code)
1582 {
1583 case FIX_TRUNC_EXPR:
1584 real_trunc (&r, VOIDmode, &x);
1585 break;
1586
1587 default:
1588 gcc_unreachable ();
1589 }
1590
1591 /* If R is NaN, return zero and show we have an overflow. */
1592 if (REAL_VALUE_ISNAN (r))
1593 {
1594 overflow = true;
1595 val = wi::zero (TYPE_PRECISION (type));
1596 }
1597
1598 /* See if R is less than the lower bound or greater than the
1599 upper bound. */
1600
1601 if (! overflow)
1602 {
1603 tree lt = TYPE_MIN_VALUE (type);
1604 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1605 if (REAL_VALUES_LESS (r, l))
1606 {
1607 overflow = true;
1608 val = lt;
1609 }
1610 }
1611
1612 if (! overflow)
1613 {
1614 tree ut = TYPE_MAX_VALUE (type);
1615 if (ut)
1616 {
1617 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1618 if (REAL_VALUES_LESS (u, r))
1619 {
1620 overflow = true;
1621 val = ut;
1622 }
1623 }
1624 }
1625
1626 if (! overflow)
1627 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1628
1629 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1630 return t;
1631 }
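
/* Editor's example (illustrative, not part of the original file):
   converting the REAL_CST 1.0e10 to a 32-bit int exceeds the upper
   bound, so the result saturates to INT_MAX (2147483647) and has
   TREE_OVERFLOW set; a NaN input converts to 0, likewise flagged.  */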
1632
1633 /* A subroutine of fold_convert_const handling conversions of a
1634 FIXED_CST to an integer type. */
1635
1636 static tree
1637 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1638 {
1639 tree t;
1640 double_int temp, temp_trunc;
1641 unsigned int mode;
1642
1643 /* Right shift FIXED_CST to temp by fbit. */
1644 temp = TREE_FIXED_CST (arg1).data;
1645 mode = TREE_FIXED_CST (arg1).mode;
1646 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1647 {
1648 temp = temp.rshift (GET_MODE_FBIT (mode),
1649 HOST_BITS_PER_DOUBLE_INT,
1650 SIGNED_FIXED_POINT_MODE_P (mode));
1651
1652 /* Left shift temp to temp_trunc by fbit. */
1653 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1654 HOST_BITS_PER_DOUBLE_INT,
1655 SIGNED_FIXED_POINT_MODE_P (mode));
1656 }
1657 else
1658 {
1659 temp = double_int_zero;
1660 temp_trunc = double_int_zero;
1661 }
1662
 1663   /* If FIXED_CST is negative, we need to round the value toward 0;
 1664      we do so by adding 1 to TEMP when the fractional bits are nonzero.  */
1665 if (SIGNED_FIXED_POINT_MODE_P (mode)
1666 && temp_trunc.is_negative ()
1667 && TREE_FIXED_CST (arg1).data != temp_trunc)
1668 temp += double_int_one;
1669
1670 /* Given a fixed-point constant, make new constant with new type,
1671 appropriately sign-extended or truncated. */
1672 t = force_fit_type (type, temp, -1,
1673 (temp.is_negative ()
1674 && (TYPE_UNSIGNED (type)
1675 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1676 | TREE_OVERFLOW (arg1));
1677
1678 return t;
1679 }
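
/* Editor's worked example (illustrative, not part of the original
   file): for the signed fixed-point value -2.5, the arithmetic right
   shift by FBIT truncates toward negative infinity, giving TEMP = -3;
   shifting back gives TEMP_TRUNC = -3.0 != -2.5, so the discarded
   fraction was nonzero and the "+ 1" above corrects TEMP to -2,
   i.e. rounding toward zero.  */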
1680
 1681 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1682 to another floating point type. */
1683
1684 static tree
1685 fold_convert_const_real_from_real (tree type, const_tree arg1)
1686 {
1687 REAL_VALUE_TYPE value;
1688 tree t;
1689
1690 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1691 t = build_real (type, value);
1692
1693 /* If converting an infinity or NAN to a representation that doesn't
1694 have one, set the overflow bit so that we can produce some kind of
1695 error message at the appropriate point if necessary. It's not the
1696 most user-friendly message, but it's better than nothing. */
1697 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1698 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1699 TREE_OVERFLOW (t) = 1;
1700 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1701 && !MODE_HAS_NANS (TYPE_MODE (type)))
1702 TREE_OVERFLOW (t) = 1;
1703 /* Regular overflow, conversion produced an infinity in a mode that
1704 can't represent them. */
1705 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1706 && REAL_VALUE_ISINF (value)
1707 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1708 TREE_OVERFLOW (t) = 1;
1709 else
1710 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1711 return t;
1712 }
1713
 1714 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1715 to a floating point type. */
1716
1717 static tree
1718 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1719 {
1720 REAL_VALUE_TYPE value;
1721 tree t;
1722
1723 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1724 t = build_real (type, value);
1725
1726 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1727 return t;
1728 }
1729
 1730 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1731 to another fixed-point type. */
1732
1733 static tree
1734 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1735 {
1736 FIXED_VALUE_TYPE value;
1737 tree t;
1738 bool overflow_p;
1739
1740 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1741 TYPE_SATURATING (type));
1742 t = build_fixed (type, value);
1743
1744 /* Propagate overflow flags. */
1745 if (overflow_p | TREE_OVERFLOW (arg1))
1746 TREE_OVERFLOW (t) = 1;
1747 return t;
1748 }
1749
 1750 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1751 to a fixed-point type. */
1752
1753 static tree
1754 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1755 {
1756 FIXED_VALUE_TYPE value;
1757 tree t;
1758 bool overflow_p;
1759 double_int di;
1760
1761 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1762
1763 di.low = TREE_INT_CST_ELT (arg1, 0);
1764 if (TREE_INT_CST_NUNITS (arg1) == 1)
1765 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1766 else
1767 di.high = TREE_INT_CST_ELT (arg1, 1);
1768
1769 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1770 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1771 TYPE_SATURATING (type));
1772 t = build_fixed (type, value);
1773
1774 /* Propagate overflow flags. */
1775 if (overflow_p | TREE_OVERFLOW (arg1))
1776 TREE_OVERFLOW (t) = 1;
1777 return t;
1778 }
1779
 1780 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1781 to a fixed-point type. */
1782
1783 static tree
1784 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1785 {
1786 FIXED_VALUE_TYPE value;
1787 tree t;
1788 bool overflow_p;
1789
1790 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1791 &TREE_REAL_CST (arg1),
1792 TYPE_SATURATING (type));
1793 t = build_fixed (type, value);
1794
1795 /* Propagate overflow flags. */
1796 if (overflow_p | TREE_OVERFLOW (arg1))
1797 TREE_OVERFLOW (t) = 1;
1798 return t;
1799 }
1800
1801 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1802 type TYPE. If no simplification can be done return NULL_TREE. */
1803
1804 static tree
1805 fold_convert_const (enum tree_code code, tree type, tree arg1)
1806 {
1807 if (TREE_TYPE (arg1) == type)
1808 return arg1;
1809
1810 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1811 || TREE_CODE (type) == OFFSET_TYPE)
1812 {
1813 if (TREE_CODE (arg1) == INTEGER_CST)
1814 return fold_convert_const_int_from_int (type, arg1);
1815 else if (TREE_CODE (arg1) == REAL_CST)
1816 return fold_convert_const_int_from_real (code, type, arg1);
1817 else if (TREE_CODE (arg1) == FIXED_CST)
1818 return fold_convert_const_int_from_fixed (type, arg1);
1819 }
1820 else if (TREE_CODE (type) == REAL_TYPE)
1821 {
1822 if (TREE_CODE (arg1) == INTEGER_CST)
1823 return build_real_from_int_cst (type, arg1);
1824 else if (TREE_CODE (arg1) == REAL_CST)
1825 return fold_convert_const_real_from_real (type, arg1);
1826 else if (TREE_CODE (arg1) == FIXED_CST)
1827 return fold_convert_const_real_from_fixed (type, arg1);
1828 }
1829 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1830 {
1831 if (TREE_CODE (arg1) == FIXED_CST)
1832 return fold_convert_const_fixed_from_fixed (type, arg1);
1833 else if (TREE_CODE (arg1) == INTEGER_CST)
1834 return fold_convert_const_fixed_from_int (type, arg1);
1835 else if (TREE_CODE (arg1) == REAL_CST)
1836 return fold_convert_const_fixed_from_real (type, arg1);
1837 }
1838 return NULL_TREE;
1839 }
1840
1841 /* Construct a vector of zero elements of vector type TYPE. */
1842
1843 static tree
1844 build_zero_vector (tree type)
1845 {
1846 tree t;
1847
1848 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1849 return build_vector_from_val (type, t);
1850 }
1851
1852 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1853
1854 bool
1855 fold_convertible_p (const_tree type, const_tree arg)
1856 {
1857 tree orig = TREE_TYPE (arg);
1858
1859 if (type == orig)
1860 return true;
1861
1862 if (TREE_CODE (arg) == ERROR_MARK
1863 || TREE_CODE (type) == ERROR_MARK
1864 || TREE_CODE (orig) == ERROR_MARK)
1865 return false;
1866
1867 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1868 return true;
1869
1870 switch (TREE_CODE (type))
1871 {
1872 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1873 case POINTER_TYPE: case REFERENCE_TYPE:
1874 case OFFSET_TYPE:
1875 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1876 || TREE_CODE (orig) == OFFSET_TYPE)
1877 return true;
1878 return (TREE_CODE (orig) == VECTOR_TYPE
1879 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1880
1881 case REAL_TYPE:
1882 case FIXED_POINT_TYPE:
1883 case COMPLEX_TYPE:
1884 case VECTOR_TYPE:
1885 case VOID_TYPE:
1886 return TREE_CODE (type) == TREE_CODE (orig);
1887
1888 default:
1889 return false;
1890 }
1891 }
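
/* For example, an int-typed ARG is convertible to long or to an enum
   type via a NOP_EXPR, while a RECORD_TYPE target falls through to the
   default case and is rejected; vector sources additionally require a
   matching TYPE_SIZE, as checked above.  */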
1892
1893 /* Convert expression ARG to type TYPE. Used by the middle-end for
1894 simple conversions in preference to calling the front-end's convert. */
1895
1896 tree
1897 fold_convert_loc (location_t loc, tree type, tree arg)
1898 {
1899 tree orig = TREE_TYPE (arg);
1900 tree tem;
1901
1902 if (type == orig)
1903 return arg;
1904
1905 if (TREE_CODE (arg) == ERROR_MARK
1906 || TREE_CODE (type) == ERROR_MARK
1907 || TREE_CODE (orig) == ERROR_MARK)
1908 return error_mark_node;
1909
1910 switch (TREE_CODE (type))
1911 {
1912 case POINTER_TYPE:
1913 case REFERENCE_TYPE:
1914 /* Handle conversions between pointers to different address spaces. */
1915 if (POINTER_TYPE_P (orig)
1916 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1917 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1918 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1919 /* fall through */
1920
1921 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1922 case OFFSET_TYPE:
1923 if (TREE_CODE (arg) == INTEGER_CST)
1924 {
1925 tem = fold_convert_const (NOP_EXPR, type, arg);
1926 if (tem != NULL_TREE)
1927 return tem;
1928 }
1929 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1930 || TREE_CODE (orig) == OFFSET_TYPE)
1931 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1932 if (TREE_CODE (orig) == COMPLEX_TYPE)
1933 return fold_convert_loc (loc, type,
1934 fold_build1_loc (loc, REALPART_EXPR,
1935 TREE_TYPE (orig), arg));
1936 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1937 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1938 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1939
1940 case REAL_TYPE:
1941 if (TREE_CODE (arg) == INTEGER_CST)
1942 {
1943 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1944 if (tem != NULL_TREE)
1945 return tem;
1946 }
1947 else if (TREE_CODE (arg) == REAL_CST)
1948 {
1949 tem = fold_convert_const (NOP_EXPR, type, arg);
1950 if (tem != NULL_TREE)
1951 return tem;
1952 }
1953 else if (TREE_CODE (arg) == FIXED_CST)
1954 {
1955 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1956 if (tem != NULL_TREE)
1957 return tem;
1958 }
1959
1960 switch (TREE_CODE (orig))
1961 {
1962 case INTEGER_TYPE:
1963 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1964 case POINTER_TYPE: case REFERENCE_TYPE:
1965 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1966
1967 case REAL_TYPE:
1968 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1969
1970 case FIXED_POINT_TYPE:
1971 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1972
1973 case COMPLEX_TYPE:
1974 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1975 return fold_convert_loc (loc, type, tem);
1976
1977 default:
1978 gcc_unreachable ();
1979 }
1980
1981 case FIXED_POINT_TYPE:
1982 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1983 || TREE_CODE (arg) == REAL_CST)
1984 {
1985 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1986 if (tem != NULL_TREE)
1987 goto fold_convert_exit;
1988 }
1989
1990 switch (TREE_CODE (orig))
1991 {
1992 case FIXED_POINT_TYPE:
1993 case INTEGER_TYPE:
1994 case ENUMERAL_TYPE:
1995 case BOOLEAN_TYPE:
1996 case REAL_TYPE:
1997 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1998
1999 case COMPLEX_TYPE:
2000 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2001 return fold_convert_loc (loc, type, tem);
2002
2003 default:
2004 gcc_unreachable ();
2005 }
2006
2007 case COMPLEX_TYPE:
2008 switch (TREE_CODE (orig))
2009 {
2010 case INTEGER_TYPE:
2011 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2012 case POINTER_TYPE: case REFERENCE_TYPE:
2013 case REAL_TYPE:
2014 case FIXED_POINT_TYPE:
2015 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2016 fold_convert_loc (loc, TREE_TYPE (type), arg),
2017 fold_convert_loc (loc, TREE_TYPE (type),
2018 integer_zero_node));
2019 case COMPLEX_TYPE:
2020 {
2021 tree rpart, ipart;
2022
2023 if (TREE_CODE (arg) == COMPLEX_EXPR)
2024 {
2025 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2026 TREE_OPERAND (arg, 0));
2027 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2028 TREE_OPERAND (arg, 1));
2029 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2030 }
2031
2032 arg = save_expr (arg);
2033 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2034 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2035 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2036 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2037 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2038 }
2039
2040 default:
2041 gcc_unreachable ();
2042 }
2043
2044 case VECTOR_TYPE:
2045 if (integer_zerop (arg))
2046 return build_zero_vector (type);
2047 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2048 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2049 || TREE_CODE (orig) == VECTOR_TYPE);
2050 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2051
2052 case VOID_TYPE:
2053 tem = fold_ignored_result (arg);
2054 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2055
2056 default:
2057 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2058 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2059 gcc_unreachable ();
2060 }
2061 fold_convert_exit:
2062 protected_set_expr_location_unshare (tem, loc);
2063 return tem;
2064 }
2065 \f
2066 /* Return false if expr can be assumed not to be an lvalue, true
2067 otherwise. */
2068
2069 static bool
2070 maybe_lvalue_p (const_tree x)
2071 {
2072 /* We only need to wrap lvalue tree codes. */
2073 switch (TREE_CODE (x))
2074 {
2075 case VAR_DECL:
2076 case PARM_DECL:
2077 case RESULT_DECL:
2078 case LABEL_DECL:
2079 case FUNCTION_DECL:
2080 case SSA_NAME:
2081
2082 case COMPONENT_REF:
2083 case MEM_REF:
2084 case INDIRECT_REF:
2085 case ARRAY_REF:
2086 case ARRAY_RANGE_REF:
2087 case BIT_FIELD_REF:
2088 case OBJ_TYPE_REF:
2089
2090 case REALPART_EXPR:
2091 case IMAGPART_EXPR:
2092 case PREINCREMENT_EXPR:
2093 case PREDECREMENT_EXPR:
2094 case SAVE_EXPR:
2095 case TRY_CATCH_EXPR:
2096 case WITH_CLEANUP_EXPR:
2097 case COMPOUND_EXPR:
2098 case MODIFY_EXPR:
2099 case TARGET_EXPR:
2100 case COND_EXPR:
2101 case BIND_EXPR:
2102 break;
2103
2104 default:
2105 /* Assume the worst for front-end tree codes. */
2106 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2107 break;
2108 return false;
2109 }
2110
2111 return true;
2112 }
2113
2114 /* Return an expr equal to X but certainly not valid as an lvalue. */
2115
2116 tree
2117 non_lvalue_loc (location_t loc, tree x)
2118 {
2119 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2120 us. */
2121 if (in_gimple_form)
2122 return x;
2123
2124 if (! maybe_lvalue_p (x))
2125 return x;
2126 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2127 }
2128
2129 /* Historically this built an expr equal to X but not valid as a pedantic
2130 lvalue; nowadays it just returns X with its source location set to LOC. */
2131
2132 static tree
2133 pedantic_non_lvalue_loc (location_t loc, tree x)
2134 {
2135 return protected_set_expr_location_unshare (x, loc);
2136 }
2137 \f
2138 /* Given a tree comparison code, return the code that is the logical inverse.
2139 It is generally not safe to do this for floating-point comparisons, except
2140 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2141 ERROR_MARK in this case. */
2142
2143 enum tree_code
2144 invert_tree_comparison (enum tree_code code, bool honor_nans)
2145 {
2146 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2147 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2148 return ERROR_MARK;
2149
2150 switch (code)
2151 {
2152 case EQ_EXPR:
2153 return NE_EXPR;
2154 case NE_EXPR:
2155 return EQ_EXPR;
2156 case GT_EXPR:
2157 return honor_nans ? UNLE_EXPR : LE_EXPR;
2158 case GE_EXPR:
2159 return honor_nans ? UNLT_EXPR : LT_EXPR;
2160 case LT_EXPR:
2161 return honor_nans ? UNGE_EXPR : GE_EXPR;
2162 case LE_EXPR:
2163 return honor_nans ? UNGT_EXPR : GT_EXPR;
2164 case LTGT_EXPR:
2165 return UNEQ_EXPR;
2166 case UNEQ_EXPR:
2167 return LTGT_EXPR;
2168 case UNGT_EXPR:
2169 return LE_EXPR;
2170 case UNGE_EXPR:
2171 return LT_EXPR;
2172 case UNLT_EXPR:
2173 return GE_EXPR;
2174 case UNLE_EXPR:
2175 return GT_EXPR;
2176 case ORDERED_EXPR:
2177 return UNORDERED_EXPR;
2178 case UNORDERED_EXPR:
2179 return ORDERED_EXPR;
2180 default:
2181 gcc_unreachable ();
2182 }
2183 }
2184
2185 /* Similar, but return the comparison that results if the operands are
2186 swapped. This is safe for floating-point. */
2187
2188 enum tree_code
2189 swap_tree_comparison (enum tree_code code)
2190 {
2191 switch (code)
2192 {
2193 case EQ_EXPR:
2194 case NE_EXPR:
2195 case ORDERED_EXPR:
2196 case UNORDERED_EXPR:
2197 case LTGT_EXPR:
2198 case UNEQ_EXPR:
2199 return code;
2200 case GT_EXPR:
2201 return LT_EXPR;
2202 case GE_EXPR:
2203 return LE_EXPR;
2204 case LT_EXPR:
2205 return GT_EXPR;
2206 case LE_EXPR:
2207 return GE_EXPR;
2208 case UNGT_EXPR:
2209 return UNLT_EXPR;
2210 case UNGE_EXPR:
2211 return UNLE_EXPR;
2212 case UNLT_EXPR:
2213 return UNGT_EXPR;
2214 case UNLE_EXPR:
2215 return UNGE_EXPR;
2216 default:
2217 gcc_unreachable ();
2218 }
2219 }
2220
2221
2222 /* Convert a comparison tree code from an enum tree_code representation
2223 into a compcode bit-based encoding. This function is the inverse of
2224 compcode_to_comparison. */
2225
2226 static enum comparison_code
2227 comparison_to_compcode (enum tree_code code)
2228 {
2229 switch (code)
2230 {
2231 case LT_EXPR:
2232 return COMPCODE_LT;
2233 case EQ_EXPR:
2234 return COMPCODE_EQ;
2235 case LE_EXPR:
2236 return COMPCODE_LE;
2237 case GT_EXPR:
2238 return COMPCODE_GT;
2239 case NE_EXPR:
2240 return COMPCODE_NE;
2241 case GE_EXPR:
2242 return COMPCODE_GE;
2243 case ORDERED_EXPR:
2244 return COMPCODE_ORD;
2245 case UNORDERED_EXPR:
2246 return COMPCODE_UNORD;
2247 case UNLT_EXPR:
2248 return COMPCODE_UNLT;
2249 case UNEQ_EXPR:
2250 return COMPCODE_UNEQ;
2251 case UNLE_EXPR:
2252 return COMPCODE_UNLE;
2253 case UNGT_EXPR:
2254 return COMPCODE_UNGT;
2255 case LTGT_EXPR:
2256 return COMPCODE_LTGT;
2257 case UNGE_EXPR:
2258 return COMPCODE_UNGE;
2259 default:
2260 gcc_unreachable ();
2261 }
2262 }
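
/* The encoding makes each comparison a bitmask over the four possible
   outcomes LT (1), EQ (2), GT (4) and UNORD (8): for instance
   COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ (1 | 2 == 3) and
   COMPCODE_LTGT is COMPCODE_LT | COMPCODE_GT (1 | 4 == 5).  This is
   what lets combine_comparisons below merge two predicates on the same
   operands with plain bitwise AND/OR.  */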
2263
2264 /* Convert a compcode bit-based encoding of a comparison operator back
2265 to GCC's enum tree_code representation. This function is the
2266 inverse of comparison_to_compcode. */
2267
2268 static enum tree_code
2269 compcode_to_comparison (enum comparison_code code)
2270 {
2271 switch (code)
2272 {
2273 case COMPCODE_LT:
2274 return LT_EXPR;
2275 case COMPCODE_EQ:
2276 return EQ_EXPR;
2277 case COMPCODE_LE:
2278 return LE_EXPR;
2279 case COMPCODE_GT:
2280 return GT_EXPR;
2281 case COMPCODE_NE:
2282 return NE_EXPR;
2283 case COMPCODE_GE:
2284 return GE_EXPR;
2285 case COMPCODE_ORD:
2286 return ORDERED_EXPR;
2287 case COMPCODE_UNORD:
2288 return UNORDERED_EXPR;
2289 case COMPCODE_UNLT:
2290 return UNLT_EXPR;
2291 case COMPCODE_UNEQ:
2292 return UNEQ_EXPR;
2293 case COMPCODE_UNLE:
2294 return UNLE_EXPR;
2295 case COMPCODE_UNGT:
2296 return UNGT_EXPR;
2297 case COMPCODE_LTGT:
2298 return LTGT_EXPR;
2299 case COMPCODE_UNGE:
2300 return UNGE_EXPR;
2301 default:
2302 gcc_unreachable ();
2303 }
2304 }
2305
2306 /* Return a tree for the comparison which is the combination of
2307 doing the AND or OR (depending on CODE) of the two operations LCODE
2308 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2309 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2310 if this makes the transformation invalid. */
2311
2312 tree
2313 combine_comparisons (location_t loc,
2314 enum tree_code code, enum tree_code lcode,
2315 enum tree_code rcode, tree truth_type,
2316 tree ll_arg, tree lr_arg)
2317 {
2318 bool honor_nans = HONOR_NANS (element_mode (ll_arg));
2319 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2320 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2321 int compcode;
2322
2323 switch (code)
2324 {
2325 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2326 compcode = lcompcode & rcompcode;
2327 break;
2328
2329 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2330 compcode = lcompcode | rcompcode;
2331 break;
2332
2333 default:
2334 return NULL_TREE;
2335 }
2336
2337 if (!honor_nans)
2338 {
2339 /* Eliminate unordered comparisons, as well as LTGT and ORD
2340 which are not used unless the mode has NaNs. */
2341 compcode &= ~COMPCODE_UNORD;
2342 if (compcode == COMPCODE_LTGT)
2343 compcode = COMPCODE_NE;
2344 else if (compcode == COMPCODE_ORD)
2345 compcode = COMPCODE_TRUE;
2346 }
2347 else if (flag_trapping_math)
2348 {
2349 /* Check that the original operation and the optimized ones will trap
2350 under the same condition. */
2351 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2352 && (lcompcode != COMPCODE_EQ)
2353 && (lcompcode != COMPCODE_ORD);
2354 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2355 && (rcompcode != COMPCODE_EQ)
2356 && (rcompcode != COMPCODE_ORD);
2357 bool trap = (compcode & COMPCODE_UNORD) == 0
2358 && (compcode != COMPCODE_EQ)
2359 && (compcode != COMPCODE_ORD);
2360
2361 /* In a short-circuited boolean expression the LHS might be
2362 such that the RHS, if evaluated, will never trap. For
2363 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2364 if neither x nor y is NaN. (This is a mixed blessing: for
2365 example, the expression above will never trap, hence
2366 optimizing it to x < y would be invalid). */
2367 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2368 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2369 rtrap = false;
2370
2371 /* If the comparison was short-circuited, and only the RHS
2372 trapped, we may now generate a spurious trap. */
2373 if (rtrap && !ltrap
2374 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2375 return NULL_TREE;
2376
2377 /* If we changed the conditions that cause a trap, we lose. */
2378 if ((ltrap || rtrap) != trap)
2379 return NULL_TREE;
2380 }
2381
2382 if (compcode == COMPCODE_TRUE)
2383 return constant_boolean_node (true, truth_type);
2384 else if (compcode == COMPCODE_FALSE)
2385 return constant_boolean_node (false, truth_type);
2386 else
2387 {
2388 enum tree_code tcode;
2389
2390 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2391 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2392 }
2393 }
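
/* Worked example: for "a < b || a == b" the compcodes combine as
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so (subject to the NaN and
   trapping-math checks above) the pair folds to the single comparison
   "a <= b".  */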
2394 \f
2395 /* Return nonzero if two operands (typically of the same tree node)
2396 are necessarily equal. If either argument has side-effects this
2397 function returns zero. FLAGS modifies behavior as follows:
2398
2399 If OEP_ONLY_CONST is set, only return nonzero for constants.
2400 This function tests whether the operands are indistinguishable;
2401 it does not test whether they are equal using C's == operation.
2402 The distinction is important for IEEE floating point, because
2403 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2404 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2405
2406 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2407 even though it may hold multiple values during a function.
2408 This is because a GCC tree node guarantees that nothing else is
2409 executed between the evaluation of its "operands" (which may often
2410 be evaluated in arbitrary order). Hence if the operands themselves
2411 have no side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2412 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2413 unset means assuming isochronic (or instantaneous) tree equivalence.
2414 Unless comparing arbitrary expression trees, such as from different
2415 statements, this flag can usually be left unset.
2416
2417 If OEP_PURE_SAME is set, then pure functions with identical arguments
2418 are considered the same. It is used when the caller has other ways
2419 to ensure that global memory is unchanged in between. */
2420
2421 int
2422 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2423 {
2424 /* If either is ERROR_MARK, they aren't equal. */
2425 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2426 || TREE_TYPE (arg0) == error_mark_node
2427 || TREE_TYPE (arg1) == error_mark_node)
2428 return 0;
2429
2430 /* Similarly, if either does not have a type (like a released SSA name),
2431 they aren't equal. */
2432 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2433 return 0;
2434
2435 /* Check equality of integer constants before bailing out due to
2436 precision differences. */
2437 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2438 return tree_int_cst_equal (arg0, arg1);
2439
2440 /* If both types don't have the same signedness, then we can't consider
2441 them equal. We must check this before the STRIP_NOPS calls
2442 because they may change the signedness of the arguments. As pointers
2443 strictly don't have a signedness, require either two pointers or
2444 two non-pointers as well. */
2445 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2446 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2447 return 0;
2448
2449 /* We cannot consider pointers to different address spaces equal. */
2450 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2451 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2452 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2453 return 0;
2454
2455 /* If both types don't have the same precision, then it is not safe
2456 to strip NOPs. */
2457 if (element_precision (TREE_TYPE (arg0))
2458 != element_precision (TREE_TYPE (arg1)))
2459 return 0;
2460
2461 STRIP_NOPS (arg0);
2462 STRIP_NOPS (arg1);
2463
2464 /* In case both args are comparisons but with different comparison
2465 code, try to swap the comparison operands of one arg to produce
2466 a match and compare that variant. */
2467 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2468 && COMPARISON_CLASS_P (arg0)
2469 && COMPARISON_CLASS_P (arg1))
2470 {
2471 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2472
2473 if (TREE_CODE (arg0) == swap_code)
2474 return operand_equal_p (TREE_OPERAND (arg0, 0),
2475 TREE_OPERAND (arg1, 1), flags)
2476 && operand_equal_p (TREE_OPERAND (arg0, 1),
2477 TREE_OPERAND (arg1, 0), flags);
2478 }
2479
2480 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2481 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2482 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2483 return 0;
2484
2485 /* This is needed for conversions and for COMPONENT_REF.
2486 Might as well play it safe and always test this. */
2487 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2488 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2489 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2490 return 0;
2491
2492 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2493 We don't care about side effects in that case because the SAVE_EXPR
2494 takes care of that for us. In all other cases, two expressions are
2495 equal if they have no side effects. If we have two identical
2496 expressions with side effects that should be treated the same due
2497 to the only side effects being identical SAVE_EXPR's, that will
2498 be detected in the recursive calls below.
2499 If we are taking an invariant address of two identical objects
2500 they are necessarily equal as well. */
2501 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2502 && (TREE_CODE (arg0) == SAVE_EXPR
2503 || (flags & OEP_CONSTANT_ADDRESS_OF)
2504 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2505 return 1;
2506
2507 /* Next handle constant cases, those for which we can return 1 even
2508 if ONLY_CONST is set. */
2509 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2510 switch (TREE_CODE (arg0))
2511 {
2512 case INTEGER_CST:
2513 return tree_int_cst_equal (arg0, arg1);
2514
2515 case FIXED_CST:
2516 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2517 TREE_FIXED_CST (arg1));
2518
2519 case REAL_CST:
2520 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2521 TREE_REAL_CST (arg1)))
2522 return 1;
2523
2524
2525 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2526 {
2527 /* If we do not distinguish between signed and unsigned zero,
2528 consider them equal. */
2529 if (real_zerop (arg0) && real_zerop (arg1))
2530 return 1;
2531 }
2532 return 0;
2533
2534 case VECTOR_CST:
2535 {
2536 unsigned i;
2537
2538 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2539 return 0;
2540
2541 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2542 {
2543 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2544 VECTOR_CST_ELT (arg1, i), flags))
2545 return 0;
2546 }
2547 return 1;
2548 }
2549
2550 case COMPLEX_CST:
2551 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2552 flags)
2553 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2554 flags));
2555
2556 case STRING_CST:
2557 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2558 && ! memcmp (TREE_STRING_POINTER (arg0),
2559 TREE_STRING_POINTER (arg1),
2560 TREE_STRING_LENGTH (arg0)));
2561
2562 case ADDR_EXPR:
2563 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2564 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2565 ? OEP_CONSTANT_ADDRESS_OF : 0);
2566 default:
2567 break;
2568 }
2569
2570 if (flags & OEP_ONLY_CONST)
2571 return 0;
2572
2573 /* Define macros to test an operand from arg0 and arg1 for equality and a
2574 variant that allows null and views null as being different from any
2575 non-null value. In the latter case, if either is null, then both
2576 must be; otherwise, do the normal comparison. */
2577 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2578 TREE_OPERAND (arg1, N), flags)
2579
2580 #define OP_SAME_WITH_NULL(N) \
2581 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2582 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2583
2584 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2585 {
2586 case tcc_unary:
2587 /* Two conversions are equal only if signedness and modes match. */
2588 switch (TREE_CODE (arg0))
2589 {
2590 CASE_CONVERT:
2591 case FIX_TRUNC_EXPR:
2592 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2593 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2594 return 0;
2595 break;
2596 default:
2597 break;
2598 }
2599
2600 return OP_SAME (0);
2601
2602
2603 case tcc_comparison:
2604 case tcc_binary:
2605 if (OP_SAME (0) && OP_SAME (1))
2606 return 1;
2607
2608 /* For commutative ops, allow the other order. */
2609 return (commutative_tree_code (TREE_CODE (arg0))
2610 && operand_equal_p (TREE_OPERAND (arg0, 0),
2611 TREE_OPERAND (arg1, 1), flags)
2612 && operand_equal_p (TREE_OPERAND (arg0, 1),
2613 TREE_OPERAND (arg1, 0), flags));
2614
2615 case tcc_reference:
2616 /* If either of the pointer (or reference) expressions we are
2617 dereferencing contain a side effect, these cannot be equal,
2618 but their addresses can be. */
2619 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2620 && (TREE_SIDE_EFFECTS (arg0)
2621 || TREE_SIDE_EFFECTS (arg1)))
2622 return 0;
2623
2624 switch (TREE_CODE (arg0))
2625 {
2626 case INDIRECT_REF:
2627 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2628 return OP_SAME (0);
2629
2630 case REALPART_EXPR:
2631 case IMAGPART_EXPR:
2632 return OP_SAME (0);
2633
2634 case TARGET_MEM_REF:
2635 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2636 /* Require equal extra operands and then fall through to MEM_REF
2637 handling of the two common operands. */
2638 if (!OP_SAME_WITH_NULL (2)
2639 || !OP_SAME_WITH_NULL (3)
2640 || !OP_SAME_WITH_NULL (4))
2641 return 0;
2642 /* Fallthru. */
2643 case MEM_REF:
2644 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2645 /* Require equal access sizes, and similar pointer types.
2646 We can have incomplete types for array references of
2647 variable-sized arrays from the Fortran frontend
2648 though. Also verify the types are compatible. */
2649 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2650 || (TYPE_SIZE (TREE_TYPE (arg0))
2651 && TYPE_SIZE (TREE_TYPE (arg1))
2652 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2653 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2654 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2655 && alias_ptr_types_compatible_p
2656 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2657 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2658 && OP_SAME (0) && OP_SAME (1));
2659
2660 case ARRAY_REF:
2661 case ARRAY_RANGE_REF:
2662 /* Operands 2 and 3 may be null.
2663 Compare the array index by value first if it is constant, as we
2664 may have different types but the same value here. */
2665 if (!OP_SAME (0))
2666 return 0;
2667 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2668 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2669 TREE_OPERAND (arg1, 1))
2670 || OP_SAME (1))
2671 && OP_SAME_WITH_NULL (2)
2672 && OP_SAME_WITH_NULL (3));
2673
2674 case COMPONENT_REF:
2675 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2676 may be NULL when we're called to compare MEM_EXPRs. */
2677 if (!OP_SAME_WITH_NULL (0)
2678 || !OP_SAME (1))
2679 return 0;
2680 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2681 return OP_SAME_WITH_NULL (2);
2682
2683 case BIT_FIELD_REF:
2684 if (!OP_SAME (0))
2685 return 0;
2686 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2687 return OP_SAME (1) && OP_SAME (2);
2688
2689 default:
2690 return 0;
2691 }
2692
2693 case tcc_expression:
2694 switch (TREE_CODE (arg0))
2695 {
2696 case ADDR_EXPR:
2697 case TRUTH_NOT_EXPR:
2698 return OP_SAME (0);
2699
2700 case TRUTH_ANDIF_EXPR:
2701 case TRUTH_ORIF_EXPR:
2702 return OP_SAME (0) && OP_SAME (1);
2703
2704 case FMA_EXPR:
2705 case WIDEN_MULT_PLUS_EXPR:
2706 case WIDEN_MULT_MINUS_EXPR:
2707 if (!OP_SAME (2))
2708 return 0;
2709 /* The multiplication operands are commutative. */
2710 /* FALLTHRU */
2711
2712 case TRUTH_AND_EXPR:
2713 case TRUTH_OR_EXPR:
2714 case TRUTH_XOR_EXPR:
2715 if (OP_SAME (0) && OP_SAME (1))
2716 return 1;
2717
2718 /* Otherwise take into account this is a commutative operation. */
2719 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2720 TREE_OPERAND (arg1, 1), flags)
2721 && operand_equal_p (TREE_OPERAND (arg0, 1),
2722 TREE_OPERAND (arg1, 0), flags));
2723
2724 case COND_EXPR:
2725 case VEC_COND_EXPR:
2726 case DOT_PROD_EXPR:
2727 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2728
2729 default:
2730 return 0;
2731 }
2732
2733 case tcc_vl_exp:
2734 switch (TREE_CODE (arg0))
2735 {
2736 case CALL_EXPR:
2737 /* If the CALL_EXPRs call different functions, then they
2738 clearly cannot be equal. */
2739 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2740 flags))
2741 return 0;
2742
2743 {
2744 unsigned int cef = call_expr_flags (arg0);
2745 if (flags & OEP_PURE_SAME)
2746 cef &= ECF_CONST | ECF_PURE;
2747 else
2748 cef &= ECF_CONST;
2749 if (!cef)
2750 return 0;
2751 }
2752
2753 /* Now see if all the arguments are the same. */
2754 {
2755 const_call_expr_arg_iterator iter0, iter1;
2756 const_tree a0, a1;
2757 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2758 a1 = first_const_call_expr_arg (arg1, &iter1);
2759 a0 && a1;
2760 a0 = next_const_call_expr_arg (&iter0),
2761 a1 = next_const_call_expr_arg (&iter1))
2762 if (! operand_equal_p (a0, a1, flags))
2763 return 0;
2764
2765 /* If we get here and both argument lists are exhausted
2766 then the CALL_EXPRs are equal. */
2767 return ! (a0 || a1);
2768 }
2769 default:
2770 return 0;
2771 }
2772
2773 case tcc_declaration:
2774 /* Consider __builtin_sqrt equal to sqrt. */
2775 return (TREE_CODE (arg0) == FUNCTION_DECL
2776 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2777 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2778 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2779
2780 default:
2781 return 0;
2782 }
2783
2784 #undef OP_SAME
2785 #undef OP_SAME_WITH_NULL
2786 }
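
/* E.g. the trees for "a + b" and "b + a" compare equal here because
   PLUS_EXPR is commutative, whereas "f () + a" and "a + f ()" do not
   when f is neither ECF_CONST nor ECF_PURE: a call with side effects
   is never considered equal to another evaluation of itself.  */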
2787 \f
2788 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2789 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2790
2791 When in doubt, return 0. */
2792
2793 static int
2794 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2795 {
2796 int unsignedp1, unsignedpo;
2797 tree primarg0, primarg1, primother;
2798 unsigned int correct_width;
2799
2800 if (operand_equal_p (arg0, arg1, 0))
2801 return 1;
2802
2803 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2804 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2805 return 0;
2806
2807 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2808 and see if the inner values are the same. This removes any
2809 signedness comparison, which doesn't matter here. */
2810 primarg0 = arg0, primarg1 = arg1;
2811 STRIP_NOPS (primarg0);
2812 STRIP_NOPS (primarg1);
2813 if (operand_equal_p (primarg0, primarg1, 0))
2814 return 1;
2815
2816 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2817 actual comparison operand, ARG0.
2818
2819 First throw away any conversions to wider types
2820 already present in the operands. */
2821
2822 primarg1 = get_narrower (arg1, &unsignedp1);
2823 primother = get_narrower (other, &unsignedpo);
2824
2825 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2826 if (unsignedp1 == unsignedpo
2827 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2828 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2829 {
2830 tree type = TREE_TYPE (arg0);
2831
2832 /* Make sure shorter operand is extended the right way
2833 to match the longer operand. */
2834 primarg1 = fold_convert (signed_or_unsigned_type_for
2835 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2836
2837 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2838 return 1;
2839 }
2840
2841 return 0;
2842 }
2843 \f
2844 /* See if ARG is an expression that is either a comparison or is performing
2845 arithmetic on comparisons. The comparisons must only be comparing
2846 two different values, which will be stored in *CVAL1 and *CVAL2; if
2847 they are nonzero it means that some operands have already been found.
2848 No variables may be used anywhere else in the expression except in the
2849 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2850 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2851
2852 If this is true, return 1. Otherwise, return zero. */
2853
2854 static int
2855 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2856 {
2857 enum tree_code code = TREE_CODE (arg);
2858 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2859
2860 /* We can handle some of the tcc_expression cases here. */
2861 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2862 tclass = tcc_unary;
2863 else if (tclass == tcc_expression
2864 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2865 || code == COMPOUND_EXPR))
2866 tclass = tcc_binary;
2867
2868 else if (tclass == tcc_expression && code == SAVE_EXPR
2869 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2870 {
2871 /* If we've already found a CVAL1 or CVAL2, this expression is
2872 too complex to handle. */
2873 if (*cval1 || *cval2)
2874 return 0;
2875
2876 tclass = tcc_unary;
2877 *save_p = 1;
2878 }
2879
2880 switch (tclass)
2881 {
2882 case tcc_unary:
2883 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2884
2885 case tcc_binary:
2886 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2887 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2888 cval1, cval2, save_p));
2889
2890 case tcc_constant:
2891 return 1;
2892
2893 case tcc_expression:
2894 if (code == COND_EXPR)
2895 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2896 cval1, cval2, save_p)
2897 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2898 cval1, cval2, save_p)
2899 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2900 cval1, cval2, save_p));
2901 return 0;
2902
2903 case tcc_comparison:
2904 /* First see if we can handle the first operand, then the second. For
2905 the second operand, we know *CVAL1 can't be zero. It must be that
2906 one side of the comparison is each of the values; test for the
2907 case where this isn't true by failing if the two operands
2908 are the same. */
2909
2910 if (operand_equal_p (TREE_OPERAND (arg, 0),
2911 TREE_OPERAND (arg, 1), 0))
2912 return 0;
2913
2914 if (*cval1 == 0)
2915 *cval1 = TREE_OPERAND (arg, 0);
2916 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2917 ;
2918 else if (*cval2 == 0)
2919 *cval2 = TREE_OPERAND (arg, 0);
2920 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2921 ;
2922 else
2923 return 0;
2924
2925 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2926 ;
2927 else if (*cval2 == 0)
2928 *cval2 = TREE_OPERAND (arg, 1);
2929 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2930 ;
2931 else
2932 return 0;
2933
2934 return 1;
2935
2936 default:
2937 return 0;
2938 }
2939 }
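
/* For example, ARG "(x < y) && (x == y)" returns 1 with *CVAL1 == x
   and *CVAL2 == y, whereas "(x < y) && (z == w)" returns 0 because
   more than two distinct values appear in the comparisons.  */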
2940 \f
2941 /* ARG is a tree that is known to contain just arithmetic operations and
2942 comparisons. Evaluate the operations in the tree substituting NEW0 for
2943 any occurrence of OLD0 as an operand of a comparison and likewise for
2944 NEW1 and OLD1. */
2945
2946 static tree
2947 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2948 tree old1, tree new1)
2949 {
2950 tree type = TREE_TYPE (arg);
2951 enum tree_code code = TREE_CODE (arg);
2952 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2953
2954 /* We can handle some of the tcc_expression cases here. */
2955 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2956 tclass = tcc_unary;
2957 else if (tclass == tcc_expression
2958 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2959 tclass = tcc_binary;
2960
2961 switch (tclass)
2962 {
2963 case tcc_unary:
2964 return fold_build1_loc (loc, code, type,
2965 eval_subst (loc, TREE_OPERAND (arg, 0),
2966 old0, new0, old1, new1));
2967
2968 case tcc_binary:
2969 return fold_build2_loc (loc, code, type,
2970 eval_subst (loc, TREE_OPERAND (arg, 0),
2971 old0, new0, old1, new1),
2972 eval_subst (loc, TREE_OPERAND (arg, 1),
2973 old0, new0, old1, new1));
2974
2975 case tcc_expression:
2976 switch (code)
2977 {
2978 case SAVE_EXPR:
2979 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2980 old1, new1);
2981
2982 case COMPOUND_EXPR:
2983 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2984 old1, new1);
2985
2986 case COND_EXPR:
2987 return fold_build3_loc (loc, code, type,
2988 eval_subst (loc, TREE_OPERAND (arg, 0),
2989 old0, new0, old1, new1),
2990 eval_subst (loc, TREE_OPERAND (arg, 1),
2991 old0, new0, old1, new1),
2992 eval_subst (loc, TREE_OPERAND (arg, 2),
2993 old0, new0, old1, new1));
2994 default:
2995 break;
2996 }
2997 /* Fall through - ??? */
2998
2999 case tcc_comparison:
3000 {
3001 tree arg0 = TREE_OPERAND (arg, 0);
3002 tree arg1 = TREE_OPERAND (arg, 1);
3003
3004 /* We need to check both for exact equality and tree equality. The
3005 former will be true if the operand has a side-effect. In that
3006 case, we know the operand occurred exactly once. */
3007
3008 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3009 arg0 = new0;
3010 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3011 arg0 = new1;
3012
3013 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3014 arg1 = new0;
3015 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3016 arg1 = new1;
3017
3018 return fold_build2_loc (loc, code, type, arg0, arg1);
3019 }
3020
3021 default:
3022 return arg;
3023 }
3024 }
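
/* Continuing the example above, eval_subst on "(x < y) && (x == y)"
   with OLD0 == x, NEW0 == 0, OLD1 == y, NEW1 == 1 rebuilds
   "(0 < 1) && (0 == 1)", which the fold_build* calls then reduce to
   constants.  */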
3025 \f
3026 /* Return a tree for the case when the result of an expression is RESULT
3027 converted to TYPE and OMITTED was previously an operand of the expression
3028 but is now not needed (e.g., we folded OMITTED * 0).
3029
3030 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3031 the conversion of RESULT to TYPE. */
3032
3033 tree
3034 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3035 {
3036 tree t = fold_convert_loc (loc, type, result);
3037
3038 /* If the resulting operand is an empty statement, just return the omitted
3039 statement cast to void. */
3040 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3041 return build1_loc (loc, NOP_EXPR, void_type_node,
3042 fold_ignored_result (omitted));
3043
3044 if (TREE_SIDE_EFFECTS (omitted))
3045 return build2_loc (loc, COMPOUND_EXPR, type,
3046 fold_ignored_result (omitted), t);
3047
3048 return non_lvalue_loc (loc, t);
3049 }
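
/* For instance, when "f () * 0" is folded to 0, the caller passes
   RESULT == 0 and OMITTED == "f ()"; because the call has side
   effects, the result is the COMPOUND_EXPR "(f (), 0)" rather than a
   bare zero.  */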
3050
3051 /* Return a tree for the case when the result of an expression is RESULT
3052 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3053 of the expression but are now not needed.
3054
3055 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3056 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3057 evaluated before OMITTED2. Otherwise, if neither has side effects,
3058 just do the conversion of RESULT to TYPE. */
3059
3060 tree
3061 omit_two_operands_loc (location_t loc, tree type, tree result,
3062 tree omitted1, tree omitted2)
3063 {
3064 tree t = fold_convert_loc (loc, type, result);
3065
3066 if (TREE_SIDE_EFFECTS (omitted2))
3067 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3068 if (TREE_SIDE_EFFECTS (omitted1))
3069 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3070
3071 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3072 }
3073
3074 \f
3075 /* Return a simplified tree node for the truth-negation of ARG. This
3076 never alters ARG itself. We assume that ARG is an operation that
3077 returns a truth value (0 or 1).
3078
3079 FIXME: one would think we would fold the result, but it causes
3080 problems with the dominator optimizer. */
3081
3082 static tree
3083 fold_truth_not_expr (location_t loc, tree arg)
3084 {
3085 tree type = TREE_TYPE (arg);
3086 enum tree_code code = TREE_CODE (arg);
3087 location_t loc1, loc2;
3088
3089 /* If this is a comparison, we can simply invert it, except for
3090 floating-point non-equality comparisons, in which case we just
3091 enclose a TRUTH_NOT_EXPR around what we have. */
3092
3093 if (TREE_CODE_CLASS (code) == tcc_comparison)
3094 {
3095 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3096 if (FLOAT_TYPE_P (op_type)
3097 && flag_trapping_math
3098 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3099 && code != NE_EXPR && code != EQ_EXPR)
3100 return NULL_TREE;
3101
3102 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3103 if (code == ERROR_MARK)
3104 return NULL_TREE;
3105
3106 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3107 TREE_OPERAND (arg, 1));
3108 }
3109
3110 switch (code)
3111 {
3112 case INTEGER_CST:
3113 return constant_boolean_node (integer_zerop (arg), type);
3114
3115 case TRUTH_AND_EXPR:
3116 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3117 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3118 return build2_loc (loc, TRUTH_OR_EXPR, type,
3119 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3120 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3121
3122 case TRUTH_OR_EXPR:
3123 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3124 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3125 return build2_loc (loc, TRUTH_AND_EXPR, type,
3126 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3127 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3128
3129 case TRUTH_XOR_EXPR:
3130 /* Here we can invert either operand. We invert the first operand
3131 unless the second operand is a TRUTH_NOT_EXPR in which case our
3132 result is the XOR of the first operand with the inside of the
3133 negation of the second operand. */
3134
3135 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3136 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3137 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3138 else
3139 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3140 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3141 TREE_OPERAND (arg, 1));
3142
3143 case TRUTH_ANDIF_EXPR:
3144 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3145 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3146 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3147 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3148 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3149
3150 case TRUTH_ORIF_EXPR:
3151 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3152 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3153 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3154 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3155 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3156
3157 case TRUTH_NOT_EXPR:
3158 return TREE_OPERAND (arg, 0);
3159
3160 case COND_EXPR:
3161 {
3162 tree arg1 = TREE_OPERAND (arg, 1);
3163 tree arg2 = TREE_OPERAND (arg, 2);
3164
3165 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3166 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3167
3168 /* A COND_EXPR may have a throw as one operand, which
3169 then has void type. Just leave void operands
3170 as they are. */
3171 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3172 VOID_TYPE_P (TREE_TYPE (arg1))
3173 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3174 VOID_TYPE_P (TREE_TYPE (arg2))
3175 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3176 }
3177
3178 case COMPOUND_EXPR:
3179 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3180 return build2_loc (loc, COMPOUND_EXPR, type,
3181 TREE_OPERAND (arg, 0),
3182 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3183
3184 case NON_LVALUE_EXPR:
3185 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3186 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3187
3188 CASE_CONVERT:
3189 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3190 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3191
3192 /* ... fall through ... */
3193
3194 case FLOAT_EXPR:
3195 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3196 return build1_loc (loc, TREE_CODE (arg), type,
3197 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3198
3199 case BIT_AND_EXPR:
3200 if (!integer_onep (TREE_OPERAND (arg, 1)))
3201 return NULL_TREE;
3202 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3203
3204 case SAVE_EXPR:
3205 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3206
3207 case CLEANUP_POINT_EXPR:
3208 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3209 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3210 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3211
3212 default:
3213 return NULL_TREE;
3214 }
3215 }
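
/* The TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's
   laws: !(a && b) becomes !a || !b and !(a || b) becomes !a && !b,
   with the sub-negations folded recursively through
   invert_truthvalue_loc.  */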
3216
3217 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3218 assume that ARG is an operation that returns a truth value (0 or 1
3219 for scalars, 0 or -1 for vectors). Return the folded expression if
3220 folding is successful. Otherwise, return NULL_TREE. */
3221
3222 static tree
3223 fold_invert_truthvalue (location_t loc, tree arg)
3224 {
3225 tree type = TREE_TYPE (arg);
3226 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3227 ? BIT_NOT_EXPR
3228 : TRUTH_NOT_EXPR,
3229 type, arg);
3230 }
3231
3232 /* Return a simplified tree node for the truth-negation of ARG. This
3233 never alters ARG itself. We assume that ARG is an operation that
3234 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3235
3236 tree
3237 invert_truthvalue_loc (location_t loc, tree arg)
3238 {
3239 if (TREE_CODE (arg) == ERROR_MARK)
3240 return arg;
3241
3242 tree type = TREE_TYPE (arg);
3243 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3244 ? BIT_NOT_EXPR
3245 : TRUTH_NOT_EXPR,
3246 type, arg);
3247 }
3248
3249 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3250 operands are another bit-wise operation with a common input. If so,
3251 distribute the bit operations to save an operation and possibly two if
3252 constants are involved. For example, convert
3253 (A | B) & (A | C) into A | (B & C)
3254 Further simplification will occur if B and C are constants.
3255
3256 If this optimization cannot be done, 0 will be returned. */
3257
3258 static tree
3259 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3260 tree arg0, tree arg1)
3261 {
3262 tree common;
3263 tree left, right;
3264
3265 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3266 || TREE_CODE (arg0) == code
3267 || (TREE_CODE (arg0) != BIT_AND_EXPR
3268 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3269 return 0;
3270
3271 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3272 {
3273 common = TREE_OPERAND (arg0, 0);
3274 left = TREE_OPERAND (arg0, 1);
3275 right = TREE_OPERAND (arg1, 1);
3276 }
3277 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3278 {
3279 common = TREE_OPERAND (arg0, 0);
3280 left = TREE_OPERAND (arg0, 1);
3281 right = TREE_OPERAND (arg1, 0);
3282 }
3283 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3284 {
3285 common = TREE_OPERAND (arg0, 1);
3286 left = TREE_OPERAND (arg0, 0);
3287 right = TREE_OPERAND (arg1, 1);
3288 }
3289 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3290 {
3291 common = TREE_OPERAND (arg0, 1);
3292 left = TREE_OPERAND (arg0, 0);
3293 right = TREE_OPERAND (arg1, 0);
3294 }
3295 else
3296 return 0;
3297
3298 common = fold_convert_loc (loc, type, common);
3299 left = fold_convert_loc (loc, type, left);
3300 right = fold_convert_loc (loc, type, right);
3301 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3302 fold_build2_loc (loc, code, type, left, right));
3303 }
3304
3305 /* Knowing that ARG0 and ARG1 are each an RDIV_EXPR or a MULT_EXPR, simplify
3306 a binary operation with code CODE. Unsafe: it can change FP results. */
3307 static tree
3308 distribute_real_division (location_t loc, enum tree_code code, tree type,
3309 tree arg0, tree arg1)
3310 {
3311 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3312 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3313
3314 /* (A / C) +- (B / C) -> (A +- B) / C. */
3315 if (mul0 == mul1
3316 && operand_equal_p (TREE_OPERAND (arg0, 1),
3317 TREE_OPERAND (arg1, 1), 0))
3318 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3319 fold_build2_loc (loc, code, type,
3320 TREE_OPERAND (arg0, 0),
3321 TREE_OPERAND (arg1, 0)),
3322 TREE_OPERAND (arg0, 1));
3323
3324 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3325 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3326 TREE_OPERAND (arg1, 0), 0)
3327 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3328 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3329 {
3330 REAL_VALUE_TYPE r0, r1;
3331 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3332 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3333 if (!mul0)
3334 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3335 if (!mul1)
3336 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3337 real_arithmetic (&r0, code, &r0, &r1);
3338 return fold_build2_loc (loc, MULT_EXPR, type,
3339 TREE_OPERAND (arg0, 0),
3340 build_real (type, r0));
3341 }
3342
3343 return NULL_TREE;
3344 }
3345 \f
3346 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3347 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3348
3349 static tree
3350 make_bit_field_ref (location_t loc, tree inner, tree type,
3351 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3352 {
3353 tree result, bftype;
3354
3355 if (bitpos == 0)
3356 {
3357 tree size = TYPE_SIZE (TREE_TYPE (inner));
3358 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3359 || POINTER_TYPE_P (TREE_TYPE (inner)))
3360 && tree_fits_shwi_p (size)
3361 && tree_to_shwi (size) == bitsize)
3362 return fold_convert_loc (loc, type, inner);
3363 }
3364
3365 bftype = type;
3366 if (TYPE_PRECISION (bftype) != bitsize
3367 || TYPE_UNSIGNED (bftype) == !unsignedp)
3368 bftype = build_nonstandard_integer_type (bitsize, 0);
3369
3370 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3371 size_int (bitsize), bitsize_int (bitpos));
3372
3373 if (bftype != type)
3374 result = fold_convert_loc (loc, type, result);
3375
3376 return result;
3377 }
3378
3379 /* Optimize a bit-field compare.
3380
3381 There are two cases: First is a compare against a constant and the
3382 second is a comparison of two items where the fields are at the same
3383 bit position relative to the start of a chunk (byte, halfword, word)
3384 large enough to contain it. In these cases we can avoid the shift
3385 implicit in bitfield extractions.
3386
3387 For constants, we emit a compare of the shifted constant with the
3388 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3389 compared. For two fields at the same position, we do the ANDs with the
3390 similar mask and compare the result of the ANDs.
3391
3392 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3393 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3394 are the left and right operands of the comparison, respectively.
3395
3396 If the optimization described above can be done, we return the resulting
3397 tree. Otherwise we return zero. */
3398
3399 static tree
3400 optimize_bit_field_compare (location_t loc, enum tree_code code,
3401 tree compare_type, tree lhs, tree rhs)
3402 {
3403 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3404 tree type = TREE_TYPE (lhs);
3405 tree unsigned_type;
3406 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3407 machine_mode lmode, rmode, nmode;
3408 int lunsignedp, runsignedp;
3409 int lvolatilep = 0, rvolatilep = 0;
3410 tree linner, rinner = NULL_TREE;
3411 tree mask;
3412 tree offset;
3413
3414 /* Get all the information about the extractions being done. If the bit size
3415 is the same as the size of the underlying object, we aren't doing an
3416 extraction at all and so can do nothing. We also don't want to
3417 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3418 then will no longer be able to replace it. */
3419 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3420 &lunsignedp, &lvolatilep, false);
3421 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3422 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3423 return 0;
3424
3425 if (!const_p)
3426 {
3427 /* If this is not a constant, we can only do something if bit positions,
3428 sizes, and signedness are the same. */
3429 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3430 &runsignedp, &rvolatilep, false);
3431
3432 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3433 || lunsignedp != runsignedp || offset != 0
3434 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3435 return 0;
3436 }
3437
3438 /* See if we can find a mode to refer to this field. We should be able to,
3439 but fail if we can't. */
3440 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3441 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3442 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3443 TYPE_ALIGN (TREE_TYPE (rinner))),
3444 word_mode, false);
3445 if (nmode == VOIDmode)
3446 return 0;
3447
3448 /* Get an unsigned type with the precision of this mode for the
3449 shifts below. */
3450 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3451
3452 /* Compute the bit position and size for the new reference and our offset
3453 within it. If the new reference is the same size as the original, we
3454 won't optimize anything, so return zero. */
3455 nbitsize = GET_MODE_BITSIZE (nmode);
3456 nbitpos = lbitpos & ~ (nbitsize - 1);
3457 lbitpos -= nbitpos;
3458 if (nbitsize == lbitsize)
3459 return 0;
3460
3461 if (BYTES_BIG_ENDIAN)
3462 lbitpos = nbitsize - lbitsize - lbitpos;
3463
3464 /* Make the mask to be used against the extracted field. */
3465 mask = build_int_cst_type (unsigned_type, -1);
3466 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3467 mask = const_binop (RSHIFT_EXPR, mask,
3468 size_int (nbitsize - lbitsize - lbitpos));
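/* E.g. with nbitsize == 32, lbitsize == 4 and lbitpos == 8, the two
   shifts turn ~0 into 0xf00: lbitsize one bits at the field's position
   within the word.  */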
3469
3470 if (! const_p)
3471 /* If not comparing with constant, just rework the comparison
3472 and return. */
3473 return fold_build2_loc (loc, code, compare_type,
3474 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3475 make_bit_field_ref (loc, linner,
3476 unsigned_type,
3477 nbitsize, nbitpos,
3478 1),
3479 mask),
3480 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3481 make_bit_field_ref (loc, rinner,
3482 unsigned_type,
3483 nbitsize, nbitpos,
3484 1),
3485 mask));
3486
3487 /* Otherwise, we are handling the constant case. See if the constant is too
3488 big for the field. Warn and return a tree for 0 (false) if so. We do
3489 this not only for its own sake, but to avoid having to test for this
3490 error case below. If we didn't, we might generate wrong code.
3491
3492 For unsigned fields, the constant shifted right by the field length should
3493 be all zero. For signed fields, the high-order bits should agree with
3494 the sign bit. */
3495
3496 if (lunsignedp)
3497 {
3498 if (wi::lrshift (rhs, lbitsize) != 0)
3499 {
3500 warning (0, "comparison is always %d due to width of bit-field",
3501 code == NE_EXPR);
3502 return constant_boolean_node (code == NE_EXPR, compare_type);
3503 }
3504 }
3505 else
3506 {
3507 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3508 if (tem != 0 && tem != -1)
3509 {
3510 warning (0, "comparison is always %d due to width of bit-field",
3511 code == NE_EXPR);
3512 return constant_boolean_node (code == NE_EXPR, compare_type);
3513 }
3514 }
3515
3516 /* Single-bit compares should always be against zero. */
3517 if (lbitsize == 1 && ! integer_zerop (rhs))
3518 {
3519 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3520 rhs = build_int_cst (type, 0);
3521 }
3522
3523 /* Make a new bitfield reference, shift the constant over the
3524 appropriate number of bits and mask it with the computed mask
3525 (in case this was a signed field). If we changed it, make a new one. */
3526 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3527
3528 rhs = const_binop (BIT_AND_EXPR,
3529 const_binop (LSHIFT_EXPR,
3530 fold_convert_loc (loc, unsigned_type, rhs),
3531 size_int (lbitpos)),
3532 mask);
3533
3534 lhs = build2_loc (loc, code, compare_type,
3535 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3536 return lhs;
3537 }
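
/* Illustrative only: on a little-endian target, comparing "s.b == 3"
   where b is a 4-bit unsigned bit-field at bit 8 of a 32-bit word
   becomes roughly "(BIT_FIELD_REF (s, 32, 0) & 0xf00) == 0x300", a
   single masked word compare with no shift.  */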
3538 \f
3539 /* Subroutine for fold_truth_andor_1: decode a field reference.
3540
3541 If EXP is a comparison reference, we return the innermost reference.
3542
3543 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3544 set to the starting bit number.
3545
3546 If the innermost field can be completely contained in a mode-sized
3547 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3548
3549 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3550 otherwise it is not changed.
3551
3552 *PUNSIGNEDP is set to the signedness of the field.
3553
3554 *PMASK is set to the mask used. This is either contained in a
3555 BIT_AND_EXPR or derived from the width of the field.
3556
3557 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3558
3559 Return 0 if this is not a component reference or is one that we can't
3560 do anything with. */
3561
3562 static tree
3563 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3564 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3565 int *punsignedp, int *pvolatilep,
3566 tree *pmask, tree *pand_mask)
3567 {
3568 tree outer_type = 0;
3569 tree and_mask = 0;
3570 tree mask, inner, offset;
3571 tree unsigned_type;
3572 unsigned int precision;
3573
3574 /* All the optimizations using this function assume integer fields.
3575 There are problems with FP fields since the type_for_size call
3576 below can fail for, e.g., XFmode. */
3577 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3578 return 0;
3579
3580 /* We are interested in the bare arrangement of bits, so strip everything
3581 that doesn't affect the machine mode. However, record the type of the
3582 outermost expression if it may matter below. */
3583 if (CONVERT_EXPR_P (exp)
3584 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3585 outer_type = TREE_TYPE (exp);
3586 STRIP_NOPS (exp);
3587
3588 if (TREE_CODE (exp) == BIT_AND_EXPR)
3589 {
3590 and_mask = TREE_OPERAND (exp, 1);
3591 exp = TREE_OPERAND (exp, 0);
3592 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3593 if (TREE_CODE (and_mask) != INTEGER_CST)
3594 return 0;
3595 }
3596
3597 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3598 punsignedp, pvolatilep, false);
3599 if ((inner == exp && and_mask == 0)
3600 || *pbitsize < 0 || offset != 0
3601 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3602 return 0;
3603
3604 /* If the number of bits in the reference is the same as the bitsize of
3605 the outer type, then the outer type gives the signedness. Otherwise
3606 (in case of a small bitfield) the signedness is unchanged. */
3607 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3608 *punsignedp = TYPE_UNSIGNED (outer_type);
3609
3610 /* Compute the mask to access the bitfield. */
3611 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3612 precision = TYPE_PRECISION (unsigned_type);
3613
3614 mask = build_int_cst_type (unsigned_type, -1);
3615
3616 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3617 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3618
3619 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3620 if (and_mask != 0)
3621 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3622 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3623
3624 *pmask = mask;
3625 *pand_mask = and_mask;
3626 return inner;
3627 }
3628
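/* For example (a sketch, reusing the struct above): for the comparison
   operand "s.f & 3", the BIT_AND_EXPR is stripped first, so *PAND_MASK
   is set to 3; get_inner_reference then reports the 3-bit field, giving
   *PBITSIZE = 3 and the field's position in *PBITPOS, and the
   width-derived mask of 7 is ANDed with 3 so that *PMASK ends up as 3
   in the corresponding unsigned type.  */
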
3629 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3630 bit positions and MASK is SIGNED. */
3631
3632 static int
3633 all_ones_mask_p (const_tree mask, unsigned int size)
3634 {
3635 tree type = TREE_TYPE (mask);
3636 unsigned int precision = TYPE_PRECISION (type);
3637
3638 /* If this function returns true when the type of the mask is
3639 UNSIGNED, then there will be errors. In particular see
3640 gcc.c-torture/execute/990326-1.c. There does not appear to be
3641 any documentation paper trail as to why this is so. But the pre
3642 wide-int worked with that restriction and it has been preserved
3643 wide-int code worked with that restriction and it has been preserved
3644 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3645 return false;
3646
3647 return wi::mask (size, false, precision) == mask;
3648 }
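
/* E.g. in a signed 32-bit type the constant 0x0000000f satisfies
   all_ones_mask_p (mask, 4), while the same value in an unsigned type
   is rejected because of the restriction described above.  */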
3649
3650 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3651 represents the sign bit of EXP's type. If EXP represents a sign
3652 or zero extension, also test VAL against the unextended type.
3653 The return value is the (sub)expression whose sign bit is VAL,
3654 or NULL_TREE otherwise. */
3655
3656 tree
3657 sign_bit_p (tree exp, const_tree val)
3658 {
3659 int width;
3660 tree t;
3661
3662 /* Tree EXP must have an integral type. */
3663 t = TREE_TYPE (exp);
3664 if (! INTEGRAL_TYPE_P (t))
3665 return NULL_TREE;
3666
3667 /* Tree VAL must be an integer constant. */
3668 if (TREE_CODE (val) != INTEGER_CST
3669 || TREE_OVERFLOW (val))
3670 return NULL_TREE;
3671
3672 width = TYPE_PRECISION (t);
3673 if (wi::only_sign_bit_p (val, width))
3674 return exp;
3675
3676 /* Handle extension from a narrower type. */
3677 if (TREE_CODE (exp) == NOP_EXPR
3678 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3679 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3680
3681 return NULL_TREE;
3682 }
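
/* For instance, with "signed char c", sign_bit_p (c, -128) returns C,
   since -128 is 0x80, exactly the sign bit of an 8-bit type; and
   sign_bit_p ((int) c, 128) also returns C through the narrowing
   NOP_EXPR case.  */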
3683
3684 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3685 to be evaluated unconditionally. */
3686
3687 static int
3688 simple_operand_p (const_tree exp)
3689 {
3690 /* Strip any conversions that don't change the machine mode. */
3691 STRIP_NOPS (exp);
3692
3693 return (CONSTANT_CLASS_P (exp)
3694 || TREE_CODE (exp) == SSA_NAME
3695 || (DECL_P (exp)
3696 && ! TREE_ADDRESSABLE (exp)
3697 && ! TREE_THIS_VOLATILE (exp)
3698 && ! DECL_NONLOCAL (exp)
3699 /* Don't regard global variables as simple. They may be
3700 allocated in ways unknown to the compiler (shared memory,
3701 #pragma weak, etc). */
3702 && ! TREE_PUBLIC (exp)
3703 && ! DECL_EXTERNAL (exp)
3704 /* Weakrefs are not safe to be read, since they can be NULL.
3705 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3706 have DECL_WEAK flag set. */
3707 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3708 /* Loading a static variable is unduly expensive, but global
3709 registers aren't expensive. */
3710 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3711 }
3712
3713 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3714 to be evaluated unconditionally.
3715 In addition to simple_operand_p, we assume that comparisons, conversions,
3716 and logic-not operations are simple, if their operands are simple, too. */
3717
3718 static bool
3719 simple_operand_p_2 (tree exp)
3720 {
3721 enum tree_code code;
3722
3723 if (TREE_SIDE_EFFECTS (exp)
3724 || tree_could_trap_p (exp))
3725 return false;
3726
3727 while (CONVERT_EXPR_P (exp))
3728 exp = TREE_OPERAND (exp, 0);
3729
3730 code = TREE_CODE (exp);
3731
3732 if (TREE_CODE_CLASS (code) == tcc_comparison)
3733 return (simple_operand_p (TREE_OPERAND (exp, 0))
3734 && simple_operand_p (TREE_OPERAND (exp, 1)));
3735
3736 if (code == TRUTH_NOT_EXPR)
3737 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3738
3739 return simple_operand_p (exp);
3740 }
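
/* Under these rules "x < y" with X and Y local, non-volatile,
   non-static variables is simple, and so is "!(x < y)" through the
   TRUTH_NOT_EXPR case, but "*p < y" is not: a memory dereference is
   not a simple operand.  */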
3741
3742 \f
3743 /* The following functions are subroutines to fold_range_test and allow it to
3744 try to change a logical combination of comparisons into a range test.
3745
3746 For example, both
3747 X == 2 || X == 3 || X == 4 || X == 5
3748 and
3749 X >= 2 && X <= 5
3750 are converted to
3751 (unsigned) (X - 2) <= 3
3752
3753 We describe each set of comparisons as being either inside or outside
3754 a range, using a variable named like IN_P, and then describe the
3755 range with a lower and upper bound. If one of the bounds is omitted,
3756 it represents either the highest or lowest value of the type.
3757
3758 In the comments below, we represent a range by two numbers in brackets
3759 preceded by a "+" to designate being inside that range, or a "-" to
3760 designate being outside that range, so the condition can be inverted by
3761 flipping the prefix. An omitted bound is represented by a "-". For
3762 example, "- [-, 10]" means being outside the range starting at the lowest
3763 possible value and ending at 10, in other words, being greater than 10.
3764 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3765 always false.
3766
3767 We set up things so that the missing bounds are handled in a consistent
3768 manner so neither a missing bound nor "true" and "false" need to be
3769 handled using a special case. */
3770
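/* To check the example above: X is in [2, 5] exactly when
   0 <= X - 2 <= 3, and since unsigned subtraction wraps, any X < 2
   turns into a huge value, so the whole conjunction collapses to the
   single test (unsigned) (X - 2) <= 3.  */
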
3771 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3772 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3773 and UPPER1_P are nonzero if the respective argument is an upper bound
3774 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3775 must be specified for a comparison. ARG1 will be converted to ARG0's
3776 type if both are specified. */
3777
3778 static tree
3779 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3780 tree arg1, int upper1_p)
3781 {
3782 tree tem;
3783 int result;
3784 int sgn0, sgn1;
3785
3786 /* If neither arg represents infinity, do the normal operation.
3787 Else, if not a comparison, return infinity. Else handle the special
3788 comparison rules. Note that most of the cases below won't occur, but
3789 are handled for consistency. */
3790
3791 if (arg0 != 0 && arg1 != 0)
3792 {
3793 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3794 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3795 STRIP_NOPS (tem);
3796 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3797 }
3798
3799 if (TREE_CODE_CLASS (code) != tcc_comparison)
3800 return 0;
3801
3802 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3803 for neither. In real maths, we cannot assume open ended ranges are
3804 the same. But, this is computer arithmetic, where numbers are finite.
3805 We can therefore model any unbounded range as extending to a value Z,
3806 Z being greater than any representable number. This permits
3807 us to treat unbounded ranges as equal. */
3808 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3809 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3810 switch (code)
3811 {
3812 case EQ_EXPR:
3813 result = sgn0 == sgn1;
3814 break;
3815 case NE_EXPR:
3816 result = sgn0 != sgn1;
3817 break;
3818 case LT_EXPR:
3819 result = sgn0 < sgn1;
3820 break;
3821 case LE_EXPR:
3822 result = sgn0 <= sgn1;
3823 break;
3824 case GT_EXPR:
3825 result = sgn0 > sgn1;
3826 break;
3827 case GE_EXPR:
3828 result = sgn0 >= sgn1;
3829 break;
3830 default:
3831 gcc_unreachable ();
3832 }
3833
3834 return constant_boolean_node (result, type);
3835 }
3836 \f
3837 /* Helper routine for make_range. Perform one step for it, return
3838 new expression if the loop should continue or NULL_TREE if it should
3839 stop. */
3840
3841 tree
3842 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3843 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3844 bool *strict_overflow_p)
3845 {
3846 tree arg0_type = TREE_TYPE (arg0);
3847 tree n_low, n_high, low = *p_low, high = *p_high;
3848 int in_p = *p_in_p, n_in_p;
3849
3850 switch (code)
3851 {
3852 case TRUTH_NOT_EXPR:
3853 /* We can only do something if the range is testing for zero. */
3854 if (low == NULL_TREE || high == NULL_TREE
3855 || ! integer_zerop (low) || ! integer_zerop (high))
3856 return NULL_TREE;
3857 *p_in_p = ! in_p;
3858 return arg0;
3859
3860 case EQ_EXPR: case NE_EXPR:
3861 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3862 /* We can only do something if the range is testing for zero
3863 and if the second operand is an integer constant. Note that
3864 saying something is "in" the range we make is done by
3865 complementing IN_P, since the initial range encodes being not
3866 equal to zero with IN_P clear; "out" is leaving it alone.
3867 if (low == NULL_TREE || high == NULL_TREE
3868 || ! integer_zerop (low) || ! integer_zerop (high)
3869 || TREE_CODE (arg1) != INTEGER_CST)
3870 return NULL_TREE;
3871
3872 switch (code)
3873 {
3874 case NE_EXPR: /* - [c, c] */
3875 low = high = arg1;
3876 break;
3877 case EQ_EXPR: /* + [c, c] */
3878 in_p = ! in_p, low = high = arg1;
3879 break;
3880 case GT_EXPR: /* - [-, c] */
3881 low = 0, high = arg1;
3882 break;
3883 case GE_EXPR: /* + [c, -] */
3884 in_p = ! in_p, low = arg1, high = 0;
3885 break;
3886 case LT_EXPR: /* - [c, -] */
3887 low = arg1, high = 0;
3888 break;
3889 case LE_EXPR: /* + [-, c] */
3890 in_p = ! in_p, low = 0, high = arg1;
3891 break;
3892 default:
3893 gcc_unreachable ();
3894 }
3895
3896 /* If this is an unsigned comparison, we also know that EXP is
3897 greater than or equal to zero. We base the range tests we make
3898 on that fact, so we record it here so we can parse existing
3899 range tests. We test arg0_type since often the return type
3900 of, e.g. EQ_EXPR, is boolean. */
3901 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3902 {
3903 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3904 in_p, low, high, 1,
3905 build_int_cst (arg0_type, 0),
3906 NULL_TREE))
3907 return NULL_TREE;
3908
3909 in_p = n_in_p, low = n_low, high = n_high;
3910
3911 /* If the high bound is missing, but we have a nonzero low
3912 bound, reverse the range so it goes from zero to the low bound
3913 minus 1. */
3914 if (high == 0 && low && ! integer_zerop (low))
3915 {
3916 in_p = ! in_p;
3917 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3918 build_int_cst (TREE_TYPE (low), 1), 0);
3919 low = build_int_cst (arg0_type, 0);
3920 }
3921 }
3922
3923 *p_low = low;
3924 *p_high = high;
3925 *p_in_p = in_p;
3926 return arg0;
3927
3928 case NEGATE_EXPR:
3929 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3930 low and high are non-NULL, then normalization will do the right thing.
3931 if (!TYPE_UNSIGNED (arg0_type)
3932 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3933 {
3934 if (low == NULL_TREE)
3935 low = TYPE_MIN_VALUE (arg0_type);
3936 if (high == NULL_TREE)
3937 high = TYPE_MAX_VALUE (arg0_type);
3938 }
3939
3940 /* (-x) IN [a,b] -> x in [-b, -a] */
3941 n_low = range_binop (MINUS_EXPR, exp_type,
3942 build_int_cst (exp_type, 0),
3943 0, high, 1);
3944 n_high = range_binop (MINUS_EXPR, exp_type,
3945 build_int_cst (exp_type, 0),
3946 0, low, 0);
3947 if (n_high != 0 && TREE_OVERFLOW (n_high))
3948 return NULL_TREE;
3949 goto normalize;
3950
3951 case BIT_NOT_EXPR:
3952 /* ~ X -> -X - 1 */
3953 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3954 build_int_cst (exp_type, 1));
3955
3956 case PLUS_EXPR:
3957 case MINUS_EXPR:
3958 if (TREE_CODE (arg1) != INTEGER_CST)
3959 return NULL_TREE;
3960
3961 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3962 move a constant to the other side. */
3963 if (!TYPE_UNSIGNED (arg0_type)
3964 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3965 return NULL_TREE;
3966
3967 /* If EXP is signed, any overflow in the computation is undefined,
3968 so we don't worry about it so long as our computations on
3969 the bounds don't overflow. For unsigned, overflow is defined
3970 and this is exactly the right thing. */
3971 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3972 arg0_type, low, 0, arg1, 0);
3973 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3974 arg0_type, high, 1, arg1, 0);
3975 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3976 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3977 return NULL_TREE;
3978
3979 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3980 *strict_overflow_p = true;
3981
3982 normalize:
3983 /* Check for an unsigned range which has wrapped around the maximum
3984 value thus making n_high < n_low, and normalize it. */
3985 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3986 {
3987 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3988 build_int_cst (TREE_TYPE (n_high), 1), 0);
3989 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3990 build_int_cst (TREE_TYPE (n_low), 1), 0);
3991
3992 /* If the range is of the form +/- [ x+1, x ], we won't
3993 be able to normalize it. But then, it represents the
3994 whole range or the empty set, so make it
3995 +/- [ -, - ]. */
3996 if (tree_int_cst_equal (n_low, low)
3997 && tree_int_cst_equal (n_high, high))
3998 low = high = 0;
3999 else
4000 in_p = ! in_p;
4001 }
4002 else
4003 low = n_low, high = n_high;
4004
4005 *p_low = low;
4006 *p_high = high;
4007 *p_in_p = in_p;
4008 return arg0;
4009
4010 CASE_CONVERT:
4011 case NON_LVALUE_EXPR:
4012 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4013 return NULL_TREE;
4014
4015 if (! INTEGRAL_TYPE_P (arg0_type)
4016 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4017 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4018 return NULL_TREE;
4019
4020 n_low = low, n_high = high;
4021
4022 if (n_low != 0)
4023 n_low = fold_convert_loc (loc, arg0_type, n_low);
4024
4025 if (n_high != 0)
4026 n_high = fold_convert_loc (loc, arg0_type, n_high);
4027
4028 /* If we're converting arg0 from an unsigned type to the signed
4029 type of exp, we will be doing the comparison as unsigned.
4030 The tests above have already verified that LOW and HIGH
4031 are both positive.
4032
4033 So we have to ensure that we will handle large unsigned
4034 values the same way that the current signed bounds treat
4035 negative values. */
4036
4037 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4038 {
4039 tree high_positive;
4040 tree equiv_type;
4041 /* For fixed-point modes, we need to pass the saturating flag
4042 as the 2nd parameter. */
4043 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4044 equiv_type
4045 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4046 TYPE_SATURATING (arg0_type));
4047 else
4048 equiv_type
4049 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4050
4051 /* A range without an upper bound is, naturally, unbounded.
4052 Since convert would have cropped a very large value, use
4053 the max value for the destination type. */
4054 high_positive
4055 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4056 : TYPE_MAX_VALUE (arg0_type);
4057
4058 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4059 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4060 fold_convert_loc (loc, arg0_type,
4061 high_positive),
4062 build_int_cst (arg0_type, 1));
4063
4064 /* If the low bound is specified, "and" the range with the
4065 range for which the original unsigned value will be
4066 positive. */
4067 if (low != 0)
4068 {
4069 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4070 1, fold_convert_loc (loc, arg0_type,
4071 integer_zero_node),
4072 high_positive))
4073 return NULL_TREE;
4074
4075 in_p = (n_in_p == in_p);
4076 }
4077 else
4078 {
4079 /* Otherwise, "or" the range with the range of the input
4080 that will be interpreted as negative. */
4081 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4082 1, fold_convert_loc (loc, arg0_type,
4083 integer_zero_node),
4084 high_positive))
4085 return NULL_TREE;
4086
4087 in_p = (in_p != n_in_p);
4088 }
4089 }
4090
4091 *p_low = n_low;
4092 *p_high = n_high;
4093 *p_in_p = in_p;
4094 return arg0;
4095
4096 default:
4097 return NULL_TREE;
4098 }
4099 }
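
/* For example, starting from the initial range "- [0, 0]" (meaning
   EXP != 0), a step on "x < 5" for signed X sets low = 5 and leaves
   the high bound and IN_P alone, giving "- [5, -]": X is outside
   [5, max], i.e. X < 5.  A step on "x == 5" instead flips IN_P and
   sets both bounds, giving "+ [5, 5]".  */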
4100
4101 /* Given EXP, a logical expression, set the range it is testing into
4102 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4103 actually being tested. *PLOW and *PHIGH will be made of the same
4104 type as the returned expression. If EXP is not a comparison, we
4105 will most likely not be returning a useful value and range. Set
4106 *STRICT_OVERFLOW_P to true if the return value is only valid
4107 because signed overflow is undefined; otherwise, do not change
4108 *STRICT_OVERFLOW_P. */
4109
4110 tree
4111 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4112 bool *strict_overflow_p)
4113 {
4114 enum tree_code code;
4115 tree arg0, arg1 = NULL_TREE;
4116 tree exp_type, nexp;
4117 int in_p;
4118 tree low, high;
4119 location_t loc = EXPR_LOCATION (exp);
4120
4121 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4122 and see if we can refine the range. Some of the cases below may not
4123 happen, but it doesn't seem worth worrying about this. We "continue"
4124 the outer loop when we've changed something; otherwise we "break"
4125 the switch, which will "break" the while. */
4126
4127 in_p = 0;
4128 low = high = build_int_cst (TREE_TYPE (exp), 0);
4129
4130 while (1)
4131 {
4132 code = TREE_CODE (exp);
4133 exp_type = TREE_TYPE (exp);
4134 arg0 = NULL_TREE;
4135
4136 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4137 {
4138 if (TREE_OPERAND_LENGTH (exp) > 0)
4139 arg0 = TREE_OPERAND (exp, 0);
4140 if (TREE_CODE_CLASS (code) == tcc_binary
4141 || TREE_CODE_CLASS (code) == tcc_comparison
4142 || (TREE_CODE_CLASS (code) == tcc_expression
4143 && TREE_OPERAND_LENGTH (exp) > 1))
4144 arg1 = TREE_OPERAND (exp, 1);
4145 }
4146 if (arg0 == NULL_TREE)
4147 break;
4148
4149 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4150 &high, &in_p, strict_overflow_p);
4151 if (nexp == NULL_TREE)
4152 break;
4153 exp = nexp;
4154 }
4155
4156 /* If EXP is a constant, we can evaluate whether this is true or false. */
4157 if (TREE_CODE (exp) == INTEGER_CST)
4158 {
4159 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4160 exp, 0, low, 0))
4161 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4162 exp, 1, high, 1)));
4163 low = high = 0;
4164 exp = 0;
4165 }
4166
4167 *pin_p = in_p, *plow = low, *phigh = high;
4168 return exp;
4169 }
4170 \f
4171 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4172 type, TYPE, return an expression to test if EXP is in (or out of, depending
4173 on IN_P) the range. Return 0 if the test couldn't be created. */
4174
4175 tree
4176 build_range_check (location_t loc, tree type, tree exp, int in_p,
4177 tree low, tree high)
4178 {
4179 tree etype = TREE_TYPE (exp), value;
4180
4181 #ifdef HAVE_canonicalize_funcptr_for_compare
4182 /* Disable this optimization for function pointer expressions
4183 on targets that require function pointer canonicalization. */
4184 if (HAVE_canonicalize_funcptr_for_compare
4185 && TREE_CODE (etype) == POINTER_TYPE
4186 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4187 return NULL_TREE;
4188 #endif
4189
4190 if (! in_p)
4191 {
4192 value = build_range_check (loc, type, exp, 1, low, high);
4193 if (value != 0)
4194 return invert_truthvalue_loc (loc, value);
4195
4196 return 0;
4197 }
4198
4199 if (low == 0 && high == 0)
4200 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4201
4202 if (low == 0)
4203 return fold_build2_loc (loc, LE_EXPR, type, exp,
4204 fold_convert_loc (loc, etype, high));
4205
4206 if (high == 0)
4207 return fold_build2_loc (loc, GE_EXPR, type, exp,
4208 fold_convert_loc (loc, etype, low));
4209
4210 if (operand_equal_p (low, high, 0))
4211 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4212 fold_convert_loc (loc, etype, low));
4213
4214 if (integer_zerop (low))
4215 {
4216 if (! TYPE_UNSIGNED (etype))
4217 {
4218 etype = unsigned_type_for (etype);
4219 high = fold_convert_loc (loc, etype, high);
4220 exp = fold_convert_loc (loc, etype, exp);
4221 }
4222 return build_range_check (loc, type, exp, 1, 0, high);
4223 }
4224
4225 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4226 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4227 {
4228 int prec = TYPE_PRECISION (etype);
4229
4230 if (wi::mask (prec - 1, false, prec) == high)
4231 {
4232 if (TYPE_UNSIGNED (etype))
4233 {
4234 tree signed_etype = signed_type_for (etype);
4235 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4236 etype
4237 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4238 else
4239 etype = signed_etype;
4240 exp = fold_convert_loc (loc, etype, exp);
4241 }
4242 return fold_build2_loc (loc, GT_EXPR, type, exp,
4243 build_int_cst (etype, 0));
4244 }
4245 }
4246
4247 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4248 This requires wrap-around arithmetic for the type of the expression.
4249 First make sure that arithmetic in this type is valid, then make sure
4250 that it wraps around. */
4251 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4252 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4253 TYPE_UNSIGNED (etype));
4254
4255 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4256 {
4257 tree utype, minv, maxv;
4258
4259 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4260 for the type in question, as we rely on this here. */
4261 utype = unsigned_type_for (etype);
4262 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4263 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4264 build_int_cst (TREE_TYPE (maxv), 1), 1);
4265 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4266
4267 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4268 minv, 1, maxv, 1)))
4269 etype = utype;
4270 else
4271 return 0;
4272 }
4273
4274 high = fold_convert_loc (loc, etype, high);
4275 low = fold_convert_loc (loc, etype, low);
4276 exp = fold_convert_loc (loc, etype, exp);
4277
4278 value = const_binop (MINUS_EXPR, high, low);
4279
4281 if (POINTER_TYPE_P (etype))
4282 {
4283 if (value != 0 && !TREE_OVERFLOW (value))
4284 {
4285 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4286 return build_range_check (loc, type,
4287 fold_build_pointer_plus_loc (loc, exp, low),
4288 1, build_int_cst (etype, 0), value);
4289 }
4290 return 0;
4291 }
4292
4293 if (value != 0 && !TREE_OVERFLOW (value))
4294 return build_range_check (loc, type,
4295 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4296 1, build_int_cst (etype, 0), value);
4297
4298 return 0;
4299 }
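
/* E.g. for + [2, 5] on a signed int EXP this builds
   (unsigned) (EXP - 2) <= 3: the bounds are moved into the unsigned
   equivalent type, the low bound is subtracted from EXP, and the
   recursive call with a zero low bound reduces the test to a single
   <= comparison against HIGH - LOW.  */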
4300 \f
4301 /* Return the predecessor of VAL in its type, handling the infinite case. */
4302
4303 static tree
4304 range_predecessor (tree val)
4305 {
4306 tree type = TREE_TYPE (val);
4307
4308 if (INTEGRAL_TYPE_P (type)
4309 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4310 return 0;
4311 else
4312 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4313 build_int_cst (TREE_TYPE (val), 1), 0);
4314 }
4315
4316 /* Return the successor of VAL in its type, handling the infinite case. */
4317
4318 static tree
4319 range_successor (tree val)
4320 {
4321 tree type = TREE_TYPE (val);
4322
4323 if (INTEGRAL_TYPE_P (type)
4324 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4325 return 0;
4326 else
4327 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4328 build_int_cst (TREE_TYPE (val), 1), 0);
4329 }
4330
4331 /* Given two ranges, see if we can merge them into one. Return 1 if we
4332 can, 0 if we can't. Set the output range into the specified parameters. */
4333
4334 bool
4335 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4336 tree high0, int in1_p, tree low1, tree high1)
4337 {
4338 int no_overlap;
4339 int subset;
4340 int temp;
4341 tree tem;
4342 int in_p;
4343 tree low, high;
4344 int lowequal = ((low0 == 0 && low1 == 0)
4345 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4346 low0, 0, low1, 0)));
4347 int highequal = ((high0 == 0 && high1 == 0)
4348 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4349 high0, 1, high1, 1)));
4350
4351 /* Make range 0 be the range that starts first, or ends last if they
4352 start at the same value. Swap them if it isn't. */
4353 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4354 low0, 0, low1, 0))
4355 || (lowequal
4356 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4357 high1, 1, high0, 1))))
4358 {
4359 temp = in0_p, in0_p = in1_p, in1_p = temp;
4360 tem = low0, low0 = low1, low1 = tem;
4361 tem = high0, high0 = high1, high1 = tem;
4362 }
4363
4364 /* Now flag two cases, whether the ranges are disjoint or whether the
4365 second range is totally subsumed in the first. Note that the tests
4366 below are simplified by the ones above. */
4367 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4368 high0, 1, low1, 0));
4369 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4370 high1, 1, high0, 1));
4371
4372 /* We now have four cases, depending on whether we are including or
4373 excluding the two ranges. */
4374 if (in0_p && in1_p)
4375 {
4376 /* If they don't overlap, the result is false. If the second range
4377 is a subset it is the result. Otherwise, the range is from the start
4378 of the second to the end of the first. */
4379 if (no_overlap)
4380 in_p = 0, low = high = 0;
4381 else if (subset)
4382 in_p = 1, low = low1, high = high1;
4383 else
4384 in_p = 1, low = low1, high = high0;
4385 }
4386
4387 else if (in0_p && ! in1_p)
4388 {
4389 /* If they don't overlap, the result is the first range. If they are
4390 equal, the result is false. If the second range is a subset of the
4391 first, and the ranges begin at the same place, we go from just after
4392 the end of the second range to the end of the first. If the second
4393 range is not a subset of the first, or if it is a subset and both
4394 ranges end at the same place, the range starts at the start of the
4395 first range and ends just before the second range.
4396 Otherwise, we can't describe this as a single range. */
4397 if (no_overlap)
4398 in_p = 1, low = low0, high = high0;
4399 else if (lowequal && highequal)
4400 in_p = 0, low = high = 0;
4401 else if (subset && lowequal)
4402 {
4403 low = range_successor (high1);
4404 high = high0;
4405 in_p = 1;
4406 if (low == 0)
4407 {
4408 /* We are in the weird situation where high0 > high1 but
4409 high1 has no successor. Punt. */
4410 return 0;
4411 }
4412 }
4413 else if (! subset || highequal)
4414 {
4415 low = low0;
4416 high = range_predecessor (low1);
4417 in_p = 1;
4418 if (high == 0)
4419 {
4420 /* low0 < low1 but low1 has no predecessor. Punt. */
4421 return 0;
4422 }
4423 }
4424 else
4425 return 0;
4426 }
4427
4428 else if (! in0_p && in1_p)
4429 {
4430 /* If they don't overlap, the result is the second range. If the second
4431 is a subset of the first, the result is false. Otherwise,
4432 the range starts just after the first range and ends at the
4433 end of the second. */
4434 if (no_overlap)
4435 in_p = 1, low = low1, high = high1;
4436 else if (subset || highequal)
4437 in_p = 0, low = high = 0;
4438 else
4439 {
4440 low = range_successor (high0);
4441 high = high1;
4442 in_p = 1;
4443 if (low == 0)
4444 {
4445 /* high1 > high0 but high0 has no successor. Punt. */
4446 return 0;
4447 }
4448 }
4449 }
4450
4451 else
4452 {
4453 /* The case where we are excluding both ranges. Here the complex case
4454 is if they don't overlap. In that case, the only time we have a
4455 range is if they are adjacent. If the second is a subset of the
4456 first, the result is the first. Otherwise, the range to exclude
4457 starts at the beginning of the first range and ends at the end of the
4458 second. */
4459 if (no_overlap)
4460 {
4461 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4462 range_successor (high0),
4463 1, low1, 0)))
4464 in_p = 0, low = low0, high = high1;
4465 else
4466 {
4467 /* Canonicalize - [min, x] into - [-, x]. */
4468 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4469 switch (TREE_CODE (TREE_TYPE (low0)))
4470 {
4471 case ENUMERAL_TYPE:
4472 if (TYPE_PRECISION (TREE_TYPE (low0))
4473 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4474 break;
4475 /* FALLTHROUGH */
4476 case INTEGER_TYPE:
4477 if (tree_int_cst_equal (low0,
4478 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4479 low0 = 0;
4480 break;
4481 case POINTER_TYPE:
4482 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4483 && integer_zerop (low0))
4484 low0 = 0;
4485 break;
4486 default:
4487 break;
4488 }
4489
4490 /* Canonicalize - [x, max] into - [x, -]. */
4491 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4492 switch (TREE_CODE (TREE_TYPE (high1)))
4493 {
4494 case ENUMERAL_TYPE:
4495 if (TYPE_PRECISION (TREE_TYPE (high1))
4496 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4497 break;
4498 /* FALLTHROUGH */
4499 case INTEGER_TYPE:
4500 if (tree_int_cst_equal (high1,
4501 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4502 high1 = 0;
4503 break;
4504 case POINTER_TYPE:
4505 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4506 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4507 high1, 1,
4508 build_int_cst (TREE_TYPE (high1), 1),
4509 1)))
4510 high1 = 0;
4511 break;
4512 default:
4513 break;
4514 }
4515
4516 /* The ranges might also be adjacent between the maximum and
4517 minimum values of the given type. For
4518 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4519 return + [x + 1, y - 1]. */
4520 if (low0 == 0 && high1 == 0)
4521 {
4522 low = range_successor (high0);
4523 high = range_predecessor (low1);
4524 if (low == 0 || high == 0)
4525 return 0;
4526
4527 in_p = 1;
4528 }
4529 else
4530 return 0;
4531 }
4532 }
4533 else if (subset)
4534 in_p = 0, low = low0, high = high0;
4535 else
4536 in_p = 0, low = low0, high = high1;
4537 }
4538
4539 *pin_p = in_p, *plow = low, *phigh = high;
4540 return 1;
4541 }
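
/* For instance, merging + [2, 5] with + [4, 8] yields + [4, 5], the
   overlap; merging - [2, 5] with - [6, 8] yields - [2, 8], but only
   because range_successor (5) == 6 makes the two excluded ranges
   adjacent.  */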
4542 \f
4543
4544 /* Subroutine of fold, looking inside expressions of the form
4545 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4546 of the COND_EXPR. This function is being used also to optimize
4547 A op B ? C : A, by reversing the comparison first.
4548
4549 Return a folded expression whose code is not a COND_EXPR
4550 anymore, or NULL_TREE if no folding opportunity is found. */
4551
4552 static tree
4553 fold_cond_expr_with_comparison (location_t loc, tree type,
4554 tree arg0, tree arg1, tree arg2)
4555 {
4556 enum tree_code comp_code = TREE_CODE (arg0);
4557 tree arg00 = TREE_OPERAND (arg0, 0);
4558 tree arg01 = TREE_OPERAND (arg0, 1);
4559 tree arg1_type = TREE_TYPE (arg1);
4560 tree tem;
4561
4562 STRIP_NOPS (arg1);
4563 STRIP_NOPS (arg2);
4564
4565 /* If we have A op 0 ? A : -A, consider applying the following
4566 transformations:
4567
4568 A == 0? A : -A same as -A
4569 A != 0? A : -A same as A
4570 A >= 0? A : -A same as abs (A)
4571 A > 0? A : -A same as abs (A)
4572 A <= 0? A : -A same as -abs (A)
4573 A < 0? A : -A same as -abs (A)
4574
4575 None of these transformations work for modes with signed
4576 zeros. If A is +/-0, the first two transformations will
4577 change the sign of the result (from +0 to -0, or vice
4578 versa). The last four will fix the sign of the result,
4579 even though the original expressions could be positive or
4580 negative, depending on the sign of A.
4581
4582 Note that all these transformations are correct if A is
4583 NaN, since the two alternatives (A and -A) are also NaNs. */
4584 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4585 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4586 ? real_zerop (arg01)
4587 : integer_zerop (arg01))
4588 && ((TREE_CODE (arg2) == NEGATE_EXPR
4589 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4590 /* In the case that A is of the form X-Y, '-A' (arg2) may
4591 have already been folded to Y-X, check for that. */
4592 || (TREE_CODE (arg1) == MINUS_EXPR
4593 && TREE_CODE (arg2) == MINUS_EXPR
4594 && operand_equal_p (TREE_OPERAND (arg1, 0),
4595 TREE_OPERAND (arg2, 1), 0)
4596 && operand_equal_p (TREE_OPERAND (arg1, 1),
4597 TREE_OPERAND (arg2, 0), 0))))
4598 switch (comp_code)
4599 {
4600 case EQ_EXPR:
4601 case UNEQ_EXPR:
4602 tem = fold_convert_loc (loc, arg1_type, arg1);
4603 return pedantic_non_lvalue_loc (loc,
4604 fold_convert_loc (loc, type,
4605 negate_expr (tem)));
4606 case NE_EXPR:
4607 case LTGT_EXPR:
4608 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4609 case UNGE_EXPR:
4610 case UNGT_EXPR:
4611 if (flag_trapping_math)
4612 break;
4613 /* Fall through. */
4614 case GE_EXPR:
4615 case GT_EXPR:
4616 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4617 arg1 = fold_convert_loc (loc, signed_type_for
4618 (TREE_TYPE (arg1)), arg1);
4619 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4620 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4621 case UNLE_EXPR:
4622 case UNLT_EXPR:
4623 if (flag_trapping_math)
4624 break;
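	  /* Fall through.  */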
4625 case LE_EXPR:
4626 case LT_EXPR:
4627 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4628 arg1 = fold_convert_loc (loc, signed_type_for
4629 (TREE_TYPE (arg1)), arg1);
4630 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4631 return negate_expr (fold_convert_loc (loc, type, tem));
4632 default:
4633 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4634 break;
4635 }
4636
4637 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4638 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4639 both transformations are correct when A is NaN: A != 0
4640 is then true, and A == 0 is false. */
4641
4642 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4643 && integer_zerop (arg01) && integer_zerop (arg2))
4644 {
4645 if (comp_code == NE_EXPR)
4646 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4647 else if (comp_code == EQ_EXPR)
4648 return build_zero_cst (type);
4649 }
4650
4651 /* Try some transformations of A op B ? A : B.
4652
4653 A == B? A : B same as B
4654 A != B? A : B same as A
4655 A >= B? A : B same as max (A, B)
4656 A > B? A : B same as max (B, A)
4657 A <= B? A : B same as min (A, B)
4658 A < B? A : B same as min (B, A)
4659
4660 As above, these transformations don't work in the presence
4661 of signed zeros. For example, if A and B are zeros of
4662 opposite sign, the first two transformations will change
4663 the sign of the result. In the last four, the original
4664 expressions give different results for (A=+0, B=-0) and
4665 (A=-0, B=+0), but the transformed expressions do not.
4666
4667 The first two transformations are correct if either A or B
4668 is a NaN. In the first transformation, the condition will
4669 be false, and B will indeed be chosen. In the case of the
4670 second transformation, the condition A != B will be true,
4671 and A will be chosen.
4672
4673 The conversions to max() and min() are not correct if B is
4674 a number and A is not. The conditions in the original
4675 expressions will be false, so all four give B. The min()
4676 and max() versions would give a NaN instead. */
4677 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4678 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4679 /* Avoid these transformations if the COND_EXPR may be used
4680 as an lvalue in the C++ front-end. PR c++/19199. */
4681 && (in_gimple_form
4682 || VECTOR_TYPE_P (type)
4683 || (strcmp (lang_hooks.name, "GNU C++") != 0
4684 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4685 || ! maybe_lvalue_p (arg1)
4686 || ! maybe_lvalue_p (arg2)))
4687 {
4688 tree comp_op0 = arg00;
4689 tree comp_op1 = arg01;
4690 tree comp_type = TREE_TYPE (comp_op0);
4691
4692 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4693 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4694 {
4695 comp_type = type;
4696 comp_op0 = arg1;
4697 comp_op1 = arg2;
4698 }
4699
4700 switch (comp_code)
4701 {
4702 case EQ_EXPR:
4703 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4704 case NE_EXPR:
4705 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4706 case LE_EXPR:
4707 case LT_EXPR:
4708 case UNLE_EXPR:
4709 case UNLT_EXPR:
4710 /* In C++ a ?: expression can be an lvalue, so put the
4711 operand which will be used if they are equal first
4712 so that we can convert this back to the
4713 corresponding COND_EXPR. */
4714 if (!HONOR_NANS (element_mode (arg1)))
4715 {
4716 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4717 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4718 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4719 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4720 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4721 comp_op1, comp_op0);
4722 return pedantic_non_lvalue_loc (loc,
4723 fold_convert_loc (loc, type, tem));
4724 }
4725 break;
4726 case GE_EXPR:
4727 case GT_EXPR:
4728 case UNGE_EXPR:
4729 case UNGT_EXPR:
4730 if (!HONOR_NANS (element_mode (arg1)))
4731 {
4732 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4733 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4734 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4735 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4736 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4737 comp_op1, comp_op0);
4738 return pedantic_non_lvalue_loc (loc,
4739 fold_convert_loc (loc, type, tem));
4740 }
4741 break;
4742 case UNEQ_EXPR:
4743 if (!HONOR_NANS (element_mode (arg1)))
4744 return pedantic_non_lvalue_loc (loc,
4745 fold_convert_loc (loc, type, arg2));
4746 break;
4747 case LTGT_EXPR:
4748 if (!HONOR_NANS (element_mode (arg1)))
4749 return pedantic_non_lvalue_loc (loc,
4750 fold_convert_loc (loc, type, arg1));
4751 break;
4752 default:
4753 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4754 break;
4755 }
4756 }
4757
4758 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4759 we might still be able to simplify this. For example,
4760 if C1 is one less or one more than C2, this might have started
4761 out as a MIN or MAX and been transformed by this function.
4762 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4763
4764 if (INTEGRAL_TYPE_P (type)
4765 && TREE_CODE (arg01) == INTEGER_CST
4766 && TREE_CODE (arg2) == INTEGER_CST)
4767 switch (comp_code)
4768 {
4769 case EQ_EXPR:
4770 if (TREE_CODE (arg1) == INTEGER_CST)
4771 break;
4772 /* We can replace A with C1 in this case. */
4773 arg1 = fold_convert_loc (loc, type, arg01);
4774 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4775
4776 case LT_EXPR:
4777 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4778 MIN_EXPR, to preserve the signedness of the comparison. */
4779 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4780 OEP_ONLY_CONST)
4781 && operand_equal_p (arg01,
4782 const_binop (PLUS_EXPR, arg2,
4783 build_int_cst (type, 1)),
4784 OEP_ONLY_CONST))
4785 {
4786 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4787 fold_convert_loc (loc, TREE_TYPE (arg00),
4788 arg2));
4789 return pedantic_non_lvalue_loc (loc,
4790 fold_convert_loc (loc, type, tem));
4791 }
4792 break;
4793
4794 case LE_EXPR:
4795 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4796 as above. */
4797 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4798 OEP_ONLY_CONST)
4799 && operand_equal_p (arg01,
4800 const_binop (MINUS_EXPR, arg2,
4801 build_int_cst (type, 1)),
4802 OEP_ONLY_CONST))
4803 {
4804 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4805 fold_convert_loc (loc, TREE_TYPE (arg00),
4806 arg2));
4807 return pedantic_non_lvalue_loc (loc,
4808 fold_convert_loc (loc, type, tem));
4809 }
4810 break;
4811
4812 case GT_EXPR:
4813 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4814 MAX_EXPR, to preserve the signedness of the comparison. */
4815 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4816 OEP_ONLY_CONST)
4817 && operand_equal_p (arg01,
4818 const_binop (MINUS_EXPR, arg2,
4819 build_int_cst (type, 1)),
4820 OEP_ONLY_CONST))
4821 {
4822 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4823 fold_convert_loc (loc, TREE_TYPE (arg00),
4824 arg2));
4825 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4826 }
4827 break;
4828
4829 case GE_EXPR:
4830 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4831 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4832 OEP_ONLY_CONST)
4833 && operand_equal_p (arg01,
4834 const_binop (PLUS_EXPR, arg2,
4835 build_int_cst (type, 1)),
4836 OEP_ONLY_CONST))
4837 {
4838 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4839 fold_convert_loc (loc, TREE_TYPE (arg00),
4840 arg2));
4841 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4842 }
4843 break;
4844 case NE_EXPR:
4845 break;
4846 default:
4847 gcc_unreachable ();
4848 }
4849
4850 return NULL_TREE;
4851 }
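
/* Taken together: under the signed-zero, NaN and lvalue restrictions
   spelled out above, "x <= y ? x : y" folds to MIN_EXPR <x, y> and
   "x > 0 ? x : -x" folds to ABS_EXPR <x>.  */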
4852
4853
4854 \f
4855 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4856 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4857 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4858 false) >= 2)
4859 #endif
4860
4861 /* EXP is some logical combination of boolean tests. See if we can
4862 merge it into some range test. Return the new tree if so. */
4863
4864 static tree
4865 fold_range_test (location_t loc, enum tree_code code, tree type,
4866 tree op0, tree op1)
4867 {
4868 int or_op = (code == TRUTH_ORIF_EXPR
4869 || code == TRUTH_OR_EXPR);
4870 int in0_p, in1_p, in_p;
4871 tree low0, low1, low, high0, high1, high;
4872 bool strict_overflow_p = false;
4873 tree tem, lhs, rhs;
4874 const char * const warnmsg = G_("assuming signed overflow does not occur "
4875 "when simplifying range test");
4876
4877 if (!INTEGRAL_TYPE_P (type))
4878 return 0;
4879
4880 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4881 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4882
4883 /* If this is an OR operation, invert both sides; we will invert
4884 again at the end. */
4885 if (or_op)
4886 in0_p = ! in0_p, in1_p = ! in1_p;
4887
4888 /* If both expressions are the same, if we can merge the ranges, and we
4889 can build the range test, return it or it inverted. If one of the
4890 ranges is always true or always false, consider it to be the same
4891 expression as the other. */
4892 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4893 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4894 in1_p, low1, high1)
4895 && 0 != (tem = (build_range_check (loc, type,
4896 lhs != 0 ? lhs
4897 : rhs != 0 ? rhs : integer_zero_node,
4898 in_p, low, high))))
4899 {
4900 if (strict_overflow_p)
4901 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4902 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4903 }
4904
4905 /* On machines where the branch cost is expensive, if this is a
4906 short-circuited branch and the underlying object on both sides
4907 is the same, make a non-short-circuit operation. */
4908 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4909 && lhs != 0 && rhs != 0
4910 && (code == TRUTH_ANDIF_EXPR
4911 || code == TRUTH_ORIF_EXPR)
4912 && operand_equal_p (lhs, rhs, 0))
4913 {
4914 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4915 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4916 which cases we can't do this. */
4917 if (simple_operand_p (lhs))
4918 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4919 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4920 type, op0, op1);
4921
4922 else if (!lang_hooks.decls.global_bindings_p ()
4923 && !CONTAINS_PLACEHOLDER_P (lhs))
4924 {
4925 tree common = save_expr (lhs);
4926
4927 if (0 != (lhs = build_range_check (loc, type, common,
4928 or_op ? ! in0_p : in0_p,
4929 low0, high0))
4930 && (0 != (rhs = build_range_check (loc, type, common,
4931 or_op ? ! in1_p : in1_p,
4932 low1, high1))))
4933 {
4934 if (strict_overflow_p)
4935 fold_overflow_warning (warnmsg,
4936 WARN_STRICT_OVERFLOW_COMPARISON);
4937 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4938 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4939 type, lhs, rhs);
4940 }
4941 }
4942 }
4943
4944 return 0;
4945 }
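
/* This is the function that turns "ch >= '0' && ch <= '9'" into a
   single test: each comparison becomes a range on CH, merge_ranges
   combines them into + ['0', '9'], and build_range_check then emits
   (unsigned) (ch - '0') <= 9.  */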
4946 \f
4947 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4948 bit value. Arrange things so the extra bits will be set to zero if and
4949 only if C is sign-extended to its full width. If MASK is nonzero,
4950 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4951
4952 static tree
4953 unextend (tree c, int p, int unsignedp, tree mask)
4954 {
4955 tree type = TREE_TYPE (c);
4956 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4957 tree temp;
4958
4959 if (p == modesize || unsignedp)
4960 return c;
4961
4962 /* We work by getting just the sign bit into the low-order bit, then
4963 into the high-order bit, then sign-extend. We then XOR that value
4964 with C. */
4965 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
4966
4967 /* We must use a signed type in order to get an arithmetic right shift.
4968 However, we must also avoid introducing accidental overflows, so that
4969 a subsequent call to integer_zerop will work. Hence we must
4970 do the type conversion here. At this point, the constant is either
4971 zero or one, and the conversion to a signed type can never overflow.
4972 We could get an overflow if this conversion is done anywhere else. */
4973 if (TYPE_UNSIGNED (type))
4974 temp = fold_convert (signed_type_for (type), temp);
4975
4976 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4977 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4978 if (mask != 0)
4979 temp = const_binop (BIT_AND_EXPR, temp,
4980 fold_convert (TREE_TYPE (c), mask));
4981 /* If necessary, convert the type back to match the type of C. */
4982 if (TYPE_UNSIGNED (type))
4983 temp = fold_convert (type, temp);
4984
4985 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4986 }
4987 \f
4988 /* For an expression that has the form
4989 (A && B) || ~B
4990 or
4991 (A || B) && ~B,
4992 we can drop one of the inner expressions and simplify to
4993 A || ~B
4994 or
4995 A && ~B
4996 LOC is the location of the resulting expression. OP is the inner
4997 logical operation; the left-hand side in the examples above, while CMPOP
4998 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4999 removing a condition that guards another, as in
5000 (A != NULL && A->...) || A == NULL
5001 which we must not transform. If RHS_ONLY is true, only eliminate the
5002 right-most operand of the inner logical operation. */
5003
5004 static tree
5005 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5006 bool rhs_only)
5007 {
5008 tree type = TREE_TYPE (cmpop);
5009 enum tree_code code = TREE_CODE (cmpop);
5010 enum tree_code truthop_code = TREE_CODE (op);
5011 tree lhs = TREE_OPERAND (op, 0);
5012 tree rhs = TREE_OPERAND (op, 1);
5013 tree orig_lhs = lhs, orig_rhs = rhs;
5014 enum tree_code rhs_code = TREE_CODE (rhs);
5015 enum tree_code lhs_code = TREE_CODE (lhs);
5016 enum tree_code inv_code;
5017
5018 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5019 return NULL_TREE;
5020
5021 if (TREE_CODE_CLASS (code) != tcc_comparison)
5022 return NULL_TREE;
5023
5024 if (rhs_code == truthop_code)
5025 {
5026 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5027 if (newrhs != NULL_TREE)
5028 {
5029 rhs = newrhs;
5030 rhs_code = TREE_CODE (rhs);
5031 }
5032 }
5033 if (lhs_code == truthop_code && !rhs_only)
5034 {
5035 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5036 if (newlhs != NULL_TREE)
5037 {
5038 lhs = newlhs;
5039 lhs_code = TREE_CODE (lhs);
5040 }
5041 }
5042
5043 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5044 if (inv_code == rhs_code
5045 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5046 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5047 return lhs;
5048 if (!rhs_only && inv_code == lhs_code
5049 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5050 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5051 return rhs;
5052 if (rhs != orig_rhs || lhs != orig_lhs)
5053 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5054 lhs, rhs);
5055 return NULL_TREE;
5056 }
5057
5058 /* Find ways of folding logical expressions of LHS and RHS:
5059 Try to merge two comparisons to the same innermost item.
5060 Look for range tests like "ch >= '0' && ch <= '9'".
5061 Look for combinations of simple terms on machines with expensive branches
5062 and evaluate the RHS unconditionally.
5063
5064 For example, if we have p->a == 2 && p->b == 4 and we can make an
5065 object large enough to span both A and B, we can do this with a comparison
5066 against the object ANDed with the a mask.
5067
5068 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5069 operations to do this with one comparison.
5070
5071 We check for both normal comparisons and the BIT_AND_EXPRs made by
5072 this function and the one above.
5073
5074 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5075 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5076
5077 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5078 two operands.
5079
5080 We return the simplified tree or 0 if no optimization is possible. */
5081
5082 static tree
5083 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5084 tree lhs, tree rhs)
5085 {
5086 /* If this is the "or" of two comparisons, we can do something if
5087 the comparisons are NE_EXPR. If this is the "and", we can do something
5088 if the comparisons are EQ_EXPR. I.e.,
5089 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5090
5091 WANTED_CODE is this operation code. For single bit fields, we can
5092 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5093 comparison for one-bit fields. */
5094
5095 enum tree_code wanted_code;
5096 enum tree_code lcode, rcode;
5097 tree ll_arg, lr_arg, rl_arg, rr_arg;
5098 tree ll_inner, lr_inner, rl_inner, rr_inner;
5099 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5100 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5101 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5102 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5103 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5104 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5105 machine_mode lnmode, rnmode;
5106 tree ll_mask, lr_mask, rl_mask, rr_mask;
5107 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5108 tree l_const, r_const;
5109 tree lntype, rntype, result;
5110 HOST_WIDE_INT first_bit, end_bit;
5111 int volatilep;
5112
5113 /* Start by getting the comparison codes. Fail if anything is volatile.
5114 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5115 it were surrounded with a NE_EXPR. */
5116
5117 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5118 return 0;
5119
5120 lcode = TREE_CODE (lhs);
5121 rcode = TREE_CODE (rhs);
5122
5123 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5124 {
5125 lhs = build2 (NE_EXPR, truth_type, lhs,
5126 build_int_cst (TREE_TYPE (lhs), 0));
5127 lcode = NE_EXPR;
5128 }
5129
5130 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5131 {
5132 rhs = build2 (NE_EXPR, truth_type, rhs,
5133 build_int_cst (TREE_TYPE (rhs), 0));
5134 rcode = NE_EXPR;
5135 }
5136
5137 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5138 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5139 return 0;
5140
5141 ll_arg = TREE_OPERAND (lhs, 0);
5142 lr_arg = TREE_OPERAND (lhs, 1);
5143 rl_arg = TREE_OPERAND (rhs, 0);
5144 rr_arg = TREE_OPERAND (rhs, 1);
5145
5146 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5147 if (simple_operand_p (ll_arg)
5148 && simple_operand_p (lr_arg))
5149 {
5150 if (operand_equal_p (ll_arg, rl_arg, 0)
5151 && operand_equal_p (lr_arg, rr_arg, 0))
5152 {
5153 result = combine_comparisons (loc, code, lcode, rcode,
5154 truth_type, ll_arg, lr_arg);
5155 if (result)
5156 return result;
5157 }
5158 else if (operand_equal_p (ll_arg, rr_arg, 0)
5159 && operand_equal_p (lr_arg, rl_arg, 0))
5160 {
5161 result = combine_comparisons (loc, code, lcode,
5162 swap_tree_comparison (rcode),
5163 truth_type, ll_arg, lr_arg);
5164 if (result)
5165 return result;
5166 }
5167 }
5168
5169 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5170 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5171
5172 /* If the RHS can be evaluated unconditionally and its operands are
5173 simple, it wins to evaluate the RHS unconditionally on machines
5174 with expensive branches. In this case, this isn't a comparison
5175 that can be merged. */
5176
5177 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5178 false) >= 2
5179 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5180 && simple_operand_p (rl_arg)
5181 && simple_operand_p (rr_arg))
5182 {
5183 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5184 if (code == TRUTH_OR_EXPR
5185 && lcode == NE_EXPR && integer_zerop (lr_arg)
5186 && rcode == NE_EXPR && integer_zerop (rr_arg)
5187 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5188 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5189 return build2_loc (loc, NE_EXPR, truth_type,
5190 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5191 ll_arg, rl_arg),
5192 build_int_cst (TREE_TYPE (ll_arg), 0));
5193
5194 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5195 if (code == TRUTH_AND_EXPR
5196 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5197 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5198 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5199 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5200 return build2_loc (loc, EQ_EXPR, truth_type,
5201 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5202 ll_arg, rl_arg),
5203 build_int_cst (TREE_TYPE (ll_arg), 0));
5204 }
5205
5206 /* See if the comparisons can be merged. Then get all the parameters for
5207 each side. */
5208
5209 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5210 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5211 return 0;
5212
5213 volatilep = 0;
5214 ll_inner = decode_field_reference (loc, ll_arg,
5215 &ll_bitsize, &ll_bitpos, &ll_mode,
5216 &ll_unsignedp, &volatilep, &ll_mask,
5217 &ll_and_mask);
5218 lr_inner = decode_field_reference (loc, lr_arg,
5219 &lr_bitsize, &lr_bitpos, &lr_mode,
5220 &lr_unsignedp, &volatilep, &lr_mask,
5221 &lr_and_mask);
5222 rl_inner = decode_field_reference (loc, rl_arg,
5223 &rl_bitsize, &rl_bitpos, &rl_mode,
5224 &rl_unsignedp, &volatilep, &rl_mask,
5225 &rl_and_mask);
5226 rr_inner = decode_field_reference (loc, rr_arg,
5227 &rr_bitsize, &rr_bitpos, &rr_mode,
5228 &rr_unsignedp, &volatilep, &rr_mask,
5229 &rr_and_mask);
5230
5231 /* The inner operation on the lhs of each comparison must be the
5232 same if we are to be able to do anything.
5233 Then see if we have constants. If not, the same must be true for
5234 the rhs's. */
5235 if (volatilep || ll_inner == 0 || rl_inner == 0
5236 || ! operand_equal_p (ll_inner, rl_inner, 0))
5237 return 0;
5238
5239 if (TREE_CODE (lr_arg) == INTEGER_CST
5240 && TREE_CODE (rr_arg) == INTEGER_CST)
5241 l_const = lr_arg, r_const = rr_arg;
5242 else if (lr_inner == 0 || rr_inner == 0
5243 || ! operand_equal_p (lr_inner, rr_inner, 0))
5244 return 0;
5245 else
5246 l_const = r_const = 0;
5247
5248 /* If either comparison code is not correct for our logical operation,
5249 fail. However, we can convert a one-bit comparison against zero into
5250 the opposite comparison against that bit being set in the field. */
5251
5252 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5253 if (lcode != wanted_code)
5254 {
5255 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5256 {
5257 /* Make the left operand unsigned, since we are only interested
5258 in the value of one bit. Otherwise we are doing the wrong
5259 thing below. */
5260 ll_unsignedp = 1;
5261 l_const = ll_mask;
5262 }
5263 else
5264 return 0;
5265 }
5266
5267 /* This is analogous to the code for l_const above. */
5268 if (rcode != wanted_code)
5269 {
5270 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5271 {
5272 rl_unsignedp = 1;
5273 r_const = rl_mask;
5274 }
5275 else
5276 return 0;
5277 }
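
/* Worked example, added for illustration only: for
   (x & 8) == 0 && (x & 4) != 0, wanted_code is EQ_EXPR, so the second
   test is rewritten from a comparison against zero into (x & 4) == 4
   by taking r_const = rl_mask = 4; both halves then use the same
   comparison code and can be merged below. */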
5278
5279 /* See if we can find a mode that contains both fields being compared on
5280 the left. If we can't, fail. Otherwise, update all constants and masks
5281 to be relative to a field of that size. */
5282 first_bit = MIN (ll_bitpos, rl_bitpos);
5283 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5284 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5285 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5286 volatilep);
5287 if (lnmode == VOIDmode)
5288 return 0;
5289
5290 lnbitsize = GET_MODE_BITSIZE (lnmode);
5291 lnbitpos = first_bit & ~ (lnbitsize - 1);
5292 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5293 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5294
5295 if (BYTES_BIG_ENDIAN)
5296 {
5297 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5298 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5299 }
5300
5301 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5302 size_int (xll_bitpos));
5303 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5304 size_int (xrl_bitpos));
5305
5306 if (l_const)
5307 {
5308 l_const = fold_convert_loc (loc, lntype, l_const);
5309 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5310 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5311 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5312 fold_build1_loc (loc, BIT_NOT_EXPR,
5313 lntype, ll_mask))))
5314 {
5315 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5316
5317 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5318 }
5319 }
5320 if (r_const)
5321 {
5322 r_const = fold_convert_loc (loc, lntype, r_const);
5323 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5324 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5325 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5326 fold_build1_loc (loc, BIT_NOT_EXPR,
5327 lntype, rl_mask))))
5328 {
5329 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5330
5331 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5332 }
5333 }
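
/* Illustration, not in the original sources: the masking checks above
   catch cases like (x & 0x0f) == 0x13 && ..., where the constant 0x13
   has bit 4 set outside the mask 0x0f; such a test can never succeed,
   so for TRUTH_AND_EXPR the whole expression folds to false and the
   "comparison is always" warning fires. */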
5334
5335 /* If the right sides are not constant, do the same for them. Also,
5336 disallow this optimization if a size or signedness mismatch occurs
5337 between the left and right sides. */
5338 if (l_const == 0)
5339 {
5340 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5341 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5342 /* Make sure the two fields on the right
5343 correspond to the left without being swapped. */
5344 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5345 return 0;
5346
5347 first_bit = MIN (lr_bitpos, rr_bitpos);
5348 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5349 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5350 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5351 volatilep);
5352 if (rnmode == VOIDmode)
5353 return 0;
5354
5355 rnbitsize = GET_MODE_BITSIZE (rnmode);
5356 rnbitpos = first_bit & ~ (rnbitsize - 1);
5357 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5358 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5359
5360 if (BYTES_BIG_ENDIAN)
5361 {
5362 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5363 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5364 }
5365
5366 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5367 rntype, lr_mask),
5368 size_int (xlr_bitpos));
5369 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5370 rntype, rr_mask),
5371 size_int (xrr_bitpos));
5372
5373 /* Make a mask that corresponds to both fields being compared.
5374 Do this for both items being compared. If the operands are the
5375 same size and the bits being compared are in the same position
5376 then we can do this by masking both and comparing the masked
5377 results. */
5378 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5379 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5380 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5381 {
5382 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5383 ll_unsignedp || rl_unsignedp);
5384 if (! all_ones_mask_p (ll_mask, lnbitsize))
5385 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5386
5387 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5388 lr_unsignedp || rr_unsignedp);
5389 if (! all_ones_mask_p (lr_mask, rnbitsize))
5390 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5391
5392 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5393 }
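
/* Sketch of the merged form above, for illustration: comparing two
   bit-fields per operand, say s.a == t.a && s.b == t.b where a and b
   sit at the same positions in one word of s and of t, becomes
   roughly

       (s_word & mask) == (t_word & mask)

   with mask covering both fields (s_word, t_word and mask are
   hypothetical names for the two bit-field refs and the IOR of the
   field masks built above). */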
5394
5395 /* There is still another way we can do something: If both pairs of
5396 fields being compared are adjacent, we may be able to make a wider
5397 field containing them both.
5398
5399 Note that we still must mask the lhs/rhs expressions. Furthermore,
5400 the mask must be shifted to account for the shift done by
5401 make_bit_field_ref. */
5402 if ((ll_bitsize + ll_bitpos == rl_bitpos
5403 && lr_bitsize + lr_bitpos == rr_bitpos)
5404 || (ll_bitpos == rl_bitpos + rl_bitsize
5405 && lr_bitpos == rr_bitpos + rr_bitsize))
5406 {
5407 tree type;
5408
5409 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5410 ll_bitsize + rl_bitsize,
5411 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5412 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5413 lr_bitsize + rr_bitsize,
5414 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5415
5416 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5417 size_int (MIN (xll_bitpos, xrl_bitpos)));
5418 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5419 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5420
5421 /* Convert to the smaller type before masking out unwanted bits. */
5422 type = lntype;
5423 if (lntype != rntype)
5424 {
5425 if (lnbitsize > rnbitsize)
5426 {
5427 lhs = fold_convert_loc (loc, rntype, lhs);
5428 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5429 type = rntype;
5430 }
5431 else if (lnbitsize < rnbitsize)
5432 {
5433 rhs = fold_convert_loc (loc, lntype, rhs);
5434 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5435 type = lntype;
5436 }
5437 }
5438
5439 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5440 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5441
5442 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5443 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5444
5445 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5446 }
5447
5448 return 0;
5449 }
5450
5451 /* Handle the case of comparisons with constants. If there is something in
5452 common between the masks, those bits of the constants must be the same.
5453 If not, the condition is always false (for AND) or always true (for
5454 OR). Test for this to avoid generating incorrect code below. */
5455 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5456 if (! integer_zerop (result)
5457 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5458 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5459 {
5460 if (wanted_code == NE_EXPR)
5461 {
5462 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5463 return constant_boolean_node (true, truth_type);
5464 }
5465 else
5466 {
5467 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5468 return constant_boolean_node (false, truth_type);
5469 }
5470 }
5471
5472 /* Construct the expression we will return. First get the component
5473 reference we will make. Unless the mask is all ones the width of
5474 that field, perform the mask operation. Then compare with the
5475 merged constant. */
5476 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5477 ll_unsignedp || rl_unsignedp);
5478
5479 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5480 if (! all_ones_mask_p (ll_mask, lnbitsize))
5481 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5482
5483 return build2_loc (loc, wanted_code, truth_type, result,
5484 const_binop (BIT_IOR_EXPR, l_const, r_const));
5485 }
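
/* Example of the constant path above, for illustration: with unsigned
   char x, (x & 0xf0) == 0x30 && (x & 0x0f) == 0x02 has disjoint
   masks, the merged mask 0xff covers the whole 8-bit field (so the
   BIT_AND_EXPR is dropped), and the constants IOR to 0x32, leaving
   the single test x == 0x32. */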
5486 \f
5487 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5488 constant. */
5489
5490 static tree
5491 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5492 tree op0, tree op1)
5493 {
5494 tree arg0 = op0;
5495 enum tree_code op_code;
5496 tree comp_const;
5497 tree minmax_const;
5498 int consts_equal, consts_lt;
5499 tree inner;
5500
5501 STRIP_SIGN_NOPS (arg0);
5502
5503 op_code = TREE_CODE (arg0);
5504 minmax_const = TREE_OPERAND (arg0, 1);
5505 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5506 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5507 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5508 inner = TREE_OPERAND (arg0, 0);
5509
5510 /* If something does not permit us to optimize, return NULL_TREE. */
5511 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5512 || TREE_CODE (comp_const) != INTEGER_CST
5513 || TREE_OVERFLOW (comp_const)
5514 || TREE_CODE (minmax_const) != INTEGER_CST
5515 || TREE_OVERFLOW (minmax_const))
5516 return NULL_TREE;
5517
5518 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5519 and GT_EXPR, doing the rest with recursive calls using logical
5520 simplifications. */
5521 switch (code)
5522 {
5523 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5524 {
5525 tree tem
5526 = optimize_minmax_comparison (loc,
5527 invert_tree_comparison (code, false),
5528 type, op0, op1);
5529 if (tem)
5530 return invert_truthvalue_loc (loc, tem);
5531 return NULL_TREE;
5532 }
5533
5534 case GE_EXPR:
5535 return
5536 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5537 optimize_minmax_comparison
5538 (loc, EQ_EXPR, type, arg0, comp_const),
5539 optimize_minmax_comparison
5540 (loc, GT_EXPR, type, arg0, comp_const));
5541
5542 case EQ_EXPR:
5543 if (op_code == MAX_EXPR && consts_equal)
5544 /* MAX (X, 0) == 0 -> X <= 0 */
5545 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5546
5547 else if (op_code == MAX_EXPR && consts_lt)
5548 /* MAX (X, 0) == 5 -> X == 5 */
5549 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5550
5551 else if (op_code == MAX_EXPR)
5552 /* MAX (X, 0) == -1 -> false */
5553 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5554
5555 else if (consts_equal)
5556 /* MIN (X, 0) == 0 -> X >= 0 */
5557 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5558
5559 else if (consts_lt)
5560 /* MIN (X, 0) == 5 -> false */
5561 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5562
5563 else
5564 /* MIN (X, 0) == -1 -> X == -1 */
5565 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5566
5567 case GT_EXPR:
5568 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5569 /* MAX (X, 0) > 0 -> X > 0
5570 MAX (X, 0) > 5 -> X > 5 */
5571 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5572
5573 else if (op_code == MAX_EXPR)
5574 /* MAX (X, 0) > -1 -> true */
5575 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5576
5577 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5578 /* MIN (X, 0) > 0 -> false
5579 MIN (X, 0) > 5 -> false */
5580 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5581
5582 else
5583 /* MIN (X, 0) > -1 -> X > -1 */
5584 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5585
5586 default:
5587 return NULL_TREE;
5588 }
5589 }
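
/* Worked example, illustrative only: for MAX (x, 3) >= 4 the GE_EXPR
   case rewrites to MAX (x, 3) == 4 || MAX (x, 3) > 4; with
   minmax_const 3 less than comp_const 4, the EQ arm yields x == 4 and
   the GT arm x > 4, which later folding can combine to x >= 4. */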
5590 \f
5591 /* T is an integer expression that is being multiplied, divided, or taken a
5592 modulus (CODE says which and what kind of divide or modulus) by a
5593 constant C. See if we can eliminate that operation by folding it with
5594 other operations already in T. WIDE_TYPE, if non-null, is a type that
5595 should be used for the computation if wider than our type.
5596
5597 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5598 (X * 2) + (Y * 4). We must, however, be assured that either the original
5599 expression would not overflow or that overflow is undefined for the type
5600 in the language in question.
5601
5602 If we return a non-null expression, it is an equivalent form of the
5603 original computation, but need not be in the original type.
5604
5605 We set *STRICT_OVERFLOW_P to true if the return value depends on
5606 signed overflow being undefined. Otherwise we do not change
5607 *STRICT_OVERFLOW_P. */
5608
5609 static tree
5610 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5611 bool *strict_overflow_p)
5612 {
5613 /* To avoid exponential search depth, refuse to allow recursion past
5614 three levels. Beyond that (1) it's highly unlikely that we'll find
5615 something interesting and (2) we've probably processed it before
5616 when we built the inner expression. */
5617
5618 static int depth;
5619 tree ret;
5620
5621 if (depth > 3)
5622 return NULL;
5623
5624 depth++;
5625 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5626 depth--;
5627
5628 return ret;
5629 }
5630
5631 static tree
5632 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5633 bool *strict_overflow_p)
5634 {
5635 tree type = TREE_TYPE (t);
5636 enum tree_code tcode = TREE_CODE (t);
5637 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5638 > GET_MODE_SIZE (TYPE_MODE (type)))
5639 ? wide_type : type);
5640 tree t1, t2;
5641 int same_p = tcode == code;
5642 tree op0 = NULL_TREE, op1 = NULL_TREE;
5643 bool sub_strict_overflow_p;
5644
5645 /* Don't deal with constants of zero here; they confuse the code below. */
5646 if (integer_zerop (c))
5647 return NULL_TREE;
5648
5649 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5650 op0 = TREE_OPERAND (t, 0);
5651
5652 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5653 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5654
5655 /* Note that we need not handle conditional operations here since fold
5656 already handles those cases. So just do arithmetic here. */
5657 switch (tcode)
5658 {
5659 case INTEGER_CST:
5660 /* For a constant, we can always simplify if we are a multiply
5661 or (for divide and modulus) if it is a multiple of our constant. */
5662 if (code == MULT_EXPR
5663 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5664 return const_binop (code, fold_convert (ctype, t),
5665 fold_convert (ctype, c));
5666 break;
5667
5668 CASE_CONVERT: case NON_LVALUE_EXPR:
5669 /* If op0 is an expression ... */
5670 if ((COMPARISON_CLASS_P (op0)
5671 || UNARY_CLASS_P (op0)
5672 || BINARY_CLASS_P (op0)
5673 || VL_EXP_CLASS_P (op0)
5674 || EXPRESSION_CLASS_P (op0))
5675 /* ... and has wrapping overflow, and its type is smaller
5676 than ctype, then we cannot pass through as widening. */
5677 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5678 && (TYPE_PRECISION (ctype)
5679 > TYPE_PRECISION (TREE_TYPE (op0))))
5680 /* ... or this is a truncation (t is narrower than op0),
5681 then we cannot pass through this narrowing. */
5682 || (TYPE_PRECISION (type)
5683 < TYPE_PRECISION (TREE_TYPE (op0)))
5684 /* ... or signedness changes for division or modulus,
5685 then we cannot pass through this conversion. */
5686 || (code != MULT_EXPR
5687 && (TYPE_UNSIGNED (ctype)
5688 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5689 /* ... or has undefined overflow while the converted to
5690 type has not, we cannot do the operation in the inner type
5691 as that would introduce undefined overflow. */
5692 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5693 && !TYPE_OVERFLOW_UNDEFINED (type))))
5694 break;
5695
5696 /* Pass the constant down and see if we can make a simplification. If
5697 we can, replace this expression with the inner simplification for
5698 possible later conversion to our or some other type. */
5699 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5700 && TREE_CODE (t2) == INTEGER_CST
5701 && !TREE_OVERFLOW (t2)
5702 && (0 != (t1 = extract_muldiv (op0, t2, code,
5703 code == MULT_EXPR
5704 ? ctype : NULL_TREE,
5705 strict_overflow_p))))
5706 return t1;
5707 break;
5708
5709 case ABS_EXPR:
5710 /* If widening the type changes it from signed to unsigned, then we
5711 must avoid building ABS_EXPR itself as unsigned. */
5712 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5713 {
5714 tree cstype = (*signed_type_for) (ctype);
5715 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5716 != 0)
5717 {
5718 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5719 return fold_convert (ctype, t1);
5720 }
5721 break;
5722 }
5723 /* If the constant is negative, we cannot simplify this. */
5724 if (tree_int_cst_sgn (c) == -1)
5725 break;
5726 /* FALLTHROUGH */
5727 case NEGATE_EXPR:
5728 /* For division and modulus, type can't be unsigned, as e.g.
5729 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5730 For signed types, even with wrapping overflow, this is fine. */
5731 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5732 break;
5733 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5734 != 0)
5735 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5736 break;
5737
5738 case MIN_EXPR: case MAX_EXPR:
5739 /* If widening the type changes the signedness, then we can't perform
5740 this optimization as that changes the result. */
5741 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5742 break;
5743
5744 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5745 sub_strict_overflow_p = false;
5746 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5747 &sub_strict_overflow_p)) != 0
5748 && (t2 = extract_muldiv (op1, c, code, wide_type,
5749 &sub_strict_overflow_p)) != 0)
5750 {
5751 if (tree_int_cst_sgn (c) < 0)
5752 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5753 if (sub_strict_overflow_p)
5754 *strict_overflow_p = true;
5755 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5756 fold_convert (ctype, t2));
5757 }
5758 break;
5759
5760 case LSHIFT_EXPR: case RSHIFT_EXPR:
5761 /* If the second operand is constant, this is a multiplication
5762 or floor division by a power of two, so we can treat it that
5763 way unless the multiplier or divisor overflows. Signed
5764 left-shift overflow is implementation-defined rather than
5765 undefined in C90, so do not convert signed left shift into
5766 multiplication. */
5767 if (TREE_CODE (op1) == INTEGER_CST
5768 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5769 /* const_binop may not detect overflow correctly,
5770 so check for it explicitly here. */
5771 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5772 && 0 != (t1 = fold_convert (ctype,
5773 const_binop (LSHIFT_EXPR,
5774 size_one_node,
5775 op1)))
5776 && !TREE_OVERFLOW (t1))
5777 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5778 ? MULT_EXPR : FLOOR_DIV_EXPR,
5779 ctype,
5780 fold_convert (ctype, op0),
5781 t1),
5782 c, code, wide_type, strict_overflow_p);
5783 break;
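
/* Illustration, not in the original sources: for unsigned x, folding
   (x << 3) * 4 reaches this case with c = 4; the shift is rewritten
   as x * 8 here, and the recursive call merges the two constant
   factors into x * 32. */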
5784
5785 case PLUS_EXPR: case MINUS_EXPR:
5786 /* See if we can eliminate the operation on both sides. If we can, we
5787 can return a new PLUS or MINUS. If we can't, the only remaining
5788 cases where we can do anything are if the second operand is a
5789 constant. */
5790 sub_strict_overflow_p = false;
5791 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5792 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5793 if (t1 != 0 && t2 != 0
5794 && (code == MULT_EXPR
5795 /* If not multiplication, we can only do this if both operands
5796 are divisible by c. */
5797 || (multiple_of_p (ctype, op0, c)
5798 && multiple_of_p (ctype, op1, c))))
5799 {
5800 if (sub_strict_overflow_p)
5801 *strict_overflow_p = true;
5802 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5803 fold_convert (ctype, t2));
5804 }
5805
5806 /* If this was a subtraction, negate OP1 and set it to be an addition.
5807 This simplifies the logic below. */
5808 if (tcode == MINUS_EXPR)
5809 {
5810 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5811 /* If OP1 was not easily negatable, the constant may be OP0. */
5812 if (TREE_CODE (op0) == INTEGER_CST)
5813 {
5814 tree tem = op0;
5815 op0 = op1;
5816 op1 = tem;
5817 tem = t1;
5818 t1 = t2;
5819 t2 = tem;
5820 }
5821 }
5822
5823 if (TREE_CODE (op1) != INTEGER_CST)
5824 break;
5825
5826 /* If either OP1 or C is negative, this optimization is not safe for
5827 some of the division and remainder types while for others we need
5828 to change the code. */
5829 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5830 {
5831 if (code == CEIL_DIV_EXPR)
5832 code = FLOOR_DIV_EXPR;
5833 else if (code == FLOOR_DIV_EXPR)
5834 code = CEIL_DIV_EXPR;
5835 else if (code != MULT_EXPR
5836 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5837 break;
5838 }
5839
5840 /* If it's a multiply or a division/modulus operation of a multiple
5841 of our constant, do the operation and verify it doesn't overflow. */
5842 if (code == MULT_EXPR
5843 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5844 {
5845 op1 = const_binop (code, fold_convert (ctype, op1),
5846 fold_convert (ctype, c));
5847 /* We allow the constant to overflow with wrapping semantics. */
5848 if (op1 == 0
5849 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5850 break;
5851 }
5852 else
5853 break;
5854
5855 /* If we have an unsigned type, we cannot widen the operation since it
5856 will change the result if the original computation overflowed. */
5857 if (TYPE_UNSIGNED (ctype) && ctype != type)
5858 break;
5859
5860 /* If we were able to eliminate our operation from the first side,
5861 apply our operation to the second side and reform the PLUS. */
5862 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5863 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5864
5865 /* The last case is if we are a multiply. In that case, we can
5866 apply the distributive law to commute the multiply and addition
5867 if the multiplication of the constants doesn't overflow
5868 and overflow is defined. With undefined overflow
5869 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5870 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5871 return fold_build2 (tcode, ctype,
5872 fold_build2 (code, ctype,
5873 fold_convert (ctype, op0),
5874 fold_convert (ctype, c)),
5875 op1);
5876
5877 break;
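
/* Worked example of the PLUS_EXPR path, illustrative: dividing
   x * 6 + 10 by c = 2 with TRUNC_DIV_EXPR, for signed x where
   overflow is undefined, eliminates the division on the first addend
   (t1 = x * 3); op1 = 10 is itself a multiple of 2, so the result is
   x * 3 + 5. Both addends must be divisible by c, since
   (a + b) / c != a/c + b/c in general. */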
5878
5879 case MULT_EXPR:
5880 /* We have a special case here if we are doing something like
5881 (C * 8) % 4 since we know that's zero. */
5882 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5883 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5884 /* If the multiplication can overflow we cannot optimize this. */
5885 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5886 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5887 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5888 {
5889 *strict_overflow_p = true;
5890 return omit_one_operand (type, integer_zero_node, op0);
5891 }
5892
5893 /* ... fall through ... */
5894
5895 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5896 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5897 /* If we can extract our operation from the LHS, do so and return a
5898 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5899 do something only if the second operand is a constant. */
5900 if (same_p
5901 && (t1 = extract_muldiv (op0, c, code, wide_type,
5902 strict_overflow_p)) != 0)
5903 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5904 fold_convert (ctype, op1));
5905 else if (tcode == MULT_EXPR && code == MULT_EXPR
5906 && (t1 = extract_muldiv (op1, c, code, wide_type,
5907 strict_overflow_p)) != 0)
5908 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5909 fold_convert (ctype, t1));
5910 else if (TREE_CODE (op1) != INTEGER_CST)
5911 return 0;
5912
5913 /* If these are the same operation types, we can associate them
5914 assuming no overflow. */
5915 if (tcode == code)
5916 {
5917 bool overflow_p = false;
5918 bool overflow_mul_p;
5919 signop sign = TYPE_SIGN (ctype);
5920 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5921 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5922 if (overflow_mul_p
5923 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5924 overflow_p = true;
5925 if (!overflow_p)
5926 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5927 wide_int_to_tree (ctype, mul));
5928 }
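
/* Example of the association above, illustration only: for
   (x / 4) / 6 with TRUNC_DIV_EXPR, tcode == code, so the two divisors
   are multiplied to 24 and the expression becomes x / 24, provided
   4 * 6 does not overflow the type. */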
5929
5930 /* If these operations "cancel" each other, we have the main
5931 optimizations of this pass, which occur when either constant is a
5932 multiple of the other, in which case we replace this with either an
5933 operation of CODE or TCODE.
5934
5935 If we have an unsigned type, we cannot do this since it will change
5936 the result if the original computation overflowed. */
5937 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5938 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5939 || (tcode == MULT_EXPR
5940 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5941 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5942 && code != MULT_EXPR)))
5943 {
5944 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5945 {
5946 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5947 *strict_overflow_p = true;
5948 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5949 fold_convert (ctype,
5950 const_binop (TRUNC_DIV_EXPR,
5951 op1, c)));
5952 }
5953 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
5954 {
5955 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5956 *strict_overflow_p = true;
5957 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5958 fold_convert (ctype,
5959 const_binop (TRUNC_DIV_EXPR,
5960 c, op1)));
5961 }
5962 }
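
/* Cancellation examples, not in the original sources: for signed x
   with undefined overflow, (x * 8) / 4 divides the constants and
   becomes x * 2, while (x * 4) / 8 divides the other way and becomes
   x / 2; in both cases *strict_overflow_p records the dependence on
   overflow being undefined. */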
5963 break;
5964
5965 default:
5966 break;
5967 }
5968
5969 return 0;
5970 }
5971 \f
5972 /* Return a node which has the indicated constant VALUE (either 0 or
5973 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5974 and is of the indicated TYPE. */
5975
5976 tree
5977 constant_boolean_node (bool value, tree type)
5978 {
5979 if (type == integer_type_node)
5980 return value ? integer_one_node : integer_zero_node;
5981 else if (type == boolean_type_node)
5982 return value ? boolean_true_node : boolean_false_node;
5983 else if (TREE_CODE (type) == VECTOR_TYPE)
5984 return build_vector_from_val (type,
5985 build_int_cst (TREE_TYPE (type),
5986 value ? -1 : 0));
5987 else
5988 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5989 }
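
/* Usage sketch, illustrative: constant_boolean_node (true,
   boolean_type_node) returns boolean_true_node, while for a vector
   type it builds a vector with every element -1 for true or 0 for
   false, matching the all-ones encoding that vector comparisons
   produce. */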
5990
5991
5992 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5993 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5994 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5995 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5996 COND is the first argument to CODE; otherwise (as in the example
5997 given here), it is the second argument. TYPE is the type of the
5998 original expression. Return NULL_TREE if no simplification is
5999 possible. */
6000
6001 static tree
6002 fold_binary_op_with_conditional_arg (location_t loc,
6003 enum tree_code code,
6004 tree type, tree op0, tree op1,
6005 tree cond, tree arg, int cond_first_p)
6006 {
6007 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6008 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6009 tree test, true_value, false_value;
6010 tree lhs = NULL_TREE;
6011 tree rhs = NULL_TREE;
6012 enum tree_code cond_code = COND_EXPR;
6013
6014 if (TREE_CODE (cond) == COND_EXPR
6015 || TREE_CODE (cond) == VEC_COND_EXPR)
6016 {
6017 test = TREE_OPERAND (cond, 0);
6018 true_value = TREE_OPERAND (cond, 1);
6019 false_value = TREE_OPERAND (cond, 2);
6020 /* If this operand throws an exception, then it does not make
6021 sense to try to perform a logical or arithmetic operation
6022 involving it. */
6023 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6024 lhs = true_value;
6025 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6026 rhs = false_value;
6027 }
6028 else
6029 {
6030 tree testtype = TREE_TYPE (cond);
6031 test = cond;
6032 true_value = constant_boolean_node (true, testtype);
6033 false_value = constant_boolean_node (false, testtype);
6034 }
6035
6036 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6037 cond_code = VEC_COND_EXPR;
6038
6039 /* This transformation is only worthwhile if we don't have to wrap ARG
6040 in a SAVE_EXPR and the operation can be simplified without recursing
6041 on at least one of the branches once it's pushed inside the COND_EXPR. */
6042 if (!TREE_CONSTANT (arg)
6043 && (TREE_SIDE_EFFECTS (arg)
6044 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6045 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6046 return NULL_TREE;
6047
6048 arg = fold_convert_loc (loc, arg_type, arg);
6049 if (lhs == 0)
6050 {
6051 true_value = fold_convert_loc (loc, cond_type, true_value);
6052 if (cond_first_p)
6053 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6054 else
6055 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6056 }
6057 if (rhs == 0)
6058 {
6059 false_value = fold_convert_loc (loc, cond_type, false_value);
6060 if (cond_first_p)
6061 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6062 else
6063 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6064 }
6065
6066 /* Check that we have simplified at least one of the branches. */
6067 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6068 return NULL_TREE;
6069
6070 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6071 }
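
/* Worked example, added for illustration: folding 5 + (b ? x : 3)
   pushes the addition into both arms, giving b ? (5 + x) : 8; the
   guards above only allow this when ARG is cheap enough and at least
   one arm actually simplifies, so no work is duplicated at run
   time. */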
6072
6073 \f
6074 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6075
6076 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6077 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6078 ADDEND is the same as X.
6079
6080 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6081 and finite. The problematic cases are when X is zero, and its mode
6082 has signed zeros. In the case of rounding towards -infinity,
6083 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6084 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6085
6086 bool
6087 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6088 {
6089 if (!real_zerop (addend))
6090 return false;
6091
6092 /* Don't allow the fold with -fsignaling-nans. */
6093 if (HONOR_SNANS (element_mode (type)))
6094 return false;
6095
6096 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6097 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6098 return true;
6099
6100 /* In a vector or complex, we would need to check the sign of all zeros. */
6101 if (TREE_CODE (addend) != REAL_CST)
6102 return false;
6103
6104 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6105 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6106 negate = !negate;
6107
6108 /* The mode has signed zeros, and we have to honor their sign.
6109 In this situation, there is only one case we can return true for.
6110 X - 0 is the same as X unless rounding towards -infinity is
6111 supported. */
6112 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6113 }
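
/* Illustration, not from the original sources: with signed zeros
   honored but default rounding, x - 0.0 folds to x (the negate case
   returns true), while x + 0.0 does not, because (-0.0) + 0.0
   evaluates to +0.0 and would lose the sign of a negative-zero x. */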
6114
6115 /* Subroutine of fold() that checks comparisons of built-in math
6116 functions against real constants.
6117
6118 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6119 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6120 is the type of the result and ARG0 and ARG1 are the operands of the
6121 comparison. ARG1 must be a TREE_REAL_CST.
6122
6123 The function returns the constant folded tree if a simplification
6124 can be made, and NULL_TREE otherwise. */
6125
6126 static tree
6127 fold_mathfn_compare (location_t loc,
6128 enum built_in_function fcode, enum tree_code code,
6129 tree type, tree arg0, tree arg1)
6130 {
6131 REAL_VALUE_TYPE c;
6132
6133 if (BUILTIN_SQRT_P (fcode))
6134 {
6135 tree arg = CALL_EXPR_ARG (arg0, 0);
6136 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6137
6138 c = TREE_REAL_CST (arg1);
6139 if (REAL_VALUE_NEGATIVE (c))
6140 {
6141 /* sqrt(x) < y is always false, if y is negative. */
6142 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6143 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6144
6145 /* sqrt(x) > y is always true, if y is negative and we
6146 don't care about NaNs, i.e. negative values of x. */
6147 if (code == NE_EXPR || !HONOR_NANS (mode))
6148 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6149
6150 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6151 return fold_build2_loc (loc, GE_EXPR, type, arg,
6152 build_real (TREE_TYPE (arg), dconst0));
6153 }
6154 else if (code == GT_EXPR || code == GE_EXPR)
6155 {
6156 REAL_VALUE_TYPE c2;
6157
6158 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6159 real_convert (&c2, mode, &c2);
6160
6161 if (REAL_VALUE_ISINF (c2))
6162 {
6163 /* sqrt(x) > y is x == +Inf, when y is very large. */
6164 if (HONOR_INFINITIES (mode))
6165 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6166 build_real (TREE_TYPE (arg), c2));
6167
6168 /* sqrt(x) > y is always false, when y is very large
6169 and we don't care about infinities. */
6170 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6171 }
6172
6173 /* sqrt(x) > c is the same as x > c*c. */
6174 return fold_build2_loc (loc, code, type, arg,
6175 build_real (TREE_TYPE (arg), c2));
6176 }
6177 else if (code == LT_EXPR || code == LE_EXPR)
6178 {
6179 REAL_VALUE_TYPE c2;
6180
6181 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6182 real_convert (&c2, mode, &c2);
6183
6184 if (REAL_VALUE_ISINF (c2))
6185 {
6186 /* sqrt(x) < y is always true, when y is a very large
6187 value and we don't care about NaNs or Infinities. */
6188 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6189 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6190
6191 /* sqrt(x) < y is x != +Inf when y is very large and we
6192 don't care about NaNs. */
6193 if (! HONOR_NANS (mode))
6194 return fold_build2_loc (loc, NE_EXPR, type, arg,
6195 build_real (TREE_TYPE (arg), c2));
6196
6197 /* sqrt(x) < y is x >= 0 when y is very large and we
6198 don't care about Infinities. */
6199 if (! HONOR_INFINITIES (mode))
6200 return fold_build2_loc (loc, GE_EXPR, type, arg,
6201 build_real (TREE_TYPE (arg), dconst0));
6202
6203 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6204 arg = save_expr (arg);
6205 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6206 fold_build2_loc (loc, GE_EXPR, type, arg,
6207 build_real (TREE_TYPE (arg),
6208 dconst0)),
6209 fold_build2_loc (loc, NE_EXPR, type, arg,
6210 build_real (TREE_TYPE (arg),
6211 c2)));
6212 }
6213
6214 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6215 if (! HONOR_NANS (mode))
6216 return fold_build2_loc (loc, code, type, arg,
6217 build_real (TREE_TYPE (arg), c2));
6218
6219 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6220 arg = save_expr (arg);
6221 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6222 fold_build2_loc (loc, GE_EXPR, type, arg,
6223 build_real (TREE_TYPE (arg),
6224 dconst0)),
6225 fold_build2_loc (loc, code, type, arg,
6226 build_real (TREE_TYPE (arg),
6227 c2)));
6228 }
6229 }
6230
6231 return NULL_TREE;
6232 }
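
/* Example of the sqrt cases above, illustrative: when NaNs need not
   be honored, sqrt (x) < 3.0 squares the bound and becomes x < 9.0;
   with NaNs honored it becomes x >= 0.0 && x < 9.0 instead, since a
   negative x would make sqrt return NaN and the comparison false. */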
6233
6234 /* Subroutine of fold() that optimizes comparisons against Infinities,
6235 either +Inf or -Inf.
6236
6237 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6238 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6239 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6240
6241 The function returns the constant folded tree if a simplification
6242 can be made, and NULL_TREE otherwise. */
6243
6244 static tree
6245 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6246 tree arg0, tree arg1)
6247 {
6248 machine_mode mode;
6249 REAL_VALUE_TYPE max;
6250 tree temp;
6251 bool neg;
6252
6253 mode = TYPE_MODE (TREE_TYPE (arg0));
6254
6255 /* For negative infinity swap the sense of the comparison. */
6256 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6257 if (neg)
6258 code = swap_tree_comparison (code);
6259
6260 switch (code)
6261 {
6262 case GT_EXPR:
6263 /* x > +Inf is always false, if we ignore sNaNs. */
6264 if (HONOR_SNANS (mode))
6265 return NULL_TREE;
6266 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6267
6268 case LE_EXPR:
6269 /* x <= +Inf is always true, if we don't care about NaNs. */
6270 if (! HONOR_NANS (mode))
6271 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6272
6273 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6274 arg0 = save_expr (arg0);
6275 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6276
6277 case EQ_EXPR:
6278 case GE_EXPR:
6279 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6280 real_maxval (&max, neg, mode);
6281 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6282 arg0, build_real (TREE_TYPE (arg0), max));
6283
6284 case LT_EXPR:
6285 /* x < +Inf is always equal to x <= DBL_MAX. */
6286 real_maxval (&max, neg, mode);
6287 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6288 arg0, build_real (TREE_TYPE (arg0), max));
6289
6290 case NE_EXPR:
6291 /* x != +Inf is always equal to !(x > DBL_MAX). */
6292 real_maxval (&max, neg, mode);
6293 if (! HONOR_NANS (mode))
6294 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6295 arg0, build_real (TREE_TYPE (arg0), max));
6296
6297 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6298 arg0, build_real (TREE_TYPE (arg0), max));
6299 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6300
6301 default:
6302 break;
6303 }
6304
6305 return NULL_TREE;
6306 }
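
/* Illustration, not from the original sources: for double x,
   x >= +Inf folds to x > DBL_MAX and x < +Inf to x <= DBL_MAX; with
   -Inf the comparison sense is swapped first, so x <= -Inf becomes
   x < -DBL_MAX. */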
6307
6308 /* Subroutine of fold() that optimizes comparisons of a division by
6309 a nonzero integer constant against an integer constant, i.e.
6310 X/C1 op C2.
6311
6312 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6313 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6314 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6315
6316 The function returns the constant folded tree if a simplification
6317 can be made, and NULL_TREE otherwise. */
6318
6319 static tree
6320 fold_div_compare (location_t loc,
6321 enum tree_code code, tree type, tree arg0, tree arg1)
6322 {
6323 tree prod, tmp, hi, lo;
6324 tree arg00 = TREE_OPERAND (arg0, 0);
6325 tree arg01 = TREE_OPERAND (arg0, 1);
6326 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6327 bool neg_overflow = false;
6328 bool overflow;
6329
6330 /* We have to do this the hard way to detect unsigned overflow.
6331 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6332 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6333 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6335
6336 if (sign == UNSIGNED)
6337 {
6338 tmp = int_const_binop (MINUS_EXPR, arg01,
6339 build_int_cst (TREE_TYPE (arg01), 1));
6340 lo = prod;
6341
6342 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6343 val = wi::add (prod, tmp, sign, &overflow);
6344 hi = force_fit_type (TREE_TYPE (arg00), val,
6345 -1, overflow | TREE_OVERFLOW (prod));
6346 }
6347 else if (tree_int_cst_sgn (arg01) >= 0)
6348 {
6349 tmp = int_const_binop (MINUS_EXPR, arg01,
6350 build_int_cst (TREE_TYPE (arg01), 1));
6351 switch (tree_int_cst_sgn (arg1))
6352 {
6353 case -1:
6354 neg_overflow = true;
6355 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6356 hi = prod;
6357 break;
6358
6359 case 0:
6360 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6361 hi = tmp;
6362 break;
6363
6364 case 1:
6365 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6366 lo = prod;
6367 break;
6368
6369 default:
6370 gcc_unreachable ();
6371 }
6372 }
6373 else
6374 {
6375 /* A negative divisor reverses the relational operators. */
6376 code = swap_tree_comparison (code);
6377
6378 tmp = int_const_binop (PLUS_EXPR, arg01,
6379 build_int_cst (TREE_TYPE (arg01), 1));
6380 switch (tree_int_cst_sgn (arg1))
6381 {
6382 case -1:
6383 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6384 lo = prod;
6385 break;
6386
6387 case 0:
6388 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6389 lo = tmp;
6390 break;
6391
6392 case 1:
6393 neg_overflow = true;
6394 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6395 hi = prod;
6396 break;
6397
6398 default:
6399 gcc_unreachable ();
6400 }
6401 }
6402
6403 switch (code)
6404 {
6405 case EQ_EXPR:
6406 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6407 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6408 if (TREE_OVERFLOW (hi))
6409 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6410 if (TREE_OVERFLOW (lo))
6411 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6412 return build_range_check (loc, type, arg00, 1, lo, hi);
6413
6414 case NE_EXPR:
6415 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6416 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6417 if (TREE_OVERFLOW (hi))
6418 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6419 if (TREE_OVERFLOW (lo))
6420 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6421 return build_range_check (loc, type, arg00, 0, lo, hi);
6422
6423 case LT_EXPR:
6424 if (TREE_OVERFLOW (lo))
6425 {
6426 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6427 return omit_one_operand_loc (loc, type, tmp, arg00);
6428 }
6429 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6430
6431 case LE_EXPR:
6432 if (TREE_OVERFLOW (hi))
6433 {
6434 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6435 return omit_one_operand_loc (loc, type, tmp, arg00);
6436 }
6437 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6438
6439 case GT_EXPR:
6440 if (TREE_OVERFLOW (hi))
6441 {
6442 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6443 return omit_one_operand_loc (loc, type, tmp, arg00);
6444 }
6445 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6446
6447 case GE_EXPR:
6448 if (TREE_OVERFLOW (lo))
6449 {
6450 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6451 return omit_one_operand_loc (loc, type, tmp, arg00);
6452 }
6453 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6454
6455 default:
6456 break;
6457 }
6458
6459 return NULL_TREE;
6460 }
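
/* Worked example, illustration only: for unsigned x, x / 4 == 3
   computes prod = 12 and hi = 12 + 3 = 15, so the EQ_EXPR case emits
   the range check 12 <= x && x <= 15; likewise x / 4 > 3 becomes
   x > 15. */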
6461
6462
6463 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6464 equality/inequality test, then return a simplified form of the test
6465 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6466 result type. */
6467
6468 static tree
6469 fold_single_bit_test_into_sign_test (location_t loc,
6470 enum tree_code code, tree arg0, tree arg1,
6471 tree result_type)
6472 {
6473 /* If this is testing a single bit, we can optimize the test. */
6474 if ((code == NE_EXPR || code == EQ_EXPR)
6475 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6476 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6477 {
6478 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6479 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6480 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6481
6482 if (arg00 != NULL_TREE
6483 /* This is only a win if casting to a signed type is cheap,
6484 i.e. when arg00's type is not a partial mode. */
6485 && TYPE_PRECISION (TREE_TYPE (arg00))
6486 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6487 {
6488 tree stype = signed_type_for (TREE_TYPE (arg00));
6489 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6490 result_type,
6491 fold_convert_loc (loc, stype, arg00),
6492 build_int_cst (stype, 0));
6493 }
6494 }
6495
6496 return NULL_TREE;
6497 }
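
/* Example, added for illustration: for a 32-bit int x, the test
   (x & 0x80000000) != 0 isolates the sign bit, so it converts to
   x < 0, and (x & 0x80000000) == 0 to x >= 0, removing the mask
   operation entirely. */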
6498
6499 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6500 equality/inequality test, then return a simplified form of
6501 the test using shifts and logical operations. Otherwise return
6502 NULL. RESULT_TYPE is the desired result type. */
6503
6504 tree
6505 fold_single_bit_test (location_t loc, enum tree_code code,
6506 tree arg0, tree arg1, tree result_type)
6507 {
6508 /* If this is testing a single bit, we can optimize the test. */
6509 if ((code == NE_EXPR || code == EQ_EXPR)
6510 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6511 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6512 {
6513 tree inner = TREE_OPERAND (arg0, 0);
6514 tree type = TREE_TYPE (arg0);
6515 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6516 machine_mode operand_mode = TYPE_MODE (type);
6517 int ops_unsigned;
6518 tree signed_type, unsigned_type, intermediate_type;
6519 tree tem, one;
6520
6521 /* First, see if we can fold the single bit test into a sign-bit
6522 test. */
6523 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6524 result_type);
6525 if (tem)
6526 return tem;
6527
6528 /* Otherwise we have (A & C) != 0 where C is a single bit,
6529 convert that into ((A >> C2) & 1), where C2 = log2(C).
6530 Similarly for (A & C) == 0. */
6531
6532 /* If INNER is a right shift of a constant and it plus BITNUM does
6533 not overflow, adjust BITNUM and INNER. */
6534 if (TREE_CODE (inner) == RSHIFT_EXPR
6535 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6536 && bitnum < TYPE_PRECISION (type)
6537 && wi::ltu_p (TREE_OPERAND (inner, 1),
6538 TYPE_PRECISION (type) - bitnum))
6539 {
6540 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6541 inner = TREE_OPERAND (inner, 0);
6542 }
6543
6544 /* If we are going to be able to omit the AND below, we must do our
6545 operations as unsigned. If we must use the AND, we have a choice.
6546 Normally unsigned is faster, but for some machines signed is. */
6547 #ifdef LOAD_EXTEND_OP
6548 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6549 && !flag_syntax_only) ? 0 : 1;
6550 #else
6551 ops_unsigned = 1;
6552 #endif
6553
6554 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6555 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6556 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6557 inner = fold_convert_loc (loc, intermediate_type, inner);
6558
6559 if (bitnum != 0)
6560 inner = build2 (RSHIFT_EXPR, intermediate_type,
6561 inner, size_int (bitnum));
6562
6563 one = build_int_cst (intermediate_type, 1);
6564
6565 if (code == EQ_EXPR)
6566 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6567
6568 /* Put the AND last so it can combine with more things. */
6569 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6570
6571 /* Make sure to return the proper type. */
6572 inner = fold_convert_loc (loc, result_type, inner);
6573
6574 return inner;
6575 }
6576 return NULL_TREE;
6577 }
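
/* Illustration, not in the original sources: when the sign-bit form
   above does not apply, (x & 8) != 0 becomes ((x >> 3) & 1) and
   (x & 8) == 0 becomes (((x >> 3) ^ 1) & 1), the XOR flipping the
   extracted bit for the equality case. */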
6578
6579 /* Check whether we are allowed to reorder operands arg0 and arg1,
6580 such that the evaluation of arg1 occurs before arg0. */
6581
6582 static bool
6583 reorder_operands_p (const_tree arg0, const_tree arg1)
6584 {
6585 if (! flag_evaluation_order)
6586 return true;
6587 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6588 return true;
6589 return ! TREE_SIDE_EFFECTS (arg0)
6590 && ! TREE_SIDE_EFFECTS (arg1);
6591 }
6592
6593 /* Test whether it is preferable to swap two operands, ARG0 and
6594 ARG1, for example because ARG0 is an integer constant and ARG1
6595 isn't. If REORDER is true, only recommend swapping if we can
6596 evaluate the operands in reverse order. */
6597
6598 bool
6599 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6600 {
6601 if (CONSTANT_CLASS_P (arg1))
6602 return 0;
6603 if (CONSTANT_CLASS_P (arg0))
6604 return 1;
6605
6606 STRIP_SIGN_NOPS (arg0);
6607 STRIP_SIGN_NOPS (arg1);
6608
6609 if (TREE_CONSTANT (arg1))
6610 return 0;
6611 if (TREE_CONSTANT (arg0))
6612 return 1;
6613
6614 if (reorder && flag_evaluation_order
6615 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6616 return 0;
6617
6618 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6619 for commutative and comparison operators. Ensuring a canonical
6620 form allows the optimizers to find additional redundancies without
6621 having to explicitly check for both orderings. */
6622 if (TREE_CODE (arg0) == SSA_NAME
6623 && TREE_CODE (arg1) == SSA_NAME
6624 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6625 return 1;
6626
6627 /* Put SSA_NAMEs last. */
6628 if (TREE_CODE (arg1) == SSA_NAME)
6629 return 0;
6630 if (TREE_CODE (arg0) == SSA_NAME)
6631 return 1;
6632
6633 /* Put variables last. */
6634 if (DECL_P (arg1))
6635 return 0;
6636 if (DECL_P (arg0))
6637 return 1;
6638
6639 return 0;
6640 }
6641
6642 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6643 ARG0 is extended to a wider type. */
6644
6645 static tree
6646 fold_widened_comparison (location_t loc, enum tree_code code,
6647 tree type, tree arg0, tree arg1)
6648 {
6649 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6650 tree arg1_unw;
6651 tree shorter_type, outer_type;
6652 tree min, max;
6653 bool above, below;
6654
6655 if (arg0_unw == arg0)
6656 return NULL_TREE;
6657 shorter_type = TREE_TYPE (arg0_unw);
6658
6659 #ifdef HAVE_canonicalize_funcptr_for_compare
6660 /* Disable this optimization if we're casting a function pointer
6661 type on targets that require function pointer canonicalization. */
6662 if (HAVE_canonicalize_funcptr_for_compare
6663 && TREE_CODE (shorter_type) == POINTER_TYPE
6664 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6665 return NULL_TREE;
6666 #endif
6667
6668 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6669 return NULL_TREE;
6670
6671 arg1_unw = get_unwidened (arg1, NULL_TREE);
6672
6673 /* If possible, express the comparison in the shorter mode. */
6674 if ((code == EQ_EXPR || code == NE_EXPR
6675 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6676 && (TREE_TYPE (arg1_unw) == shorter_type
6677 || ((TYPE_PRECISION (shorter_type)
6678 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6679 && (TYPE_UNSIGNED (shorter_type)
6680 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6681 || (TREE_CODE (arg1_unw) == INTEGER_CST
6682 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6683 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6684 && int_fits_type_p (arg1_unw, shorter_type))))
6685 return fold_build2_loc (loc, code, type, arg0_unw,
6686 fold_convert_loc (loc, shorter_type, arg1_unw));
6687
6688 if (TREE_CODE (arg1_unw) != INTEGER_CST
6689 || TREE_CODE (shorter_type) != INTEGER_TYPE
6690 || !int_fits_type_p (arg1_unw, shorter_type))
6691 return NULL_TREE;
6692
6693 /* If we are comparing with an integer that does not fit into the range
6694 of the shorter type, the result is known. */
6695 outer_type = TREE_TYPE (arg1_unw);
6696 min = lower_bound_in_type (outer_type, shorter_type);
6697 max = upper_bound_in_type (outer_type, shorter_type);
6698
6699 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6700 max, arg1_unw));
6701 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6702 arg1_unw, min));
6703
6704 switch (code)
6705 {
6706 case EQ_EXPR:
6707 if (above || below)
6708 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6709 break;
6710
6711 case NE_EXPR:
6712 if (above || below)
6713 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6714 break;
6715
6716 case LT_EXPR:
6717 case LE_EXPR:
6718 if (above)
6719 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6720 else if (below)
6721 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6722 break;
6723 case GT_EXPR:
6724 case GE_EXPR:
6725 if (above)
6726 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6727 else if (below)
6728 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6729 break;
6730 default:
6731 break;
6732 }
6733
6734 return NULL_TREE;
6735 }
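
/* Worked example, illustrative: if c has type unsigned char and is
   compared as (int) c == 300, the constant 300 exceeds
   upper_bound_in_type (255), so "above" is true and the EQ_EXPR case
   folds the whole comparison to constant false. */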
6736
6737 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6738 ARG0 just the signedness is changed. */
6739
6740 static tree
6741 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6742 tree arg0, tree arg1)
6743 {
6744 tree arg0_inner;
6745 tree inner_type, outer_type;
6746
6747 if (!CONVERT_EXPR_P (arg0))
6748 return NULL_TREE;
6749
6750 outer_type = TREE_TYPE (arg0);
6751 arg0_inner = TREE_OPERAND (arg0, 0);
6752 inner_type = TREE_TYPE (arg0_inner);
6753
6754 #ifdef HAVE_canonicalize_funcptr_for_compare
6755 /* Disable this optimization if we're casting a function pointer
6756 type on targets that require function pointer canonicalization. */
6757 if (HAVE_canonicalize_funcptr_for_compare
6758 && TREE_CODE (inner_type) == POINTER_TYPE
6759 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6760 return NULL_TREE;
6761 #endif
6762
6763 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6764 return NULL_TREE;
6765
6766 if (TREE_CODE (arg1) != INTEGER_CST
6767 && !(CONVERT_EXPR_P (arg1)
6768 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6769 return NULL_TREE;
6770
6771 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6772 && code != NE_EXPR
6773 && code != EQ_EXPR)
6774 return NULL_TREE;
6775
6776 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6777 return NULL_TREE;
6778
6779 if (TREE_CODE (arg1) == INTEGER_CST)
6780 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6781 TREE_OVERFLOW (arg1));
6782 else
6783 arg1 = fold_convert_loc (loc, inner_type, arg1);
6784
6785 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6786 }
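
/* Illustration, not from the original sources: for int x, the test
   (unsigned) x == 5u changes only signedness, not precision, so it
   folds to x == 5 with the constant refitted into the inner type.
   Ordered comparisons are excluded when signedness changes because,
   e.g., 0 > -1 is true while 0u > (unsigned) -1 is false. */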
6787
6788
6789 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6790 means A >= Y && A != MAX, but in this case we know that
6791 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6792
6793 static tree
6794 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6795 {
6796 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6797
6798 if (TREE_CODE (bound) == LT_EXPR)
6799 a = TREE_OPERAND (bound, 0);
6800 else if (TREE_CODE (bound) == GT_EXPR)
6801 a = TREE_OPERAND (bound, 1);
6802 else
6803 return NULL_TREE;
6804
6805 typea = TREE_TYPE (a);
6806 if (!INTEGRAL_TYPE_P (typea)
6807 && !POINTER_TYPE_P (typea))
6808 return NULL_TREE;
6809
6810 if (TREE_CODE (ineq) == LT_EXPR)
6811 {
6812 a1 = TREE_OPERAND (ineq, 1);
6813 y = TREE_OPERAND (ineq, 0);
6814 }
6815 else if (TREE_CODE (ineq) == GT_EXPR)
6816 {
6817 a1 = TREE_OPERAND (ineq, 0);
6818 y = TREE_OPERAND (ineq, 1);
6819 }
6820 else
6821 return NULL_TREE;
6822
6823 if (TREE_TYPE (a1) != typea)
6824 return NULL_TREE;
6825
6826 if (POINTER_TYPE_P (typea))
6827 {
6828 /* Convert the pointers to integers before taking the difference. */
6829 tree ta = fold_convert_loc (loc, ssizetype, a);
6830 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6831 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6832 }
6833 else
6834 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6835
6836 if (!diff || !integer_onep (diff))
6837 return NULL_TREE;
6838
6839 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6840 }
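
/* Worked example, illustrative: given a < n && a + 1 > y, the bound
   a < n guarantees that a + 1 cannot wrap past the type maximum, so
   a + 1 > y can safely be weakened to a >= y; the diff test above
   checks that the two sides really differ by exactly one. */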
6841
6842 /* Fold a sum or difference of at least one multiplication.
6843 Returns the folded tree or NULL if no simplification could be made. */
6844
6845 static tree
6846 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6847 tree arg0, tree arg1)
6848 {
6849 tree arg00, arg01, arg10, arg11;
6850 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6851
6852 /* (A * C) +- (B * C) -> (A+-B) * C.
6853 (A * C) +- A -> A * (C+-1).
6854 We are most concerned about the case where C is a constant,
6855 but other combinations show up during loop reduction. Since
6856 it is not difficult, try all four possibilities. */
6857
6858 if (TREE_CODE (arg0) == MULT_EXPR)
6859 {
6860 arg00 = TREE_OPERAND (arg0, 0);
6861 arg01 = TREE_OPERAND (arg0, 1);
6862 }
6863 else if (TREE_CODE (arg0) == INTEGER_CST)
6864 {
6865 arg00 = build_one_cst (type);
6866 arg01 = arg0;
6867 }
6868 else
6869 {
6870 /* We cannot generate constant 1 for fract. */
6871 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6872 return NULL_TREE;
6873 arg00 = arg0;
6874 arg01 = build_one_cst (type);
6875 }
6876 if (TREE_CODE (arg1) == MULT_EXPR)
6877 {
6878 arg10 = TREE_OPERAND (arg1, 0);
6879 arg11 = TREE_OPERAND (arg1, 1);
6880 }
6881 else if (TREE_CODE (arg1) == INTEGER_CST)
6882 {
6883 arg10 = build_one_cst (type);
6884 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6885 the purpose of this canonicalization. */
6886 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6887 && negate_expr_p (arg1)
6888 && code == PLUS_EXPR)
6889 {
6890 arg11 = negate_expr (arg1);
6891 code = MINUS_EXPR;
6892 }
6893 else
6894 arg11 = arg1;
6895 }
6896 else
6897 {
6898 /* We cannot generate constant 1 for fract. */
6899 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6900 return NULL_TREE;
6901 arg10 = arg1;
6902 arg11 = build_one_cst (type);
6903 }
6904 same = NULL_TREE;
6905
6906 if (operand_equal_p (arg01, arg11, 0))
6907 same = arg01, alt0 = arg00, alt1 = arg10;
6908 else if (operand_equal_p (arg00, arg10, 0))
6909 same = arg00, alt0 = arg01, alt1 = arg11;
6910 else if (operand_equal_p (arg00, arg11, 0))
6911 same = arg00, alt0 = arg01, alt1 = arg10;
6912 else if (operand_equal_p (arg01, arg10, 0))
6913 same = arg01, alt0 = arg00, alt1 = arg11;
6914
6915 /* No identical multiplicands; see if we can find a common
6916 power-of-two factor in non-power-of-two multiplies. This
6917 can help in multi-dimensional array access. */
6918 else if (tree_fits_shwi_p (arg01)
6919 && tree_fits_shwi_p (arg11))
6920 {
6921 HOST_WIDE_INT int01, int11, tmp;
6922 bool swap = false;
6923 tree maybe_same;
6924 int01 = tree_to_shwi (arg01);
6925 int11 = tree_to_shwi (arg11);
6926
6927 /* Move min of absolute values to int11. */
6928 if (absu_hwi (int01) < absu_hwi (int11))
6929 {
6930 tmp = int01, int01 = int11, int11 = tmp;
6931 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6932 maybe_same = arg01;
6933 swap = true;
6934 }
6935 else
6936 maybe_same = arg11;
6937
6938 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6939 /* The remainder should not be a constant; otherwise we
6940 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6941 increase the number of multiplications needed. */
6942 && TREE_CODE (arg10) != INTEGER_CST)
6943 {
6944 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6945 build_int_cst (TREE_TYPE (arg00),
6946 int01 / int11));
6947 alt1 = arg10;
6948 same = maybe_same;
6949 if (swap)
6950 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6951 }
6952 }
6953
6954 if (same)
6955 return fold_build2_loc (loc, MULT_EXPR, type,
6956 fold_build2_loc (loc, code, type,
6957 fold_convert_loc (loc, type, alt0),
6958 fold_convert_loc (loc, type, alt1)),
6959 fold_convert_loc (loc, type, same));
6960
6961 return NULL_TREE;
6962 }
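
/* Editorial illustration (not part of the original source): with
   "int a, b, i, j;" the routine above performs folds such as

     a * 4 + b * 4   ->  (a + b) * 4      (identical multiplicands)
     a * 4 + a       ->  a * 5            (the (A * C) +- A case)
     i * 12 + j * 4  ->  (i * 3 + j) * 4  (common power-of-two factor)

   the last of which is the multi-dimensional array access case
   mentioned in the comment above.  */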
6963
6964 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6965 specified by EXPR into the buffer PTR of length LEN bytes.
6966 Return the number of bytes placed in the buffer, or zero
6967 upon failure. */
6968
6969 static int
6970 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6971 {
6972 tree type = TREE_TYPE (expr);
6973 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6974 int byte, offset, word, words;
6975 unsigned char value;
6976
6977 if ((off == -1 && total_bytes > len)
6978 || off >= total_bytes)
6979 return 0;
6980 if (off == -1)
6981 off = 0;
6982 words = total_bytes / UNITS_PER_WORD;
6983
6984 for (byte = 0; byte < total_bytes; byte++)
6985 {
6986 int bitpos = byte * BITS_PER_UNIT;
6987 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6988 number of bytes. */
6989 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
6990
6991 if (total_bytes > UNITS_PER_WORD)
6992 {
6993 word = byte / UNITS_PER_WORD;
6994 if (WORDS_BIG_ENDIAN)
6995 word = (words - 1) - word;
6996 offset = word * UNITS_PER_WORD;
6997 if (BYTES_BIG_ENDIAN)
6998 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6999 else
7000 offset += byte % UNITS_PER_WORD;
7001 }
7002 else
7003 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7004 if (offset >= off
7005 && offset - off < len)
7006 ptr[offset - off] = value;
7007 }
7008 return MIN (len, total_bytes - off);
7009 }
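
/* Editorial sketch (not part of the original source): the loop above
   extracts one byte of the sign-extended value per iteration and
   stores it where the target byte order dictates.  A standalone
   little-endian illustration, assuming 8-bit bytes and a word size
   of at least four bytes so that only the BYTES_BIG_ENDIAN branch
   matters:

     #include <stdio.h>

     int
     main (void)
     {
       unsigned int v = 0x11223344u;
       unsigned char buf[4];
       int byte;

       // offset == byte when !BYTES_BIG_ENDIAN
       for (byte = 0; byte < 4; byte++)
         buf[byte] = (v >> (byte * 8)) & 0xff;

       // Prints "44 33 22 11".
       printf ("%02x %02x %02x %02x\n", buf[0], buf[1], buf[2], buf[3]);
       return 0;
     }
*/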
7010
7011
7012 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7013 specified by EXPR into the buffer PTR of length LEN bytes.
7014 Return the number of bytes placed in the buffer, or zero
7015 upon failure. */
7016
7017 static int
7018 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7019 {
7020 tree type = TREE_TYPE (expr);
7021 machine_mode mode = TYPE_MODE (type);
7022 int total_bytes = GET_MODE_SIZE (mode);
7023 FIXED_VALUE_TYPE value;
7024 tree i_value, i_type;
7025
7026 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7027 return 0;
7028
7029 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7030
7031 if (NULL_TREE == i_type
7032 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7033 return 0;
7034
7035 value = TREE_FIXED_CST (expr);
7036 i_value = double_int_to_tree (i_type, value.data);
7037
7038 return native_encode_int (i_value, ptr, len, off);
7039 }
7040
7041
7042 /* Subroutine of native_encode_expr. Encode the REAL_CST
7043 specified by EXPR into the buffer PTR of length LEN bytes.
7044 Return the number of bytes placed in the buffer, or zero
7045 upon failure. */
7046
7047 static int
7048 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7049 {
7050 tree type = TREE_TYPE (expr);
7051 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7052 int byte, offset, word, words, bitpos;
7053 unsigned char value;
7054
7055 /* There are always 32 bits in each long of TMP, no matter the
7056 size of the host's long. We handle floating point representations
7057 with up to 192 bits. */
7058 long tmp[6];
7059
7060 if ((off == -1 && total_bytes > len)
7061 || off >= total_bytes)
7062 return 0;
7063 if (off == -1)
7064 off = 0;
7065 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7066
7067 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7068
7069 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7070 bitpos += BITS_PER_UNIT)
7071 {
7072 byte = (bitpos / BITS_PER_UNIT) & 3;
7073 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7074
7075 if (UNITS_PER_WORD < 4)
7076 {
7077 word = byte / UNITS_PER_WORD;
7078 if (WORDS_BIG_ENDIAN)
7079 word = (words - 1) - word;
7080 offset = word * UNITS_PER_WORD;
7081 if (BYTES_BIG_ENDIAN)
7082 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7083 else
7084 offset += byte % UNITS_PER_WORD;
7085 }
7086 else
7087 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7088 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7089 if (offset >= off
7090 && offset - off < len)
7091 ptr[offset - off] = value;
7092 }
7093 return MIN (len, total_bytes - off);
7094 }
7095
7096 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7097 specified by EXPR into the buffer PTR of length LEN bytes.
7098 Return the number of bytes placed in the buffer, or zero
7099 upon failure. */
7100
7101 static int
7102 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7103 {
7104 int rsize, isize;
7105 tree part;
7106
7107 part = TREE_REALPART (expr);
7108 rsize = native_encode_expr (part, ptr, len, off);
7109 if (off == -1
7110 && rsize == 0)
7111 return 0;
7112 part = TREE_IMAGPART (expr);
7113 if (off != -1)
7114 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7115 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7116 if (off == -1
7117 && isize != rsize)
7118 return 0;
7119 return rsize + isize;
7120 }
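
/* Editorial note (not part of the original source): the real part is
   encoded first, then the imaginary part.  When a start offset is
   given, it is reduced by the size of the real part (but not below
   zero) before encoding the imaginary half, so a read that starts
   inside the imaginary part still lands at the right bytes.  */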
7121
7122
7123 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7124 specified by EXPR into the buffer PTR of length LEN bytes.
7125 Return the number of bytes placed in the buffer, or zero
7126 upon failure. */
7127
7128 static int
7129 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7130 {
7131 unsigned i, count;
7132 int size, offset;
7133 tree itype, elem;
7134
7135 offset = 0;
7136 count = VECTOR_CST_NELTS (expr);
7137 itype = TREE_TYPE (TREE_TYPE (expr));
7138 size = GET_MODE_SIZE (TYPE_MODE (itype));
7139 for (i = 0; i < count; i++)
7140 {
7141 if (off >= size)
7142 {
7143 off -= size;
7144 continue;
7145 }
7146 elem = VECTOR_CST_ELT (expr, i);
7147 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7148 if ((off == -1 && res != size)
7149 || res == 0)
7150 return 0;
7151 offset += res;
7152 if (offset >= len)
7153 return offset;
7154 if (off != -1)
7155 off = 0;
7156 }
7157 return offset;
7158 }
7159
7160
7161 /* Subroutine of native_encode_expr. Encode the STRING_CST
7162 specified by EXPR into the buffer PTR of length LEN bytes.
7163 Return the number of bytes placed in the buffer, or zero
7164 upon failure. */
7165
7166 static int
7167 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7168 {
7169 tree type = TREE_TYPE (expr);
7170 HOST_WIDE_INT total_bytes;
7171
7172 if (TREE_CODE (type) != ARRAY_TYPE
7173 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7174 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7175 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7176 return 0;
7177 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7178 if ((off == -1 && total_bytes > len)
7179 || off >= total_bytes)
7180 return 0;
7181 if (off == -1)
7182 off = 0;
7183 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7184 {
7185 int written = 0;
7186 if (off < TREE_STRING_LENGTH (expr))
7187 {
7188 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7189 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7190 }
7191 memset (ptr + written, 0,
7192 MIN (total_bytes - written, len - written));
7193 }
7194 else
7195 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7196 return MIN (total_bytes - off, len);
7197 }
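
/* Editorial illustration (not part of the original source): for
   "char s[8] = "ab";" the STRING_CST records only the three bytes
   "ab\0" while TYPE_SIZE_UNIT is 8, so the branch above copies the
   recorded bytes and zero-fills the tail, producing

     61 62 00 00 00 00 00 00

   in the buffer.  */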
7198
7199
7200 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7201 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7202 buffer PTR of length LEN bytes. If OFF is not -1 then start
7203 the encoding at byte offset OFF and encode at most LEN bytes.
7204 Return the number of bytes placed in the buffer, or zero upon failure. */
7205
7206 int
7207 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7208 {
7209 switch (TREE_CODE (expr))
7210 {
7211 case INTEGER_CST:
7212 return native_encode_int (expr, ptr, len, off);
7213
7214 case REAL_CST:
7215 return native_encode_real (expr, ptr, len, off);
7216
7217 case FIXED_CST:
7218 return native_encode_fixed (expr, ptr, len, off);
7219
7220 case COMPLEX_CST:
7221 return native_encode_complex (expr, ptr, len, off);
7222
7223 case VECTOR_CST:
7224 return native_encode_vector (expr, ptr, len, off);
7225
7226 case STRING_CST:
7227 return native_encode_string (expr, ptr, len, off);
7228
7229 default:
7230 return 0;
7231 }
7232 }
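
/* Editorial usage sketch (not part of the original source): the
   typical caller does a round trip through a byte buffer, as
   fold_view_convert_expr does below:

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf), -1);
     if (len != 0)
       result = native_interpret_expr (new_type, buf, len);

   where EXPR is one of the constants listed above and NEW_TYPE is
   the type whose representation the bytes should be read back as.  */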
7233
7234
7235 /* Subroutine of native_interpret_expr. Interpret the contents of
7236 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7237 If the buffer cannot be interpreted, return NULL_TREE. */
7238
7239 static tree
7240 native_interpret_int (tree type, const unsigned char *ptr, int len)
7241 {
7242 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7243
7244 if (total_bytes > len
7245 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7246 return NULL_TREE;
7247
7248 wide_int result = wi::from_buffer (ptr, total_bytes);
7249
7250 return wide_int_to_tree (type, result);
7251 }
7252
7253
7254 /* Subroutine of native_interpret_expr. Interpret the contents of
7255 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7256 If the buffer cannot be interpreted, return NULL_TREE. */
7257
7258 static tree
7259 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7260 {
7261 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7262 double_int result;
7263 FIXED_VALUE_TYPE fixed_value;
7264
7265 if (total_bytes > len
7266 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7267 return NULL_TREE;
7268
7269 result = double_int::from_buffer (ptr, total_bytes);
7270 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7271
7272 return build_fixed (type, fixed_value);
7273 }
7274
7275
7276 /* Subroutine of native_interpret_expr. Interpret the contents of
7277 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7278 If the buffer cannot be interpreted, return NULL_TREE. */
7279
7280 static tree
7281 native_interpret_real (tree type, const unsigned char *ptr, int len)
7282 {
7283 machine_mode mode = TYPE_MODE (type);
7284 int total_bytes = GET_MODE_SIZE (mode);
7285 int byte, offset, word, words, bitpos;
7286 unsigned char value;
7287 /* There are always 32 bits in each long of TMP, no matter the
7288 size of the host's long. We handle floating point representations
7289 with up to 192 bits. */
7290 REAL_VALUE_TYPE r;
7291 long tmp[6];
7292
7293 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7294 if (total_bytes > len || total_bytes > 24)
7295 return NULL_TREE;
7296 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7297
7298 memset (tmp, 0, sizeof (tmp));
7299 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7300 bitpos += BITS_PER_UNIT)
7301 {
7302 byte = (bitpos / BITS_PER_UNIT) & 3;
7303 if (UNITS_PER_WORD < 4)
7304 {
7305 word = byte / UNITS_PER_WORD;
7306 if (WORDS_BIG_ENDIAN)
7307 word = (words - 1) - word;
7308 offset = word * UNITS_PER_WORD;
7309 if (BYTES_BIG_ENDIAN)
7310 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7311 else
7312 offset += byte % UNITS_PER_WORD;
7313 }
7314 else
7315 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7316 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7317
7318 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7319 }
7320
7321 real_from_target (&r, tmp, mode);
7322 return build_real (type, r);
7323 }
7324
7325
7326 /* Subroutine of native_interpret_expr. Interpret the contents of
7327 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7328 If the buffer cannot be interpreted, return NULL_TREE. */
7329
7330 static tree
7331 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7332 {
7333 tree etype, rpart, ipart;
7334 int size;
7335
7336 etype = TREE_TYPE (type);
7337 size = GET_MODE_SIZE (TYPE_MODE (etype));
7338 if (size * 2 > len)
7339 return NULL_TREE;
7340 rpart = native_interpret_expr (etype, ptr, size);
7341 if (!rpart)
7342 return NULL_TREE;
7343 ipart = native_interpret_expr (etype, ptr+size, size);
7344 if (!ipart)
7345 return NULL_TREE;
7346 return build_complex (type, rpart, ipart);
7347 }
7348
7349
7350 /* Subroutine of native_interpret_expr. Interpret the contents of
7351 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7352 If the buffer cannot be interpreted, return NULL_TREE. */
7353
7354 static tree
7355 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7356 {
7357 tree etype, elem;
7358 int i, size, count;
7359 tree *elements;
7360
7361 etype = TREE_TYPE (type);
7362 size = GET_MODE_SIZE (TYPE_MODE (etype));
7363 count = TYPE_VECTOR_SUBPARTS (type);
7364 if (size * count > len)
7365 return NULL_TREE;
7366
7367 elements = XALLOCAVEC (tree, count);
7368 for (i = count - 1; i >= 0; i--)
7369 {
7370 elem = native_interpret_expr (etype, ptr+(i*size), size);
7371 if (!elem)
7372 return NULL_TREE;
7373 elements[i] = elem;
7374 }
7375 return build_vector (type, elements);
7376 }
7377
7378
7379 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7380 the buffer PTR of length LEN as a constant of type TYPE. For
7381 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7382 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7383 return NULL_TREE. */
7384
7385 tree
7386 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7387 {
7388 switch (TREE_CODE (type))
7389 {
7390 case INTEGER_TYPE:
7391 case ENUMERAL_TYPE:
7392 case BOOLEAN_TYPE:
7393 case POINTER_TYPE:
7394 case REFERENCE_TYPE:
7395 return native_interpret_int (type, ptr, len);
7396
7397 case REAL_TYPE:
7398 return native_interpret_real (type, ptr, len);
7399
7400 case FIXED_POINT_TYPE:
7401 return native_interpret_fixed (type, ptr, len);
7402
7403 case COMPLEX_TYPE:
7404 return native_interpret_complex (type, ptr, len);
7405
7406 case VECTOR_TYPE:
7407 return native_interpret_vector (type, ptr, len);
7408
7409 default:
7410 return NULL_TREE;
7411 }
7412 }
7413
7414 /* Returns true if we can interpret the contents of a native encoding
7415 as TYPE. */
7416
7417 static bool
7418 can_native_interpret_type_p (tree type)
7419 {
7420 switch (TREE_CODE (type))
7421 {
7422 case INTEGER_TYPE:
7423 case ENUMERAL_TYPE:
7424 case BOOLEAN_TYPE:
7425 case POINTER_TYPE:
7426 case REFERENCE_TYPE:
7427 case FIXED_POINT_TYPE:
7428 case REAL_TYPE:
7429 case COMPLEX_TYPE:
7430 case VECTOR_TYPE:
7431 return true;
7432 default:
7433 return false;
7434 }
7435 }
7436
7437 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7438 TYPE at compile-time. If we're unable to perform the conversion
7439 return NULL_TREE. */
7440
7441 static tree
7442 fold_view_convert_expr (tree type, tree expr)
7443 {
7444 /* We support up to 512-bit values (for V8DFmode). */
7445 unsigned char buffer[64];
7446 int len;
7447
7448 /* Check that the host and target are sane. */
7449 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7450 return NULL_TREE;
7451
7452 len = native_encode_expr (expr, buffer, sizeof (buffer));
7453 if (len == 0)
7454 return NULL_TREE;
7455
7456 return native_interpret_expr (type, buffer, len);
7457 }
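
/* Editorial illustration (not part of the original source): on a
   target with IEEE single-precision floats this folds, e.g.,

     VIEW_CONVERT_EXPR<int>(1.0f)

   to the INTEGER_CST 0x3f800000: the REAL_CST is encoded into the
   buffer and the bytes are reinterpreted as an integer.  */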
7458
7459 /* Build an expression for the address of T. Folds away INDIRECT_REF
7460 to avoid confusing the gimplify process. */
7461
7462 tree
7463 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7464 {
7465 /* The size of the object is not relevant when talking about its address. */
7466 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7467 t = TREE_OPERAND (t, 0);
7468
7469 if (TREE_CODE (t) == INDIRECT_REF)
7470 {
7471 t = TREE_OPERAND (t, 0);
7472
7473 if (TREE_TYPE (t) != ptrtype)
7474 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7475 }
7476 else if (TREE_CODE (t) == MEM_REF
7477 && integer_zerop (TREE_OPERAND (t, 1)))
7478 return TREE_OPERAND (t, 0);
7479 else if (TREE_CODE (t) == MEM_REF
7480 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7481 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7482 TREE_OPERAND (t, 0),
7483 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7484 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7485 {
7486 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7487
7488 if (TREE_TYPE (t) != ptrtype)
7489 t = fold_convert_loc (loc, ptrtype, t);
7490 }
7491 else
7492 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7493
7494 return t;
7495 }
7496
7497 /* Build an expression for the address of T. */
7498
7499 tree
7500 build_fold_addr_expr_loc (location_t loc, tree t)
7501 {
7502 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7503
7504 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7505 }
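
/* Editorial illustration (not part of the original source): folds
   performed by build_fold_addr_expr_with_type_loc above include

     &*p                       ->  p   (cast if the pointer types differ)
     &MEM_REF[p, 0]            ->  p
     &VIEW_CONVERT_EXPR<T>(x)  ->  converted &x

   so that gimplification never sees the address of a dereference.  */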
7506
7507 static bool vec_cst_ctor_to_array (tree, tree *);
7508
7509 /* Fold a unary expression of code CODE and type TYPE with operand
7510 OP0. Return the folded expression if folding is successful.
7511 Otherwise, return NULL_TREE. */
7512
7513 tree
7514 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7515 {
7516 tree tem;
7517 tree arg0;
7518 enum tree_code_class kind = TREE_CODE_CLASS (code);
7519
7520 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7521 && TREE_CODE_LENGTH (code) == 1);
7522
7523 tem = generic_simplify (loc, code, type, op0);
7524 if (tem)
7525 return tem;
7526
7527 arg0 = op0;
7528 if (arg0)
7529 {
7530 if (CONVERT_EXPR_CODE_P (code)
7531 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7532 {
7533 /* Don't use STRIP_NOPS, because signedness of argument type
7534 matters. */
7535 STRIP_SIGN_NOPS (arg0);
7536 }
7537 else
7538 {
7539 /* Strip any conversions that don't change the mode. This
7540 is safe for every expression, except for a comparison
7541 expression because its signedness is derived from its
7542 operands.
7543
7544 Note that this is done as an internal manipulation within
7545 the constant folder, in order to find the simplest
7546 representation of the arguments so that their form can be
7547 studied. In any case, the appropriate type conversions
7548 should be put back in the tree that will get out of the
7549 constant folder. */
7550 STRIP_NOPS (arg0);
7551 }
7552 }
7553
7554 if (TREE_CODE_CLASS (code) == tcc_unary)
7555 {
7556 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7557 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7558 fold_build1_loc (loc, code, type,
7559 fold_convert_loc (loc, TREE_TYPE (op0),
7560 TREE_OPERAND (arg0, 1))));
7561 else if (TREE_CODE (arg0) == COND_EXPR)
7562 {
7563 tree arg01 = TREE_OPERAND (arg0, 1);
7564 tree arg02 = TREE_OPERAND (arg0, 2);
7565 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7566 arg01 = fold_build1_loc (loc, code, type,
7567 fold_convert_loc (loc,
7568 TREE_TYPE (op0), arg01));
7569 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7570 arg02 = fold_build1_loc (loc, code, type,
7571 fold_convert_loc (loc,
7572 TREE_TYPE (op0), arg02));
7573 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7574 arg01, arg02);
7575
7576 /* If this was a conversion, and all we did was to move into
7577 inside the COND_EXPR, bring it back out. But leave it if
7578 it is a conversion from integer to integer and the
7579 result precision is no wider than a word since such a
7580 conversion is cheap and may be optimized away by combine,
7581 while it couldn't if it were outside the COND_EXPR. Then return
7582 so we don't get into an infinite recursion loop taking the
7583 conversion out and then back in. */
7584
7585 if ((CONVERT_EXPR_CODE_P (code)
7586 || code == NON_LVALUE_EXPR)
7587 && TREE_CODE (tem) == COND_EXPR
7588 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7589 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7590 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7591 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7592 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7593 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7594 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7595 && (INTEGRAL_TYPE_P
7596 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7597 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7598 || flag_syntax_only))
7599 tem = build1_loc (loc, code, type,
7600 build3 (COND_EXPR,
7601 TREE_TYPE (TREE_OPERAND
7602 (TREE_OPERAND (tem, 1), 0)),
7603 TREE_OPERAND (tem, 0),
7604 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7605 TREE_OPERAND (TREE_OPERAND (tem, 2),
7606 0)));
7607 return tem;
7608 }
7609 }
7610
7611 switch (code)
7612 {
7613 case NON_LVALUE_EXPR:
7614 if (!maybe_lvalue_p (op0))
7615 return fold_convert_loc (loc, type, op0);
7616 return NULL_TREE;
7617
7618 CASE_CONVERT:
7619 case FLOAT_EXPR:
7620 case FIX_TRUNC_EXPR:
7621 if (COMPARISON_CLASS_P (op0))
7622 {
7623 /* If we have (type) (a CMP b) and type is an integral type, return
7624 new expression involving the new type. Canonicalize
7625 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7626 non-integral type.
7627 Do not fold the result as that would not simplify further;
7628 folding again would also result in infinite recursion. */
7629 if (TREE_CODE (type) == BOOLEAN_TYPE)
7630 return build2_loc (loc, TREE_CODE (op0), type,
7631 TREE_OPERAND (op0, 0),
7632 TREE_OPERAND (op0, 1));
7633 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7634 && TREE_CODE (type) != VECTOR_TYPE)
7635 return build3_loc (loc, COND_EXPR, type, op0,
7636 constant_boolean_node (true, type),
7637 constant_boolean_node (false, type));
7638 }
7639
7640 /* Handle (T *)&A.B.C for A being of type T and B and C
7641 living at offset zero. This occurs frequently in
7642 C++ upcasting and then accessing the base. */
7643 if (TREE_CODE (op0) == ADDR_EXPR
7644 && POINTER_TYPE_P (type)
7645 && handled_component_p (TREE_OPERAND (op0, 0)))
7646 {
7647 HOST_WIDE_INT bitsize, bitpos;
7648 tree offset;
7649 machine_mode mode;
7650 int unsignedp, volatilep;
7651 tree base = TREE_OPERAND (op0, 0);
7652 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7653 &mode, &unsignedp, &volatilep, false);
7654 /* If the reference was to a (constant) zero offset, we can use
7655 the address of the base if it has the same base type
7656 as the result type and the pointer type is unqualified. */
7657 if (! offset && bitpos == 0
7658 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7659 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7660 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7661 return fold_convert_loc (loc, type,
7662 build_fold_addr_expr_loc (loc, base));
7663 }
7664
7665 if (TREE_CODE (op0) == MODIFY_EXPR
7666 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7667 /* Detect assigning a bitfield. */
7668 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7669 && DECL_BIT_FIELD
7670 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7671 {
7672 /* Don't leave an assignment inside a conversion
7673 unless assigning a bitfield. */
7674 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7675 /* First do the assignment, then return converted constant. */
7676 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7677 TREE_NO_WARNING (tem) = 1;
7678 TREE_USED (tem) = 1;
7679 return tem;
7680 }
7681
7682 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7683 constants (if x has signed type, the sign bit cannot be set
7684 in c). This folds extension into the BIT_AND_EXPR.
7685 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7686 very likely don't have maximal range for their precision and this
7687 transformation effectively doesn't preserve non-maximal ranges. */
7688 if (TREE_CODE (type) == INTEGER_TYPE
7689 && TREE_CODE (op0) == BIT_AND_EXPR
7690 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7691 {
7692 tree and_expr = op0;
7693 tree and0 = TREE_OPERAND (and_expr, 0);
7694 tree and1 = TREE_OPERAND (and_expr, 1);
7695 int change = 0;
7696
7697 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7698 || (TYPE_PRECISION (type)
7699 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7700 change = 1;
7701 else if (TYPE_PRECISION (TREE_TYPE (and1))
7702 <= HOST_BITS_PER_WIDE_INT
7703 && tree_fits_uhwi_p (and1))
7704 {
7705 unsigned HOST_WIDE_INT cst;
7706
7707 cst = tree_to_uhwi (and1);
7708 cst &= HOST_WIDE_INT_M1U
7709 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7710 change = (cst == 0);
7711 #ifdef LOAD_EXTEND_OP
7712 if (change
7713 && !flag_syntax_only
7714 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7715 == ZERO_EXTEND))
7716 {
7717 tree uns = unsigned_type_for (TREE_TYPE (and0));
7718 and0 = fold_convert_loc (loc, uns, and0);
7719 and1 = fold_convert_loc (loc, uns, and1);
7720 }
7721 #endif
7722 }
7723 if (change)
7724 {
7725 tem = force_fit_type (type, wi::to_widest (and1), 0,
7726 TREE_OVERFLOW (and1));
7727 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7728 fold_convert_loc (loc, type, and0), tem);
7729 }
7730 }
7731
7732 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7733 when one of the new casts will fold away. Conservatively we assume
7734 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7735 if (POINTER_TYPE_P (type)
7736 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7737 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7738 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7739 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7740 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7741 {
7742 tree arg00 = TREE_OPERAND (arg0, 0);
7743 tree arg01 = TREE_OPERAND (arg0, 1);
7744
7745 return fold_build_pointer_plus_loc
7746 (loc, fold_convert_loc (loc, type, arg00), arg01);
7747 }
7748
7749 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7750 of the same precision, and X is an integer type not narrower than
7751 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7752 if (INTEGRAL_TYPE_P (type)
7753 && TREE_CODE (op0) == BIT_NOT_EXPR
7754 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7755 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7756 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7757 {
7758 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7759 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7760 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7761 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7762 fold_convert_loc (loc, type, tem));
7763 }
7764
7765 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7766 type of X and Y (integer types only). */
7767 if (INTEGRAL_TYPE_P (type)
7768 && TREE_CODE (op0) == MULT_EXPR
7769 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7770 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7771 {
7772 /* Be careful not to introduce new overflows. */
7773 tree mult_type;
7774 if (TYPE_OVERFLOW_WRAPS (type))
7775 mult_type = type;
7776 else
7777 mult_type = unsigned_type_for (type);
7778
7779 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7780 {
7781 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7782 fold_convert_loc (loc, mult_type,
7783 TREE_OPERAND (op0, 0)),
7784 fold_convert_loc (loc, mult_type,
7785 TREE_OPERAND (op0, 1)));
7786 return fold_convert_loc (loc, type, tem);
7787 }
7788 }
7789
7790 tem = fold_convert_const (code, type, arg0);
7791 return tem ? tem : NULL_TREE;
7792
7793 case ADDR_SPACE_CONVERT_EXPR:
7794 if (integer_zerop (arg0))
7795 return fold_convert_const (code, type, arg0);
7796 return NULL_TREE;
7797
7798 case FIXED_CONVERT_EXPR:
7799 tem = fold_convert_const (code, type, arg0);
7800 return tem ? tem : NULL_TREE;
7801
7802 case VIEW_CONVERT_EXPR:
7803 if (TREE_CODE (op0) == MEM_REF)
7804 return fold_build2_loc (loc, MEM_REF, type,
7805 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7806
7807 return fold_view_convert_expr (type, op0);
7808
7809 case NEGATE_EXPR:
7810 tem = fold_negate_expr (loc, arg0);
7811 if (tem)
7812 return fold_convert_loc (loc, type, tem);
7813 return NULL_TREE;
7814
7815 case ABS_EXPR:
7816 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7817 return fold_abs_const (arg0, type);
7818 /* Convert fabs((double)float) into (double)fabsf(float). */
7819 else if (TREE_CODE (arg0) == NOP_EXPR
7820 && TREE_CODE (type) == REAL_TYPE)
7821 {
7822 tree targ0 = strip_float_extensions (arg0);
7823 if (targ0 != arg0)
7824 return fold_convert_loc (loc, type,
7825 fold_build1_loc (loc, ABS_EXPR,
7826 TREE_TYPE (targ0),
7827 targ0));
7828 }
7829 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7830 else if (TREE_CODE (arg0) == ABS_EXPR)
7831 return arg0;
7832
7833 /* Strip sign ops from argument. */
7834 if (TREE_CODE (type) == REAL_TYPE)
7835 {
7836 tem = fold_strip_sign_ops (arg0);
7837 if (tem)
7838 return fold_build1_loc (loc, ABS_EXPR, type,
7839 fold_convert_loc (loc, type, tem));
7840 }
7841 return NULL_TREE;
7842
7843 case CONJ_EXPR:
7844 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7845 return fold_convert_loc (loc, type, arg0);
7846 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7847 {
7848 tree itype = TREE_TYPE (type);
7849 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7850 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7851 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7852 negate_expr (ipart));
7853 }
7854 if (TREE_CODE (arg0) == COMPLEX_CST)
7855 {
7856 tree itype = TREE_TYPE (type);
7857 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
7858 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
7859 return build_complex (type, rpart, negate_expr (ipart));
7860 }
7861 if (TREE_CODE (arg0) == CONJ_EXPR)
7862 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7863 return NULL_TREE;
7864
7865 case BIT_NOT_EXPR:
7866 if (TREE_CODE (arg0) == INTEGER_CST)
7867 return fold_not_const (arg0, type);
7868 /* Convert ~ (-A) to A - 1. */
7869 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7870 return fold_build2_loc (loc, MINUS_EXPR, type,
7871 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
7872 build_int_cst (type, 1));
7873 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7874 else if (INTEGRAL_TYPE_P (type)
7875 && ((TREE_CODE (arg0) == MINUS_EXPR
7876 && integer_onep (TREE_OPERAND (arg0, 1)))
7877 || (TREE_CODE (arg0) == PLUS_EXPR
7878 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7879 return fold_build1_loc (loc, NEGATE_EXPR, type,
7880 fold_convert_loc (loc, type,
7881 TREE_OPERAND (arg0, 0)));
7882 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7883 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7884 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7885 fold_convert_loc (loc, type,
7886 TREE_OPERAND (arg0, 0)))))
7887 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7888 fold_convert_loc (loc, type,
7889 TREE_OPERAND (arg0, 1)));
7890 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7891 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7892 fold_convert_loc (loc, type,
7893 TREE_OPERAND (arg0, 1)))))
7894 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7895 fold_convert_loc (loc, type,
7896 TREE_OPERAND (arg0, 0)), tem);
7897 /* Perform BIT_NOT_EXPR on each element individually. */
7898 else if (TREE_CODE (arg0) == VECTOR_CST)
7899 {
7900 tree *elements;
7901 tree elem;
7902 unsigned count = VECTOR_CST_NELTS (arg0), i;
7903
7904 elements = XALLOCAVEC (tree, count);
7905 for (i = 0; i < count; i++)
7906 {
7907 elem = VECTOR_CST_ELT (arg0, i);
7908 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
7909 if (elem == NULL_TREE)
7910 break;
7911 elements[i] = elem;
7912 }
7913 if (i == count)
7914 return build_vector (type, elements);
7915 }
7916
7917 return NULL_TREE;
7918
7919 case TRUTH_NOT_EXPR:
7920 /* Note that the operand of this must be an int
7921 and its values must be 0 or 1.
7922 ("true" is a fixed value perhaps depending on the language,
7923 but we don't handle values other than 1 correctly yet.) */
7924 tem = fold_truth_not_expr (loc, arg0);
7925 if (!tem)
7926 return NULL_TREE;
7927 return fold_convert_loc (loc, type, tem);
7928
7929 case REALPART_EXPR:
7930 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7931 return fold_convert_loc (loc, type, arg0);
7932 if (TREE_CODE (arg0) == COMPLEX_CST)
7933 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
7934 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7935 {
7936 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7937 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7938 fold_build1_loc (loc, REALPART_EXPR, itype,
7939 TREE_OPERAND (arg0, 0)),
7940 fold_build1_loc (loc, REALPART_EXPR, itype,
7941 TREE_OPERAND (arg0, 1)));
7942 return fold_convert_loc (loc, type, tem);
7943 }
7944 if (TREE_CODE (arg0) == CONJ_EXPR)
7945 {
7946 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7947 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
7948 TREE_OPERAND (arg0, 0));
7949 return fold_convert_loc (loc, type, tem);
7950 }
7951 if (TREE_CODE (arg0) == CALL_EXPR)
7952 {
7953 tree fn = get_callee_fndecl (arg0);
7954 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7955 switch (DECL_FUNCTION_CODE (fn))
7956 {
7957 CASE_FLT_FN (BUILT_IN_CEXPI):
7958 fn = mathfn_built_in (type, BUILT_IN_COS);
7959 if (fn)
7960 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
7961 break;
7962
7963 default:
7964 break;
7965 }
7966 }
7967 return NULL_TREE;
7968
7969 case IMAGPART_EXPR:
7970 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7971 return build_zero_cst (type);
7972 if (TREE_CODE (arg0) == COMPLEX_CST)
7973 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
7974 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7975 {
7976 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7977 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7978 fold_build1_loc (loc, IMAGPART_EXPR, itype,
7979 TREE_OPERAND (arg0, 0)),
7980 fold_build1_loc (loc, IMAGPART_EXPR, itype,
7981 TREE_OPERAND (arg0, 1)));
7982 return fold_convert_loc (loc, type, tem);
7983 }
7984 if (TREE_CODE (arg0) == CONJ_EXPR)
7985 {
7986 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7987 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7988 return fold_convert_loc (loc, type, negate_expr (tem));
7989 }
7990 if (TREE_CODE (arg0) == CALL_EXPR)
7991 {
7992 tree fn = get_callee_fndecl (arg0);
7993 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7994 switch (DECL_FUNCTION_CODE (fn))
7995 {
7996 CASE_FLT_FN (BUILT_IN_CEXPI):
7997 fn = mathfn_built_in (type, BUILT_IN_SIN);
7998 if (fn)
7999 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8000 break;
8001
8002 default:
8003 break;
8004 }
8005 }
8006 return NULL_TREE;
8007
8008 case INDIRECT_REF:
8009 /* Fold *&X to X if X is an lvalue. */
8010 if (TREE_CODE (op0) == ADDR_EXPR)
8011 {
8012 tree op00 = TREE_OPERAND (op0, 0);
8013 if ((TREE_CODE (op00) == VAR_DECL
8014 || TREE_CODE (op00) == PARM_DECL
8015 || TREE_CODE (op00) == RESULT_DECL)
8016 && !TREE_READONLY (op00))
8017 return op00;
8018 }
8019 return NULL_TREE;
8020
8021 case VEC_UNPACK_LO_EXPR:
8022 case VEC_UNPACK_HI_EXPR:
8023 case VEC_UNPACK_FLOAT_LO_EXPR:
8024 case VEC_UNPACK_FLOAT_HI_EXPR:
8025 {
8026 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8027 tree *elts;
8028 enum tree_code subcode;
8029
8030 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8031 if (TREE_CODE (arg0) != VECTOR_CST)
8032 return NULL_TREE;
8033
8034 elts = XALLOCAVEC (tree, nelts * 2);
8035 if (!vec_cst_ctor_to_array (arg0, elts))
8036 return NULL_TREE;
8037
8038 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8039 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8040 elts += nelts;
8041
8042 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8043 subcode = NOP_EXPR;
8044 else
8045 subcode = FLOAT_EXPR;
8046
8047 for (i = 0; i < nelts; i++)
8048 {
8049 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8050 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8051 return NULL_TREE;
8052 }
8053
8054 return build_vector (type, elts);
8055 }
8056
8057 case REDUC_MIN_EXPR:
8058 case REDUC_MAX_EXPR:
8059 case REDUC_PLUS_EXPR:
8060 {
8061 unsigned int nelts, i;
8062 tree *elts;
8063 enum tree_code subcode;
8064
8065 if (TREE_CODE (op0) != VECTOR_CST)
8066 return NULL_TREE;
8067 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8068
8069 elts = XALLOCAVEC (tree, nelts);
8070 if (!vec_cst_ctor_to_array (op0, elts))
8071 return NULL_TREE;
8072
8073 switch (code)
8074 {
8075 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8076 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8077 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8078 default: gcc_unreachable ();
8079 }
8080
8081 for (i = 1; i < nelts; i++)
8082 {
8083 elts[0] = const_binop (subcode, elts[0], elts[i]);
8084 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8085 return NULL_TREE;
8086 }
8087
8088 return elts[0];
8089 }
8090
8091 default:
8092 return NULL_TREE;
8093 } /* switch (code) */
8094 }
8095
8096
8097 /* If the operation was a conversion do _not_ mark a resulting constant
8098 with TREE_OVERFLOW if the original constant was not. These conversions
8099 have implementation-defined behavior and retaining the TREE_OVERFLOW
8100 flag here would confuse later passes such as VRP. */
8101 tree
8102 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8103 tree type, tree op0)
8104 {
8105 tree res = fold_unary_loc (loc, code, type, op0);
8106 if (res
8107 && TREE_CODE (res) == INTEGER_CST
8108 && TREE_CODE (op0) == INTEGER_CST
8109 && CONVERT_EXPR_CODE_P (code))
8110 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8111
8112 return res;
8113 }
8114
8115 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8116 operands OP0 and OP1. LOC is the location of the resulting expression.
8117 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8118 Return the folded expression if folding is successful. Otherwise,
8119 return NULL_TREE. */
8120 static tree
8121 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8122 tree arg0, tree arg1, tree op0, tree op1)
8123 {
8124 tree tem;
8125
8126 /* We only do these simplifications if we are optimizing. */
8127 if (!optimize)
8128 return NULL_TREE;
8129
8130 /* Check for things like (A || B) && (A || C). We can convert this
8131 to A || (B && C). Note that either operator can be any of the four
8132 truth and/or operations and the transformation will still be
8133 valid. Also note that we only care about order for the
8134 ANDIF and ORIF operators. If B contains side effects, this
8135 might change the truth-value of A. */
8136 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8137 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8138 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8139 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8140 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8141 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8142 {
8143 tree a00 = TREE_OPERAND (arg0, 0);
8144 tree a01 = TREE_OPERAND (arg0, 1);
8145 tree a10 = TREE_OPERAND (arg1, 0);
8146 tree a11 = TREE_OPERAND (arg1, 1);
8147 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8148 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8149 && (code == TRUTH_AND_EXPR
8150 || code == TRUTH_OR_EXPR));
8151
8152 if (operand_equal_p (a00, a10, 0))
8153 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8154 fold_build2_loc (loc, code, type, a01, a11));
8155 else if (commutative && operand_equal_p (a00, a11, 0))
8156 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8157 fold_build2_loc (loc, code, type, a01, a10));
8158 else if (commutative && operand_equal_p (a01, a10, 0))
8159 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8160 fold_build2_loc (loc, code, type, a00, a11));
8161
8162 /* This case is tricky because we must either have commutative
8163 operators or else A10 must not have side-effects. */
8164
8165 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8166 && operand_equal_p (a01, a11, 0))
8167 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8168 fold_build2_loc (loc, code, type, a00, a10),
8169 a01);
8170 }
8171
8172 /* See if we can build a range comparison. */
8173 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8174 return tem;
8175
8176 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8177 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8178 {
8179 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8180 if (tem)
8181 return fold_build2_loc (loc, code, type, tem, arg1);
8182 }
8183
8184 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8185 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8186 {
8187 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8188 if (tem)
8189 return fold_build2_loc (loc, code, type, arg0, tem);
8190 }
8191
8192 /* Check for the possibility of merging component references. If our
8193 lhs is another similar operation, try to merge its rhs with our
8194 rhs. Then try to merge our lhs and rhs. */
8195 if (TREE_CODE (arg0) == code
8196 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8197 TREE_OPERAND (arg0, 1), arg1)))
8198 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8199
8200 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8201 return tem;
8202
8203 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8204 && (code == TRUTH_AND_EXPR
8205 || code == TRUTH_ANDIF_EXPR
8206 || code == TRUTH_OR_EXPR
8207 || code == TRUTH_ORIF_EXPR))
8208 {
8209 enum tree_code ncode, icode;
8210
8211 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8212 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8213 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8214
8215 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8216 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8217 We don't want to pack more than two leaves into a non-IF AND/OR
8218 expression.
8219 If the tree code of the left-hand operand isn't an AND/OR-IF code
8220 and isn't equal to IF-CODE, then we don't want to add the
8221 right-hand operand. If the inner right-hand side of the left-hand
8222 operand has side-effects, or isn't simple, then we can't add to it,
8223 as otherwise we might destroy the if-sequence. */
8224 if (TREE_CODE (arg0) == icode
8225 && simple_operand_p_2 (arg1)
8226 /* Needed for sequence points to handle traps and
8227 side-effects. */
8228 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8229 {
8230 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8231 arg1);
8232 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8233 tem);
8234 }
8235 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8236 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8237 else if (TREE_CODE (arg1) == icode
8238 && simple_operand_p_2 (arg0)
8239 /* Needed for sequence points to handle traps and
8240 side-effects. */
8241 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8242 {
8243 tem = fold_build2_loc (loc, ncode, type,
8244 arg0, TREE_OPERAND (arg1, 0));
8245 return fold_build2_loc (loc, icode, type, tem,
8246 TREE_OPERAND (arg1, 1));
8247 }
8248 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8249 into (A OR B).
8250 For sequence point consistency, we need to check for traps
8251 and side-effects. */
8252 else if (code == icode && simple_operand_p_2 (arg0)
8253 && simple_operand_p_2 (arg1))
8254 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8255 }
8256
8257 return NULL_TREE;
8258 }
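
/* Editorial illustration (not part of the original source): with
   "int a, b, c;" the folds above include

     (a || b) && (a || c)  ->  a || (b && c)

   and, via fold_range_test, a pair of bound checks such as

     a >= 1 && a <= 9

   can become a single unsigned range comparison of the form
   (unsigned) a - 1 <= 8.  */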
8259
8260 /* Fold a binary expression of code CODE and type TYPE with operands
8261 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8262 Return the folded expression if folding is successful. Otherwise,
8263 return NULL_TREE. */
8264
8265 static tree
8266 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8267 {
8268 enum tree_code compl_code;
8269
8270 if (code == MIN_EXPR)
8271 compl_code = MAX_EXPR;
8272 else if (code == MAX_EXPR)
8273 compl_code = MIN_EXPR;
8274 else
8275 gcc_unreachable ();
8276
8277 /* MIN (MAX (a, b), b) == b. */
8278 if (TREE_CODE (op0) == compl_code
8279 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8280 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8281
8282 /* MIN (MAX (b, a), b) == b. */
8283 if (TREE_CODE (op0) == compl_code
8284 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8285 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8286 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8287
8288 /* MIN (a, MAX (a, b)) == a. */
8289 if (TREE_CODE (op1) == compl_code
8290 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8291 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8292 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8293
8294 /* MIN (a, MAX (b, a)) == a. */
8295 if (TREE_CODE (op1) == compl_code
8296 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8297 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8298 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8299
8300 return NULL_TREE;
8301 }
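
/* Editorial illustration (not part of the original source):

     MIN_EXPR <MAX_EXPR <a, b>, b>  ->  b

   holds because if a > b the outer MIN picks b, and otherwise the
   inner MAX is already b; the other three cases above are the
   commuted variants.  */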
8302
8303 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8304 by changing CODE to reduce the magnitude of constants involved in
8305 ARG0 of the comparison.
8306 Returns a canonicalized comparison tree if a simplification was
8307 possible, otherwise returns NULL_TREE.
8308 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8309 valid if signed overflow is undefined. */
8310
8311 static tree
8312 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8313 tree arg0, tree arg1,
8314 bool *strict_overflow_p)
8315 {
8316 enum tree_code code0 = TREE_CODE (arg0);
8317 tree t, cst0 = NULL_TREE;
8318 int sgn0;
8319 bool swap = false;
8320
8321 /* Match A +- CST code arg1 and CST code arg1. We can change the
8322 first form only if overflow is undefined. */
8323 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8324 /* In principle pointers also have undefined overflow behavior,
8325 but that causes problems elsewhere. */
8326 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8327 && (code0 == MINUS_EXPR
8328 || code0 == PLUS_EXPR)
8329 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8330 || code0 == INTEGER_CST))
8331 return NULL_TREE;
8332
8333 /* Identify the constant in arg0 and its sign. */
8334 if (code0 == INTEGER_CST)
8335 cst0 = arg0;
8336 else
8337 cst0 = TREE_OPERAND (arg0, 1);
8338 sgn0 = tree_int_cst_sgn (cst0);
8339
8340 /* Overflowed constants and zero will cause problems. */
8341 if (integer_zerop (cst0)
8342 || TREE_OVERFLOW (cst0))
8343 return NULL_TREE;
8344
8345 /* See if we can reduce the magnitude of the constant in
8346 arg0 by changing the comparison code. */
8347 if (code0 == INTEGER_CST)
8348 {
8349 /* CST <= arg1 -> CST-1 < arg1. */
8350 if (code == LE_EXPR && sgn0 == 1)
8351 code = LT_EXPR;
8352 /* -CST < arg1 -> -CST-1 <= arg1. */
8353 else if (code == LT_EXPR && sgn0 == -1)
8354 code = LE_EXPR;
8355 /* CST > arg1 -> CST-1 >= arg1. */
8356 else if (code == GT_EXPR && sgn0 == 1)
8357 code = GE_EXPR;
8358 /* -CST >= arg1 -> -CST-1 > arg1. */
8359 else if (code == GE_EXPR && sgn0 == -1)
8360 code = GT_EXPR;
8361 else
8362 return NULL_TREE;
8363 /* arg1 code' CST' might be more canonical. */
8364 swap = true;
8365 }
8366 else
8367 {
8368 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8369 if (code == LT_EXPR
8370 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8371 code = LE_EXPR;
8372 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8373 else if (code == GT_EXPR
8374 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8375 code = GE_EXPR;
8376 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8377 else if (code == LE_EXPR
8378 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8379 code = LT_EXPR;
8380 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8381 else if (code == GE_EXPR
8382 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8383 code = GT_EXPR;
8384 else
8385 return NULL_TREE;
8386 *strict_overflow_p = true;
8387 }
8388
8389 /* Now build the constant reduced in magnitude. But not if that
8390 would produce one outside of its type's range. */
8391 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8392 && ((sgn0 == 1
8393 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8394 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8395 || (sgn0 == -1
8396 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8397 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8398 /* We cannot swap the comparison here as that would cause us to
8399 endlessly recurse. */
8400 return NULL_TREE;
8401
8402 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8403 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8404 if (code0 != INTEGER_CST)
8405 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8406 t = fold_convert (TREE_TYPE (arg1), t);
8407
8408 /* If swapping might yield a more canonical form, do so. */
8409 if (swap)
8410 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8411 else
8412 return fold_build2_loc (loc, code, type, t, arg1);
8413 }
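
/* Editorial illustration (not part of the original source): for
   signed "int x, y;" the canonicalization above turns

     5 <= x     ->  x > 4       (constant reduced, operands swapped)
     x + 2 > y  ->  x + 1 >= y  (valid only because signed overflow
                                 is undefined, hence *STRICT_OVERFLOW_P
                                 is set)
*/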
8414
8415 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8416 overflow further. Try to decrease the magnitude of constants involved
8417 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8418 and put sole constants at the second argument position.
8419 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8420
8421 static tree
8422 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8423 tree arg0, tree arg1)
8424 {
8425 tree t;
8426 bool strict_overflow_p;
8427 const char * const warnmsg = G_("assuming signed overflow does not occur "
8428 "when reducing constant in comparison");
8429
8430 /* Try canonicalization by simplifying arg0. */
8431 strict_overflow_p = false;
8432 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8433 &strict_overflow_p);
8434 if (t)
8435 {
8436 if (strict_overflow_p)
8437 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8438 return t;
8439 }
8440
8441 /* Try canonicalization by simplifying arg1 using the swapped
8442 comparison. */
8443 code = swap_tree_comparison (code);
8444 strict_overflow_p = false;
8445 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8446 &strict_overflow_p);
8447 if (t && strict_overflow_p)
8448 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8449 return t;
8450 }
8451
8452 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8453 space. This is used to avoid issuing overflow warnings for
8454 expressions like &p->x which cannot wrap. */
8455
8456 static bool
8457 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8458 {
8459 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8460 return true;
8461
8462 if (bitpos < 0)
8463 return true;
8464
8465 wide_int wi_offset;
8466 int precision = TYPE_PRECISION (TREE_TYPE (base));
8467 if (offset == NULL_TREE)
8468 wi_offset = wi::zero (precision);
8469 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8470 return true;
8471 else
8472 wi_offset = offset;
8473
8474 bool overflow;
8475 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8476 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8477 if (overflow)
8478 return true;
8479
8480 if (!wi::fits_uhwi_p (total))
8481 return true;
8482
8483 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8484 if (size <= 0)
8485 return true;
8486
8487 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8488 array. */
8489 if (TREE_CODE (base) == ADDR_EXPR)
8490 {
8491 HOST_WIDE_INT base_size;
8492
8493 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8494 if (base_size > 0 && size < base_size)
8495 size = base_size;
8496 }
8497
8498 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8499 }
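
/* Editorial illustration (not part of the original source): for
   "struct S { int a; int b; } *p;" the address &p->b has a small
   constant BITPOS and no variable OFFSET, so the function above
   returns false (the address cannot wrap); a non-constant offset,
   or a total offset exceeding the size of *p, conservatively
   returns true.  */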
8500
8501 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8502 kind INTEGER_CST. This makes sure to properly sign-extend the
8503 constant. */
8504
8505 static HOST_WIDE_INT
8506 size_low_cst (const_tree t)
8507 {
8508 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8509 int prec = TYPE_PRECISION (TREE_TYPE (t));
8510 if (prec < HOST_BITS_PER_WIDE_INT)
8511 return sext_hwi (w, prec);
8512 return w;
8513 }
8514
8515 /* Subroutine of fold_binary. This routine performs all of the
8516 transformations that are common to the equality/inequality
8517 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8518 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8519 fold_binary itself should go through fold_binary. Fold a comparison with
8520 tree code CODE and type TYPE with operands OP0 and OP1. Return
8521 the folded comparison or NULL_TREE. */
8522
8523 static tree
8524 fold_comparison (location_t loc, enum tree_code code, tree type,
8525 tree op0, tree op1)
8526 {
8527 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8528 tree arg0, arg1, tem;
8529
8530 arg0 = op0;
8531 arg1 = op1;
8532
8533 STRIP_SIGN_NOPS (arg0);
8534 STRIP_SIGN_NOPS (arg1);
8535
8536 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8537 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8538 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8539 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8540 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8541 && TREE_CODE (arg1) == INTEGER_CST
8542 && !TREE_OVERFLOW (arg1))
8543 {
8544 const enum tree_code
8545 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8546 tree const1 = TREE_OPERAND (arg0, 1);
8547 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8548 tree variable = TREE_OPERAND (arg0, 0);
8549 tree new_const = int_const_binop (reverse_op, const2, const1);
8550
8551 /* If the constant operation overflowed this can be
8552 simplified as a comparison against INT_MAX/INT_MIN. */
8553 if (TREE_OVERFLOW (new_const)
8554 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8555 {
8556 int const1_sgn = tree_int_cst_sgn (const1);
8557 enum tree_code code2 = code;
8558
8559 /* Get the sign of the constant on the lhs if the
8560 operation were VARIABLE + CONST1. */
8561 if (TREE_CODE (arg0) == MINUS_EXPR)
8562 const1_sgn = -const1_sgn;
8563
8564 /* The sign of the constant determines if we overflowed
8565 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8566 Canonicalize to the INT_MIN overflow by swapping the comparison
8567 if necessary. */
8568 if (const1_sgn == -1)
8569 code2 = swap_tree_comparison (code);
8570
8571 /* We now can look at the canonicalized case
8572 VARIABLE + 1 CODE2 INT_MIN
8573 and decide on the result. */
8574 switch (code2)
8575 {
8576 case EQ_EXPR:
8577 case LT_EXPR:
8578 case LE_EXPR:
8579 return
8580 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8581
8582 case NE_EXPR:
8583 case GE_EXPR:
8584 case GT_EXPR:
8585 return
8586 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8587
8588 default:
8589 gcc_unreachable ();
8590 }
8591 }
8592 else
8593 {
8594 if (!equality_code)
8595 fold_overflow_warning ("assuming signed overflow does not occur "
8596 "when changing X +- C1 cmp C2 to "
8597 "X cmp C2 -+ C1",
8598 WARN_STRICT_OVERFLOW_COMPARISON);
8599 return fold_build2_loc (loc, code, type, variable, new_const);
8600 }
8601 }
8602
8603 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8604 if (TREE_CODE (arg0) == MINUS_EXPR
8605 && equality_code
8606 && integer_zerop (arg1))
8607 {
8608 /* ??? The transformation is valid for the other operators if overflow
8609 is undefined for the type, but performing it here badly interacts
8610 with the transformation in fold_cond_expr_with_comparison which
8611 attempts to synthesize ABS_EXPR. */
8616 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8617 TREE_OPERAND (arg0, 1));
8618 }
8619
8620 /* For comparisons of pointers we can decompose them into a compile time
8621 comparison of the base objects and the offsets into the object.
8622 This requires at least one operand being an ADDR_EXPR or a
8623 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
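/* For instance, for a declared array a, &a[1] < &a[3] has equal bases
and constant offsets only, so it folds to a compile time true, while
p + i == p + j with variable offsets simplifies to i == j. */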
8624 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8625 && (TREE_CODE (arg0) == ADDR_EXPR
8626 || TREE_CODE (arg1) == ADDR_EXPR
8627 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8628 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8629 {
8630 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8631 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8632 machine_mode mode;
8633 int volatilep, unsignedp;
8634 bool indirect_base0 = false, indirect_base1 = false;
8635
8636 /* Get base and offset for the access. Strip ADDR_EXPR for
8637 get_inner_reference, but put it back by stripping INDIRECT_REF
8638 off the base object if possible. indirect_baseN will be true
8639 if baseN is not an address but refers to the object itself. */
8640 base0 = arg0;
8641 if (TREE_CODE (arg0) == ADDR_EXPR)
8642 {
8643 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8644 &bitsize, &bitpos0, &offset0, &mode,
8645 &unsignedp, &volatilep, false);
8646 if (TREE_CODE (base0) == INDIRECT_REF)
8647 base0 = TREE_OPERAND (base0, 0);
8648 else
8649 indirect_base0 = true;
8650 }
8651 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8652 {
8653 base0 = TREE_OPERAND (arg0, 0);
8654 STRIP_SIGN_NOPS (base0);
8655 if (TREE_CODE (base0) == ADDR_EXPR)
8656 {
8657 base0 = TREE_OPERAND (base0, 0);
8658 indirect_base0 = true;
8659 }
8660 offset0 = TREE_OPERAND (arg0, 1);
8661 if (tree_fits_shwi_p (offset0))
8662 {
8663 HOST_WIDE_INT off = size_low_cst (offset0);
8664 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8665 * BITS_PER_UNIT)
8666 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8667 {
8668 bitpos0 = off * BITS_PER_UNIT;
8669 offset0 = NULL_TREE;
8670 }
8671 }
8672 }
8673
8674 base1 = arg1;
8675 if (TREE_CODE (arg1) == ADDR_EXPR)
8676 {
8677 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8678 &bitsize, &bitpos1, &offset1, &mode,
8679 &unsignedp, &volatilep, false);
8680 if (TREE_CODE (base1) == INDIRECT_REF)
8681 base1 = TREE_OPERAND (base1, 0);
8682 else
8683 indirect_base1 = true;
8684 }
8685 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8686 {
8687 base1 = TREE_OPERAND (arg1, 0);
8688 STRIP_SIGN_NOPS (base1);
8689 if (TREE_CODE (base1) == ADDR_EXPR)
8690 {
8691 base1 = TREE_OPERAND (base1, 0);
8692 indirect_base1 = true;
8693 }
8694 offset1 = TREE_OPERAND (arg1, 1);
8695 if (tree_fits_shwi_p (offset1))
8696 {
8697 HOST_WIDE_INT off = size_low_cst (offset1);
8698 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8699 * BITS_PER_UNIT)
8700 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8701 {
8702 bitpos1 = off * BITS_PER_UNIT;
8703 offset1 = NULL_TREE;
8704 }
8705 }
8706 }
8707
8708 /* A local variable can never be pointed to by
8709 the default SSA name of an incoming parameter. */
8710 if ((TREE_CODE (arg0) == ADDR_EXPR
8711 && indirect_base0
8712 && TREE_CODE (base0) == VAR_DECL
8713 && auto_var_in_fn_p (base0, current_function_decl)
8714 && !indirect_base1
8715 && TREE_CODE (base1) == SSA_NAME
8716 && SSA_NAME_IS_DEFAULT_DEF (base1)
8717 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8718 || (TREE_CODE (arg1) == ADDR_EXPR
8719 && indirect_base1
8720 && TREE_CODE (base1) == VAR_DECL
8721 && auto_var_in_fn_p (base1, current_function_decl)
8722 && !indirect_base0
8723 && TREE_CODE (base0) == SSA_NAME
8724 && SSA_NAME_IS_DEFAULT_DEF (base0)
8725 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8726 {
8727 if (code == NE_EXPR)
8728 return constant_boolean_node (1, type);
8729 else if (code == EQ_EXPR)
8730 return constant_boolean_node (0, type);
8731 }
8732 /* If we have equivalent bases we might be able to simplify. */
8733 else if (indirect_base0 == indirect_base1
8734 && operand_equal_p (base0, base1, 0))
8735 {
8736 /* We can fold this expression to a constant if the non-constant
8737 offset parts are equal. */
8738 if ((offset0 == offset1
8739 || (offset0 && offset1
8740 && operand_equal_p (offset0, offset1, 0)))
8741 && (code == EQ_EXPR
8742 || code == NE_EXPR
8743 || (indirect_base0 && DECL_P (base0))
8744 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8746 {
8747 if (!equality_code
8748 && bitpos0 != bitpos1
8749 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8750 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8751 fold_overflow_warning (("assuming pointer wraparound does not "
8752 "occur when comparing P +- C1 with "
8753 "P +- C2"),
8754 WARN_STRICT_OVERFLOW_CONDITIONAL);
8755
8756 switch (code)
8757 {
8758 case EQ_EXPR:
8759 return constant_boolean_node (bitpos0 == bitpos1, type);
8760 case NE_EXPR:
8761 return constant_boolean_node (bitpos0 != bitpos1, type);
8762 case LT_EXPR:
8763 return constant_boolean_node (bitpos0 < bitpos1, type);
8764 case LE_EXPR:
8765 return constant_boolean_node (bitpos0 <= bitpos1, type);
8766 case GE_EXPR:
8767 return constant_boolean_node (bitpos0 >= bitpos1, type);
8768 case GT_EXPR:
8769 return constant_boolean_node (bitpos0 > bitpos1, type);
8770 default:;
8771 }
8772 }
8773 /* We can simplify the comparison to a comparison of the variable
8774 offset parts if the constant offset parts are equal.
8775 Be careful to use signed sizetype here because otherwise we
8776 mess with array offsets in the wrong way. This is possible
8777 because pointer arithmetic is restricted to remain within an
8778 object and overflow on pointer differences is undefined as of
8779 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8780 else if (bitpos0 == bitpos1
8781 && (equality_code
8782 || (indirect_base0 && DECL_P (base0))
8783 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8784 {
8785 /* By converting to signed sizetype we cover middle-end pointer
8786 arithmetic which operates on unsigned pointer types of size
8787 type size and ARRAY_REF offsets which are properly sign or
8788 zero extended from their type in case it is narrower than
8789 sizetype. */
8790 if (offset0 == NULL_TREE)
8791 offset0 = build_int_cst (ssizetype, 0);
8792 else
8793 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8794 if (offset1 == NULL_TREE)
8795 offset1 = build_int_cst (ssizetype, 0);
8796 else
8797 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8798
8799 if (!equality_code
8800 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8801 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8802 fold_overflow_warning (("assuming pointer wraparound does not "
8803 "occur when comparing P +- C1 with "
8804 "P +- C2"),
8805 WARN_STRICT_OVERFLOW_COMPARISON);
8806
8807 return fold_build2_loc (loc, code, type, offset0, offset1);
8808 }
8809 }
8810 /* For non-equal bases we can simplify if they are addresses
8811 of local binding decls or constants. */
8812 else if (indirect_base0 && indirect_base1
8813 /* We know that !operand_equal_p (base0, base1, 0)
8814 because the if condition was false. But make
8815 sure two decls are not the same. */
8816 && base0 != base1
8817 && TREE_CODE (arg0) == ADDR_EXPR
8818 && TREE_CODE (arg1) == ADDR_EXPR
8819 && (((TREE_CODE (base0) == VAR_DECL
8820 || TREE_CODE (base0) == PARM_DECL)
8821 && (targetm.binds_local_p (base0)
8822 || CONSTANT_CLASS_P (base1)))
8823 || CONSTANT_CLASS_P (base0))
8824 && (((TREE_CODE (base1) == VAR_DECL
8825 || TREE_CODE (base1) == PARM_DECL)
8826 && (targetm.binds_local_p (base1)
8827 || CONSTANT_CLASS_P (base0)))
8828 || CONSTANT_CLASS_P (base1)))
8829 {
8830 if (code == EQ_EXPR)
8831 return omit_two_operands_loc (loc, type, boolean_false_node,
8832 arg0, arg1);
8833 else if (code == NE_EXPR)
8834 return omit_two_operands_loc (loc, type, boolean_true_node,
8835 arg0, arg1);
8836 }
8837 /* For equal offsets we can simplify to a comparison of the
8838 base addresses. */
8839 else if (bitpos0 == bitpos1
8840 && (indirect_base0
8841 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8842 && (indirect_base1
8843 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8844 && ((offset0 == offset1)
8845 || (offset0 && offset1
8846 && operand_equal_p (offset0, offset1, 0))))
8847 {
8848 if (indirect_base0)
8849 base0 = build_fold_addr_expr_loc (loc, base0);
8850 if (indirect_base1)
8851 base1 = build_fold_addr_expr_loc (loc, base1);
8852 return fold_build2_loc (loc, code, type, base0, base1);
8853 }
8854 }
8855
8856 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8857 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8858 the resulting offset is smaller in absolute value than the
8859 original one and has the same sign. */
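/* E.g. X + 2 < Y + 5 becomes X < Y + 3: the combined constant 3 is
smaller in absolute value than the original 5 and has the same
sign. */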
8860 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8861 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8862 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8863 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8864 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8865 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8866 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8867 {
8868 tree const1 = TREE_OPERAND (arg0, 1);
8869 tree const2 = TREE_OPERAND (arg1, 1);
8870 tree variable1 = TREE_OPERAND (arg0, 0);
8871 tree variable2 = TREE_OPERAND (arg1, 0);
8872 tree cst;
8873 const char * const warnmsg = G_("assuming signed overflow does not "
8874 "occur when combining constants around "
8875 "a comparison");
8876
8877 /* Put the constant on the side where it doesn't overflow and is
8878 of lower absolute value and of the same sign as before. */
8879 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8880 ? MINUS_EXPR : PLUS_EXPR,
8881 const2, const1);
8882 if (!TREE_OVERFLOW (cst)
8883 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8884 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8885 {
8886 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8887 return fold_build2_loc (loc, code, type,
8888 variable1,
8889 fold_build2_loc (loc, TREE_CODE (arg1),
8890 TREE_TYPE (arg1),
8891 variable2, cst));
8892 }
8893
8894 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8895 ? MINUS_EXPR : PLUS_EXPR,
8896 const1, const2);
8897 if (!TREE_OVERFLOW (cst)
8898 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8899 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8900 {
8901 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8902 return fold_build2_loc (loc, code, type,
8903 fold_build2_loc (loc, TREE_CODE (arg0),
8904 TREE_TYPE (arg0),
8905 variable1, cst),
8906 variable2);
8907 }
8908 }
8909
8910 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8911 signed arithmetic case. That form is created by the compiler
8912 often enough for folding it to be of value. One example is in
8913 computing loop trip counts after Operator Strength Reduction. */
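/* E.g. X * 4 > 0 becomes X > 0, and X * -4 > 0 becomes X < 0, since
a negative multiplier swaps the sense of the comparison. */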
8914 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8915 && TREE_CODE (arg0) == MULT_EXPR
8916 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8917 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8918 && integer_zerop (arg1))
8919 {
8920 tree const1 = TREE_OPERAND (arg0, 1);
8921 tree const2 = arg1; /* zero */
8922 tree variable1 = TREE_OPERAND (arg0, 0);
8923 enum tree_code cmp_code = code;
8924
8925 /* Handle unfolded multiplication by zero. */
8926 if (integer_zerop (const1))
8927 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8928
8929 fold_overflow_warning (("assuming signed overflow does not occur when "
8930 "eliminating multiplication in comparison "
8931 "with zero"),
8932 WARN_STRICT_OVERFLOW_COMPARISON);
8933
8934 /* If const1 is negative we swap the sense of the comparison. */
8935 if (tree_int_cst_sgn (const1) < 0)
8936 cmp_code = swap_tree_comparison (cmp_code);
8937
8938 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8939 }
8940
8941 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8942 if (tem)
8943 return tem;
8944
8945 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8946 {
8947 tree targ0 = strip_float_extensions (arg0);
8948 tree targ1 = strip_float_extensions (arg1);
8949 tree newtype = TREE_TYPE (targ0);
8950
8951 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8952 newtype = TREE_TYPE (targ1);
8953
8954 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8955 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8956 return fold_build2_loc (loc, code, type,
8957 fold_convert_loc (loc, newtype, targ0),
8958 fold_convert_loc (loc, newtype, targ1));
8959
8960 /* (-a) CMP (-b) -> b CMP a */
8961 if (TREE_CODE (arg0) == NEGATE_EXPR
8962 && TREE_CODE (arg1) == NEGATE_EXPR)
8963 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8964 TREE_OPERAND (arg0, 0));
8965
8966 if (TREE_CODE (arg1) == REAL_CST)
8967 {
8968 REAL_VALUE_TYPE cst;
8969 cst = TREE_REAL_CST (arg1);
8970
8971 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8972 if (TREE_CODE (arg0) == NEGATE_EXPR)
8973 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8974 TREE_OPERAND (arg0, 0),
8975 build_real (TREE_TYPE (arg1),
8976 real_value_negate (&cst)));
8977
8978 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8979 /* a CMP (-0) -> a CMP 0 */
8980 if (REAL_VALUE_MINUS_ZERO (cst))
8981 return fold_build2_loc (loc, code, type, arg0,
8982 build_real (TREE_TYPE (arg1), dconst0));
8983
8984 /* x != NaN is always true, other ops are always false. */
8985 if (REAL_VALUE_ISNAN (cst)
8986 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8987 {
8988 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8989 return omit_one_operand_loc (loc, type, tem, arg0);
8990 }
8991
8992 /* Fold comparisons against infinity. */
8993 if (REAL_VALUE_ISINF (cst)
8994 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
8995 {
8996 tem = fold_inf_compare (loc, code, type, arg0, arg1);
8997 if (tem != NULL_TREE)
8998 return tem;
8999 }
9000 }
9001
9002 /* If this is a comparison of a real constant with a PLUS_EXPR
9003 or a MINUS_EXPR of a real constant, we can convert it into a
9004 comparison with a revised real constant as long as no overflow
9005 occurs when unsafe_math_optimizations are enabled. */
9006 if (flag_unsafe_math_optimizations
9007 && TREE_CODE (arg1) == REAL_CST
9008 && (TREE_CODE (arg0) == PLUS_EXPR
9009 || TREE_CODE (arg0) == MINUS_EXPR)
9010 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9011 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9012 ? MINUS_EXPR : PLUS_EXPR,
9013 arg1, TREE_OPERAND (arg0, 1)))
9014 && !TREE_OVERFLOW (tem))
9015 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9016
9017 /* Likewise, we can simplify a comparison of a real constant with
9018 a MINUS_EXPR whose first operand is also a real constant, i.e.
9019 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9020 floating-point types only if -fassociative-math is set. */
9021 if (flag_associative_math
9022 && TREE_CODE (arg1) == REAL_CST
9023 && TREE_CODE (arg0) == MINUS_EXPR
9024 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9025 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9026 arg1))
9027 && !TREE_OVERFLOW (tem))
9028 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9029 TREE_OPERAND (arg0, 1), tem);
9030
9031 /* Fold comparisons against built-in math functions. */
9032 if (TREE_CODE (arg1) == REAL_CST
9033 && flag_unsafe_math_optimizations
9034 && ! flag_errno_math)
9035 {
9036 enum built_in_function fcode = builtin_mathfn_code (arg0);
9037
9038 if (fcode != END_BUILTINS)
9039 {
9040 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9041 if (tem != NULL_TREE)
9042 return tem;
9043 }
9044 }
9045 }
9046
9047 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9048 && CONVERT_EXPR_P (arg0))
9049 {
9050 /* If we are widening one operand of an integer comparison,
9051 see if the other operand is similarly being widened. Perhaps we
9052 can do the comparison in the narrower type. */
9053 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9054 if (tem)
9055 return tem;
9056
9057 /* Or if we are changing signedness. */
9058 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9059 if (tem)
9060 return tem;
9061 }
9062
9063 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9064 constant, we can simplify it. */
9065 if (TREE_CODE (arg1) == INTEGER_CST
9066 && (TREE_CODE (arg0) == MIN_EXPR
9067 || TREE_CODE (arg0) == MAX_EXPR)
9068 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9069 {
9070 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9071 if (tem)
9072 return tem;
9073 }
9074
9075 /* Simplify comparison of something with itself. (For IEEE
9076 floating-point, we can only do some of these simplifications.) */
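/* E.g. integral X == X folds to true and X < X to false. With IEEE
NaNs honored, X == X is left alone, X <= X folds to X == X, and
X < X still folds to false because comparisons involving NaN are
false. */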
9077 if (operand_equal_p (arg0, arg1, 0))
9078 {
9079 switch (code)
9080 {
9081 case EQ_EXPR:
9082 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9083 || ! HONOR_NANS (element_mode (arg0)))
9084 return constant_boolean_node (1, type);
9085 break;
9086
9087 case GE_EXPR:
9088 case LE_EXPR:
9089 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9090 || ! HONOR_NANS (element_mode (arg0)))
9091 return constant_boolean_node (1, type);
9092 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9093
9094 case NE_EXPR:
9095 /* For NE, we can only do this simplification if integer
9096 or we don't honor IEEE floating point NaNs. */
9097 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9098 && HONOR_NANS (element_mode (arg0)))
9099 break;
9100 /* ... fall through ... */
9101 case GT_EXPR:
9102 case LT_EXPR:
9103 return constant_boolean_node (0, type);
9104 default:
9105 gcc_unreachable ();
9106 }
9107 }
9108
9109 /* If we are comparing an expression that just has comparisons
9110 of two integer values, arithmetic expressions of those comparisons,
9111 and constants, we can simplify it. There are only three cases
9112 to check: the two values can either be equal, the first can be
9113 greater, or the second can be greater. Fold the expression for
9114 those three values. Since each value must be 0 or 1, we have
9115 eight possibilities, each of which corresponds to the constant 0
9116 or 1 or one of the six possible comparisons.
9117
9118 This handles common cases like (a > b) == 0 but also handles
9119 expressions like ((x > y) - (y > x)) > 0, which supposedly
9120 occur in macroized code. */
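/* E.g. for (a > b) == 0 the three foldings yield high_result 0,
equal_result 1 and low_result 1, selecting mask value 3 and hence
LE_EXPR: the whole expression becomes a <= b. */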
9121
9122 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9123 {
9124 tree cval1 = 0, cval2 = 0;
9125 int save_p = 0;
9126
9127 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9128 /* Don't handle degenerate cases here; they should already
9129 have been handled anyway. */
9130 && cval1 != 0 && cval2 != 0
9131 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9132 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9133 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9134 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9135 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9136 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9137 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9138 {
9139 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9140 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9141
9142 /* We can't just pass T to eval_subst in case cval1 or cval2
9143 was the same as ARG1. */
9144
9145 tree high_result
9146 = fold_build2_loc (loc, code, type,
9147 eval_subst (loc, arg0, cval1, maxval,
9148 cval2, minval),
9149 arg1);
9150 tree equal_result
9151 = fold_build2_loc (loc, code, type,
9152 eval_subst (loc, arg0, cval1, maxval,
9153 cval2, maxval),
9154 arg1);
9155 tree low_result
9156 = fold_build2_loc (loc, code, type,
9157 eval_subst (loc, arg0, cval1, minval,
9158 cval2, maxval),
9159 arg1);
9160
9161 /* All three of these results should be 0 or 1. Confirm they are.
9162 Then use those values to select the proper code to use. */
9163
9164 if (TREE_CODE (high_result) == INTEGER_CST
9165 && TREE_CODE (equal_result) == INTEGER_CST
9166 && TREE_CODE (low_result) == INTEGER_CST)
9167 {
9168 /* Make a 3-bit mask with the high-order bit being the
9169 value for `>', the next for '=', and the low for '<'. */
9170 switch ((integer_onep (high_result) * 4)
9171 + (integer_onep (equal_result) * 2)
9172 + integer_onep (low_result))
9173 {
9174 case 0:
9175 /* Always false. */
9176 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9177 case 1:
9178 code = LT_EXPR;
9179 break;
9180 case 2:
9181 code = EQ_EXPR;
9182 break;
9183 case 3:
9184 code = LE_EXPR;
9185 break;
9186 case 4:
9187 code = GT_EXPR;
9188 break;
9189 case 5:
9190 code = NE_EXPR;
9191 break;
9192 case 6:
9193 code = GE_EXPR;
9194 break;
9195 case 7:
9196 /* Always true. */
9197 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9198 }
9199
9200 if (save_p)
9201 {
9202 tem = save_expr (build2 (code, type, cval1, cval2));
9203 SET_EXPR_LOCATION (tem, loc);
9204 return tem;
9205 }
9206 return fold_build2_loc (loc, code, type, cval1, cval2);
9207 }
9208 }
9209 }
9210
9211 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9212 into a single range test. */
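/* E.g. X / 4 == 2 with truncating division holds exactly for
8 <= X <= 11, which fold_div_compare expresses as a range test. */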
9213 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9214 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9215 && TREE_CODE (arg1) == INTEGER_CST
9216 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9217 && !integer_zerop (TREE_OPERAND (arg0, 1))
9218 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9219 && !TREE_OVERFLOW (arg1))
9220 {
9221 tem = fold_div_compare (loc, code, type, arg0, arg1);
9222 if (tem != NULL_TREE)
9223 return tem;
9224 }
9225
9226 /* Fold ~X op ~Y as Y op X. */
9227 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9228 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9229 {
9230 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9231 return fold_build2_loc (loc, code, type,
9232 fold_convert_loc (loc, cmp_type,
9233 TREE_OPERAND (arg1, 0)),
9234 TREE_OPERAND (arg0, 0));
9235 }
9236
9237 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9238 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9239 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9240 {
9241 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9242 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9243 TREE_OPERAND (arg0, 0),
9244 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9245 fold_convert_loc (loc, cmp_type, arg1)));
9246 }
9247
9248 return NULL_TREE;
9249 }
9250
9251
9252 /* Subroutine of fold_binary. Optimize complex multiplications of the
9253 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9254 argument EXPR represents the expression "z" of type TYPE. */
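/* The underlying identity is (a + bi)(a - bi) = a*a + b*b, so the
imaginary part of the result is always zero. */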
9255
9256 static tree
9257 fold_mult_zconjz (location_t loc, tree type, tree expr)
9258 {
9259 tree itype = TREE_TYPE (type);
9260 tree rpart, ipart, tem;
9261
9262 if (TREE_CODE (expr) == COMPLEX_EXPR)
9263 {
9264 rpart = TREE_OPERAND (expr, 0);
9265 ipart = TREE_OPERAND (expr, 1);
9266 }
9267 else if (TREE_CODE (expr) == COMPLEX_CST)
9268 {
9269 rpart = TREE_REALPART (expr);
9270 ipart = TREE_IMAGPART (expr);
9271 }
9272 else
9273 {
9274 expr = save_expr (expr);
9275 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9276 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9277 }
9278
9279 rpart = save_expr (rpart);
9280 ipart = save_expr (ipart);
9281 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9282 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9283 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9284 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9285 build_zero_cst (itype));
9286 }
9287
9288
9289 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9290 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9291 guarantees that P and N have the same least significant log2(M) bits.
9292 N is not otherwise constrained. In particular, N is not normalized to
9293 0 <= N < M as is common. In general, the precise value of P is unknown.
9294 M is chosen as large as possible such that constant N can be determined.
9295
9296 Returns M and sets *RESIDUE to N.
9297
9298 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9299 account. This is not always possible due to PR 35705.
9300 */
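/* For instance, if P is the address of a 16-byte aligned object and
EXPR is P p+ 20, the result is M = 16 with *RESIDUE = 20; the residue
is deliberately not reduced modulo M. For P p+ i * 4 the modulus
drops to MIN (16, 4) = 4. */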
9301
9302 static unsigned HOST_WIDE_INT
9303 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9304 bool allow_func_align)
9305 {
9306 enum tree_code code;
9307
9308 *residue = 0;
9309
9310 code = TREE_CODE (expr);
9311 if (code == ADDR_EXPR)
9312 {
9313 unsigned int bitalign;
9314 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9315 *residue /= BITS_PER_UNIT;
9316 return bitalign / BITS_PER_UNIT;
9317 }
9318 else if (code == POINTER_PLUS_EXPR)
9319 {
9320 tree op0, op1;
9321 unsigned HOST_WIDE_INT modulus;
9322 enum tree_code inner_code;
9323
9324 op0 = TREE_OPERAND (expr, 0);
9325 STRIP_NOPS (op0);
9326 modulus = get_pointer_modulus_and_residue (op0, residue,
9327 allow_func_align);
9328
9329 op1 = TREE_OPERAND (expr, 1);
9330 STRIP_NOPS (op1);
9331 inner_code = TREE_CODE (op1);
9332 if (inner_code == INTEGER_CST)
9333 {
9334 *residue += TREE_INT_CST_LOW (op1);
9335 return modulus;
9336 }
9337 else if (inner_code == MULT_EXPR)
9338 {
9339 op1 = TREE_OPERAND (op1, 1);
9340 if (TREE_CODE (op1) == INTEGER_CST)
9341 {
9342 unsigned HOST_WIDE_INT align;
9343
9344 /* Compute the greatest power-of-2 divisor of op1. */
9345 align = TREE_INT_CST_LOW (op1);
9346 align &= -align;
9347
9348 /* If align is non-zero and less than *modulus, replace
9349 *modulus with align. If align is 0, then either op1 is 0
9350 or the greatest power-of-2 divisor of op1 doesn't fit in an
9351 unsigned HOST_WIDE_INT. In either case, no additional
9352 constraint is imposed. */
9353 if (align)
9354 modulus = MIN (modulus, align);
9355
9356 return modulus;
9357 }
9358 }
9359 }
9360
9361 /* If we get here, we were unable to determine anything useful about the
9362 expression. */
9363 return 1;
9364 }
9365
9366 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9367 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9368
9369 static bool
9370 vec_cst_ctor_to_array (tree arg, tree *elts)
9371 {
9372 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9373
9374 if (TREE_CODE (arg) == VECTOR_CST)
9375 {
9376 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9377 elts[i] = VECTOR_CST_ELT (arg, i);
9378 }
9379 else if (TREE_CODE (arg) == CONSTRUCTOR)
9380 {
9381 constructor_elt *elt;
9382
9383 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9384 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9385 return false;
9386 else
9387 elts[i] = elt->value;
9388 }
9389 else
9390 return false;
9391 for (; i < nelts; i++)
9392 elts[i]
9393 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9394 return true;
9395 }
9396
9397 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9398 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9399 NULL_TREE otherwise. */
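/* E.g. for V4SI operands {a,b,c,d} and {e,f,g,h}, the selector
{1,5,2,6} yields {b,f,c,g}: indexes below 4 select from ARG0 and
the rest from ARG1. */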
9400
9401 static tree
9402 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9403 {
9404 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9405 tree *elts;
9406 bool need_ctor = false;
9407
9408 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9409 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9410 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9411 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9412 return NULL_TREE;
9413
9414 elts = XALLOCAVEC (tree, nelts * 3);
9415 if (!vec_cst_ctor_to_array (arg0, elts)
9416 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9417 return NULL_TREE;
9418
9419 for (i = 0; i < nelts; i++)
9420 {
9421 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9422 need_ctor = true;
9423 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9424 }
9425
9426 if (need_ctor)
9427 {
9428 vec<constructor_elt, va_gc> *v;
9429 vec_alloc (v, nelts);
9430 for (i = 0; i < nelts; i++)
9431 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9432 return build_constructor (type, v);
9433 }
9434 else
9435 return build_vector (type, &elts[2 * nelts]);
9436 }
9437
9438 /* Try to fold a pointer difference of type TYPE between two address expressions of
9439 array references AREF0 and AREF1 using location LOC. Return a
9440 simplified expression for the difference or NULL_TREE. */
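/* E.g. &a[4] - &a[1] becomes (4 - 1) * sizeof (a[0]), the byte
distance between the two elements. */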
9441
9442 static tree
9443 fold_addr_of_array_ref_difference (location_t loc, tree type,
9444 tree aref0, tree aref1)
9445 {
9446 tree base0 = TREE_OPERAND (aref0, 0);
9447 tree base1 = TREE_OPERAND (aref1, 0);
9448 tree base_offset = build_int_cst (type, 0);
9449
9450 /* If the bases are array references as well, recurse. If the bases
9451 are pointer indirections compute the difference of the pointers.
9452 If the bases are equal, we are set. */
9453 if ((TREE_CODE (base0) == ARRAY_REF
9454 && TREE_CODE (base1) == ARRAY_REF
9455 && (base_offset
9456 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9457 || (INDIRECT_REF_P (base0)
9458 && INDIRECT_REF_P (base1)
9459 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9460 TREE_OPERAND (base0, 0),
9461 TREE_OPERAND (base1, 0))))
9462 || operand_equal_p (base0, base1, 0))
9463 {
9464 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9465 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9466 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9467 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9468 return fold_build2_loc (loc, PLUS_EXPR, type,
9469 base_offset,
9470 fold_build2_loc (loc, MULT_EXPR, type,
9471 diff, esz));
9472 }
9473 return NULL_TREE;
9474 }
9475
9476 /* If the real or vector real constant CST of type TYPE has an exact
9477 inverse, return it, else return NULL. */
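/* E.g. 4.0 yields 0.25, which is exactly representable in binary
floating point, while 3.0 yields NULL_TREE because 1/3 is not. */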
9478
9479 tree
9480 exact_inverse (tree type, tree cst)
9481 {
9482 REAL_VALUE_TYPE r;
9483 tree unit_type, *elts;
9484 machine_mode mode;
9485 unsigned vec_nelts, i;
9486
9487 switch (TREE_CODE (cst))
9488 {
9489 case REAL_CST:
9490 r = TREE_REAL_CST (cst);
9491
9492 if (exact_real_inverse (TYPE_MODE (type), &r))
9493 return build_real (type, r);
9494
9495 return NULL_TREE;
9496
9497 case VECTOR_CST:
9498 vec_nelts = VECTOR_CST_NELTS (cst);
9499 elts = XALLOCAVEC (tree, vec_nelts);
9500 unit_type = TREE_TYPE (type);
9501 mode = TYPE_MODE (unit_type);
9502
9503 for (i = 0; i < vec_nelts; i++)
9504 {
9505 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9506 if (!exact_real_inverse (mode, &r))
9507 return NULL_TREE;
9508 elts[i] = build_real (unit_type, r);
9509 }
9510
9511 return build_vector (type, elts);
9512
9513 default:
9514 return NULL_TREE;
9515 }
9516 }
9517
9518 /* Mask out the tz least significant bits of X of type TYPE where
9519 tz is the number of trailing zeroes in Y. */
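/* E.g. for Y = 24 (binary 11000, three trailing zeroes) and X = 45
(binary 101101) the result is 40 (binary 101000). */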
9520 static wide_int
9521 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9522 {
9523 int tz = wi::ctz (y);
9524 if (tz > 0)
9525 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9526 return x;
9527 }
9528
9529 /* Return true when T is an address and is known to be nonzero.
9530 For floating point we further ensure that T is not denormal.
9531 Similar logic is present in nonzero_address_p in rtlanal.c.
9532
9533 If the return value is based on the assumption that signed overflow
9534 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9535 change *STRICT_OVERFLOW_P. */
9536
9537 static bool
9538 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9539 {
9540 tree type = TREE_TYPE (t);
9541 enum tree_code code;
9542
9543 /* Doing something useful for floating point would need more work. */
9544 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9545 return false;
9546
9547 code = TREE_CODE (t);
9548 switch (TREE_CODE_CLASS (code))
9549 {
9550 case tcc_unary:
9551 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9552 strict_overflow_p);
9553 case tcc_binary:
9554 case tcc_comparison:
9555 return tree_binary_nonzero_warnv_p (code, type,
9556 TREE_OPERAND (t, 0),
9557 TREE_OPERAND (t, 1),
9558 strict_overflow_p);
9559 case tcc_constant:
9560 case tcc_declaration:
9561 case tcc_reference:
9562 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9563
9564 default:
9565 break;
9566 }
9567
9568 switch (code)
9569 {
9570 case TRUTH_NOT_EXPR:
9571 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9572 strict_overflow_p);
9573
9574 case TRUTH_AND_EXPR:
9575 case TRUTH_OR_EXPR:
9576 case TRUTH_XOR_EXPR:
9577 return tree_binary_nonzero_warnv_p (code, type,
9578 TREE_OPERAND (t, 0),
9579 TREE_OPERAND (t, 1),
9580 strict_overflow_p);
9581
9582 case COND_EXPR:
9583 case CONSTRUCTOR:
9584 case OBJ_TYPE_REF:
9585 case ASSERT_EXPR:
9586 case ADDR_EXPR:
9587 case WITH_SIZE_EXPR:
9588 case SSA_NAME:
9589 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9590
9591 case COMPOUND_EXPR:
9592 case MODIFY_EXPR:
9593 case BIND_EXPR:
9594 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9595 strict_overflow_p);
9596
9597 case SAVE_EXPR:
9598 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9599 strict_overflow_p);
9600
9601 case CALL_EXPR:
9602 {
9603 tree fndecl = get_callee_fndecl (t);
9604 if (!fndecl) return false;
9605 if (flag_delete_null_pointer_checks && !flag_check_new
9606 && DECL_IS_OPERATOR_NEW (fndecl)
9607 && !TREE_NOTHROW (fndecl))
9608 return true;
9609 if (flag_delete_null_pointer_checks
9610 && lookup_attribute ("returns_nonnull",
9611 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9612 return true;
9613 return alloca_call_p (t);
9614 }
9615
9616 default:
9617 break;
9618 }
9619 return false;
9620 }
9621
9622 /* Return true when T is an address and is known to be nonzero.
9623 Handle warnings about undefined signed overflow. */
9624
9625 static bool
9626 tree_expr_nonzero_p (tree t)
9627 {
9628 bool ret, strict_overflow_p;
9629
9630 strict_overflow_p = false;
9631 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9632 if (strict_overflow_p)
9633 fold_overflow_warning (("assuming signed overflow does not occur when "
9634 "determining that expression is always "
9635 "non-zero"),
9636 WARN_STRICT_OVERFLOW_MISC);
9637 return ret;
9638 }
9639
9640 /* Fold a binary expression of code CODE and type TYPE with operands
9641 OP0 and OP1. LOC is the location of the resulting expression.
9642 Return the folded expression if folding is successful. Otherwise,
9643 return NULL_TREE. */
9644
9645 tree
9646 fold_binary_loc (location_t loc,
9647 enum tree_code code, tree type, tree op0, tree op1)
9648 {
9649 enum tree_code_class kind = TREE_CODE_CLASS (code);
9650 tree arg0, arg1, tem;
9651 tree t1 = NULL_TREE;
9652 bool strict_overflow_p;
9653 unsigned int prec;
9654
9655 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9656 && TREE_CODE_LENGTH (code) == 2
9657 && op0 != NULL_TREE
9658 && op1 != NULL_TREE);
9659
9660 arg0 = op0;
9661 arg1 = op1;
9662
9663 /* Strip any conversions that don't change the mode. This is
9664 safe for every expression, except for a comparison expression
9665 because its signedness is derived from its operands. So, in
9666 the latter case, only strip conversions that don't change the
9667 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9668 preserved.
9669
9670 Note that this is done as an internal manipulation within the
9671 constant folder, in order to find the simplest representation
9672 of the arguments so that their form can be studied. In any
9673 cases, the appropriate type conversions should be put back in
9674 the tree that will get out of the constant folder. */
9675
9676 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9677 {
9678 STRIP_SIGN_NOPS (arg0);
9679 STRIP_SIGN_NOPS (arg1);
9680 }
9681 else
9682 {
9683 STRIP_NOPS (arg0);
9684 STRIP_NOPS (arg1);
9685 }
9686
9687 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9688 constant but we can't do arithmetic on them. */
9689 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9690 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9691 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9692 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9693 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9694 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9695 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9696 {
9697 if (kind == tcc_binary)
9698 {
9699 /* Make sure type and arg0 have the same saturating flag. */
9700 gcc_assert (TYPE_SATURATING (type)
9701 == TYPE_SATURATING (TREE_TYPE (arg0)));
9702 tem = const_binop (code, arg0, arg1);
9703 }
9704 else if (kind == tcc_comparison)
9705 tem = fold_relational_const (code, type, arg0, arg1);
9706 else
9707 tem = NULL_TREE;
9708
9709 if (tem != NULL_TREE)
9710 {
9711 if (TREE_TYPE (tem) != type)
9712 tem = fold_convert_loc (loc, type, tem);
9713 return tem;
9714 }
9715 }
9716
9717 /* If this is a commutative operation, and ARG0 is a constant, move it
9718 to ARG1 to reduce the number of tests below. */
9719 if (commutative_tree_code (code)
9720 && tree_swap_operands_p (arg0, arg1, true))
9721 return fold_build2_loc (loc, code, type, op1, op0);
9722
9723 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9724 to ARG1 to reduce the number of tests below. */
9725 if (kind == tcc_comparison
9726 && tree_swap_operands_p (arg0, arg1, true))
9727 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9728
9729 tem = generic_simplify (loc, code, type, op0, op1);
9730 if (tem)
9731 return tem;
9732
9733 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9734
9735 First check for cases where an arithmetic operation is applied to a
9736 compound, conditional, or comparison operation. Push the arithmetic
9737 operation inside the compound or conditional to see if any folding
9738 can then be done. Convert comparison to conditional for this purpose.
9739 This also optimizes non-constant cases that used to be done in
9740 expand_expr.
9741
9742 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9743 one of the operands is a comparison and the other is a comparison, a
9744 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9745 code below would make the expression more complex. Change it to a
9746 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9747 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9748
9749 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9750 || code == EQ_EXPR || code == NE_EXPR)
9751 && TREE_CODE (type) != VECTOR_TYPE
9752 && ((truth_value_p (TREE_CODE (arg0))
9753 && (truth_value_p (TREE_CODE (arg1))
9754 || (TREE_CODE (arg1) == BIT_AND_EXPR
9755 && integer_onep (TREE_OPERAND (arg1, 1)))))
9756 || (truth_value_p (TREE_CODE (arg1))
9757 && (truth_value_p (TREE_CODE (arg0))
9758 || (TREE_CODE (arg0) == BIT_AND_EXPR
9759 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9760 {
9761 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9762 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9763 : TRUTH_XOR_EXPR,
9764 boolean_type_node,
9765 fold_convert_loc (loc, boolean_type_node, arg0),
9766 fold_convert_loc (loc, boolean_type_node, arg1));
9767
9768 if (code == EQ_EXPR)
9769 tem = invert_truthvalue_loc (loc, tem);
9770
9771 return fold_convert_loc (loc, type, tem);
9772 }
9773
9774 if (TREE_CODE_CLASS (code) == tcc_binary
9775 || TREE_CODE_CLASS (code) == tcc_comparison)
9776 {
9777 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9778 {
9779 tem = fold_build2_loc (loc, code, type,
9780 fold_convert_loc (loc, TREE_TYPE (op0),
9781 TREE_OPERAND (arg0, 1)), op1);
9782 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9783 tem);
9784 }
9785 if (TREE_CODE (arg1) == COMPOUND_EXPR
9786 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9787 {
9788 tem = fold_build2_loc (loc, code, type, op0,
9789 fold_convert_loc (loc, TREE_TYPE (op1),
9790 TREE_OPERAND (arg1, 1)));
9791 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9792 tem);
9793 }
9794
9795 if (TREE_CODE (arg0) == COND_EXPR
9796 || TREE_CODE (arg0) == VEC_COND_EXPR
9797 || COMPARISON_CLASS_P (arg0))
9798 {
9799 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9800 arg0, arg1,
9801 /*cond_first_p=*/1);
9802 if (tem != NULL_TREE)
9803 return tem;
9804 }
9805
9806 if (TREE_CODE (arg1) == COND_EXPR
9807 || TREE_CODE (arg1) == VEC_COND_EXPR
9808 || COMPARISON_CLASS_P (arg1))
9809 {
9810 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9811 arg1, arg0,
9812 /*cond_first_p=*/0);
9813 if (tem != NULL_TREE)
9814 return tem;
9815 }
9816 }
9817
9818 switch (code)
9819 {
9820 case MEM_REF:
9821 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9822 if (TREE_CODE (arg0) == ADDR_EXPR
9823 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9824 {
9825 tree iref = TREE_OPERAND (arg0, 0);
9826 return fold_build2 (MEM_REF, type,
9827 TREE_OPERAND (iref, 0),
9828 int_const_binop (PLUS_EXPR, arg1,
9829 TREE_OPERAND (iref, 1)));
9830 }
9831
9832 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9833 if (TREE_CODE (arg0) == ADDR_EXPR
9834 && handled_component_p (TREE_OPERAND (arg0, 0)))
9835 {
9836 tree base;
9837 HOST_WIDE_INT coffset;
9838 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9839 &coffset);
9840 if (!base)
9841 return NULL_TREE;
9842 return fold_build2 (MEM_REF, type,
9843 build_fold_addr_expr (base),
9844 int_const_binop (PLUS_EXPR, arg1,
9845 size_int (coffset)));
9846 }
9847
9848 return NULL_TREE;
9849
9850 case POINTER_PLUS_EXPR:
9851 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9852 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9853 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9854 return fold_convert_loc (loc, type,
9855 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9856 fold_convert_loc (loc, sizetype,
9857 arg1),
9858 fold_convert_loc (loc, sizetype,
9859 arg0)));
9860
9861 /* PTR_CST +p CST -> CST1 */
9862 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9863 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9864 fold_convert_loc (loc, type, arg1));
9865
9866 return NULL_TREE;
9867
9868 case PLUS_EXPR:
9869 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9870 {
9871 /* X + (X / CST) * -CST is X % CST. */
9872 if (TREE_CODE (arg1) == MULT_EXPR
9873 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9874 && operand_equal_p (arg0,
9875 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9876 {
9877 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9878 tree cst1 = TREE_OPERAND (arg1, 1);
9879 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9880 cst1, cst0);
9881 if (sum && integer_zerop (sum))
9882 return fold_convert_loc (loc, type,
9883 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9884 TREE_TYPE (arg0), arg0,
9885 cst0));
9886 }
9887 }
9888
9889 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9890 one. Make sure the type is not saturating and has the signedness of
9891 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9892 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9893 if ((TREE_CODE (arg0) == MULT_EXPR
9894 || TREE_CODE (arg1) == MULT_EXPR)
9895 && !TYPE_SATURATING (type)
9896 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9897 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9898 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9899 {
9900 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9901 if (tem)
9902 return tem;
9903 }
9904
9905 if (! FLOAT_TYPE_P (type))
9906 {
9907 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9908 with a constant, and the two constants have no bits in common,
9909 we should treat this as a BIT_IOR_EXPR since this may produce more
9910 simplifications. */
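/* E.g. (X & 8) + (X & 7) becomes (X & 8) | (X & 7), which may then
fold further to X & 15. */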
9911 if (TREE_CODE (arg0) == BIT_AND_EXPR
9912 && TREE_CODE (arg1) == BIT_AND_EXPR
9913 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9914 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9915 && wi::bit_and (TREE_OPERAND (arg0, 1),
9916 TREE_OPERAND (arg1, 1)) == 0)
9917 {
9918 code = BIT_IOR_EXPR;
9919 goto bit_ior;
9920 }
9921
9922 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9923 (plus (plus (mult) (mult)) (foo)) so that we can
9924 take advantage of the factoring cases below. */
9925 if (TYPE_OVERFLOW_WRAPS (type)
9926 && (((TREE_CODE (arg0) == PLUS_EXPR
9927 || TREE_CODE (arg0) == MINUS_EXPR)
9928 && TREE_CODE (arg1) == MULT_EXPR)
9929 || ((TREE_CODE (arg1) == PLUS_EXPR
9930 || TREE_CODE (arg1) == MINUS_EXPR)
9931 && TREE_CODE (arg0) == MULT_EXPR)))
9932 {
9933 tree parg0, parg1, parg, marg;
9934 enum tree_code pcode;
9935
9936 if (TREE_CODE (arg1) == MULT_EXPR)
9937 parg = arg0, marg = arg1;
9938 else
9939 parg = arg1, marg = arg0;
9940 pcode = TREE_CODE (parg);
9941 parg0 = TREE_OPERAND (parg, 0);
9942 parg1 = TREE_OPERAND (parg, 1);
9943 STRIP_NOPS (parg0);
9944 STRIP_NOPS (parg1);
9945
9946 if (TREE_CODE (parg0) == MULT_EXPR
9947 && TREE_CODE (parg1) != MULT_EXPR)
9948 return fold_build2_loc (loc, pcode, type,
9949 fold_build2_loc (loc, PLUS_EXPR, type,
9950 fold_convert_loc (loc, type,
9951 parg0),
9952 fold_convert_loc (loc, type,
9953 marg)),
9954 fold_convert_loc (loc, type, parg1));
9955 if (TREE_CODE (parg0) != MULT_EXPR
9956 && TREE_CODE (parg1) == MULT_EXPR)
9957 return
9958 fold_build2_loc (loc, PLUS_EXPR, type,
9959 fold_convert_loc (loc, type, parg0),
9960 fold_build2_loc (loc, pcode, type,
9961 fold_convert_loc (loc, type, marg),
9962 fold_convert_loc (loc, type,
9963 parg1)));
9964 }
9965 }
9966 else
9967 {
9968 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9969 to __complex__ ( x, y ). This is not the same for SNaNs or
9970 if signed zeros are involved. */
9971 if (!HONOR_SNANS (element_mode (arg0))
9972 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9973 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9974 {
9975 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9976 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9977 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9978 bool arg0rz = false, arg0iz = false;
9979 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9980 || (arg0i && (arg0iz = real_zerop (arg0i))))
9981 {
9982 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9983 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9984 if (arg0rz && arg1i && real_zerop (arg1i))
9985 {
9986 tree rp = arg1r ? arg1r
9987 : build1 (REALPART_EXPR, rtype, arg1);
9988 tree ip = arg0i ? arg0i
9989 : build1 (IMAGPART_EXPR, rtype, arg0);
9990 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9991 }
9992 else if (arg0iz && arg1r && real_zerop (arg1r))
9993 {
9994 tree rp = arg0r ? arg0r
9995 : build1 (REALPART_EXPR, rtype, arg0);
9996 tree ip = arg1i ? arg1i
9997 : build1 (IMAGPART_EXPR, rtype, arg1);
9998 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9999 }
10000 }
10001 }
10002
10003 if (flag_unsafe_math_optimizations
10004 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10005 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10006 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10007 return tem;
10008
10009 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10010 We associate floats only if the user has specified
10011 -fassociative-math. */
10012 if (flag_associative_math
10013 && TREE_CODE (arg1) == PLUS_EXPR
10014 && TREE_CODE (arg0) != MULT_EXPR)
10015 {
10016 tree tree10 = TREE_OPERAND (arg1, 0);
10017 tree tree11 = TREE_OPERAND (arg1, 1);
10018 if (TREE_CODE (tree11) == MULT_EXPR
10019 && TREE_CODE (tree10) == MULT_EXPR)
10020 {
10021 tree tree0;
10022 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10023 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10024 }
10025 }
10026 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10027 We associate floats only if the user has specified
10028 -fassociative-math. */
10029 if (flag_associative_math
10030 && TREE_CODE (arg0) == PLUS_EXPR
10031 && TREE_CODE (arg1) != MULT_EXPR)
10032 {
10033 tree tree00 = TREE_OPERAND (arg0, 0);
10034 tree tree01 = TREE_OPERAND (arg0, 1);
10035 if (TREE_CODE (tree01) == MULT_EXPR
10036 && TREE_CODE (tree00) == MULT_EXPR)
10037 {
10038 tree tree0;
10039 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10040 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10041 }
10042 }
10043 }
10044
10045 bit_rotate:
10046 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10047 is a rotate of A by C1 bits. */
10048 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10049 is a rotate of A by B bits. */
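/* E.g. for unsigned 32-bit X, (X << 3) + (X >> 29) becomes a left
rotate of X by 3, and (X << B) + (X >> (32 - B)) a rotate by B. */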
10050 {
10051 enum tree_code code0, code1;
10052 tree rtype;
10053 code0 = TREE_CODE (arg0);
10054 code1 = TREE_CODE (arg1);
10055 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10056 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10057 && operand_equal_p (TREE_OPERAND (arg0, 0),
10058 TREE_OPERAND (arg1, 0), 0)
10059 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10060 TYPE_UNSIGNED (rtype))
10061 /* Only create rotates in complete modes. Other cases are not
10062 expanded properly. */
10063 && (element_precision (rtype)
10064 == element_precision (TYPE_MODE (rtype))))
10065 {
10066 tree tree01, tree11;
10067 enum tree_code code01, code11;
10068
10069 tree01 = TREE_OPERAND (arg0, 1);
10070 tree11 = TREE_OPERAND (arg1, 1);
10071 STRIP_NOPS (tree01);
10072 STRIP_NOPS (tree11);
10073 code01 = TREE_CODE (tree01);
10074 code11 = TREE_CODE (tree11);
10075 if (code01 == INTEGER_CST
10076 && code11 == INTEGER_CST
10077 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10078 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10079 {
10080 tem = build2_loc (loc, LROTATE_EXPR,
10081 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10082 TREE_OPERAND (arg0, 0),
10083 code0 == LSHIFT_EXPR ? tree01 : tree11);
10084 return fold_convert_loc (loc, type, tem);
10085 }
10086 else if (code11 == MINUS_EXPR)
10087 {
10088 tree tree110, tree111;
10089 tree110 = TREE_OPERAND (tree11, 0);
10090 tree111 = TREE_OPERAND (tree11, 1);
10091 STRIP_NOPS (tree110);
10092 STRIP_NOPS (tree111);
10093 if (TREE_CODE (tree110) == INTEGER_CST
10094 && 0 == compare_tree_int (tree110,
10095 element_precision
10096 (TREE_TYPE (TREE_OPERAND
10097 (arg0, 0))))
10098 && operand_equal_p (tree01, tree111, 0))
10099 return
10100 fold_convert_loc (loc, type,
10101 build2 ((code0 == LSHIFT_EXPR
10102 ? LROTATE_EXPR
10103 : RROTATE_EXPR),
10104 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10105 TREE_OPERAND (arg0, 0), tree01));
10106 }
10107 else if (code01 == MINUS_EXPR)
10108 {
10109 tree tree010, tree011;
10110 tree010 = TREE_OPERAND (tree01, 0);
10111 tree011 = TREE_OPERAND (tree01, 1);
10112 STRIP_NOPS (tree010);
10113 STRIP_NOPS (tree011);
10114 if (TREE_CODE (tree010) == INTEGER_CST
10115 && 0 == compare_tree_int (tree010,
10116 element_precision
10117 (TREE_TYPE (TREE_OPERAND
10118 (arg0, 0))))
10119 && operand_equal_p (tree11, tree011, 0))
10120 return fold_convert_loc
10121 (loc, type,
10122 build2 ((code0 != LSHIFT_EXPR
10123 ? LROTATE_EXPR
10124 : RROTATE_EXPR),
10125 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10126 TREE_OPERAND (arg0, 0), tree11));
10127 }
10128 }
10129 }
10130
10131 associate:
10132 /* In most languages, we can't associate operations on floats through
10133 parentheses. Rather than remember where the parentheses were, we
10134 don't associate floats at all, unless the user has specified
10135 -fassociative-math.
10136 And, we need to make sure type is not saturating. */
10137
10138 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10139 && !TYPE_SATURATING (type))
10140 {
10141 tree var0, con0, lit0, minus_lit0;
10142 tree var1, con1, lit1, minus_lit1;
10143 tree atype = type;
10144 bool ok = true;
10145
10146 /* Split both trees into variables, constants, and literals. Then
10147 associate each group together, the constants with literals,
10148 then the result with variables. This increases the chances of
10149 literals being recombined later and of generating relocatable
10150 expressions for the sum of a constant and literal. */
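/* E.g. (X + 1) + (Y + 2) splits into variables X, Y and literals
1, 2, and reassociates to (X + Y) + 3. */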
10151 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10152 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10153 code == MINUS_EXPR);
10154
10155 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10156 if (code == MINUS_EXPR)
10157 code = PLUS_EXPR;
10158
10159 /* With undefined overflow prefer doing association in a type
10160 which wraps on overflow, if that is one of the operand types. */
10161 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10162 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10163 {
10164 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10165 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10166 atype = TREE_TYPE (arg0);
10167 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10168 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10169 atype = TREE_TYPE (arg1);
10170 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10171 }
10172
10173 /* With undefined overflow we can only associate constants with one
10174 variable, and constants whose association doesn't overflow. */
10175 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10176 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10177 {
10178 if (var0 && var1)
10179 {
10180 tree tmp0 = var0;
10181 tree tmp1 = var1;
10182
10183 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10184 tmp0 = TREE_OPERAND (tmp0, 0);
10185 if (CONVERT_EXPR_P (tmp0)
10186 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10187 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10188 <= TYPE_PRECISION (atype)))
10189 tmp0 = TREE_OPERAND (tmp0, 0);
10190 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10191 tmp1 = TREE_OPERAND (tmp1, 0);
10192 if (CONVERT_EXPR_P (tmp1)
10193 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10194 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10195 <= TYPE_PRECISION (atype)))
10196 tmp1 = TREE_OPERAND (tmp1, 0);
10197 /* The only case we can still associate with two variables
10198 is if they are the same, modulo negation and bit-pattern
10199 preserving conversions. */
10200 if (!operand_equal_p (tmp0, tmp1, 0))
10201 ok = false;
10202 }
10203 }
10204
10205 /* Only do something if we found more than two objects. Otherwise,
10206 nothing has changed and we risk infinite recursion. */
10207 if (ok
10208 && (2 < ((var0 != 0) + (var1 != 0)
10209 + (con0 != 0) + (con1 != 0)
10210 + (lit0 != 0) + (lit1 != 0)
10211 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10212 {
10213 bool any_overflows = false;
10214 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10215 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10216 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10217 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10218 var0 = associate_trees (loc, var0, var1, code, atype);
10219 con0 = associate_trees (loc, con0, con1, code, atype);
10220 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10221 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10222 code, atype);
10223
10224 /* Preserve the MINUS_EXPR if the negative part of the literal is
10225 greater than the positive part. Otherwise, the multiplicative
10226 folding code (i.e. extract_muldiv) may be fooled in case
10227 unsigned constants are subtracted, like in the following
10228 example: ((X*2 + 4) - 8U)/2. */
10229 if (minus_lit0 && lit0)
10230 {
10231 if (TREE_CODE (lit0) == INTEGER_CST
10232 && TREE_CODE (minus_lit0) == INTEGER_CST
10233 && tree_int_cst_lt (lit0, minus_lit0))
10234 {
10235 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10236 MINUS_EXPR, atype);
10237 lit0 = 0;
10238 }
10239 else
10240 {
10241 lit0 = associate_trees (loc, lit0, minus_lit0,
10242 MINUS_EXPR, atype);
10243 minus_lit0 = 0;
10244 }
10245 }
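/* Illustrative walk-through of the example above (values from the
   comment, not computed here): for ((X*2 + 4) - 8U)/2 the split
   yields lit0 = 4 and minus_lit0 = 8.  Since 4 < 8 we keep the
   subtraction and form minus_lit0 = 8 - 4 = 4, giving X*2 - 4U,
   which extract_muldiv can then safely divide through by 2.  */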
10246
10247 /* Don't introduce overflows through reassociation. */
10248 if (!any_overflows
10249 && ((lit0 && TREE_OVERFLOW (lit0))
10250 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10251 return NULL_TREE;
10252
10253 if (minus_lit0)
10254 {
10255 if (con0 == 0)
10256 return
10257 fold_convert_loc (loc, type,
10258 associate_trees (loc, var0, minus_lit0,
10259 MINUS_EXPR, atype));
10260 else
10261 {
10262 con0 = associate_trees (loc, con0, minus_lit0,
10263 MINUS_EXPR, atype);
10264 return
10265 fold_convert_loc (loc, type,
10266 associate_trees (loc, var0, con0,
10267 PLUS_EXPR, atype));
10268 }
10269 }
10270
10271 con0 = associate_trees (loc, con0, lit0, code, atype);
10272 return
10273 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10274 code, atype));
10275 }
10276 }
10277
10278 return NULL_TREE;
10279
10280 case MINUS_EXPR:
10281 /* Pointer simplifications for subtraction, simple reassociations. */
10282 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10283 {
10284 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10285 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10286 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10287 {
10288 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10289 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10290 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10291 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10292 return fold_build2_loc (loc, PLUS_EXPR, type,
10293 fold_build2_loc (loc, MINUS_EXPR, type,
10294 arg00, arg10),
10295 fold_build2_loc (loc, MINUS_EXPR, type,
10296 arg01, arg11));
10297 }
10298 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10299 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10300 {
10301 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10302 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10303 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10304 fold_convert_loc (loc, type, arg1));
10305 if (tmp)
10306 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10307 }
10308 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10309 simplifies. */
10310 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10311 {
10312 tree arg10 = fold_convert_loc (loc, type,
10313 TREE_OPERAND (arg1, 0));
10314 tree arg11 = fold_convert_loc (loc, type,
10315 TREE_OPERAND (arg1, 1));
10316 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10317 fold_convert_loc (loc, type, arg0),
10318 arg10);
10319 if (tmp)
10320 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10321 }
10322 }
10323 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10324 if (TREE_CODE (arg0) == NEGATE_EXPR
10325 && negate_expr_p (arg1)
10326 && reorder_operands_p (arg0, arg1))
10327 return fold_build2_loc (loc, MINUS_EXPR, type,
10328 fold_convert_loc (loc, type,
10329 negate_expr (arg1)),
10330 fold_convert_loc (loc, type,
10331 TREE_OPERAND (arg0, 0)));
10332 /* Convert -A - 1 to ~A. */
10333 if (TREE_CODE (arg0) == NEGATE_EXPR
10334 && integer_each_onep (arg1)
10335 && !TYPE_OVERFLOW_TRAPS (type))
10336 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10337 fold_convert_loc (loc, type,
10338 TREE_OPERAND (arg0, 0)));
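/* This rests on the two's complement identity ~A == -A - 1;
   e.g. for A = 5, -A - 1 = -6, and ~5 is also -6.  */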
10339
10340 /* Convert -1 - A to ~A. */
10341 if (TREE_CODE (type) != COMPLEX_TYPE
10342 && integer_all_onesp (arg0))
10343 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10344
10345
10346 /* X - (X / Y) * Y is X % Y. */
10347 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10348 && TREE_CODE (arg1) == MULT_EXPR
10349 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10350 && operand_equal_p (arg0,
10351 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10352 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10353 TREE_OPERAND (arg1, 1), 0))
10354 return
10355 fold_convert_loc (loc, type,
10356 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10357 arg0, TREE_OPERAND (arg1, 1)));
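/* Sanity check with sample values: for X = 7, Y = 3 we get
   7 - (7 / 3) * 3 = 7 - 6 = 1, which is exactly 7 % 3.  */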
10358
10359 if (! FLOAT_TYPE_P (type))
10360 {
10361 /* Fold A - (A & B) into ~B & A. */
10362 if (!TREE_SIDE_EFFECTS (arg0)
10363 && TREE_CODE (arg1) == BIT_AND_EXPR)
10364 {
10365 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10366 {
10367 tree arg10 = fold_convert_loc (loc, type,
10368 TREE_OPERAND (arg1, 0));
10369 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10370 fold_build1_loc (loc, BIT_NOT_EXPR,
10371 type, arg10),
10372 fold_convert_loc (loc, type, arg0));
10373 }
10374 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10375 {
10376 tree arg11 = fold_convert_loc (loc,
10377 type, TREE_OPERAND (arg1, 1));
10378 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10379 fold_build1_loc (loc, BIT_NOT_EXPR,
10380 type, arg11),
10381 fold_convert_loc (loc, type, arg0));
10382 }
10383 }
10384
10385 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10386 any power of 2 minus 1. */
10387 if (TREE_CODE (arg0) == BIT_AND_EXPR
10388 && TREE_CODE (arg1) == BIT_AND_EXPR
10389 && operand_equal_p (TREE_OPERAND (arg0, 0),
10390 TREE_OPERAND (arg1, 0), 0))
10391 {
10392 tree mask0 = TREE_OPERAND (arg0, 1);
10393 tree mask1 = TREE_OPERAND (arg1, 1);
10394 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10395
10396 if (operand_equal_p (tem, mask1, 0))
10397 {
10398 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10399 TREE_OPERAND (arg0, 0), mask1);
10400 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10401 }
10402 }
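/* Example instance (values chosen for illustration): with
   A = 13 (0b1101) and B = 3 (0b0011, a power of 2 minus 1),
   (A & ~B) - (A & B) = 12 - 1 = 11 and (A ^ B) - B = 14 - 3 = 11,
   so the two forms agree.  */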
10403 }
10404
10405 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10406 __complex__ ( x, -y ). This is not the same for SNaNs or if
10407 signed zeros are involved. */
10408 if (!HONOR_SNANS (element_mode (arg0))
10409 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10410 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10411 {
10412 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10413 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10414 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10415 bool arg0rz = false, arg0iz = false;
10416 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10417 || (arg0i && (arg0iz = real_zerop (arg0i))))
10418 {
10419 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10420 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10421 if (arg0rz && arg1i && real_zerop (arg1i))
10422 {
10423 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10424 arg1r ? arg1r
10425 : build1 (REALPART_EXPR, rtype, arg1));
10426 tree ip = arg0i ? arg0i
10427 : build1 (IMAGPART_EXPR, rtype, arg0);
10428 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10429 }
10430 else if (arg0iz && arg1r && real_zerop (arg1r))
10431 {
10432 tree rp = arg0r ? arg0r
10433 : build1 (REALPART_EXPR, rtype, arg0);
10434 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10435 arg1i ? arg1i
10436 : build1 (IMAGPART_EXPR, rtype, arg1));
10437 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10438 }
10439 }
10440 }
10441
10442 /* A - B -> A + (-B) if B is easily negatable. */
10443 if (negate_expr_p (arg1)
10444 && !TYPE_OVERFLOW_SANITIZED (type)
10445 && ((FLOAT_TYPE_P (type)
10446 /* Avoid this transformation if B is a positive REAL_CST. */
10447 && (TREE_CODE (arg1) != REAL_CST
10448 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10449 || INTEGRAL_TYPE_P (type)))
10450 return fold_build2_loc (loc, PLUS_EXPR, type,
10451 fold_convert_loc (loc, type, arg0),
10452 fold_convert_loc (loc, type,
10453 negate_expr (arg1)));
10454
10455 /* Try folding difference of addresses. */
10456 {
10457 HOST_WIDE_INT diff;
10458
10459 if ((TREE_CODE (arg0) == ADDR_EXPR
10460 || TREE_CODE (arg1) == ADDR_EXPR)
10461 && ptr_difference_const (arg0, arg1, &diff))
10462 return build_int_cst_type (type, diff);
10463 }
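/* E.g. for char buf[10], &buf[7] - &buf[2] has a compile-time
   byte difference of 5, which ptr_difference_const recovers.  */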
10464
10465 /* Fold &a[i] - &a[j] to i-j. */
10466 if (TREE_CODE (arg0) == ADDR_EXPR
10467 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10468 && TREE_CODE (arg1) == ADDR_EXPR
10469 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10470 {
10471 tree tem = fold_addr_of_array_ref_difference (loc, type,
10472 TREE_OPERAND (arg0, 0),
10473 TREE_OPERAND (arg1, 0));
10474 if (tem)
10475 return tem;
10476 }
10477
10478 if (FLOAT_TYPE_P (type)
10479 && flag_unsafe_math_optimizations
10480 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10481 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10482 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10483 return tem;
10484
10485 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10486 one. Make sure the type is not saturating and has the signedness of
10487 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10488 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10489 if ((TREE_CODE (arg0) == MULT_EXPR
10490 || TREE_CODE (arg1) == MULT_EXPR)
10491 && !TYPE_SATURATING (type)
10492 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10493 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10494 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10495 {
10496 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10497 if (tem)
10498 return tem;
10499 }
10500
10501 goto associate;
10502
10503 case MULT_EXPR:
10504 /* (-A) * (-B) -> A * B */
10505 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10506 return fold_build2_loc (loc, MULT_EXPR, type,
10507 fold_convert_loc (loc, type,
10508 TREE_OPERAND (arg0, 0)),
10509 fold_convert_loc (loc, type,
10510 negate_expr (arg1)));
10511 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10512 return fold_build2_loc (loc, MULT_EXPR, type,
10513 fold_convert_loc (loc, type,
10514 negate_expr (arg0)),
10515 fold_convert_loc (loc, type,
10516 TREE_OPERAND (arg1, 0)));
10517
10518 if (! FLOAT_TYPE_P (type))
10519 {
10520 /* Transform x * -C into -x * C if x is easily negatable. */
10521 if (TREE_CODE (arg1) == INTEGER_CST
10522 && tree_int_cst_sgn (arg1) == -1
10523 && negate_expr_p (arg0)
10524 && (tem = negate_expr (arg1)) != arg1
10525 && !TREE_OVERFLOW (tem))
10526 return fold_build2_loc (loc, MULT_EXPR, type,
10527 fold_convert_loc (loc, type,
10528 negate_expr (arg0)),
10529 tem);
10530
10531 /* (a * (1 << b)) is (a << b) */
10532 if (TREE_CODE (arg1) == LSHIFT_EXPR
10533 && integer_onep (TREE_OPERAND (arg1, 0)))
10534 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10535 TREE_OPERAND (arg1, 1));
10536 if (TREE_CODE (arg0) == LSHIFT_EXPR
10537 && integer_onep (TREE_OPERAND (arg0, 0)))
10538 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10539 TREE_OPERAND (arg0, 1));
10540
10541 /* (A + A) * C -> A * 2 * C */
10542 if (TREE_CODE (arg0) == PLUS_EXPR
10543 && TREE_CODE (arg1) == INTEGER_CST
10544 && operand_equal_p (TREE_OPERAND (arg0, 0),
10545 TREE_OPERAND (arg0, 1), 0))
10546 return fold_build2_loc (loc, MULT_EXPR, type,
10547 omit_one_operand_loc (loc, type,
10548 TREE_OPERAND (arg0, 0),
10549 TREE_OPERAND (arg0, 1)),
10550 fold_build2_loc (loc, MULT_EXPR, type,
10551 build_int_cst (type, 2), arg1));
10552
10553 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10554 sign-changing only. */
10555 if (TREE_CODE (arg1) == INTEGER_CST
10556 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10557 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10558 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
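/* The EXACT_DIV_EXPR guarantees there is no remainder, so, as an
   illustrative instance, (T) (12 /[ex] 4) * 4 == 12 for any
   sign-changing conversion T of the same precision.  */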
10559
10560 strict_overflow_p = false;
10561 if (TREE_CODE (arg1) == INTEGER_CST
10562 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10563 &strict_overflow_p)))
10564 {
10565 if (strict_overflow_p)
10566 fold_overflow_warning (("assuming signed overflow does not "
10567 "occur when simplifying "
10568 "multiplication"),
10569 WARN_STRICT_OVERFLOW_MISC);
10570 return fold_convert_loc (loc, type, tem);
10571 }
10572
10573 /* Optimize z * conj(z) for integer complex numbers. */
10574 if (TREE_CODE (arg0) == CONJ_EXPR
10575 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10576 return fold_mult_zconjz (loc, type, arg1);
10577 if (TREE_CODE (arg1) == CONJ_EXPR
10578 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10579 return fold_mult_zconjz (loc, type, arg0);
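/* Mathematical note (illustrative): for z = a + bi,
   z * conj(z) = a*a + b*b with a zero imaginary part, which is
   the form fold_mult_zconjz is expected to build here.  */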
10580 }
10581 else
10582 {
10583 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10584 the result for floating-point types due to rounding, so it is applied
10585 only if -fassociative-math is specified. */
10586 if (flag_associative_math
10587 && TREE_CODE (arg0) == RDIV_EXPR
10588 && TREE_CODE (arg1) == REAL_CST
10589 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10590 {
10591 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10592 arg1);
10593 if (tem)
10594 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10595 TREE_OPERAND (arg0, 1));
10596 }
10597
10598 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10599 if (operand_equal_p (arg0, arg1, 0))
10600 {
10601 tree tem = fold_strip_sign_ops (arg0);
10602 if (tem != NULL_TREE)
10603 {
10604 tem = fold_convert_loc (loc, type, tem);
10605 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10606 }
10607 }
10608
10609 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10610 This is not the same for NaNs or if signed zeros are
10611 involved. */
10612 if (!HONOR_NANS (element_mode (arg0))
10613 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10614 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10615 && TREE_CODE (arg1) == COMPLEX_CST
10616 && real_zerop (TREE_REALPART (arg1)))
10617 {
10618 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10619 if (real_onep (TREE_IMAGPART (arg1)))
10620 return
10621 fold_build2_loc (loc, COMPLEX_EXPR, type,
10622 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10623 rtype, arg0)),
10624 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10625 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10626 return
10627 fold_build2_loc (loc, COMPLEX_EXPR, type,
10628 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10629 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10630 rtype, arg0)));
10631 }
10632
10633 /* Optimize z * conj(z) for floating point complex numbers.
10634 Guarded by flag_unsafe_math_optimizations as non-finite
10635 imaginary components don't produce scalar results. */
10636 if (flag_unsafe_math_optimizations
10637 && TREE_CODE (arg0) == CONJ_EXPR
10638 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10639 return fold_mult_zconjz (loc, type, arg1);
10640 if (flag_unsafe_math_optimizations
10641 && TREE_CODE (arg1) == CONJ_EXPR
10642 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10643 return fold_mult_zconjz (loc, type, arg0);
10644
10645 if (flag_unsafe_math_optimizations)
10646 {
10647 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10648 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10649
10650 /* Optimizations of root(...)*root(...). */
10651 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10652 {
10653 tree rootfn, arg;
10654 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10655 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10656
10657 /* Optimize sqrt(x)*sqrt(x) as x. */
10658 if (BUILTIN_SQRT_P (fcode0)
10659 && operand_equal_p (arg00, arg10, 0)
10660 && ! HONOR_SNANS (element_mode (type)))
10661 return arg00;
10662
10663 /* Optimize root(x)*root(y) as root(x*y). */
10664 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10665 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10666 return build_call_expr_loc (loc, rootfn, 1, arg);
10667 }
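/* Numeric sketch (illustrative only, valid under
   -funsafe-math-optimizations): sqrt(2.0)*sqrt(8.0) becomes
   sqrt(16.0) == 4.0.  */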
10668
10669 /* Optimize expN(x)*expN(y) as expN(x+y). */
10670 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10671 {
10672 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10673 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10674 CALL_EXPR_ARG (arg0, 0),
10675 CALL_EXPR_ARG (arg1, 0));
10676 return build_call_expr_loc (loc, expfn, 1, arg);
10677 }
10678
10679 /* Optimizations of pow(...)*pow(...). */
10680 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10681 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10682 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10683 {
10684 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10685 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10686 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10687 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10688
10689 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10690 if (operand_equal_p (arg01, arg11, 0))
10691 {
10692 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10693 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10694 arg00, arg10);
10695 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10696 }
10697
10698 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10699 if (operand_equal_p (arg00, arg10, 0))
10700 {
10701 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10702 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10703 arg01, arg11);
10704 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10705 }
10706 }
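/* Illustrative instances of the two pow folds above:
   pow(2,5)*pow(3,5) -> pow(6,5) == 7776, and
   pow(2,3)*pow(2,4) -> pow(2,7) == 128.  */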
10707
10708 /* Optimize tan(x)*cos(x) as sin(x). */
10709 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10710 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10711 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10712 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10713 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10714 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10715 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10716 CALL_EXPR_ARG (arg1, 0), 0))
10717 {
10718 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10719
10720 if (sinfn != NULL_TREE)
10721 return build_call_expr_loc (loc, sinfn, 1,
10722 CALL_EXPR_ARG (arg0, 0));
10723 }
10724
10725 /* Optimize x*pow(x,c) as pow(x,c+1). */
10726 if (fcode1 == BUILT_IN_POW
10727 || fcode1 == BUILT_IN_POWF
10728 || fcode1 == BUILT_IN_POWL)
10729 {
10730 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10731 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10732 if (TREE_CODE (arg11) == REAL_CST
10733 && !TREE_OVERFLOW (arg11)
10734 && operand_equal_p (arg0, arg10, 0))
10735 {
10736 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10737 REAL_VALUE_TYPE c;
10738 tree arg;
10739
10740 c = TREE_REAL_CST (arg11);
10741 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10742 arg = build_real (type, c);
10743 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10744 }
10745 }
10746
10747 /* Optimize pow(x,c)*x as pow(x,c+1). */
10748 if (fcode0 == BUILT_IN_POW
10749 || fcode0 == BUILT_IN_POWF
10750 || fcode0 == BUILT_IN_POWL)
10751 {
10752 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10753 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10754 if (TREE_CODE (arg01) == REAL_CST
10755 && !TREE_OVERFLOW (arg01)
10756 && operand_equal_p (arg1, arg00, 0))
10757 {
10758 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10759 REAL_VALUE_TYPE c;
10760 tree arg;
10761
10762 c = TREE_REAL_CST (arg01);
10763 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10764 arg = build_real (type, c);
10765 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10766 }
10767 }
10768
10769 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10770 if (!in_gimple_form
10771 && optimize
10772 && operand_equal_p (arg0, arg1, 0))
10773 {
10774 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10775
10776 if (powfn)
10777 {
10778 tree arg = build_real (type, dconst2);
10779 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10780 }
10781 }
10782 }
10783 }
10784 goto associate;
10785
10786 case BIT_IOR_EXPR:
10787 bit_ior:
10788 /* ~X | X is -1. */
10789 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10790 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10791 {
10792 t1 = build_zero_cst (type);
10793 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10794 return omit_one_operand_loc (loc, type, t1, arg1);
10795 }
10796
10797 /* X | ~X is -1. */
10798 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10799 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10800 {
10801 t1 = build_zero_cst (type);
10802 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10803 return omit_one_operand_loc (loc, type, t1, arg0);
10804 }
10805
10806 /* Canonicalize (X & C1) | C2. */
10807 if (TREE_CODE (arg0) == BIT_AND_EXPR
10808 && TREE_CODE (arg1) == INTEGER_CST
10809 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10810 {
10811 int width = TYPE_PRECISION (type), w;
10812 wide_int c1 = TREE_OPERAND (arg0, 1);
10813 wide_int c2 = arg1;
10814
10815 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10816 if ((c1 & c2) == c1)
10817 return omit_one_operand_loc (loc, type, arg1,
10818 TREE_OPERAND (arg0, 0));
10819
10820 wide_int msk = wi::mask (width, false,
10821 TYPE_PRECISION (TREE_TYPE (arg1)));
10822
10823 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10824 if (msk.and_not (c1 | c2) == 0)
10825 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10826 TREE_OPERAND (arg0, 0), arg1);
10827
10828 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10829 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10830 mode which allows further optimizations. */
10831 c1 &= msk;
10832 c2 &= msk;
10833 wide_int c3 = c1.and_not (c2);
10834 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10835 {
10836 wide_int mask = wi::mask (w, false,
10837 TYPE_PRECISION (type));
10838 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10839 {
10840 c3 = mask;
10841 break;
10842 }
10843 }
10844
10845 if (c3 != c1)
10846 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10847 fold_build2_loc (loc, BIT_AND_EXPR, type,
10848 TREE_OPERAND (arg0, 0),
10849 wide_int_to_tree (type,
10850 c3)),
10851 arg1);
10852 }
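/* Worked example (values for illustration): for (X & 0x0F) | 0x0C
   we have (C1 & C2) != C1 and (C1 | C2) != ~0, so C1 is minimized
   to C1 & ~C2 = 0x03 and the result is (X & 0x03) | 0x0C -- bits 2
   and 3 are forced to 1 by C2, so masking them in C1 is redundant.  */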
10853
10854 /* (X & ~Y) | (~X & Y) is X ^ Y */
10855 if (TREE_CODE (arg0) == BIT_AND_EXPR
10856 && TREE_CODE (arg1) == BIT_AND_EXPR)
10857 {
10858 tree a0, a1, l0, l1, n0, n1;
10859
10860 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10861 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10862
10863 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10864 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10865
10866 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10867 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10868
10869 if ((operand_equal_p (n0, a0, 0)
10870 && operand_equal_p (n1, a1, 0))
10871 || (operand_equal_p (n0, a1, 0)
10872 && operand_equal_p (n1, a0, 0)))
10873 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10874 }
10875
10876 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10877 if (t1 != NULL_TREE)
10878 return t1;
10879
10880 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10881
10882 This results in more efficient code for machines without a NAND
10883 instruction. Combine will canonicalize to the first form
10884 which will allow use of NAND instructions provided by the
10885 backend if they exist. */
10886 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10887 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10888 {
10889 return
10890 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10891 build2 (BIT_AND_EXPR, type,
10892 fold_convert_loc (loc, type,
10893 TREE_OPERAND (arg0, 0)),
10894 fold_convert_loc (loc, type,
10895 TREE_OPERAND (arg1, 0))));
10896 }
10897
10898 /* See if this can be simplified into a rotate first. If that
10899 is unsuccessful continue in the association code. */
10900 goto bit_rotate;
10901
10902 case BIT_XOR_EXPR:
10903 /* ~X ^ X is -1. */
10904 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10905 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10906 {
10907 t1 = build_zero_cst (type);
10908 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10909 return omit_one_operand_loc (loc, type, t1, arg1);
10910 }
10911
10912 /* X ^ ~X is -1. */
10913 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10914 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10915 {
10916 t1 = build_zero_cst (type);
10917 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10918 return omit_one_operand_loc (loc, type, t1, arg0);
10919 }
10920
10921 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10922 with a constant, and the two constants have no bits in common,
10923 we should treat this as a BIT_IOR_EXPR since this may produce more
10924 simplifications. */
10925 if (TREE_CODE (arg0) == BIT_AND_EXPR
10926 && TREE_CODE (arg1) == BIT_AND_EXPR
10927 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10928 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10929 && wi::bit_and (TREE_OPERAND (arg0, 1),
10930 TREE_OPERAND (arg1, 1)) == 0)
10931 {
10932 code = BIT_IOR_EXPR;
10933 goto bit_ior;
10934 }
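/* E.g. (X & 0x0F) ^ (Y & 0xF0): the masked values share no bits,
   so every result bit comes from exactly one operand and the XOR
   behaves like an inclusive OR here.  */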
10935
10936 /* (X | Y) ^ X -> Y & ~X. */
10937 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10938 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10939 {
10940 tree t2 = TREE_OPERAND (arg0, 1);
10941 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10942 arg1);
10943 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10944 fold_convert_loc (loc, type, t2),
10945 fold_convert_loc (loc, type, t1));
10946 return t1;
10947 }
10948
10949 /* (Y | X) ^ X -> Y & ~X. */
10950 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10951 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10952 {
10953 tree t2 = TREE_OPERAND (arg0, 0);
10954 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10955 arg1);
10956 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10957 fold_convert_loc (loc, type, t2),
10958 fold_convert_loc (loc, type, t1));
10959 return t1;
10960 }
10961
10962 /* X ^ (X | Y) -> Y & ~X. */
10963 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10964 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10965 {
10966 tree t2 = TREE_OPERAND (arg1, 1);
10967 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10968 arg0);
10969 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10970 fold_convert_loc (loc, type, t2),
10971 fold_convert_loc (loc, type, t1));
10972 return t1;
10973 }
10974
10975 /* X ^ (Y | X) -> Y & ~X. */
10976 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10977 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10978 {
10979 tree t2 = TREE_OPERAND (arg1, 0);
10980 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10981 arg0);
10982 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10983 fold_convert_loc (loc, type, t2),
10984 fold_convert_loc (loc, type, t1));
10985 return t1;
10986 }
10987
10988 /* Convert ~X ^ ~Y to X ^ Y. */
10989 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10990 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10991 return fold_build2_loc (loc, code, type,
10992 fold_convert_loc (loc, type,
10993 TREE_OPERAND (arg0, 0)),
10994 fold_convert_loc (loc, type,
10995 TREE_OPERAND (arg1, 0)));
10996
10997 /* Convert ~X ^ C to X ^ ~C. */
10998 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10999 && TREE_CODE (arg1) == INTEGER_CST)
11000 return fold_build2_loc (loc, code, type,
11001 fold_convert_loc (loc, type,
11002 TREE_OPERAND (arg0, 0)),
11003 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11004
11005 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11006 if (TREE_CODE (arg0) == BIT_AND_EXPR
11007 && INTEGRAL_TYPE_P (type)
11008 && integer_onep (TREE_OPERAND (arg0, 1))
11009 && integer_onep (arg1))
11010 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11011 build_zero_cst (TREE_TYPE (arg0)));
11012
11013 /* Fold (X & Y) ^ Y as ~X & Y. */
11014 if (TREE_CODE (arg0) == BIT_AND_EXPR
11015 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11016 {
11017 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11018 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11019 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11020 fold_convert_loc (loc, type, arg1));
11021 }
11022 /* Fold (X & Y) ^ X as ~Y & X. */
11023 if (TREE_CODE (arg0) == BIT_AND_EXPR
11024 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11025 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11026 {
11027 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11028 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11029 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11030 fold_convert_loc (loc, type, arg1));
11031 }
11032 /* Fold X ^ (X & Y) as X & ~Y. */
11033 if (TREE_CODE (arg1) == BIT_AND_EXPR
11034 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11035 {
11036 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11037 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11038 fold_convert_loc (loc, type, arg0),
11039 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11040 }
11041 /* Fold X ^ (Y & X) as ~Y & X. */
11042 if (TREE_CODE (arg1) == BIT_AND_EXPR
11043 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11044 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11045 {
11046 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11047 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11048 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11049 fold_convert_loc (loc, type, arg0));
11050 }
11051
11052 /* See if this can be simplified into a rotate first. If that
11053 is unsuccessful continue in the association code. */
11054 goto bit_rotate;
11055
11056 case BIT_AND_EXPR:
11057 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11058 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11059 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11060 || (TREE_CODE (arg0) == EQ_EXPR
11061 && integer_zerop (TREE_OPERAND (arg0, 1))))
11062 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11063 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11064
11065 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11066 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11067 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11068 || (TREE_CODE (arg1) == EQ_EXPR
11069 && integer_zerop (TREE_OPERAND (arg1, 1))))
11070 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11071 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11072
11073 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11074 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11075 && INTEGRAL_TYPE_P (type)
11076 && integer_onep (TREE_OPERAND (arg0, 1))
11077 && integer_onep (arg1))
11078 {
11079 tree tem2;
11080 tem = TREE_OPERAND (arg0, 0);
11081 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11082 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11083 tem, tem2);
11084 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11085 build_zero_cst (TREE_TYPE (tem)));
11086 }
11087 /* Fold ~X & 1 as (X & 1) == 0. */
11088 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11089 && INTEGRAL_TYPE_P (type)
11090 && integer_onep (arg1))
11091 {
11092 tree tem2;
11093 tem = TREE_OPERAND (arg0, 0);
11094 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11095 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11096 tem, tem2);
11097 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11098 build_zero_cst (TREE_TYPE (tem)));
11099 }
11100 /* Fold !X & 1 as X == 0. */
11101 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11102 && integer_onep (arg1))
11103 {
11104 tem = TREE_OPERAND (arg0, 0);
11105 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11106 build_zero_cst (TREE_TYPE (tem)));
11107 }
11108
11109 /* Fold (X ^ Y) & Y as ~X & Y. */
11110 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11111 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11112 {
11113 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11114 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11115 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11116 fold_convert_loc (loc, type, arg1));
11117 }
11118 /* Fold (X ^ Y) & X as ~Y & X. */
11119 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11120 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11121 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11122 {
11123 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11124 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11125 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11126 fold_convert_loc (loc, type, arg1));
11127 }
11128 /* Fold X & (X ^ Y) as X & ~Y. */
11129 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11130 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11131 {
11132 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11133 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11134 fold_convert_loc (loc, type, arg0),
11135 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11136 }
11137 /* Fold X & (Y ^ X) as ~Y & X. */
11138 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11139 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11140 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11141 {
11142 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11143 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11144 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11145 fold_convert_loc (loc, type, arg0));
11146 }
11147
11148 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11149 multiple of 1 << CST. */
11150 if (TREE_CODE (arg1) == INTEGER_CST)
11151 {
11152 wide_int cst1 = arg1;
11153 wide_int ncst1 = -cst1;
11154 if ((cst1 & ncst1) == ncst1
11155 && multiple_of_p (type, arg0,
11156 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11157 return fold_convert_loc (loc, type, arg0);
11158 }
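/* Example (illustrative values): with arg1 = -8, i.e. -(1 << 3),
   and X * Y a known multiple of 8, the low three bits of the
   product are already zero, so the AND is an identity and X * Y
   is returned unchanged.  */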
11159
11160 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11161 bits from CST2. */
11162 if (TREE_CODE (arg1) == INTEGER_CST
11163 && TREE_CODE (arg0) == MULT_EXPR
11164 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11165 {
11166 wide_int warg1 = arg1;
11167 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11168
11169 if (masked == 0)
11170 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11171 arg0, arg1);
11172 else if (masked != warg1)
11173 {
11174 /* Avoid the transform if arg1 is a mask of some
11175 mode which allows further optimizations. */
11176 int pop = wi::popcount (warg1);
11177 if (!(pop >= BITS_PER_UNIT
11178 && exact_log2 (pop) != -1
11179 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11180 return fold_build2_loc (loc, code, type, op0,
11181 wide_int_to_tree (type, masked));
11182 }
11183 }
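/* For instance (X * 4) & 0xFD: the two low bits of X * 4 are known
   zero, so bit 0 of the mask can be dropped, yielding
   (X * 4) & 0xFC; with a mask of 0x03 the whole expression would
   fold to zero instead.  */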
11184
11185 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11186 ((A & N) + B) & M -> (A + B) & M
11187 Similarly if (N & M) == 0,
11188 ((A | N) + B) & M -> (A + B) & M
11189 and for - instead of + (or unary - instead of +)
11190 and/or ^ instead of |.
11191 If B is constant and (B & M) == 0, fold into A & M. */
11192 if (TREE_CODE (arg1) == INTEGER_CST)
11193 {
11194 wide_int cst1 = arg1;
11195 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11196 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11197 && (TREE_CODE (arg0) == PLUS_EXPR
11198 || TREE_CODE (arg0) == MINUS_EXPR
11199 || TREE_CODE (arg0) == NEGATE_EXPR)
11200 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11201 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11202 {
11203 tree pmop[2];
11204 int which = 0;
11205 wide_int cst0;
11206
11207 /* Now we know that arg0 is (C + D) or (C - D) or
11208 -C and arg1 (M) is == (1LL << cst) - 1.
11209 Store C into PMOP[0] and D into PMOP[1]. */
11210 pmop[0] = TREE_OPERAND (arg0, 0);
11211 pmop[1] = NULL;
11212 if (TREE_CODE (arg0) != NEGATE_EXPR)
11213 {
11214 pmop[1] = TREE_OPERAND (arg0, 1);
11215 which = 1;
11216 }
11217
11218 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11219 which = -1;
11220
11221 for (; which >= 0; which--)
11222 switch (TREE_CODE (pmop[which]))
11223 {
11224 case BIT_AND_EXPR:
11225 case BIT_IOR_EXPR:
11226 case BIT_XOR_EXPR:
11227 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11228 != INTEGER_CST)
11229 break;
11230 cst0 = TREE_OPERAND (pmop[which], 1);
11231 cst0 &= cst1;
11232 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11233 {
11234 if (cst0 != cst1)
11235 break;
11236 }
11237 else if (cst0 != 0)
11238 break;
11239 /* If C or D is of the form (A & N) where
11240 (N & M) == M, or of the form (A | N) or
11241 (A ^ N) where (N & M) == 0, replace it with A. */
11242 pmop[which] = TREE_OPERAND (pmop[which], 0);
11243 break;
11244 case INTEGER_CST:
11245 /* If C or D is a N where (N & M) == 0, it can be
11246 omitted (assumed 0). */
11247 if ((TREE_CODE (arg0) == PLUS_EXPR
11248 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11249 && (cst1 & pmop[which]) == 0)
11250 pmop[which] = NULL;
11251 break;
11252 default:
11253 break;
11254 }
11255
11256 /* Only build anything new if we optimized one or both arguments
11257 above. */
11258 if (pmop[0] != TREE_OPERAND (arg0, 0)
11259 || (TREE_CODE (arg0) != NEGATE_EXPR
11260 && pmop[1] != TREE_OPERAND (arg0, 1)))
11261 {
11262 tree utype = TREE_TYPE (arg0);
11263 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11264 {
11265 /* Perform the operations in a type that has defined
11266 overflow behavior. */
11267 utype = unsigned_type_for (TREE_TYPE (arg0));
11268 if (pmop[0] != NULL)
11269 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11270 if (pmop[1] != NULL)
11271 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11272 }
11273
11274 if (TREE_CODE (arg0) == NEGATE_EXPR)
11275 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11276 else if (TREE_CODE (arg0) == PLUS_EXPR)
11277 {
11278 if (pmop[0] != NULL && pmop[1] != NULL)
11279 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11280 pmop[0], pmop[1]);
11281 else if (pmop[0] != NULL)
11282 tem = pmop[0];
11283 else if (pmop[1] != NULL)
11284 tem = pmop[1];
11285 else
11286 return build_int_cst (type, 0);
11287 }
11288 else if (pmop[0] == NULL)
11289 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11290 else
11291 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11292 pmop[0], pmop[1]);
11293 /* TEM is now the new binary +, - or unary - replacement. */
11294 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11295 fold_convert_loc (loc, utype, arg1));
11296 return fold_convert_loc (loc, type, tem);
11297 }
11298 }
11299 }
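/* Worked instance (illustrative): with M = 0xFF and N = 0xFFF we
   have (N & M) == M, so ((A & 0xFFF) + B) & 0xFF simplifies to
   (A + B) & 0xFF; the discarded high bits of A cannot influence
   the retained low eight bits of the sum.  Note the arithmetic is
   redone in an unsigned type when the original type does not wrap,
   to keep overflow defined.  */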
11300
11301 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11302 if (t1 != NULL_TREE)
11303 return t1;
11304 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11305 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11306 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11307 {
11308 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11309
11310 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11311 if (mask == -1)
11312 return
11313 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11314 }
11315
11316 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11317
11318 This results in more efficient code for machines without a NOR
11319 instruction. Combine will canonicalize to the first form
11320 which will allow use of NOR instructions provided by the
11321 backend if they exist. */
11322 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11323 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11324 {
11325 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11326 build2 (BIT_IOR_EXPR, type,
11327 fold_convert_loc (loc, type,
11328 TREE_OPERAND (arg0, 0)),
11329 fold_convert_loc (loc, type,
11330 TREE_OPERAND (arg1, 0))));
11331 }
11332
11333 /* If arg0 is derived from the address of an object or function, we may
11334 be able to fold this expression using the object or function's
11335 alignment. */
11336 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11337 {
11338 unsigned HOST_WIDE_INT modulus, residue;
11339 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11340
11341 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11342 integer_onep (arg1));
11343
11344 /* This works because modulus is a power of 2. If this weren't the
11345 case, we'd have to replace it by its greatest power-of-2
11346 divisor: modulus & -modulus. */
11347 if (low < modulus)
11348 return build_int_cst (type, residue & low);
11349 }
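/* Hypothetical example: if get_pointer_modulus_and_residue reports
   modulus 8 and residue 4 (the pointer sits 4 bytes past an 8-byte
   boundary), then for arg1 == 7 the fold yields the constant
   4 & 7 == 4.  */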
11350
11351 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11352 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11353 if the new mask might be further optimized. */
11354 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11355 || TREE_CODE (arg0) == RSHIFT_EXPR)
11356 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11357 && TREE_CODE (arg1) == INTEGER_CST
11358 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11359 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11360 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11361 < TYPE_PRECISION (TREE_TYPE (arg0))))
11362 {
11363 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11364 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11365 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11366 tree shift_type = TREE_TYPE (arg0);
11367
11368 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11369 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11370 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11371 && TYPE_PRECISION (TREE_TYPE (arg0))
11372 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11373 {
11374 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11375 tree arg00 = TREE_OPERAND (arg0, 0);
11376 /* See if more bits can be proven as zero because of
11377 zero extension. */
11378 if (TREE_CODE (arg00) == NOP_EXPR
11379 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11380 {
11381 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11382 if (TYPE_PRECISION (inner_type)
11383 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11384 && TYPE_PRECISION (inner_type) < prec)
11385 {
11386 prec = TYPE_PRECISION (inner_type);
11387 /* See if we can shorten the right shift. */
11388 if (shiftc < prec)
11389 shift_type = inner_type;
11390 /* Otherwise X >> C1 is all zeros, so we'll optimize
11391 it into (X, 0) later on by making sure zerobits
11392 is all ones. */
11393 }
11394 }
11395 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11396 if (shiftc < prec)
11397 {
11398 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11399 zerobits <<= prec - shiftc;
11400 }
11401 /* For an arithmetic shift, if the sign bit could be set, zerobits
11402 can actually contain sign bits, so no transformation is
11403 possible, unless MASK masks them all away. In that
11404 case the shift needs to be converted into a logical shift. */
11405 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11406 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11407 {
11408 if ((mask & zerobits) == 0)
11409 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11410 else
11411 zerobits = 0;
11412 }
11413 }
11414
11415 /* ((X << 16) & 0xff00) is (X, 0). */
11416 if ((mask & zerobits) == mask)
11417 return omit_one_operand_loc (loc, type,
11418 build_int_cst (type, 0), arg0);
11419
11420 newmask = mask | zerobits;
11421 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11422 {
11423 /* Only do the transformation if NEWMASK is some integer
11424 mode's mask. */
11425 for (prec = BITS_PER_UNIT;
11426 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11427 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11428 break;
11429 if (prec < HOST_BITS_PER_WIDE_INT
11430 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11431 {
11432 tree newmaskt;
11433
11434 if (shift_type != TREE_TYPE (arg0))
11435 {
11436 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11437 fold_convert_loc (loc, shift_type,
11438 TREE_OPERAND (arg0, 0)),
11439 TREE_OPERAND (arg0, 1));
11440 tem = fold_convert_loc (loc, type, tem);
11441 }
11442 else
11443 tem = op0;
11444 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11445 if (!tree_int_cst_equal (newmaskt, arg1))
11446 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11447 }
11448 }
11449 }
11450
11451 goto associate;
11452
11453 case RDIV_EXPR:
11454 /* Don't touch a floating-point divide by zero unless the mode
11455 of the constant can represent infinity. */
11456 if (TREE_CODE (arg1) == REAL_CST
11457 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11458 && real_zerop (arg1))
11459 return NULL_TREE;
11460
11461 /* (-A) / (-B) -> A / B */
11462 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11463 return fold_build2_loc (loc, RDIV_EXPR, type,
11464 TREE_OPERAND (arg0, 0),
11465 negate_expr (arg1));
11466 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11467 return fold_build2_loc (loc, RDIV_EXPR, type,
11468 negate_expr (arg0),
11469 TREE_OPERAND (arg1, 0));
11470
11471 /* Convert A/B/C to A/(B*C). */
11472 if (flag_reciprocal_math
11473 && TREE_CODE (arg0) == RDIV_EXPR)
11474 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11475 fold_build2_loc (loc, MULT_EXPR, type,
11476 TREE_OPERAND (arg0, 1), arg1));
11477
11478 /* Convert A/(B/C) to (A/B)*C. */
11479 if (flag_reciprocal_math
11480 && TREE_CODE (arg1) == RDIV_EXPR)
11481 return fold_build2_loc (loc, MULT_EXPR, type,
11482 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11483 TREE_OPERAND (arg1, 0)),
11484 TREE_OPERAND (arg1, 1));
11485
11486 /* Convert C1/(X*C2) into (C1/C2)/X. */
11487 if (flag_reciprocal_math
11488 && TREE_CODE (arg1) == MULT_EXPR
11489 && TREE_CODE (arg0) == REAL_CST
11490 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11491 {
11492 tree tem = const_binop (RDIV_EXPR, arg0,
11493 TREE_OPERAND (arg1, 1));
11494 if (tem)
11495 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11496 TREE_OPERAND (arg1, 0));
11497 }
11498
11499 if (flag_unsafe_math_optimizations)
11500 {
11501 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11502 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11503
11504 /* Optimize sin(x)/cos(x) as tan(x). */
11505 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11506 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11507 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11508 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11509 CALL_EXPR_ARG (arg1, 0), 0))
11510 {
11511 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11512
11513 if (tanfn != NULL_TREE)
11514 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11515 }
11516
11517 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11518 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11519 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11520 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11521 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11522 CALL_EXPR_ARG (arg1, 0), 0))
11523 {
11524 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11525
11526 if (tanfn != NULL_TREE)
11527 {
11528 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11529 CALL_EXPR_ARG (arg0, 0));
11530 return fold_build2_loc (loc, RDIV_EXPR, type,
11531 build_real (type, dconst1), tmp);
11532 }
11533 }
11534
11535 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11536 NaNs or Infinities. */
11537 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11538 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11539 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11540 {
11541 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11542 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11543
11544 if (! HONOR_NANS (element_mode (arg00))
11545 && ! HONOR_INFINITIES (element_mode (arg00))
11546 && operand_equal_p (arg00, arg01, 0))
11547 {
11548 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11549
11550 if (cosfn != NULL_TREE)
11551 return build_call_expr_loc (loc, cosfn, 1, arg00);
11552 }
11553 }
11554
11555 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11556 NaNs or Infinities. */
11557 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11558 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11559 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11560 {
11561 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11562 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11563
11564 if (! HONOR_NANS (element_mode (arg00))
11565 && ! HONOR_INFINITIES (element_mode (arg00))
11566 && operand_equal_p (arg00, arg01, 0))
11567 {
11568 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11569
11570 if (cosfn != NULL_TREE)
11571 {
11572 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11573 return fold_build2_loc (loc, RDIV_EXPR, type,
11574 build_real (type, dconst1),
11575 tmp);
11576 }
11577 }
11578 }
11579
11580 /* Optimize pow(x,c)/x as pow(x,c-1). */
11581 if (fcode0 == BUILT_IN_POW
11582 || fcode0 == BUILT_IN_POWF
11583 || fcode0 == BUILT_IN_POWL)
11584 {
11585 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11586 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11587 if (TREE_CODE (arg01) == REAL_CST
11588 && !TREE_OVERFLOW (arg01)
11589 && operand_equal_p (arg1, arg00, 0))
11590 {
11591 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11592 REAL_VALUE_TYPE c;
11593 tree arg;
11594
11595 c = TREE_REAL_CST (arg01);
11596 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11597 arg = build_real (type, c);
11598 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11599 }
11600 }
11601
11602 /* Optimize a/root(b/c) into a*root(c/b). */
11603 if (BUILTIN_ROOT_P (fcode1))
11604 {
11605 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11606
11607 if (TREE_CODE (rootarg) == RDIV_EXPR)
11608 {
11609 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11610 tree b = TREE_OPERAND (rootarg, 0);
11611 tree c = TREE_OPERAND (rootarg, 1);
11612
11613 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11614
11615 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11616 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11617 }
11618 }
11619
11620 /* Optimize x/expN(y) into x*expN(-y). */
11621 if (BUILTIN_EXPONENT_P (fcode1))
11622 {
11623 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11624 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11625 arg1 = build_call_expr_loc (loc,
11626 expfn, 1,
11627 fold_convert_loc (loc, type, arg));
11628 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11629 }
11630
11631 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11632 if (fcode1 == BUILT_IN_POW
11633 || fcode1 == BUILT_IN_POWF
11634 || fcode1 == BUILT_IN_POWL)
11635 {
11636 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11637 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11638 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11639 tree neg11 = fold_convert_loc (loc, type,
11640 negate_expr (arg11));
11641 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11642 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11643 }
11644 }
11645 return NULL_TREE;
11646
11647 case TRUNC_DIV_EXPR:
11648 /* Optimize (X & (-A)) / A where A is a power of 2,
11649 to X >> log2(A) */
11650 if (TREE_CODE (arg0) == BIT_AND_EXPR
11651 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11652 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11653 {
11654 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11655 arg1, TREE_OPERAND (arg0, 1));
11656 if (sum && integer_zerop (sum))
11657 {
11658 tree pow2 = build_int_cst (integer_type_node, wi::exact_log2 (arg1));
11659 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11660 TREE_OPERAND (arg0, 0), pow2);
11661 }
11662 }
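/* Example (signed type, illustrative values): for A = 8 the zero
   sum confirms the mask is -A, and (X & -8) / 8 becomes X >> 3;
   e.g. X = 20 gives (20 & -8) / 8 = 16 / 8 = 2, matching 20 >> 3.  */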
11663
11664 /* Fall through */
11665
11666 case FLOOR_DIV_EXPR:
11667 /* Simplify A / (B << N) where A and B are positive and B is
11668 a power of 2, to A >> (N + log2(B)). */
11669 strict_overflow_p = false;
11670 if (TREE_CODE (arg1) == LSHIFT_EXPR
11671 && (TYPE_UNSIGNED (type)
11672 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11673 {
11674 tree sval = TREE_OPERAND (arg1, 0);
11675 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11676 {
11677 tree sh_cnt = TREE_OPERAND (arg1, 1);
11678 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11679 wi::exact_log2 (sval));
11680
11681 if (strict_overflow_p)
11682 fold_overflow_warning (("assuming signed overflow does not "
11683 "occur when simplifying A / (B << N)"),
11684 WARN_STRICT_OVERFLOW_MISC);
11685
11686 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11687 sh_cnt, pow2);
11688 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11689 fold_convert_loc (loc, type, arg0), sh_cnt);
11690 }
11691 }
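/* E.g. for unsigned A, A / (2 << 3) rewrites to
   A >> (3 + log2(2)) = A >> 4, i.e. division by 16.  */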
11692
11693 /* Fall through */
11694
11695 case ROUND_DIV_EXPR:
11696 case CEIL_DIV_EXPR:
11697 case EXACT_DIV_EXPR:
11698 if (integer_zerop (arg1))
11699 return NULL_TREE;
11700
11701 /* Convert -A / -B to A / B when the type is signed and overflow is
11702 undefined. */
11703 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11704 && TREE_CODE (arg0) == NEGATE_EXPR
11705 && negate_expr_p (arg1))
11706 {
11707 if (INTEGRAL_TYPE_P (type))
11708 fold_overflow_warning (("assuming signed overflow does not occur "
11709 "when distributing negation across "
11710 "division"),
11711 WARN_STRICT_OVERFLOW_MISC);
11712 return fold_build2_loc (loc, code, type,
11713 fold_convert_loc (loc, type,
11714 TREE_OPERAND (arg0, 0)),
11715 fold_convert_loc (loc, type,
11716 negate_expr (arg1)));
11717 }
11718 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11719 && TREE_CODE (arg1) == NEGATE_EXPR
11720 && negate_expr_p (arg0))
11721 {
11722 if (INTEGRAL_TYPE_P (type))
11723 fold_overflow_warning (("assuming signed overflow does not occur "
11724 "when distributing negation across "
11725 "division"),
11726 WARN_STRICT_OVERFLOW_MISC);
11727 return fold_build2_loc (loc, code, type,
11728 fold_convert_loc (loc, type,
11729 negate_expr (arg0)),
11730 fold_convert_loc (loc, type,
11731 TREE_OPERAND (arg1, 0)));
11732 }
11733
11734 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11735 operation, EXACT_DIV_EXPR.
11736
11737 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11738 At one time others generated faster code, but it's not clear whether they
11739 still do after the last round of changes to the DIV code in expmed.c. */
11740 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11741 && multiple_of_p (type, arg0, arg1))
11742 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11743
11744 strict_overflow_p = false;
11745 if (TREE_CODE (arg1) == INTEGER_CST
11746 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11747 &strict_overflow_p)))
11748 {
11749 if (strict_overflow_p)
11750 fold_overflow_warning (("assuming signed overflow does not occur "
11751 "when simplifying division"),
11752 WARN_STRICT_OVERFLOW_MISC);
11753 return fold_convert_loc (loc, type, tem);
11754 }
11755
11756 return NULL_TREE;
11757
11758 case CEIL_MOD_EXPR:
11759 case FLOOR_MOD_EXPR:
11760 case ROUND_MOD_EXPR:
11761 case TRUNC_MOD_EXPR:
11762 /* X % -Y is the same as X % Y. */
11763 if (code == TRUNC_MOD_EXPR
11764 && !TYPE_UNSIGNED (type)
11765 && TREE_CODE (arg1) == NEGATE_EXPR
11766 && !TYPE_OVERFLOW_TRAPS (type))
11767 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11768 fold_convert_loc (loc, type,
11769 TREE_OPERAND (arg1, 0)));
11770
11771 strict_overflow_p = false;
11772 if (TREE_CODE (arg1) == INTEGER_CST
11773 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11774 &strict_overflow_p)))
11775 {
11776 if (strict_overflow_p)
11777 fold_overflow_warning (("assuming signed overflow does not occur "
11778 "when simplifying modulus"),
11779 WARN_STRICT_OVERFLOW_MISC);
11780 return fold_convert_loc (loc, type, tem);
11781 }
11782
11783 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11784 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11785 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11786 && (TYPE_UNSIGNED (type)
11787 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11788 {
11789 tree c = arg1;
11790 /* Also optimize A % (C << N) where C is a power of 2,
11791 to A & ((C << N) - 1). */
11792 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11793 c = TREE_OPERAND (arg1, 0);
11794
11795 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11796 {
11797 tree mask
11798 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11799 build_int_cst (TREE_TYPE (arg1), 1));
11800 if (strict_overflow_p)
11801 fold_overflow_warning (("assuming signed overflow does not "
11802 "occur when simplifying "
11803 "X % (power of two)"),
11804 WARN_STRICT_OVERFLOW_MISC);
11805 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11806 fold_convert_loc (loc, type, arg0),
11807 fold_convert_loc (loc, type, mask));
11808 }
11809 }
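/* E.g. unsigned X % 8 becomes X & 7, and X % (4 << N) becomes
   X & ((4 << N) - 1), since both divisors are powers of two.  */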
11810
11811 return NULL_TREE;
11812
11813 case LROTATE_EXPR:
11814 case RROTATE_EXPR:
11815 case RSHIFT_EXPR:
11816 case LSHIFT_EXPR:
11817 /* Since a negative shift count is not well-defined,
11818 don't try to compute it in the compiler. */
11819 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11820 return NULL_TREE;
11821
11822 prec = element_precision (type);
11823
11824 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11825 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11826 && tree_to_uhwi (arg1) < prec
11827 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11828 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11829 {
11830 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11831 + tree_to_uhwi (arg1));
11832
11833 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11834 being well defined. */
11835 if (low >= prec)
11836 {
11837 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11838 low = low % prec;
11839 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11840 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11841 TREE_OPERAND (arg0, 0));
11842 else
11843 low = prec - 1;
11844 }
11845
11846 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11847 build_int_cst (TREE_TYPE (arg1), low));
11848 }
11849
11850 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11851 into x & ((unsigned)-1 >> c) for unsigned types. */
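       /* E.g. for 32-bit x, "(x >> 4) << 4" becomes "x & 0xfffffff0" and,
	  if x is unsigned, "(x << 4) >> 4" becomes "x & 0x0fffffff".  */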
11852 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11853 || (TYPE_UNSIGNED (type)
11854 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11855 && tree_fits_uhwi_p (arg1)
11856 && tree_to_uhwi (arg1) < prec
11857 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11858 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11859 {
11860 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11861 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
11862 tree lshift;
11863 tree arg00;
11864
11865 if (low0 == low1)
11866 {
11867 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11868
11869 lshift = build_minus_one_cst (type);
11870 lshift = const_binop (code, lshift, arg1);
11871
11872 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11873 }
11874 }
11875
11876 /* If we have a rotate of a bit operation with the rotate count and
11877 the second operand of the bit operation both constant,
11878 permute the two operations. */
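       /* E.g. for 32-bit x, "(x & 0xff) ror 8" becomes
	  "(x ror 8) & 0xff000000".  */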
11879 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11880 && (TREE_CODE (arg0) == BIT_AND_EXPR
11881 || TREE_CODE (arg0) == BIT_IOR_EXPR
11882 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11883 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11884 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11885 fold_build2_loc (loc, code, type,
11886 TREE_OPERAND (arg0, 0), arg1),
11887 fold_build2_loc (loc, code, type,
11888 TREE_OPERAND (arg0, 1), arg1));
11889
 11890	      /* Two consecutive rotates adding up to some integer
11891 multiple of the precision of the type can be ignored. */
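       /* E.g. for 32-bit x, "ror (ror (x, 8), 24)" is just x.  */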
11892 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11893 && TREE_CODE (arg0) == RROTATE_EXPR
11894 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11895 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
11896 prec) == 0)
11897 return TREE_OPERAND (arg0, 0);
11898
11899 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11900 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11901 if the latter can be further optimized. */
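       /* E.g. "((x >> 8) & 0xff) << 8" becomes "x & 0xff00".  */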
11902 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11903 && TREE_CODE (arg0) == BIT_AND_EXPR
11904 && TREE_CODE (arg1) == INTEGER_CST
11905 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11906 {
11907 tree mask = fold_build2_loc (loc, code, type,
11908 fold_convert_loc (loc, type,
11909 TREE_OPERAND (arg0, 1)),
11910 arg1);
11911 tree shift = fold_build2_loc (loc, code, type,
11912 fold_convert_loc (loc, type,
11913 TREE_OPERAND (arg0, 0)),
11914 arg1);
11915 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11916 if (tem)
11917 return tem;
11918 }
11919
11920 return NULL_TREE;
11921
11922 case MIN_EXPR:
11923 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11924 if (tem)
11925 return tem;
11926 goto associate;
11927
11928 case MAX_EXPR:
11929 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11930 if (tem)
11931 return tem;
11932 goto associate;
11933
11934 case TRUTH_ANDIF_EXPR:
11935 /* Note that the operands of this must be ints
11936 and their values must be 0 or 1.
11937 ("true" is a fixed value perhaps depending on the language.) */
11938 /* If first arg is constant zero, return it. */
11939 if (integer_zerop (arg0))
11940 return fold_convert_loc (loc, type, arg0);
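      /* ... fall through ... */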
11941 case TRUTH_AND_EXPR:
11942 /* If either arg is constant true, drop it. */
11943 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11944 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11945 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11946 /* Preserve sequence points. */
11947 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11948 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11949 /* If second arg is constant zero, result is zero, but first arg
11950 must be evaluated. */
11951 if (integer_zerop (arg1))
11952 return omit_one_operand_loc (loc, type, arg1, arg0);
11953 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11954 case will be handled here. */
11955 if (integer_zerop (arg0))
11956 return omit_one_operand_loc (loc, type, arg0, arg1);
11957
11958 /* !X && X is always false. */
11959 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11960 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11961 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11962 /* X && !X is always false. */
11963 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11964 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11965 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11966
11967 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11968 means A >= Y && A != MAX, but in this case we know that
11969 A < X <= MAX. */
11970
11971 if (!TREE_SIDE_EFFECTS (arg0)
11972 && !TREE_SIDE_EFFECTS (arg1))
11973 {
11974 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11975 if (tem && !operand_equal_p (tem, arg0, 0))
11976 return fold_build2_loc (loc, code, type, tem, arg1);
11977
11978 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11979 if (tem && !operand_equal_p (tem, arg1, 0))
11980 return fold_build2_loc (loc, code, type, arg0, tem);
11981 }
11982
11983 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11984 != NULL_TREE)
11985 return tem;
11986
11987 return NULL_TREE;
11988
11989 case TRUTH_ORIF_EXPR:
11990 /* Note that the operands of this must be ints
11991 and their values must be 0 or true.
11992 ("true" is a fixed value perhaps depending on the language.) */
11993 /* If first arg is constant true, return it. */
11994 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11995 return fold_convert_loc (loc, type, arg0);
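      /* ... fall through ... */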
11996 case TRUTH_OR_EXPR:
11997 /* If either arg is constant zero, drop it. */
11998 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11999 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12000 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12001 /* Preserve sequence points. */
12002 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12003 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12004 /* If second arg is constant true, result is true, but we must
12005 evaluate first arg. */
12006 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12007 return omit_one_operand_loc (loc, type, arg1, arg0);
12008 /* Likewise for first arg, but note this only occurs here for
12009 TRUTH_OR_EXPR. */
12010 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12011 return omit_one_operand_loc (loc, type, arg0, arg1);
12012
12013 /* !X || X is always true. */
12014 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12015 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12016 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12017 /* X || !X is always true. */
12018 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12019 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12020 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12021
12022 /* (X && !Y) || (!X && Y) is X ^ Y */
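      /* Likewise "(X && Y) || (!X && !Y)" folds to "X ^ !Y"; both
	 pairings of the negated operands are checked below.  */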
12023 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12024 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12025 {
12026 tree a0, a1, l0, l1, n0, n1;
12027
12028 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12029 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12030
12031 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12032 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12033
12034 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12035 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12036
12037 if ((operand_equal_p (n0, a0, 0)
12038 && operand_equal_p (n1, a1, 0))
12039 || (operand_equal_p (n0, a1, 0)
12040 && operand_equal_p (n1, a0, 0)))
12041 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12042 }
12043
12044 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12045 != NULL_TREE)
12046 return tem;
12047
12048 return NULL_TREE;
12049
12050 case TRUTH_XOR_EXPR:
12051 /* If the second arg is constant zero, drop it. */
12052 if (integer_zerop (arg1))
12053 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12054 /* If the second arg is constant true, this is a logical inversion. */
12055 if (integer_onep (arg1))
12056 {
12057 tem = invert_truthvalue_loc (loc, arg0);
12058 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12059 }
12060 /* Identical arguments cancel to zero. */
12061 if (operand_equal_p (arg0, arg1, 0))
12062 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12063
12064 /* !X ^ X is always true. */
12065 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12066 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12067 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12068
12069 /* X ^ !X is always true. */
12070 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12071 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12072 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12073
12074 return NULL_TREE;
12075
12076 case EQ_EXPR:
12077 case NE_EXPR:
12078 STRIP_NOPS (arg0);
12079 STRIP_NOPS (arg1);
12080
12081 tem = fold_comparison (loc, code, type, op0, op1);
12082 if (tem != NULL_TREE)
12083 return tem;
12084
12085 /* bool_var != 0 becomes bool_var. */
12086 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12087 && code == NE_EXPR)
12088 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12089
12090 /* bool_var == 1 becomes bool_var. */
12091 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12092 && code == EQ_EXPR)
12093 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12094
12095 /* bool_var != 1 becomes !bool_var. */
12096 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12097 && code == NE_EXPR)
12098 return fold_convert_loc (loc, type,
12099 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12100 TREE_TYPE (arg0), arg0));
12101
12102 /* bool_var == 0 becomes !bool_var. */
12103 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12104 && code == EQ_EXPR)
12105 return fold_convert_loc (loc, type,
12106 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12107 TREE_TYPE (arg0), arg0));
12108
12109 /* !exp != 0 becomes !exp */
12110 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12111 && code == NE_EXPR)
12112 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12113
12114 /* If this is an equality comparison of the address of two non-weak,
12115 unaliased symbols neither of which are extern (since we do not
12116 have access to attributes for externs), then we know the result. */
12117 if (TREE_CODE (arg0) == ADDR_EXPR
12118 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12119 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12120 && ! lookup_attribute ("alias",
12121 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12122 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12123 && TREE_CODE (arg1) == ADDR_EXPR
12124 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12125 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12126 && ! lookup_attribute ("alias",
12127 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12128 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12129 {
12130 /* We know that we're looking at the address of two
12131 non-weak, unaliased, static _DECL nodes.
12132
12133 It is both wasteful and incorrect to call operand_equal_p
12134 to compare the two ADDR_EXPR nodes. It is wasteful in that
12135 all we need to do is test pointer equality for the arguments
12136 to the two ADDR_EXPR nodes. It is incorrect to use
12137 operand_equal_p as that function is NOT equivalent to a
12138 C equality test. It can in fact return false for two
12139 objects which would test as equal using the C equality
12140 operator. */
12141 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12142 return constant_boolean_node (equal
12143 ? code == EQ_EXPR : code != EQ_EXPR,
12144 type);
12145 }
12146
12147 /* Similarly for a NEGATE_EXPR. */
12148 if (TREE_CODE (arg0) == NEGATE_EXPR
12149 && TREE_CODE (arg1) == INTEGER_CST
12150 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12151 arg1)))
12152 && TREE_CODE (tem) == INTEGER_CST
12153 && !TREE_OVERFLOW (tem))
12154 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12155
12156 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
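      /* E.g. "(x ^ 5) == 3" becomes "x == 6".  */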
12157 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12158 && TREE_CODE (arg1) == INTEGER_CST
12159 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12160 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12161 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12162 fold_convert_loc (loc,
12163 TREE_TYPE (arg0),
12164 arg1),
12165 TREE_OPERAND (arg0, 1)));
12166
12167 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12168 if ((TREE_CODE (arg0) == PLUS_EXPR
12169 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12170 || TREE_CODE (arg0) == MINUS_EXPR)
12171 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12172 0)),
12173 arg1, 0)
12174 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12175 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12176 {
12177 tree val = TREE_OPERAND (arg0, 1);
12178 return omit_two_operands_loc (loc, type,
12179 fold_build2_loc (loc, code, type,
12180 val,
12181 build_int_cst (TREE_TYPE (val),
12182 0)),
12183 TREE_OPERAND (arg0, 0), arg1);
12184 }
12185
 12186	      /* Transform comparisons of the form C - X CMP X if C % 2 == 1:
		 C - X == X would require 2*X == C, which no integer X can
		 satisfy when C is odd, so eq/ne folds to a constant.  */
12187 if (TREE_CODE (arg0) == MINUS_EXPR
12188 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12189 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12190 1)),
12191 arg1, 0)
12192 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12193 {
12194 return omit_two_operands_loc (loc, type,
12195 code == NE_EXPR
12196 ? boolean_true_node : boolean_false_node,
12197 TREE_OPERAND (arg0, 1), arg1);
12198 }
12199
12200 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12201 if (TREE_CODE (arg0) == ABS_EXPR
12202 && (integer_zerop (arg1) || real_zerop (arg1)))
12203 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12204
12205 /* If this is an EQ or NE comparison with zero and ARG0 is
12206 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12207 two operations, but the latter can be done in one less insn
12208 on machines that have only two-operand insns or on which a
12209 constant cannot be the first operand. */
12210 if (TREE_CODE (arg0) == BIT_AND_EXPR
12211 && integer_zerop (arg1))
12212 {
12213 tree arg00 = TREE_OPERAND (arg0, 0);
12214 tree arg01 = TREE_OPERAND (arg0, 1);
12215 if (TREE_CODE (arg00) == LSHIFT_EXPR
12216 && integer_onep (TREE_OPERAND (arg00, 0)))
12217 {
12218 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12219 arg01, TREE_OPERAND (arg00, 1));
12220 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12221 build_int_cst (TREE_TYPE (arg0), 1));
12222 return fold_build2_loc (loc, code, type,
12223 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12224 arg1);
12225 }
12226 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12227 && integer_onep (TREE_OPERAND (arg01, 0)))
12228 {
12229 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12230 arg00, TREE_OPERAND (arg01, 1));
12231 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12232 build_int_cst (TREE_TYPE (arg0), 1));
12233 return fold_build2_loc (loc, code, type,
12234 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12235 arg1);
12236 }
12237 }
12238
12239 /* If this is an NE or EQ comparison of zero against the result of a
12240 signed MOD operation whose second operand is a power of 2, make
12241 the MOD operation unsigned since it is simpler and equivalent. */
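      /* E.g. for signed x, "x % 16 == 0" becomes "(unsigned) x % 16 == 0",
	 and the unsigned modulus then folds to "((unsigned) x & 15) == 0"
	 by the power-of-two transformation above.  */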
12242 if (integer_zerop (arg1)
12243 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12244 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12245 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12246 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12247 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12248 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12249 {
12250 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12251 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12252 fold_convert_loc (loc, newtype,
12253 TREE_OPERAND (arg0, 0)),
12254 fold_convert_loc (loc, newtype,
12255 TREE_OPERAND (arg0, 1)));
12256
12257 return fold_build2_loc (loc, code, type, newmod,
12258 fold_convert_loc (loc, newtype, arg1));
12259 }
12260
12261 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12262 C1 is a valid shift constant, and C2 is a power of two, i.e.
12263 a single bit. */
12264 if (TREE_CODE (arg0) == BIT_AND_EXPR
12265 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12266 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12267 == INTEGER_CST
12268 && integer_pow2p (TREE_OPERAND (arg0, 1))
12269 && integer_zerop (arg1))
12270 {
12271 tree itype = TREE_TYPE (arg0);
12272 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12273 prec = TYPE_PRECISION (itype);
12274
12275 /* Check for a valid shift count. */
12276 if (wi::ltu_p (arg001, prec))
12277 {
12278 tree arg01 = TREE_OPERAND (arg0, 1);
12279 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12280 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12281 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12282 can be rewritten as (X & (C2 << C1)) != 0. */
12283 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12284 {
12285 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12286 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12287 return fold_build2_loc (loc, code, type, tem,
12288 fold_convert_loc (loc, itype, arg1));
12289 }
12290 /* Otherwise, for signed (arithmetic) shifts,
12291 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12292 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12293 else if (!TYPE_UNSIGNED (itype))
12294 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12295 arg000, build_int_cst (itype, 0));
 12296		  /* Otherwise, for unsigned (logical) shifts,
12297 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12298 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12299 else
12300 return omit_one_operand_loc (loc, type,
12301 code == EQ_EXPR ? integer_one_node
12302 : integer_zero_node,
12303 arg000);
12304 }
12305 }
12306
12307 /* If we have (A & C) == C where C is a power of 2, convert this into
12308 (A & C) != 0. Similarly for NE_EXPR. */
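      /* E.g. "(x & 8) == 8" becomes "(x & 8) != 0", since x & 8
	 is either 0 or 8.  */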
12309 if (TREE_CODE (arg0) == BIT_AND_EXPR
12310 && integer_pow2p (TREE_OPERAND (arg0, 1))
12311 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12312 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12313 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12314 integer_zero_node));
12315
12316 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12317 bit, then fold the expression into A < 0 or A >= 0. */
12318 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12319 if (tem)
12320 return tem;
12321
12322 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12323 Similarly for NE_EXPR. */
12324 if (TREE_CODE (arg0) == BIT_AND_EXPR
12325 && TREE_CODE (arg1) == INTEGER_CST
12326 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12327 {
12328 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12329 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12330 TREE_OPERAND (arg0, 1));
12331 tree dandnotc
12332 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12333 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12334 notc);
12335 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12336 if (integer_nonzerop (dandnotc))
12337 return omit_one_operand_loc (loc, type, rslt, arg0);
12338 }
12339
12340 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12341 Similarly for NE_EXPR. */
12342 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12343 && TREE_CODE (arg1) == INTEGER_CST
12344 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12345 {
12346 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12347 tree candnotd
12348 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12349 TREE_OPERAND (arg0, 1),
12350 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12351 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12352 if (integer_nonzerop (candnotd))
12353 return omit_one_operand_loc (loc, type, rslt, arg0);
12354 }
12355
12356 /* If this is a comparison of a field, we may be able to simplify it. */
12357 if ((TREE_CODE (arg0) == COMPONENT_REF
12358 || TREE_CODE (arg0) == BIT_FIELD_REF)
12359 /* Handle the constant case even without -O
12360 to make sure the warnings are given. */
12361 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12362 {
12363 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12364 if (t1)
12365 return t1;
12366 }
12367
12368 /* Optimize comparisons of strlen vs zero to a compare of the
12369 first character of the string vs zero. To wit,
12370 strlen(ptr) == 0 => *ptr == 0
12371 strlen(ptr) != 0 => *ptr != 0
12372 Other cases should reduce to one of these two (or a constant)
12373 due to the return value of strlen being unsigned. */
12374 if (TREE_CODE (arg0) == CALL_EXPR
12375 && integer_zerop (arg1))
12376 {
12377 tree fndecl = get_callee_fndecl (arg0);
12378
12379 if (fndecl
12380 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12381 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12382 && call_expr_nargs (arg0) == 1
12383 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12384 {
12385 tree iref = build_fold_indirect_ref_loc (loc,
12386 CALL_EXPR_ARG (arg0, 0));
12387 return fold_build2_loc (loc, code, type, iref,
12388 build_int_cst (TREE_TYPE (iref), 0));
12389 }
12390 }
12391
12392 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12393 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12394 if (TREE_CODE (arg0) == RSHIFT_EXPR
12395 && integer_zerop (arg1)
12396 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12397 {
12398 tree arg00 = TREE_OPERAND (arg0, 0);
12399 tree arg01 = TREE_OPERAND (arg0, 1);
12400 tree itype = TREE_TYPE (arg00);
12401 if (wi::eq_p (arg01, element_precision (itype) - 1))
12402 {
12403 if (TYPE_UNSIGNED (itype))
12404 {
12405 itype = signed_type_for (itype);
12406 arg00 = fold_convert_loc (loc, itype, arg00);
12407 }
12408 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12409 type, arg00, build_zero_cst (itype));
12410 }
12411 }
12412
12413 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12414 if (integer_zerop (arg1)
12415 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12416 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12417 TREE_OPERAND (arg0, 1));
12418
12419 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12420 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12421 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12422 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12423 build_zero_cst (TREE_TYPE (arg0)));
12424 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12425 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12426 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12427 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12428 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12429 build_zero_cst (TREE_TYPE (arg0)));
12430
12431 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12432 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12433 && TREE_CODE (arg1) == INTEGER_CST
12434 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12435 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12436 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12437 TREE_OPERAND (arg0, 1), arg1));
12438
12439 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12440 (X & C) == 0 when C is a single bit. */
12441 if (TREE_CODE (arg0) == BIT_AND_EXPR
12442 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12443 && integer_zerop (arg1)
12444 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12445 {
12446 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12447 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12448 TREE_OPERAND (arg0, 1));
12449 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12450 type, tem,
12451 fold_convert_loc (loc, TREE_TYPE (arg0),
12452 arg1));
12453 }
12454
12455 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12456 constant C is a power of two, i.e. a single bit. */
12457 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12458 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12459 && integer_zerop (arg1)
12460 && integer_pow2p (TREE_OPERAND (arg0, 1))
12461 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12462 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12463 {
12464 tree arg00 = TREE_OPERAND (arg0, 0);
12465 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12466 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12467 }
12468
12469 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 12470	 when C is a power of two, i.e. a single bit.  */
12471 if (TREE_CODE (arg0) == BIT_AND_EXPR
12472 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12473 && integer_zerop (arg1)
12474 && integer_pow2p (TREE_OPERAND (arg0, 1))
12475 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12476 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12477 {
12478 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12479 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12480 arg000, TREE_OPERAND (arg0, 1));
12481 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12482 tem, build_int_cst (TREE_TYPE (tem), 0));
12483 }
12484
12485 if (integer_zerop (arg1)
12486 && tree_expr_nonzero_p (arg0))
12487 {
 12488	  tree res = constant_boolean_node (code == NE_EXPR, type);
12489 return omit_one_operand_loc (loc, type, res, arg0);
12490 }
12491
12492 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12493 if (TREE_CODE (arg0) == NEGATE_EXPR
12494 && TREE_CODE (arg1) == NEGATE_EXPR)
12495 return fold_build2_loc (loc, code, type,
12496 TREE_OPERAND (arg0, 0),
12497 fold_convert_loc (loc, TREE_TYPE (arg0),
12498 TREE_OPERAND (arg1, 0)));
12499
 12500       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
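      /* E.g. "(x & m) == (y & m)" becomes "((x ^ y) & m) == 0".  */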
12501 if (TREE_CODE (arg0) == BIT_AND_EXPR
12502 && TREE_CODE (arg1) == BIT_AND_EXPR)
12503 {
12504 tree arg00 = TREE_OPERAND (arg0, 0);
12505 tree arg01 = TREE_OPERAND (arg0, 1);
12506 tree arg10 = TREE_OPERAND (arg1, 0);
12507 tree arg11 = TREE_OPERAND (arg1, 1);
12508 tree itype = TREE_TYPE (arg0);
12509
12510 if (operand_equal_p (arg01, arg11, 0))
12511 return fold_build2_loc (loc, code, type,
12512 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12513 fold_build2_loc (loc,
12514 BIT_XOR_EXPR, itype,
12515 arg00, arg10),
12516 arg01),
12517 build_zero_cst (itype));
12518
12519 if (operand_equal_p (arg01, arg10, 0))
12520 return fold_build2_loc (loc, code, type,
12521 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12522 fold_build2_loc (loc,
12523 BIT_XOR_EXPR, itype,
12524 arg00, arg11),
12525 arg01),
12526 build_zero_cst (itype));
12527
12528 if (operand_equal_p (arg00, arg11, 0))
12529 return fold_build2_loc (loc, code, type,
12530 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12531 fold_build2_loc (loc,
12532 BIT_XOR_EXPR, itype,
12533 arg01, arg10),
12534 arg00),
12535 build_zero_cst (itype));
12536
12537 if (operand_equal_p (arg00, arg10, 0))
12538 return fold_build2_loc (loc, code, type,
12539 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12540 fold_build2_loc (loc,
12541 BIT_XOR_EXPR, itype,
12542 arg01, arg11),
12543 arg00),
12544 build_zero_cst (itype));
12545 }
12546
12547 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12548 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12549 {
12550 tree arg00 = TREE_OPERAND (arg0, 0);
12551 tree arg01 = TREE_OPERAND (arg0, 1);
12552 tree arg10 = TREE_OPERAND (arg1, 0);
12553 tree arg11 = TREE_OPERAND (arg1, 1);
12554 tree itype = TREE_TYPE (arg0);
12555
12556 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12557 operand_equal_p guarantees no side-effects so we don't need
12558 to use omit_one_operand on Z. */
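	  /* E.g. "(x ^ z) == (y ^ z)" becomes "x == y", since XOR
	     with a fixed Z is a bijection.  */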
12559 if (operand_equal_p (arg01, arg11, 0))
12560 return fold_build2_loc (loc, code, type, arg00,
12561 fold_convert_loc (loc, TREE_TYPE (arg00),
12562 arg10));
12563 if (operand_equal_p (arg01, arg10, 0))
12564 return fold_build2_loc (loc, code, type, arg00,
12565 fold_convert_loc (loc, TREE_TYPE (arg00),
12566 arg11));
12567 if (operand_equal_p (arg00, arg11, 0))
12568 return fold_build2_loc (loc, code, type, arg01,
12569 fold_convert_loc (loc, TREE_TYPE (arg01),
12570 arg10));
12571 if (operand_equal_p (arg00, arg10, 0))
12572 return fold_build2_loc (loc, code, type, arg01,
12573 fold_convert_loc (loc, TREE_TYPE (arg01),
12574 arg11));
12575
12576 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12577 if (TREE_CODE (arg01) == INTEGER_CST
12578 && TREE_CODE (arg11) == INTEGER_CST)
12579 {
12580 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12581 fold_convert_loc (loc, itype, arg11));
12582 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12583 return fold_build2_loc (loc, code, type, tem,
12584 fold_convert_loc (loc, itype, arg10));
12585 }
12586 }
12587
12588 /* Attempt to simplify equality/inequality comparisons of complex
12589 values. Only lower the comparison if the result is known or
12590 can be simplified to a single scalar comparison. */
12591 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12592 || TREE_CODE (arg0) == COMPLEX_CST)
12593 && (TREE_CODE (arg1) == COMPLEX_EXPR
12594 || TREE_CODE (arg1) == COMPLEX_CST))
12595 {
12596 tree real0, imag0, real1, imag1;
12597 tree rcond, icond;
12598
12599 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12600 {
12601 real0 = TREE_OPERAND (arg0, 0);
12602 imag0 = TREE_OPERAND (arg0, 1);
12603 }
12604 else
12605 {
12606 real0 = TREE_REALPART (arg0);
12607 imag0 = TREE_IMAGPART (arg0);
12608 }
12609
12610 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12611 {
12612 real1 = TREE_OPERAND (arg1, 0);
12613 imag1 = TREE_OPERAND (arg1, 1);
12614 }
12615 else
12616 {
12617 real1 = TREE_REALPART (arg1);
12618 imag1 = TREE_IMAGPART (arg1);
12619 }
12620
12621 rcond = fold_binary_loc (loc, code, type, real0, real1);
12622 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12623 {
12624 if (integer_zerop (rcond))
12625 {
12626 if (code == EQ_EXPR)
12627 return omit_two_operands_loc (loc, type, boolean_false_node,
12628 imag0, imag1);
12629 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12630 }
12631 else
12632 {
12633 if (code == NE_EXPR)
12634 return omit_two_operands_loc (loc, type, boolean_true_node,
12635 imag0, imag1);
12636 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12637 }
12638 }
12639
12640 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12641 if (icond && TREE_CODE (icond) == INTEGER_CST)
12642 {
12643 if (integer_zerop (icond))
12644 {
12645 if (code == EQ_EXPR)
12646 return omit_two_operands_loc (loc, type, boolean_false_node,
12647 real0, real1);
12648 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12649 }
12650 else
12651 {
12652 if (code == NE_EXPR)
12653 return omit_two_operands_loc (loc, type, boolean_true_node,
12654 real0, real1);
12655 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12656 }
12657 }
12658 }
12659
12660 return NULL_TREE;
12661
12662 case LT_EXPR:
12663 case GT_EXPR:
12664 case LE_EXPR:
12665 case GE_EXPR:
12666 tem = fold_comparison (loc, code, type, op0, op1);
12667 if (tem != NULL_TREE)
12668 return tem;
12669
12670 /* Transform comparisons of the form X +- C CMP X. */
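      /* E.g. when signed overflow is undefined, "x + 1 > x" folds to
	 true (with a -Wstrict-overflow warning if enabled).  */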
12671 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12672 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12673 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12674 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12675 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12676 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12677 {
12678 tree arg01 = TREE_OPERAND (arg0, 1);
12679 enum tree_code code0 = TREE_CODE (arg0);
12680 int is_positive;
12681
12682 if (TREE_CODE (arg01) == REAL_CST)
12683 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12684 else
12685 is_positive = tree_int_cst_sgn (arg01);
12686
12687 /* (X - c) > X becomes false. */
12688 if (code == GT_EXPR
12689 && ((code0 == MINUS_EXPR && is_positive >= 0)
12690 || (code0 == PLUS_EXPR && is_positive <= 0)))
12691 {
12692 if (TREE_CODE (arg01) == INTEGER_CST
12693 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12694 fold_overflow_warning (("assuming signed overflow does not "
12695 "occur when assuming that (X - c) > X "
12696 "is always false"),
12697 WARN_STRICT_OVERFLOW_ALL);
12698 return constant_boolean_node (0, type);
12699 }
12700
12701 /* Likewise (X + c) < X becomes false. */
12702 if (code == LT_EXPR
12703 && ((code0 == PLUS_EXPR && is_positive >= 0)
12704 || (code0 == MINUS_EXPR && is_positive <= 0)))
12705 {
12706 if (TREE_CODE (arg01) == INTEGER_CST
12707 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12708 fold_overflow_warning (("assuming signed overflow does not "
12709 "occur when assuming that "
12710 "(X + c) < X is always false"),
12711 WARN_STRICT_OVERFLOW_ALL);
12712 return constant_boolean_node (0, type);
12713 }
12714
12715 /* Convert (X - c) <= X to true. */
12716 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12717 && code == LE_EXPR
12718 && ((code0 == MINUS_EXPR && is_positive >= 0)
12719 || (code0 == PLUS_EXPR && is_positive <= 0)))
12720 {
12721 if (TREE_CODE (arg01) == INTEGER_CST
12722 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12723 fold_overflow_warning (("assuming signed overflow does not "
12724 "occur when assuming that "
12725 "(X - c) <= X is always true"),
12726 WARN_STRICT_OVERFLOW_ALL);
12727 return constant_boolean_node (1, type);
12728 }
12729
12730 /* Convert (X + c) >= X to true. */
12731 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12732 && code == GE_EXPR
12733 && ((code0 == PLUS_EXPR && is_positive >= 0)
12734 || (code0 == MINUS_EXPR && is_positive <= 0)))
12735 {
12736 if (TREE_CODE (arg01) == INTEGER_CST
12737 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12738 fold_overflow_warning (("assuming signed overflow does not "
12739 "occur when assuming that "
12740 "(X + c) >= X is always true"),
12741 WARN_STRICT_OVERFLOW_ALL);
12742 return constant_boolean_node (1, type);
12743 }
12744
12745 if (TREE_CODE (arg01) == INTEGER_CST)
12746 {
12747 /* Convert X + c > X and X - c < X to true for integers. */
12748 if (code == GT_EXPR
12749 && ((code0 == PLUS_EXPR && is_positive > 0)
12750 || (code0 == MINUS_EXPR && is_positive < 0)))
12751 {
12752 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12753 fold_overflow_warning (("assuming signed overflow does "
12754 "not occur when assuming that "
12755 "(X + c) > X is always true"),
12756 WARN_STRICT_OVERFLOW_ALL);
12757 return constant_boolean_node (1, type);
12758 }
12759
12760 if (code == LT_EXPR
12761 && ((code0 == MINUS_EXPR && is_positive > 0)
12762 || (code0 == PLUS_EXPR && is_positive < 0)))
12763 {
12764 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12765 fold_overflow_warning (("assuming signed overflow does "
12766 "not occur when assuming that "
12767 "(X - c) < X is always true"),
12768 WARN_STRICT_OVERFLOW_ALL);
12769 return constant_boolean_node (1, type);
12770 }
12771
12772 /* Convert X + c <= X and X - c >= X to false for integers. */
12773 if (code == LE_EXPR
12774 && ((code0 == PLUS_EXPR && is_positive > 0)
12775 || (code0 == MINUS_EXPR && is_positive < 0)))
12776 {
12777 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12778 fold_overflow_warning (("assuming signed overflow does "
12779 "not occur when assuming that "
12780 "(X + c) <= X is always false"),
12781 WARN_STRICT_OVERFLOW_ALL);
12782 return constant_boolean_node (0, type);
12783 }
12784
12785 if (code == GE_EXPR
12786 && ((code0 == MINUS_EXPR && is_positive > 0)
12787 || (code0 == PLUS_EXPR && is_positive < 0)))
12788 {
12789 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12790 fold_overflow_warning (("assuming signed overflow does "
12791 "not occur when assuming that "
12792 "(X - c) >= X is always false"),
12793 WARN_STRICT_OVERFLOW_ALL);
12794 return constant_boolean_node (0, type);
12795 }
12796 }
12797 }
12798
12799 /* Comparisons with the highest or lowest possible integer of
12800 the specified precision will have known values. */
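    /* E.g. for unsigned char x, "x <= 255" folds to true and
       "x > 254" becomes "x == 255".  */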
12801 {
12802 tree arg1_type = TREE_TYPE (arg1);
12803 unsigned int prec = TYPE_PRECISION (arg1_type);
12804
12805 if (TREE_CODE (arg1) == INTEGER_CST
12806 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12807 {
12808 wide_int max = wi::max_value (arg1_type);
12809 wide_int signed_max = wi::max_value (prec, SIGNED);
12810 wide_int min = wi::min_value (arg1_type);
12811
12812 if (wi::eq_p (arg1, max))
12813 switch (code)
12814 {
12815 case GT_EXPR:
12816 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12817
12818 case GE_EXPR:
12819 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12820
12821 case LE_EXPR:
12822 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12823
12824 case LT_EXPR:
12825 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12826
12827 /* The GE_EXPR and LT_EXPR cases above are not normally
12828 reached because of previous transformations. */
12829
12830 default:
12831 break;
12832 }
12833 else if (wi::eq_p (arg1, max - 1))
12834 switch (code)
12835 {
12836 case GT_EXPR:
12837 arg1 = const_binop (PLUS_EXPR, arg1,
12838 build_int_cst (TREE_TYPE (arg1), 1));
12839 return fold_build2_loc (loc, EQ_EXPR, type,
12840 fold_convert_loc (loc,
12841 TREE_TYPE (arg1), arg0),
12842 arg1);
12843 case LE_EXPR:
12844 arg1 = const_binop (PLUS_EXPR, arg1,
12845 build_int_cst (TREE_TYPE (arg1), 1));
12846 return fold_build2_loc (loc, NE_EXPR, type,
12847 fold_convert_loc (loc, TREE_TYPE (arg1),
12848 arg0),
12849 arg1);
12850 default:
12851 break;
12852 }
12853 else if (wi::eq_p (arg1, min))
12854 switch (code)
12855 {
12856 case LT_EXPR:
12857 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12858
12859 case LE_EXPR:
12860 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12861
12862 case GE_EXPR:
12863 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12864
12865 case GT_EXPR:
12866 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12867
12868 default:
12869 break;
12870 }
12871 else if (wi::eq_p (arg1, min + 1))
12872 switch (code)
12873 {
12874 case GE_EXPR:
12875 arg1 = const_binop (MINUS_EXPR, arg1,
12876 build_int_cst (TREE_TYPE (arg1), 1));
12877 return fold_build2_loc (loc, NE_EXPR, type,
12878 fold_convert_loc (loc,
12879 TREE_TYPE (arg1), arg0),
12880 arg1);
12881 case LT_EXPR:
12882 arg1 = const_binop (MINUS_EXPR, arg1,
12883 build_int_cst (TREE_TYPE (arg1), 1));
12884 return fold_build2_loc (loc, EQ_EXPR, type,
12885 fold_convert_loc (loc, TREE_TYPE (arg1),
12886 arg0),
12887 arg1);
12888 default:
12889 break;
12890 }
12891
12892 else if (wi::eq_p (arg1, signed_max)
12893 && TYPE_UNSIGNED (arg1_type)
12894 /* We will flip the signedness of the comparison operator
12895 associated with the mode of arg1, so the sign bit is
12896 specified by this mode. Check that arg1 is the signed
12897 max associated with this sign bit. */
12898 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
12899 /* signed_type does not work on pointer types. */
12900 && INTEGRAL_TYPE_P (arg1_type))
12901 {
12902 /* The following case also applies to X < signed_max+1
 12903		 and X >= signed_max+1 because of previous transformations.  */
12904 if (code == LE_EXPR || code == GT_EXPR)
12905 {
12906 tree st = signed_type_for (arg1_type);
12907 return fold_build2_loc (loc,
12908 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12909 type, fold_convert_loc (loc, st, arg0),
12910 build_int_cst (st, 0));
12911 }
12912 }
12913 }
12914 }
12915
12916 /* If we are comparing an ABS_EXPR with a constant, we can
12917 convert all the cases into explicit comparisons, but they may
12918 well not be faster than doing the ABS and one comparison.
12919 But ABS (X) <= C is a range comparison, which becomes a subtraction
12920 and a comparison, and is probably faster. */
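      /* E.g. "abs (x) <= 5" becomes "x >= -5 && x <= 5".  */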
12921 if (code == LE_EXPR
12922 && TREE_CODE (arg1) == INTEGER_CST
12923 && TREE_CODE (arg0) == ABS_EXPR
12924 && ! TREE_SIDE_EFFECTS (arg0)
12925 && (0 != (tem = negate_expr (arg1)))
12926 && TREE_CODE (tem) == INTEGER_CST
12927 && !TREE_OVERFLOW (tem))
12928 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12929 build2 (GE_EXPR, type,
12930 TREE_OPERAND (arg0, 0), tem),
12931 build2 (LE_EXPR, type,
12932 TREE_OPERAND (arg0, 0), arg1));
12933
12934 /* Convert ABS_EXPR<x> >= 0 to true. */
12935 strict_overflow_p = false;
12936 if (code == GE_EXPR
12937 && (integer_zerop (arg1)
12938 || (! HONOR_NANS (element_mode (arg0))
12939 && real_zerop (arg1)))
12940 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12941 {
12942 if (strict_overflow_p)
12943 fold_overflow_warning (("assuming signed overflow does not occur "
12944 "when simplifying comparison of "
12945 "absolute value and zero"),
12946 WARN_STRICT_OVERFLOW_CONDITIONAL);
12947 return omit_one_operand_loc (loc, type,
12948 constant_boolean_node (true, type),
12949 arg0);
12950 }
12951
12952 /* Convert ABS_EXPR<x> < 0 to false. */
12953 strict_overflow_p = false;
12954 if (code == LT_EXPR
12955 && (integer_zerop (arg1) || real_zerop (arg1))
12956 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12957 {
12958 if (strict_overflow_p)
12959 fold_overflow_warning (("assuming signed overflow does not occur "
12960 "when simplifying comparison of "
12961 "absolute value and zero"),
12962 WARN_STRICT_OVERFLOW_CONDITIONAL);
12963 return omit_one_operand_loc (loc, type,
12964 constant_boolean_node (false, type),
12965 arg0);
12966 }
12967
12968 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12969 and similarly for >= into !=. */
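      /* E.g. for unsigned x, "x < (1 << y)" becomes "(x >> y) == 0".  */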
12970 if ((code == LT_EXPR || code == GE_EXPR)
12971 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12972 && TREE_CODE (arg1) == LSHIFT_EXPR
12973 && integer_onep (TREE_OPERAND (arg1, 0)))
12974 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12975 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12976 TREE_OPERAND (arg1, 1)),
12977 build_zero_cst (TREE_TYPE (arg0)));
12978
 12979       /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
 12980	 otherwise Y might be >= # of bits in X's type and thus e.g.
 12981	 (unsigned char) (1 << Y) for Y == 15 might be 0.
 12982	 If the cast is widening, then 1 << Y should have unsigned type,
 12983	 otherwise if Y is the number of bits in the signed shift type minus 1,
 12984	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y ==
 12985	 31 might be 0xffffffff80000000.  */
12986 if ((code == LT_EXPR || code == GE_EXPR)
12987 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12988 && CONVERT_EXPR_P (arg1)
12989 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12990 && (element_precision (TREE_TYPE (arg1))
12991 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12992 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12993 || (element_precision (TREE_TYPE (arg1))
12994 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12995 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12996 {
12997 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12998 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12999 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13000 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13001 build_zero_cst (TREE_TYPE (arg0)));
13002 }
13003
13004 return NULL_TREE;
13005
13006 case UNORDERED_EXPR:
13007 case ORDERED_EXPR:
13008 case UNLT_EXPR:
13009 case UNLE_EXPR:
13010 case UNGT_EXPR:
13011 case UNGE_EXPR:
13012 case UNEQ_EXPR:
13013 case LTGT_EXPR:
13014 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13015 {
13016 t1 = fold_relational_const (code, type, arg0, arg1);
13017 if (t1 != NULL_TREE)
13018 return t1;
13019 }
13020
13021 /* If the first operand is NaN, the result is constant. */
13022 if (TREE_CODE (arg0) == REAL_CST
13023 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13024 && (code != LTGT_EXPR || ! flag_trapping_math))
13025 {
13026 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13027 ? integer_zero_node
13028 : integer_one_node;
13029 return omit_one_operand_loc (loc, type, t1, arg1);
13030 }
13031
13032 /* If the second operand is NaN, the result is constant. */
13033 if (TREE_CODE (arg1) == REAL_CST
13034 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13035 && (code != LTGT_EXPR || ! flag_trapping_math))
13036 {
13037 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13038 ? integer_zero_node
13039 : integer_one_node;
13040 return omit_one_operand_loc (loc, type, t1, arg0);
13041 }
13042
13043 /* Simplify unordered comparison of something with itself. */
13044 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13045 && operand_equal_p (arg0, arg1, 0))
13046 return constant_boolean_node (1, type);
13047
13048 if (code == LTGT_EXPR
13049 && !flag_trapping_math
13050 && operand_equal_p (arg0, arg1, 0))
13051 return constant_boolean_node (0, type);
13052
13053 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13054 {
13055 tree targ0 = strip_float_extensions (arg0);
13056 tree targ1 = strip_float_extensions (arg1);
13057 tree newtype = TREE_TYPE (targ0);
13058
13059 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13060 newtype = TREE_TYPE (targ1);
13061
13062 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13063 return fold_build2_loc (loc, code, type,
13064 fold_convert_loc (loc, newtype, targ0),
13065 fold_convert_loc (loc, newtype, targ1));
13066 }
13067
13068 return NULL_TREE;
13069
13070 case COMPOUND_EXPR:
13071 /* When pedantic, a compound expression can be neither an lvalue
13072 nor an integer constant expression. */
13073 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13074 return NULL_TREE;
 13075      /* Don't let (0, 0) be a null pointer constant.  */
13076 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13077 : fold_convert_loc (loc, type, arg1);
13078 return pedantic_non_lvalue_loc (loc, tem);
13079
13080 case COMPLEX_EXPR:
13081 if ((TREE_CODE (arg0) == REAL_CST
13082 && TREE_CODE (arg1) == REAL_CST)
13083 || (TREE_CODE (arg0) == INTEGER_CST
13084 && TREE_CODE (arg1) == INTEGER_CST))
13085 return build_complex (type, arg0, arg1);
13086 return NULL_TREE;
13087
13088 case ASSERT_EXPR:
13089 /* An ASSERT_EXPR should never be passed to fold_binary. */
13090 gcc_unreachable ();
13091
13092 case VEC_PACK_TRUNC_EXPR:
13093 case VEC_PACK_FIX_TRUNC_EXPR:
13094 {
13095 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13096 tree *elts;
13097
13098 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13099 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13100 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13101 return NULL_TREE;
13102
13103 elts = XALLOCAVEC (tree, nelts);
13104 if (!vec_cst_ctor_to_array (arg0, elts)
13105 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13106 return NULL_TREE;
13107
13108 for (i = 0; i < nelts; i++)
13109 {
13110 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13111 ? NOP_EXPR : FIX_TRUNC_EXPR,
13112 TREE_TYPE (type), elts[i]);
13113 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13114 return NULL_TREE;
13115 }
13116
13117 return build_vector (type, elts);
13118 }
13119
13120 case VEC_WIDEN_MULT_LO_EXPR:
13121 case VEC_WIDEN_MULT_HI_EXPR:
13122 case VEC_WIDEN_MULT_EVEN_EXPR:
13123 case VEC_WIDEN_MULT_ODD_EXPR:
13124 {
13125 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13126 unsigned int out, ofs, scale;
13127 tree *elts;
13128
13129 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13130 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13131 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13132 return NULL_TREE;
13133
13134 elts = XALLOCAVEC (tree, nelts * 4);
13135 if (!vec_cst_ctor_to_array (arg0, elts)
13136 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13137 return NULL_TREE;
13138
13139 if (code == VEC_WIDEN_MULT_LO_EXPR)
13140 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13141 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13142 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13143 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13144 scale = 1, ofs = 0;
13145 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13146 scale = 1, ofs = 1;
13147
13148 for (out = 0; out < nelts; out++)
13149 {
13150 unsigned int in1 = (out << scale) + ofs;
13151 unsigned int in2 = in1 + nelts * 2;
13152 tree t1, t2;
13153
13154 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13155 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13156
13157 if (t1 == NULL_TREE || t2 == NULL_TREE)
13158 return NULL_TREE;
13159 elts[out] = const_binop (MULT_EXPR, t1, t2);
13160 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13161 return NULL_TREE;
13162 }
13163
13164 return build_vector (type, elts);
13165 }
13166
13167 default:
13168 return NULL_TREE;
13169 } /* switch (code) */
13170 }
13171
13172 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13173 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13174 of GOTO_EXPR. */
13175
13176 static tree
13177 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13178 {
13179 switch (TREE_CODE (*tp))
13180 {
13181 case LABEL_EXPR:
13182 return *tp;
13183
13184 case GOTO_EXPR:
13185 *walk_subtrees = 0;
13186
13187 /* ... fall through ... */
13188
13189 default:
13190 return NULL_TREE;
13191 }
13192 }
13193
13194 /* Return whether the sub-tree ST contains a label which is accessible from
13195 outside the sub-tree. */
13196
13197 static bool
13198 contains_label_p (tree st)
13199 {
13200 return
 13201    (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13202 }
13203
13204 /* Fold a ternary expression of code CODE and type TYPE with operands
13205 OP0, OP1, and OP2. Return the folded expression if folding is
13206 successful. Otherwise, return NULL_TREE. */
13207
13208 tree
13209 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13210 tree op0, tree op1, tree op2)
13211 {
13212 tree tem;
13213 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13214 enum tree_code_class kind = TREE_CODE_CLASS (code);
13215
13216 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13217 && TREE_CODE_LENGTH (code) == 3);
13218
13219 /* If this is a commutative operation, and OP0 is a constant, move it
13220 to OP1 to reduce the number of tests below. */
13221 if (commutative_ternary_tree_code (code)
13222 && tree_swap_operands_p (op0, op1, true))
13223 return fold_build3_loc (loc, code, type, op1, op0, op2);
13224
13225 tem = generic_simplify (loc, code, type, op0, op1, op2);
13226 if (tem)
13227 return tem;
13228
13229 /* Strip any conversions that don't change the mode. This is safe
13230 for every expression, except for a comparison expression because
13231 its signedness is derived from its operands. So, in the latter
13232 case, only strip conversions that don't change the signedness.
13233
13234 Note that this is done as an internal manipulation within the
13235 constant folder, in order to find the simplest representation of
13236 the arguments so that their form can be studied. In any cases,
13237 the appropriate type conversions should be put back in the tree
13238 that will get out of the constant folder. */
13239 if (op0)
13240 {
13241 arg0 = op0;
13242 STRIP_NOPS (arg0);
13243 }
13244
13245 if (op1)
13246 {
13247 arg1 = op1;
13248 STRIP_NOPS (arg1);
13249 }
13250
13251 if (op2)
13252 {
13253 arg2 = op2;
13254 STRIP_NOPS (arg2);
13255 }
13256
13257 switch (code)
13258 {
13259 case COMPONENT_REF:
13260 if (TREE_CODE (arg0) == CONSTRUCTOR
13261 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13262 {
13263 unsigned HOST_WIDE_INT idx;
13264 tree field, value;
13265 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13266 if (field == arg1)
13267 return value;
13268 }
13269 return NULL_TREE;
13270
13271 case COND_EXPR:
13272 case VEC_COND_EXPR:
13273 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13274 so all simple results must be passed through pedantic_non_lvalue. */
13275 if (TREE_CODE (arg0) == INTEGER_CST)
13276 {
13277 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13278 tem = integer_zerop (arg0) ? op2 : op1;
13279 /* Only optimize constant conditions when the selected branch
13280 has the same type as the COND_EXPR. This avoids optimizing
13281 away "c ? x : throw", where the throw has a void type.
 13282	     Avoid throwing away the operand which contains a label.  */
13283 if ((!TREE_SIDE_EFFECTS (unused_op)
13284 || !contains_label_p (unused_op))
13285 && (! VOID_TYPE_P (TREE_TYPE (tem))
13286 || VOID_TYPE_P (type)))
13287 return pedantic_non_lvalue_loc (loc, tem);
13288 return NULL_TREE;
13289 }
13290 else if (TREE_CODE (arg0) == VECTOR_CST)
13291 {
13292 if ((TREE_CODE (arg1) == VECTOR_CST
13293 || TREE_CODE (arg1) == CONSTRUCTOR)
13294 && (TREE_CODE (arg2) == VECTOR_CST
13295 || TREE_CODE (arg2) == CONSTRUCTOR))
13296 {
13297 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13298 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13299 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13300 for (i = 0; i < nelts; i++)
13301 {
13302 tree val = VECTOR_CST_ELT (arg0, i);
13303 if (integer_all_onesp (val))
13304 sel[i] = i;
13305 else if (integer_zerop (val))
13306 sel[i] = nelts + i;
13307 else /* Currently unreachable. */
13308 return NULL_TREE;
13309 }
13310 tree t = fold_vec_perm (type, arg1, arg2, sel);
13311 if (t != NULL_TREE)
13312 return t;
13313 }
13314 }
13315
13316 /* If we have A op B ? A : C, we may be able to convert this to a
13317 simpler expression, depending on the operation and the values
13318 of B and C. Signed zeros prevent all of these transformations,
13319 for reasons given above each one.
13320
13321 Also try swapping the arguments and inverting the conditional. */
13322 if (COMPARISON_CLASS_P (arg0)
13323 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13324 arg1, TREE_OPERAND (arg0, 1))
13325 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13326 {
13327 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13328 if (tem)
13329 return tem;
13330 }
13331
13332 if (COMPARISON_CLASS_P (arg0)
13333 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13334 op2,
13335 TREE_OPERAND (arg0, 1))
13336 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13337 {
13338 location_t loc0 = expr_location_or (arg0, loc);
13339 tem = fold_invert_truthvalue (loc0, arg0);
13340 if (tem && COMPARISON_CLASS_P (tem))
13341 {
13342 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13343 if (tem)
13344 return tem;
13345 }
13346 }
13347
13348 /* If the second operand is simpler than the third, swap them
13349 since that produces better jump optimization results. */
13350 if (truth_value_p (TREE_CODE (arg0))
13351 && tree_swap_operands_p (op1, op2, false))
13352 {
13353 location_t loc0 = expr_location_or (arg0, loc);
13354 /* See if this can be inverted. If it can't, possibly because
13355 it was a floating-point inequality comparison, don't do
13356 anything. */
13357 tem = fold_invert_truthvalue (loc0, arg0);
13358 if (tem)
13359 return fold_build3_loc (loc, code, type, tem, op2, op1);
13360 }
13361
13362 /* Convert A ? 1 : 0 to simply A. */
13363 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13364 : (integer_onep (op1)
13365 && !VECTOR_TYPE_P (type)))
13366 && integer_zerop (op2)
13367 /* If we try to convert OP0 to our type, the
13368 call to fold will try to move the conversion inside
13369 a COND, which will recurse. In that case, the COND_EXPR
13370 is probably the best choice, so leave it alone. */
13371 && type == TREE_TYPE (arg0))
13372 return pedantic_non_lvalue_loc (loc, arg0);
13373
13374 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13375 over COND_EXPR in cases such as floating point comparisons. */
13376 if (integer_zerop (op1)
13377 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13378 : (integer_onep (op2)
13379 && !VECTOR_TYPE_P (type)))
13380 && truth_value_p (TREE_CODE (arg0)))
13381 return pedantic_non_lvalue_loc (loc,
13382 fold_convert_loc (loc, type,
13383 invert_truthvalue_loc (loc,
13384 arg0)));
13385
13386 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13387 if (TREE_CODE (arg0) == LT_EXPR
13388 && integer_zerop (TREE_OPERAND (arg0, 1))
13389 && integer_zerop (op2)
13390 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13391 {
13392 /* sign_bit_p looks through both zero and sign extensions,
13393 but for this optimization only sign extensions are
13394 usable. */
13395 tree tem2 = TREE_OPERAND (arg0, 0);
13396 while (tem != tem2)
13397 {
13398 if (TREE_CODE (tem2) != NOP_EXPR
13399 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13400 {
13401 tem = NULL_TREE;
13402 break;
13403 }
13404 tem2 = TREE_OPERAND (tem2, 0);
13405 }
13406 /* sign_bit_p only checks ARG1 bits within A's precision.
13407 If <sign bit of A> has wider type than A, bits outside
13408 of A's precision in <sign bit of A> need to be checked.
13409 If they are all 0, this optimization needs to be done
	     in unsigned A's type; if they are all 1, in signed A's
	     type; otherwise this can't be done.  */
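	  /* For example, with signed char A and <sign bit of A> == 128,
	     bits 8..31 of the constant are all zero, so the AND can be
	     done on (unsigned char) A; with -128 instead, those bits are
	     all ones and A's signed type works.  */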
13412 if (tem
13413 && TYPE_PRECISION (TREE_TYPE (tem))
13414 < TYPE_PRECISION (TREE_TYPE (arg1))
13415 && TYPE_PRECISION (TREE_TYPE (tem))
13416 < TYPE_PRECISION (type))
13417 {
13418 int inner_width, outer_width;
13419 tree tem_type;
13420
13421 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13422 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13423 if (outer_width > TYPE_PRECISION (type))
13424 outer_width = TYPE_PRECISION (type);
13425
13426 wide_int mask = wi::shifted_mask
13427 (inner_width, outer_width - inner_width, false,
13428 TYPE_PRECISION (TREE_TYPE (arg1)));
13429
13430 wide_int common = mask & arg1;
13431 if (common == mask)
13432 {
13433 tem_type = signed_type_for (TREE_TYPE (tem));
13434 tem = fold_convert_loc (loc, tem_type, tem);
13435 }
13436 else if (common == 0)
13437 {
13438 tem_type = unsigned_type_for (TREE_TYPE (tem));
13439 tem = fold_convert_loc (loc, tem_type, tem);
13440 }
13441 else
13442 tem = NULL;
13443 }
13444
13445 if (tem)
13446 return
13447 fold_convert_loc (loc, type,
13448 fold_build2_loc (loc, BIT_AND_EXPR,
13449 TREE_TYPE (tem), tem,
13450 fold_convert_loc (loc,
13451 TREE_TYPE (tem),
13452 arg1)));
13453 }
13454
13455 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13456 already handled above. */
13457 if (TREE_CODE (arg0) == BIT_AND_EXPR
13458 && integer_onep (TREE_OPERAND (arg0, 1))
13459 && integer_zerop (op2)
13460 && integer_pow2p (arg1))
13461 {
13462 tree tem = TREE_OPERAND (arg0, 0);
13463 STRIP_NOPS (tem);
13464 if (TREE_CODE (tem) == RSHIFT_EXPR
13465 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13468 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13469 TREE_OPERAND (tem, 0), arg1);
13470 }
13471
13472 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13473 is probably obsolete because the first operand should be a
13474 truth value (that's why we have the two cases above), but let's
13475 leave it in until we can confirm this for all front-ends. */
13476 if (integer_zerop (op2)
13477 && TREE_CODE (arg0) == NE_EXPR
13478 && integer_zerop (TREE_OPERAND (arg0, 1))
13479 && integer_pow2p (arg1)
13480 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13481 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13482 arg1, OEP_ONLY_CONST))
13483 return pedantic_non_lvalue_loc (loc,
13484 fold_convert_loc (loc, type,
13485 TREE_OPERAND (arg0, 0)));
13486
13487 /* Disable the transformations below for vectors, since
13488 fold_binary_op_with_conditional_arg may undo them immediately,
13489 yielding an infinite loop. */
13490 if (code == VEC_COND_EXPR)
13491 return NULL_TREE;
13492
13493 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13494 if (integer_zerop (op2)
13495 && truth_value_p (TREE_CODE (arg0))
13496 && truth_value_p (TREE_CODE (arg1))
13497 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13498 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13499 : TRUTH_ANDIF_EXPR,
13500 type, fold_convert_loc (loc, type, arg0), arg1);
13501
13502 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op2)
	   : integer_onep (op2))
13504 && truth_value_p (TREE_CODE (arg0))
13505 && truth_value_p (TREE_CODE (arg1))
13506 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13507 {
13508 location_t loc0 = expr_location_or (arg0, loc);
13509 /* Only perform transformation if ARG0 is easily inverted. */
13510 tem = fold_invert_truthvalue (loc0, arg0);
13511 if (tem)
13512 return fold_build2_loc (loc, code == VEC_COND_EXPR
13513 ? BIT_IOR_EXPR
13514 : TRUTH_ORIF_EXPR,
13515 type, fold_convert_loc (loc, type, tem),
13516 arg1);
13517 }
13518
13519 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13520 if (integer_zerop (arg1)
13521 && truth_value_p (TREE_CODE (arg0))
13522 && truth_value_p (TREE_CODE (op2))
13523 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13524 {
13525 location_t loc0 = expr_location_or (arg0, loc);
13526 /* Only perform transformation if ARG0 is easily inverted. */
13527 tem = fold_invert_truthvalue (loc0, arg0);
13528 if (tem)
13529 return fold_build2_loc (loc, code == VEC_COND_EXPR
13530 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13531 type, fold_convert_loc (loc, type, tem),
13532 op2);
13533 }
13534
13535 /* Convert A ? 1 : B into A || B if A and B are truth values. */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1)
	   : integer_onep (arg1))
13537 && truth_value_p (TREE_CODE (arg0))
13538 && truth_value_p (TREE_CODE (op2))
13539 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13540 return fold_build2_loc (loc, code == VEC_COND_EXPR
13541 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13542 type, fold_convert_loc (loc, type, arg0), op2);
13543
13544 return NULL_TREE;
13545
13546 case CALL_EXPR:
13547 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13548 of fold_ternary on them. */
13549 gcc_unreachable ();
13550
13551 case BIT_FIELD_REF:
13552 if ((TREE_CODE (arg0) == VECTOR_CST
13553 || (TREE_CODE (arg0) == CONSTRUCTOR
13554 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13555 && (type == TREE_TYPE (TREE_TYPE (arg0))
13556 || (TREE_CODE (type) == VECTOR_TYPE
13557 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13558 {
13559 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13560 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13561 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13562 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13563
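	  /* E.g. BIT_FIELD_REF <V4SI, 32, 64> has WIDTH = 32, N = 32 and
	     IDX = 64, which reduces below to extracting element 2.  */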
13564 if (n != 0
13565 && (idx % width) == 0
13566 && (n % width) == 0
13567 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13568 {
13569 idx = idx / width;
13570 n = n / width;
13571
13572 if (TREE_CODE (arg0) == VECTOR_CST)
13573 {
13574 if (n == 1)
13575 return VECTOR_CST_ELT (arg0, idx);
13576
13577 tree *vals = XALLOCAVEC (tree, n);
13578 for (unsigned i = 0; i < n; ++i)
13579 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13580 return build_vector (type, vals);
13581 }
13582
13583 /* Constructor elements can be subvectors. */
13584 unsigned HOST_WIDE_INT k = 1;
13585 if (CONSTRUCTOR_NELTS (arg0) != 0)
13586 {
13587 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13588 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13589 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13590 }
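	  /* E.g. a V8SI constructor built from two V4SI values has
	     K = 4.  */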
13591
13592 /* We keep an exact subset of the constructor elements. */
13593 if ((idx % k) == 0 && (n % k) == 0)
13594 {
13595 if (CONSTRUCTOR_NELTS (arg0) == 0)
13596 return build_constructor (type, NULL);
13597 idx /= k;
13598 n /= k;
13599 if (n == 1)
13600 {
13601 if (idx < CONSTRUCTOR_NELTS (arg0))
13602 return CONSTRUCTOR_ELT (arg0, idx)->value;
13603 return build_zero_cst (type);
13604 }
13605
13606 vec<constructor_elt, va_gc> *vals;
13607 vec_alloc (vals, n);
13608 for (unsigned i = 0;
13609 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13610 ++i)
13611 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13612 CONSTRUCTOR_ELT
13613 (arg0, idx + i)->value);
13614 return build_constructor (type, vals);
13615 }
13616 /* The bitfield references a single constructor element. */
13617 else if (idx + n <= (idx / k + 1) * k)
13618 {
13619 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13620 return build_zero_cst (type);
13621 else if (n == k)
13622 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13623 else
13624 return fold_build3_loc (loc, code, type,
13625 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13626 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13627 }
13628 }
13629 }
13630
      /* A bit-field-ref that references the full argument can be stripped.  */
13632 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13633 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13634 && integer_zerop (op2))
13635 return fold_convert_loc (loc, type, arg0);
13636
13637 /* On constants we can use native encode/interpret to constant
13638 fold (nearly) all BIT_FIELD_REFs. */
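      /* E.g. a BIT_FIELD_REF selecting one byte of a 32-bit INTEGER_CST
	 is folded by encoding the constant into a byte buffer and
	 re-interpreting the addressed byte in the target byte order.  */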
13639 if (CONSTANT_CLASS_P (arg0)
13640 && can_native_interpret_type_p (type)
13641 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
	  /* This limitation should not be necessary; we just need to
	     round this up to mode size.  */
13644 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13645 /* Need bit-shifting of the buffer to relax the following. */
13646 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13647 {
13648 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13649 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13650 unsigned HOST_WIDE_INT clen;
13651 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit ourselves to a reasonable
	     amount of work.  */
13655 if (clen <= 4096)
13656 {
13657 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13658 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13659 if (len > 0
13660 && len * BITS_PER_UNIT >= bitpos + bitsize)
13661 {
13662 tree v = native_interpret_expr (type,
13663 b + bitpos / BITS_PER_UNIT,
13664 bitsize / BITS_PER_UNIT);
13665 if (v)
13666 return v;
13667 }
13668 }
13669 }
13670
13671 return NULL_TREE;
13672
13673 case FMA_EXPR:
      /* For integer operands we can decompose the FMA.  */
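      /* E.g. FMA_EXPR <3, 4, X> becomes 12 + X, and FMA_EXPR <A, B, 0>
	 becomes A * B.  */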
13675 if (TREE_CODE (arg0) == INTEGER_CST
13676 && TREE_CODE (arg1) == INTEGER_CST)
13677 return fold_build2_loc (loc, PLUS_EXPR, type,
13678 const_binop (MULT_EXPR, arg0, arg1), arg2);
13679 if (integer_zerop (arg2))
13680 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13681
13682 return fold_fma (loc, type, arg0, arg1, arg2);
13683
13684 case VEC_PERM_EXPR:
13685 if (TREE_CODE (arg2) == VECTOR_CST)
13686 {
13687 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13688 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13689 unsigned char *sel2 = sel + nelts;
13690 bool need_mask_canon = false;
13691 bool need_mask_canon2 = false;
13692 bool all_in_vec0 = true;
13693 bool all_in_vec1 = true;
13694 bool maybe_identity = true;
13695 bool single_arg = (op0 == op1);
13696 bool changed = false;
13697
13698 mask2 = 2 * nelts - 1;
13699 mask = single_arg ? (nelts - 1) : mask2;
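	  /* E.g. for V4SI, MASK2 = 7 permits indices into both input
	     vectors, while MASK = 3 folds a single-argument selector
	     back into OP0 alone.  */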
13700 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13701 for (i = 0; i < nelts; i++)
13702 {
13703 tree val = VECTOR_CST_ELT (arg2, i);
13704 if (TREE_CODE (val) != INTEGER_CST)
13705 return NULL_TREE;
13706
13707 /* Make sure that the perm value is in an acceptable
13708 range. */
13709 wide_int t = val;
13710 need_mask_canon |= wi::gtu_p (t, mask);
13711 need_mask_canon2 |= wi::gtu_p (t, mask2);
13712 sel[i] = t.to_uhwi () & mask;
13713 sel2[i] = t.to_uhwi () & mask2;
13714
13715 if (sel[i] < nelts)
13716 all_in_vec1 = false;
13717 else
13718 all_in_vec0 = false;
13719
13720 if ((sel[i] & (nelts-1)) != i)
13721 maybe_identity = false;
13722 }
13723
13724 if (maybe_identity)
13725 {
13726 if (all_in_vec0)
13727 return op0;
13728 if (all_in_vec1)
13729 return op1;
13730 }
13731
13732 if (all_in_vec0)
13733 op1 = op0;
13734 else if (all_in_vec1)
13735 {
13736 op0 = op1;
13737 for (i = 0; i < nelts; i++)
13738 sel[i] -= nelts;
13739 need_mask_canon = true;
13740 }
13741
13742 if ((TREE_CODE (op0) == VECTOR_CST
13743 || TREE_CODE (op0) == CONSTRUCTOR)
13744 && (TREE_CODE (op1) == VECTOR_CST
13745 || TREE_CODE (op1) == CONSTRUCTOR))
13746 {
13747 tree t = fold_vec_perm (type, op0, op1, sel);
13748 if (t != NULL_TREE)
13749 return t;
13750 }
13751
13752 if (op0 == op1 && !single_arg)
13753 changed = true;
13754
13755 /* Some targets are deficient and fail to expand a single
13756 argument permutation while still allowing an equivalent
13757 2-argument version. */
13758 if (need_mask_canon && arg2 == op2
13759 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13760 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13761 {
13762 need_mask_canon = need_mask_canon2;
13763 sel = sel2;
13764 }
13765
13766 if (need_mask_canon && arg2 == op2)
13767 {
13768 tree *tsel = XALLOCAVEC (tree, nelts);
13769 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13770 for (i = 0; i < nelts; i++)
13771 tsel[i] = build_int_cst (eltype, sel[i]);
13772 op2 = build_vector (TREE_TYPE (arg2), tsel);
13773 changed = true;
13774 }
13775
13776 if (changed)
13777 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13778 }
13779 return NULL_TREE;
13780
13781 default:
13782 return NULL_TREE;
13783 } /* switch (code) */
13784 }
13785
13786 /* Perform constant folding and related simplification of EXPR.
13787 The related simplifications include x*1 => x, x*0 => 0, etc.,
13788 and application of the associative law.
13789 NOP_EXPR conversions may be removed freely (as long as we
13790 are careful not to change the type of the overall expression).
13791 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13792 but we can constant-fold them if they have constant operands. */
13793
13794 #ifdef ENABLE_FOLD_CHECKING
13795 # define fold(x) fold_1 (x)
13796 static tree fold_1 (tree);
13797 static
13798 #endif
13799 tree
13800 fold (tree expr)
13801 {
13802 const tree t = expr;
13803 enum tree_code code = TREE_CODE (t);
13804 enum tree_code_class kind = TREE_CODE_CLASS (code);
13805 tree tem;
13806 location_t loc = EXPR_LOCATION (expr);
13807
13808 /* Return right away if a constant. */
13809 if (kind == tcc_constant)
13810 return t;
13811
13812 /* CALL_EXPR-like objects with variable numbers of operands are
13813 treated specially. */
13814 if (kind == tcc_vl_exp)
13815 {
13816 if (code == CALL_EXPR)
13817 {
13818 tem = fold_call_expr (loc, expr, false);
13819 return tem ? tem : expr;
13820 }
13821 return expr;
13822 }
13823
13824 if (IS_EXPR_CODE_CLASS (kind))
13825 {
13826 tree type = TREE_TYPE (t);
13827 tree op0, op1, op2;
13828
13829 switch (TREE_CODE_LENGTH (code))
13830 {
13831 case 1:
13832 op0 = TREE_OPERAND (t, 0);
13833 tem = fold_unary_loc (loc, code, type, op0);
13834 return tem ? tem : expr;
13835 case 2:
13836 op0 = TREE_OPERAND (t, 0);
13837 op1 = TREE_OPERAND (t, 1);
13838 tem = fold_binary_loc (loc, code, type, op0, op1);
13839 return tem ? tem : expr;
13840 case 3:
13841 op0 = TREE_OPERAND (t, 0);
13842 op1 = TREE_OPERAND (t, 1);
13843 op2 = TREE_OPERAND (t, 2);
13844 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13845 return tem ? tem : expr;
13846 default:
13847 break;
13848 }
13849 }
13850
13851 switch (code)
13852 {
13853 case ARRAY_REF:
13854 {
13855 tree op0 = TREE_OPERAND (t, 0);
13856 tree op1 = TREE_OPERAND (t, 1);
13857
13858 if (TREE_CODE (op1) == INTEGER_CST
13859 && TREE_CODE (op0) == CONSTRUCTOR
13860 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13861 {
13862 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13863 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13864 unsigned HOST_WIDE_INT begin = 0;
13865
13866 /* Find a matching index by means of a binary search. */
13867 while (begin != end)
13868 {
13869 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13870 tree index = (*elts)[middle].index;
13871
13872 if (TREE_CODE (index) == INTEGER_CST
13873 && tree_int_cst_lt (index, op1))
13874 begin = middle + 1;
13875 else if (TREE_CODE (index) == INTEGER_CST
13876 && tree_int_cst_lt (op1, index))
13877 end = middle;
13878 else if (TREE_CODE (index) == RANGE_EXPR
13879 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13880 begin = middle + 1;
13881 else if (TREE_CODE (index) == RANGE_EXPR
13882 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13883 end = middle;
13884 else
13885 return (*elts)[middle].value;
13886 }
13887 }
13888
13889 return t;
13890 }
13891
13892 /* Return a VECTOR_CST if possible. */
13893 case CONSTRUCTOR:
13894 {
13895 tree type = TREE_TYPE (t);
13896 if (TREE_CODE (type) != VECTOR_TYPE)
13897 return t;
13898
13899 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13900 unsigned HOST_WIDE_INT idx, pos = 0;
13901 tree value;
13902
13903 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13904 {
13905 if (!CONSTANT_CLASS_P (value))
13906 return t;
13907 if (TREE_CODE (value) == VECTOR_CST)
13908 {
13909 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13910 vec[pos++] = VECTOR_CST_ELT (value, i);
13911 }
13912 else
13913 vec[pos++] = value;
13914 }
13915 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13916 vec[pos] = build_zero_cst (TREE_TYPE (type));
13917
13918 return build_vector (type, vec);
13919 }
13920
13921 case CONST_DECL:
13922 return fold (DECL_INITIAL (t));
13923
13924 default:
13925 return t;
13926 } /* switch (code) */
13927 }
13928
13929 #ifdef ENABLE_FOLD_CHECKING
13930 #undef fold
13931
13932 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13933 hash_table<pointer_hash<const tree_node> > *);
13934 static void fold_check_failed (const_tree, const_tree);
13935 void print_fold_checksum (const_tree);
13936
/* When --enable-checking=fold is in effect, compute a digest of EXPR
   before and after the actual fold call to verify that fold did not
   accidentally change the original EXPR.  */
13940
13941 tree
13942 fold (tree expr)
13943 {
13944 tree ret;
13945 struct md5_ctx ctx;
13946 unsigned char checksum_before[16], checksum_after[16];
13947 hash_table<pointer_hash<const tree_node> > ht (32);
13948
13949 md5_init_ctx (&ctx);
13950 fold_checksum_tree (expr, &ctx, &ht);
13951 md5_finish_ctx (&ctx, checksum_before);
13952 ht.empty ();
13953
13954 ret = fold_1 (expr);
13955
13956 md5_init_ctx (&ctx);
13957 fold_checksum_tree (expr, &ctx, &ht);
13958 md5_finish_ctx (&ctx, checksum_after);
13959
13960 if (memcmp (checksum_before, checksum_after, 16))
13961 fold_check_failed (expr, ret);
13962
13963 return ret;
13964 }
13965
13966 void
13967 print_fold_checksum (const_tree expr)
13968 {
13969 struct md5_ctx ctx;
13970 unsigned char checksum[16], cnt;
13971 hash_table<pointer_hash<const tree_node> > ht (32);
13972
13973 md5_init_ctx (&ctx);
13974 fold_checksum_tree (expr, &ctx, &ht);
13975 md5_finish_ctx (&ctx, checksum);
13976 for (cnt = 0; cnt < 16; ++cnt)
13977 fprintf (stderr, "%02x", checksum[cnt]);
13978 putc ('\n', stderr);
13979 }
13980
13981 static void
13982 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13983 {
13984 internal_error ("fold check: original tree changed by fold");
13985 }
13986
13987 static void
13988 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13989 hash_table<pointer_hash <const tree_node> > *ht)
13990 {
13991 const tree_node **slot;
13992 enum tree_code code;
13993 union tree_node buf;
13994 int i, len;
13995
13996 recursive_label:
13997 if (expr == NULL)
13998 return;
13999 slot = ht->find_slot (expr, INSERT);
14000 if (*slot != NULL)
14001 return;
14002 *slot = expr;
14003 code = TREE_CODE (expr);
14004 if (TREE_CODE_CLASS (code) == tcc_declaration
14005 && DECL_ASSEMBLER_NAME_SET_P (expr))
14006 {
14007 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14008 memcpy ((char *) &buf, expr, tree_size (expr));
14009 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14010 expr = (tree) &buf;
14011 }
14012 else if (TREE_CODE_CLASS (code) == tcc_type
14013 && (TYPE_POINTER_TO (expr)
14014 || TYPE_REFERENCE_TO (expr)
14015 || TYPE_CACHED_VALUES_P (expr)
14016 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14017 || TYPE_NEXT_VARIANT (expr)))
14018 {
14019 /* Allow these fields to be modified. */
14020 tree tmp;
14021 memcpy ((char *) &buf, expr, tree_size (expr));
14022 expr = tmp = (tree) &buf;
14023 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14024 TYPE_POINTER_TO (tmp) = NULL;
14025 TYPE_REFERENCE_TO (tmp) = NULL;
14026 TYPE_NEXT_VARIANT (tmp) = NULL;
14027 if (TYPE_CACHED_VALUES_P (tmp))
14028 {
14029 TYPE_CACHED_VALUES_P (tmp) = 0;
14030 TYPE_CACHED_VALUES (tmp) = NULL;
14031 }
14032 }
14033 md5_process_bytes (expr, tree_size (expr), ctx);
14034 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14035 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14036 if (TREE_CODE_CLASS (code) != tcc_type
14037 && TREE_CODE_CLASS (code) != tcc_declaration
14038 && code != TREE_LIST
14039 && code != SSA_NAME
14040 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14041 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14042 switch (TREE_CODE_CLASS (code))
14043 {
14044 case tcc_constant:
14045 switch (code)
14046 {
14047 case STRING_CST:
14048 md5_process_bytes (TREE_STRING_POINTER (expr),
14049 TREE_STRING_LENGTH (expr), ctx);
14050 break;
14051 case COMPLEX_CST:
14052 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14053 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14054 break;
14055 case VECTOR_CST:
14056 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14057 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14058 break;
14059 default:
14060 break;
14061 }
14062 break;
14063 case tcc_exceptional:
14064 switch (code)
14065 {
14066 case TREE_LIST:
14067 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14068 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14069 expr = TREE_CHAIN (expr);
14070 goto recursive_label;
14071 break;
14072 case TREE_VEC:
14073 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14074 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14075 break;
14076 default:
14077 break;
14078 }
14079 break;
14080 case tcc_expression:
14081 case tcc_reference:
14082 case tcc_comparison:
14083 case tcc_unary:
14084 case tcc_binary:
14085 case tcc_statement:
14086 case tcc_vl_exp:
14087 len = TREE_OPERAND_LENGTH (expr);
14088 for (i = 0; i < len; ++i)
14089 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14090 break;
14091 case tcc_declaration:
14092 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14093 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14094 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14095 {
14096 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14097 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14098 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14099 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14100 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14101 }
14102
14103 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14104 {
14105 if (TREE_CODE (expr) == FUNCTION_DECL)
14106 {
14107 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14108 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14109 }
14110 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14111 }
14112 break;
14113 case tcc_type:
14114 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14115 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14116 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14117 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14118 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14119 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14120 if (INTEGRAL_TYPE_P (expr)
14121 || SCALAR_FLOAT_TYPE_P (expr))
14122 {
14123 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14124 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14125 }
14126 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14127 if (TREE_CODE (expr) == RECORD_TYPE
14128 || TREE_CODE (expr) == UNION_TYPE
14129 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14130 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14131 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14132 break;
14133 default:
14134 break;
14135 }
14136 }
14137
14138 /* Helper function for outputting the checksum of a tree T. When
14139 debugging with gdb, you can "define mynext" to be "next" followed
14140 by "call debug_fold_checksum (op0)", then just trace down till the
14141 outputs differ. */
14142
14143 DEBUG_FUNCTION void
14144 debug_fold_checksum (const_tree t)
14145 {
14146 int i;
14147 unsigned char checksum[16];
14148 struct md5_ctx ctx;
14149 hash_table<pointer_hash<const tree_node> > ht (32);
14150
14151 md5_init_ctx (&ctx);
14152 fold_checksum_tree (t, &ctx, &ht);
14153 md5_finish_ctx (&ctx, checksum);
14154 ht.empty ();
14155
14156 for (i = 0; i < 16; i++)
14157 fprintf (stderr, "%d ", checksum[i]);
14158
14159 fprintf (stderr, "\n");
14160 }
14161
14162 #endif
14163
14164 /* Fold a unary tree expression with code CODE of type TYPE with an
14165 operand OP0. LOC is the location of the resulting expression.
14166 Return a folded expression if successful. Otherwise, return a tree
14167 expression with code CODE of type TYPE with an operand OP0. */
14168
14169 tree
14170 fold_build1_stat_loc (location_t loc,
14171 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14172 {
14173 tree tem;
14174 #ifdef ENABLE_FOLD_CHECKING
14175 unsigned char checksum_before[16], checksum_after[16];
14176 struct md5_ctx ctx;
14177 hash_table<pointer_hash<const tree_node> > ht (32);
14178
14179 md5_init_ctx (&ctx);
14180 fold_checksum_tree (op0, &ctx, &ht);
14181 md5_finish_ctx (&ctx, checksum_before);
14182 ht.empty ();
14183 #endif
14184
14185 tem = fold_unary_loc (loc, code, type, op0);
14186 if (!tem)
14187 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14188
14189 #ifdef ENABLE_FOLD_CHECKING
14190 md5_init_ctx (&ctx);
14191 fold_checksum_tree (op0, &ctx, &ht);
14192 md5_finish_ctx (&ctx, checksum_after);
14193
14194 if (memcmp (checksum_before, checksum_after, 16))
14195 fold_check_failed (op0, tem);
14196 #endif
14197 return tem;
14198 }
14199
14200 /* Fold a binary tree expression with code CODE of type TYPE with
14201 operands OP0 and OP1. LOC is the location of the resulting
14202 expression. Return a folded expression if successful. Otherwise,
14203 return a tree expression with code CODE of type TYPE with operands
14204 OP0 and OP1. */
14205
14206 tree
14207 fold_build2_stat_loc (location_t loc,
14208 enum tree_code code, tree type, tree op0, tree op1
14209 MEM_STAT_DECL)
14210 {
14211 tree tem;
14212 #ifdef ENABLE_FOLD_CHECKING
14213 unsigned char checksum_before_op0[16],
14214 checksum_before_op1[16],
14215 checksum_after_op0[16],
14216 checksum_after_op1[16];
14217 struct md5_ctx ctx;
14218 hash_table<pointer_hash<const tree_node> > ht (32);
14219
14220 md5_init_ctx (&ctx);
14221 fold_checksum_tree (op0, &ctx, &ht);
14222 md5_finish_ctx (&ctx, checksum_before_op0);
14223 ht.empty ();
14224
14225 md5_init_ctx (&ctx);
14226 fold_checksum_tree (op1, &ctx, &ht);
14227 md5_finish_ctx (&ctx, checksum_before_op1);
14228 ht.empty ();
14229 #endif
14230
14231 tem = fold_binary_loc (loc, code, type, op0, op1);
14232 if (!tem)
14233 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14234
14235 #ifdef ENABLE_FOLD_CHECKING
14236 md5_init_ctx (&ctx);
14237 fold_checksum_tree (op0, &ctx, &ht);
14238 md5_finish_ctx (&ctx, checksum_after_op0);
14239 ht.empty ();
14240
14241 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14242 fold_check_failed (op0, tem);
14243
14244 md5_init_ctx (&ctx);
14245 fold_checksum_tree (op1, &ctx, &ht);
14246 md5_finish_ctx (&ctx, checksum_after_op1);
14247
14248 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14249 fold_check_failed (op1, tem);
14250 #endif
14251 return tem;
14252 }
14253
14254 /* Fold a ternary tree expression with code CODE of type TYPE with
14255 operands OP0, OP1, and OP2. Return a folded expression if
14256 successful. Otherwise, return a tree expression with code CODE of
14257 type TYPE with operands OP0, OP1, and OP2. */
14258
14259 tree
14260 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14261 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14262 {
14263 tree tem;
14264 #ifdef ENABLE_FOLD_CHECKING
14265 unsigned char checksum_before_op0[16],
14266 checksum_before_op1[16],
14267 checksum_before_op2[16],
14268 checksum_after_op0[16],
14269 checksum_after_op1[16],
14270 checksum_after_op2[16];
14271 struct md5_ctx ctx;
14272 hash_table<pointer_hash<const tree_node> > ht (32);
14273
14274 md5_init_ctx (&ctx);
14275 fold_checksum_tree (op0, &ctx, &ht);
14276 md5_finish_ctx (&ctx, checksum_before_op0);
14277 ht.empty ();
14278
14279 md5_init_ctx (&ctx);
14280 fold_checksum_tree (op1, &ctx, &ht);
14281 md5_finish_ctx (&ctx, checksum_before_op1);
14282 ht.empty ();
14283
14284 md5_init_ctx (&ctx);
14285 fold_checksum_tree (op2, &ctx, &ht);
14286 md5_finish_ctx (&ctx, checksum_before_op2);
14287 ht.empty ();
14288 #endif
14289
14290 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14291 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14292 if (!tem)
14293 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14294
14295 #ifdef ENABLE_FOLD_CHECKING
14296 md5_init_ctx (&ctx);
14297 fold_checksum_tree (op0, &ctx, &ht);
14298 md5_finish_ctx (&ctx, checksum_after_op0);
14299 ht.empty ();
14300
14301 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14302 fold_check_failed (op0, tem);
14303
14304 md5_init_ctx (&ctx);
14305 fold_checksum_tree (op1, &ctx, &ht);
14306 md5_finish_ctx (&ctx, checksum_after_op1);
14307 ht.empty ();
14308
14309 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14310 fold_check_failed (op1, tem);
14311
14312 md5_init_ctx (&ctx);
14313 fold_checksum_tree (op2, &ctx, &ht);
14314 md5_finish_ctx (&ctx, checksum_after_op2);
14315
14316 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14317 fold_check_failed (op2, tem);
14318 #endif
14319 return tem;
14320 }
14321
/* Fold a CALL_EXPR of type TYPE with callee FN and the NARGS
   arguments in ARGARRAY, and a null static chain.
14324 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14325 of type TYPE from the given operands as constructed by build_call_array. */
14326
14327 tree
14328 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14329 int nargs, tree *argarray)
14330 {
14331 tree tem;
14332 #ifdef ENABLE_FOLD_CHECKING
14333 unsigned char checksum_before_fn[16],
14334 checksum_before_arglist[16],
14335 checksum_after_fn[16],
14336 checksum_after_arglist[16];
14337 struct md5_ctx ctx;
14338 hash_table<pointer_hash<const tree_node> > ht (32);
14339 int i;
14340
14341 md5_init_ctx (&ctx);
14342 fold_checksum_tree (fn, &ctx, &ht);
14343 md5_finish_ctx (&ctx, checksum_before_fn);
14344 ht.empty ();
14345
14346 md5_init_ctx (&ctx);
14347 for (i = 0; i < nargs; i++)
14348 fold_checksum_tree (argarray[i], &ctx, &ht);
14349 md5_finish_ctx (&ctx, checksum_before_arglist);
14350 ht.empty ();
14351 #endif
14352
14353 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14354
14355 #ifdef ENABLE_FOLD_CHECKING
14356 md5_init_ctx (&ctx);
14357 fold_checksum_tree (fn, &ctx, &ht);
14358 md5_finish_ctx (&ctx, checksum_after_fn);
14359 ht.empty ();
14360
14361 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14362 fold_check_failed (fn, tem);
14363
14364 md5_init_ctx (&ctx);
14365 for (i = 0; i < nargs; i++)
14366 fold_checksum_tree (argarray[i], &ctx, &ht);
14367 md5_finish_ctx (&ctx, checksum_after_arglist);
14368
14369 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14370 fold_check_failed (NULL_TREE, tem);
14371 #endif
14372 return tem;
14373 }
14374
/* Perform constant folding and related simplification of initializer
   expressions.  These variants behave identically to "fold_buildN" but
   ignore potential run-time traps and exceptions that fold must
   preserve.  */
14378
14379 #define START_FOLD_INIT \
14380 int saved_signaling_nans = flag_signaling_nans;\
14381 int saved_trapping_math = flag_trapping_math;\
14382 int saved_rounding_math = flag_rounding_math;\
14383 int saved_trapv = flag_trapv;\
14384 int saved_folding_initializer = folding_initializer;\
14385 flag_signaling_nans = 0;\
14386 flag_trapping_math = 0;\
14387 flag_rounding_math = 0;\
14388 flag_trapv = 0;\
14389 folding_initializer = 1;
14390
14391 #define END_FOLD_INIT \
14392 flag_signaling_nans = saved_signaling_nans;\
14393 flag_trapping_math = saved_trapping_math;\
14394 flag_rounding_math = saved_rounding_math;\
14395 flag_trapv = saved_trapv;\
14396 folding_initializer = saved_folding_initializer;
14397
14398 tree
14399 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14400 tree type, tree op)
14401 {
14402 tree result;
14403 START_FOLD_INIT;
14404
14405 result = fold_build1_loc (loc, code, type, op);
14406
14407 END_FOLD_INIT;
14408 return result;
14409 }
14410
14411 tree
14412 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14413 tree type, tree op0, tree op1)
14414 {
14415 tree result;
14416 START_FOLD_INIT;
14417
14418 result = fold_build2_loc (loc, code, type, op0, op1);
14419
14420 END_FOLD_INIT;
14421 return result;
14422 }
14423
14424 tree
14425 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14426 int nargs, tree *argarray)
14427 {
14428 tree result;
14429 START_FOLD_INIT;
14430
14431 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14432
14433 END_FOLD_INIT;
14434 return result;
14435 }
14436
14437 #undef START_FOLD_INIT
14438 #undef END_FOLD_INIT
14439
/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.
14442
14443 An example of the sort of thing we care about (at this point; this routine
14444 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14445 fold cases do now) is discovering that
14446
14447 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14448
14449 is a multiple of
14450
14451 SAVE_EXPR (J * 8)
14452
14453 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14454
14455 This code also handles discovering that
14456
14457 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14458
14459 is a multiple of 8 so we don't have to worry about dealing with a
14460 possible remainder.
14461
14462 Note that we *look* inside a SAVE_EXPR only to determine how it was
14463 calculated; it is not safe for fold to do much of anything else with the
14464 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14465 at run time. For example, the latter example above *cannot* be implemented
14466 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14467 evaluation time of the original SAVE_EXPR is not necessarily the same at
14468 the time the new expression is evaluated. The only optimization of this
14469 sort that would be valid is changing
14470
14471 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14472
14473 divided by 8 to
14474
14475 SAVE_EXPR (I) * SAVE_EXPR (J)
14476
14477 (where the same SAVE_EXPR (J) is used in the original and the
14478 transformed version). */
14479
14480 int
14481 multiple_of_p (tree type, const_tree top, const_tree bottom)
14482 {
14483 if (operand_equal_p (top, bottom, 0))
14484 return 1;
14485
14486 if (TREE_CODE (type) != INTEGER_TYPE)
14487 return 0;
14488
14489 switch (TREE_CODE (top))
14490 {
14491 case BIT_AND_EXPR:
14492 /* Bitwise and provides a power of two multiple. If the mask is
14493 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14494 if (!integer_pow2p (bottom))
14495 return 0;
14496 /* FALLTHRU */
14497
14498 case MULT_EXPR:
14499 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14500 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14501
14502 case PLUS_EXPR:
14503 case MINUS_EXPR:
14504 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14505 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14506
14507 case LSHIFT_EXPR:
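      /* E.g. I << 3 is a multiple of 8, since 1 << 3 == 8.  */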
14508 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14509 {
14510 tree op1, t1;
14511
14512 op1 = TREE_OPERAND (top, 1);
14513 /* const_binop may not detect overflow correctly,
14514 so check for it explicitly here. */
14515 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14516 && 0 != (t1 = fold_convert (type,
14517 const_binop (LSHIFT_EXPR,
14518 size_one_node,
14519 op1)))
14520 && !TREE_OVERFLOW (t1))
14521 return multiple_of_p (type, t1, bottom);
14522 }
14523 return 0;
14524
14525 case NOP_EXPR:
14526 /* Can't handle conversions from non-integral or wider integral type. */
14527 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14528 || (TYPE_PRECISION (type)
14529 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14530 return 0;
14531
      /* FALLTHRU */
14533
14534 case SAVE_EXPR:
14535 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14536
14537 case COND_EXPR:
14538 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14539 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14540
14541 case INTEGER_CST:
14542 if (TREE_CODE (bottom) != INTEGER_CST
14543 || integer_zerop (bottom)
14544 || (TYPE_UNSIGNED (type)
14545 && (tree_int_cst_sgn (top) < 0
14546 || tree_int_cst_sgn (bottom) < 0)))
14547 return 0;
14548 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14549 SIGNED);
14550
14551 default:
14552 return 0;
14553 }
14554 }
14555
/* Return true if an expression with code CODE and type TYPE is known to
   be non-negative.  */
14557
14558 static bool
14559 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14560 {
14561 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14562 && truth_value_p (code))
14563 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the values are -1 and 0).  */
14565 return true;
14566 return false;
14567 }
14568
14569 /* Return true if (CODE OP0) is known to be non-negative. If the return
14570 value is based on the assumption that signed overflow is undefined,
14571 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14572 *STRICT_OVERFLOW_P. */
14573
14574 bool
14575 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14576 bool *strict_overflow_p)
14577 {
14578 if (TYPE_UNSIGNED (type))
14579 return true;
14580
14581 switch (code)
14582 {
14583 case ABS_EXPR:
14584 /* We can't return 1 if flag_wrapv is set because
14585 ABS_EXPR<INT_MIN> = INT_MIN. */
14586 if (!INTEGRAL_TYPE_P (type))
14587 return true;
14588 if (TYPE_OVERFLOW_UNDEFINED (type))
14589 {
14590 *strict_overflow_p = true;
14591 return true;
14592 }
14593 break;
14594
14595 case NON_LVALUE_EXPR:
14596 case FLOAT_EXPR:
14597 case FIX_TRUNC_EXPR:
14598 return tree_expr_nonnegative_warnv_p (op0,
14599 strict_overflow_p);
14600
14601 CASE_CONVERT:
14602 {
14603 tree inner_type = TREE_TYPE (op0);
14604 tree outer_type = type;
14605
14606 if (TREE_CODE (outer_type) == REAL_TYPE)
14607 {
14608 if (TREE_CODE (inner_type) == REAL_TYPE)
14609 return tree_expr_nonnegative_warnv_p (op0,
14610 strict_overflow_p);
14611 if (INTEGRAL_TYPE_P (inner_type))
14612 {
14613 if (TYPE_UNSIGNED (inner_type))
14614 return true;
14615 return tree_expr_nonnegative_warnv_p (op0,
14616 strict_overflow_p);
14617 }
14618 }
14619 else if (INTEGRAL_TYPE_P (outer_type))
14620 {
14621 if (TREE_CODE (inner_type) == REAL_TYPE)
14622 return tree_expr_nonnegative_warnv_p (op0,
14623 strict_overflow_p);
14624 if (INTEGRAL_TYPE_P (inner_type))
14625 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14626 && TYPE_UNSIGNED (inner_type);
14627 }
14628 }
14629 break;
14630
14631 default:
14632 return tree_simple_nonnegative_warnv_p (code, type);
14633 }
14634
14635 /* We don't know sign of `t', so be conservative and return false. */
14636 return false;
14637 }
14638
14639 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14640 value is based on the assumption that signed overflow is undefined,
14641 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14642 *STRICT_OVERFLOW_P. */
14643
14644 bool
14645 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14646 tree op1, bool *strict_overflow_p)
14647 {
14648 if (TYPE_UNSIGNED (type))
14649 return true;
14650
14651 switch (code)
14652 {
14653 case POINTER_PLUS_EXPR:
14654 case PLUS_EXPR:
14655 if (FLOAT_TYPE_P (type))
14656 return (tree_expr_nonnegative_warnv_p (op0,
14657 strict_overflow_p)
14658 && tree_expr_nonnegative_warnv_p (op1,
14659 strict_overflow_p));
14660
14661 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14662 both unsigned and at least 2 bits shorter than the result. */
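      /* E.g. the sum of two zero-extended 16-bit values needs at most
	 17 bits, so a 32-bit signed result can never be negative.  */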
14663 if (TREE_CODE (type) == INTEGER_TYPE
14664 && TREE_CODE (op0) == NOP_EXPR
14665 && TREE_CODE (op1) == NOP_EXPR)
14666 {
14667 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14668 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14669 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14670 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14671 {
14672 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14673 TYPE_PRECISION (inner2)) + 1;
14674 return prec < TYPE_PRECISION (type);
14675 }
14676 }
14677 break;
14678
14679 case MULT_EXPR:
14680 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14681 {
14682 /* x * x is always non-negative for floating point x
14683 or without overflow. */
14684 if (operand_equal_p (op0, op1, 0)
14685 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14686 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14687 {
14688 if (TYPE_OVERFLOW_UNDEFINED (type))
14689 *strict_overflow_p = true;
14690 return true;
14691 }
14692 }
14693
      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and the sum of their precisions is less than the
	 precision of the result.  */
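      /* E.g. the product of two zero-extended 8-bit values needs at
	 most 16 bits, so a 32-bit signed result is always
	 non-negative.  */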
14696 if (TREE_CODE (type) == INTEGER_TYPE
14697 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14698 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14699 {
14700 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14701 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14702 : TREE_TYPE (op0);
14703 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14704 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14705 : TREE_TYPE (op1);
14706
14707 bool unsigned0 = TYPE_UNSIGNED (inner0);
14708 bool unsigned1 = TYPE_UNSIGNED (inner1);
14709
14710 if (TREE_CODE (op0) == INTEGER_CST)
14711 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14712
14713 if (TREE_CODE (op1) == INTEGER_CST)
14714 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14715
14716 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14717 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14718 {
14719 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14720 ? tree_int_cst_min_precision (op0, UNSIGNED)
14721 : TYPE_PRECISION (inner0);
14722
14723 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14724 ? tree_int_cst_min_precision (op1, UNSIGNED)
14725 : TYPE_PRECISION (inner1);
14726
14727 return precision0 + precision1 < TYPE_PRECISION (type);
14728 }
14729 }
14730 return false;
14731
14732 case BIT_AND_EXPR:
14733 case MAX_EXPR:
14734 return (tree_expr_nonnegative_warnv_p (op0,
14735 strict_overflow_p)
14736 || tree_expr_nonnegative_warnv_p (op1,
14737 strict_overflow_p));
14738
14739 case BIT_IOR_EXPR:
14740 case BIT_XOR_EXPR:
14741 case MIN_EXPR:
14742 case RDIV_EXPR:
14743 case TRUNC_DIV_EXPR:
14744 case CEIL_DIV_EXPR:
14745 case FLOOR_DIV_EXPR:
14746 case ROUND_DIV_EXPR:
14747 return (tree_expr_nonnegative_warnv_p (op0,
14748 strict_overflow_p)
14749 && tree_expr_nonnegative_warnv_p (op1,
14750 strict_overflow_p));
14751
14752 case TRUNC_MOD_EXPR:
14753 case CEIL_MOD_EXPR:
14754 case FLOOR_MOD_EXPR:
14755 case ROUND_MOD_EXPR:
14756 return tree_expr_nonnegative_warnv_p (op0,
14757 strict_overflow_p);
14758 default:
14759 return tree_simple_nonnegative_warnv_p (code, type);
14760 }
14761
14762 /* We don't know sign of `t', so be conservative and return false. */
14763 return false;
14764 }
14765
14766 /* Return true if T is known to be non-negative. If the return
14767 value is based on the assumption that signed overflow is undefined,
14768 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14769 *STRICT_OVERFLOW_P. */
14770
14771 bool
14772 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14773 {
14774 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14775 return true;
14776
14777 switch (TREE_CODE (t))
14778 {
14779 case INTEGER_CST:
14780 return tree_int_cst_sgn (t) >= 0;
14781
14782 case REAL_CST:
14783 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14784
14785 case FIXED_CST:
14786 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14787
14788 case COND_EXPR:
14789 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14790 strict_overflow_p)
14791 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14792 strict_overflow_p));
14793 default:
14794 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14795 TREE_TYPE (t));
14796 }
14797 /* We don't know sign of `t', so be conservative and return false. */
14798 return false;
14799 }
14800
14801 /* Return true if T is known to be non-negative. If the return
14802 value is based on the assumption that signed overflow is undefined,
14803 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14804 *STRICT_OVERFLOW_P. */
14805
14806 bool
14807 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14808 tree arg0, tree arg1, bool *strict_overflow_p)
14809 {
14810 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14811 switch (DECL_FUNCTION_CODE (fndecl))
14812 {
14813 CASE_FLT_FN (BUILT_IN_ACOS):
14814 CASE_FLT_FN (BUILT_IN_ACOSH):
14815 CASE_FLT_FN (BUILT_IN_CABS):
14816 CASE_FLT_FN (BUILT_IN_COSH):
14817 CASE_FLT_FN (BUILT_IN_ERFC):
14818 CASE_FLT_FN (BUILT_IN_EXP):
14819 CASE_FLT_FN (BUILT_IN_EXP10):
14820 CASE_FLT_FN (BUILT_IN_EXP2):
14821 CASE_FLT_FN (BUILT_IN_FABS):
14822 CASE_FLT_FN (BUILT_IN_FDIM):
14823 CASE_FLT_FN (BUILT_IN_HYPOT):
14824 CASE_FLT_FN (BUILT_IN_POW10):
14825 CASE_INT_FN (BUILT_IN_FFS):
14826 CASE_INT_FN (BUILT_IN_PARITY):
14827 CASE_INT_FN (BUILT_IN_POPCOUNT):
14828 CASE_INT_FN (BUILT_IN_CLZ):
14829 CASE_INT_FN (BUILT_IN_CLRSB):
14830 case BUILT_IN_BSWAP32:
14831 case BUILT_IN_BSWAP64:
14832 /* Always true. */
14833 return true;
14834
14835 CASE_FLT_FN (BUILT_IN_SQRT):
14836 /* sqrt(-0.0) is -0.0. */
14837 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14838 return true;
14839 return tree_expr_nonnegative_warnv_p (arg0,
14840 strict_overflow_p);
14841
14842 CASE_FLT_FN (BUILT_IN_ASINH):
14843 CASE_FLT_FN (BUILT_IN_ATAN):
14844 CASE_FLT_FN (BUILT_IN_ATANH):
14845 CASE_FLT_FN (BUILT_IN_CBRT):
14846 CASE_FLT_FN (BUILT_IN_CEIL):
14847 CASE_FLT_FN (BUILT_IN_ERF):
14848 CASE_FLT_FN (BUILT_IN_EXPM1):
14849 CASE_FLT_FN (BUILT_IN_FLOOR):
14850 CASE_FLT_FN (BUILT_IN_FMOD):
14851 CASE_FLT_FN (BUILT_IN_FREXP):
14852 CASE_FLT_FN (BUILT_IN_ICEIL):
14853 CASE_FLT_FN (BUILT_IN_IFLOOR):
14854 CASE_FLT_FN (BUILT_IN_IRINT):
14855 CASE_FLT_FN (BUILT_IN_IROUND):
14856 CASE_FLT_FN (BUILT_IN_LCEIL):
14857 CASE_FLT_FN (BUILT_IN_LDEXP):
14858 CASE_FLT_FN (BUILT_IN_LFLOOR):
14859 CASE_FLT_FN (BUILT_IN_LLCEIL):
14860 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14861 CASE_FLT_FN (BUILT_IN_LLRINT):
14862 CASE_FLT_FN (BUILT_IN_LLROUND):
14863 CASE_FLT_FN (BUILT_IN_LRINT):
14864 CASE_FLT_FN (BUILT_IN_LROUND):
14865 CASE_FLT_FN (BUILT_IN_MODF):
14866 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14867 CASE_FLT_FN (BUILT_IN_RINT):
14868 CASE_FLT_FN (BUILT_IN_ROUND):
14869 CASE_FLT_FN (BUILT_IN_SCALB):
14870 CASE_FLT_FN (BUILT_IN_SCALBLN):
14871 CASE_FLT_FN (BUILT_IN_SCALBN):
14872 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14873 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14874 CASE_FLT_FN (BUILT_IN_SINH):
14875 CASE_FLT_FN (BUILT_IN_TANH):
14876 CASE_FLT_FN (BUILT_IN_TRUNC):
14877 /* True if the 1st argument is nonnegative. */
14878 return tree_expr_nonnegative_warnv_p (arg0,
14879 strict_overflow_p);
14880
14881 CASE_FLT_FN (BUILT_IN_FMAX):
14882 /* True if the 1st OR 2nd arguments are nonnegative. */
14883 return (tree_expr_nonnegative_warnv_p (arg0,
14884 strict_overflow_p)
14885 || (tree_expr_nonnegative_warnv_p (arg1,
14886 strict_overflow_p)));
14887
14888 CASE_FLT_FN (BUILT_IN_FMIN):
14889 /* True if the 1st AND 2nd arguments are nonnegative. */
14890 return (tree_expr_nonnegative_warnv_p (arg0,
14891 strict_overflow_p)
14892 && (tree_expr_nonnegative_warnv_p (arg1,
14893 strict_overflow_p)));
14894
14895 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14896 /* True if the 2nd argument is nonnegative. */
14897 return tree_expr_nonnegative_warnv_p (arg1,
14898 strict_overflow_p);
14899
14900 CASE_FLT_FN (BUILT_IN_POWI):
14901 /* True if the 1st argument is nonnegative or the second
14902 argument is an even integer. */
14903 if (TREE_CODE (arg1) == INTEGER_CST
14904 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14905 return true;
14906 return tree_expr_nonnegative_warnv_p (arg0,
14907 strict_overflow_p);
14908
14909 CASE_FLT_FN (BUILT_IN_POW):
14910 /* True if the 1st argument is nonnegative or the second
14911 argument is an even integer valued real. */
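      /* E.g. pow (X, 2.0) is known non-negative here even when nothing
	 is known about X.  */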
14912 if (TREE_CODE (arg1) == REAL_CST)
14913 {
14914 REAL_VALUE_TYPE c;
14915 HOST_WIDE_INT n;
14916
14917 c = TREE_REAL_CST (arg1);
14918 n = real_to_integer (&c);
14919 if ((n & 1) == 0)
14920 {
14921 REAL_VALUE_TYPE cint;
14922 real_from_integer (&cint, VOIDmode, n, SIGNED);
14923 if (real_identical (&c, &cint))
14924 return true;
14925 }
14926 }
14927 return tree_expr_nonnegative_warnv_p (arg0,
14928 strict_overflow_p);
14929
14930 default:
14931 break;
14932 }
14933 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14934 type);
14935 }
14936
14937 /* Return true if T is known to be non-negative. If the return
14938 value is based on the assumption that signed overflow is undefined,
14939 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14940 *STRICT_OVERFLOW_P. */
14941
14942 static bool
14943 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14944 {
14945 enum tree_code code = TREE_CODE (t);
14946 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14947 return true;
14948
14949 switch (code)
14950 {
14951 case TARGET_EXPR:
14952 {
14953 tree temp = TARGET_EXPR_SLOT (t);
14954 t = TARGET_EXPR_INITIAL (t);
14955
14956 /* If the initializer is non-void, then it's a normal expression
14957 that will be assigned to the slot. */
14958 if (!VOID_TYPE_P (t))
14959 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14960
14961 /* Otherwise, the initializer sets the slot in some way. One common
14962 way is an assignment statement at the end of the initializer. */
14963 while (1)
14964 {
14965 if (TREE_CODE (t) == BIND_EXPR)
14966 t = expr_last (BIND_EXPR_BODY (t));
14967 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14968 || TREE_CODE (t) == TRY_CATCH_EXPR)
14969 t = expr_last (TREE_OPERAND (t, 0));
14970 else if (TREE_CODE (t) == STATEMENT_LIST)
14971 t = expr_last (t);
14972 else
14973 break;
14974 }
14975 if (TREE_CODE (t) == MODIFY_EXPR
14976 && TREE_OPERAND (t, 0) == temp)
14977 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14978 strict_overflow_p);
14979
14980 return false;
14981 }
14982
14983 case CALL_EXPR:
14984 {
14985 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14986 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14987
14988 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14989 get_callee_fndecl (t),
14990 arg0,
14991 arg1,
14992 strict_overflow_p);
14993 }
14994 case COMPOUND_EXPR:
14995 case MODIFY_EXPR:
14996 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14997 strict_overflow_p);
14998 case BIND_EXPR:
14999 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15000 strict_overflow_p);
15001 case SAVE_EXPR:
15002 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15003 strict_overflow_p);
15004
15005 default:
15006 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15007 TREE_TYPE (t));
15008 }
15009
15010 /* We don't know sign of `t', so be conservative and return false. */
15011 return false;
15012 }
15013
15014 /* Return true if T is known to be non-negative. If the return
15015 value is based on the assumption that signed overflow is undefined,
15016 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15017 *STRICT_OVERFLOW_P. */
15018
15019 bool
15020 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15021 {
15022 enum tree_code code;
15023 if (t == error_mark_node)
15024 return false;
15025
15026 code = TREE_CODE (t);
15027 switch (TREE_CODE_CLASS (code))
15028 {
15029 case tcc_binary:
15030 case tcc_comparison:
15031 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15032 TREE_TYPE (t),
15033 TREE_OPERAND (t, 0),
15034 TREE_OPERAND (t, 1),
15035 strict_overflow_p);
15036
15037 case tcc_unary:
15038 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15039 TREE_TYPE (t),
15040 TREE_OPERAND (t, 0),
15041 strict_overflow_p);
15042
15043 case tcc_constant:
15044 case tcc_declaration:
15045 case tcc_reference:
15046 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15047
15048 default:
15049 break;
15050 }
15051
15052 switch (code)
15053 {
15054 case TRUTH_AND_EXPR:
15055 case TRUTH_OR_EXPR:
15056 case TRUTH_XOR_EXPR:
15057 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15058 TREE_TYPE (t),
15059 TREE_OPERAND (t, 0),
15060 TREE_OPERAND (t, 1),
15061 strict_overflow_p);
15062 case TRUTH_NOT_EXPR:
15063 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15064 TREE_TYPE (t),
15065 TREE_OPERAND (t, 0),
15066 strict_overflow_p);
15067
15068 case COND_EXPR:
15069 case CONSTRUCTOR:
15070 case OBJ_TYPE_REF:
15071 case ASSERT_EXPR:
15072 case ADDR_EXPR:
15073 case WITH_SIZE_EXPR:
15074 case SSA_NAME:
15075 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15076
15077 default:
15078 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15079 }
15080 }
15081
15082 /* Return true if `t' is known to be non-negative. Handle warnings
15083 about undefined signed overflow. */
15084
15085 bool
15086 tree_expr_nonnegative_p (tree t)
15087 {
15088 bool ret, strict_overflow_p;
15089
15090 strict_overflow_p = false;
15091 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15092 if (strict_overflow_p)
15093 fold_overflow_warning (("assuming signed overflow does not occur when "
15094 "determining that expression is always "
15095 "non-negative"),
15096 WARN_STRICT_OVERFLOW_MISC);
15097 return ret;
15098 }
15099
15100
15101 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15102 For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.
15104
15105 If the return value is based on the assumption that signed overflow
15106 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15107 change *STRICT_OVERFLOW_P. */
15108
15109 bool
15110 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15111 bool *strict_overflow_p)
15112 {
15113 switch (code)
15114 {
15115 case ABS_EXPR:
15116 return tree_expr_nonzero_warnv_p (op0,
15117 strict_overflow_p);
15118
15119 case NOP_EXPR:
15120 {
15121 tree inner_type = TREE_TYPE (op0);
15122 tree outer_type = type;
15123
15124 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15125 && tree_expr_nonzero_warnv_p (op0,
15126 strict_overflow_p));
15127 }
15129
15130 case NON_LVALUE_EXPR:
15131 return tree_expr_nonzero_warnv_p (op0,
15132 strict_overflow_p);
15133
15134 default:
15135 break;
15136 }
15137
15138 return false;
15139 }
15140
15141 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15142 For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.
15144
15145 If the return value is based on the assumption that signed overflow
15146 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15147 change *STRICT_OVERFLOW_P. */
15148
15149 bool
15150 tree_binary_nonzero_warnv_p (enum tree_code code,
15151 tree type,
15152 tree op0,
15153 tree op1, bool *strict_overflow_p)
15154 {
15155 bool sub_strict_overflow_p;
15156 switch (code)
15157 {
15158 case POINTER_PLUS_EXPR:
15159 case PLUS_EXPR:
15160 if (TYPE_OVERFLOW_UNDEFINED (type))
15161 {
	  /* In the presence of negative values it is hard to say
	     anything definite.  */
15164 sub_strict_overflow_p = false;
15165 if (!tree_expr_nonnegative_warnv_p (op0,
15166 &sub_strict_overflow_p)
15167 || !tree_expr_nonnegative_warnv_p (op1,
15168 &sub_strict_overflow_p))
15169 return false;
15170 /* One of the operands must be positive and the other non-negative. */
15171 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15172 overflows, on a two's-complement machine the sum of two
15173 nonnegative numbers can never be zero. */
15174 return (tree_expr_nonzero_warnv_p (op0,
15175 strict_overflow_p)
15176 || tree_expr_nonzero_warnv_p (op1,
15177 strict_overflow_p));
15178 }
15179 break;
15180
15181 case MULT_EXPR:
15182 if (TYPE_OVERFLOW_UNDEFINED (type))
15183 {
15184 if (tree_expr_nonzero_warnv_p (op0,
15185 strict_overflow_p)
15186 && tree_expr_nonzero_warnv_p (op1,
15187 strict_overflow_p))
15188 {
15189 *strict_overflow_p = true;
15190 return true;
15191 }
15192 }
15193 break;
15194
15195 case MIN_EXPR:
15196 sub_strict_overflow_p = false;
15197 if (tree_expr_nonzero_warnv_p (op0,
15198 &sub_strict_overflow_p)
15199 && tree_expr_nonzero_warnv_p (op1,
15200 &sub_strict_overflow_p))
15201 {
15202 if (sub_strict_overflow_p)
15203 *strict_overflow_p = true;
/* MIN is one of its two operands, so when both are nonzero the
minimum is nonzero as well. */
return true;
15204 }
15205 break;
15206
15207 case MAX_EXPR:
15208 sub_strict_overflow_p = false;
15209 if (tree_expr_nonzero_warnv_p (op0,
15210 &sub_strict_overflow_p))
15211 {
15212 if (sub_strict_overflow_p)
15213 *strict_overflow_p = true;
15214
15215 /* When both operands are nonzero, MAX must be too. */
15216 if (tree_expr_nonzero_warnv_p (op1,
15217 strict_overflow_p))
15218 return true;
15219
15220 /* MAX where operand 0 is positive is positive. */
15221 return tree_expr_nonnegative_warnv_p (op0,
15222 strict_overflow_p);
15223 }
15224 /* MAX where operand 1 is positive is positive. */
15225 else if (tree_expr_nonzero_warnv_p (op1,
15226 &sub_strict_overflow_p)
15227 && tree_expr_nonnegative_warnv_p (op1,
15228 &sub_strict_overflow_p))
15229 {
15230 if (sub_strict_overflow_p)
15231 *strict_overflow_p = true;
15232 return true;
15233 }
15234 break;
15235
15236 case BIT_IOR_EXPR:
15237 return (tree_expr_nonzero_warnv_p (op1,
15238 strict_overflow_p)
15239 || tree_expr_nonzero_warnv_p (op0,
15240 strict_overflow_p));
15241
15242 default:
15243 break;
15244 }
15245
15246 return false;
15247 }
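
/* The reasoning behind the PLUS_EXPR case above, for an N-bit signed
   type with undefined overflow: if A >= 0, B >= 0 and at least one of
   them is nonzero, then 1 <= A + B <= 2 * (2^(N-1) - 1) < 2^N, so the
   sum cannot wrap to zero modulo 2^N; hence the result is nonzero
   without even consulting *STRICT_OVERFLOW_P.  */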
15248
15249 /* Return true when T is known to be nonzero. For addresses we further
15250 ensure that the object cannot be a weak symbol that may resolve to a
15251 null address; similar logic is present in nonzero_address_p in rtlanal.c.
15252
15253 If the return value is based on the assumption that signed overflow
15254 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15255 change *STRICT_OVERFLOW_P. */
15256
15257 bool
15258 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15259 {
15260 bool sub_strict_overflow_p;
15261 switch (TREE_CODE (t))
15262 {
15263 case INTEGER_CST:
15264 return !integer_zerop (t);
15265
15266 case ADDR_EXPR:
15267 {
15268 tree base = TREE_OPERAND (t, 0);
15269
15270 if (!DECL_P (base))
15271 base = get_base_address (base);
15272
15273 if (!base)
15274 return false;
15275
15276 /* For objects in the symbol table, check whether we know their address
15277 is nonzero. Don't do anything for variables and functions before the
15278 symtab is built; it is quite possible that they will be declared weak later. */
15279 if (DECL_P (base) && decl_in_symtab_p (base))
15280 {
15281 struct symtab_node *symbol;
15282
15283 symbol = symtab_node::get_create (base);
15284 if (symbol)
15285 return symbol->nonzero_address ();
15286 else
15287 return false;
15288 }
15289
15290 /* The address of a function-local object is never NULL. */
15291 if (DECL_P (base)
15292 && (DECL_CONTEXT (base)
15293 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15294 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15295 return true;
15296
15297 /* Constants are never weak. */
15298 if (CONSTANT_CLASS_P (base))
15299 return true;
15300
15301 return false;
15302 }
15303
15304 case COND_EXPR:
15305 sub_strict_overflow_p = false;
15306 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15307 &sub_strict_overflow_p)
15308 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15309 &sub_strict_overflow_p))
15310 {
15311 if (sub_strict_overflow_p)
15312 *strict_overflow_p = true;
15313 return true;
15314 }
15315 break;
15316
15317 default:
15318 break;
15319 }
15320 return false;
15321 }
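
/* Usage sketch, with VAR_DECL a hypothetical automatic variable of the
   current function:

     bool sop = false;
     tree addr = build_fold_addr_expr (var_decl);
     if (tree_single_nonzero_warnv_p (addr, &sop))
       ...

   The test succeeds through the function-local path above, whereas the
   address of a global is answered by the symbol table, which knows
   whether the symbol might be weak.  */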
15322
15323 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15324 attempt to fold the expression to a constant without modifying TYPE,
15325 OP0 or OP1.
15326
15327 If the expression can be simplified to a constant, return the
15328 constant; otherwise return NULL_TREE. */
15330
15331 tree
15332 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15333 {
15334 tree tem = fold_binary (code, type, op0, op1);
15335 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15336 }
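
/* Usage sketch: folding 2 + 3 down to a single INTEGER_CST.

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);

   C is the INTEGER_CST 5.  Had either operand been, say, a VAR_DECL,
   the TREE_CONSTANT check would make this return NULL_TREE instead.  */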
15337
15338 /* Given the components of a unary expression CODE, TYPE and OP0,
15339 attempt to fold the expression to a constant without modifying
15340 TYPE or OP0.
15341
15342 If the expression can be simplified to a constant, return the
15343 constant; otherwise return NULL_TREE. */
15345
15346 tree
15347 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15348 {
15349 tree tem = fold_unary (code, type, op0);
15350 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15351 }
15352
15353 /* If EXP represents referencing an element in a constant string
15354 (either via pointer arithmetic or array indexing), return the
15355 tree representing the value accessed; otherwise return NULL_TREE. */
15356
15357 tree
15358 fold_read_from_constant_string (tree exp)
15359 {
15360 if ((TREE_CODE (exp) == INDIRECT_REF
15361 || TREE_CODE (exp) == ARRAY_REF)
15362 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15363 {
15364 tree exp1 = TREE_OPERAND (exp, 0);
15365 tree index;
15366 tree string;
15367 location_t loc = EXPR_LOCATION (exp);
15368
15369 if (TREE_CODE (exp) == INDIRECT_REF)
15370 string = string_constant (exp1, &index);
15371 else
15372 {
15373 tree low_bound = array_ref_low_bound (exp);
15374 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15375
15376 /* Optimize the special case of a zero lower bound.
15377
15378 We convert the low_bound to sizetype to avoid some problems
15379 with constant folding. (E.g. suppose the lower bound is 1,
15380 and its mode is QI. Without the conversion, (ARRAY
15381 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15382 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15383 if (! integer_zerop (low_bound))
15384 index = size_diffop_loc (loc, index,
15385 fold_convert_loc (loc, sizetype, low_bound));
15386
15387 string = exp1;
15388 }
15389
15390 if (string
15391 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15392 && TREE_CODE (string) == STRING_CST
15393 && TREE_CODE (index) == INTEGER_CST
15394 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15395 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15396 == MODE_INT)
15397 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15398 return build_int_cst_type (TREE_TYPE (exp),
15399 (TREE_STRING_POINTER (string)
15400 [TREE_INT_CST_LOW (index)]));
15401 }
15402 return NULL_TREE;
15403 }
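
/* Illustrative example: for the GENERIC form of "abc"[1] -- an
   ARRAY_REF whose base is the STRING_CST "abc" and whose index is the
   INTEGER_CST 1 -- this returns build_int_cst_type (char_type_node,
   'b').  A non-constant or out-of-range index falls through to the
   NULL_TREE return instead.  */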
15404
15405 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15406 an integer, real, or fixed-point constant.
15407
15408 TYPE is the type of the result. */
15409
15410 static tree
15411 fold_negate_const (tree arg0, tree type)
15412 {
15413 tree t = NULL_TREE;
15414
15415 switch (TREE_CODE (arg0))
15416 {
15417 case INTEGER_CST:
15418 {
15419 bool overflow;
15420 wide_int val = wi::neg (arg0, &overflow);
15421 t = force_fit_type (type, val, 1,
15422 (overflow | TREE_OVERFLOW (arg0))
15423 && !TYPE_UNSIGNED (type));
15424 break;
15425 }
15426
15427 case REAL_CST:
15428 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15429 break;
15430
15431 case FIXED_CST:
15432 {
15433 FIXED_VALUE_TYPE f;
15434 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15435 &(TREE_FIXED_CST (arg0)), NULL,
15436 TYPE_SATURATING (type));
15437 t = build_fixed (type, f);
15438 /* Propagate overflow flags. */
15439 if (overflow_p | TREE_OVERFLOW (arg0))
15440 TREE_OVERFLOW (t) = 1;
15441 break;
15442 }
15443
15444 default:
15445 gcc_unreachable ();
15446 }
15447
15448 return t;
15449 }
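
/* Example of the overflow handling above: negating INT_MIN.

     tree m = TYPE_MIN_VALUE (integer_type_node);
     tree n = fold_negate_const (m, integer_type_node);

   The wide_int negation wraps back to INT_MIN and reports overflow, so
   N comes back with TREE_OVERFLOW set -- the signal later folds use to
   stay conservative.  */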
15450
15451 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15452 an integer constant or real constant.
15453
15454 TYPE is the type of the result. */
15455
15456 tree
15457 fold_abs_const (tree arg0, tree type)
15458 {
15459 tree t = NULL_TREE;
15460
15461 switch (TREE_CODE (arg0))
15462 {
15463 case INTEGER_CST:
15464 {
15465 /* If the value is unsigned or non-negative, then the absolute value
15466 is the same as the ordinary value. */
15467 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15468 t = arg0;
15469
15470 /* If the value is negative, then the absolute value is
15471 its negation. */
15472 else
15473 {
15474 bool overflow;
15475 wide_int val = wi::neg (arg0, &overflow);
15476 t = force_fit_type (type, val, -1,
15477 overflow | TREE_OVERFLOW (arg0));
15478 }
15479 }
15480 break;
15481
15482 case REAL_CST:
15483 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15484 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15485 else
15486 t = arg0;
15487 break;
15488
15489 default:
15490 gcc_unreachable ();
15491 }
15492
15493 return t;
15494 }
15495
15496 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15497 constant. TYPE is the type of the result. */
15498
15499 static tree
15500 fold_not_const (const_tree arg0, tree type)
15501 {
15502 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15503
15504 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15505 }
15506
15507 /* Given CODE, a relational operator, the target type TYPE, and two
15508 constant operands OP0 and OP1, return the result of the
15509 relational operation. If the result is not a compile-time
15510 constant, return NULL_TREE. */
15511
15512 static tree
15513 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15514 {
15515 int result, invert;
15516
15517 /* From here on, the only cases we handle are when the result is
15518 known to be a constant. */
15519
15520 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15521 {
15522 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15523 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15524
15525 /* Handle the cases where either operand is a NaN. */
15526 if (real_isnan (c0) || real_isnan (c1))
15527 {
15528 switch (code)
15529 {
15530 case EQ_EXPR:
15531 case ORDERED_EXPR:
15532 result = 0;
15533 break;
15534
15535 case NE_EXPR:
15536 case UNORDERED_EXPR:
15537 case UNLT_EXPR:
15538 case UNLE_EXPR:
15539 case UNGT_EXPR:
15540 case UNGE_EXPR:
15541 case UNEQ_EXPR:
15542 result = 1;
15543 break;
15544
15545 case LT_EXPR:
15546 case LE_EXPR:
15547 case GT_EXPR:
15548 case GE_EXPR:
15549 case LTGT_EXPR:
15550 if (flag_trapping_math)
15551 return NULL_TREE;
15552 result = 0;
15553 break;
15554
15555 default:
15556 gcc_unreachable ();
15557 }
15558
15559 return constant_boolean_node (result, type);
15560 }
15561
15562 return constant_boolean_node (real_compare (code, c0, c1), type);
15563 }
15564
15565 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15566 {
15567 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15568 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15569 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15570 }
15571
15572 /* Handle equality/inequality of complex constants. */
15573 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15574 {
15575 tree rcond = fold_relational_const (code, type,
15576 TREE_REALPART (op0),
15577 TREE_REALPART (op1));
15578 tree icond = fold_relational_const (code, type,
15579 TREE_IMAGPART (op0),
15580 TREE_IMAGPART (op1));
15581 if (code == EQ_EXPR)
15582 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15583 else if (code == NE_EXPR)
15584 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15585 else
15586 return NULL_TREE;
15587 }
15588
15589 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15590 {
15591 unsigned count = VECTOR_CST_NELTS (op0);
15592 tree *elts = XALLOCAVEC (tree, count);
15593 gcc_assert (VECTOR_CST_NELTS (op1) == count
15594 && TYPE_VECTOR_SUBPARTS (type) == count);
15595
15596 for (unsigned i = 0; i < count; i++)
15597 {
15598 tree elem_type = TREE_TYPE (type);
15599 tree elem0 = VECTOR_CST_ELT (op0, i);
15600 tree elem1 = VECTOR_CST_ELT (op1, i);
15601
15602 tree tem = fold_relational_const (code, elem_type,
15603 elem0, elem1);
15604
15605 if (tem == NULL_TREE)
15606 return NULL_TREE;
15607
15608 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15609 }
15610
15611 return build_vector (type, elts);
15612 }
15613
15614 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15615
15616 To compute GT, swap the arguments and do LT.
15617 To compute GE, do LT and invert the result.
15618 To compute LE, swap the arguments, do LT and invert the result.
15619 To compute NE, do EQ and invert the result.
15620
15621 Therefore, the code below must handle only EQ and LT. */
15622
15623 if (code == LE_EXPR || code == GT_EXPR)
15624 {
15625 tree tem = op0;
15626 op0 = op1;
15627 op1 = tem;
15628 code = swap_tree_comparison (code);
15629 }
15630
15631 /* Note that it is safe to invert for real values here because we
15632 have already handled the one case where it matters. */
15633
15634 invert = 0;
15635 if (code == NE_EXPR || code == GE_EXPR)
15636 {
15637 invert = 1;
15638 code = invert_tree_comparison (code, false);
15639 }
15640
15641 /* Compute a result for LT or EQ if the arguments permit;
15642 otherwise return NULL_TREE. */
15643 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15644 {
15645 if (code == EQ_EXPR)
15646 result = tree_int_cst_equal (op0, op1);
15647 else
15648 result = tree_int_cst_lt (op0, op1);
15649 }
15650 else
15651 return NULL_TREE;
15652
15653 if (invert)
15654 result ^= 1;
15655 return constant_boolean_node (result, type);
15656 }
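
/* Example of the NaN handling above:

     REAL_VALUE_TYPE r;
     real_nan (&r, "", 1, TYPE_MODE (double_type_node));
     tree nan = build_real (double_type_node, r);
     tree t = fold_relational_const (NE_EXPR, boolean_type_node, nan, nan);

   T is boolean_true_node, since NaN != NaN.  LT_EXPR on the same
   operands folds to boolean_false_node only when !flag_trapping_math;
   otherwise NULL_TREE is returned so that the comparison, and the
   invalid-operation exception it may raise, survive to run time.  */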
15657
15658 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15659 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15660 itself. */
15661
15662 tree
15663 fold_build_cleanup_point_expr (tree type, tree expr)
15664 {
15665 /* If the expression does not have side effects then we don't have to wrap
15666 it with a cleanup point expression. */
15667 if (!TREE_SIDE_EFFECTS (expr))
15668 return expr;
15669
15670 /* If the expression is a return, check whether its operand, or the
15671 right-hand side of the MODIFY_EXPR it contains, is free of side
15672 effects. If either has none, we don't need to wrap the expression
15673 in a cleanup point expression. Note we don't check the left-hand
15674 side of the modify because it should always be a return decl. */
15675 if (TREE_CODE (expr) == RETURN_EXPR)
15676 {
15677 tree op = TREE_OPERAND (expr, 0);
15678 if (!op || !TREE_SIDE_EFFECTS (op))
15679 return expr;
15680 op = TREE_OPERAND (op, 1);
15681 if (!TREE_SIDE_EFFECTS (op))
15682 return expr;
15683 }
15684
15685 return build1 (CLEANUP_POINT_EXPR, type, expr);
15686 }
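
/* For example, a bare variable reference passed in here comes straight
   back out (no side effects, nothing to clean up), while a call such as
   f () is wrapped in a CLEANUP_POINT_EXPR so that temporaries created
   for the call are destroyed once the full expression is done.  */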
15687
15688 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15689 of an indirection through OP0, or NULL_TREE if no simplification is
15690 possible. */
15691
15692 tree
15693 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15694 {
15695 tree sub = op0;
15696 tree subtype;
15697
15698 STRIP_NOPS (sub);
15699 subtype = TREE_TYPE (sub);
15700 if (!POINTER_TYPE_P (subtype))
15701 return NULL_TREE;
15702
15703 if (TREE_CODE (sub) == ADDR_EXPR)
15704 {
15705 tree op = TREE_OPERAND (sub, 0);
15706 tree optype = TREE_TYPE (op);
15707 /* *&CONST_DECL -> the value of the const decl. */
15708 if (TREE_CODE (op) == CONST_DECL)
15709 return DECL_INITIAL (op);
15710 /* *&p => p; make sure to handle *&"str"[cst] here. */
15711 if (type == optype)
15712 {
15713 tree fop = fold_read_from_constant_string (op);
15714 if (fop)
15715 return fop;
15716 else
15717 return op;
15718 }
15719 /* *(foo *)&fooarray => fooarray[0] */
15720 else if (TREE_CODE (optype) == ARRAY_TYPE
15721 && type == TREE_TYPE (optype)
15722 && (!in_gimple_form
15723 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15724 {
15725 tree type_domain = TYPE_DOMAIN (optype);
15726 tree min_val = size_zero_node;
15727 if (type_domain && TYPE_MIN_VALUE (type_domain))
15728 min_val = TYPE_MIN_VALUE (type_domain);
15729 if (in_gimple_form
15730 && TREE_CODE (min_val) != INTEGER_CST)
15731 return NULL_TREE;
15732 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15733 NULL_TREE, NULL_TREE);
15734 }
15735 /* *(foo *)&complexfoo => __real__ complexfoo */
15736 else if (TREE_CODE (optype) == COMPLEX_TYPE
15737 && type == TREE_TYPE (optype))
15738 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15739 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15740 else if (TREE_CODE (optype) == VECTOR_TYPE
15741 && type == TREE_TYPE (optype))
15742 {
15743 tree part_width = TYPE_SIZE (type);
15744 tree index = bitsize_int (0);
15745 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15746 }
15747 }
15748
15749 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15750 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15751 {
15752 tree op00 = TREE_OPERAND (sub, 0);
15753 tree op01 = TREE_OPERAND (sub, 1);
15754
15755 STRIP_NOPS (op00);
15756 if (TREE_CODE (op00) == ADDR_EXPR)
15757 {
15758 tree op00type;
15759 op00 = TREE_OPERAND (op00, 0);
15760 op00type = TREE_TYPE (op00);
15761
15762 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15763 if (TREE_CODE (op00type) == VECTOR_TYPE
15764 && type == TREE_TYPE (op00type))
15765 {
15766 HOST_WIDE_INT offset = tree_to_shwi (op01);
15767 tree part_width = TYPE_SIZE (type);
15768 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
15769 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15770 tree index = bitsize_int (indexi);
15771
15772 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15773 return fold_build3_loc (loc,
15774 BIT_FIELD_REF, type, op00,
15775 part_width, index);
15776
15777 }
15778 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15779 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15780 && type == TREE_TYPE (op00type))
15781 {
15782 tree size = TYPE_SIZE_UNIT (type);
15783 if (tree_int_cst_equal (size, op01))
15784 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15785 }
15786 /* ((foo *)&fooarray)[1] => fooarray[1] */
15787 else if (TREE_CODE (op00type) == ARRAY_TYPE
15788 && type == TREE_TYPE (op00type))
15789 {
15790 tree type_domain = TYPE_DOMAIN (op00type);
15791 tree min_val = size_zero_node;
15792 if (type_domain && TYPE_MIN_VALUE (type_domain))
15793 min_val = TYPE_MIN_VALUE (type_domain);
15794 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15795 TYPE_SIZE_UNIT (type));
15796 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15797 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15798 NULL_TREE, NULL_TREE);
15799 }
15800 }
15801 }
15802
15803 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15804 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15805 && type == TREE_TYPE (TREE_TYPE (subtype))
15806 && (!in_gimple_form
15807 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15808 {
15809 tree type_domain;
15810 tree min_val = size_zero_node;
15811 sub = build_fold_indirect_ref_loc (loc, sub);
15812 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15813 if (type_domain && TYPE_MIN_VALUE (type_domain))
15814 min_val = TYPE_MIN_VALUE (type_domain);
15815 if (in_gimple_form
15816 && TREE_CODE (min_val) != INTEGER_CST)
15817 return NULL_TREE;
15818 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15819 NULL_TREE);
15820 }
15821
15822 return NULL_TREE;
15823 }
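
/* Usage sketch for the array case above, with A_DECL a hypothetical
   VAR_DECL of type int[4] and LOC some location:

     tree addr = build_fold_addr_expr (a_decl);
     tree elt = fold_indirect_ref_1 (loc, integer_type_node, addr);

   ADDR has type int (*)[4]; because integer_type_node is the array's
   element type, the "*(foo *)&fooarray => fooarray[0]" rule fires and
   ELT is the ARRAY_REF a_decl[0] rather than a raw INDIRECT_REF.  */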
15824
15825 /* Builds an expression for an indirection through T, simplifying some
15826 cases. */
15827
15828 tree
15829 build_fold_indirect_ref_loc (location_t loc, tree t)
15830 {
15831 tree type = TREE_TYPE (TREE_TYPE (t));
15832 tree sub = fold_indirect_ref_1 (loc, type, t);
15833
15834 if (sub)
15835 return sub;
15836
15837 return build1_loc (loc, INDIRECT_REF, type, t);
15838 }
15839
15840 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15841
15842 tree
15843 fold_indirect_ref_loc (location_t loc, tree t)
15844 {
15845 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15846
15847 if (sub)
15848 return sub;
15849 else
15850 return t;
15851 }
15852
15853 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15854 whose result is ignored. The type of the returned tree need not be
15855 the same as the original expression. */
15856
15857 tree
15858 fold_ignored_result (tree t)
15859 {
15860 if (!TREE_SIDE_EFFECTS (t))
15861 return integer_zero_node;
15862
15863 for (;;)
15864 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15865 {
15866 case tcc_unary:
15867 t = TREE_OPERAND (t, 0);
15868 break;
15869
15870 case tcc_binary:
15871 case tcc_comparison:
15872 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15873 t = TREE_OPERAND (t, 0);
15874 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15875 t = TREE_OPERAND (t, 1);
15876 else
15877 return t;
15878 break;
15879
15880 case tcc_expression:
15881 switch (TREE_CODE (t))
15882 {
15883 case COMPOUND_EXPR:
15884 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15885 return t;
15886 t = TREE_OPERAND (t, 0);
15887 break;
15888
15889 case COND_EXPR:
15890 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15891 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15892 return t;
15893 t = TREE_OPERAND (t, 0);
15894 break;
15895
15896 default:
15897 return t;
15898 }
15899 break;
15900
15901 default:
15902 return t;
15903 }
15904 }
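
/* For example, if the value of "x + f ()" is ignored, the tcc_binary
   case keeps only the operand with side effects and the loop then stops
   at the bare CALL_EXPR f (); a side-effect-free expression such as
   "x + 1" collapses to integer_zero_node at the top of the function.  */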
15905
15906 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15907
15908 tree
15909 round_up_loc (location_t loc, tree value, unsigned int divisor)
15910 {
15911 tree div = NULL_TREE;
15912
15913 if (divisor == 1)
15914 return value;
15915
15916 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15917 have to do anything. Only do this when we are not given a
15918 constant, because for a constant the check is more expensive than
15919 simply doing the rounding. */
15920 if (TREE_CODE (value) != INTEGER_CST)
15921 {
15922 div = build_int_cst (TREE_TYPE (value), divisor);
15923
15924 if (multiple_of_p (TREE_TYPE (value), value, div))
15925 return value;
15926 }
15927
15928 /* If divisor is a power of two, simplify this to bit manipulation. */
15929 if (divisor == (divisor & -divisor))
15930 {
15931 if (TREE_CODE (value) == INTEGER_CST)
15932 {
15933 wide_int val = value;
15934 bool overflow_p;
15935
15936 if ((val & (divisor - 1)) == 0)
15937 return value;
15938
15939 overflow_p = TREE_OVERFLOW (value);
15940 val &= ~(divisor - 1);
15941 val += divisor;
15942 if (val == 0)
15943 overflow_p = true;
15944
15945 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15946 }
15947 else
15948 {
15949 tree t;
15950
15951 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15952 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15953 t = build_int_cst (TREE_TYPE (value), -divisor);
15954 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15955 }
15956 }
15957 else
15958 {
15959 if (!div)
15960 div = build_int_cst (TREE_TYPE (value), divisor);
15961 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15962 value = size_binop_loc (loc, MULT_EXPR, value, div);
15963 }
15964
15965 return value;
15966 }
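
/* Worked example of the power-of-two path: round_up (size_int (13), 8)
   computes (13 & ~7) + 8 == 16 via the wide_int manipulation above,
   i.e. the usual (VALUE + 7) & -8 trick; a non-power-of-two divisor
   such as 12 falls back to CEIL_DIV_EXPR followed by MULT_EXPR.  */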
15967
15968 /* Likewise, but round down. */
15969
15970 tree
15971 round_down_loc (location_t loc, tree value, int divisor)
15972 {
15973 tree div = NULL_TREE;
15974
15975 gcc_assert (divisor > 0);
15976 if (divisor == 1)
15977 return value;
15978
15979 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15980 have to do anything. Only do this when we are not given a
15981 constant, because for a constant the check is more expensive than
15982 simply doing the rounding. */
15983 if (TREE_CODE (value) != INTEGER_CST)
15984 {
15985 div = build_int_cst (TREE_TYPE (value), divisor);
15986
15987 if (multiple_of_p (TREE_TYPE (value), value, div))
15988 return value;
15989 }
15990
15991 /* If divisor is a power of two, simplify this to bit manipulation. */
15992 if (divisor == (divisor & -divisor))
15993 {
15994 tree t;
15995
15996 t = build_int_cst (TREE_TYPE (value), -divisor);
15997 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15998 }
15999 else
16000 {
16001 if (!div)
16002 div = build_int_cst (TREE_TYPE (value), divisor);
16003 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16004 value = size_binop_loc (loc, MULT_EXPR, value, div);
16005 }
16006
16007 return value;
16008 }
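
/* Likewise, round_down (size_int (13), 8) reduces to 13 & -8 == 8 on
   the power-of-two path, and to FLOOR_DIV_EXPR followed by MULT_EXPR
   otherwise.  */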
16009
16010 /* Return a pointer to the base of the object addressed by EXP and
16011 extract the offset of the access, storing it in *PBITPOS and
16012 *POFFSET. */
16013
16014 static tree
16015 split_address_to_core_and_offset (tree exp,
16016 HOST_WIDE_INT *pbitpos, tree *poffset)
16017 {
16018 tree core;
16019 machine_mode mode;
16020 int unsignedp, volatilep;
16021 HOST_WIDE_INT bitsize;
16022 location_t loc = EXPR_LOCATION (exp);
16023
16024 if (TREE_CODE (exp) == ADDR_EXPR)
16025 {
16026 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16027 poffset, &mode, &unsignedp, &volatilep,
16028 false);
16029 core = build_fold_addr_expr_loc (loc, core);
16030 }
16031 else
16032 {
16033 core = exp;
16034 *pbitpos = 0;
16035 *poffset = NULL_TREE;
16036 }
16037
16038 return core;
16039 }
16040
16041 /* Return true if the addresses of E1 and E2 differ by a constant,
16042 false otherwise. If they do, store E1 - E2 in *DIFF. */
16043
16044 bool
16045 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16046 {
16047 tree core1, core2;
16048 HOST_WIDE_INT bitpos1, bitpos2;
16049 tree toffset1, toffset2, tdiff, type;
16050
16051 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16052 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16053
16054 if (bitpos1 % BITS_PER_UNIT != 0
16055 || bitpos2 % BITS_PER_UNIT != 0
16056 || !operand_equal_p (core1, core2, 0))
16057 return false;
16058
16059 if (toffset1 && toffset2)
16060 {
16061 type = TREE_TYPE (toffset1);
16062 if (type != TREE_TYPE (toffset2))
16063 toffset2 = fold_convert (type, toffset2);
16064
16065 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16066 if (!cst_and_fits_in_hwi (tdiff))
16067 return false;
16068
16069 *diff = int_cst_value (tdiff);
16070 }
16071 else if (toffset1 || toffset2)
16072 {
16073 /* If only one of the offsets is non-constant, the difference cannot
16074 be a constant. */
16075 return false;
16076 }
16077 else
16078 *diff = 0;
16079
16080 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16081 return true;
16082 }
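
/* Usage sketch, with A a hypothetical int[10] in scope and ADDR3/ADDR1
   the ADDR_EXPRs of a[3] and a[1]:

     HOST_WIDE_INT d;
     if (ptr_difference_const (addr3, addr1, &d))
       ...

   Both addresses split to the common core &a with constant bit
   positions, so the call succeeds and D is 2 * sizeof (int) bytes.  */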
16083
16084 /* Simplify the floating-point expression EXP when the sign of the
16085 result is not significant. Return NULL_TREE if no simplification
16086 is possible. */
16087
16088 tree
16089 fold_strip_sign_ops (tree exp)
16090 {
16091 tree arg0, arg1;
16092 location_t loc = EXPR_LOCATION (exp);
16093
16094 switch (TREE_CODE (exp))
16095 {
16096 case ABS_EXPR:
16097 case NEGATE_EXPR:
16098 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16099 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16100
16101 case MULT_EXPR:
16102 case RDIV_EXPR:
16103 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16104 return NULL_TREE;
16105 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16106 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16107 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16108 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16109 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16110 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16111 break;
16112
16113 case COMPOUND_EXPR:
16114 arg0 = TREE_OPERAND (exp, 0);
16115 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16116 if (arg1)
16117 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16118 break;
16119
16120 case COND_EXPR:
16121 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16122 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16123 if (arg0 || arg1)
16124 return fold_build3_loc (loc,
16125 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16126 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16127 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16128 break;
16129
16130 case CALL_EXPR:
16131 {
16132 const enum built_in_function fcode = builtin_mathfn_code (exp);
16133 switch (fcode)
16134 {
16135 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16136 /* Strip the copysign call and return its first argument. */
16137 arg0 = CALL_EXPR_ARG (exp, 0);
16138 arg1 = CALL_EXPR_ARG (exp, 1);
16139 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16140
16141 default:
16142 /* Strip sign ops from the argument of "odd" math functions. */
16143 if (negate_mathfn_p (fcode))
16144 {
16145 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16146 if (arg0)
16147 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16148 }
16149 break;
16150 }
16151 }
16152 break;
16153
16154 default:
16155 break;
16156 }
16157 return NULL_TREE;
16158 }
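
/* Example: when folding cos (-x * y) the builtin folder may call this
   on the argument, since cos is even; the NEGATE_EXPR is stripped
   through the MULT_EXPR case and the call becomes cos (x * y).  This
   assumes sign-dependent rounding is not honored (-frounding-math off),
   as HONOR_SIGN_DEPENDENT_ROUNDING otherwise blocks the MULT_EXPR and
   RDIV_EXPR cases.  */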