/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

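/* As an informal illustration (not part of this file's interface), a
   caller computing a constant object size might write, with NELTS and
   EXPR standing for hypothetical trees:

     tree bytes = size_binop (MULT_EXPR, nelts, size_int (4));
     tree simplified = fold (expr);

   using the entry points described above.  */
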
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
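
/* The encoding assigns one bit to each elementary outcome: 1 for
   "less", 2 for "equal", 4 for "greater" and 8 for "unordered".  For
   example, COMPCODE_LE (3) is COMPCODE_LT | COMPCODE_EQ, and
   COMPCODE_NE (13) is COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD, so
   combining two comparisons of the same operands with AND or OR
   reduces to ANDing or ORing their compcodes.  */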

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
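
/* For example, given INTEGER_CST operands of value 12 and 4 this
   returns a constant 3; with operands 13 and 4 it returns NULL_TREE,
   since the remainder is nonzero.  */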
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
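
/* A sketch of how a caller might use the deferral machinery above
   (EXPR and STMT are hypothetical, illustrative only):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res != expr, stmt,
                                     WARN_STRICT_OVERFLOW_MISC);

   so the deferred warning is only issued if the fold actually
   changed something that the caller goes on to use.  */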
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
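
/* For a 32-bit signed type, only the single value INT_MIN (bit
   pattern 0x80000000, the lone sign bit) fails this check, since
   -INT_MIN is not representable.  */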

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
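
/* Some of the transformations above, in expression form (each is
   performed only when the overflow and rounding checks allow it):

     -(~A)            ->  A + 1
     -(A + B)         ->  (-B) - A  or  (-A) - B
     -(A - B)         ->  B - A
     -f(x)            ->  f(-x)  for odd math functions f
     -((int) x >> 31) ->  (int) ((unsigned) x >> 31)  */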

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   which goes in *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except for a
   literal, which goes in *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
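
/* A worked example with hypothetical operands: splitting V + 5 with
   CODE == PLUS_EXPR stores the literal 5 in *LITP and returns V;
   splitting V - 5 instead stores 5 in *MINUS_LITP, recording that the
   literal was subtracted.  */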

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
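
/* For instance, int_const_binop (PLUS_EXPR, <2>, <3>), where <N>
   stands for an INTEGER_CST of value N, yields the constant 5 in the
   type of the first operand; overflow, if any, is recorded through
   TREE_OVERFLOW on the result.  */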

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;
      /* Don't constant fold this floating point operation if the
         result may be dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

1467 result as argument put those cases that need it here. */
1468 switch (code)
1469 {
1470 case COMPLEX_EXPR:
1471 if ((TREE_CODE (arg1) == REAL_CST
1472 && TREE_CODE (arg2) == REAL_CST)
1473 || (TREE_CODE (arg1) == INTEGER_CST
1474 && TREE_CODE (arg2) == INTEGER_CST))
1475 return build_complex (type, arg1, arg2);
1476 return NULL_TREE;
1477
1478 case VEC_PACK_TRUNC_EXPR:
1479 case VEC_PACK_FIX_TRUNC_EXPR:
1480 {
1481 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1482 tree *elts;
1483
1484 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1485 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1486 if (TREE_CODE (arg1) != VECTOR_CST
1487 || TREE_CODE (arg2) != VECTOR_CST)
1488 return NULL_TREE;
1489
1490 elts = XALLOCAVEC (tree, nelts);
1491 if (!vec_cst_ctor_to_array (arg1, elts)
1492 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1493 return NULL_TREE;
1494
1495 for (i = 0; i < nelts; i++)
1496 {
1497 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1498 ? NOP_EXPR : FIX_TRUNC_EXPR,
1499 TREE_TYPE (type), elts[i]);
1500 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1501 return NULL_TREE;
1502 }
1503
1504 return build_vector (type, elts);
1505 }
1506
1507 case VEC_WIDEN_MULT_LO_EXPR:
1508 case VEC_WIDEN_MULT_HI_EXPR:
1509 case VEC_WIDEN_MULT_EVEN_EXPR:
1510 case VEC_WIDEN_MULT_ODD_EXPR:
1511 {
1512 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1513 unsigned int out, ofs, scale;
1514 tree *elts;
1515
1516 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1517 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1518 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1519 return NULL_TREE;
1520
1521 elts = XALLOCAVEC (tree, nelts * 4);
1522 if (!vec_cst_ctor_to_array (arg1, elts)
1523 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1524 return NULL_TREE;
1525
1526 if (code == VEC_WIDEN_MULT_LO_EXPR)
1527 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1528 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1529 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1530 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1531 scale = 1, ofs = 0;
1532 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1533 scale = 1, ofs = 1;
1534
1535 for (out = 0; out < nelts; out++)
1536 {
1537 unsigned int in1 = (out << scale) + ofs;
1538 unsigned int in2 = in1 + nelts * 2;
1539 tree t1, t2;
1540
1541 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1542 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1543
1544 if (t1 == NULL_TREE || t2 == NULL_TREE)
1545 return NULL_TREE;
1546 elts[out] = const_binop (MULT_EXPR, t1, t2);
1547 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1548 return NULL_TREE;
1549 }
1550
1551 return build_vector (type, elts);
1552 }
1553
1554 default:;
1555 }
1556
1557 /* Make sure type and arg0 have the same saturating flag. */
1558 gcc_checking_assert (TYPE_SATURATING (type)
1559 == TYPE_SATURATING (TREE_TYPE (arg1)));
1560 return const_binop (code, arg1, arg2);
1561 }

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
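
/* For example, with sizetype constants 4 and 12 the result is the
   ssizetype constant -8: the subtraction is performed the way round
   that cannot overflow and the result is then negated.  */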
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
1918
1919 /* A subroutine of fold_convert_const handling conversions of a
1920 FIXED_CST to an integer type. */
1921
1922 static tree
1923 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1924 {
1925 tree t;
1926 double_int temp, temp_trunc;
1927 unsigned int mode;
1928
1929 /* Right-shift FIXED_CST into temp by fbit. */
1930 temp = TREE_FIXED_CST (arg1).data;
1931 mode = TREE_FIXED_CST (arg1).mode;
1932 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1933 {
1934 temp = temp.rshift (GET_MODE_FBIT (mode),
1935 HOST_BITS_PER_DOUBLE_INT,
1936 SIGNED_FIXED_POINT_MODE_P (mode));
1937
1938 /* Left-shift temp into temp_trunc by fbit. */
1939 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1940 HOST_BITS_PER_DOUBLE_INT,
1941 SIGNED_FIXED_POINT_MODE_P (mode));
1942 }
1943 else
1944 {
1945 temp = double_int_zero;
1946 temp_trunc = double_int_zero;
1947 }
1948
1949 /* If FIXED_CST is negative, we need to round the value toward 0:
1950 add 1 to temp when the discarded fractional bits are nonzero. */
1951 if (SIGNED_FIXED_POINT_MODE_P (mode)
1952 && temp_trunc.is_negative ()
1953 && TREE_FIXED_CST (arg1).data != temp_trunc)
1954 temp += double_int_one;
1955
1956 /* Given a fixed-point constant, make new constant with new type,
1957 appropriately sign-extended or truncated. */
1958 t = force_fit_type (type, temp, -1,
1959 (temp.is_negative ()
1960 && (TYPE_UNSIGNED (type)
1961 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1962 | TREE_OVERFLOW (arg1));
1963
1964 return t;
1965 }
1966
1967 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1968 to another floating-point type. */
1969
1970 static tree
1971 fold_convert_const_real_from_real (tree type, const_tree arg1)
1972 {
1973 REAL_VALUE_TYPE value;
1974 tree t;
1975
1976 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1977 t = build_real (type, value);
1978
1979 /* If converting an infinity or NAN to a representation that doesn't
1980 have one, set the overflow bit so that we can produce some kind of
1981 error message at the appropriate point if necessary. It's not the
1982 most user-friendly message, but it's better than nothing. */
1983 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1984 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1985 TREE_OVERFLOW (t) = 1;
1986 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1987 && !MODE_HAS_NANS (TYPE_MODE (type)))
1988 TREE_OVERFLOW (t) = 1;
1989 /* Regular overflow: the conversion produced an infinity in a mode
1990 that can't represent one. */
1991 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1992 && REAL_VALUE_ISINF (value)
1993 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1994 TREE_OVERFLOW (t) = 1;
1995 else
1996 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1997 return t;
1998 }
1999
2000 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2001 to a floating-point type. */
2002
2003 static tree
2004 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2005 {
2006 REAL_VALUE_TYPE value;
2007 tree t;
2008
2009 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2010 t = build_real (type, value);
2011
2012 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2013 return t;
2014 }
2015
2016 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2017 to another fixed-point type. */
2018
2019 static tree
2020 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2021 {
2022 FIXED_VALUE_TYPE value;
2023 tree t;
2024 bool overflow_p;
2025
2026 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2027 TYPE_SATURATING (type));
2028 t = build_fixed (type, value);
2029
2030 /* Propagate overflow flags. */
2031 if (overflow_p | TREE_OVERFLOW (arg1))
2032 TREE_OVERFLOW (t) = 1;
2033 return t;
2034 }
2035
2036 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2037 to a fixed-point type. */
2038
2039 static tree
2040 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2041 {
2042 FIXED_VALUE_TYPE value;
2043 tree t;
2044 bool overflow_p;
2045 double_int di;
2046
2047 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2048
2049 di.low = TREE_INT_CST_ELT (arg1, 0);
2050 if (TREE_INT_CST_NUNITS (arg1) == 1)
2051 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2052 else
2053 di.high = TREE_INT_CST_ELT (arg1, 1);
2054
2055 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2056 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2057 TYPE_SATURATING (type));
2058 t = build_fixed (type, value);
2059
2060 /* Propagate overflow flags. */
2061 if (overflow_p | TREE_OVERFLOW (arg1))
2062 TREE_OVERFLOW (t) = 1;
2063 return t;
2064 }
2065
2066 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2067 to a fixed-point type. */
2068
2069 static tree
2070 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2071 {
2072 FIXED_VALUE_TYPE value;
2073 tree t;
2074 bool overflow_p;
2075
2076 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2077 &TREE_REAL_CST (arg1),
2078 TYPE_SATURATING (type));
2079 t = build_fixed (type, value);
2080
2081 /* Propagate overflow flags. */
2082 if (overflow_p | TREE_OVERFLOW (arg1))
2083 TREE_OVERFLOW (t) = 1;
2084 return t;
2085 }
2086
2087 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2088 type TYPE. If no simplification can be done, return NULL_TREE. */
2089
2090 static tree
2091 fold_convert_const (enum tree_code code, tree type, tree arg1)
2092 {
2093 if (TREE_TYPE (arg1) == type)
2094 return arg1;
2095
2096 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2097 || TREE_CODE (type) == OFFSET_TYPE)
2098 {
2099 if (TREE_CODE (arg1) == INTEGER_CST)
2100 return fold_convert_const_int_from_int (type, arg1);
2101 else if (TREE_CODE (arg1) == REAL_CST)
2102 return fold_convert_const_int_from_real (code, type, arg1);
2103 else if (TREE_CODE (arg1) == FIXED_CST)
2104 return fold_convert_const_int_from_fixed (type, arg1);
2105 }
2106 else if (TREE_CODE (type) == REAL_TYPE)
2107 {
2108 if (TREE_CODE (arg1) == INTEGER_CST)
2109 return build_real_from_int_cst (type, arg1);
2110 else if (TREE_CODE (arg1) == REAL_CST)
2111 return fold_convert_const_real_from_real (type, arg1);
2112 else if (TREE_CODE (arg1) == FIXED_CST)
2113 return fold_convert_const_real_from_fixed (type, arg1);
2114 }
2115 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2116 {
2117 if (TREE_CODE (arg1) == FIXED_CST)
2118 return fold_convert_const_fixed_from_fixed (type, arg1);
2119 else if (TREE_CODE (arg1) == INTEGER_CST)
2120 return fold_convert_const_fixed_from_int (type, arg1);
2121 else if (TREE_CODE (arg1) == REAL_CST)
2122 return fold_convert_const_fixed_from_real (type, arg1);
2123 }
2124 return NULL_TREE;
2125 }
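
/* Illustrative sketch, not part of GCC: a typical caller tries
   fold_convert_const first and only builds an explicit conversion
   node when no constant folding was possible.  The helper name is
   hypothetical.  */

static tree
example_convert_constant_or_build (location_t loc, tree type, tree arg)
{
  tree tem = fold_convert_const (NOP_EXPR, type, arg);
  if (tem != NULL_TREE)
    return tem;
  return fold_build1_loc (loc, NOP_EXPR, type, arg);
}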
2126
2127 /* Construct a vector of zero elements of vector type TYPE. */
2128
2129 static tree
2130 build_zero_vector (tree type)
2131 {
2132 tree t;
2133
2134 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2135 return build_vector_from_val (type, t);
2136 }
2137
2138 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2139
2140 bool
2141 fold_convertible_p (const_tree type, const_tree arg)
2142 {
2143 tree orig = TREE_TYPE (arg);
2144
2145 if (type == orig)
2146 return true;
2147
2148 if (TREE_CODE (arg) == ERROR_MARK
2149 || TREE_CODE (type) == ERROR_MARK
2150 || TREE_CODE (orig) == ERROR_MARK)
2151 return false;
2152
2153 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2154 return true;
2155
2156 switch (TREE_CODE (type))
2157 {
2158 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2159 case POINTER_TYPE: case REFERENCE_TYPE:
2160 case OFFSET_TYPE:
2161 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2162 || TREE_CODE (orig) == OFFSET_TYPE)
2163 return true;
2164 return (TREE_CODE (orig) == VECTOR_TYPE
2165 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2166
2167 case REAL_TYPE:
2168 case FIXED_POINT_TYPE:
2169 case COMPLEX_TYPE:
2170 case VECTOR_TYPE:
2171 case VOID_TYPE:
2172 return TREE_CODE (type) == TREE_CODE (orig);
2173
2174 default:
2175 return false;
2176 }
2177 }
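
/* Illustrative sketch, not part of GCC: fold_convertible_p lets a
   caller check that a single NOP_EXPR suffices before building it,
   instead of going through the front end's convert.  The helper name
   is hypothetical.  */

static tree
example_nop_convert (location_t loc, tree type, tree arg)
{
  if (!fold_convertible_p (type, arg))
    return NULL_TREE;
  return fold_build1_loc (loc, NOP_EXPR, type, arg);
}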
2178
2179 /* Convert expression ARG to type TYPE. Used by the middle-end for
2180 simple conversions in preference to calling the front-end's convert. */
2181
2182 tree
2183 fold_convert_loc (location_t loc, tree type, tree arg)
2184 {
2185 tree orig = TREE_TYPE (arg);
2186 tree tem;
2187
2188 if (type == orig)
2189 return arg;
2190
2191 if (TREE_CODE (arg) == ERROR_MARK
2192 || TREE_CODE (type) == ERROR_MARK
2193 || TREE_CODE (orig) == ERROR_MARK)
2194 return error_mark_node;
2195
2196 switch (TREE_CODE (type))
2197 {
2198 case POINTER_TYPE:
2199 case REFERENCE_TYPE:
2200 /* Handle conversions between pointers to different address spaces. */
2201 if (POINTER_TYPE_P (orig)
2202 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2203 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2204 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2205 /* fall through */
2206
2207 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2208 case OFFSET_TYPE:
2209 if (TREE_CODE (arg) == INTEGER_CST)
2210 {
2211 tem = fold_convert_const (NOP_EXPR, type, arg);
2212 if (tem != NULL_TREE)
2213 return tem;
2214 }
2215 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2216 || TREE_CODE (orig) == OFFSET_TYPE)
2217 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2218 if (TREE_CODE (orig) == COMPLEX_TYPE)
2219 return fold_convert_loc (loc, type,
2220 fold_build1_loc (loc, REALPART_EXPR,
2221 TREE_TYPE (orig), arg));
2222 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2223 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2224 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2225
2226 case REAL_TYPE:
2227 if (TREE_CODE (arg) == INTEGER_CST)
2228 {
2229 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2230 if (tem != NULL_TREE)
2231 return tem;
2232 }
2233 else if (TREE_CODE (arg) == REAL_CST)
2234 {
2235 tem = fold_convert_const (NOP_EXPR, type, arg);
2236 if (tem != NULL_TREE)
2237 return tem;
2238 }
2239 else if (TREE_CODE (arg) == FIXED_CST)
2240 {
2241 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2242 if (tem != NULL_TREE)
2243 return tem;
2244 }
2245
2246 switch (TREE_CODE (orig))
2247 {
2248 case INTEGER_TYPE:
2249 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2250 case POINTER_TYPE: case REFERENCE_TYPE:
2251 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2252
2253 case REAL_TYPE:
2254 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2255
2256 case FIXED_POINT_TYPE:
2257 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2258
2259 case COMPLEX_TYPE:
2260 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2261 return fold_convert_loc (loc, type, tem);
2262
2263 default:
2264 gcc_unreachable ();
2265 }
2266
2267 case FIXED_POINT_TYPE:
2268 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2269 || TREE_CODE (arg) == REAL_CST)
2270 {
2271 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2272 if (tem != NULL_TREE)
2273 goto fold_convert_exit;
2274 }
2275
2276 switch (TREE_CODE (orig))
2277 {
2278 case FIXED_POINT_TYPE:
2279 case INTEGER_TYPE:
2280 case ENUMERAL_TYPE:
2281 case BOOLEAN_TYPE:
2282 case REAL_TYPE:
2283 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2284
2285 case COMPLEX_TYPE:
2286 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2287 return fold_convert_loc (loc, type, tem);
2288
2289 default:
2290 gcc_unreachable ();
2291 }
2292
2293 case COMPLEX_TYPE:
2294 switch (TREE_CODE (orig))
2295 {
2296 case INTEGER_TYPE:
2297 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2298 case POINTER_TYPE: case REFERENCE_TYPE:
2299 case REAL_TYPE:
2300 case FIXED_POINT_TYPE:
2301 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2302 fold_convert_loc (loc, TREE_TYPE (type), arg),
2303 fold_convert_loc (loc, TREE_TYPE (type),
2304 integer_zero_node));
2305 case COMPLEX_TYPE:
2306 {
2307 tree rpart, ipart;
2308
2309 if (TREE_CODE (arg) == COMPLEX_EXPR)
2310 {
2311 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2312 TREE_OPERAND (arg, 0));
2313 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2314 TREE_OPERAND (arg, 1));
2315 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2316 }
2317
2318 arg = save_expr (arg);
2319 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2320 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2321 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2322 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2323 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2324 }
2325
2326 default:
2327 gcc_unreachable ();
2328 }
2329
2330 case VECTOR_TYPE:
2331 if (integer_zerop (arg))
2332 return build_zero_vector (type);
2333 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2334 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2335 || TREE_CODE (orig) == VECTOR_TYPE);
2336 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2337
2338 case VOID_TYPE:
2339 tem = fold_ignored_result (arg);
2340 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2341
2342 default:
2343 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2344 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2345 gcc_unreachable ();
2346 }
2347 fold_convert_exit:
2348 protected_set_expr_location_unshare (tem, loc);
2349 return tem;
2350 }
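
/* Illustrative sketch, not part of GCC: fold_convert_loc is the
   middle-end's entry point for simple conversions.  Converting the
   constant 1 to double_type_node, for instance, folds straight to a
   REAL_CST via fold_convert_const.  The helper name is hypothetical.  */

static tree
example_one_as_double (location_t loc)
{
  return fold_convert_loc (loc, double_type_node, integer_one_node);
}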
2351 \f
2352 /* Return false if X can be assumed not to be an lvalue, true
2353 otherwise. */
2354
2355 static bool
2356 maybe_lvalue_p (const_tree x)
2357 {
2358 /* We only need to wrap lvalue tree codes. */
2359 switch (TREE_CODE (x))
2360 {
2361 case VAR_DECL:
2362 case PARM_DECL:
2363 case RESULT_DECL:
2364 case LABEL_DECL:
2365 case FUNCTION_DECL:
2366 case SSA_NAME:
2367
2368 case COMPONENT_REF:
2369 case MEM_REF:
2370 case INDIRECT_REF:
2371 case ARRAY_REF:
2372 case ARRAY_RANGE_REF:
2373 case BIT_FIELD_REF:
2374 case OBJ_TYPE_REF:
2375
2376 case REALPART_EXPR:
2377 case IMAGPART_EXPR:
2378 case PREINCREMENT_EXPR:
2379 case PREDECREMENT_EXPR:
2380 case SAVE_EXPR:
2381 case TRY_CATCH_EXPR:
2382 case WITH_CLEANUP_EXPR:
2383 case COMPOUND_EXPR:
2384 case MODIFY_EXPR:
2385 case TARGET_EXPR:
2386 case COND_EXPR:
2387 case BIND_EXPR:
2388 break;
2389
2390 default:
2391 /* Assume the worst for front-end tree codes. */
2392 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2393 break;
2394 return false;
2395 }
2396
2397 return true;
2398 }
2399
2400 /* Return an expr equal to X but certainly not valid as an lvalue. */
2401
2402 tree
2403 non_lvalue_loc (location_t loc, tree x)
2404 {
2405 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2406 us. */
2407 if (in_gimple_form)
2408 return x;
2409
2410 if (! maybe_lvalue_p (x))
2411 return x;
2412 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2413 }
2414
2415 /* Historically, when pedantic, this returned an expr equal to X but
2416 not valid as a pedantic lvalue; it now just returns X with location LOC. */
2417
2418 static tree
2419 pedantic_non_lvalue_loc (location_t loc, tree x)
2420 {
2421 return protected_set_expr_location_unshare (x, loc);
2422 }
2423 \f
2424 /* Given a tree comparison code, return the code that is the logical inverse.
2425 It is generally not safe to do this for floating-point comparisons, except
2426 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2427 ERROR_MARK in this case. */
2428
2429 enum tree_code
2430 invert_tree_comparison (enum tree_code code, bool honor_nans)
2431 {
2432 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2433 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2434 return ERROR_MARK;
2435
2436 switch (code)
2437 {
2438 case EQ_EXPR:
2439 return NE_EXPR;
2440 case NE_EXPR:
2441 return EQ_EXPR;
2442 case GT_EXPR:
2443 return honor_nans ? UNLE_EXPR : LE_EXPR;
2444 case GE_EXPR:
2445 return honor_nans ? UNLT_EXPR : LT_EXPR;
2446 case LT_EXPR:
2447 return honor_nans ? UNGE_EXPR : GE_EXPR;
2448 case LE_EXPR:
2449 return honor_nans ? UNGT_EXPR : GT_EXPR;
2450 case LTGT_EXPR:
2451 return UNEQ_EXPR;
2452 case UNEQ_EXPR:
2453 return LTGT_EXPR;
2454 case UNGT_EXPR:
2455 return LE_EXPR;
2456 case UNGE_EXPR:
2457 return LT_EXPR;
2458 case UNLT_EXPR:
2459 return GE_EXPR;
2460 case UNLE_EXPR:
2461 return GT_EXPR;
2462 case ORDERED_EXPR:
2463 return UNORDERED_EXPR;
2464 case UNORDERED_EXPR:
2465 return ORDERED_EXPR;
2466 default:
2467 gcc_unreachable ();
2468 }
2469 }
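
/* Illustrative sketch, not part of GCC: without NaNs the inverse of
   LT_EXPR is GE_EXPR; with NaNs honored it is UNGE_EXPR, and under
   -ftrapping-math ERROR_MARK is returned instead because the inverse
   would trap differently.  */

static void
example_invert_lt (void)
{
  gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
  if (!flag_trapping_math)
    gcc_assert (invert_tree_comparison (LT_EXPR, true) == UNGE_EXPR);
}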
2470
2471 /* Similar, but return the comparison that results if the operands are
2472 swapped. This is safe for floating-point. */
2473
2474 enum tree_code
2475 swap_tree_comparison (enum tree_code code)
2476 {
2477 switch (code)
2478 {
2479 case EQ_EXPR:
2480 case NE_EXPR:
2481 case ORDERED_EXPR:
2482 case UNORDERED_EXPR:
2483 case LTGT_EXPR:
2484 case UNEQ_EXPR:
2485 return code;
2486 case GT_EXPR:
2487 return LT_EXPR;
2488 case GE_EXPR:
2489 return LE_EXPR;
2490 case LT_EXPR:
2491 return GT_EXPR;
2492 case LE_EXPR:
2493 return GE_EXPR;
2494 case UNGT_EXPR:
2495 return UNLT_EXPR;
2496 case UNGE_EXPR:
2497 return UNLE_EXPR;
2498 case UNLT_EXPR:
2499 return UNGT_EXPR;
2500 case UNLE_EXPR:
2501 return UNGE_EXPR;
2502 default:
2503 gcc_unreachable ();
2504 }
2505 }
2506
2507
2508 /* Convert a comparison tree code from an enum tree_code representation
2509 into a compcode bit-based encoding. This function is the inverse of
2510 compcode_to_comparison. */
2511
2512 static enum comparison_code
2513 comparison_to_compcode (enum tree_code code)
2514 {
2515 switch (code)
2516 {
2517 case LT_EXPR:
2518 return COMPCODE_LT;
2519 case EQ_EXPR:
2520 return COMPCODE_EQ;
2521 case LE_EXPR:
2522 return COMPCODE_LE;
2523 case GT_EXPR:
2524 return COMPCODE_GT;
2525 case NE_EXPR:
2526 return COMPCODE_NE;
2527 case GE_EXPR:
2528 return COMPCODE_GE;
2529 case ORDERED_EXPR:
2530 return COMPCODE_ORD;
2531 case UNORDERED_EXPR:
2532 return COMPCODE_UNORD;
2533 case UNLT_EXPR:
2534 return COMPCODE_UNLT;
2535 case UNEQ_EXPR:
2536 return COMPCODE_UNEQ;
2537 case UNLE_EXPR:
2538 return COMPCODE_UNLE;
2539 case UNGT_EXPR:
2540 return COMPCODE_UNGT;
2541 case LTGT_EXPR:
2542 return COMPCODE_LTGT;
2543 case UNGE_EXPR:
2544 return COMPCODE_UNGE;
2545 default:
2546 gcc_unreachable ();
2547 }
2548 }
2549
2550 /* Convert a compcode bit-based encoding of a comparison operator back
2551 to GCC's enum tree_code representation. This function is the
2552 inverse of comparison_to_compcode. */
2553
2554 static enum tree_code
2555 compcode_to_comparison (enum comparison_code code)
2556 {
2557 switch (code)
2558 {
2559 case COMPCODE_LT:
2560 return LT_EXPR;
2561 case COMPCODE_EQ:
2562 return EQ_EXPR;
2563 case COMPCODE_LE:
2564 return LE_EXPR;
2565 case COMPCODE_GT:
2566 return GT_EXPR;
2567 case COMPCODE_NE:
2568 return NE_EXPR;
2569 case COMPCODE_GE:
2570 return GE_EXPR;
2571 case COMPCODE_ORD:
2572 return ORDERED_EXPR;
2573 case COMPCODE_UNORD:
2574 return UNORDERED_EXPR;
2575 case COMPCODE_UNLT:
2576 return UNLT_EXPR;
2577 case COMPCODE_UNEQ:
2578 return UNEQ_EXPR;
2579 case COMPCODE_UNLE:
2580 return UNLE_EXPR;
2581 case COMPCODE_UNGT:
2582 return UNGT_EXPR;
2583 case COMPCODE_LTGT:
2584 return LTGT_EXPR;
2585 case COMPCODE_UNGE:
2586 return UNGE_EXPR;
2587 default:
2588 gcc_unreachable ();
2589 }
2590 }
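
/* Illustrative sketch, not part of GCC: the compcode values are chosen
   so that logical combinations of comparisons become bitwise operations
   on their codes; LE is exactly LT | EQ, and the common part of LE and
   GE is EQ.  */

static void
example_compcode_bits (void)
{
  gcc_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  gcc_assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);
}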
2591
2592 /* Return a tree for the comparison which is the combination of
2593 doing the AND or OR (depending on CODE) of the two operations LCODE
2594 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2595 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2596 if this makes the transformation invalid. */
2597
2598 tree
2599 combine_comparisons (location_t loc,
2600 enum tree_code code, enum tree_code lcode,
2601 enum tree_code rcode, tree truth_type,
2602 tree ll_arg, tree lr_arg)
2603 {
2604 bool honor_nans = HONOR_NANS (ll_arg);
2605 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2606 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2607 int compcode;
2608
2609 switch (code)
2610 {
2611 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2612 compcode = lcompcode & rcompcode;
2613 break;
2614
2615 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2616 compcode = lcompcode | rcompcode;
2617 break;
2618
2619 default:
2620 return NULL_TREE;
2621 }
2622
2623 if (!honor_nans)
2624 {
2625 /* Eliminate unordered comparisons, as well as LTGT and ORD
2626 which are not used unless the mode has NaNs. */
2627 compcode &= ~COMPCODE_UNORD;
2628 if (compcode == COMPCODE_LTGT)
2629 compcode = COMPCODE_NE;
2630 else if (compcode == COMPCODE_ORD)
2631 compcode = COMPCODE_TRUE;
2632 }
2633 else if (flag_trapping_math)
2634 {
2635 /* Check that the original operation and the optimized ones will trap
2636 under the same condition. */
2637 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2638 && (lcompcode != COMPCODE_EQ)
2639 && (lcompcode != COMPCODE_ORD);
2640 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2641 && (rcompcode != COMPCODE_EQ)
2642 && (rcompcode != COMPCODE_ORD);
2643 bool trap = (compcode & COMPCODE_UNORD) == 0
2644 && (compcode != COMPCODE_EQ)
2645 && (compcode != COMPCODE_ORD);
2646
2647 /* In a short-circuited boolean expression the LHS might be
2648 such that the RHS, if evaluated, will never trap. For
2649 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2650 if neither x nor y is NaN. (This is a mixed blessing: for
2651 example, the expression above will never trap, hence
2652 optimizing it to x < y would be invalid). */
2653 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2654 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2655 rtrap = false;
2656
2657 /* If the comparison was short-circuited, and only the RHS
2658 trapped, we may now generate a spurious trap. */
2659 if (rtrap && !ltrap
2660 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2661 return NULL_TREE;
2662
2663 /* If we changed the conditions that cause a trap, we lose. */
2664 if ((ltrap || rtrap) != trap)
2665 return NULL_TREE;
2666 }
2667
2668 if (compcode == COMPCODE_TRUE)
2669 return constant_boolean_node (true, truth_type);
2670 else if (compcode == COMPCODE_FALSE)
2671 return constant_boolean_node (false, truth_type);
2672 else
2673 {
2674 enum tree_code tcode;
2675
2676 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2677 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2678 }
2679 }
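
/* Illustrative sketch, not part of GCC: combining (a < b) || (a == b)
   into a <= b.  For floating-point operands combine_comparisons may
   return NULL_TREE when NaNs or -ftrapping-math make the rewrite
   unsafe.  The helper name is hypothetical.  */

static tree
example_combine_lt_or_eq (location_t loc, tree a, tree b)
{
  return combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			      boolean_type_node, a, b);
}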
2680 \f
2681 /* Return nonzero if two operands (typically of the same tree node)
2682 are necessarily equal. If either argument has side-effects this
2683 function returns zero. FLAGS modifies behavior as follows:
2684
2685 If OEP_ONLY_CONST is set, only return nonzero for constants.
2686 This function tests whether the operands are indistinguishable;
2687 it does not test whether they are equal using C's == operation.
2688 The distinction is important for IEEE floating point, because
2689 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2690 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2691
2692 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2693 even though it may hold multiple values during a function.
2694 This is because a GCC tree node guarantees that nothing else is
2695 executed between the evaluation of its "operands" (which may often
2696 be evaluated in arbitrary order). Hence if the operands themselves
2697 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2698 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2699 unset means assuming isochronic (or instantaneous) tree equivalence.
2700 Unless comparing arbitrary expression trees, such as from different
2701 statements, this flag can usually be left unset.
2702
2703 If OEP_PURE_SAME is set, then pure functions with identical arguments
2704 are considered the same. It is used when the caller has other ways
2705 to ensure that global memory is unchanged in between. */
2706
2707 int
2708 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2709 {
2710 /* If either is ERROR_MARK, they aren't equal. */
2711 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2712 || TREE_TYPE (arg0) == error_mark_node
2713 || TREE_TYPE (arg1) == error_mark_node)
2714 return 0;
2715
2716 /* Similarly, if either does not have a type (like a released SSA name),
2717 they aren't equal. */
2718 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2719 return 0;
2720
2721 /* Check equality of integer constants before bailing out due to
2722 precision differences. */
2723 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2724 return tree_int_cst_equal (arg0, arg1);
2725
2726 /* If both types don't have the same signedness, then we can't consider
2727 them equal. We must check this before the STRIP_NOPS calls
2728 because they may change the signedness of the arguments. As pointers
2729 strictly don't have a signedness, require either two pointers or
2730 two non-pointers as well. */
2731 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2732 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2733 return 0;
2734
2735 /* We cannot consider pointers to different address spaces equal. */
2736 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2737 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2738 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2739 return 0;
2740
2741 /* If both types don't have the same precision, then it is not safe
2742 to strip NOPs. */
2743 if (element_precision (TREE_TYPE (arg0))
2744 != element_precision (TREE_TYPE (arg1)))
2745 return 0;
2746
2747 STRIP_NOPS (arg0);
2748 STRIP_NOPS (arg1);
2749
2750 /* In case both args are comparisons but with different comparison
2751 code, try to swap the comparison operands of one arg to produce
2752 a match and compare that variant. */
2753 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2754 && COMPARISON_CLASS_P (arg0)
2755 && COMPARISON_CLASS_P (arg1))
2756 {
2757 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2758
2759 if (TREE_CODE (arg0) == swap_code)
2760 return operand_equal_p (TREE_OPERAND (arg0, 0),
2761 TREE_OPERAND (arg1, 1), flags)
2762 && operand_equal_p (TREE_OPERAND (arg0, 1),
2763 TREE_OPERAND (arg1, 0), flags);
2764 }
2765
2766 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2767 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2768 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2769 return 0;
2770
2771 /* This is needed for conversions and for COMPONENT_REF.
2772 Might as well play it safe and always test this. */
2773 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2774 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2775 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2776 return 0;
2777
2778 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2779 We don't care about side effects in that case because the SAVE_EXPR
2780 takes care of that for us. In all other cases, two expressions are
2781 equal if they have no side effects. If we have two identical
2782 expressions with side effects that should be treated the same due
2783 to the only side effects being identical SAVE_EXPR's, that will
2784 be detected in the recursive calls below.
2785 If we are taking an invariant address of two identical objects
2786 they are necessarily equal as well. */
2787 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2788 && (TREE_CODE (arg0) == SAVE_EXPR
2789 || (flags & OEP_CONSTANT_ADDRESS_OF)
2790 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2791 return 1;
2792
2793 /* Next handle constant cases, those for which we can return 1 even
2794 if ONLY_CONST is set. */
2795 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2796 switch (TREE_CODE (arg0))
2797 {
2798 case INTEGER_CST:
2799 return tree_int_cst_equal (arg0, arg1);
2800
2801 case FIXED_CST:
2802 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2803 TREE_FIXED_CST (arg1));
2804
2805 case REAL_CST:
2806 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2807 TREE_REAL_CST (arg1)))
2808 return 1;
2809
2810
2811 if (!HONOR_SIGNED_ZEROS (arg0))
2812 {
2813 /* If we do not distinguish between signed and unsigned zero,
2814 consider them equal. */
2815 if (real_zerop (arg0) && real_zerop (arg1))
2816 return 1;
2817 }
2818 return 0;
2819
2820 case VECTOR_CST:
2821 {
2822 unsigned i;
2823
2824 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2825 return 0;
2826
2827 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2828 {
2829 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2830 VECTOR_CST_ELT (arg1, i), flags))
2831 return 0;
2832 }
2833 return 1;
2834 }
2835
2836 case COMPLEX_CST:
2837 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2838 flags)
2839 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2840 flags));
2841
2842 case STRING_CST:
2843 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2844 && ! memcmp (TREE_STRING_POINTER (arg0),
2845 TREE_STRING_POINTER (arg1),
2846 TREE_STRING_LENGTH (arg0)));
2847
2848 case ADDR_EXPR:
2849 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2850 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2851 ? OEP_CONSTANT_ADDRESS_OF : 0);
2852 default:
2853 break;
2854 }
2855
2856 if (flags & OEP_ONLY_CONST)
2857 return 0;
2858
2859 /* Define macros to test an operand from arg0 and arg1 for equality and a
2860 variant that allows null and views null as being different from any
2861 non-null value. In the latter case, if either is null, both
2862 must be; otherwise, do the normal comparison. */
2863 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2864 TREE_OPERAND (arg1, N), flags)
2865
2866 #define OP_SAME_WITH_NULL(N) \
2867 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2868 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2869
2870 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2871 {
2872 case tcc_unary:
2873 /* Two conversions are equal only if signedness and modes match. */
2874 switch (TREE_CODE (arg0))
2875 {
2876 CASE_CONVERT:
2877 case FIX_TRUNC_EXPR:
2878 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2879 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2880 return 0;
2881 break;
2882 default:
2883 break;
2884 }
2885
2886 return OP_SAME (0);
2887
2888
2889 case tcc_comparison:
2890 case tcc_binary:
2891 if (OP_SAME (0) && OP_SAME (1))
2892 return 1;
2893
2894 /* For commutative ops, allow the other order. */
2895 return (commutative_tree_code (TREE_CODE (arg0))
2896 && operand_equal_p (TREE_OPERAND (arg0, 0),
2897 TREE_OPERAND (arg1, 1), flags)
2898 && operand_equal_p (TREE_OPERAND (arg0, 1),
2899 TREE_OPERAND (arg1, 0), flags));
2900
2901 case tcc_reference:
2902 /* If either of the pointer (or reference) expressions we are
2903 dereferencing contains a side effect, these cannot be equal,
2904 but their addresses can be. */
2905 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2906 && (TREE_SIDE_EFFECTS (arg0)
2907 || TREE_SIDE_EFFECTS (arg1)))
2908 return 0;
2909
2910 switch (TREE_CODE (arg0))
2911 {
2912 case INDIRECT_REF:
2913 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2914 return OP_SAME (0);
2915
2916 case REALPART_EXPR:
2917 case IMAGPART_EXPR:
2918 return OP_SAME (0);
2919
2920 case TARGET_MEM_REF:
2921 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2922 /* Require equal extra operands and then fall through to MEM_REF
2923 handling of the two common operands. */
2924 if (!OP_SAME_WITH_NULL (2)
2925 || !OP_SAME_WITH_NULL (3)
2926 || !OP_SAME_WITH_NULL (4))
2927 return 0;
2928 /* Fallthru. */
2929 case MEM_REF:
2930 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2931 /* Require equal access sizes, and similar pointer types.
2932 We can have incomplete types for array references of
2933 variable-sized arrays from the Fortran frontend
2934 though. Also verify the types are compatible. */
2935 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2936 || (TYPE_SIZE (TREE_TYPE (arg0))
2937 && TYPE_SIZE (TREE_TYPE (arg1))
2938 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2939 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2940 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2941 && alias_ptr_types_compatible_p
2942 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2943 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2944 && OP_SAME (0) && OP_SAME (1));
2945
2946 case ARRAY_REF:
2947 case ARRAY_RANGE_REF:
2948 /* Operands 2 and 3 may be null.
2949 Compare the array index by value first if it is constant, as the
2950 indexes may have different types but the same value here. */
2951 if (!OP_SAME (0))
2952 return 0;
2953 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2954 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2955 TREE_OPERAND (arg1, 1))
2956 || OP_SAME (1))
2957 && OP_SAME_WITH_NULL (2)
2958 && OP_SAME_WITH_NULL (3));
2959
2960 case COMPONENT_REF:
2961 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2962 may be NULL when we're called to compare MEM_EXPRs. */
2963 if (!OP_SAME_WITH_NULL (0)
2964 || !OP_SAME (1))
2965 return 0;
2966 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2967 return OP_SAME_WITH_NULL (2);
2968
2969 case BIT_FIELD_REF:
2970 if (!OP_SAME (0))
2971 return 0;
2972 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2973 return OP_SAME (1) && OP_SAME (2);
2974
2975 default:
2976 return 0;
2977 }
2978
2979 case tcc_expression:
2980 switch (TREE_CODE (arg0))
2981 {
2982 case ADDR_EXPR:
2983 case TRUTH_NOT_EXPR:
2984 return OP_SAME (0);
2985
2986 case TRUTH_ANDIF_EXPR:
2987 case TRUTH_ORIF_EXPR:
2988 return OP_SAME (0) && OP_SAME (1);
2989
2990 case FMA_EXPR:
2991 case WIDEN_MULT_PLUS_EXPR:
2992 case WIDEN_MULT_MINUS_EXPR:
2993 if (!OP_SAME (2))
2994 return 0;
2995 /* The multiplication operands are commutative. */
2996 /* FALLTHRU */
2997
2998 case TRUTH_AND_EXPR:
2999 case TRUTH_OR_EXPR:
3000 case TRUTH_XOR_EXPR:
3001 if (OP_SAME (0) && OP_SAME (1))
3002 return 1;
3003
3004 /* Otherwise take into account this is a commutative operation. */
3005 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3006 TREE_OPERAND (arg1, 1), flags)
3007 && operand_equal_p (TREE_OPERAND (arg0, 1),
3008 TREE_OPERAND (arg1, 0), flags));
3009
3010 case COND_EXPR:
3011 case VEC_COND_EXPR:
3012 case DOT_PROD_EXPR:
3013 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3014
3015 default:
3016 return 0;
3017 }
3018
3019 case tcc_vl_exp:
3020 switch (TREE_CODE (arg0))
3021 {
3022 case CALL_EXPR:
3023 /* If the CALL_EXPRs call different functions, then they
3024 clearly cannot be equal. */
3025 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3026 flags))
3027 return 0;
3028
3029 {
3030 unsigned int cef = call_expr_flags (arg0);
3031 if (flags & OEP_PURE_SAME)
3032 cef &= ECF_CONST | ECF_PURE;
3033 else
3034 cef &= ECF_CONST;
3035 if (!cef)
3036 return 0;
3037 }
3038
3039 /* Now see if all the arguments are the same. */
3040 {
3041 const_call_expr_arg_iterator iter0, iter1;
3042 const_tree a0, a1;
3043 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3044 a1 = first_const_call_expr_arg (arg1, &iter1);
3045 a0 && a1;
3046 a0 = next_const_call_expr_arg (&iter0),
3047 a1 = next_const_call_expr_arg (&iter1))
3048 if (! operand_equal_p (a0, a1, flags))
3049 return 0;
3050
3051 /* If we get here and both argument lists are exhausted
3052 then the CALL_EXPRs are equal. */
3053 return ! (a0 || a1);
3054 }
3055 default:
3056 return 0;
3057 }
3058
3059 case tcc_declaration:
3060 /* Consider __builtin_sqrt equal to sqrt. */
3061 return (TREE_CODE (arg0) == FUNCTION_DECL
3062 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3063 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3064 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3065
3066 default:
3067 return 0;
3068 }
3069
3070 #undef OP_SAME
3071 #undef OP_SAME_WITH_NULL
3072 }
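
/* Illustrative sketch, not part of GCC: operand_equal_p tests whether
   two trees are structurally indistinguishable, not whether they
   compare equal with C's ==.  Two distinct INTEGER_CST nodes of value
   7 match, while any expression with side effects never matches
   itself.  The helper name is hypothetical.  */

static bool
example_same_constant_p (tree a, tree b)
{
  return operand_equal_p (a, b, OEP_ONLY_CONST) != 0;
}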
3073 \f
3074 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3075 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3076
3077 When in doubt, return 0. */
3078
3079 static int
3080 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3081 {
3082 int unsignedp1, unsignedpo;
3083 tree primarg0, primarg1, primother;
3084 unsigned int correct_width;
3085
3086 if (operand_equal_p (arg0, arg1, 0))
3087 return 1;
3088
3089 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3090 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3091 return 0;
3092
3093 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3094 and see if the inner values are the same. This removes any
3095 signedness comparison, which doesn't matter here. */
3096 primarg0 = arg0, primarg1 = arg1;
3097 STRIP_NOPS (primarg0);
3098 STRIP_NOPS (primarg1);
3099 if (operand_equal_p (primarg0, primarg1, 0))
3100 return 1;
3101
3102 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3103 actual comparison operand, ARG0.
3104
3105 First throw away any conversions to wider types
3106 already present in the operands. */
3107
3108 primarg1 = get_narrower (arg1, &unsignedp1);
3109 primother = get_narrower (other, &unsignedpo);
3110
3111 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3112 if (unsignedp1 == unsignedpo
3113 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3114 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3115 {
3116 tree type = TREE_TYPE (arg0);
3117
3118 /* Make sure the shorter operand is extended the right way
3119 to match the longer operand. */
3120 primarg1 = fold_convert (signed_or_unsigned_type_for
3121 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3122
3123 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3124 return 1;
3125 }
3126
3127 return 0;
3128 }
3129 \f
3130 /* See if ARG is an expression that is either a comparison or is performing
3131 arithmetic on comparisons. The comparisons must only be comparing
3132 two different values, which will be stored in *CVAL1 and *CVAL2; if
3133 they are nonzero it means that some operands have already been found.
3134 No variables may be used anywhere else in the expression except in the
3135 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3136 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3137
3138 If this is true, return 1. Otherwise, return zero. */
3139
3140 static int
3141 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3142 {
3143 enum tree_code code = TREE_CODE (arg);
3144 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3145
3146 /* We can handle some of the tcc_expression cases here. */
3147 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3148 tclass = tcc_unary;
3149 else if (tclass == tcc_expression
3150 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3151 || code == COMPOUND_EXPR))
3152 tclass = tcc_binary;
3153
3154 else if (tclass == tcc_expression && code == SAVE_EXPR
3155 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3156 {
3157 /* If we've already found a CVAL1 or CVAL2, this expression is
3158 too complex to handle. */
3159 if (*cval1 || *cval2)
3160 return 0;
3161
3162 tclass = tcc_unary;
3163 *save_p = 1;
3164 }
3165
3166 switch (tclass)
3167 {
3168 case tcc_unary:
3169 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3170
3171 case tcc_binary:
3172 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3173 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3174 cval1, cval2, save_p));
3175
3176 case tcc_constant:
3177 return 1;
3178
3179 case tcc_expression:
3180 if (code == COND_EXPR)
3181 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3182 cval1, cval2, save_p)
3183 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3184 cval1, cval2, save_p)
3185 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3186 cval1, cval2, save_p));
3187 return 0;
3188
3189 case tcc_comparison:
3190 /* First see if we can handle the first operand, then the second. For
3191 the second operand, we know *CVAL1 can't be zero. It must be that
3192 one side of the comparison is each of the values; test for the
3193 case where this isn't true by failing if the two operands
3194 are the same. */
3195
3196 if (operand_equal_p (TREE_OPERAND (arg, 0),
3197 TREE_OPERAND (arg, 1), 0))
3198 return 0;
3199
3200 if (*cval1 == 0)
3201 *cval1 = TREE_OPERAND (arg, 0);
3202 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3203 ;
3204 else if (*cval2 == 0)
3205 *cval2 = TREE_OPERAND (arg, 0);
3206 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3207 ;
3208 else
3209 return 0;
3210
3211 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3212 ;
3213 else if (*cval2 == 0)
3214 *cval2 = TREE_OPERAND (arg, 1);
3215 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3216 ;
3217 else
3218 return 0;
3219
3220 return 1;
3221
3222 default:
3223 return 0;
3224 }
3225 }
3226 \f
3227 /* ARG is a tree that is known to contain just arithmetic operations and
3228 comparisons. Evaluate the operations in the tree substituting NEW0 for
3229 any occurrence of OLD0 as an operand of a comparison and likewise for
3230 NEW1 and OLD1. */
3231
3232 static tree
3233 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3234 tree old1, tree new1)
3235 {
3236 tree type = TREE_TYPE (arg);
3237 enum tree_code code = TREE_CODE (arg);
3238 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3239
3240 /* We can handle some of the tcc_expression cases here. */
3241 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3242 tclass = tcc_unary;
3243 else if (tclass == tcc_expression
3244 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3245 tclass = tcc_binary;
3246
3247 switch (tclass)
3248 {
3249 case tcc_unary:
3250 return fold_build1_loc (loc, code, type,
3251 eval_subst (loc, TREE_OPERAND (arg, 0),
3252 old0, new0, old1, new1));
3253
3254 case tcc_binary:
3255 return fold_build2_loc (loc, code, type,
3256 eval_subst (loc, TREE_OPERAND (arg, 0),
3257 old0, new0, old1, new1),
3258 eval_subst (loc, TREE_OPERAND (arg, 1),
3259 old0, new0, old1, new1));
3260
3261 case tcc_expression:
3262 switch (code)
3263 {
3264 case SAVE_EXPR:
3265 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3266 old1, new1);
3267
3268 case COMPOUND_EXPR:
3269 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3270 old1, new1);
3271
3272 case COND_EXPR:
3273 return fold_build3_loc (loc, code, type,
3274 eval_subst (loc, TREE_OPERAND (arg, 0),
3275 old0, new0, old1, new1),
3276 eval_subst (loc, TREE_OPERAND (arg, 1),
3277 old0, new0, old1, new1),
3278 eval_subst (loc, TREE_OPERAND (arg, 2),
3279 old0, new0, old1, new1));
3280 default:
3281 break;
3282 }
3283 /* Fall through - ??? */
3284
3285 case tcc_comparison:
3286 {
3287 tree arg0 = TREE_OPERAND (arg, 0);
3288 tree arg1 = TREE_OPERAND (arg, 1);
3289
3290 /* We need to check both for exact equality and tree equality. The
3291 former will be true if the operand has a side-effect. In that
3292 case, we know the operand occurred exactly once. */
3293
3294 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3295 arg0 = new0;
3296 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3297 arg0 = new1;
3298
3299 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3300 arg1 = new0;
3301 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3302 arg1 = new1;
3303
3304 return fold_build2_loc (loc, code, type, arg0, arg1);
3305 }
3306
3307 default:
3308 return arg;
3309 }
3310 }
3311 \f
3312 /* Return a tree for the case when the result of an expression is RESULT
3313 converted to TYPE and OMITTED was previously an operand of the expression
3314 but is now not needed (e.g., we folded OMITTED * 0).
3315
3316 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3317 the conversion of RESULT to TYPE. */
3318
3319 tree
3320 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3321 {
3322 tree t = fold_convert_loc (loc, type, result);
3323
3324 /* If the resulting operand is an empty statement, just return the omitted
3325 statement cast to void. */
3326 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3327 return build1_loc (loc, NOP_EXPR, void_type_node,
3328 fold_ignored_result (omitted));
3329
3330 if (TREE_SIDE_EFFECTS (omitted))
3331 return build2_loc (loc, COMPOUND_EXPR, type,
3332 fold_ignored_result (omitted), t);
3333
3334 return non_lvalue_loc (loc, t);
3335 }
3336
3337 /* Return a tree for the case when the result of an expression is RESULT
3338 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3339 of the expression but are now not needed.
3340
3341 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3342 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3343 evaluated before OMITTED2. Otherwise, if neither has side effects,
3344 just do the conversion of RESULT to TYPE. */
3345
3346 tree
3347 omit_two_operands_loc (location_t loc, tree type, tree result,
3348 tree omitted1, tree omitted2)
3349 {
3350 tree t = fold_convert_loc (loc, type, result);
3351
3352 if (TREE_SIDE_EFFECTS (omitted2))
3353 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3354 if (TREE_SIDE_EFFECTS (omitted1))
3355 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3356
3357 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3358 }
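
/* Illustrative sketch, not part of GCC: folding X * 0 to 0 while
   preserving X's side effects.  If X has side effects the result is
   the COMPOUND_EXPR (X, 0); otherwise it is just the zero constant.
   The helper name is hypothetical.  */

static tree
example_fold_mult_by_zero (location_t loc, tree type, tree x)
{
  return omit_one_operand_loc (loc, type, build_int_cst (type, 0), x);
}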
3359
3360 \f
3361 /* Return a simplified tree node for the truth-negation of ARG. This
3362 never alters ARG itself. We assume that ARG is an operation that
3363 returns a truth value (0 or 1).
3364
3365 FIXME: one would think we would fold the result, but it causes
3366 problems with the dominator optimizer. */
3367
3368 static tree
3369 fold_truth_not_expr (location_t loc, tree arg)
3370 {
3371 tree type = TREE_TYPE (arg);
3372 enum tree_code code = TREE_CODE (arg);
3373 location_t loc1, loc2;
3374
3375 /* If this is a comparison, we can simply invert it, except for
3376 floating-point non-equality comparisons, in which case we just
3377 enclose a TRUTH_NOT_EXPR around what we have. */
3378
3379 if (TREE_CODE_CLASS (code) == tcc_comparison)
3380 {
3381 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3382 if (FLOAT_TYPE_P (op_type)
3383 && flag_trapping_math
3384 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3385 && code != NE_EXPR && code != EQ_EXPR)
3386 return NULL_TREE;
3387
3388 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3389 if (code == ERROR_MARK)
3390 return NULL_TREE;
3391
3392 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3393 TREE_OPERAND (arg, 1));
3394 }
3395
3396 switch (code)
3397 {
3398 case INTEGER_CST:
3399 return constant_boolean_node (integer_zerop (arg), type);
3400
3401 case TRUTH_AND_EXPR:
3402 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3403 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3404 return build2_loc (loc, TRUTH_OR_EXPR, type,
3405 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3406 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3407
3408 case TRUTH_OR_EXPR:
3409 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3410 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3411 return build2_loc (loc, TRUTH_AND_EXPR, type,
3412 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3413 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3414
3415 case TRUTH_XOR_EXPR:
3416 /* Here we can invert either operand. We invert the first operand
3417 unless the second operand is a TRUTH_NOT_EXPR in which case our
3418 result is the XOR of the first operand with the inside of the
3419 negation of the second operand. */
3420
3421 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3422 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3423 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3424 else
3425 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3426 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3427 TREE_OPERAND (arg, 1));
3428
3429 case TRUTH_ANDIF_EXPR:
3430 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3431 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3432 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3433 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3434 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3435
3436 case TRUTH_ORIF_EXPR:
3437 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3438 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3439 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3440 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3441 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3442
3443 case TRUTH_NOT_EXPR:
3444 return TREE_OPERAND (arg, 0);
3445
3446 case COND_EXPR:
3447 {
3448 tree arg1 = TREE_OPERAND (arg, 1);
3449 tree arg2 = TREE_OPERAND (arg, 2);
3450
3451 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3452 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3453
3454 /* A COND_EXPR may have a throw as one operand, which
3455 then has void type. Just leave void operands
3456 as they are. */
3457 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3458 VOID_TYPE_P (TREE_TYPE (arg1))
3459 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3460 VOID_TYPE_P (TREE_TYPE (arg2))
3461 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3462 }
3463
3464 case COMPOUND_EXPR:
3465 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3466 return build2_loc (loc, COMPOUND_EXPR, type,
3467 TREE_OPERAND (arg, 0),
3468 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3469
3470 case NON_LVALUE_EXPR:
3471 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3472 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3473
3474 CASE_CONVERT:
3475 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3476 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3477
3478 /* ... fall through ... */
3479
3480 case FLOAT_EXPR:
3481 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3482 return build1_loc (loc, TREE_CODE (arg), type,
3483 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3484
3485 case BIT_AND_EXPR:
3486 if (!integer_onep (TREE_OPERAND (arg, 1)))
3487 return NULL_TREE;
3488 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3489
3490 case SAVE_EXPR:
3491 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3492
3493 case CLEANUP_POINT_EXPR:
3494 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3495 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3496 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3497
3498 default:
3499 return NULL_TREE;
3500 }
3501 }
3502
3503 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3504 assume that ARG is an operation that returns a truth value (0 or 1
3505 for scalars, 0 or -1 for vectors). Return the folded expression if
3506 folding is successful. Otherwise, return NULL_TREE. */
3507
3508 static tree
3509 fold_invert_truthvalue (location_t loc, tree arg)
3510 {
3511 tree type = TREE_TYPE (arg);
3512 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3513 ? BIT_NOT_EXPR
3514 : TRUTH_NOT_EXPR,
3515 type, arg);
3516 }
3517
3518 /* Return a simplified tree node for the truth-negation of ARG. This
3519 never alters ARG itself. We assume that ARG is an operation that
3520 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3521
3522 tree
3523 invert_truthvalue_loc (location_t loc, tree arg)
3524 {
3525 if (TREE_CODE (arg) == ERROR_MARK)
3526 return arg;
3527
3528 tree type = TREE_TYPE (arg);
3529 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3530 ? BIT_NOT_EXPR
3531 : TRUTH_NOT_EXPR,
3532 type, arg);
3533 }
3534
3535 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3536 operands are another bit-wise operation with a common input. If so,
3537 distribute the bit operations to save an operation and possibly two if
3538 constants are involved. For example, convert
3539 (A | B) & (A | C) into A | (B & C)
3540 Further simplification will occur if B and C are constants.
3541
3542 If this optimization cannot be done, 0 will be returned. */
3543
3544 static tree
3545 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3546 tree arg0, tree arg1)
3547 {
3548 tree common;
3549 tree left, right;
3550
3551 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3552 || TREE_CODE (arg0) == code
3553 || (TREE_CODE (arg0) != BIT_AND_EXPR
3554 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3555 return 0;
3556
3557 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3558 {
3559 common = TREE_OPERAND (arg0, 0);
3560 left = TREE_OPERAND (arg0, 1);
3561 right = TREE_OPERAND (arg1, 1);
3562 }
3563 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3564 {
3565 common = TREE_OPERAND (arg0, 0);
3566 left = TREE_OPERAND (arg0, 1);
3567 right = TREE_OPERAND (arg1, 0);
3568 }
3569 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3570 {
3571 common = TREE_OPERAND (arg0, 1);
3572 left = TREE_OPERAND (arg0, 0);
3573 right = TREE_OPERAND (arg1, 1);
3574 }
3575 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3576 {
3577 common = TREE_OPERAND (arg0, 1);
3578 left = TREE_OPERAND (arg0, 0);
3579 right = TREE_OPERAND (arg1, 0);
3580 }
3581 else
3582 return 0;
3583
3584 common = fold_convert_loc (loc, type, common);
3585 left = fold_convert_loc (loc, type, left);
3586 right = fold_convert_loc (loc, type, right);
3587 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3588 fold_build2_loc (loc, code, type, left, right));
3589 }
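
/* Illustrative sketch, not part of GCC: rewriting (a | b) & (a | c)
   as a | (b & c) with distribute_bit_expr.  The helper builds the
   un-distributed operands first and is hypothetical; a return of 0
   means the pattern did not match.  */

static tree
example_distribute_ior_over_and (location_t loc, tree type,
				 tree a, tree b, tree c)
{
  tree lhs = fold_build2_loc (loc, BIT_IOR_EXPR, type, a, b);
  tree rhs = fold_build2_loc (loc, BIT_IOR_EXPR, type, a, c);
  return distribute_bit_expr (loc, BIT_AND_EXPR, type, lhs, rhs);
}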
3590
3591 /* Knowing that ARG0 and ARG1 are each a MULT_EXPR or RDIV_EXPR, simplify
3592 a binary operation with code CODE. This optimization is unsafe. */
3593 static tree
3594 distribute_real_division (location_t loc, enum tree_code code, tree type,
3595 tree arg0, tree arg1)
3596 {
3597 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3598 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3599
3600 /* (A / C) +- (B / C) -> (A +- B) / C. */
3601 if (mul0 == mul1
3602 && operand_equal_p (TREE_OPERAND (arg0, 1),
3603 TREE_OPERAND (arg1, 1), 0))
3604 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3605 fold_build2_loc (loc, code, type,
3606 TREE_OPERAND (arg0, 0),
3607 TREE_OPERAND (arg1, 0)),
3608 TREE_OPERAND (arg0, 1));
3609
3610 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3611 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3612 TREE_OPERAND (arg1, 0), 0)
3613 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3614 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3615 {
3616 REAL_VALUE_TYPE r0, r1;
3617 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3618 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3619 if (!mul0)
3620 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3621 if (!mul1)
3622 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3623 real_arithmetic (&r0, code, &r0, &r1);
3624 return fold_build2_loc (loc, MULT_EXPR, type,
3625 TREE_OPERAND (arg0, 0),
3626 build_real (type, r0));
3627 }
3628
3629 return NULL_TREE;
3630 }
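
/* Illustrative sketch, not part of GCC: rewriting (a / c) + (b / c)
   as (a + b) / c with distribute_real_division.  This relies on the
   unsafe floating-point reassociation noted above; NULL_TREE means
   no simplification applied.  The helper name is hypothetical.  */

static tree
example_distribute_rdiv_over_plus (location_t loc, tree type,
				   tree a, tree b, tree c)
{
  tree lhs = fold_build2_loc (loc, RDIV_EXPR, type, a, c);
  tree rhs = fold_build2_loc (loc, RDIV_EXPR, type, b, c);
  return distribute_real_division (loc, PLUS_EXPR, type, lhs, rhs);
}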
3631 \f
3632 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3633 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3634
3635 static tree
3636 make_bit_field_ref (location_t loc, tree inner, tree type,
3637 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3638 {
3639 tree result, bftype;
3640
3641 if (bitpos == 0)
3642 {
3643 tree size = TYPE_SIZE (TREE_TYPE (inner));
3644 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3645 || POINTER_TYPE_P (TREE_TYPE (inner)))
3646 && tree_fits_shwi_p (size)
3647 && tree_to_shwi (size) == bitsize)
3648 return fold_convert_loc (loc, type, inner);
3649 }
3650
3651 bftype = type;
3652 if (TYPE_PRECISION (bftype) != bitsize
3653 || TYPE_UNSIGNED (bftype) == !unsignedp)
3654 bftype = build_nonstandard_integer_type (bitsize, 0);
3655
3656 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3657 size_int (bitsize), bitsize_int (bitpos));
3658
3659 if (bftype != type)
3660 result = fold_convert_loc (loc, type, result);
3661
3662 return result;
3663 }
3664
3665 /* Optimize a bit-field compare.
3666
3667 There are two cases: First is a compare against a constant and the
3668 second is a comparison of two items where the fields are at the same
3669 bit position relative to the start of a chunk (byte, halfword, word)
3670 large enough to contain it. In these cases we can avoid the shift
3671 implicit in bitfield extractions.
3672
3673 For constants, we emit a compare of the shifted constant with the
3674 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3675 compared. For two fields at the same position, we do the ANDs with the
3676 similar mask and compare the result of the ANDs.
3677
3678 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3679 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3680 are the left and right operands of the comparison, respectively.
3681
3682 If the optimization described above can be done, we return the resulting
3683 tree. Otherwise we return zero. */
3684
3685 static tree
3686 optimize_bit_field_compare (location_t loc, enum tree_code code,
3687 tree compare_type, tree lhs, tree rhs)
3688 {
3689 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3690 tree type = TREE_TYPE (lhs);
3691 tree unsigned_type;
3692 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3693 machine_mode lmode, rmode, nmode;
3694 int lunsignedp, runsignedp;
3695 int lvolatilep = 0, rvolatilep = 0;
3696 tree linner, rinner = NULL_TREE;
3697 tree mask;
3698 tree offset;
3699
3700 /* Get all the information about the extractions being done. If the bit size
3701 is the same as the size of the underlying object, we aren't doing an
3702 extraction at all and so can do nothing. We also don't want to
3703 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3704 then will no longer be able to replace it. */
3705 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3706 &lunsignedp, &lvolatilep, false);
3707 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3708 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3709 return 0;
3710
3711 if (!const_p)
3712 {
3713 /* If this is not a constant, we can only do something if bit positions,
3714 sizes, and signedness are the same. */
3715 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3716 &runsignedp, &rvolatilep, false);
3717
3718 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3719 || lunsignedp != runsignedp || offset != 0
3720 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3721 return 0;
3722 }
3723
3724 /* See if we can find a mode to refer to this field. We should be able to,
3725 but fail if we can't. */
3726 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3727 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3728 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3729 TYPE_ALIGN (TREE_TYPE (rinner))),
3730 word_mode, false);
3731 if (nmode == VOIDmode)
3732 return 0;
3733
3734 /* Set an unsigned type of the precision of this mode for the
3735 shifts below. */
3736 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3737
3738 /* Compute the bit position and size for the new reference and our offset
3739 within it. If the new reference is the same size as the original, we
3740 won't optimize anything, so return zero. */
3741 nbitsize = GET_MODE_BITSIZE (nmode);
3742 nbitpos = lbitpos & ~ (nbitsize - 1);
3743 lbitpos -= nbitpos;
3744 if (nbitsize == lbitsize)
3745 return 0;
3746
3747 if (BYTES_BIG_ENDIAN)
3748 lbitpos = nbitsize - lbitsize - lbitpos;
3749
3750 /* Make the mask to be used against the extracted field. */
3751 mask = build_int_cst_type (unsigned_type, -1);
3752 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3753 mask = const_binop (RSHIFT_EXPR, mask,
3754 size_int (nbitsize - lbitsize - lbitpos));
3755
3756 if (! const_p)
3757 /* If not comparing with constant, just rework the comparison
3758 and return. */
3759 return fold_build2_loc (loc, code, compare_type,
3760 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3761 make_bit_field_ref (loc, linner,
3762 unsigned_type,
3763 nbitsize, nbitpos,
3764 1),
3765 mask),
3766 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3767 make_bit_field_ref (loc, rinner,
3768 unsigned_type,
3769 nbitsize, nbitpos,
3770 1),
3771 mask));
3772
3773 /* Otherwise, we are handling the constant case. See if the constant is too
3774 big for the field. Warn and return a tree for 0 (false) if so. We do
3775 this not only for its own sake, but to avoid having to test for this
3776 error case below. If we didn't, we might generate wrong code.
3777
3778 For unsigned fields, the constant shifted right by the field length should
3779 be all zero. For signed fields, the high-order bits should agree with
3780 the sign bit. */
3781
3782 if (lunsignedp)
3783 {
3784 if (wi::lrshift (rhs, lbitsize) != 0)
3785 {
3786 warning (0, "comparison is always %d due to width of bit-field",
3787 code == NE_EXPR);
3788 return constant_boolean_node (code == NE_EXPR, compare_type);
3789 }
3790 }
3791 else
3792 {
3793 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3794 if (tem != 0 && tem != -1)
3795 {
3796 warning (0, "comparison is always %d due to width of bit-field",
3797 code == NE_EXPR);
3798 return constant_boolean_node (code == NE_EXPR, compare_type);
3799 }
3800 }
3801
3802 /* Single-bit compares should always be against zero. */
3803 if (lbitsize == 1 && ! integer_zerop (rhs))
3804 {
3805 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3806 rhs = build_int_cst (type, 0);
3807 }
3808
3809 /* Make a new bitfield reference, shift the constant over the
3810 appropriate number of bits and mask it with the computed mask
3811 (in case this was a signed field). If we changed it, make a new one. */
3812 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3813
3814 rhs = const_binop (BIT_AND_EXPR,
3815 const_binop (LSHIFT_EXPR,
3816 fold_convert_loc (loc, unsigned_type, rhs),
3817 size_int (lbitpos)),
3818 mask);
3819
3820 lhs = build2_loc (loc, code, compare_type,
3821 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3822 return lhs;
3823 }
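
/* Illustrative sketch, not part of GCC: the constant case above written out
   with plain integers.  To test "field == C" for a field of LBITSIZE bits at
   bit LBITPOS inside a 32-bit word, load the whole word once, AND it with a
   mask covering the field, and compare against C shifted into place; no
   extract shift is needed.  Assumes lbitpos + lbitsize <= 32 and
   little-endian bit numbering; the function name is hypothetical.  */
#if 0
#include <stdint.h>

static int
bit_field_eq (uint32_t word, unsigned int lbitpos, unsigned int lbitsize,
              uint32_t c)
{
  /* Mask of LBITSIZE ones, shifted to the field's position.  */
  uint32_t mask = ((~0u << (32 - lbitsize)) >> (32 - lbitsize)) << lbitpos;
  return (word & mask) == ((c << lbitpos) & mask);
}
#endif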
3824 \f
3825 /* Subroutine for fold_truth_andor_1: decode a field reference.
3826
3827 If EXP is a comparison reference, we return the innermost reference.
3828
3829 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3830 set to the starting bit number.
3831
3832 If the innermost field can be completely contained in a mode-sized
3833 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3834
3835 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3836 otherwise it is not changed.
3837
3838 *PUNSIGNEDP is set to the signedness of the field.
3839
3840 *PMASK is set to the mask used. This is either contained in a
3841 BIT_AND_EXPR or derived from the width of the field.
3842
3843 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3844
3845 Return 0 if this is not a component reference or is one that we can't
3846 do anything with. */
3847
3848 static tree
3849 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3850 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3851 int *punsignedp, int *pvolatilep,
3852 tree *pmask, tree *pand_mask)
3853 {
3854 tree outer_type = 0;
3855 tree and_mask = 0;
3856 tree mask, inner, offset;
3857 tree unsigned_type;
3858 unsigned int precision;
3859
3860 /* All the optimizations using this function assume integer fields.
3861 There are problems with FP fields since the type_for_size call
3862 below can fail for, e.g., XFmode. */
3863 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3864 return 0;
3865
3866 /* We are interested in the bare arrangement of bits, so strip everything
3867 that doesn't affect the machine mode. However, record the type of the
3868 outermost expression if it may matter below. */
3869 if (CONVERT_EXPR_P (exp)
3870 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3871 outer_type = TREE_TYPE (exp);
3872 STRIP_NOPS (exp);
3873
3874 if (TREE_CODE (exp) == BIT_AND_EXPR)
3875 {
3876 and_mask = TREE_OPERAND (exp, 1);
3877 exp = TREE_OPERAND (exp, 0);
3878 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3879 if (TREE_CODE (and_mask) != INTEGER_CST)
3880 return 0;
3881 }
3882
3883 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3884 punsignedp, pvolatilep, false);
3885 if ((inner == exp && and_mask == 0)
3886 || *pbitsize < 0 || offset != 0
3887 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3888 return 0;
3889
3890 /* If the number of bits in the reference is the same as the bitsize of
3891 the outer type, then the outer type gives the signedness. Otherwise
3892 (in case of a small bitfield) the signedness is unchanged. */
3893 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3894 *punsignedp = TYPE_UNSIGNED (outer_type);
3895
3896 /* Compute the mask to access the bitfield. */
3897 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3898 precision = TYPE_PRECISION (unsigned_type);
3899
3900 mask = build_int_cst_type (unsigned_type, -1);
3901
3902 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3903 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3904
3905 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3906 if (and_mask != 0)
3907 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3908 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3909
3910 *pmask = mask;
3911 *pand_mask = and_mask;
3912 return inner;
3913 }
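
/* Illustrative sketch, not part of GCC: the shift-up/shift-down mask
   computation used just above, on a plain 32-bit word.  Shifting an all-ones
   value left and then right by PRECISION - BITSIZE leaves exactly BITSIZE
   ones in the low-order bits, and never shifts by a full word width even
   when BITSIZE equals the precision.  The function name is hypothetical.  */
#if 0
#include <stdint.h>

static uint32_t
low_bits_mask (unsigned int bitsize)    /* requires 1 <= bitsize <= 32 */
{
  uint32_t mask = ~0u;
  mask <<= 32 - bitsize;    /* push the ones to the top...  */
  mask >>= 32 - bitsize;    /* ...and back down, clearing the upper bits */
  return mask;              /* e.g. low_bits_mask (5) == 0x1f */
}
#endif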
3914
3915 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3916 bit positions and the type of MASK is signed. */
3917
3918 static int
3919 all_ones_mask_p (const_tree mask, unsigned int size)
3920 {
3921 tree type = TREE_TYPE (mask);
3922 unsigned int precision = TYPE_PRECISION (type);
3923
3924 /* If this function returns true when the type of the mask is
3925 UNSIGNED, then there will be errors. In particular see
3926 gcc.c-torture/execute/990326-1.c. There does not appear to be
3927 any documentation paper trail as to why this is so. But the pre
3928 wide-int worked with that restriction and it has been preserved
3929 here. */
3930 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3931 return false;
3932
3933 return wi::mask (size, false, precision) == mask;
3934 }
3935
3936 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3937 represents the sign bit of EXP's type. If EXP represents a sign
3938 or zero extension, also test VAL against the unextended type.
3939 The return value is the (sub)expression whose sign bit is VAL,
3940 or NULL_TREE otherwise. */
3941
3942 tree
3943 sign_bit_p (tree exp, const_tree val)
3944 {
3945 int width;
3946 tree t;
3947
3948 /* Tree EXP must have an integral type. */
3949 t = TREE_TYPE (exp);
3950 if (! INTEGRAL_TYPE_P (t))
3951 return NULL_TREE;
3952
3953 /* Tree VAL must be an integer constant. */
3954 if (TREE_CODE (val) != INTEGER_CST
3955 || TREE_OVERFLOW (val))
3956 return NULL_TREE;
3957
3958 width = TYPE_PRECISION (t);
3959 if (wi::only_sign_bit_p (val, width))
3960 return exp;
3961
3962 /* Handle extension from a narrower type. */
3963 if (TREE_CODE (exp) == NOP_EXPR
3964 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3965 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3966
3967 return NULL_TREE;
3968 }
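
/* Illustrative sketch, not part of GCC: the core test behind sign_bit_p on a
   plain machine word.  VAL is "only the sign bit" of a WIDTH-bit type
   exactly when it equals 1 shifted into the top bit position.  The function
   name is hypothetical.  */
#if 0
#include <stdint.h>

static int
only_sign_bit_p (uint64_t val, unsigned int width)  /* 1 <= width <= 64 */
{
  return val == (uint64_t) 1 << (width - 1);
}
#endif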
3969
3970 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3971 to be evaluated unconditionally. */
3972
3973 static int
3974 simple_operand_p (const_tree exp)
3975 {
3976 /* Strip any conversions that don't change the machine mode. */
3977 STRIP_NOPS (exp);
3978
3979 return (CONSTANT_CLASS_P (exp)
3980 || TREE_CODE (exp) == SSA_NAME
3981 || (DECL_P (exp)
3982 && ! TREE_ADDRESSABLE (exp)
3983 && ! TREE_THIS_VOLATILE (exp)
3984 && ! DECL_NONLOCAL (exp)
3985 /* Don't regard global variables as simple. They may be
3986 allocated in ways unknown to the compiler (shared memory,
3987 #pragma weak, etc). */
3988 && ! TREE_PUBLIC (exp)
3989 && ! DECL_EXTERNAL (exp)
3990 /* Weakrefs are not safe to be read, since they can be NULL.
3991 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3992 have DECL_WEAK flag set. */
3993 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3994 /* Loading a static variable is unduly expensive, but global
3995 registers aren't expensive. */
3996 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3997 }
3998
3999 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4000 to be evaluated unconditionally.
4001 In addition to simple_operand_p, we assume that comparisons, conversions,
4002 and logic-not operations are simple, if their operands are simple, too. */
4003
4004 static bool
4005 simple_operand_p_2 (tree exp)
4006 {
4007 enum tree_code code;
4008
4009 if (TREE_SIDE_EFFECTS (exp)
4010 || tree_could_trap_p (exp))
4011 return false;
4012
4013 while (CONVERT_EXPR_P (exp))
4014 exp = TREE_OPERAND (exp, 0);
4015
4016 code = TREE_CODE (exp);
4017
4018 if (TREE_CODE_CLASS (code) == tcc_comparison)
4019 return (simple_operand_p (TREE_OPERAND (exp, 0))
4020 && simple_operand_p (TREE_OPERAND (exp, 1)));
4021
4022 if (code == TRUTH_NOT_EXPR)
4023 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4024
4025 return simple_operand_p (exp);
4026 }
4027
4028 \f
4029 /* The following functions are subroutines to fold_range_test and allow it to
4030 try to change a logical combination of comparisons into a range test.
4031
4032 For example, both
4033 X == 2 || X == 3 || X == 4 || X == 5
4034 and
4035 X >= 2 && X <= 5
4036 are converted to
4037 (unsigned) (X - 2) <= 3
4038
4039 We describe each set of comparisons as being either inside or outside
4040 a range, using a variable named like IN_P, and then describe the
4041 range with a lower and upper bound. If one of the bounds is omitted,
4042 it represents either the highest or lowest value of the type.
4043
4044 In the comments below, we represent a range by two numbers in brackets
4045 preceded by a "+" to designate being inside that range, or a "-" to
4046 designate being outside that range, so the condition can be inverted by
4047 flipping the prefix. An omitted bound is represented by a "-". For
4048 example, "- [-, 10]" means being outside the range starting at the lowest
4049 possible value and ending at 10, in other words, being greater than 10.
4050 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4051 always false.
4052
4053 We set up things so that the missing bounds are handled in a consistent
4054 manner so neither a missing bound nor "true" and "false" need to be
4055 handled using a special case. */
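
/* Illustrative sketch, not part of GCC: the unsigned-subtraction trick the
   comment above describes, checked exhaustively over all signed 8-bit
   values.  When X is below the low bound, X - 2 wraps around to a large
   unsigned value, so the single unsigned comparison rejects it.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -128; x <= 127; x++)
    {
      int pair = x >= 2 && x <= 5;                  /* X >= 2 && X <= 5 */
      int range = (unsigned char) (x - 2) <= 3u;    /* (unsigned) (X - 2) <= 3 */
      assert (pair == range);
    }
  return 0;
}
#endif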
4056
4057 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4058 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4059 and UPPER1_P are nonzero if the respective argument is an upper bound
4060 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4061 must be specified for a comparison. ARG1 will be converted to ARG0's
4062 type if both are specified. */
4063
4064 static tree
4065 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4066 tree arg1, int upper1_p)
4067 {
4068 tree tem;
4069 int result;
4070 int sgn0, sgn1;
4071
4072 /* If neither arg represents infinity, do the normal operation.
4073 Else, if not a comparison, return infinity. Else handle the special
4074 comparison rules. Note that most of the cases below won't occur, but
4075 are handled for consistency. */
4076
4077 if (arg0 != 0 && arg1 != 0)
4078 {
4079 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4080 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4081 STRIP_NOPS (tem);
4082 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4083 }
4084
4085 if (TREE_CODE_CLASS (code) != tcc_comparison)
4086 return 0;
4087
4088 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4089 for neither. In real maths, we cannot assume open ended ranges are
4090 the same. But, this is computer arithmetic, where numbers are finite.
4091 We can therefore make the transformation of any unbounded range with
4092 the value Z, Z being greater than any representable number. This permits
4093 us to treat unbounded ranges as equal. */
4094 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4095 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4096 switch (code)
4097 {
4098 case EQ_EXPR:
4099 result = sgn0 == sgn1;
4100 break;
4101 case NE_EXPR:
4102 result = sgn0 != sgn1;
4103 break;
4104 case LT_EXPR:
4105 result = sgn0 < sgn1;
4106 break;
4107 case LE_EXPR:
4108 result = sgn0 <= sgn1;
4109 break;
4110 case GT_EXPR:
4111 result = sgn0 > sgn1;
4112 break;
4113 case GE_EXPR:
4114 result = sgn0 >= sgn1;
4115 break;
4116 default:
4117 gcc_unreachable ();
4118 }
4119
4120 return constant_boolean_node (result, type);
4121 }
4122 \f
4123 /* Helper routine for make_range. Perform one step for it, return
4124 new expression if the loop should continue or NULL_TREE if it should
4125 stop. */
4126
4127 tree
4128 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4129 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4130 bool *strict_overflow_p)
4131 {
4132 tree arg0_type = TREE_TYPE (arg0);
4133 tree n_low, n_high, low = *p_low, high = *p_high;
4134 int in_p = *p_in_p, n_in_p;
4135
4136 switch (code)
4137 {
4138 case TRUTH_NOT_EXPR:
4139 /* We can only do something if the range is testing for zero. */
4140 if (low == NULL_TREE || high == NULL_TREE
4141 || ! integer_zerop (low) || ! integer_zerop (high))
4142 return NULL_TREE;
4143 *p_in_p = ! in_p;
4144 return arg0;
4145
4146 case EQ_EXPR: case NE_EXPR:
4147 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4148 /* We can only do something if the range is testing for zero
4149 and if the second operand is an integer constant. Note that
4150 saying something is "in" the range we make is done by
4151 complementing IN_P, since IN_P was set for the initial case of
4152 being not equal to zero; "out" leaves it alone. */
4153 if (low == NULL_TREE || high == NULL_TREE
4154 || ! integer_zerop (low) || ! integer_zerop (high)
4155 || TREE_CODE (arg1) != INTEGER_CST)
4156 return NULL_TREE;
4157
4158 switch (code)
4159 {
4160 case NE_EXPR: /* - [c, c] */
4161 low = high = arg1;
4162 break;
4163 case EQ_EXPR: /* + [c, c] */
4164 in_p = ! in_p, low = high = arg1;
4165 break;
4166 case GT_EXPR: /* - [-, c] */
4167 low = 0, high = arg1;
4168 break;
4169 case GE_EXPR: /* + [c, -] */
4170 in_p = ! in_p, low = arg1, high = 0;
4171 break;
4172 case LT_EXPR: /* - [c, -] */
4173 low = arg1, high = 0;
4174 break;
4175 case LE_EXPR: /* + [-, c] */
4176 in_p = ! in_p, low = 0, high = arg1;
4177 break;
4178 default:
4179 gcc_unreachable ();
4180 }
4181
4182 /* If this is an unsigned comparison, we also know that EXP is
4183 greater than or equal to zero. We base the range tests we make
4184 on that fact, so we record it here so we can parse existing
4185 range tests. We test arg0_type since often the return type
4186 of, e.g. EQ_EXPR, is boolean. */
4187 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4188 {
4189 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4190 in_p, low, high, 1,
4191 build_int_cst (arg0_type, 0),
4192 NULL_TREE))
4193 return NULL_TREE;
4194
4195 in_p = n_in_p, low = n_low, high = n_high;
4196
4197 /* If the high bound is missing, but we have a nonzero low
4198 bound, reverse the range so it goes from zero to the low bound
4199 minus 1. */
4200 if (high == 0 && low && ! integer_zerop (low))
4201 {
4202 in_p = ! in_p;
4203 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4204 build_int_cst (TREE_TYPE (low), 1), 0);
4205 low = build_int_cst (arg0_type, 0);
4206 }
4207 }
4208
4209 *p_low = low;
4210 *p_high = high;
4211 *p_in_p = in_p;
4212 return arg0;
4213
4214 case NEGATE_EXPR:
4215 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4216 low and high are non-NULL, then normalize will DTRT. */
4217 if (!TYPE_UNSIGNED (arg0_type)
4218 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4219 {
4220 if (low == NULL_TREE)
4221 low = TYPE_MIN_VALUE (arg0_type);
4222 if (high == NULL_TREE)
4223 high = TYPE_MAX_VALUE (arg0_type);
4224 }
4225
4226 /* (-x) IN [a,b] -> x in [-b, -a] */
4227 n_low = range_binop (MINUS_EXPR, exp_type,
4228 build_int_cst (exp_type, 0),
4229 0, high, 1);
4230 n_high = range_binop (MINUS_EXPR, exp_type,
4231 build_int_cst (exp_type, 0),
4232 0, low, 0);
4233 if (n_high != 0 && TREE_OVERFLOW (n_high))
4234 return NULL_TREE;
4235 goto normalize;
4236
4237 case BIT_NOT_EXPR:
4238 /* ~ X -> -X - 1 */
4239 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4240 build_int_cst (exp_type, 1));
4241
4242 case PLUS_EXPR:
4243 case MINUS_EXPR:
4244 if (TREE_CODE (arg1) != INTEGER_CST)
4245 return NULL_TREE;
4246
4247 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4248 move a constant to the other side. */
4249 if (!TYPE_UNSIGNED (arg0_type)
4250 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4251 return NULL_TREE;
4252
4253 /* If EXP is signed, any overflow in the computation is undefined,
4254 so we don't worry about it so long as our computations on
4255 the bounds don't overflow. For unsigned, overflow is defined
4256 and this is exactly the right thing. */
4257 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4258 arg0_type, low, 0, arg1, 0);
4259 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4260 arg0_type, high, 1, arg1, 0);
4261 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4262 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4263 return NULL_TREE;
4264
4265 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4266 *strict_overflow_p = true;
4267
4268 normalize:
4269 /* Check for an unsigned range which has wrapped around the maximum
4270 value thus making n_high < n_low, and normalize it. */
4271 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4272 {
4273 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4274 build_int_cst (TREE_TYPE (n_high), 1), 0);
4275 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4276 build_int_cst (TREE_TYPE (n_low), 1), 0);
4277
4278 /* If the range is of the form +/- [ x+1, x ], we won't
4279 be able to normalize it. But then, it represents the
4280 whole range or the empty set, so make it
4281 +/- [ -, - ]. */
4282 if (tree_int_cst_equal (n_low, low)
4283 && tree_int_cst_equal (n_high, high))
4284 low = high = 0;
4285 else
4286 in_p = ! in_p;
4287 }
4288 else
4289 low = n_low, high = n_high;
4290
4291 *p_low = low;
4292 *p_high = high;
4293 *p_in_p = in_p;
4294 return arg0;
4295
4296 CASE_CONVERT:
4297 case NON_LVALUE_EXPR:
4298 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4299 return NULL_TREE;
4300
4301 if (! INTEGRAL_TYPE_P (arg0_type)
4302 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4303 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4304 return NULL_TREE;
4305
4306 n_low = low, n_high = high;
4307
4308 if (n_low != 0)
4309 n_low = fold_convert_loc (loc, arg0_type, n_low);
4310
4311 if (n_high != 0)
4312 n_high = fold_convert_loc (loc, arg0_type, n_high);
4313
4314 /* If we're converting arg0 from an unsigned type to exp's
4315 signed type, we will be doing the comparison as unsigned.
4316 The tests above have already verified that LOW and HIGH
4317 are both positive.
4318
4319 So we have to ensure that we will handle large unsigned
4320 values the same way that the current signed bounds treat
4321 negative values. */
4322
4323 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4324 {
4325 tree high_positive;
4326 tree equiv_type;
4327 /* For fixed-point modes, we need to pass the saturating flag
4328 as the 2nd parameter. */
4329 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4330 equiv_type
4331 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4332 TYPE_SATURATING (arg0_type));
4333 else
4334 equiv_type
4335 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4336
4337 /* A range without an upper bound is, naturally, unbounded.
4338 Since convert would have cropped a very large value, use
4339 the max value for the destination type. */
4340 high_positive
4341 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4342 : TYPE_MAX_VALUE (arg0_type);
4343
4344 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4345 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4346 fold_convert_loc (loc, arg0_type,
4347 high_positive),
4348 build_int_cst (arg0_type, 1));
4349
4350 /* If the low bound is specified, "and" the range with the
4351 range for which the original unsigned value will be
4352 positive. */
4353 if (low != 0)
4354 {
4355 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4356 1, fold_convert_loc (loc, arg0_type,
4357 integer_zero_node),
4358 high_positive))
4359 return NULL_TREE;
4360
4361 in_p = (n_in_p == in_p);
4362 }
4363 else
4364 {
4365 /* Otherwise, "or" the range with the range of the input
4366 that will be interpreted as negative. */
4367 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4368 1, fold_convert_loc (loc, arg0_type,
4369 integer_zero_node),
4370 high_positive))
4371 return NULL_TREE;
4372
4373 in_p = (in_p != n_in_p);
4374 }
4375 }
4376
4377 *p_low = n_low;
4378 *p_high = n_high;
4379 *p_in_p = in_p;
4380 return arg0;
4381
4382 default:
4383 return NULL_TREE;
4384 }
4385 }
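
/* Illustrative sketch, not part of GCC: the PLUS_EXPR step above, played out
   on plain unsigned chars.  To test "x + 200 <= 250" we move the constant to
   the bounds, giving the wrapped range [56, 50]; the "normalize" step then
   turns the wrapped inclusion into an exclusion of the complementary range
   [51, 55].  Checked exhaustively below.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = 0; x <= 255; x++)
    {
      int orig = (unsigned char) (x + 200) <= 250;  /* in [0, 250] after shift */
      int normalized = ! (x >= 51 && x <= 55);      /* i.e. - [51, 55] */
      assert (orig == normalized);
    }
  return 0;
}
#endif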
4386
4387 /* Given EXP, a logical expression, set the range it is testing into
4388 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4389 actually being tested. *PLOW and *PHIGH will be made of the same
4390 type as the returned expression. If EXP is not a comparison, we
4391 will most likely not be returning a useful value and range. Set
4392 *STRICT_OVERFLOW_P to true if the return value is only valid
4393 because signed overflow is undefined; otherwise, do not change
4394 *STRICT_OVERFLOW_P. */
4395
4396 tree
4397 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4398 bool *strict_overflow_p)
4399 {
4400 enum tree_code code;
4401 tree arg0, arg1 = NULL_TREE;
4402 tree exp_type, nexp;
4403 int in_p;
4404 tree low, high;
4405 location_t loc = EXPR_LOCATION (exp);
4406
4407 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4408 and see if we can refine the range. Some of the cases below may not
4409 happen, but it doesn't seem worth worrying about this. We loop as long
4410 as make_range_step can refine the range; once it returns NULL_TREE we
4411 stop and use whatever range has been computed so far. */
4412
4413 in_p = 0;
4414 low = high = build_int_cst (TREE_TYPE (exp), 0);
4415
4416 while (1)
4417 {
4418 code = TREE_CODE (exp);
4419 exp_type = TREE_TYPE (exp);
4420 arg0 = NULL_TREE;
4421
4422 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4423 {
4424 if (TREE_OPERAND_LENGTH (exp) > 0)
4425 arg0 = TREE_OPERAND (exp, 0);
4426 if (TREE_CODE_CLASS (code) == tcc_binary
4427 || TREE_CODE_CLASS (code) == tcc_comparison
4428 || (TREE_CODE_CLASS (code) == tcc_expression
4429 && TREE_OPERAND_LENGTH (exp) > 1))
4430 arg1 = TREE_OPERAND (exp, 1);
4431 }
4432 if (arg0 == NULL_TREE)
4433 break;
4434
4435 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4436 &high, &in_p, strict_overflow_p);
4437 if (nexp == NULL_TREE)
4438 break;
4439 exp = nexp;
4440 }
4441
4442 /* If EXP is a constant, we can evaluate whether this is true or false. */
4443 if (TREE_CODE (exp) == INTEGER_CST)
4444 {
4445 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4446 exp, 0, low, 0))
4447 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4448 exp, 1, high, 1)));
4449 low = high = 0;
4450 exp = 0;
4451 }
4452
4453 *pin_p = in_p, *plow = low, *phigh = high;
4454 return exp;
4455 }
4456 \f
4457 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4458 type, TYPE, return an expression to test if EXP is in (or out of, depending
4459 on IN_P) the range. Return 0 if the test couldn't be created. */
4460
4461 tree
4462 build_range_check (location_t loc, tree type, tree exp, int in_p,
4463 tree low, tree high)
4464 {
4465 tree etype = TREE_TYPE (exp), value;
4466
4467 #ifdef HAVE_canonicalize_funcptr_for_compare
4468 /* Disable this optimization for function pointer expressions
4469 on targets that require function pointer canonicalization. */
4470 if (HAVE_canonicalize_funcptr_for_compare
4471 && TREE_CODE (etype) == POINTER_TYPE
4472 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4473 return NULL_TREE;
4474 #endif
4475
4476 if (! in_p)
4477 {
4478 value = build_range_check (loc, type, exp, 1, low, high);
4479 if (value != 0)
4480 return invert_truthvalue_loc (loc, value);
4481
4482 return 0;
4483 }
4484
4485 if (low == 0 && high == 0)
4486 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4487
4488 if (low == 0)
4489 return fold_build2_loc (loc, LE_EXPR, type, exp,
4490 fold_convert_loc (loc, etype, high));
4491
4492 if (high == 0)
4493 return fold_build2_loc (loc, GE_EXPR, type, exp,
4494 fold_convert_loc (loc, etype, low));
4495
4496 if (operand_equal_p (low, high, 0))
4497 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4498 fold_convert_loc (loc, etype, low));
4499
4500 if (integer_zerop (low))
4501 {
4502 if (! TYPE_UNSIGNED (etype))
4503 {
4504 etype = unsigned_type_for (etype);
4505 high = fold_convert_loc (loc, etype, high);
4506 exp = fold_convert_loc (loc, etype, exp);
4507 }
4508 return build_range_check (loc, type, exp, 1, 0, high);
4509 }
4510
4511 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4512 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4513 {
4514 int prec = TYPE_PRECISION (etype);
4515
4516 if (wi::mask (prec - 1, false, prec) == high)
4517 {
4518 if (TYPE_UNSIGNED (etype))
4519 {
4520 tree signed_etype = signed_type_for (etype);
4521 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4522 etype
4523 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4524 else
4525 etype = signed_etype;
4526 exp = fold_convert_loc (loc, etype, exp);
4527 }
4528 return fold_build2_loc (loc, GT_EXPR, type, exp,
4529 build_int_cst (etype, 0));
4530 }
4531 }
4532
4533 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4534 This requires wrap-around arithmetic for the type of the expression.
4535 First make sure that arithmetic in this type is valid, then make sure
4536 that it wraps around. */
4537 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4538 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4539 TYPE_UNSIGNED (etype));
4540
4541 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4542 {
4543 tree utype, minv, maxv;
4544
4545 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4546 for the type in question, as we rely on this here. */
4547 utype = unsigned_type_for (etype);
4548 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4549 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4550 build_int_cst (TREE_TYPE (maxv), 1), 1);
4551 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4552
4553 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4554 minv, 1, maxv, 1)))
4555 etype = utype;
4556 else
4557 return 0;
4558 }
4559
4560 high = fold_convert_loc (loc, etype, high);
4561 low = fold_convert_loc (loc, etype, low);
4562 exp = fold_convert_loc (loc, etype, exp);
4563
4564 value = const_binop (MINUS_EXPR, high, low);
4565
4566
4567 if (POINTER_TYPE_P (etype))
4568 {
4569 if (value != 0 && !TREE_OVERFLOW (value))
4570 {
4571 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4572 return build_range_check (loc, type,
4573 fold_build_pointer_plus_loc (loc, exp, low),
4574 1, build_int_cst (etype, 0), value);
4575 }
4576 return 0;
4577 }
4578
4579 if (value != 0 && !TREE_OVERFLOW (value))
4580 return build_range_check (loc, type,
4581 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4582 1, build_int_cst (etype, 0), value);
4583
4584 return 0;
4585 }
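
/* Illustrative sketch, not part of GCC: the "(c >= 1) && (c <= 127) into
   (signed char) c > 0" special case handled above, checked exhaustively over
   all unsigned char values.  When HIGH is the signed maximum for the
   precision, the whole range test collapses into one signed comparison
   against zero.  The conversion of values above 127 to signed char is
   implementation-defined in C; the two's-complement wrapping that GCC
   targets provide is assumed here.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int c = 0; c <= 255; c++)
    {
      int pair = c >= 1 && c <= 127;
      int folded = (signed char) c > 0;
      assert (pair == folded);
    }
  return 0;
}
#endif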
4586 \f
4587 /* Return the predecessor of VAL in its type, handling the infinite case. */
4588
4589 static tree
4590 range_predecessor (tree val)
4591 {
4592 tree type = TREE_TYPE (val);
4593
4594 if (INTEGRAL_TYPE_P (type)
4595 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4596 return 0;
4597 else
4598 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4599 build_int_cst (TREE_TYPE (val), 1), 0);
4600 }
4601
4602 /* Return the successor of VAL in its type, handling the infinite case. */
4603
4604 static tree
4605 range_successor (tree val)
4606 {
4607 tree type = TREE_TYPE (val);
4608
4609 if (INTEGRAL_TYPE_P (type)
4610 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4611 return 0;
4612 else
4613 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4614 build_int_cst (TREE_TYPE (val), 1), 0);
4615 }
4616
4617 /* Given two ranges, see if we can merge them into one. Return 1 if we
4618 can, 0 if we can't. Set the output range into the specified parameters. */
4619
4620 bool
4621 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4622 tree high0, int in1_p, tree low1, tree high1)
4623 {
4624 int no_overlap;
4625 int subset;
4626 int temp;
4627 tree tem;
4628 int in_p;
4629 tree low, high;
4630 int lowequal = ((low0 == 0 && low1 == 0)
4631 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4632 low0, 0, low1, 0)));
4633 int highequal = ((high0 == 0 && high1 == 0)
4634 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4635 high0, 1, high1, 1)));
4636
4637 /* Make range 0 be the range that starts first, or ends last if they
4638 start at the same value. Swap them if it isn't. */
4639 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4640 low0, 0, low1, 0))
4641 || (lowequal
4642 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4643 high1, 1, high0, 1))))
4644 {
4645 temp = in0_p, in0_p = in1_p, in1_p = temp;
4646 tem = low0, low0 = low1, low1 = tem;
4647 tem = high0, high0 = high1, high1 = tem;
4648 }
4649
4650 /* Now flag two cases, whether the ranges are disjoint or whether the
4651 second range is totally subsumed in the first. Note that the tests
4652 below are simplified by the ones above. */
4653 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4654 high0, 1, low1, 0));
4655 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4656 high1, 1, high0, 1));
4657
4658 /* We now have four cases, depending on whether we are including or
4659 excluding the two ranges. */
4660 if (in0_p && in1_p)
4661 {
4662 /* If they don't overlap, the result is false. If the second range
4663 is a subset it is the result. Otherwise, the range is from the start
4664 of the second to the end of the first. */
4665 if (no_overlap)
4666 in_p = 0, low = high = 0;
4667 else if (subset)
4668 in_p = 1, low = low1, high = high1;
4669 else
4670 in_p = 1, low = low1, high = high0;
4671 }
4672
4673 else if (in0_p && ! in1_p)
4674 {
4675 /* If they don't overlap, the result is the first range. If they are
4676 equal, the result is false. If the second range is a subset of the
4677 first, and the ranges begin at the same place, we go from just after
4678 the end of the second range to the end of the first. If the second
4679 range is not a subset of the first, or if it is a subset and both
4680 ranges end at the same place, the range starts at the start of the
4681 first range and ends just before the second range.
4682 Otherwise, we can't describe this as a single range. */
4683 if (no_overlap)
4684 in_p = 1, low = low0, high = high0;
4685 else if (lowequal && highequal)
4686 in_p = 0, low = high = 0;
4687 else if (subset && lowequal)
4688 {
4689 low = range_successor (high1);
4690 high = high0;
4691 in_p = 1;
4692 if (low == 0)
4693 {
4694 /* We are in the weird situation where high0 > high1 but
4695 high1 has no successor. Punt. */
4696 return 0;
4697 }
4698 }
4699 else if (! subset || highequal)
4700 {
4701 low = low0;
4702 high = range_predecessor (low1);
4703 in_p = 1;
4704 if (high == 0)
4705 {
4706 /* low0 < low1 but low1 has no predecessor. Punt. */
4707 return 0;
4708 }
4709 }
4710 else
4711 return 0;
4712 }
4713
4714 else if (! in0_p && in1_p)
4715 {
4716 /* If they don't overlap, the result is the second range. If the second
4717 is a subset of the first, the result is false. Otherwise,
4718 the range starts just after the first range and ends at the
4719 end of the second. */
4720 if (no_overlap)
4721 in_p = 1, low = low1, high = high1;
4722 else if (subset || highequal)
4723 in_p = 0, low = high = 0;
4724 else
4725 {
4726 low = range_successor (high0);
4727 high = high1;
4728 in_p = 1;
4729 if (low == 0)
4730 {
4731 /* high1 > high0 but high0 has no successor. Punt. */
4732 return 0;
4733 }
4734 }
4735 }
4736
4737 else
4738 {
4739 /* The case where we are excluding both ranges. Here the complex case
4740 is if they don't overlap. In that case, the only time we have a
4741 range is if they are adjacent. If the second is a subset of the
4742 first, the result is the first. Otherwise, the range to exclude
4743 starts at the beginning of the first range and ends at the end of the
4744 second. */
4745 if (no_overlap)
4746 {
4747 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4748 range_successor (high0),
4749 1, low1, 0)))
4750 in_p = 0, low = low0, high = high1;
4751 else
4752 {
4753 /* Canonicalize - [min, x] into - [-, x]. */
4754 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4755 switch (TREE_CODE (TREE_TYPE (low0)))
4756 {
4757 case ENUMERAL_TYPE:
4758 if (TYPE_PRECISION (TREE_TYPE (low0))
4759 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4760 break;
4761 /* FALLTHROUGH */
4762 case INTEGER_TYPE:
4763 if (tree_int_cst_equal (low0,
4764 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4765 low0 = 0;
4766 break;
4767 case POINTER_TYPE:
4768 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4769 && integer_zerop (low0))
4770 low0 = 0;
4771 break;
4772 default:
4773 break;
4774 }
4775
4776 /* Canonicalize - [x, max] into - [x, -]. */
4777 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4778 switch (TREE_CODE (TREE_TYPE (high1)))
4779 {
4780 case ENUMERAL_TYPE:
4781 if (TYPE_PRECISION (TREE_TYPE (high1))
4782 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4783 break;
4784 /* FALLTHROUGH */
4785 case INTEGER_TYPE:
4786 if (tree_int_cst_equal (high1,
4787 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4788 high1 = 0;
4789 break;
4790 case POINTER_TYPE:
4791 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4792 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4793 high1, 1,
4794 build_int_cst (TREE_TYPE (high1), 1),
4795 1)))
4796 high1 = 0;
4797 break;
4798 default:
4799 break;
4800 }
4801
4802 /* The ranges might be also adjacent between the maximum and
4803 minimum values of the given type. For
4804 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4805 return + [x + 1, y - 1]. */
4806 if (low0 == 0 && high1 == 0)
4807 {
4808 low = range_successor (high0);
4809 high = range_predecessor (low1);
4810 if (low == 0 || high == 0)
4811 return 0;
4812
4813 in_p = 1;
4814 }
4815 else
4816 return 0;
4817 }
4818 }
4819 else if (subset)
4820 in_p = 0, low = low0, high = high0;
4821 else
4822 in_p = 0, low = low0, high = high1;
4823 }
4824
4825 *pin_p = in_p, *plow = low, *phigh = high;
4826 return 1;
4827 }
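
/* Illustrative sketch, not part of GCC: the simplest merge_ranges case, both
   ranges included and overlapping.  "+ [2, 10] and + [5, 20]" merge to
   "+ [5, 10]", the intersection; checked exhaustively below.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    {
      int conj = (x >= 2 && x <= 10) && (x >= 5 && x <= 20);
      int merged = x >= 5 && x <= 10;
      assert (conj == merged);
    }
  return 0;
}
#endif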
4828 \f
4829
4830 /* Subroutine of fold, looking inside expressions of the form
4831 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4832 of the COND_EXPR. This function is being used also to optimize
4833 A op B ? C : A, by reversing the comparison first.
4834
4835 Return a folded expression whose code is not a COND_EXPR
4836 anymore, or NULL_TREE if no folding opportunity is found. */
4837
4838 static tree
4839 fold_cond_expr_with_comparison (location_t loc, tree type,
4840 tree arg0, tree arg1, tree arg2)
4841 {
4842 enum tree_code comp_code = TREE_CODE (arg0);
4843 tree arg00 = TREE_OPERAND (arg0, 0);
4844 tree arg01 = TREE_OPERAND (arg0, 1);
4845 tree arg1_type = TREE_TYPE (arg1);
4846 tree tem;
4847
4848 STRIP_NOPS (arg1);
4849 STRIP_NOPS (arg2);
4850
4851 /* If we have A op 0 ? A : -A, consider applying the following
4852 transformations:
4853
4854 A == 0? A : -A same as -A
4855 A != 0? A : -A same as A
4856 A >= 0? A : -A same as abs (A)
4857 A > 0? A : -A same as abs (A)
4858 A <= 0? A : -A same as -abs (A)
4859 A < 0? A : -A same as -abs (A)
4860
4861 None of these transformations work for modes with signed
4862 zeros. If A is +/-0, the first two transformations will
4863 change the sign of the result (from +0 to -0, or vice
4864 versa). The last four will fix the sign of the result,
4865 even though the original expressions could be positive or
4866 negative, depending on the sign of A.
4867
4868 Note that all these transformations are correct if A is
4869 NaN, since the two alternatives (A and -A) are also NaNs. */
4870 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4871 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4872 ? real_zerop (arg01)
4873 : integer_zerop (arg01))
4874 && ((TREE_CODE (arg2) == NEGATE_EXPR
4875 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4876 /* In the case that A is of the form X-Y, '-A' (arg2) may
4877 have already been folded to Y-X, check for that. */
4878 || (TREE_CODE (arg1) == MINUS_EXPR
4879 && TREE_CODE (arg2) == MINUS_EXPR
4880 && operand_equal_p (TREE_OPERAND (arg1, 0),
4881 TREE_OPERAND (arg2, 1), 0)
4882 && operand_equal_p (TREE_OPERAND (arg1, 1),
4883 TREE_OPERAND (arg2, 0), 0))))
4884 switch (comp_code)
4885 {
4886 case EQ_EXPR:
4887 case UNEQ_EXPR:
4888 tem = fold_convert_loc (loc, arg1_type, arg1);
4889 return pedantic_non_lvalue_loc (loc,
4890 fold_convert_loc (loc, type,
4891 negate_expr (tem)));
4892 case NE_EXPR:
4893 case LTGT_EXPR:
4894 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4895 case UNGE_EXPR:
4896 case UNGT_EXPR:
4897 if (flag_trapping_math)
4898 break;
4899 /* Fall through. */
4900 case GE_EXPR:
4901 case GT_EXPR:
4902 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4903 arg1 = fold_convert_loc (loc, signed_type_for
4904 (TREE_TYPE (arg1)), arg1);
4905 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4906 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4907 case UNLE_EXPR:
4908 case UNLT_EXPR:
4909 if (flag_trapping_math)
4910 break;
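/* Fall through.  */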
4911 case LE_EXPR:
4912 case LT_EXPR:
4913 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4914 arg1 = fold_convert_loc (loc, signed_type_for
4915 (TREE_TYPE (arg1)), arg1);
4916 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4917 return negate_expr (fold_convert_loc (loc, type, tem));
4918 default:
4919 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4920 break;
4921 }
4922
4923 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4924 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4925 both transformations are correct when A is NaN: A != 0
4926 is then true, and A == 0 is false. */
4927
4928 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4929 && integer_zerop (arg01) && integer_zerop (arg2))
4930 {
4931 if (comp_code == NE_EXPR)
4932 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4933 else if (comp_code == EQ_EXPR)
4934 return build_zero_cst (type);
4935 }
4936
4937 /* Try some transformations of A op B ? A : B.
4938
4939 A == B? A : B same as B
4940 A != B? A : B same as A
4941 A >= B? A : B same as max (A, B)
4942 A > B? A : B same as max (B, A)
4943 A <= B? A : B same as min (A, B)
4944 A < B? A : B same as min (B, A)
4945
4946 As above, these transformations don't work in the presence
4947 of signed zeros. For example, if A and B are zeros of
4948 opposite sign, the first two transformations will change
4949 the sign of the result. In the last four, the original
4950 expressions give different results for (A=+0, B=-0) and
4951 (A=-0, B=+0), but the transformed expressions do not.
4952
4953 The first two transformations are correct if either A or B
4954 is a NaN. In the first transformation, the condition will
4955 be false, and B will indeed be chosen. In the case of the
4956 second transformation, the condition A != B will be true,
4957 and A will be chosen.
4958
4959 The conversions to max() and min() are not correct if B is
4960 a number and A is not. The conditions in the original
4961 expressions will be false, so all four give B. The min()
4962 and max() versions would give a NaN instead. */
4963 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4964 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4965 /* Avoid these transformations if the COND_EXPR may be used
4966 as an lvalue in the C++ front-end. PR c++/19199. */
4967 && (in_gimple_form
4968 || VECTOR_TYPE_P (type)
4969 || (! lang_GNU_CXX ()
4970 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4971 || ! maybe_lvalue_p (arg1)
4972 || ! maybe_lvalue_p (arg2)))
4973 {
4974 tree comp_op0 = arg00;
4975 tree comp_op1 = arg01;
4976 tree comp_type = TREE_TYPE (comp_op0);
4977
4978 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4979 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4980 {
4981 comp_type = type;
4982 comp_op0 = arg1;
4983 comp_op1 = arg2;
4984 }
4985
4986 switch (comp_code)
4987 {
4988 case EQ_EXPR:
4989 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4990 case NE_EXPR:
4991 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4992 case LE_EXPR:
4993 case LT_EXPR:
4994 case UNLE_EXPR:
4995 case UNLT_EXPR:
4996 /* In C++ a ?: expression can be an lvalue, so put the
4997 operand which will be used if they are equal first
4998 so that we can convert this back to the
4999 corresponding COND_EXPR. */
5000 if (!HONOR_NANS (arg1))
5001 {
5002 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5003 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5004 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5005 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5006 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5007 comp_op1, comp_op0);
5008 return pedantic_non_lvalue_loc (loc,
5009 fold_convert_loc (loc, type, tem));
5010 }
5011 break;
5012 case GE_EXPR:
5013 case GT_EXPR:
5014 case UNGE_EXPR:
5015 case UNGT_EXPR:
5016 if (!HONOR_NANS (arg1))
5017 {
5018 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5019 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5020 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5021 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5022 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5023 comp_op1, comp_op0);
5024 return pedantic_non_lvalue_loc (loc,
5025 fold_convert_loc (loc, type, tem));
5026 }
5027 break;
5028 case UNEQ_EXPR:
5029 if (!HONOR_NANS (arg1))
5030 return pedantic_non_lvalue_loc (loc,
5031 fold_convert_loc (loc, type, arg2));
5032 break;
5033 case LTGT_EXPR:
5034 if (!HONOR_NANS (arg1))
5035 return pedantic_non_lvalue_loc (loc,
5036 fold_convert_loc (loc, type, arg1));
5037 break;
5038 default:
5039 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5040 break;
5041 }
5042 }
5043
5044 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5045 we might still be able to simplify this. For example,
5046 if C1 is one less or one more than C2, this might have started
5047 out as a MIN or MAX and been transformed by this function.
5048 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5049
5050 if (INTEGRAL_TYPE_P (type)
5051 && TREE_CODE (arg01) == INTEGER_CST
5052 && TREE_CODE (arg2) == INTEGER_CST)
5053 switch (comp_code)
5054 {
5055 case EQ_EXPR:
5056 if (TREE_CODE (arg1) == INTEGER_CST)
5057 break;
5058 /* We can replace A with C1 in this case. */
5059 arg1 = fold_convert_loc (loc, type, arg01);
5060 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5061
5062 case LT_EXPR:
5063 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5064 MIN_EXPR, to preserve the signedness of the comparison. */
5065 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5066 OEP_ONLY_CONST)
5067 && operand_equal_p (arg01,
5068 const_binop (PLUS_EXPR, arg2,
5069 build_int_cst (type, 1)),
5070 OEP_ONLY_CONST))
5071 {
5072 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5073 fold_convert_loc (loc, TREE_TYPE (arg00),
5074 arg2));
5075 return pedantic_non_lvalue_loc (loc,
5076 fold_convert_loc (loc, type, tem));
5077 }
5078 break;
5079
5080 case LE_EXPR:
5081 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5082 as above. */
5083 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5084 OEP_ONLY_CONST)
5085 && operand_equal_p (arg01,
5086 const_binop (MINUS_EXPR, arg2,
5087 build_int_cst (type, 1)),
5088 OEP_ONLY_CONST))
5089 {
5090 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5091 fold_convert_loc (loc, TREE_TYPE (arg00),
5092 arg2));
5093 return pedantic_non_lvalue_loc (loc,
5094 fold_convert_loc (loc, type, tem));
5095 }
5096 break;
5097
5098 case GT_EXPR:
5099 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5100 MAX_EXPR, to preserve the signedness of the comparison. */
5101 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5102 OEP_ONLY_CONST)
5103 && operand_equal_p (arg01,
5104 const_binop (MINUS_EXPR, arg2,
5105 build_int_cst (type, 1)),
5106 OEP_ONLY_CONST))
5107 {
5108 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5109 fold_convert_loc (loc, TREE_TYPE (arg00),
5110 arg2));
5111 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5112 }
5113 break;
5114
5115 case GE_EXPR:
5116 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5117 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5118 OEP_ONLY_CONST)
5119 && operand_equal_p (arg01,
5120 const_binop (PLUS_EXPR, arg2,
5121 build_int_cst (type, 1)),
5122 OEP_ONLY_CONST))
5123 {
5124 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5125 fold_convert_loc (loc, TREE_TYPE (arg00),
5126 arg2));
5127 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5128 }
5129 break;
5130 case NE_EXPR:
5131 break;
5132 default:
5133 gcc_unreachable ();
5134 }
5135
5136 return NULL_TREE;
5137 }
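
/* Illustrative sketch, not part of GCC: the "A op 0 ? A : -A" table above on
   plain integers, where the signed-zero caveat does not apply.  Checked over
   a small range; INT_MIN is excluded since negating it overflows.  */
#if 0
#include <assert.h>
#include <stdlib.h>

int
main (void)
{
  for (int a = -1000; a <= 1000; a++)
    {
      assert ((a >= 0 ? a : -a) == abs (a));    /* A >= 0 ? A : -A  is  abs (A) */
      assert ((a <= 0 ? a : -a) == -abs (a));   /* A <= 0 ? A : -A  is  -abs (A) */
      assert ((a != 0 ? a : -a) == a);          /* A != 0 ? A : -A  is  A */
    }
  return 0;
}
#endif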
5138
5139
5140 \f
5141 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5142 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5143 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5144 false) >= 2)
5145 #endif
5146
5147 /* EXP is some logical combination of boolean tests. See if we can
5148 merge it into some range test. Return the new tree if so. */
5149
5150 static tree
5151 fold_range_test (location_t loc, enum tree_code code, tree type,
5152 tree op0, tree op1)
5153 {
5154 int or_op = (code == TRUTH_ORIF_EXPR
5155 || code == TRUTH_OR_EXPR);
5156 int in0_p, in1_p, in_p;
5157 tree low0, low1, low, high0, high1, high;
5158 bool strict_overflow_p = false;
5159 tree tem, lhs, rhs;
5160 const char * const warnmsg = G_("assuming signed overflow does not occur "
5161 "when simplifying range test");
5162
5163 if (!INTEGRAL_TYPE_P (type))
5164 return 0;
5165
5166 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5167 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5168
5169 /* If this is an OR operation, invert both sides; we will invert
5170 again at the end. */
5171 if (or_op)
5172 in0_p = ! in0_p, in1_p = ! in1_p;
5173
5174 /* If both expressions are the same, if we can merge the ranges, and we
5175 can build the range test, return it or it inverted. If one of the
5176 ranges is always true or always false, consider it to be the same
5177 expression as the other. */
5178 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5179 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5180 in1_p, low1, high1)
5181 && 0 != (tem = (build_range_check (loc, type,
5182 lhs != 0 ? lhs
5183 : rhs != 0 ? rhs : integer_zero_node,
5184 in_p, low, high))))
5185 {
5186 if (strict_overflow_p)
5187 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5188 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5189 }
5190
5191 /* On machines where branches are expensive, if this is a
5192 short-circuited branch and the underlying object on both sides
5193 is the same, make a non-short-circuit operation. */
5194 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5195 && lhs != 0 && rhs != 0
5196 && (code == TRUTH_ANDIF_EXPR
5197 || code == TRUTH_ORIF_EXPR)
5198 && operand_equal_p (lhs, rhs, 0))
5199 {
5200 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5201 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5202 which cases we can't do this. */
5203 if (simple_operand_p (lhs))
5204 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5205 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5206 type, op0, op1);
5207
5208 else if (!lang_hooks.decls.global_bindings_p ()
5209 && !CONTAINS_PLACEHOLDER_P (lhs))
5210 {
5211 tree common = save_expr (lhs);
5212
5213 if (0 != (lhs = build_range_check (loc, type, common,
5214 or_op ? ! in0_p : in0_p,
5215 low0, high0))
5216 && (0 != (rhs = build_range_check (loc, type, common,
5217 or_op ? ! in1_p : in1_p,
5218 low1, high1))))
5219 {
5220 if (strict_overflow_p)
5221 fold_overflow_warning (warnmsg,
5222 WARN_STRICT_OVERFLOW_COMPARISON);
5223 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5224 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5225 type, lhs, rhs);
5226 }
5227 }
5228 }
5229
5230 return 0;
5231 }
5232 \f
5233 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5234 bit value. Arrange things so the extra bits will be set to zero if and
5235 only if C is sign-extended to its full width. If MASK is nonzero,
5236 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5237
5238 static tree
5239 unextend (tree c, int p, int unsignedp, tree mask)
5240 {
5241 tree type = TREE_TYPE (c);
5242 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5243 tree temp;
5244
5245 if (p == modesize || unsignedp)
5246 return c;
5247
5248 /* We work by getting just the sign bit into the low-order bit, then
5249 into the high-order bit, then sign-extend. We then XOR that value
5250 with C. */
5251 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5252
5253 /* We must use a signed type in order to get an arithmetic right shift.
5254 However, we must also avoid introducing accidental overflows, so that
5255 a subsequent call to integer_zerop will work. Hence we must
5256 do the type conversion here. At this point, the constant is either
5257 zero or one, and the conversion to a signed type can never overflow.
5258 We could get an overflow if this conversion is done anywhere else. */
5259 if (TYPE_UNSIGNED (type))
5260 temp = fold_convert (signed_type_for (type), temp);
5261
5262 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5263 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5264 if (mask != 0)
5265 temp = const_binop (BIT_AND_EXPR, temp,
5266 fold_convert (TREE_TYPE (c), mask));
5267 /* If necessary, convert the type back to match the type of C. */
5268 if (TYPE_UNSIGNED (type))
5269 temp = fold_convert (type, temp);
5270
5271 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5272 }
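
/* Illustrative sketch, not part of GCC: the shift/XOR sign-extension idea
   that unextend is built on, in its classic "(x ^ m) - m" formulation on a
   plain 32-bit word, where m is the sign bit of the P-bit field.  This is a
   related standalone formulation, not the exact sequence above; the function
   name is hypothetical.  */
#if 0
#include <stdint.h>
#include <assert.h>

static int32_t
sign_extend (uint32_t x, unsigned int p)   /* extend a P-bit field, p < 32 */
{
  uint32_t m = (uint32_t) 1 << (p - 1);    /* sign bit of the field */
  return (int32_t) ((x ^ m) - m);          /* upper bits copy the sign bit */
}

int
main (void)
{
  assert (sign_extend (0x0f, 4) == -1);    /* 4-bit 1111 is -1 */
  assert (sign_extend (0x07, 4) == 7);     /* 4-bit 0111 is +7 */
  return 0;
}
#endif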
5273 \f
5274 /* For an expression that has the form
5275 (A && B) || ~B
5276 or
5277 (A || B) && ~B,
5278 we can drop one of the inner expressions and simplify to
5279 A || ~B
5280 or
5281 A && ~B
5282 LOC is the location of the resulting expression. OP is the inner
5283 logical operation (the left-hand side in the examples above), while CMPOP
5284 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5285 removing a condition that guards another, as in
5286 (A != NULL && A->...) || A == NULL
5287 which we must not transform. If RHS_ONLY is true, only eliminate the
5288 right-most operand of the inner logical operation. */
5289
5290 static tree
5291 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5292 bool rhs_only)
5293 {
5294 tree type = TREE_TYPE (cmpop);
5295 enum tree_code code = TREE_CODE (cmpop);
5296 enum tree_code truthop_code = TREE_CODE (op);
5297 tree lhs = TREE_OPERAND (op, 0);
5298 tree rhs = TREE_OPERAND (op, 1);
5299 tree orig_lhs = lhs, orig_rhs = rhs;
5300 enum tree_code rhs_code = TREE_CODE (rhs);
5301 enum tree_code lhs_code = TREE_CODE (lhs);
5302 enum tree_code inv_code;
5303
5304 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5305 return NULL_TREE;
5306
5307 if (TREE_CODE_CLASS (code) != tcc_comparison)
5308 return NULL_TREE;
5309
5310 if (rhs_code == truthop_code)
5311 {
5312 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5313 if (newrhs != NULL_TREE)
5314 {
5315 rhs = newrhs;
5316 rhs_code = TREE_CODE (rhs);
5317 }
5318 }
5319 if (lhs_code == truthop_code && !rhs_only)
5320 {
5321 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5322 if (newlhs != NULL_TREE)
5323 {
5324 lhs = newlhs;
5325 lhs_code = TREE_CODE (lhs);
5326 }
5327 }
5328
5329 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5330 if (inv_code == rhs_code
5331 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5332 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5333 return lhs;
5334 if (!rhs_only && inv_code == lhs_code
5335 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5336 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5337 return rhs;
5338 if (rhs != orig_rhs || lhs != orig_lhs)
5339 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5340 lhs, rhs);
5341 return NULL_TREE;
5342 }
5343
5344 /* Find ways of folding logical expressions of LHS and RHS:
5345 Try to merge two comparisons to the same innermost item.
5346 Look for range tests like "ch >= '0' && ch <= '9'".
5347 Look for combinations of simple terms on machines with expensive branches
5348 and evaluate the RHS unconditionally.
5349
5350 For example, if we have p->a == 2 && p->b == 4 and we can make an
5351 object large enough to span both A and B, we can do this with a comparison
5352 against the object ANDed with a mask.
5353
5354 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5355 operations to do this with one comparison.
5356
5357 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5358 function and the one above.
5359
5360 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5361 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5362
5363 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5364 two operands.
5365
5366 We return the simplified tree or 0 if no optimization is possible. */
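/* Illustrative sketch (hypothetical layout, little-endian assumed):
   given
     struct s { unsigned a : 8; unsigned b : 8; } *p;
   the test p->a == 2 && p->b == 4 can be folded into one 16-bit load
   and compare, roughly
     (*(unsigned short *) p) == (2 | (4 << 8)).  */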
5367
5368 static tree
5369 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5370 tree lhs, tree rhs)
5371 {
5372 /* If this is the "or" of two comparisons, we can do something if
5373 the comparisons are NE_EXPR. If this is the "and", we can do something
5374 if the comparisons are EQ_EXPR. I.e.,
5375 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5376
5377 WANTED_CODE is this operation code. For single bit fields, we can
5378 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5379 comparison for one-bit fields. */
5380
5381 enum tree_code wanted_code;
5382 enum tree_code lcode, rcode;
5383 tree ll_arg, lr_arg, rl_arg, rr_arg;
5384 tree ll_inner, lr_inner, rl_inner, rr_inner;
5385 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5386 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5387 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5388 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5389 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5390 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5391 machine_mode lnmode, rnmode;
5392 tree ll_mask, lr_mask, rl_mask, rr_mask;
5393 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5394 tree l_const, r_const;
5395 tree lntype, rntype, result;
5396 HOST_WIDE_INT first_bit, end_bit;
5397 int volatilep;
5398
5399 /* Start by getting the comparison codes. Fail if anything is volatile.
5400 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5401 it were surrounded with a NE_EXPR. */
5402
5403 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5404 return 0;
5405
5406 lcode = TREE_CODE (lhs);
5407 rcode = TREE_CODE (rhs);
5408
5409 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5410 {
5411 lhs = build2 (NE_EXPR, truth_type, lhs,
5412 build_int_cst (TREE_TYPE (lhs), 0));
5413 lcode = NE_EXPR;
5414 }
5415
5416 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5417 {
5418 rhs = build2 (NE_EXPR, truth_type, rhs,
5419 build_int_cst (TREE_TYPE (rhs), 0));
5420 rcode = NE_EXPR;
5421 }
5422
5423 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5424 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5425 return 0;
5426
5427 ll_arg = TREE_OPERAND (lhs, 0);
5428 lr_arg = TREE_OPERAND (lhs, 1);
5429 rl_arg = TREE_OPERAND (rhs, 0);
5430 rr_arg = TREE_OPERAND (rhs, 1);
5431
5432 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5433 if (simple_operand_p (ll_arg)
5434 && simple_operand_p (lr_arg))
5435 {
5436 if (operand_equal_p (ll_arg, rl_arg, 0)
5437 && operand_equal_p (lr_arg, rr_arg, 0))
5438 {
5439 result = combine_comparisons (loc, code, lcode, rcode,
5440 truth_type, ll_arg, lr_arg);
5441 if (result)
5442 return result;
5443 }
5444 else if (operand_equal_p (ll_arg, rr_arg, 0)
5445 && operand_equal_p (lr_arg, rl_arg, 0))
5446 {
5447 result = combine_comparisons (loc, code, lcode,
5448 swap_tree_comparison (rcode),
5449 truth_type, ll_arg, lr_arg);
5450 if (result)
5451 return result;
5452 }
5453 }
5454
5455 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5456 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5457
5458 /* If the RHS can be evaluated unconditionally and its operands are
5459 simple, it wins to evaluate the RHS unconditionally on machines
5460 with expensive branches. In this case, this isn't a comparison
5461 that can be merged. */
5462
5463 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5464 false) >= 2
5465 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5466 && simple_operand_p (rl_arg)
5467 && simple_operand_p (rr_arg))
5468 {
5469 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5470 if (code == TRUTH_OR_EXPR
5471 && lcode == NE_EXPR && integer_zerop (lr_arg)
5472 && rcode == NE_EXPR && integer_zerop (rr_arg)
5473 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5474 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5475 return build2_loc (loc, NE_EXPR, truth_type,
5476 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5477 ll_arg, rl_arg),
5478 build_int_cst (TREE_TYPE (ll_arg), 0));
5479
5480 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5481 if (code == TRUTH_AND_EXPR
5482 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5483 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5484 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5485 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5486 return build2_loc (loc, EQ_EXPR, truth_type,
5487 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5488 ll_arg, rl_arg),
5489 build_int_cst (TREE_TYPE (ll_arg), 0));
5490 }
5491
5492 /* See if the comparisons can be merged. Then get all the parameters for
5493 each side. */
5494
5495 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5496 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5497 return 0;
5498
5499 volatilep = 0;
5500 ll_inner = decode_field_reference (loc, ll_arg,
5501 &ll_bitsize, &ll_bitpos, &ll_mode,
5502 &ll_unsignedp, &volatilep, &ll_mask,
5503 &ll_and_mask);
5504 lr_inner = decode_field_reference (loc, lr_arg,
5505 &lr_bitsize, &lr_bitpos, &lr_mode,
5506 &lr_unsignedp, &volatilep, &lr_mask,
5507 &lr_and_mask);
5508 rl_inner = decode_field_reference (loc, rl_arg,
5509 &rl_bitsize, &rl_bitpos, &rl_mode,
5510 &rl_unsignedp, &volatilep, &rl_mask,
5511 &rl_and_mask);
5512 rr_inner = decode_field_reference (loc, rr_arg,
5513 &rr_bitsize, &rr_bitpos, &rr_mode,
5514 &rr_unsignedp, &volatilep, &rr_mask,
5515 &rr_and_mask);
5516
5517 /* The inner operation on the lhs of each comparison must be the same
5518 if we are to be able to do anything.
5519 Then see if we have constants. If not, the same must be true for
5520 the rhs's. */
5521 if (volatilep || ll_inner == 0 || rl_inner == 0
5522 || ! operand_equal_p (ll_inner, rl_inner, 0))
5523 return 0;
5524
5525 if (TREE_CODE (lr_arg) == INTEGER_CST
5526 && TREE_CODE (rr_arg) == INTEGER_CST)
5527 l_const = lr_arg, r_const = rr_arg;
5528 else if (lr_inner == 0 || rr_inner == 0
5529 || ! operand_equal_p (lr_inner, rr_inner, 0))
5530 return 0;
5531 else
5532 l_const = r_const = 0;
5533
5534 /* If either comparison code is not correct for our logical operation,
5535 fail. However, we can convert a one-bit comparison against zero into
5536 the opposite comparison against that bit being set in the field. */
5537
5538 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5539 if (lcode != wanted_code)
5540 {
5541 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5542 {
5543 /* Make the left operand unsigned, since we are only interested
5544 in the value of one bit. Otherwise we are doing the wrong
5545 thing below. */
5546 ll_unsignedp = 1;
5547 l_const = ll_mask;
5548 }
5549 else
5550 return 0;
5551 }
5552
5553 /* This is analogous to the code for l_const above. */
5554 if (rcode != wanted_code)
5555 {
5556 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5557 {
5558 rl_unsignedp = 1;
5559 r_const = rl_mask;
5560 }
5561 else
5562 return 0;
5563 }
5564
5565 /* See if we can find a mode that contains both fields being compared on
5566 the left. If we can't, fail. Otherwise, update all constants and masks
5567 to be relative to a field of that size. */
5568 first_bit = MIN (ll_bitpos, rl_bitpos);
5569 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5570 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5571 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5572 volatilep);
5573 if (lnmode == VOIDmode)
5574 return 0;
5575
5576 lnbitsize = GET_MODE_BITSIZE (lnmode);
5577 lnbitpos = first_bit & ~ (lnbitsize - 1);
5578 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5579 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5580
5581 if (BYTES_BIG_ENDIAN)
5582 {
5583 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5584 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5585 }
5586
5587 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5588 size_int (xll_bitpos));
5589 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5590 size_int (xrl_bitpos));
5591
5592 if (l_const)
5593 {
5594 l_const = fold_convert_loc (loc, lntype, l_const);
5595 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5596 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5597 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5598 fold_build1_loc (loc, BIT_NOT_EXPR,
5599 lntype, ll_mask))))
5600 {
5601 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5602
5603 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5604 }
5605 }
5606 if (r_const)
5607 {
5608 r_const = fold_convert_loc (loc, lntype, r_const);
5609 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5610 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5611 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5612 fold_build1_loc (loc, BIT_NOT_EXPR,
5613 lntype, rl_mask))))
5614 {
5615 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5616
5617 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5618 }
5619 }
5620
5621 /* If the right sides are not constant, do the same for them. Also,
5622 disallow this optimization if a size or signedness mismatch occurs
5623 between the left and right sides. */
5624 if (l_const == 0)
5625 {
5626 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5627 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5628 /* Make sure the two fields on the right
5629 correspond to the left without being swapped. */
5630 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5631 return 0;
5632
5633 first_bit = MIN (lr_bitpos, rr_bitpos);
5634 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5635 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5636 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5637 volatilep);
5638 if (rnmode == VOIDmode)
5639 return 0;
5640
5641 rnbitsize = GET_MODE_BITSIZE (rnmode);
5642 rnbitpos = first_bit & ~ (rnbitsize - 1);
5643 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5644 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5645
5646 if (BYTES_BIG_ENDIAN)
5647 {
5648 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5649 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5650 }
5651
5652 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5653 rntype, lr_mask),
5654 size_int (xlr_bitpos));
5655 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5656 rntype, rr_mask),
5657 size_int (xrr_bitpos));
5658
5659 /* Make a mask that corresponds to both fields being compared.
5660 Do this for both items being compared. If the operands are the
5661 same size and the bits being compared are in the same position
5662 then we can do this by masking both and comparing the masked
5663 results. */
5664 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5665 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5666 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5667 {
5668 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5669 ll_unsignedp || rl_unsignedp);
5670 if (! all_ones_mask_p (ll_mask, lnbitsize))
5671 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5672
5673 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5674 lr_unsignedp || rr_unsignedp);
5675 if (! all_ones_mask_p (lr_mask, rnbitsize))
5676 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5677
5678 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5679 }
5680
5681 /* There is still another way we can do something: If both pairs of
5682 fields being compared are adjacent, we may be able to make a wider
5683 field containing them both.
5684
5685 Note that we still must mask the lhs/rhs expressions. Furthermore,
5686 the mask must be shifted to account for the shift done by
5687 make_bit_field_ref. */
5688 if ((ll_bitsize + ll_bitpos == rl_bitpos
5689 && lr_bitsize + lr_bitpos == rr_bitpos)
5690 || (ll_bitpos == rl_bitpos + rl_bitsize
5691 && lr_bitpos == rr_bitpos + rr_bitsize))
5692 {
5693 tree type;
5694
5695 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5696 ll_bitsize + rl_bitsize,
5697 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5698 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5699 lr_bitsize + rr_bitsize,
5700 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5701
5702 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5703 size_int (MIN (xll_bitpos, xrl_bitpos)));
5704 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5705 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5706
5707 /* Convert to the smaller type before masking out unwanted bits. */
5708 type = lntype;
5709 if (lntype != rntype)
5710 {
5711 if (lnbitsize > rnbitsize)
5712 {
5713 lhs = fold_convert_loc (loc, rntype, lhs);
5714 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5715 type = rntype;
5716 }
5717 else if (lnbitsize < rnbitsize)
5718 {
5719 rhs = fold_convert_loc (loc, lntype, rhs);
5720 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5721 type = lntype;
5722 }
5723 }
5724
5725 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5726 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5727
5728 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5729 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5730
5731 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5732 }
5733
5734 return 0;
5735 }
5736
5737 /* Handle the case of comparisons with constants. If there is something in
5738 common between the masks, those bits of the constants must be the same.
5739 If not, the condition is always false. Test for this to avoid generating
5740 incorrect code below. */
5741 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5742 if (! integer_zerop (result)
5743 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5744 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5745 {
5746 if (wanted_code == NE_EXPR)
5747 {
5748 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5749 return constant_boolean_node (true, truth_type);
5750 }
5751 else
5752 {
5753 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5754 return constant_boolean_node (false, truth_type);
5755 }
5756 }
5757
5758 /* Construct the expression we will return. First get the component
5759 reference we will make. Unless the mask is all ones across the width
5760 of that field, perform the mask operation. Then compare with the
5761 merged constant. */
5762 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5763 ll_unsignedp || rl_unsignedp);
5764
5765 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5766 if (! all_ones_mask_p (ll_mask, lnbitsize))
5767 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5768
5769 return build2_loc (loc, wanted_code, truth_type, result,
5770 const_binop (BIT_IOR_EXPR, l_const, r_const));
5771 }
5772 \f
5773 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5774 constant. */
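/* For example (values chosen for exposition), MAX (x, 3) > 3 becomes
   x > 3 and MIN (x, 3) > 3 becomes constant false; the EQ_EXPR and
   GT_EXPR cases below enumerate these combinations.  */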
5775
5776 static tree
5777 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5778 tree op0, tree op1)
5779 {
5780 tree arg0 = op0;
5781 enum tree_code op_code;
5782 tree comp_const;
5783 tree minmax_const;
5784 int consts_equal, consts_lt;
5785 tree inner;
5786
5787 STRIP_SIGN_NOPS (arg0);
5788
5789 op_code = TREE_CODE (arg0);
5790 minmax_const = TREE_OPERAND (arg0, 1);
5791 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5792 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5793 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5794 inner = TREE_OPERAND (arg0, 0);
5795
5796 /* If something does not permit us to optimize, return the original tree. */
5797 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5798 || TREE_CODE (comp_const) != INTEGER_CST
5799 || TREE_OVERFLOW (comp_const)
5800 || TREE_CODE (minmax_const) != INTEGER_CST
5801 || TREE_OVERFLOW (minmax_const))
5802 return NULL_TREE;
5803
5804 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5805 and GT_EXPR, doing the rest with recursive calls using logical
5806 simplifications. */
5807 switch (code)
5808 {
5809 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5810 {
5811 tree tem
5812 = optimize_minmax_comparison (loc,
5813 invert_tree_comparison (code, false),
5814 type, op0, op1);
5815 if (tem)
5816 return invert_truthvalue_loc (loc, tem);
5817 return NULL_TREE;
5818 }
5819
5820 case GE_EXPR:
5821 return
5822 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5823 optimize_minmax_comparison
5824 (loc, EQ_EXPR, type, arg0, comp_const),
5825 optimize_minmax_comparison
5826 (loc, GT_EXPR, type, arg0, comp_const));
5827
5828 case EQ_EXPR:
5829 if (op_code == MAX_EXPR && consts_equal)
5830 /* MAX (X, 0) == 0 -> X <= 0 */
5831 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5832
5833 else if (op_code == MAX_EXPR && consts_lt)
5834 /* MAX (X, 0) == 5 -> X == 5 */
5835 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5836
5837 else if (op_code == MAX_EXPR)
5838 /* MAX (X, 0) == -1 -> false */
5839 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5840
5841 else if (consts_equal)
5842 /* MIN (X, 0) == 0 -> X >= 0 */
5843 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5844
5845 else if (consts_lt)
5846 /* MIN (X, 0) == 5 -> false */
5847 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5848
5849 else
5850 /* MIN (X, 0) == -1 -> X == -1 */
5851 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5852
5853 case GT_EXPR:
5854 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5855 /* MAX (X, 0) > 0 -> X > 0
5856 MAX (X, 0) > 5 -> X > 5 */
5857 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5858
5859 else if (op_code == MAX_EXPR)
5860 /* MAX (X, 0) > -1 -> true */
5861 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5862
5863 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5864 /* MIN (X, 0) > 0 -> false
5865 MIN (X, 0) > 5 -> false */
5866 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5867
5868 else
5869 /* MIN (X, 0) > -1 -> X > -1 */
5870 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5871
5872 default:
5873 return NULL_TREE;
5874 }
5875 }
5876 \f
5877 /* T is an integer expression that is being multiplied or divided by, or
5878 taken modulo, a constant C (CODE says which operation, and what kind of
5879 divide or modulus). See if we can eliminate that operation by folding it with
5880 other operations already in T. WIDE_TYPE, if non-null, is a type that
5881 should be used for the computation if wider than our type.
5882
5883 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5884 (X * 2) + (Y * 4). We must, however, be assured that either the original
5885 expression would not overflow or that overflow is undefined for the type
5886 in the language in question.
5887
5888 If we return a non-null expression, it is an equivalent form of the
5889 original computation, but need not be in the original type.
5890
5891 We set *STRICT_OVERFLOW_P to true if the return value depends on
5892 signed overflow being undefined. Otherwise we do not change
5893 *STRICT_OVERFLOW_P. */
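/* For instance, (x * 6) / 2 can be rewritten as x * 3 only when signed
   overflow is undefined, since x * 6 might wrap where x * 3 would not;
   such rewrites are reported through *STRICT_OVERFLOW_P.  */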
5894
5895 static tree
5896 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5897 bool *strict_overflow_p)
5898 {
5899 /* To avoid exponential search depth, refuse to allow recursion past
5900 three levels. Beyond that (1) it's highly unlikely that we'll find
5901 something interesting and (2) we've probably processed it before
5902 when we built the inner expression. */
5903
5904 static int depth;
5905 tree ret;
5906
5907 if (depth > 3)
5908 return NULL;
5909
5910 depth++;
5911 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5912 depth--;
5913
5914 return ret;
5915 }
5916
5917 static tree
5918 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5919 bool *strict_overflow_p)
5920 {
5921 tree type = TREE_TYPE (t);
5922 enum tree_code tcode = TREE_CODE (t);
5923 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5924 > GET_MODE_SIZE (TYPE_MODE (type)))
5925 ? wide_type : type);
5926 tree t1, t2;
5927 int same_p = tcode == code;
5928 tree op0 = NULL_TREE, op1 = NULL_TREE;
5929 bool sub_strict_overflow_p;
5930
5931 /* Don't deal with constants of zero here; they confuse the code below. */
5932 if (integer_zerop (c))
5933 return NULL_TREE;
5934
5935 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5936 op0 = TREE_OPERAND (t, 0);
5937
5938 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5939 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5940
5941 /* Note that we need not handle conditional operations here since fold
5942 already handles those cases. So just do arithmetic here. */
5943 switch (tcode)
5944 {
5945 case INTEGER_CST:
5946 /* For a constant, we can always simplify if we are a multiply
5947 or (for divide and modulus) if it is a multiple of our constant. */
5948 if (code == MULT_EXPR
5949 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5950 return const_binop (code, fold_convert (ctype, t),
5951 fold_convert (ctype, c));
5952 break;
5953
5954 CASE_CONVERT: case NON_LVALUE_EXPR:
5955 /* If op0 is an expression ... */
5956 if ((COMPARISON_CLASS_P (op0)
5957 || UNARY_CLASS_P (op0)
5958 || BINARY_CLASS_P (op0)
5959 || VL_EXP_CLASS_P (op0)
5960 || EXPRESSION_CLASS_P (op0))
5961 /* ... and has wrapping overflow, and its type is smaller
5962 than ctype, then we cannot pass through as widening. */
5963 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5964 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5965 && (TYPE_PRECISION (ctype)
5966 > TYPE_PRECISION (TREE_TYPE (op0))))
5967 /* ... or this is a truncation (t is narrower than op0),
5968 then we cannot pass through this narrowing. */
5969 || (TYPE_PRECISION (type)
5970 < TYPE_PRECISION (TREE_TYPE (op0)))
5971 /* ... or signedness changes for division or modulus,
5972 then we cannot pass through this conversion. */
5973 || (code != MULT_EXPR
5974 && (TYPE_UNSIGNED (ctype)
5975 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5976 /* ... or has undefined overflow while the converted to
5977 type has not, we cannot do the operation in the inner type
5978 as that would introduce undefined overflow. */
5979 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5980 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
5981 && !TYPE_OVERFLOW_UNDEFINED (type))))
5982 break;
5983
5984 /* Pass the constant down and see if we can make a simplification. If
5985 we can, replace this expression with the inner simplification for
5986 possible later conversion to our or some other type. */
5987 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5988 && TREE_CODE (t2) == INTEGER_CST
5989 && !TREE_OVERFLOW (t2)
5990 && (0 != (t1 = extract_muldiv (op0, t2, code,
5991 code == MULT_EXPR
5992 ? ctype : NULL_TREE,
5993 strict_overflow_p))))
5994 return t1;
5995 break;
5996
5997 case ABS_EXPR:
5998 /* If widening the type changes it from signed to unsigned, then we
5999 must avoid building ABS_EXPR itself as unsigned. */
6000 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6001 {
6002 tree cstype = (*signed_type_for) (ctype);
6003 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6004 != 0)
6005 {
6006 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6007 return fold_convert (ctype, t1);
6008 }
6009 break;
6010 }
6011 /* If the constant is negative, we cannot simplify this. */
6012 if (tree_int_cst_sgn (c) == -1)
6013 break;
6014 /* FALLTHROUGH */
6015 case NEGATE_EXPR:
6016 /* For division and modulus, type can't be unsigned, as e.g.
6017 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6018 For signed types, even with wrapping overflow, this is fine. */
6019 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6020 break;
6021 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6022 != 0)
6023 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6024 break;
6025
6026 case MIN_EXPR: case MAX_EXPR:
6027 /* If widening the type changes the signedness, then we can't perform
6028 this optimization as that changes the result. */
6029 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6030 break;
6031
6032 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6033 sub_strict_overflow_p = false;
6034 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6035 &sub_strict_overflow_p)) != 0
6036 && (t2 = extract_muldiv (op1, c, code, wide_type,
6037 &sub_strict_overflow_p)) != 0)
6038 {
6039 if (tree_int_cst_sgn (c) < 0)
6040 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6041 if (sub_strict_overflow_p)
6042 *strict_overflow_p = true;
6043 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6044 fold_convert (ctype, t2));
6045 }
6046 break;
6047
6048 case LSHIFT_EXPR: case RSHIFT_EXPR:
6049 /* If the second operand is constant, this is a multiplication
6050 or floor division, by a power of two, so we can treat it that
6051 way unless the multiplier or divisor overflows. Signed
6052 left-shift overflow is implementation-defined rather than
6053 undefined in C90, so do not convert signed left shift into
6054 multiplication. */
6055 if (TREE_CODE (op1) == INTEGER_CST
6056 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6057 /* const_binop may not detect overflow correctly,
6058 so check for it explicitly here. */
6059 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6060 && 0 != (t1 = fold_convert (ctype,
6061 const_binop (LSHIFT_EXPR,
6062 size_one_node,
6063 op1)))
6064 && !TREE_OVERFLOW (t1))
6065 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6066 ? MULT_EXPR : FLOOR_DIV_EXPR,
6067 ctype,
6068 fold_convert (ctype, op0),
6069 t1),
6070 c, code, wide_type, strict_overflow_p);
6071 break;
6072
6073 case PLUS_EXPR: case MINUS_EXPR:
6074 /* See if we can eliminate the operation on both sides. If we can, we
6075 can return a new PLUS or MINUS. If we can't, the only remaining
6076 cases where we can do anything are if the second operand is a
6077 constant. */
6078 sub_strict_overflow_p = false;
6079 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6080 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6081 if (t1 != 0 && t2 != 0
6082 && (code == MULT_EXPR
6083 /* If not multiplication, we can only do this if both operands
6084 are divisible by c. */
6085 || (multiple_of_p (ctype, op0, c)
6086 && multiple_of_p (ctype, op1, c))))
6087 {
6088 if (sub_strict_overflow_p)
6089 *strict_overflow_p = true;
6090 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6091 fold_convert (ctype, t2));
6092 }
6093
6094 /* If this was a subtraction, negate OP1 and set it to be an addition.
6095 This simplifies the logic below. */
6096 if (tcode == MINUS_EXPR)
6097 {
6098 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6099 /* If OP1 was not easily negatable, the constant may be OP0. */
6100 if (TREE_CODE (op0) == INTEGER_CST)
6101 {
6102 tree tem = op0;
6103 op0 = op1;
6104 op1 = tem;
6105 tem = t1;
6106 t1 = t2;
6107 t2 = tem;
6108 }
6109 }
6110
6111 if (TREE_CODE (op1) != INTEGER_CST)
6112 break;
6113
6114 /* If either OP1 or C is negative, this optimization is not safe for
6115 some of the division and remainder types, while for others we need
6116 to change the code. */
6117 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6118 {
6119 if (code == CEIL_DIV_EXPR)
6120 code = FLOOR_DIV_EXPR;
6121 else if (code == FLOOR_DIV_EXPR)
6122 code = CEIL_DIV_EXPR;
6123 else if (code != MULT_EXPR
6124 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6125 break;
6126 }
6127
6128 /* If it's a multiply or a division/modulus operation of a multiple
6129 of our constant, do the operation and verify it doesn't overflow. */
6130 if (code == MULT_EXPR
6131 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6132 {
6133 op1 = const_binop (code, fold_convert (ctype, op1),
6134 fold_convert (ctype, c));
6135 /* We allow the constant to overflow with wrapping semantics. */
6136 if (op1 == 0
6137 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6138 break;
6139 }
6140 else
6141 break;
6142
6143 /* If we have an unsigned type, we cannot widen the operation since it
6144 will change the result if the original computation overflowed. */
6145 if (TYPE_UNSIGNED (ctype) && ctype != type)
6146 break;
6147
6148 /* If we were able to eliminate our operation from the first side,
6149 apply our operation to the second side and reform the PLUS. */
6150 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6151 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6152
6153 /* The last case is if we are a multiply. In that case, we can
6154 apply the distributive law to commute the multiply and addition
6155 if the multiplication of the constants doesn't overflow
6156 and overflow is defined. With undefined overflow
6157 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6158 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6159 return fold_build2 (tcode, ctype,
6160 fold_build2 (code, ctype,
6161 fold_convert (ctype, op0),
6162 fold_convert (ctype, c)),
6163 op1);
6164
6165 break;
6166
6167 case MULT_EXPR:
6168 /* We have a special case here if we are doing something like
6169 (C * 8) % 4 since we know that's zero. */
6170 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6171 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6172 /* If the multiplication can overflow we cannot optimize this. */
6173 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6174 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6175 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6176 {
6177 *strict_overflow_p = true;
6178 return omit_one_operand (type, integer_zero_node, op0);
6179 }
6180
6181 /* ... fall through ... */
6182
6183 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6184 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6185 /* If we can extract our operation from the LHS, do so and return a
6186 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6187 do something only if the second operand is a constant. */
6188 if (same_p
6189 && (t1 = extract_muldiv (op0, c, code, wide_type,
6190 strict_overflow_p)) != 0)
6191 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6192 fold_convert (ctype, op1));
6193 else if (tcode == MULT_EXPR && code == MULT_EXPR
6194 && (t1 = extract_muldiv (op1, c, code, wide_type,
6195 strict_overflow_p)) != 0)
6196 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6197 fold_convert (ctype, t1));
6198 else if (TREE_CODE (op1) != INTEGER_CST)
6199 return 0;
6200
6201 /* If these are the same operation types, we can associate them
6202 assuming no overflow. */
6203 if (tcode == code)
6204 {
6205 bool overflow_p = false;
6206 bool overflow_mul_p;
6207 signop sign = TYPE_SIGN (ctype);
6208 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6209 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6210 if (overflow_mul_p
6211 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6212 overflow_p = true;
6213 if (!overflow_p)
6214 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6215 wide_int_to_tree (ctype, mul));
6216 }
6217
6218 /* If these operations "cancel" each other, we have the main
6219 optimizations of this pass, which occur when either constant is a
6220 multiple of the other, in which case we replace this with either an
6221 operation of CODE or TCODE.
6222
6223 If we have an unsigned type, we cannot do this since it will change
6224 the result if the original computation overflowed. */
6225 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6226 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6227 || (tcode == MULT_EXPR
6228 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6229 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6230 && code != MULT_EXPR)))
6231 {
6232 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6233 {
6234 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6235 *strict_overflow_p = true;
6236 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6237 fold_convert (ctype,
6238 const_binop (TRUNC_DIV_EXPR,
6239 op1, c)));
6240 }
6241 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6242 {
6243 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6244 *strict_overflow_p = true;
6245 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6246 fold_convert (ctype,
6247 const_binop (TRUNC_DIV_EXPR,
6248 c, op1)));
6249 }
6250 }
6251 break;
6252
6253 default:
6254 break;
6255 }
6256
6257 return 0;
6258 }
6259 \f
6260 /* Return a node which has the indicated constant VALUE (either 0 or
6261 1 for scalars, or {-1,-1,...} or {0,0,...} for vectors),
6262 and is of the indicated TYPE. */
6263
6264 tree
6265 constant_boolean_node (bool value, tree type)
6266 {
6267 if (type == integer_type_node)
6268 return value ? integer_one_node : integer_zero_node;
6269 else if (type == boolean_type_node)
6270 return value ? boolean_true_node : boolean_false_node;
6271 else if (TREE_CODE (type) == VECTOR_TYPE)
6272 return build_vector_from_val (type,
6273 build_int_cst (TREE_TYPE (type),
6274 value ? -1 : 0));
6275 else
6276 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6277 }
6278
6279
6280 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6281 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6282 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6283 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6284 COND is the first argument to CODE; otherwise (as in the example
6285 given here), it is the second argument. TYPE is the type of the
6286 original expression. Return NULL_TREE if no simplification is
6287 possible. */
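/* For example (hypothetical operands), adding 1 to (b ? 2 : 0) yields
   b ? 3 : 1: both branches fold to constants, satisfying the check
   below that at least one branch simplified.  */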
6288
6289 static tree
6290 fold_binary_op_with_conditional_arg (location_t loc,
6291 enum tree_code code,
6292 tree type, tree op0, tree op1,
6293 tree cond, tree arg, int cond_first_p)
6294 {
6295 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6296 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6297 tree test, true_value, false_value;
6298 tree lhs = NULL_TREE;
6299 tree rhs = NULL_TREE;
6300 enum tree_code cond_code = COND_EXPR;
6301
6302 if (TREE_CODE (cond) == COND_EXPR
6303 || TREE_CODE (cond) == VEC_COND_EXPR)
6304 {
6305 test = TREE_OPERAND (cond, 0);
6306 true_value = TREE_OPERAND (cond, 1);
6307 false_value = TREE_OPERAND (cond, 2);
6308 /* If this operand is a throw expression (and hence has void type),
6309 it does not make sense to try to perform a logical or arithmetic
6310 operation involving it. */
6311 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6312 lhs = true_value;
6313 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6314 rhs = false_value;
6315 }
6316 else
6317 {
6318 tree testtype = TREE_TYPE (cond);
6319 test = cond;
6320 true_value = constant_boolean_node (true, testtype);
6321 false_value = constant_boolean_node (false, testtype);
6322 }
6323
6324 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6325 cond_code = VEC_COND_EXPR;
6326
6327 /* This transformation is only worthwhile if we don't have to wrap ARG
6328 in a SAVE_EXPR and the operation can be simplified without recursing
6329 on at least one of the branches once it's pushed inside the COND_EXPR.
6330 if (!TREE_CONSTANT (arg)
6331 && (TREE_SIDE_EFFECTS (arg)
6332 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6333 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6334 return NULL_TREE;
6335
6336 arg = fold_convert_loc (loc, arg_type, arg);
6337 if (lhs == 0)
6338 {
6339 true_value = fold_convert_loc (loc, cond_type, true_value);
6340 if (cond_first_p)
6341 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6342 else
6343 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6344 }
6345 if (rhs == 0)
6346 {
6347 false_value = fold_convert_loc (loc, cond_type, false_value);
6348 if (cond_first_p)
6349 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6350 else
6351 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6352 }
6353
6354 /* Check that we have simplified at least one of the branches. */
6355 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6356 return NULL_TREE;
6357
6358 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6359 }
6360
6361 \f
6362 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6363
6364 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6365 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6366 ADDEND is the same as X.
6367
6368 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6369 and finite. The problematic cases are when X is zero, and its mode
6370 has signed zeros. In the case of rounding towards -infinity,
6371 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6372 modes, X + 0 is not the same as X because -0 + 0 is 0. */
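/* Concretely: under the default rounding mode -0.0 + 0.0 is +0.0, so
   X + 0.0 cannot be folded to X when signed zeros are honored, whereas
   X - 0.0 can be, since -0.0 - 0.0 is -0.0 there.  */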
6373
6374 bool
6375 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6376 {
6377 if (!real_zerop (addend))
6378 return false;
6379
6380 /* Don't allow the fold with -fsignaling-nans. */
6381 if (HONOR_SNANS (element_mode (type)))
6382 return false;
6383
6384 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6385 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6386 return true;
6387
6388 /* In a vector or complex, we would need to check the sign of all zeros. */
6389 if (TREE_CODE (addend) != REAL_CST)
6390 return false;
6391
6392 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6393 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6394 negate = !negate;
6395
6396 /* The mode has signed zeros, and we have to honor their sign.
6397 In this situation, there is only one case we can return true for.
6398 X - 0 is the same as X unless rounding towards -infinity is
6399 supported. */
6400 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6401 }
6402
6403 /* Subroutine of fold() that checks comparisons of built-in math
6404 functions against real constants.
6405
6406 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6407 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6408 is the type of the result and ARG0 and ARG1 are the operands of the
6409 comparison. ARG1 must be a TREE_REAL_CST.
6410
6411 The function returns the constant folded tree if a simplification
6412 can be made, and NULL_TREE otherwise. */
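/* For example, sqrt (x) > 2.0 folds to x > 4.0 below, and
   sqrt (x) < -1.0 folds to constant false outright, since sqrt never
   returns a negative value.  */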
6413
6414 static tree
6415 fold_mathfn_compare (location_t loc,
6416 enum built_in_function fcode, enum tree_code code,
6417 tree type, tree arg0, tree arg1)
6418 {
6419 REAL_VALUE_TYPE c;
6420
6421 if (BUILTIN_SQRT_P (fcode))
6422 {
6423 tree arg = CALL_EXPR_ARG (arg0, 0);
6424 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6425
6426 c = TREE_REAL_CST (arg1);
6427 if (REAL_VALUE_NEGATIVE (c))
6428 {
6429 /* sqrt(x) < y is always false, if y is negative. */
6430 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6431 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6432
6433 /* sqrt(x) > y is always true, if y is negative and we
6434 don't care about NaNs, i.e. negative values of x. */
6435 if (code == NE_EXPR || !HONOR_NANS (mode))
6436 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6437
6438 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6439 return fold_build2_loc (loc, GE_EXPR, type, arg,
6440 build_real (TREE_TYPE (arg), dconst0));
6441 }
6442 else if (code == GT_EXPR || code == GE_EXPR)
6443 {
6444 REAL_VALUE_TYPE c2;
6445
6446 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6447 real_convert (&c2, mode, &c2);
6448
6449 if (REAL_VALUE_ISINF (c2))
6450 {
6451 /* sqrt(x) > y is x == +Inf, when y is very large. */
6452 if (HONOR_INFINITIES (mode))
6453 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6454 build_real (TREE_TYPE (arg), c2));
6455
6456 /* sqrt(x) > y is always false, when y is very large
6457 and we don't care about infinities. */
6458 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6459 }
6460
6461 /* sqrt(x) > c is the same as x > c*c. */
6462 return fold_build2_loc (loc, code, type, arg,
6463 build_real (TREE_TYPE (arg), c2));
6464 }
6465 else if (code == LT_EXPR || code == LE_EXPR)
6466 {
6467 REAL_VALUE_TYPE c2;
6468
6469 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6470 real_convert (&c2, mode, &c2);
6471
6472 if (REAL_VALUE_ISINF (c2))
6473 {
6474 /* sqrt(x) < y is always true, when y is a very large
6475 value and we don't care about NaNs or Infinities. */
6476 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6477 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6478
6479 /* sqrt(x) < y is x != +Inf when y is very large and we
6480 don't care about NaNs. */
6481 if (! HONOR_NANS (mode))
6482 return fold_build2_loc (loc, NE_EXPR, type, arg,
6483 build_real (TREE_TYPE (arg), c2));
6484
6485 /* sqrt(x) < y is x >= 0 when y is very large and we
6486 don't care about Infinities. */
6487 if (! HONOR_INFINITIES (mode))
6488 return fold_build2_loc (loc, GE_EXPR, type, arg,
6489 build_real (TREE_TYPE (arg), dconst0));
6490
6491 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6492 arg = save_expr (arg);
6493 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6494 fold_build2_loc (loc, GE_EXPR, type, arg,
6495 build_real (TREE_TYPE (arg),
6496 dconst0)),
6497 fold_build2_loc (loc, NE_EXPR, type, arg,
6498 build_real (TREE_TYPE (arg),
6499 c2)));
6500 }
6501
6502 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6503 if (! HONOR_NANS (mode))
6504 return fold_build2_loc (loc, code, type, arg,
6505 build_real (TREE_TYPE (arg), c2));
6506
6507 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6508 arg = save_expr (arg);
6509 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6510 fold_build2_loc (loc, GE_EXPR, type, arg,
6511 build_real (TREE_TYPE (arg),
6512 dconst0)),
6513 fold_build2_loc (loc, code, type, arg,
6514 build_real (TREE_TYPE (arg),
6515 c2)));
6516 }
6517 }
6518
6519 return NULL_TREE;
6520 }
6521
6522 /* Subroutine of fold() that optimizes comparisons against Infinities,
6523 either +Inf or -Inf.
6524
6525 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6526 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6527 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6528
6529 The function returns the constant folded tree if a simplification
6530 can be made, and NULL_TREE otherwise. */
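/* For example, x < +Inf becomes x <= DBL_MAX below, and x > +Inf
   becomes constant false when signaling NaNs need not be honored.  */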
6531
6532 static tree
6533 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6534 tree arg0, tree arg1)
6535 {
6536 machine_mode mode;
6537 REAL_VALUE_TYPE max;
6538 tree temp;
6539 bool neg;
6540
6541 mode = TYPE_MODE (TREE_TYPE (arg0));
6542
6543 /* For negative infinity swap the sense of the comparison. */
6544 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6545 if (neg)
6546 code = swap_tree_comparison (code);
6547
6548 switch (code)
6549 {
6550 case GT_EXPR:
6551 /* x > +Inf is always false, if we ignore sNaNs. */
6552 if (HONOR_SNANS (mode))
6553 return NULL_TREE;
6554 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6555
6556 case LE_EXPR:
6557 /* x <= +Inf is always true, if we don't care about NaNs. */
6558 if (! HONOR_NANS (mode))
6559 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6560
6561 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6562 arg0 = save_expr (arg0);
6563 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6564
6565 case EQ_EXPR:
6566 case GE_EXPR:
6567 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6568 real_maxval (&max, neg, mode);
6569 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6570 arg0, build_real (TREE_TYPE (arg0), max));
6571
6572 case LT_EXPR:
6573 /* x < +Inf is always equal to x <= DBL_MAX. */
6574 real_maxval (&max, neg, mode);
6575 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6576 arg0, build_real (TREE_TYPE (arg0), max));
6577
6578 case NE_EXPR:
6579 /* x != +Inf is always equal to !(x > DBL_MAX). */
6580 real_maxval (&max, neg, mode);
6581 if (! HONOR_NANS (mode))
6582 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6583 arg0, build_real (TREE_TYPE (arg0), max));
6584
6585 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6586 arg0, build_real (TREE_TYPE (arg0), max));
6587 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6588
6589 default:
6590 break;
6591 }
6592
6593 return NULL_TREE;
6594 }
6595
6596 /* Subroutine of fold() that optimizes comparisons of a division by
6597 a nonzero integer constant against an integer constant, i.e.
6598 X/C1 op C2.
6599
6600 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6601 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6602 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6603
6604 The function returns the constant folded tree if a simplification
6605 can be made, and NULL_TREE otherwise. */
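/* For example, with unsigned x the test x / 4 == 3 holds exactly for
   x in [12, 15], so it folds to the range check 12 <= x && x <= 15.  */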
6606
6607 static tree
6608 fold_div_compare (location_t loc,
6609 enum tree_code code, tree type, tree arg0, tree arg1)
6610 {
6611 tree prod, tmp, hi, lo;
6612 tree arg00 = TREE_OPERAND (arg0, 0);
6613 tree arg01 = TREE_OPERAND (arg0, 1);
6614 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6615 bool neg_overflow = false;
6616 bool overflow;
6617
6618 /* We have to do this the hard way to detect unsigned overflow.
6619 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6620 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6621 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6622 neg_overflow = false;
6623
6624 if (sign == UNSIGNED)
6625 {
6626 tmp = int_const_binop (MINUS_EXPR, arg01,
6627 build_int_cst (TREE_TYPE (arg01), 1));
6628 lo = prod;
6629
6630 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6631 val = wi::add (prod, tmp, sign, &overflow);
6632 hi = force_fit_type (TREE_TYPE (arg00), val,
6633 -1, overflow | TREE_OVERFLOW (prod));
6634 }
6635 else if (tree_int_cst_sgn (arg01) >= 0)
6636 {
6637 tmp = int_const_binop (MINUS_EXPR, arg01,
6638 build_int_cst (TREE_TYPE (arg01), 1));
6639 switch (tree_int_cst_sgn (arg1))
6640 {
6641 case -1:
6642 neg_overflow = true;
6643 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6644 hi = prod;
6645 break;
6646
6647 case 0:
6648 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6649 hi = tmp;
6650 break;
6651
6652 case 1:
6653 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6654 lo = prod;
6655 break;
6656
6657 default:
6658 gcc_unreachable ();
6659 }
6660 }
6661 else
6662 {
6663 /* A negative divisor reverses the relational operators. */
6664 code = swap_tree_comparison (code);
6665
6666 tmp = int_const_binop (PLUS_EXPR, arg01,
6667 build_int_cst (TREE_TYPE (arg01), 1));
6668 switch (tree_int_cst_sgn (arg1))
6669 {
6670 case -1:
6671 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6672 lo = prod;
6673 break;
6674
6675 case 0:
6676 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6677 lo = tmp;
6678 break;
6679
6680 case 1:
6681 neg_overflow = true;
6682 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6683 hi = prod;
6684 break;
6685
6686 default:
6687 gcc_unreachable ();
6688 }
6689 }
6690
6691 switch (code)
6692 {
6693 case EQ_EXPR:
6694 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6695 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6696 if (TREE_OVERFLOW (hi))
6697 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6698 if (TREE_OVERFLOW (lo))
6699 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6700 return build_range_check (loc, type, arg00, 1, lo, hi);
6701
6702 case NE_EXPR:
6703 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6704 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6705 if (TREE_OVERFLOW (hi))
6706 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6707 if (TREE_OVERFLOW (lo))
6708 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6709 return build_range_check (loc, type, arg00, 0, lo, hi);
6710
6711 case LT_EXPR:
6712 if (TREE_OVERFLOW (lo))
6713 {
6714 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6715 return omit_one_operand_loc (loc, type, tmp, arg00);
6716 }
6717 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6718
6719 case LE_EXPR:
6720 if (TREE_OVERFLOW (hi))
6721 {
6722 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6723 return omit_one_operand_loc (loc, type, tmp, arg00);
6724 }
6725 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6726
6727 case GT_EXPR:
6728 if (TREE_OVERFLOW (hi))
6729 {
6730 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6731 return omit_one_operand_loc (loc, type, tmp, arg00);
6732 }
6733 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6734
6735 case GE_EXPR:
6736 if (TREE_OVERFLOW (lo))
6737 {
6738 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6739 return omit_one_operand_loc (loc, type, tmp, arg00);
6740 }
6741 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6742
6743 default:
6744 break;
6745 }
6746
6747 return NULL_TREE;
6748 }
6749
6750
6751 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6752 equality/inequality test, then return a simplified form of the test
6753 using a sign test. Otherwise return NULL. TYPE is the desired
6754 result type. */
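/* For example, with a 32-bit unsigned x, (x & 0x80000000) != 0 tests
   the sign bit and becomes (int) x < 0, while (x & 0x80000000) == 0
   becomes (int) x >= 0.  */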
6755
6756 static tree
6757 fold_single_bit_test_into_sign_test (location_t loc,
6758 enum tree_code code, tree arg0, tree arg1,
6759 tree result_type)
6760 {
6761 /* If this is testing a single bit, we can optimize the test. */
6762 if ((code == NE_EXPR || code == EQ_EXPR)
6763 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6764 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6765 {
6766 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6767 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6768 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6769
6770 if (arg00 != NULL_TREE
6771 /* This is only a win if casting to a signed type is cheap,
6772 i.e. when arg00's type is not a partial mode. */
6773 && TYPE_PRECISION (TREE_TYPE (arg00))
6774 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6775 {
6776 tree stype = signed_type_for (TREE_TYPE (arg00));
6777 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6778 result_type,
6779 fold_convert_loc (loc, stype, arg00),
6780 build_int_cst (stype, 0));
6781 }
6782 }
6783
6784 return NULL_TREE;
6785 }
6786
6787 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6788 equality/inequality test, then return a simplified form of
6789 the test using shifts and logical operations. Otherwise return
6790 NULL. TYPE is the desired result type. */
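/* For example, (x & 8) != 0 becomes (x >> 3) & 1 and (x & 8) == 0
   becomes ((x >> 3) ^ 1) & 1, using the shift-and-mask form built
   below.  */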
6791
6792 tree
6793 fold_single_bit_test (location_t loc, enum tree_code code,
6794 tree arg0, tree arg1, tree result_type)
6795 {
6796 /* If this is testing a single bit, we can optimize the test. */
6797 if ((code == NE_EXPR || code == EQ_EXPR)
6798 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6799 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6800 {
6801 tree inner = TREE_OPERAND (arg0, 0);
6802 tree type = TREE_TYPE (arg0);
6803 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6804 machine_mode operand_mode = TYPE_MODE (type);
6805 int ops_unsigned;
6806 tree signed_type, unsigned_type, intermediate_type;
6807 tree tem, one;
6808
6809 /* First, see if we can fold the single bit test into a sign-bit
6810 test. */
6811 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6812 result_type);
6813 if (tem)
6814 return tem;
6815
6816 /* Otherwise we have (A & C) != 0 where C is a single bit;
6817 convert that into ((A >> C2) & 1), where C2 = log2(C).
6818 Similarly for (A & C) == 0. */
6819
6820 /* If INNER is a right shift of a constant and it plus BITNUM does
6821 not overflow, adjust BITNUM and INNER. */
6822 if (TREE_CODE (inner) == RSHIFT_EXPR
6823 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6824 && bitnum < TYPE_PRECISION (type)
6825 && wi::ltu_p (TREE_OPERAND (inner, 1),
6826 TYPE_PRECISION (type) - bitnum))
6827 {
6828 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6829 inner = TREE_OPERAND (inner, 0);
6830 }
6831
6832 /* If we are going to be able to omit the AND below, we must do our
6833 operations as unsigned. If we must use the AND, we have a choice.
6834 Normally unsigned is faster, but for some machines signed is. */
6835 #ifdef LOAD_EXTEND_OP
6836 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6837 && !flag_syntax_only) ? 0 : 1;
6838 #else
6839 ops_unsigned = 1;
6840 #endif
6841
6842 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6843 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6844 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6845 inner = fold_convert_loc (loc, intermediate_type, inner);
6846
6847 if (bitnum != 0)
6848 inner = build2 (RSHIFT_EXPR, intermediate_type,
6849 inner, size_int (bitnum));
6850
6851 one = build_int_cst (intermediate_type, 1);
6852
6853 if (code == EQ_EXPR)
6854 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6855
6856 /* Put the AND last so it can combine with more things. */
6857 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6858
6859 /* Make sure to return the proper type. */
6860 inner = fold_convert_loc (loc, result_type, inner);
6861
6862 return inner;
6863 }
6864 return NULL_TREE;
6865 }
6866
6867 /* Check whether we are allowed to reorder operands arg0 and arg1,
6868 such that the evaluation of arg1 occurs before arg0. */
6869
6870 static bool
6871 reorder_operands_p (const_tree arg0, const_tree arg1)
6872 {
6873 if (! flag_evaluation_order)
6874 return true;
6875 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6876 return true;
6877 return ! TREE_SIDE_EFFECTS (arg0)
6878 && ! TREE_SIDE_EFFECTS (arg1);
6879 }
6880
6881 /* Test whether it is preferable to swap two operands, ARG0 and
6882 ARG1, for example because ARG0 is an integer constant and ARG1
6883 isn't. If REORDER is true, only recommend swapping if we can
6884 evaluate the operands in reverse order. */
6885
6886 bool
6887 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6888 {
6889 if (CONSTANT_CLASS_P (arg1))
6890 return false;
6891 if (CONSTANT_CLASS_P (arg0))
6892 return true;
6893 
6894 STRIP_NOPS (arg0);
6895 STRIP_NOPS (arg1);
6896 
6897 if (TREE_CONSTANT (arg1))
6898 return false;
6899 if (TREE_CONSTANT (arg0))
6900 return true;
6901 
6902 if (reorder && flag_evaluation_order
6903 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6904 return false;
6905 
6906 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6907 for commutative and comparison operators. Ensuring a canonical
6908 form allows the optimizers to find additional redundancies without
6909 having to explicitly check for both orderings. */
6910 if (TREE_CODE (arg0) == SSA_NAME
6911 && TREE_CODE (arg1) == SSA_NAME
6912 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6913 return true;
6914 
6915 /* Put SSA_NAMEs last. */
6916 if (TREE_CODE (arg1) == SSA_NAME)
6917 return false;
6918 if (TREE_CODE (arg0) == SSA_NAME)
6919 return true;
6920 
6921 /* Put variables last. */
6922 if (DECL_P (arg1))
6923 return false;
6924 if (DECL_P (arg0))
6925 return true;
6926 
6927 return false;
6928 }
6929
6930 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6931 ARG0 is extended to a wider type. */
6932
6933 static tree
6934 fold_widened_comparison (location_t loc, enum tree_code code,
6935 tree type, tree arg0, tree arg1)
6936 {
6937 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6938 tree arg1_unw;
6939 tree shorter_type, outer_type;
6940 tree min, max;
6941 bool above, below;
6942
6943 if (arg0_unw == arg0)
6944 return NULL_TREE;
6945 shorter_type = TREE_TYPE (arg0_unw);
6946
6947 #ifdef HAVE_canonicalize_funcptr_for_compare
6948 /* Disable this optimization if we're casting a function pointer
6949 type on targets that require function pointer canonicalization. */
6950 if (HAVE_canonicalize_funcptr_for_compare
6951 && TREE_CODE (shorter_type) == POINTER_TYPE
6952 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6953 return NULL_TREE;
6954 #endif
6955
6956 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6957 return NULL_TREE;
6958
6959 arg1_unw = get_unwidened (arg1, NULL_TREE);
6960
6961 /* If possible, express the comparison in the shorter mode. */
6962 if ((code == EQ_EXPR || code == NE_EXPR
6963 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6964 && (TREE_TYPE (arg1_unw) == shorter_type
6965 || ((TYPE_PRECISION (shorter_type)
6966 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6967 && (TYPE_UNSIGNED (shorter_type)
6968 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6969 || (TREE_CODE (arg1_unw) == INTEGER_CST
6970 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6971 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6972 && int_fits_type_p (arg1_unw, shorter_type))))
6973 return fold_build2_loc (loc, code, type, arg0_unw,
6974 fold_convert_loc (loc, shorter_type, arg1_unw));
6975
6976 if (TREE_CODE (arg1_unw) != INTEGER_CST
6977 || TREE_CODE (shorter_type) != INTEGER_TYPE
6978 || !int_fits_type_p (arg1_unw, shorter_type))
6979 return NULL_TREE;
6980
6981 /* If we are comparing with an integer that does not fit into the range
6982 of the shorter type, the result is known. */
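/* For example, if ARG0 is (int) (signed char) x, a comparison such as
(int) (signed char) x == 1000 is decided here, since 1000 lies above
the maximum value (127) representable in the shorter type, so for
EQ_EXPR the whole comparison folds to zero below. */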
6983 outer_type = TREE_TYPE (arg1_unw);
6984 min = lower_bound_in_type (outer_type, shorter_type);
6985 max = upper_bound_in_type (outer_type, shorter_type);
6986
6987 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6988 max, arg1_unw));
6989 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6990 arg1_unw, min));
6991
6992 switch (code)
6993 {
6994 case EQ_EXPR:
6995 if (above || below)
6996 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6997 break;
6998
6999 case NE_EXPR:
7000 if (above || below)
7001 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7002 break;
7003
7004 case LT_EXPR:
7005 case LE_EXPR:
7006 if (above)
7007 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7008 else if (below)
7009 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7010 break;

7011 case GT_EXPR:
7012 case GE_EXPR:
7013 if (above)
7014 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7015 else if (below)
7016 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7017 break;

7018 default:
7019 break;
7020 }
7021
7022 return NULL_TREE;
7023 }
7024
7025 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7026 ARG0 just the signedness is changed. */
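/* For example, (int) x == 5 with x of type unsigned int can be folded
to x == 5u here: a cast that only changes the signedness of an
equal-precision type cannot change the outcome of an equality test. */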
7027
7028 static tree
7029 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7030 tree arg0, tree arg1)
7031 {
7032 tree arg0_inner;
7033 tree inner_type, outer_type;
7034
7035 if (!CONVERT_EXPR_P (arg0))
7036 return NULL_TREE;
7037
7038 outer_type = TREE_TYPE (arg0);
7039 arg0_inner = TREE_OPERAND (arg0, 0);
7040 inner_type = TREE_TYPE (arg0_inner);
7041
7042 #ifdef HAVE_canonicalize_funcptr_for_compare
7043 /* Disable this optimization if we're casting a function pointer
7044 type on targets that require function pointer canonicalization. */
7045 if (HAVE_canonicalize_funcptr_for_compare
7046 && TREE_CODE (inner_type) == POINTER_TYPE
7047 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7048 return NULL_TREE;
7049 #endif
7050
7051 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7052 return NULL_TREE;
7053
7054 if (TREE_CODE (arg1) != INTEGER_CST
7055 && !(CONVERT_EXPR_P (arg1)
7056 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7057 return NULL_TREE;
7058
7059 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7060 && code != NE_EXPR
7061 && code != EQ_EXPR)
7062 return NULL_TREE;
7063
7064 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7065 return NULL_TREE;
7066
7067 if (TREE_CODE (arg1) == INTEGER_CST)
7068 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7069 TREE_OVERFLOW (arg1));
7070 else
7071 arg1 = fold_convert_loc (loc, inner_type, arg1);
7072
7073 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7074 }
7075
7076
7077 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7078 means A >= Y && A != MAX, but in this case we know that
7079 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
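/* For example, with unsigned A the bound A < X guarantees A != UINT_MAX,
so A + 1 cannot wrap around and A + 1 > Y may be weakened to A >= Y;
the code below checks this by folding A1 - A and requiring the
difference to be exactly one. */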
7080
7081 static tree
7082 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7083 {
7084 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7085
7086 if (TREE_CODE (bound) == LT_EXPR)
7087 a = TREE_OPERAND (bound, 0);
7088 else if (TREE_CODE (bound) == GT_EXPR)
7089 a = TREE_OPERAND (bound, 1);
7090 else
7091 return NULL_TREE;
7092
7093 typea = TREE_TYPE (a);
7094 if (!INTEGRAL_TYPE_P (typea)
7095 && !POINTER_TYPE_P (typea))
7096 return NULL_TREE;
7097
7098 if (TREE_CODE (ineq) == LT_EXPR)
7099 {
7100 a1 = TREE_OPERAND (ineq, 1);
7101 y = TREE_OPERAND (ineq, 0);
7102 }
7103 else if (TREE_CODE (ineq) == GT_EXPR)
7104 {
7105 a1 = TREE_OPERAND (ineq, 0);
7106 y = TREE_OPERAND (ineq, 1);
7107 }
7108 else
7109 return NULL_TREE;
7110
7111 if (TREE_TYPE (a1) != typea)
7112 return NULL_TREE;
7113
7114 if (POINTER_TYPE_P (typea))
7115 {
7116 /* Convert the pointers to a signed integer type before taking the difference. */
7117 tree ta = fold_convert_loc (loc, ssizetype, a);
7118 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7119 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7120 }
7121 else
7122 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7123
7124 if (!diff || !integer_onep (diff))
7125 return NULL_TREE;
7126
7127 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7128 }
7129
7130 /* Fold a sum or difference of at least one multiplication.
7131 Returns the folded tree or NULL if no simplification could be made. */
7132
7133 static tree
7134 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7135 tree arg0, tree arg1)
7136 {
7137 tree arg00, arg01, arg10, arg11;
7138 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7139
7140 /* (A * C) +- (B * C) -> (A+-B) * C.
7141 (A * C) +- A -> A * (C+-1).
7142 We are most concerned about the case where C is a constant,
7143 but other combinations show up during loop reduction. Since
7144 it is not difficult, try all four possibilities. */
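/* For example:
x * 3 + y * 3 -> (x + y) * 3
x * 3 - x -> x * (3 - 1) = x * 2
where the missing multiplier is treated as a multiplication by one. */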
7145
7146 if (TREE_CODE (arg0) == MULT_EXPR)
7147 {
7148 arg00 = TREE_OPERAND (arg0, 0);
7149 arg01 = TREE_OPERAND (arg0, 1);
7150 }
7151 else if (TREE_CODE (arg0) == INTEGER_CST)
7152 {
7153 arg00 = build_one_cst (type);
7154 arg01 = arg0;
7155 }
7156 else
7157 {
7158 /* We cannot generate constant 1 for fract. */
7159 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7160 return NULL_TREE;
7161 arg00 = arg0;
7162 arg01 = build_one_cst (type);
7163 }
7164 if (TREE_CODE (arg1) == MULT_EXPR)
7165 {
7166 arg10 = TREE_OPERAND (arg1, 0);
7167 arg11 = TREE_OPERAND (arg1, 1);
7168 }
7169 else if (TREE_CODE (arg1) == INTEGER_CST)
7170 {
7171 arg10 = build_one_cst (type);
7172 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7173 the purpose of this canonicalization. */
7174 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7175 && negate_expr_p (arg1)
7176 && code == PLUS_EXPR)
7177 {
7178 arg11 = negate_expr (arg1);
7179 code = MINUS_EXPR;
7180 }
7181 else
7182 arg11 = arg1;
7183 }
7184 else
7185 {
7186 /* We cannot generate constant 1 for fract. */
7187 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7188 return NULL_TREE;
7189 arg10 = arg1;
7190 arg11 = build_one_cst (type);
7191 }
7192 same = NULL_TREE;
7193
7194 if (operand_equal_p (arg01, arg11, 0))
7195 same = arg01, alt0 = arg00, alt1 = arg10;
7196 else if (operand_equal_p (arg00, arg10, 0))
7197 same = arg00, alt0 = arg01, alt1 = arg11;
7198 else if (operand_equal_p (arg00, arg11, 0))
7199 same = arg00, alt0 = arg01, alt1 = arg10;
7200 else if (operand_equal_p (arg01, arg10, 0))
7201 same = arg01, alt0 = arg00, alt1 = arg11;
7202
7203 /* No identical multiplicands; see if we can find a common
7204 power-of-two factor in non-power-of-two multiplies. This
7205 can help in multi-dimensional array access. */
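/* For example, i * 12 + j * 4 -> (i * 3 + j) * 4, since 4 is a
power of two dividing 12; this saves one multiplication. */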
7206 else if (tree_fits_shwi_p (arg01)
7207 && tree_fits_shwi_p (arg11))
7208 {
7209 HOST_WIDE_INT int01, int11, tmp;
7210 bool swap = false;
7211 tree maybe_same;
7212 int01 = tree_to_shwi (arg01);
7213 int11 = tree_to_shwi (arg11);
7214
7215 /* Move min of absolute values to int11. */
7216 if (absu_hwi (int01) < absu_hwi (int11))
7217 {
7218 tmp = int01, int01 = int11, int11 = tmp;
7219 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7220 maybe_same = arg01;
7221 swap = true;
7222 }
7223 else
7224 maybe_same = arg11;
7225
7226 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7227 /* The remainder should not be a constant, otherwise we
7228 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7229 increase the number of multiplications necessary. */
7230 && TREE_CODE (arg10) != INTEGER_CST)
7231 {
7232 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7233 build_int_cst (TREE_TYPE (arg00),
7234 int01 / int11));
7235 alt1 = arg10;
7236 same = maybe_same;
7237 if (swap)
7238 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7239 }
7240 }
7241
7242 if (same)
7243 return fold_build2_loc (loc, MULT_EXPR, type,
7244 fold_build2_loc (loc, code, type,
7245 fold_convert_loc (loc, type, alt0),
7246 fold_convert_loc (loc, type, alt1)),
7247 fold_convert_loc (loc, type, same));
7248
7249 return NULL_TREE;
7250 }
7251
7252 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7253 specified by EXPR into the buffer PTR of length LEN bytes.
7254 Return the number of bytes placed in the buffer, or zero
7255 upon failure. */
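/* As an illustration, on a target with 32-bit int and little-endian
byte order, encoding the INTEGER_CST 0x11223344 stores the bytes
{ 0x44, 0x33, 0x22, 0x11 } in PTR, whereas a big-endian target
stores { 0x11, 0x22, 0x33, 0x44 }. */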
7256
7257 static int
7258 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7259 {
7260 tree type = TREE_TYPE (expr);
7261 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7262 int byte, offset, word, words;
7263 unsigned char value;
7264
7265 if ((off == -1 && total_bytes > len)
7266 || off >= total_bytes)
7267 return 0;
7268 if (off == -1)
7269 off = 0;
7270 words = total_bytes / UNITS_PER_WORD;
7271
7272 for (byte = 0; byte < total_bytes; byte++)
7273 {
7274 int bitpos = byte * BITS_PER_UNIT;
7275 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7276 number of bytes. */
7277 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7278
7279 if (total_bytes > UNITS_PER_WORD)
7280 {
7281 word = byte / UNITS_PER_WORD;
7282 if (WORDS_BIG_ENDIAN)
7283 word = (words - 1) - word;
7284 offset = word * UNITS_PER_WORD;
7285 if (BYTES_BIG_ENDIAN)
7286 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7287 else
7288 offset += byte % UNITS_PER_WORD;
7289 }
7290 else
7291 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7292 if (offset >= off
7293 && offset - off < len)
7294 ptr[offset - off] = value;
7295 }
7296 return MIN (len, total_bytes - off);
7297 }
7298
7299
7300 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7301 specified by EXPR into the buffer PTR of length LEN bytes.
7302 Return the number of bytes placed in the buffer, or zero
7303 upon failure. */
7304
7305 static int
7306 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7307 {
7308 tree type = TREE_TYPE (expr);
7309 machine_mode mode = TYPE_MODE (type);
7310 int total_bytes = GET_MODE_SIZE (mode);
7311 FIXED_VALUE_TYPE value;
7312 tree i_value, i_type;
7313
7314 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7315 return 0;
7316
7317 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7318
7319 if (NULL_TREE == i_type
7320 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7321 return 0;
7322
7323 value = TREE_FIXED_CST (expr);
7324 i_value = double_int_to_tree (i_type, value.data);
7325
7326 return native_encode_int (i_value, ptr, len, off);
7327 }
7328
7329
7330 /* Subroutine of native_encode_expr. Encode the REAL_CST
7331 specified by EXPR into the buffer PTR of length LEN bytes.
7332 Return the number of bytes placed in the buffer, or zero
7333 upon failure. */
7334
7335 static int
7336 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7337 {
7338 tree type = TREE_TYPE (expr);
7339 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7340 int byte, offset, word, words, bitpos;
7341 unsigned char value;
7342
7343 /* There are always 32 bits in each long, no matter the size of
7344 the host's long. We handle floating point representations with
7345 up to 192 bits. */
7346 long tmp[6];
7347
7348 if ((off == -1 && total_bytes > len)
7349 || off >= total_bytes)
7350 return 0;
7351 if (off == -1)
7352 off = 0;
7353 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7354
7355 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7356
7357 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7358 bitpos += BITS_PER_UNIT)
7359 {
7360 byte = (bitpos / BITS_PER_UNIT) & 3;
7361 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7362
7363 if (UNITS_PER_WORD < 4)
7364 {
7365 word = byte / UNITS_PER_WORD;
7366 if (WORDS_BIG_ENDIAN)
7367 word = (words - 1) - word;
7368 offset = word * UNITS_PER_WORD;
7369 if (BYTES_BIG_ENDIAN)
7370 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7371 else
7372 offset += byte % UNITS_PER_WORD;
7373 }
7374 else
7375 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7376 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7377 if (offset >= off
7378 && offset - off < len)
7379 ptr[offset - off] = value;
7380 }
7381 return MIN (len, total_bytes - off);
7382 }
7383
7384 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7385 specified by EXPR into the buffer PTR of length LEN bytes.
7386 Return the number of bytes placed in the buffer, or zero
7387 upon failure. */
7388
7389 static int
7390 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7391 {
7392 int rsize, isize;
7393 tree part;
7394
7395 part = TREE_REALPART (expr);
7396 rsize = native_encode_expr (part, ptr, len, off);
7397 if (off == -1
7398 && rsize == 0)
7399 return 0;
7400 part = TREE_IMAGPART (expr);
7401 if (off != -1)
7402 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7403 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7404 if (off == -1
7405 && isize != rsize)
7406 return 0;
7407 return rsize + isize;
7408 }
7409
7410
7411 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7412 specified by EXPR into the buffer PTR of length LEN bytes.
7413 Return the number of bytes placed in the buffer, or zero
7414 upon failure. */
7415
7416 static int
7417 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7418 {
7419 unsigned i, count;
7420 int size, offset;
7421 tree itype, elem;
7422
7423 offset = 0;
7424 count = VECTOR_CST_NELTS (expr);
7425 itype = TREE_TYPE (TREE_TYPE (expr));
7426 size = GET_MODE_SIZE (TYPE_MODE (itype));
7427 for (i = 0; i < count; i++)
7428 {
7429 if (off >= size)
7430 {
7431 off -= size;
7432 continue;
7433 }
7434 elem = VECTOR_CST_ELT (expr, i);
7435 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7436 if ((off == -1 && res != size)
7437 || res == 0)
7438 return 0;
7439 offset += res;
7440 if (offset >= len)
7441 return offset;
7442 if (off != -1)
7443 off = 0;
7444 }
7445 return offset;
7446 }
7447
7448
7449 /* Subroutine of native_encode_expr. Encode the STRING_CST
7450 specified by EXPR into the buffer PTR of length LEN bytes.
7451 Return the number of bytes placed in the buffer, or zero
7452 upon failure. */
7453
7454 static int
7455 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7456 {
7457 tree type = TREE_TYPE (expr);
7458 HOST_WIDE_INT total_bytes;
7459
7460 if (TREE_CODE (type) != ARRAY_TYPE
7461 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7462 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7463 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7464 return 0;
7465 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7466 if ((off == -1 && total_bytes > len)
7467 || off >= total_bytes)
7468 return 0;
7469 if (off == -1)
7470 off = 0;
7471 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7472 {
7473 int written = 0;
7474 if (off < TREE_STRING_LENGTH (expr))
7475 {
7476 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7477 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7478 }
7479 memset (ptr + written, 0,
7480 MIN (total_bytes - written, len - written));
7481 }
7482 else
7483 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7484 return MIN (total_bytes - off, len);
7485 }
7486
7487
7488 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7489 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7490 buffer PTR of length LEN bytes. If OFF is not -1 then start
7491 the encoding at byte offset OFF and encode at most LEN bytes.
7492 Return the number of bytes placed in the buffer, or zero upon failure. */
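/* A typical use is to round-trip a constant through its target byte
representation, as fold_view_convert_expr does below. An
illustrative sketch (buf, len and tem are placeholder locals):

unsigned char buf[64];
int len = native_encode_expr (expr, buf, sizeof (buf), -1);
if (len != 0)
tem = native_interpret_expr (type, buf, len);

where an OFF of -1 requests encoding from the start of the object. */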
7493
7494 int
7495 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7496 {
7497 switch (TREE_CODE (expr))
7498 {
7499 case INTEGER_CST:
7500 return native_encode_int (expr, ptr, len, off);
7501
7502 case REAL_CST:
7503 return native_encode_real (expr, ptr, len, off);
7504
7505 case FIXED_CST:
7506 return native_encode_fixed (expr, ptr, len, off);
7507
7508 case COMPLEX_CST:
7509 return native_encode_complex (expr, ptr, len, off);
7510
7511 case VECTOR_CST:
7512 return native_encode_vector (expr, ptr, len, off);
7513
7514 case STRING_CST:
7515 return native_encode_string (expr, ptr, len, off);
7516
7517 default:
7518 return 0;
7519 }
7520 }
7521
7522
7523 /* Subroutine of native_interpret_expr. Interpret the contents of
7524 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7525 If the buffer cannot be interpreted, return NULL_TREE. */
7526
7527 static tree
7528 native_interpret_int (tree type, const unsigned char *ptr, int len)
7529 {
7530 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7531
7532 if (total_bytes > len
7533 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7534 return NULL_TREE;
7535
7536 wide_int result = wi::from_buffer (ptr, total_bytes);
7537
7538 return wide_int_to_tree (type, result);
7539 }
7540
7541
7542 /* Subroutine of native_interpret_expr. Interpret the contents of
7543 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7544 If the buffer cannot be interpreted, return NULL_TREE. */
7545
7546 static tree
7547 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7548 {
7549 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7550 double_int result;
7551 FIXED_VALUE_TYPE fixed_value;
7552
7553 if (total_bytes > len
7554 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7555 return NULL_TREE;
7556
7557 result = double_int::from_buffer (ptr, total_bytes);
7558 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7559
7560 return build_fixed (type, fixed_value);
7561 }
7562
7563
7564 /* Subroutine of native_interpret_expr. Interpret the contents of
7565 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7566 If the buffer cannot be interpreted, return NULL_TREE. */
7567
7568 static tree
7569 native_interpret_real (tree type, const unsigned char *ptr, int len)
7570 {
7571 machine_mode mode = TYPE_MODE (type);
7572 int total_bytes = GET_MODE_SIZE (mode);
7573 int byte, offset, word, words, bitpos;
7574 unsigned char value;
7575 /* There are always 32 bits in each long, no matter the size of
7576 the host's long. We handle floating point representations with
7577 up to 192 bits. */
7578 REAL_VALUE_TYPE r;
7579 long tmp[6];
7580
7582 if (total_bytes > len || total_bytes > 24)
7583 return NULL_TREE;
7584 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7585
7586 memset (tmp, 0, sizeof (tmp));
7587 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7588 bitpos += BITS_PER_UNIT)
7589 {
7590 byte = (bitpos / BITS_PER_UNIT) & 3;
7591 if (UNITS_PER_WORD < 4)
7592 {
7593 word = byte / UNITS_PER_WORD;
7594 if (WORDS_BIG_ENDIAN)
7595 word = (words - 1) - word;
7596 offset = word * UNITS_PER_WORD;
7597 if (BYTES_BIG_ENDIAN)
7598 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7599 else
7600 offset += byte % UNITS_PER_WORD;
7601 }
7602 else
7603 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7604 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7605
7606 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7607 }
7608
7609 real_from_target (&r, tmp, mode);
7610 return build_real (type, r);
7611 }
7612
7613
7614 /* Subroutine of native_interpret_expr. Interpret the contents of
7615 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7616 If the buffer cannot be interpreted, return NULL_TREE. */
7617
7618 static tree
7619 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7620 {
7621 tree etype, rpart, ipart;
7622 int size;
7623
7624 etype = TREE_TYPE (type);
7625 size = GET_MODE_SIZE (TYPE_MODE (etype));
7626 if (size * 2 > len)
7627 return NULL_TREE;
7628 rpart = native_interpret_expr (etype, ptr, size);
7629 if (!rpart)
7630 return NULL_TREE;
7631 ipart = native_interpret_expr (etype, ptr+size, size);
7632 if (!ipart)
7633 return NULL_TREE;
7634 return build_complex (type, rpart, ipart);
7635 }
7636
7637
7638 /* Subroutine of native_interpret_expr. Interpret the contents of
7639 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7640 If the buffer cannot be interpreted, return NULL_TREE. */
7641
7642 static tree
7643 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7644 {
7645 tree etype, elem;
7646 int i, size, count;
7647 tree *elements;
7648
7649 etype = TREE_TYPE (type);
7650 size = GET_MODE_SIZE (TYPE_MODE (etype));
7651 count = TYPE_VECTOR_SUBPARTS (type);
7652 if (size * count > len)
7653 return NULL_TREE;
7654
7655 elements = XALLOCAVEC (tree, count);
7656 for (i = count - 1; i >= 0; i--)
7657 {
7658 elem = native_interpret_expr (etype, ptr+(i*size), size);
7659 if (!elem)
7660 return NULL_TREE;
7661 elements[i] = elem;
7662 }
7663 return build_vector (type, elements);
7664 }
7665
7666
7667 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7668 the buffer PTR of length LEN as a constant of type TYPE. For
7669 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7670 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7671 return NULL_TREE. */
7672
7673 tree
7674 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7675 {
7676 switch (TREE_CODE (type))
7677 {
7678 case INTEGER_TYPE:
7679 case ENUMERAL_TYPE:
7680 case BOOLEAN_TYPE:
7681 case POINTER_TYPE:
7682 case REFERENCE_TYPE:
7683 return native_interpret_int (type, ptr, len);
7684
7685 case REAL_TYPE:
7686 return native_interpret_real (type, ptr, len);
7687
7688 case FIXED_POINT_TYPE:
7689 return native_interpret_fixed (type, ptr, len);
7690
7691 case COMPLEX_TYPE:
7692 return native_interpret_complex (type, ptr, len);
7693
7694 case VECTOR_TYPE:
7695 return native_interpret_vector (type, ptr, len);
7696
7697 default:
7698 return NULL_TREE;
7699 }
7700 }
7701
7702 /* Returns true if we can interpret the contents of a native encoding
7703 as TYPE. */
7704
7705 static bool
7706 can_native_interpret_type_p (tree type)
7707 {
7708 switch (TREE_CODE (type))
7709 {
7710 case INTEGER_TYPE:
7711 case ENUMERAL_TYPE:
7712 case BOOLEAN_TYPE:
7713 case POINTER_TYPE:
7714 case REFERENCE_TYPE:
7715 case FIXED_POINT_TYPE:
7716 case REAL_TYPE:
7717 case COMPLEX_TYPE:
7718 case VECTOR_TYPE:
7719 return true;
7720 default:
7721 return false;
7722 }
7723 }
7724
7725 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7726 TYPE at compile-time. If we're unable to perform the conversion
7727 return NULL_TREE. */
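/* For example, VIEW_CONVERT_EXPR<int>(1.0f) folds to 0x3f800000 on a
target where float is the IEEE single format and int is 32 bits
wide: the float is encoded into the buffer and the same bytes are
then re-interpreted as an integer. */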
7728
7729 static tree
7730 fold_view_convert_expr (tree type, tree expr)
7731 {
7732 /* We support up to 512-bit values (for V8DFmode). */
7733 unsigned char buffer[64];
7734 int len;
7735
7736 /* Check that the host and target are sane. */
7737 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7738 return NULL_TREE;
7739
7740 len = native_encode_expr (expr, buffer, sizeof (buffer));
7741 if (len == 0)
7742 return NULL_TREE;
7743
7744 return native_interpret_expr (type, buffer, len);
7745 }
7746
7747 /* Build an expression for the address of T. Folds away INDIRECT_REF
7748 to avoid confusing the gimplify process. */
7749
7750 tree
7751 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7752 {
7753 /* The size of the object is not relevant when talking about its address. */
7754 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7755 t = TREE_OPERAND (t, 0);
7756
7757 if (TREE_CODE (t) == INDIRECT_REF)
7758 {
7759 t = TREE_OPERAND (t, 0);
7760
7761 if (TREE_TYPE (t) != ptrtype)
7762 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7763 }
7764 else if (TREE_CODE (t) == MEM_REF
7765 && integer_zerop (TREE_OPERAND (t, 1)))
7766 return TREE_OPERAND (t, 0);
7767 else if (TREE_CODE (t) == MEM_REF
7768 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7769 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7770 TREE_OPERAND (t, 0),
7771 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7772 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7773 {
7774 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7775
7776 if (TREE_TYPE (t) != ptrtype)
7777 t = fold_convert_loc (loc, ptrtype, t);
7778 }
7779 else
7780 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7781
7782 return t;
7783 }
7784
7785 /* Build an expression for the address of T. */
7786
7787 tree
7788 build_fold_addr_expr_loc (location_t loc, tree t)
7789 {
7790 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7791
7792 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7793 }
7794
7795 /* Fold a unary expression of code CODE and type TYPE with operand
7796 OP0. Return the folded expression if folding is successful.
7797 Otherwise, return NULL_TREE. */
7798
7799 tree
7800 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7801 {
7802 tree tem;
7803 tree arg0;
7804 enum tree_code_class kind = TREE_CODE_CLASS (code);
7805
7806 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7807 && TREE_CODE_LENGTH (code) == 1);
7808
7809 arg0 = op0;
7810 if (arg0)
7811 {
7812 if (CONVERT_EXPR_CODE_P (code)
7813 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7814 {
7815 /* Don't use STRIP_NOPS, because signedness of argument type
7816 matters. */
7817 STRIP_SIGN_NOPS (arg0);
7818 }
7819 else
7820 {
7821 /* Strip any conversions that don't change the mode. This
7822 is safe for every expression, except for a comparison
7823 expression because its signedness is derived from its
7824 operands.
7825
7826 Note that this is done as an internal manipulation within
7827 the constant folder, in order to find the simplest
7828 representation of the arguments so that their form can be
7829 studied. In any case, the appropriate type conversions
7830 should be put back in the tree that comes out of the
7831 constant folder. */
7832 STRIP_NOPS (arg0);
7833 }
7834
7835 if (CONSTANT_CLASS_P (arg0))
7836 {
7837 tree tem = const_unop (code, type, arg0);
7838 if (tem)
7839 {
7840 if (TREE_TYPE (tem) != type)
7841 tem = fold_convert_loc (loc, type, tem);
7842 return tem;
7843 }
7844 }
7845 }
7846
7847 tem = generic_simplify (loc, code, type, op0);
7848 if (tem)
7849 return tem;
7850
7851 if (TREE_CODE_CLASS (code) == tcc_unary)
7852 {
7853 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7854 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7855 fold_build1_loc (loc, code, type,
7856 fold_convert_loc (loc, TREE_TYPE (op0),
7857 TREE_OPERAND (arg0, 1))));
7858 else if (TREE_CODE (arg0) == COND_EXPR)
7859 {
7860 tree arg01 = TREE_OPERAND (arg0, 1);
7861 tree arg02 = TREE_OPERAND (arg0, 2);
7862 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7863 arg01 = fold_build1_loc (loc, code, type,
7864 fold_convert_loc (loc,
7865 TREE_TYPE (op0), arg01));
7866 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7867 arg02 = fold_build1_loc (loc, code, type,
7868 fold_convert_loc (loc,
7869 TREE_TYPE (op0), arg02));
7870 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7871 arg01, arg02);
7872
7873 /* If this was a conversion, and all we did was to move it
7874 inside the COND_EXPR, bring it back out. But leave it if
7875 it is a conversion from integer to integer and the
7876 result precision is no wider than a word since such a
7877 conversion is cheap and may be optimized away by combine,
7878 while it couldn't if it were outside the COND_EXPR. Then return
7879 so we don't get into an infinite recursion loop taking the
7880 conversion out and then back in. */
7881
7882 if ((CONVERT_EXPR_CODE_P (code)
7883 || code == NON_LVALUE_EXPR)
7884 && TREE_CODE (tem) == COND_EXPR
7885 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7886 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7887 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7888 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7889 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7890 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7891 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7892 && (INTEGRAL_TYPE_P
7893 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7894 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7895 || flag_syntax_only))
7896 tem = build1_loc (loc, code, type,
7897 build3 (COND_EXPR,
7898 TREE_TYPE (TREE_OPERAND
7899 (TREE_OPERAND (tem, 1), 0)),
7900 TREE_OPERAND (tem, 0),
7901 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7902 TREE_OPERAND (TREE_OPERAND (tem, 2),
7903 0)));
7904 return tem;
7905 }
7906 }
7907
7908 switch (code)
7909 {
7910 case NON_LVALUE_EXPR:
7911 if (!maybe_lvalue_p (op0))
7912 return fold_convert_loc (loc, type, op0);
7913 return NULL_TREE;
7914
7915 CASE_CONVERT:
7916 case FLOAT_EXPR:
7917 case FIX_TRUNC_EXPR:
7918 if (COMPARISON_CLASS_P (op0))
7919 {
7920 /* If we have (type) (a CMP b) and type is an integral type, return
7921 a new expression involving the new type. Canonicalize
7922 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7923 non-integral type.
7924 Do not fold the result, as that would not simplify further;
7925 folding it again would only result in recursion. */
7926 if (TREE_CODE (type) == BOOLEAN_TYPE)
7927 return build2_loc (loc, TREE_CODE (op0), type,
7928 TREE_OPERAND (op0, 0),
7929 TREE_OPERAND (op0, 1));
7930 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7931 && TREE_CODE (type) != VECTOR_TYPE)
7932 return build3_loc (loc, COND_EXPR, type, op0,
7933 constant_boolean_node (true, type),
7934 constant_boolean_node (false, type));
7935 }
7936
7937 /* Handle (T *)&A.B.C for A being of type T and B and C
7938 living at offset zero. This occurs frequently in
7939 C++ upcasting and then accessing the base. */
7940 if (TREE_CODE (op0) == ADDR_EXPR
7941 && POINTER_TYPE_P (type)
7942 && handled_component_p (TREE_OPERAND (op0, 0)))
7943 {
7944 HOST_WIDE_INT bitsize, bitpos;
7945 tree offset;
7946 machine_mode mode;
7947 int unsignedp, volatilep;
7948 tree base = TREE_OPERAND (op0, 0);
7949 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7950 &mode, &unsignedp, &volatilep, false);
7951 /* If the reference was to a (constant) zero offset, we can use
7952 the address of the base if it has the same base type
7953 as the result type and the pointer type is unqualified. */
7954 if (! offset && bitpos == 0
7955 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7956 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7957 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7958 return fold_convert_loc (loc, type,
7959 build_fold_addr_expr_loc (loc, base));
7960 }
7961
7962 if (TREE_CODE (op0) == MODIFY_EXPR
7963 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7964 /* Detect assigning a bitfield. */
7965 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7966 && DECL_BIT_FIELD
7967 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7968 {
7969 /* Don't leave an assignment inside a conversion
7970 unless assigning a bitfield. */
7971 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7972 /* First do the assignment, then return converted constant. */
7973 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7974 TREE_NO_WARNING (tem) = 1;
7975 TREE_USED (tem) = 1;
7976 return tem;
7977 }
7978
7979 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7980 constant (if x has signed type, the sign bit cannot be set
7981 in c). This folds the extension into the BIT_AND_EXPR.
7982 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7983 very likely don't have maximal range for their precision and this
7984 transformation effectively doesn't preserve non-maximal ranges. */
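/* For example, (long) (x & 0x7f) with int x becomes (long) x & 0x7fL:
because the mask clears the sign bit, extending before or after the
AND yields the same value. */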
7985 if (TREE_CODE (type) == INTEGER_TYPE
7986 && TREE_CODE (op0) == BIT_AND_EXPR
7987 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7988 {
7989 tree and_expr = op0;
7990 tree and0 = TREE_OPERAND (and_expr, 0);
7991 tree and1 = TREE_OPERAND (and_expr, 1);
7992 int change = 0;
7993
7994 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7995 || (TYPE_PRECISION (type)
7996 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7997 change = 1;
7998 else if (TYPE_PRECISION (TREE_TYPE (and1))
7999 <= HOST_BITS_PER_WIDE_INT
8000 && tree_fits_uhwi_p (and1))
8001 {
8002 unsigned HOST_WIDE_INT cst;
8003
8004 cst = tree_to_uhwi (and1);
8005 cst &= HOST_WIDE_INT_M1U
8006 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8007 change = (cst == 0);
8008 #ifdef LOAD_EXTEND_OP
8009 if (change
8010 && !flag_syntax_only
8011 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8012 == ZERO_EXTEND))
8013 {
8014 tree uns = unsigned_type_for (TREE_TYPE (and0));
8015 and0 = fold_convert_loc (loc, uns, and0);
8016 and1 = fold_convert_loc (loc, uns, and1);
8017 }
8018 #endif
8019 }
8020 if (change)
8021 {
8022 tem = force_fit_type (type, wi::to_widest (and1), 0,
8023 TREE_OVERFLOW (and1));
8024 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8025 fold_convert_loc (loc, type, and0), tem);
8026 }
8027 }
8028
8029 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8030 when one of the new casts will fold away. Conservatively we assume
8031 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8032 if (POINTER_TYPE_P (type)
8033 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8034 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8035 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8036 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8037 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8038 {
8039 tree arg00 = TREE_OPERAND (arg0, 0);
8040 tree arg01 = TREE_OPERAND (arg0, 1);
8041
8042 return fold_build_pointer_plus_loc
8043 (loc, fold_convert_loc (loc, type, arg00), arg01);
8044 }
8045
8046 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8047 of the same precision, and X has an integer type not narrower than
8048 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8049 if (INTEGRAL_TYPE_P (type)
8050 && TREE_CODE (op0) == BIT_NOT_EXPR
8051 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8052 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8053 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8054 {
8055 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8056 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8057 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8058 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8059 fold_convert_loc (loc, type, tem));
8060 }
8061
8062 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8063 type of X and Y (integer types only). */
8064 if (INTEGRAL_TYPE_P (type)
8065 && TREE_CODE (op0) == MULT_EXPR
8066 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8067 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8068 {
8069 /* Be careful not to introduce new overflows. */
8070 tree mult_type;
8071 if (TYPE_OVERFLOW_WRAPS (type))
8072 mult_type = type;
8073 else
8074 mult_type = unsigned_type_for (type);
8075
8076 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8077 {
8078 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8079 fold_convert_loc (loc, mult_type,
8080 TREE_OPERAND (op0, 0)),
8081 fold_convert_loc (loc, mult_type,
8082 TREE_OPERAND (op0, 1)));
8083 return fold_convert_loc (loc, type, tem);
8084 }
8085 }
8086
8087 return NULL_TREE;
8088
8089 case VIEW_CONVERT_EXPR:
8090 if (TREE_CODE (op0) == MEM_REF)
8091 return fold_build2_loc (loc, MEM_REF, type,
8092 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8093
8094 return NULL_TREE;
8095
8096 case NEGATE_EXPR:
8097 tem = fold_negate_expr (loc, arg0);
8098 if (tem)
8099 return fold_convert_loc (loc, type, tem);
8100 return NULL_TREE;
8101
8102 case ABS_EXPR:
8103 /* Convert fabs((double)float) into (double)fabsf(float). */
8104 if (TREE_CODE (arg0) == NOP_EXPR
8105 && TREE_CODE (type) == REAL_TYPE)
8106 {
8107 tree targ0 = strip_float_extensions (arg0);
8108 if (targ0 != arg0)
8109 return fold_convert_loc (loc, type,
8110 fold_build1_loc (loc, ABS_EXPR,
8111 TREE_TYPE (targ0),
8112 targ0));
8113 }
8114 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8115 else if (TREE_CODE (arg0) == ABS_EXPR)
8116 return arg0;
8117
8118 /* Strip sign ops from argument. */
8119 if (TREE_CODE (type) == REAL_TYPE)
8120 {
8121 tem = fold_strip_sign_ops (arg0);
8122 if (tem)
8123 return fold_build1_loc (loc, ABS_EXPR, type,
8124 fold_convert_loc (loc, type, tem));
8125 }
8126 return NULL_TREE;
8127
8128 case CONJ_EXPR:
8129 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8130 return fold_convert_loc (loc, type, arg0);
8131 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8132 {
8133 tree itype = TREE_TYPE (type);
8134 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8135 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8136 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8137 negate_expr (ipart));
8138 }
8139 if (TREE_CODE (arg0) == CONJ_EXPR)
8140 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8141 return NULL_TREE;
8142
8143 case BIT_NOT_EXPR:
8144 /* Convert ~ (-A) to A - 1. */
8145 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8146 return fold_build2_loc (loc, MINUS_EXPR, type,
8147 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8148 build_int_cst (type, 1));
8149 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8150 else if (INTEGRAL_TYPE_P (type)
8151 && ((TREE_CODE (arg0) == MINUS_EXPR
8152 && integer_onep (TREE_OPERAND (arg0, 1)))
8153 || (TREE_CODE (arg0) == PLUS_EXPR
8154 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8155 {
8156 /* Perform the negation in ARG0's type and only then convert
8157 to TYPE, so as to avoid introducing undefined behavior. */
8158 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8159 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8160 TREE_OPERAND (arg0, 0));
8161 return fold_convert_loc (loc, type, t);
8162 }
8163 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8164 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8165 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8166 fold_convert_loc (loc, type,
8167 TREE_OPERAND (arg0, 0)))))
8168 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8169 fold_convert_loc (loc, type,
8170 TREE_OPERAND (arg0, 1)));
8171 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8172 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8173 fold_convert_loc (loc, type,
8174 TREE_OPERAND (arg0, 1)))))
8175 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8176 fold_convert_loc (loc, type,
8177 TREE_OPERAND (arg0, 0)), tem);
8178
8179 return NULL_TREE;
8180
8181 case TRUTH_NOT_EXPR:
8182 /* Note that the operand of this must be an int
8183 and its values must be 0 or 1.
8184 ("true" is a fixed value perhaps depending on the language,
8185 but we don't handle values other than 1 correctly yet.) */
8186 tem = fold_truth_not_expr (loc, arg0);
8187 if (!tem)
8188 return NULL_TREE;
8189 return fold_convert_loc (loc, type, tem);
8190
8191 case REALPART_EXPR:
8192 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8193 return fold_convert_loc (loc, type, arg0);
8194 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8195 {
8196 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8197 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8198 fold_build1_loc (loc, REALPART_EXPR, itype,
8199 TREE_OPERAND (arg0, 0)),
8200 fold_build1_loc (loc, REALPART_EXPR, itype,
8201 TREE_OPERAND (arg0, 1)));
8202 return fold_convert_loc (loc, type, tem);
8203 }
8204 if (TREE_CODE (arg0) == CONJ_EXPR)
8205 {
8206 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8207 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8208 TREE_OPERAND (arg0, 0));
8209 return fold_convert_loc (loc, type, tem);
8210 }
8211 if (TREE_CODE (arg0) == CALL_EXPR)
8212 {
8213 tree fn = get_callee_fndecl (arg0);
8214 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8215 switch (DECL_FUNCTION_CODE (fn))
8216 {
8217 CASE_FLT_FN (BUILT_IN_CEXPI):
8218 fn = mathfn_built_in (type, BUILT_IN_COS);
8219 if (fn)
8220 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8221 break;
8222
8223 default:
8224 break;
8225 }
8226 }
8227 return NULL_TREE;
8228
8229 case IMAGPART_EXPR:
8230 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8231 return build_zero_cst (type);
8232 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8233 {
8234 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8235 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8236 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8237 TREE_OPERAND (arg0, 0)),
8238 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8239 TREE_OPERAND (arg0, 1)));
8240 return fold_convert_loc (loc, type, tem);
8241 }
8242 if (TREE_CODE (arg0) == CONJ_EXPR)
8243 {
8244 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8245 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8246 return fold_convert_loc (loc, type, negate_expr (tem));
8247 }
8248 if (TREE_CODE (arg0) == CALL_EXPR)
8249 {
8250 tree fn = get_callee_fndecl (arg0);
8251 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8252 switch (DECL_FUNCTION_CODE (fn))
8253 {
8254 CASE_FLT_FN (BUILT_IN_CEXPI):
8255 fn = mathfn_built_in (type, BUILT_IN_SIN);
8256 if (fn)
8257 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8258 break;
8259
8260 default:
8261 break;
8262 }
8263 }
8264 return NULL_TREE;
8265
8266 case INDIRECT_REF:
8267 /* Fold *&X to X if X is an lvalue. */
8268 if (TREE_CODE (op0) == ADDR_EXPR)
8269 {
8270 tree op00 = TREE_OPERAND (op0, 0);
8271 if ((TREE_CODE (op00) == VAR_DECL
8272 || TREE_CODE (op00) == PARM_DECL
8273 || TREE_CODE (op00) == RESULT_DECL)
8274 && !TREE_READONLY (op00))
8275 return op00;
8276 }
8277 return NULL_TREE;
8278
8279 default:
8280 return NULL_TREE;
8281 } /* switch (code) */
8282 }
8283
8284
8285 /* If the operation was a conversion do _not_ mark a resulting constant
8286 with TREE_OVERFLOW if the original constant was not. These conversions
8287 have implementation defined behavior and retaining the TREE_OVERFLOW
8288 flag here would confuse later passes such as VRP. */
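/* For example, folding (signed char) 511 narrows the value, which is
implementation-defined behavior rather than an overflow, so the
resulting constant must not be marked with TREE_OVERFLOW when 511
itself was not. */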
8289 tree
8290 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8291 tree type, tree op0)
8292 {
8293 tree res = fold_unary_loc (loc, code, type, op0);
8294 if (res
8295 && TREE_CODE (res) == INTEGER_CST
8296 && TREE_CODE (op0) == INTEGER_CST
8297 && CONVERT_EXPR_CODE_P (code))
8298 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8299
8300 return res;
8301 }
8302
8303 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8304 operands OP0 and OP1. LOC is the location of the resulting expression.
8305 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8306 Return the folded expression if folding is successful. Otherwise,
8307 return NULL_TREE. */
8308 static tree
8309 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8310 tree arg0, tree arg1, tree op0, tree op1)
8311 {
8312 tree tem;
8313
8314 /* We only do these simplifications if we are optimizing. */
8315 if (!optimize)
8316 return NULL_TREE;
8317
8318 /* Check for things like (A || B) && (A || C). We can convert this
8319 to A || (B && C). Note that either operator can be any of the four
8320 truth and/or operations and the transformation will still be
8321 valid. Also note that we only care about order for the
8322 ANDIF and ORIF operators. If B contains side effects, this
8323 might change the truth-value of A. */
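/* For instance, (a || b) && (a || c) becomes a || (b && c); the
TREE_SIDE_EFFECTS check below keeps the transformation from
reordering a side-effecting B past the short-circuit point. */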
8324 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8325 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8326 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8327 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8328 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8329 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8330 {
8331 tree a00 = TREE_OPERAND (arg0, 0);
8332 tree a01 = TREE_OPERAND (arg0, 1);
8333 tree a10 = TREE_OPERAND (arg1, 0);
8334 tree a11 = TREE_OPERAND (arg1, 1);
8335 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8336 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8337 && (code == TRUTH_AND_EXPR
8338 || code == TRUTH_OR_EXPR));
8339
8340 if (operand_equal_p (a00, a10, 0))
8341 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8342 fold_build2_loc (loc, code, type, a01, a11));
8343 else if (commutative && operand_equal_p (a00, a11, 0))
8344 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8345 fold_build2_loc (loc, code, type, a01, a10));
8346 else if (commutative && operand_equal_p (a01, a10, 0))
8347 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8348 fold_build2_loc (loc, code, type, a00, a11));
8349
8350 /* This case is tricky because we must either have commutative
8351 operators or else A10 must not have side-effects. */
8352
8353 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8354 && operand_equal_p (a01, a11, 0))
8355 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8356 fold_build2_loc (loc, code, type, a00, a10),
8357 a01);
8358 }
8359
8360 /* See if we can build a range comparison. */
8361 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8362 return tem;
8363
8364 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8365 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8366 {
8367 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8368 if (tem)
8369 return fold_build2_loc (loc, code, type, tem, arg1);
8370 }
8371
8372 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8373 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8374 {
8375 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8376 if (tem)
8377 return fold_build2_loc (loc, code, type, arg0, tem);
8378 }
8379
8380 /* Check for the possibility of merging component references. If our
8381 lhs is another similar operation, try to merge its rhs with our
8382 rhs. Then try to merge our lhs and rhs. */
8383 if (TREE_CODE (arg0) == code
8384 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8385 TREE_OPERAND (arg0, 1), arg1)))
8386 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8387
8388 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8389 return tem;
8390
8391 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8392 && (code == TRUTH_AND_EXPR
8393 || code == TRUTH_ANDIF_EXPR
8394 || code == TRUTH_OR_EXPR
8395 || code == TRUTH_ORIF_EXPR))
8396 {
8397 enum tree_code ncode, icode;
8398
8399 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8400 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8401 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8402
8403 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8404 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8405 We don't want to pack more than two leaves into a non-IF AND/OR
8406 expression.
8407 If the tree code of the left-hand operand isn't an AND/OR-IF code
8408 and isn't equal to IF-CODE, then we don't want to add the
8409 right-hand operand.
8410 If the inner right-hand side of the left-hand operand has
8411 side-effects, or isn't simple, then we can't add to it, as
8412 otherwise we might destroy the if-sequence. */
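/* For example, ((a && b) && c) with simple, non-trapping operands
becomes (a && (b AND c)) here, where AND is the non-short-circuit
TRUTH_AND_EXPR, keeping at most two leaves in that operation. */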
8412 if (TREE_CODE (arg0) == icode
8413 && simple_operand_p_2 (arg1)
8414 /* Needed for sequence points to handle trapping and
8415 side-effects. */
8416 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8417 {
8418 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8419 arg1);
8420 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8421 tem);
8422 }
8423 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8424 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8425 else if (TREE_CODE (arg1) == icode
8426 && simple_operand_p_2 (arg0)
8427 /* Needed for sequence points to handle trapping and
8428 side-effects. */
8429 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8430 {
8431 tem = fold_build2_loc (loc, ncode, type,
8432 arg0, TREE_OPERAND (arg1, 0));
8433 return fold_build2_loc (loc, icode, type, tem,
8434 TREE_OPERAND (arg1, 1));
8435 }
8436 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8437 into (A OR B).
8438 For sequence point consistency, we need to check for trapping
8439 and side-effects. */
8440 else if (code == icode && simple_operand_p_2 (arg0)
8441 && simple_operand_p_2 (arg1))
8442 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8443 }
8444
8445 return NULL_TREE;
8446 }
8447
8448 /* Fold a binary expression of code CODE and type TYPE with operands
8449 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8450 Return the folded expression if folding is successful. Otherwise,
8451 return NULL_TREE. */
8452
8453 static tree
8454 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8455 {
8456 enum tree_code compl_code;
8457
8458 if (code == MIN_EXPR)
8459 compl_code = MAX_EXPR;
8460 else if (code == MAX_EXPR)
8461 compl_code = MIN_EXPR;
8462 else
8463 gcc_unreachable ();
8464
8465 /* MIN (MAX (a, b), b) == b. */
8466 if (TREE_CODE (op0) == compl_code
8467 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8468 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8469
8470 /* MIN (MAX (b, a), b) == b. */
8471 if (TREE_CODE (op0) == compl_code
8472 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8473 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8474 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8475
8476 /* MIN (a, MAX (a, b)) == a. */
8477 if (TREE_CODE (op1) == compl_code
8478 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8479 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8480 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8481
8482 /* MIN (a, MAX (b, a)) == a. */
8483 if (TREE_CODE (op1) == compl_code
8484 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8485 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8486 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8487
8488 return NULL_TREE;
8489 }
8490
8491 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8492 by changing CODE to reduce the magnitude of constants involved in
8493 ARG0 of the comparison.
8494 Returns a canonicalized comparison tree if a simplification was
8495 possible, otherwise returns NULL_TREE.
8496 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8497 valid if signed overflow is undefined. */
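/* For example, A + 2 > B becomes A + 1 >= B (valid only when signed
overflow is undefined, hence *STRICT_OVERFLOW_P), and 5 <= B becomes
4 < B, which the swap below turns into B > 4. */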
8498
8499 static tree
8500 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8501 tree arg0, tree arg1,
8502 bool *strict_overflow_p)
8503 {
8504 enum tree_code code0 = TREE_CODE (arg0);
8505 tree t, cst0 = NULL_TREE;
8506 int sgn0;
8507 bool swap = false;
8508
8509 /* Match A +- CST code arg1 and CST code arg1. We can change the
8510 first form only if overflow is undefined. */
8511 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8512 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8513 /* In principle pointers also have undefined overflow behavior,
8514 but that causes problems elsewhere. */
8515 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8516 && (code0 == MINUS_EXPR
8517 || code0 == PLUS_EXPR)
8518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8519 || code0 == INTEGER_CST))
8520 return NULL_TREE;
8521
8522 /* Identify the constant in arg0 and its sign. */
8523 if (code0 == INTEGER_CST)
8524 cst0 = arg0;
8525 else
8526 cst0 = TREE_OPERAND (arg0, 1);
8527 sgn0 = tree_int_cst_sgn (cst0);
8528
8529 /* Overflowed constants and zero will cause problems. */
8530 if (integer_zerop (cst0)
8531 || TREE_OVERFLOW (cst0))
8532 return NULL_TREE;
8533
8534 /* See if we can reduce the magnitude of the constant in
8535 arg0 by changing the comparison code. */
8536 if (code0 == INTEGER_CST)
8537 {
8538 /* CST <= arg1 -> CST-1 < arg1. */
8539 if (code == LE_EXPR && sgn0 == 1)
8540 code = LT_EXPR;
8541 /* -CST < arg1 -> -CST-1 <= arg1. */
8542 else if (code == LT_EXPR && sgn0 == -1)
8543 code = LE_EXPR;
8544 /* CST > arg1 -> CST-1 >= arg1. */
8545 else if (code == GT_EXPR && sgn0 == 1)
8546 code = GE_EXPR;
8547 /* -CST >= arg1 -> -CST-1 > arg1. */
8548 else if (code == GE_EXPR && sgn0 == -1)
8549 code = GT_EXPR;
8550 else
8551 return NULL_TREE;
8552 /* arg1 code' CST' might be more canonical. */
8553 swap = true;
8554 }
8555 else
8556 {
8557 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8558 if (code == LT_EXPR
8559 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8560 code = LE_EXPR;
8561 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8562 else if (code == GT_EXPR
8563 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8564 code = GE_EXPR;
8565 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8566 else if (code == LE_EXPR
8567 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8568 code = LT_EXPR;
8569 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8570 else if (code == GE_EXPR
8571 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8572 code = GT_EXPR;
8573 else
8574 return NULL_TREE;
8575 *strict_overflow_p = true;
8576 }
8577
8578 /* Now build the constant reduced in magnitude. But not if that
8579 would produce one outside of its type's range. */
8580 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8581 && ((sgn0 == 1
8582 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8583 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8584 || (sgn0 == -1
8585 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8586 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8587 /* We cannot swap the comparison here as that would cause us to
8588 endlessly recurse. */
8589 return NULL_TREE;
8590
8591 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8592 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8593 if (code0 != INTEGER_CST)
8594 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8595 t = fold_convert (TREE_TYPE (arg1), t);
8596
8597 /* If swapping might yield a more canonical form, do so. */
8598 if (swap)
8599 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8600 else
8601 return fold_build2_loc (loc, code, type, t, arg1);
8602 }
8603
8604 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8605 overflow further. Try to decrease the magnitude of constants involved
8606 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8607 and put sole constants at the second argument position.
8608 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8609
8610 static tree
8611 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8612 tree arg0, tree arg1)
8613 {
8614 tree t;
8615 bool strict_overflow_p;
8616 const char * const warnmsg = G_("assuming signed overflow does not occur "
8617 "when reducing constant in comparison");
8618
8619 /* Try canonicalization by simplifying arg0. */
8620 strict_overflow_p = false;
8621 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8622 &strict_overflow_p);
8623 if (t)
8624 {
8625 if (strict_overflow_p)
8626 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8627 return t;
8628 }
8629
8630 /* Try canonicalization by simplifying arg1 using the swapped
8631 comparison. */
8632 code = swap_tree_comparison (code);
8633 strict_overflow_p = false;
8634 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8635 &strict_overflow_p);
8636 if (t && strict_overflow_p)
8637 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8638 return t;
8639 }
8640
8641 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8642 space. This is used to avoid issuing overflow warnings for
8643 expressions like &p->x which cannot wrap. */
8644
8645 static bool
8646 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8647 {
8648 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8649 return true;
8650
8651 if (bitpos < 0)
8652 return true;
8653
8654 wide_int wi_offset;
8655 int precision = TYPE_PRECISION (TREE_TYPE (base));
8656 if (offset == NULL_TREE)
8657 wi_offset = wi::zero (precision);
8658 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8659 return true;
8660 else
8661 wi_offset = offset;
8662
8663 bool overflow;
8664 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8665 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8666 if (overflow)
8667 return true;
8668
8669 if (!wi::fits_uhwi_p (total))
8670 return true;
8671
8672 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8673 if (size <= 0)
8674 return true;
8675
8676 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8677 array. */
8678 if (TREE_CODE (base) == ADDR_EXPR)
8679 {
8680 HOST_WIDE_INT base_size;
8681
8682 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8683 if (base_size > 0 && size < base_size)
8684 size = base_size;
8685 }
8686
8687 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8688 }
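
/* Illustration, assuming a target with 32-bit int, 8-bit units and
   no struct padding: for "struct S { int a; int b; } *p", the
   address &p->b has BITPOS 32 and no variable offset, so TOTAL is
   4 bytes.  That does not exceed sizeof (struct S) == 8, hence we
   return false and &p->b is known not to wrap.  */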
8689
8690 /* Return the HOST_WIDE_INT least significant bits of T, an
8691 INTEGER_CST of sizetype kind. This makes sure to properly
8692 sign-extend the constant. */
8693
8694 static HOST_WIDE_INT
8695 size_low_cst (const_tree t)
8696 {
8697 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8698 int prec = TYPE_PRECISION (TREE_TYPE (t));
8699 if (prec < HOST_BITS_PER_WIDE_INT)
8700 return sext_hwi (w, prec);
8701 return w;
8702 }
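
/* E.g. for a constant of 16-bit precision whose low element reads
   0xffff, the returned value is sign-extended to -1, not 65535.  */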
8703
8704 /* Subroutine of fold_binary. This routine performs all of the
8705 transformations that are common to the equality/inequality
8706 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8707 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8708 fold_binary should call fold_binary instead of this routine. Fold a comparison with
8709 tree code CODE and type TYPE with operands OP0 and OP1. Return
8710 the folded comparison or NULL_TREE. */
8711
8712 static tree
8713 fold_comparison (location_t loc, enum tree_code code, tree type,
8714 tree op0, tree op1)
8715 {
8716 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8717 tree arg0, arg1, tem;
8718
8719 arg0 = op0;
8720 arg1 = op1;
8721
8722 STRIP_SIGN_NOPS (arg0);
8723 STRIP_SIGN_NOPS (arg1);
8724
8725 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8726 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8727 && (equality_code
8728 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8729 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8730 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8731 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8732 && TREE_CODE (arg1) == INTEGER_CST
8733 && !TREE_OVERFLOW (arg1))
8734 {
8735 const enum tree_code
8736 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8737 tree const1 = TREE_OPERAND (arg0, 1);
8738 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8739 tree variable = TREE_OPERAND (arg0, 0);
8740 tree new_const = int_const_binop (reverse_op, const2, const1);
8741
8742 /* If the constant operation overflowed this can be
8743 simplified as a comparison against INT_MAX/INT_MIN. */
8744 if (TREE_OVERFLOW (new_const)
8745 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8746 {
8747 int const1_sgn = tree_int_cst_sgn (const1);
8748 enum tree_code code2 = code;
8749
8750 /* Get the sign of the constant on the lhs if the
8751 operation were VARIABLE + CONST1. */
8752 if (TREE_CODE (arg0) == MINUS_EXPR)
8753 const1_sgn = -const1_sgn;
8754
8755 /* The sign of the constant determines if we overflowed
8756 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8757 Canonicalize to the INT_MIN overflow by swapping the comparison
8758 if necessary. */
8759 if (const1_sgn == -1)
8760 code2 = swap_tree_comparison (code);
8761
8762 /* We now can look at the canonicalized case
8763 VARIABLE + 1 CODE2 INT_MIN
8764 and decide on the result. */
8765 switch (code2)
8766 {
8767 case EQ_EXPR:
8768 case LT_EXPR:
8769 case LE_EXPR:
8770 return
8771 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8772
8773 case NE_EXPR:
8774 case GE_EXPR:
8775 case GT_EXPR:
8776 return
8777 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8778
8779 default:
8780 gcc_unreachable ();
8781 }
8782 }
8783 else
8784 {
8785 if (!equality_code)
8786 fold_overflow_warning ("assuming signed overflow does not occur "
8787 "when changing X +- C1 cmp C2 to "
8788 "X cmp C2 -+ C1",
8789 WARN_STRICT_OVERFLOW_COMPARISON);
8790 return fold_build2_loc (loc, code, type, variable, new_const);
8791 }
8792 }
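
/* Worked example (illustrative): for signed int x, "x + 7 < 10"
   becomes "x < 3".  When the constant folding overflows, as in
   "x - 1 < INT_MAX", the comparison collapses to a constant; here
   it is always true, since x - 1 can only reach INT_MAX through
   undefined signed overflow.  */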
8793
8794 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8795 if (TREE_CODE (arg0) == MINUS_EXPR
8796 && equality_code
8797 && integer_zerop (arg1))
8798 {
8799 /* ??? The transformation is valid for the other operators if overflow
8800 is undefined for the type, but performing it here badly interacts
8801 with the transformation in fold_cond_expr_with_comparison which
8802 attempts to synthesize ABS_EXPR. */
8803 if (!equality_code)
8804 fold_overflow_warning ("assuming signed overflow does not occur "
8805 "when changing X - Y cmp 0 to X cmp Y",
8806 WARN_STRICT_OVERFLOW_COMPARISON);
8807 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8808 TREE_OPERAND (arg0, 1));
8809 }
8810
8811 /* For comparisons of pointers we can decompose it to a compile time
8812 comparison of the base objects and the offsets into the object.
8813 This requires at least one operand being an ADDR_EXPR or a
8814 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8815 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8816 && (TREE_CODE (arg0) == ADDR_EXPR
8817 || TREE_CODE (arg1) == ADDR_EXPR
8818 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8819 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8820 {
8821 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8822 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8823 machine_mode mode;
8824 int volatilep, unsignedp;
8825 bool indirect_base0 = false, indirect_base1 = false;
8826
8827 /* Get base and offset for the access. Strip ADDR_EXPR for
8828 get_inner_reference, but put it back by stripping INDIRECT_REF
8829 off the base object if possible. indirect_baseN will be true
8830 if baseN is not an address but refers to the object itself. */
8831 base0 = arg0;
8832 if (TREE_CODE (arg0) == ADDR_EXPR)
8833 {
8834 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8835 &bitsize, &bitpos0, &offset0, &mode,
8836 &unsignedp, &volatilep, false);
8837 if (TREE_CODE (base0) == INDIRECT_REF)
8838 base0 = TREE_OPERAND (base0, 0);
8839 else
8840 indirect_base0 = true;
8841 }
8842 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8843 {
8844 base0 = TREE_OPERAND (arg0, 0);
8845 STRIP_SIGN_NOPS (base0);
8846 if (TREE_CODE (base0) == ADDR_EXPR)
8847 {
8848 base0 = TREE_OPERAND (base0, 0);
8849 indirect_base0 = true;
8850 }
8851 offset0 = TREE_OPERAND (arg0, 1);
8852 if (tree_fits_shwi_p (offset0))
8853 {
8854 HOST_WIDE_INT off = size_low_cst (offset0);
8855 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8856 * BITS_PER_UNIT)
8857 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8858 {
8859 bitpos0 = off * BITS_PER_UNIT;
8860 offset0 = NULL_TREE;
8861 }
8862 }
8863 }
8864
8865 base1 = arg1;
8866 if (TREE_CODE (arg1) == ADDR_EXPR)
8867 {
8868 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8869 &bitsize, &bitpos1, &offset1, &mode,
8870 &unsignedp, &volatilep, false);
8871 if (TREE_CODE (base1) == INDIRECT_REF)
8872 base1 = TREE_OPERAND (base1, 0);
8873 else
8874 indirect_base1 = true;
8875 }
8876 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8877 {
8878 base1 = TREE_OPERAND (arg1, 0);
8879 STRIP_SIGN_NOPS (base1);
8880 if (TREE_CODE (base1) == ADDR_EXPR)
8881 {
8882 base1 = TREE_OPERAND (base1, 0);
8883 indirect_base1 = true;
8884 }
8885 offset1 = TREE_OPERAND (arg1, 1);
8886 if (tree_fits_shwi_p (offset1))
8887 {
8888 HOST_WIDE_INT off = size_low_cst (offset1);
8889 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8890 * BITS_PER_UNIT)
8891 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8892 {
8893 bitpos1 = off * BITS_PER_UNIT;
8894 offset1 = NULL_TREE;
8895 }
8896 }
8897 }
8898
8899 /* A local variable can never be pointed to by
8900 the default SSA name of an incoming parameter. */
8901 if ((TREE_CODE (arg0) == ADDR_EXPR
8902 && indirect_base0
8903 && TREE_CODE (base0) == VAR_DECL
8904 && auto_var_in_fn_p (base0, current_function_decl)
8905 && !indirect_base1
8906 && TREE_CODE (base1) == SSA_NAME
8907 && SSA_NAME_IS_DEFAULT_DEF (base1)
8908 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8909 || (TREE_CODE (arg1) == ADDR_EXPR
8910 && indirect_base1
8911 && TREE_CODE (base1) == VAR_DECL
8912 && auto_var_in_fn_p (base1, current_function_decl)
8913 && !indirect_base0
8914 && TREE_CODE (base0) == SSA_NAME
8915 && SSA_NAME_IS_DEFAULT_DEF (base0)
8916 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8917 {
8918 if (code == NE_EXPR)
8919 return constant_boolean_node (1, type);
8920 else if (code == EQ_EXPR)
8921 return constant_boolean_node (0, type);
8922 }
8923 /* If we have equivalent bases we might be able to simplify. */
8924 else if (indirect_base0 == indirect_base1
8925 && operand_equal_p (base0, base1, 0))
8926 {
8927 /* We can fold this expression to a constant if the non-constant
8928 offset parts are equal. */
8929 if ((offset0 == offset1
8930 || (offset0 && offset1
8931 && operand_equal_p (offset0, offset1, 0)))
8932 && (code == EQ_EXPR
8933 || code == NE_EXPR
8934 || (indirect_base0 && DECL_P (base0))
8935 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8936
8937 {
8938 if (!equality_code
8939 && bitpos0 != bitpos1
8940 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8941 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8942 fold_overflow_warning (("assuming pointer wraparound does not "
8943 "occur when comparing P +- C1 with "
8944 "P +- C2"),
8945 WARN_STRICT_OVERFLOW_CONDITIONAL);
8946
8947 switch (code)
8948 {
8949 case EQ_EXPR:
8950 return constant_boolean_node (bitpos0 == bitpos1, type);
8951 case NE_EXPR:
8952 return constant_boolean_node (bitpos0 != bitpos1, type);
8953 case LT_EXPR:
8954 return constant_boolean_node (bitpos0 < bitpos1, type);
8955 case LE_EXPR:
8956 return constant_boolean_node (bitpos0 <= bitpos1, type);
8957 case GE_EXPR:
8958 return constant_boolean_node (bitpos0 >= bitpos1, type);
8959 case GT_EXPR:
8960 return constant_boolean_node (bitpos0 > bitpos1, type);
8961 default:;
8962 }
8963 }
8964 /* We can simplify the comparison to a comparison of the variable
8965 offset parts if the constant offset parts are equal.
8966 Be careful to use signed sizetype here because otherwise we
8967 mess with array offsets in the wrong way. This is possible
8968 because pointer arithmetic is restricted to remain within an
8969 object and overflow on pointer differences is undefined as of
8970 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8971 else if (bitpos0 == bitpos1
8972 && (equality_code
8973 || (indirect_base0 && DECL_P (base0))
8974 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8975 {
8976 /* By converting to signed sizetype we cover middle-end pointer
8977 arithmetic which operates on unsigned pointer types of size
8978 type size and ARRAY_REF offsets which are properly sign or
8979 zero extended from their type in case it is narrower than
8980 sizetype. */
8981 if (offset0 == NULL_TREE)
8982 offset0 = build_int_cst (ssizetype, 0);
8983 else
8984 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8985 if (offset1 == NULL_TREE)
8986 offset1 = build_int_cst (ssizetype, 0);
8987 else
8988 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8989
8990 if (!equality_code
8991 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8992 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8993 fold_overflow_warning (("assuming pointer wraparound does not "
8994 "occur when comparing P +- C1 with "
8995 "P +- C2"),
8996 WARN_STRICT_OVERFLOW_COMPARISON);
8997
8998 return fold_build2_loc (loc, code, type, offset0, offset1);
8999 }
9000 }
9001 /* For non-equal bases we can simplify if they are addresses of
9002 declarations with different addresses. */
9003 else if (indirect_base0 && indirect_base1
9004 /* We know that !operand_equal_p (base0, base1, 0)
9005 because the if condition was false. But make
9006 sure two decls are not the same. */
9007 && base0 != base1
9008 && TREE_CODE (arg0) == ADDR_EXPR
9009 && TREE_CODE (arg1) == ADDR_EXPR
9010 && DECL_P (base0)
9011 && DECL_P (base1)
9012 /* Watch for aliases. */
9013 && (!decl_in_symtab_p (base0)
9014 || !decl_in_symtab_p (base1)
9015 || !symtab_node::get_create (base0)->equal_address_to
9016 (symtab_node::get_create (base1))))
9017 {
9018 if (code == EQ_EXPR)
9019 return omit_two_operands_loc (loc, type, boolean_false_node,
9020 arg0, arg1);
9021 else if (code == NE_EXPR)
9022 return omit_two_operands_loc (loc, type, boolean_true_node,
9023 arg0, arg1);
9024 }
9025 /* For equal offsets we can simplify to a comparison of the
9026 base addresses. */
9027 else if (bitpos0 == bitpos1
9028 && (indirect_base0
9029 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9030 && (indirect_base1
9031 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9032 && ((offset0 == offset1)
9033 || (offset0 && offset1
9034 && operand_equal_p (offset0, offset1, 0))))
9035 {
9036 if (indirect_base0)
9037 base0 = build_fold_addr_expr_loc (loc, base0);
9038 if (indirect_base1)
9039 base1 = build_fold_addr_expr_loc (loc, base1);
9040 return fold_build2_loc (loc, code, type, base0, base1);
9041 }
9042 }
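
/* Worked example (illustrative): given "int a[8];" with 32-bit int
   elements, "&a[1] < &a[3]" has equal bases and constant offsets
   only, so the bitpos test above folds it to true (32 < 96).  */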
9043
9044 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9045 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9046 the resulting offset is smaller in absolute value than the
9047 original one and has the same sign. */
9048 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9049 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9050 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9051 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9052 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9053 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9054 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9055 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9056 {
9057 tree const1 = TREE_OPERAND (arg0, 1);
9058 tree const2 = TREE_OPERAND (arg1, 1);
9059 tree variable1 = TREE_OPERAND (arg0, 0);
9060 tree variable2 = TREE_OPERAND (arg1, 0);
9061 tree cst;
9062 const char * const warnmsg = G_("assuming signed overflow does not "
9063 "occur when combining constants around "
9064 "a comparison");
9065
9066 /* Put the constant on the side where it doesn't overflow and is
9067 of smaller absolute value and of the same sign as before. */
9068 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9069 ? MINUS_EXPR : PLUS_EXPR,
9070 const2, const1);
9071 if (!TREE_OVERFLOW (cst)
9072 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9073 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9074 {
9075 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9076 return fold_build2_loc (loc, code, type,
9077 variable1,
9078 fold_build2_loc (loc, TREE_CODE (arg1),
9079 TREE_TYPE (arg1),
9080 variable2, cst));
9081 }
9082
9083 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9084 ? MINUS_EXPR : PLUS_EXPR,
9085 const1, const2);
9086 if (!TREE_OVERFLOW (cst)
9087 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9088 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9089 {
9090 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9091 return fold_build2_loc (loc, code, type,
9092 fold_build2_loc (loc, TREE_CODE (arg0),
9093 TREE_TYPE (arg0),
9094 variable1, cst),
9095 variable2);
9096 }
9097 }
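
/* Worked example (illustrative): for signed x and y, "x + 2 < y + 5"
   becomes "x < y + 3"; the combined constant 3 is smaller in
   magnitude than 5 and has the same sign, so the rewrite cannot
   introduce an overflow that was not already present.  */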
9098
9099 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9100 signed arithmetic case. That form is created by the compiler
9101 often enough for folding it to be of value. One example is in
9102 computing loop trip counts after Operator Strength Reduction. */
9103 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9104 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9105 && TREE_CODE (arg0) == MULT_EXPR
9106 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9107 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9108 && integer_zerop (arg1))
9109 {
9110 tree const1 = TREE_OPERAND (arg0, 1);
9111 tree const2 = arg1; /* zero */
9112 tree variable1 = TREE_OPERAND (arg0, 0);
9113 enum tree_code cmp_code = code;
9114
9115 /* Handle unfolded multiplication by zero. */
9116 if (integer_zerop (const1))
9117 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9118
9119 fold_overflow_warning (("assuming signed overflow does not occur when "
9120 "eliminating multiplication in comparison "
9121 "with zero"),
9122 WARN_STRICT_OVERFLOW_COMPARISON);
9123
9124 /* If const1 is negative we swap the sense of the comparison. */
9125 if (tree_int_cst_sgn (const1) < 0)
9126 cmp_code = swap_tree_comparison (cmp_code);
9127
9128 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9129 }
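
/* E.g. for signed x, "x * 4 > 0" becomes "x > 0", and "x * -4 > 0"
   becomes "x < 0" because the negative factor swaps the sense of
   the comparison.  */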
9130
9131 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9132 if (tem)
9133 return tem;
9134
9135 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9136 {
9137 tree targ0 = strip_float_extensions (arg0);
9138 tree targ1 = strip_float_extensions (arg1);
9139 tree newtype = TREE_TYPE (targ0);
9140
9141 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9142 newtype = TREE_TYPE (targ1);
9143
9144 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9145 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9146 return fold_build2_loc (loc, code, type,
9147 fold_convert_loc (loc, newtype, targ0),
9148 fold_convert_loc (loc, newtype, targ1));
9149
9150 /* (-a) CMP (-b) -> b CMP a */
9151 if (TREE_CODE (arg0) == NEGATE_EXPR
9152 && TREE_CODE (arg1) == NEGATE_EXPR)
9153 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9154 TREE_OPERAND (arg0, 0));
9155
9156 if (TREE_CODE (arg1) == REAL_CST)
9157 {
9158 REAL_VALUE_TYPE cst;
9159 cst = TREE_REAL_CST (arg1);
9160
9161 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9162 if (TREE_CODE (arg0) == NEGATE_EXPR)
9163 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9164 TREE_OPERAND (arg0, 0),
9165 build_real (TREE_TYPE (arg1),
9166 real_value_negate (&cst)));
9167
9168 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9169 /* a CMP (-0) -> a CMP 0 */
9170 if (REAL_VALUE_MINUS_ZERO (cst))
9171 return fold_build2_loc (loc, code, type, arg0,
9172 build_real (TREE_TYPE (arg1), dconst0));
9173
9174 /* x != NaN is always true, other ops are always false. */
9175 if (REAL_VALUE_ISNAN (cst)
9176 && ! HONOR_SNANS (arg1))
9177 {
9178 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9179 return omit_one_operand_loc (loc, type, tem, arg0);
9180 }
9181
9182 /* Fold comparisons against infinity. */
9183 if (REAL_VALUE_ISINF (cst)
9184 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9185 {
9186 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9187 if (tem != NULL_TREE)
9188 return tem;
9189 }
9190 }
9191
9192 /* If this is a comparison of a real constant with a PLUS_EXPR
9193 or a MINUS_EXPR of a real constant, we can convert it into a
9194 comparison with a revised real constant as long as no overflow
9195 occurs when unsafe_math_optimizations are enabled. */
9196 if (flag_unsafe_math_optimizations
9197 && TREE_CODE (arg1) == REAL_CST
9198 && (TREE_CODE (arg0) == PLUS_EXPR
9199 || TREE_CODE (arg0) == MINUS_EXPR)
9200 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9201 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9202 ? MINUS_EXPR : PLUS_EXPR,
9203 arg1, TREE_OPERAND (arg0, 1)))
9204 && !TREE_OVERFLOW (tem))
9205 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9206
9207 /* Likewise, we can simplify a comparison of a real constant with
9208 a MINUS_EXPR whose first operand is also a real constant, i.e.
9209 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9210 floating-point types only if -fassociative-math is set. */
9211 if (flag_associative_math
9212 && TREE_CODE (arg1) == REAL_CST
9213 && TREE_CODE (arg0) == MINUS_EXPR
9214 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9215 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9216 arg1))
9217 && !TREE_OVERFLOW (tem))
9218 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9219 TREE_OPERAND (arg0, 1), tem);
9220
9221 /* Fold comparisons against built-in math functions. */
9222 if (TREE_CODE (arg1) == REAL_CST
9223 && flag_unsafe_math_optimizations
9224 && ! flag_errno_math)
9225 {
9226 enum built_in_function fcode = builtin_mathfn_code (arg0);
9227
9228 if (fcode != END_BUILTINS)
9229 {
9230 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9231 if (tem != NULL_TREE)
9232 return tem;
9233 }
9234 }
9235 }
9236
9237 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9238 && CONVERT_EXPR_P (arg0))
9239 {
9240 /* If we are widening one operand of an integer comparison,
9241 see if the other operand is similarly being widened. Perhaps we
9242 can do the comparison in the narrower type. */
9243 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9244 if (tem)
9245 return tem;
9246
9247 /* Or if we are changing signedness. */
9248 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9249 if (tem)
9250 return tem;
9251 }
9252
9253 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9254 constant, we can simplify it. */
9255 if (TREE_CODE (arg1) == INTEGER_CST
9256 && (TREE_CODE (arg0) == MIN_EXPR
9257 || TREE_CODE (arg0) == MAX_EXPR)
9258 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9259 {
9260 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9261 if (tem)
9262 return tem;
9263 }
9264
9265 /* Simplify comparison of something with itself. (For IEEE
9266 floating-point, we can only do some of these simplifications.) */
9267 if (operand_equal_p (arg0, arg1, 0))
9268 {
9269 switch (code)
9270 {
9271 case EQ_EXPR:
9272 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9273 || ! HONOR_NANS (arg0))
9274 return constant_boolean_node (1, type);
9275 break;
9276
9277 case GE_EXPR:
9278 case LE_EXPR:
9279 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9280 || ! HONOR_NANS (arg0))
9281 return constant_boolean_node (1, type);
9282 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9283
9284 case NE_EXPR:
9285 /* For NE, we can only do this simplification if integer
9286 or we don't honor IEEE floating point NaNs. */
9287 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9288 && HONOR_NANS (arg0))
9289 break;
9290 /* ... fall through ... */
9291 case GT_EXPR:
9292 case LT_EXPR:
9293 return constant_boolean_node (0, type);
9294 default:
9295 gcc_unreachable ();
9296 }
9297 }
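
/* E.g. "x <= x" folds to true for integral x; for a float x with
   NaNs honored it becomes "x == x" instead, which is false exactly
   when x is a NaN.  */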
9298
9299 /* If we are comparing an expression that just has comparisons
9300 of two integer values, arithmetic expressions of those comparisons,
9301 and constants, we can simplify it. There are only three cases
9302 to check: the two values can either be equal, the first can be
9303 greater, or the second can be greater. Fold the expression for
9304 those three values. Since each value must be 0 or 1, we have
9305 eight possibilities, each of which corresponds to the constant 0
9306 or 1 or one of the six possible comparisons.
9307
9308 This handles common cases like (a > b) == 0 but also handles
9309 expressions like ((x > y) - (y > x)) > 0, which supposedly
9310 occur in macroized code. */
9311
9312 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9313 {
9314 tree cval1 = 0, cval2 = 0;
9315 int save_p = 0;
9316
9317 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9318 /* Don't handle degenerate cases here; they should already
9319 have been handled anyway. */
9320 && cval1 != 0 && cval2 != 0
9321 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9322 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9323 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9324 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9325 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9326 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9327 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9328 {
9329 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9330 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9331
9332 /* We can't just pass T to eval_subst in case cval1 or cval2
9333 was the same as ARG1. */
9334
9335 tree high_result
9336 = fold_build2_loc (loc, code, type,
9337 eval_subst (loc, arg0, cval1, maxval,
9338 cval2, minval),
9339 arg1);
9340 tree equal_result
9341 = fold_build2_loc (loc, code, type,
9342 eval_subst (loc, arg0, cval1, maxval,
9343 cval2, maxval),
9344 arg1);
9345 tree low_result
9346 = fold_build2_loc (loc, code, type,
9347 eval_subst (loc, arg0, cval1, minval,
9348 cval2, maxval),
9349 arg1);
9350
9351 /* All three of these results should be 0 or 1. Confirm they are.
9352 Then use those values to select the proper code to use. */
9353
9354 if (TREE_CODE (high_result) == INTEGER_CST
9355 && TREE_CODE (equal_result) == INTEGER_CST
9356 && TREE_CODE (low_result) == INTEGER_CST)
9357 {
9358 /* Make a 3-bit mask with the high-order bit being the
9359 value for `>', the next for `=', and the low for `<'. */
9360 switch ((integer_onep (high_result) * 4)
9361 + (integer_onep (equal_result) * 2)
9362 + integer_onep (low_result))
9363 {
9364 case 0:
9365 /* Always false. */
9366 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9367 case 1:
9368 code = LT_EXPR;
9369 break;
9370 case 2:
9371 code = EQ_EXPR;
9372 break;
9373 case 3:
9374 code = LE_EXPR;
9375 break;
9376 case 4:
9377 code = GT_EXPR;
9378 break;
9379 case 5:
9380 code = NE_EXPR;
9381 break;
9382 case 6:
9383 code = GE_EXPR;
9384 break;
9385 case 7:
9386 /* Always true. */
9387 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9388 }
9389
9390 if (save_p)
9391 {
9392 tem = save_expr (build2 (code, type, cval1, cval2));
9393 SET_EXPR_LOCATION (tem, loc);
9394 return tem;
9395 }
9396 return fold_build2_loc (loc, code, type, cval1, cval2);
9397 }
9398 }
9399 }
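
/* Worked example (illustrative): for "(a > b) == 0" the three
   substitutions yield high_result = 0, equal_result = 1 and
   low_result = 1, i.e. mask 3, so the expression folds to
   "a <= b".  */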
9400
9401 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9402 into a single range test. */
9403 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9404 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9405 && TREE_CODE (arg1) == INTEGER_CST
9406 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9407 && !integer_zerop (TREE_OPERAND (arg0, 1))
9408 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9409 && !TREE_OVERFLOW (arg1))
9410 {
9411 tem = fold_div_compare (loc, code, type, arg0, arg1);
9412 if (tem != NULL_TREE)
9413 return tem;
9414 }
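
/* E.g. "x / 4 == 2" with signed, truncating division holds exactly
   for x in [8, 11], so fold_div_compare can replace the division
   by a single range test on x.  */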
9415
9416 /* Fold ~X op ~Y as Y op X. */
9417 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9418 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9419 {
9420 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9421 return fold_build2_loc (loc, code, type,
9422 fold_convert_loc (loc, cmp_type,
9423 TREE_OPERAND (arg1, 0)),
9424 TREE_OPERAND (arg0, 0));
9425 }
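
/* E.g. "~x < ~y" becomes "y < x": bitwise complement reverses the
   order of integers (~x == -x - 1 in two's complement), so comparing
   the complements is the swapped comparison of the operands.  */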
9426
9427 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9428 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9429 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9430 {
9431 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9432 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9433 TREE_OPERAND (arg0, 0),
9434 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9435 fold_convert_loc (loc, cmp_type, arg1)));
9436 }
9437
9438 return NULL_TREE;
9439 }
9440
9441
9442 /* Subroutine of fold_binary. Optimize complex multiplications of the
9443 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9444 argument EXPR represents the expression "z" of type TYPE. */
9445
9446 static tree
9447 fold_mult_zconjz (location_t loc, tree type, tree expr)
9448 {
9449 tree itype = TREE_TYPE (type);
9450 tree rpart, ipart, tem;
9451
9452 if (TREE_CODE (expr) == COMPLEX_EXPR)
9453 {
9454 rpart = TREE_OPERAND (expr, 0);
9455 ipart = TREE_OPERAND (expr, 1);
9456 }
9457 else if (TREE_CODE (expr) == COMPLEX_CST)
9458 {
9459 rpart = TREE_REALPART (expr);
9460 ipart = TREE_IMAGPART (expr);
9461 }
9462 else
9463 {
9464 expr = save_expr (expr);
9465 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9466 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9467 }
9468
9469 rpart = save_expr (rpart);
9470 ipart = save_expr (ipart);
9471 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9472 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9473 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9474 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9475 build_zero_cst (itype));
9476 }
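
/* This is just the identity (r + i*I) * (r - i*I) == r*r + i*i;
   e.g. (3 + 4i) * conj (3 + 4i) folds to 25 + 0i.  */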
9477
9478
9479 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9480 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9481 guarantees that P and N have the same least significant log2(M) bits.
9482 N is not otherwise constrained. In particular, N is not normalized to
9483 0 <= N < M as is common. In general, the precise value of P is unknown.
9484 M is chosen as large as possible such that constant N can be determined.
9485
9486 Returns M and sets *RESIDUE to N.
9487
9488 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9489 account. This is not always possible due to PR 35705.
9490 */
9491
9492 static unsigned HOST_WIDE_INT
9493 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9494 bool allow_func_align)
9495 {
9496 enum tree_code code;
9497
9498 *residue = 0;
9499
9500 code = TREE_CODE (expr);
9501 if (code == ADDR_EXPR)
9502 {
9503 unsigned int bitalign;
9504 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9505 *residue /= BITS_PER_UNIT;
9506 return bitalign / BITS_PER_UNIT;
9507 }
9508 else if (code == POINTER_PLUS_EXPR)
9509 {
9510 tree op0, op1;
9511 unsigned HOST_WIDE_INT modulus;
9512 enum tree_code inner_code;
9513
9514 op0 = TREE_OPERAND (expr, 0);
9515 STRIP_NOPS (op0);
9516 modulus = get_pointer_modulus_and_residue (op0, residue,
9517 allow_func_align);
9518
9519 op1 = TREE_OPERAND (expr, 1);
9520 STRIP_NOPS (op1);
9521 inner_code = TREE_CODE (op1);
9522 if (inner_code == INTEGER_CST)
9523 {
9524 *residue += TREE_INT_CST_LOW (op1);
9525 return modulus;
9526 }
9527 else if (inner_code == MULT_EXPR)
9528 {
9529 op1 = TREE_OPERAND (op1, 1);
9530 if (TREE_CODE (op1) == INTEGER_CST)
9531 {
9532 unsigned HOST_WIDE_INT align;
9533
9534 /* Compute the greatest power-of-2 divisor of op1. */
9535 align = TREE_INT_CST_LOW (op1);
9536 align &= -align;
9537
9538 /* If align is non-zero and less than modulus, replace
9539 modulus with align. If align is 0, then either op1 is 0
9540 or the greatest power-of-2 divisor of op1 doesn't fit in an
9541 unsigned HOST_WIDE_INT. In either case, no additional
9542 constraint is imposed. */
9543 if (align)
9544 modulus = MIN (modulus, align);
9545
9546 return modulus;
9547 }
9548 }
9549 }
9550
9551 /* If we get here, we were unable to determine anything useful about the
9552 expression. */
9553 return 1;
9554 }
9555
9556 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9557 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9558
9559 static bool
9560 vec_cst_ctor_to_array (tree arg, tree *elts)
9561 {
9562 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9563
9564 if (TREE_CODE (arg) == VECTOR_CST)
9565 {
9566 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9567 elts[i] = VECTOR_CST_ELT (arg, i);
9568 }
9569 else if (TREE_CODE (arg) == CONSTRUCTOR)
9570 {
9571 constructor_elt *elt;
9572
9573 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9574 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9575 return false;
9576 else
9577 elts[i] = elt->value;
9578 }
9579 else
9580 return false;
9581 for (; i < nelts; i++)
9582 elts[i]
9583 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9584 return true;
9585 }
9586
9587 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9588 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9589 NULL_TREE otherwise. */
9590
9591 static tree
9592 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9593 {
9594 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9595 tree *elts;
9596 bool need_ctor = false;
9597
9598 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9599 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9600 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9601 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9602 return NULL_TREE;
9603
9604 elts = XALLOCAVEC (tree, nelts * 3);
9605 if (!vec_cst_ctor_to_array (arg0, elts)
9606 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9607 return NULL_TREE;
9608
9609 for (i = 0; i < nelts; i++)
9610 {
9611 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9612 need_ctor = true;
9613 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9614 }
9615
9616 if (need_ctor)
9617 {
9618 vec<constructor_elt, va_gc> *v;
9619 vec_alloc (v, nelts);
9620 for (i = 0; i < nelts; i++)
9621 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9622 return build_constructor (type, v);
9623 }
9624 else
9625 return build_vector (type, &elts[2 * nelts]);
9626 }
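
/* E.g. with arg0 = { 1, 2, 3, 4 }, arg1 = { 5, 6, 7, 8 } and
   sel = { 0, 5, 2, 7 }, the result is { 1, 6, 3, 8 }: selector
   values below NELTS pick elements from arg0, the others from
   arg1.  */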
9627
9628 /* Try to fold a pointer difference of type TYPE between two address
9629 expressions of array references AREF0 and AREF1 using location LOC.
9630 Return a simplified expression for the difference or NULL_TREE. */
9631
9632 static tree
9633 fold_addr_of_array_ref_difference (location_t loc, tree type,
9634 tree aref0, tree aref1)
9635 {
9636 tree base0 = TREE_OPERAND (aref0, 0);
9637 tree base1 = TREE_OPERAND (aref1, 0);
9638 tree base_offset = build_int_cst (type, 0);
9639
9640 /* If the bases are array references as well, recurse. If the bases
9641 are pointer indirections compute the difference of the pointers.
9642 If the bases are equal, we are set. */
9643 if ((TREE_CODE (base0) == ARRAY_REF
9644 && TREE_CODE (base1) == ARRAY_REF
9645 && (base_offset
9646 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9647 || (INDIRECT_REF_P (base0)
9648 && INDIRECT_REF_P (base1)
9649 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9650 TREE_OPERAND (base0, 0),
9651 TREE_OPERAND (base1, 0))))
9652 || operand_equal_p (base0, base1, 0))
9653 {
9654 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9655 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9656 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9657 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9658 return fold_build2_loc (loc, PLUS_EXPR, type,
9659 base_offset,
9660 fold_build2_loc (loc, MULT_EXPR, type,
9661 diff, esz));
9662 }
9663 return NULL_TREE;
9664 }
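
/* Worked example (illustrative): for "int a[8];" with 32-bit int
   elements, the difference of the addresses &a[5] and &a[2] has
   equal bases and folds to (5 - 2) * 4, i.e. 12 bytes.  */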
9665
9666 /* If the real or vector real constant CST of type TYPE has an exact
9667 inverse, return it, else return NULL. */
9668
9669 tree
9670 exact_inverse (tree type, tree cst)
9671 {
9672 REAL_VALUE_TYPE r;
9673 tree unit_type, *elts;
9674 machine_mode mode;
9675 unsigned vec_nelts, i;
9676
9677 switch (TREE_CODE (cst))
9678 {
9679 case REAL_CST:
9680 r = TREE_REAL_CST (cst);
9681
9682 if (exact_real_inverse (TYPE_MODE (type), &r))
9683 return build_real (type, r);
9684
9685 return NULL_TREE;
9686
9687 case VECTOR_CST:
9688 vec_nelts = VECTOR_CST_NELTS (cst);
9689 elts = XALLOCAVEC (tree, vec_nelts);
9690 unit_type = TREE_TYPE (type);
9691 mode = TYPE_MODE (unit_type);
9692
9693 for (i = 0; i < vec_nelts; i++)
9694 {
9695 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9696 if (!exact_real_inverse (mode, &r))
9697 return NULL_TREE;
9698 elts[i] = build_real (unit_type, r);
9699 }
9700
9701 return build_vector (type, elts);
9702
9703 default:
9704 return NULL_TREE;
9705 }
9706 }
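
/* E.g. 0.25 has the exact binary inverse 4.0, which is returned;
   3.0 does not (1/3 is not representable), so NULL_TREE is
   returned instead.  */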
9707
9708 /* Mask out the tz least significant bits of X of type TYPE where
9709 tz is the number of trailing zeroes in Y. */
9710 static wide_int
9711 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9712 {
9713 int tz = wi::ctz (y);
9714 if (tz > 0)
9715 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9716 return x;
9717 }
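
/* E.g. with x = 0b10111 and y = 0b01100 (two trailing zeroes) the
   result is 0b10100: the two least significant bits of x are
   cleared.  */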
9718
9719 /* Return true when T is an address and is known to be nonzero.
9720 For floating point we further ensure that T is not denormal.
9721 Similar logic is present in nonzero_address in rtlanal.h.
9722
9723 If the return value is based on the assumption that signed overflow
9724 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9725 change *STRICT_OVERFLOW_P. */
9726
9727 static bool
9728 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9729 {
9730 tree type = TREE_TYPE (t);
9731 enum tree_code code;
9732
9733 /* Doing something useful for floating point would need more work. */
9734 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9735 return false;
9736
9737 code = TREE_CODE (t);
9738 switch (TREE_CODE_CLASS (code))
9739 {
9740 case tcc_unary:
9741 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9742 strict_overflow_p);
9743 case tcc_binary:
9744 case tcc_comparison:
9745 return tree_binary_nonzero_warnv_p (code, type,
9746 TREE_OPERAND (t, 0),
9747 TREE_OPERAND (t, 1),
9748 strict_overflow_p);
9749 case tcc_constant:
9750 case tcc_declaration:
9751 case tcc_reference:
9752 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9753
9754 default:
9755 break;
9756 }
9757
9758 switch (code)
9759 {
9760 case TRUTH_NOT_EXPR:
9761 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9762 strict_overflow_p);
9763
9764 case TRUTH_AND_EXPR:
9765 case TRUTH_OR_EXPR:
9766 case TRUTH_XOR_EXPR:
9767 return tree_binary_nonzero_warnv_p (code, type,
9768 TREE_OPERAND (t, 0),
9769 TREE_OPERAND (t, 1),
9770 strict_overflow_p);
9771
9772 case COND_EXPR:
9773 case CONSTRUCTOR:
9774 case OBJ_TYPE_REF:
9775 case ASSERT_EXPR:
9776 case ADDR_EXPR:
9777 case WITH_SIZE_EXPR:
9778 case SSA_NAME:
9779 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9780
9781 case COMPOUND_EXPR:
9782 case MODIFY_EXPR:
9783 case BIND_EXPR:
9784 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9785 strict_overflow_p);
9786
9787 case SAVE_EXPR:
9788 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9789 strict_overflow_p);
9790
9791 case CALL_EXPR:
9792 {
9793 tree fndecl = get_callee_fndecl (t);
9794 if (!fndecl) return false;
9795 if (flag_delete_null_pointer_checks && !flag_check_new
9796 && DECL_IS_OPERATOR_NEW (fndecl)
9797 && !TREE_NOTHROW (fndecl))
9798 return true;
9799 if (flag_delete_null_pointer_checks
9800 && lookup_attribute ("returns_nonnull",
9801 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9802 return true;
9803 return alloca_call_p (t);
9804 }
9805
9806 default:
9807 break;
9808 }
9809 return false;
9810 }
9811
9812 /* Return true when T is an address and is known to be nonzero.
9813 Handle warnings about undefined signed overflow. */
9814
9815 static bool
9816 tree_expr_nonzero_p (tree t)
9817 {
9818 bool ret, strict_overflow_p;
9819
9820 strict_overflow_p = false;
9821 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9822 if (strict_overflow_p)
9823 fold_overflow_warning (("assuming signed overflow does not occur when "
9824 "determining that expression is always "
9825 "non-zero"),
9826 WARN_STRICT_OVERFLOW_MISC);
9827 return ret;
9828 }
9829
9830 /* Fold a binary expression of code CODE and type TYPE with operands
9831 OP0 and OP1. LOC is the location of the resulting expression.
9832 Return the folded expression if folding is successful. Otherwise,
9833 return NULL_TREE. */
9834
9835 tree
9836 fold_binary_loc (location_t loc,
9837 enum tree_code code, tree type, tree op0, tree op1)
9838 {
9839 enum tree_code_class kind = TREE_CODE_CLASS (code);
9840 tree arg0, arg1, tem;
9841 tree t1 = NULL_TREE;
9842 bool strict_overflow_p;
9843 unsigned int prec;
9844
9845 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9846 && TREE_CODE_LENGTH (code) == 2
9847 && op0 != NULL_TREE
9848 && op1 != NULL_TREE);
9849
9850 arg0 = op0;
9851 arg1 = op1;
9852
9853 /* Strip any conversions that don't change the mode. This is
9854 safe for every expression, except for a comparison expression
9855 because its signedness is derived from its operands. So, in
9856 the latter case, only strip conversions that don't change the
9857 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9858 preserved.
9859
9860 Note that this is done as an internal manipulation within the
9861 constant folder, in order to find the simplest representation
9862 of the arguments so that their form can be studied. In any
9863 case, the appropriate type conversions should be put back in
9864 the tree that will get out of the constant folder. */
9865
9866 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9867 {
9868 STRIP_SIGN_NOPS (arg0);
9869 STRIP_SIGN_NOPS (arg1);
9870 }
9871 else
9872 {
9873 STRIP_NOPS (arg0);
9874 STRIP_NOPS (arg1);
9875 }
9876
9877 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9878 constant but we can't do arithmetic on them. */
9879 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9880 {
9881 tem = const_binop (code, type, arg0, arg1);
9882 if (tem != NULL_TREE)
9883 {
9884 if (TREE_TYPE (tem) != type)
9885 tem = fold_convert_loc (loc, type, tem);
9886 return tem;
9887 }
9888 }
9889
9890 /* If this is a commutative operation, and ARG0 is a constant, move it
9891 to ARG1 to reduce the number of tests below. */
9892 if (commutative_tree_code (code)
9893 && tree_swap_operands_p (arg0, arg1, true))
9894 return fold_build2_loc (loc, code, type, op1, op0);
9895
9896 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9897 to ARG1 to reduce the number of tests below. */
9898 if (kind == tcc_comparison
9899 && tree_swap_operands_p (arg0, arg1, true))
9900 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9901
9902 tem = generic_simplify (loc, code, type, op0, op1);
9903 if (tem)
9904 return tem;
9905
9906 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9907
9908 First check for cases where an arithmetic operation is applied to a
9909 compound, conditional, or comparison operation. Push the arithmetic
9910 operation inside the compound or conditional to see if any folding
9911 can then be done. Convert comparison to conditional for this purpose.
9912 This also optimizes non-constant cases that used to be done in
9913 expand_expr.
9914
9915 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9916 one of the operands is a comparison and the other is a comparison, a
9917 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9918 code below would make the expression more complex. Change it to a
9919 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9920 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9921
9922 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9923 || code == EQ_EXPR || code == NE_EXPR)
9924 && TREE_CODE (type) != VECTOR_TYPE
9925 && ((truth_value_p (TREE_CODE (arg0))
9926 && (truth_value_p (TREE_CODE (arg1))
9927 || (TREE_CODE (arg1) == BIT_AND_EXPR
9928 && integer_onep (TREE_OPERAND (arg1, 1)))))
9929 || (truth_value_p (TREE_CODE (arg1))
9930 && (truth_value_p (TREE_CODE (arg0))
9931 || (TREE_CODE (arg0) == BIT_AND_EXPR
9932 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9933 {
9934 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9935 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9936 : TRUTH_XOR_EXPR,
9937 boolean_type_node,
9938 fold_convert_loc (loc, boolean_type_node, arg0),
9939 fold_convert_loc (loc, boolean_type_node, arg1));
9940
9941 if (code == EQ_EXPR)
9942 tem = invert_truthvalue_loc (loc, tem);
9943
9944 return fold_convert_loc (loc, type, tem);
9945 }
9946
9947 if (TREE_CODE_CLASS (code) == tcc_binary
9948 || TREE_CODE_CLASS (code) == tcc_comparison)
9949 {
9950 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9951 {
9952 tem = fold_build2_loc (loc, code, type,
9953 fold_convert_loc (loc, TREE_TYPE (op0),
9954 TREE_OPERAND (arg0, 1)), op1);
9955 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9956 tem);
9957 }
9958 if (TREE_CODE (arg1) == COMPOUND_EXPR
9959 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9960 {
9961 tem = fold_build2_loc (loc, code, type, op0,
9962 fold_convert_loc (loc, TREE_TYPE (op1),
9963 TREE_OPERAND (arg1, 1)));
9964 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9965 tem);
9966 }
9967
9968 if (TREE_CODE (arg0) == COND_EXPR
9969 || TREE_CODE (arg0) == VEC_COND_EXPR
9970 || COMPARISON_CLASS_P (arg0))
9971 {
9972 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9973 arg0, arg1,
9974 /*cond_first_p=*/1);
9975 if (tem != NULL_TREE)
9976 return tem;
9977 }
9978
9979 if (TREE_CODE (arg1) == COND_EXPR
9980 || TREE_CODE (arg1) == VEC_COND_EXPR
9981 || COMPARISON_CLASS_P (arg1))
9982 {
9983 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9984 arg1, arg0,
9985 /*cond_first_p=*/0);
9986 if (tem != NULL_TREE)
9987 return tem;
9988 }
9989 }
9990
9991 switch (code)
9992 {
9993 case MEM_REF:
9994 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9995 if (TREE_CODE (arg0) == ADDR_EXPR
9996 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9997 {
9998 tree iref = TREE_OPERAND (arg0, 0);
9999 return fold_build2 (MEM_REF, type,
10000 TREE_OPERAND (iref, 0),
10001 int_const_binop (PLUS_EXPR, arg1,
10002 TREE_OPERAND (iref, 1)));
10003 }
10004
10005 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10006 if (TREE_CODE (arg0) == ADDR_EXPR
10007 && handled_component_p (TREE_OPERAND (arg0, 0)))
10008 {
10009 tree base;
10010 HOST_WIDE_INT coffset;
10011 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10012 &coffset);
10013 if (!base)
10014 return NULL_TREE;
10015 return fold_build2 (MEM_REF, type,
10016 build_fold_addr_expr (base),
10017 int_const_binop (PLUS_EXPR, arg1,
10018 size_int (coffset)));
10019 }
10020
10021 return NULL_TREE;
10022
10023 case POINTER_PLUS_EXPR:
10024 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10025 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10026 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10027 return fold_convert_loc (loc, type,
10028 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10029 fold_convert_loc (loc, sizetype,
10030 arg1),
10031 fold_convert_loc (loc, sizetype,
10032 arg0)));
10033
10034 return NULL_TREE;
10035
10036 case PLUS_EXPR:
10037 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10038 {
10039 /* X + (X / CST) * -CST is X % CST. */
10040 if (TREE_CODE (arg1) == MULT_EXPR
10041 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10042 && operand_equal_p (arg0,
10043 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10044 {
10045 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10046 tree cst1 = TREE_OPERAND (arg1, 1);
10047 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10048 cst1, cst0);
10049 if (sum && integer_zerop (sum))
10050 return fold_convert_loc (loc, type,
10051 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10052 TREE_TYPE (arg0), arg0,
10053 cst0));
10054 }
10055 }
10056
10057 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10058 one. Make sure the type is not saturating and has the signedness of
10059 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10060 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10061 if ((TREE_CODE (arg0) == MULT_EXPR
10062 || TREE_CODE (arg1) == MULT_EXPR)
10063 && !TYPE_SATURATING (type)
10064 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10065 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10066 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10067 {
10068 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10069 if (tem)
10070 return tem;
10071 }
10072
10073 if (! FLOAT_TYPE_P (type))
10074 {
10075 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10076 with a constant, and the two constants have no bits in common,
10077 we should treat this as a BIT_IOR_EXPR since this may produce more
10078 simplifications. */
10079 if (TREE_CODE (arg0) == BIT_AND_EXPR
10080 && TREE_CODE (arg1) == BIT_AND_EXPR
10081 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10082 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10083 && wi::bit_and (TREE_OPERAND (arg0, 1),
10084 TREE_OPERAND (arg1, 1)) == 0)
10085 {
10086 code = BIT_IOR_EXPR;
10087 goto bit_ior;
10088 }
10089
10090 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10091 (plus (plus (mult) (mult)) (foo)) so that we can
10092 take advantage of the factoring cases below. */
10093 if (ANY_INTEGRAL_TYPE_P (type)
10094 && TYPE_OVERFLOW_WRAPS (type)
10095 && (((TREE_CODE (arg0) == PLUS_EXPR
10096 || TREE_CODE (arg0) == MINUS_EXPR)
10097 && TREE_CODE (arg1) == MULT_EXPR)
10098 || ((TREE_CODE (arg1) == PLUS_EXPR
10099 || TREE_CODE (arg1) == MINUS_EXPR)
10100 && TREE_CODE (arg0) == MULT_EXPR)))
10101 {
10102 tree parg0, parg1, parg, marg;
10103 enum tree_code pcode;
10104
10105 if (TREE_CODE (arg1) == MULT_EXPR)
10106 parg = arg0, marg = arg1;
10107 else
10108 parg = arg1, marg = arg0;
10109 pcode = TREE_CODE (parg);
10110 parg0 = TREE_OPERAND (parg, 0);
10111 parg1 = TREE_OPERAND (parg, 1);
10112 STRIP_NOPS (parg0);
10113 STRIP_NOPS (parg1);
10114
10115 if (TREE_CODE (parg0) == MULT_EXPR
10116 && TREE_CODE (parg1) != MULT_EXPR)
10117 return fold_build2_loc (loc, pcode, type,
10118 fold_build2_loc (loc, PLUS_EXPR, type,
10119 fold_convert_loc (loc, type,
10120 parg0),
10121 fold_convert_loc (loc, type,
10122 marg)),
10123 fold_convert_loc (loc, type, parg1));
10124 if (TREE_CODE (parg0) != MULT_EXPR
10125 && TREE_CODE (parg1) == MULT_EXPR)
10126 return
10127 fold_build2_loc (loc, PLUS_EXPR, type,
10128 fold_convert_loc (loc, type, parg0),
10129 fold_build2_loc (loc, pcode, type,
10130 fold_convert_loc (loc, type, marg),
10131 fold_convert_loc (loc, type,
10132 parg1)));
10133 }
10134 }
10135 else
10136 {
10137 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10138 to __complex__ ( x, y ). This is not the same for SNaNs or
10139 if signed zeros are involved. */
10140 if (!HONOR_SNANS (element_mode (arg0))
10141 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10142 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10143 {
10144 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10145 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10146 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10147 bool arg0rz = false, arg0iz = false;
10148 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10149 || (arg0i && (arg0iz = real_zerop (arg0i))))
10150 {
10151 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10152 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10153 if (arg0rz && arg1i && real_zerop (arg1i))
10154 {
10155 tree rp = arg1r ? arg1r
10156 : build1 (REALPART_EXPR, rtype, arg1);
10157 tree ip = arg0i ? arg0i
10158 : build1 (IMAGPART_EXPR, rtype, arg0);
10159 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10160 }
10161 else if (arg0iz && arg1r && real_zerop (arg1r))
10162 {
10163 tree rp = arg0r ? arg0r
10164 : build1 (REALPART_EXPR, rtype, arg0);
10165 tree ip = arg1i ? arg1i
10166 : build1 (IMAGPART_EXPR, rtype, arg1);
10167 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10168 }
10169 }
10170 }
10171
10172 if (flag_unsafe_math_optimizations
10173 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10174 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10175 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10176 return tem;
10177
10178 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10179 We associate floats only if the user has specified
10180 -fassociative-math. */
10181 if (flag_associative_math
10182 && TREE_CODE (arg1) == PLUS_EXPR
10183 && TREE_CODE (arg0) != MULT_EXPR)
10184 {
10185 tree tree10 = TREE_OPERAND (arg1, 0);
10186 tree tree11 = TREE_OPERAND (arg1, 1);
10187 if (TREE_CODE (tree11) == MULT_EXPR
10188 && TREE_CODE (tree10) == MULT_EXPR)
10189 {
10190 tree tree0;
10191 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10192 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10193 }
10194 }
10195 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10196 We associate floats only if the user has specified
10197 -fassociative-math. */
10198 if (flag_associative_math
10199 && TREE_CODE (arg0) == PLUS_EXPR
10200 && TREE_CODE (arg1) != MULT_EXPR)
10201 {
10202 tree tree00 = TREE_OPERAND (arg0, 0);
10203 tree tree01 = TREE_OPERAND (arg0, 1);
10204 if (TREE_CODE (tree01) == MULT_EXPR
10205 && TREE_CODE (tree00) == MULT_EXPR)
10206 {
10207 tree tree0;
10208 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10209 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10210 }
10211 }
10212 }
10213
10214 bit_rotate:
10215 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10216 is a rotate of A by C1 bits. */
10217 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10218 is a rotate of A by B bits. */
10219 {
10220 enum tree_code code0, code1;
10221 tree rtype;
10222 code0 = TREE_CODE (arg0);
10223 code1 = TREE_CODE (arg1);
10224 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10225 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10226 && operand_equal_p (TREE_OPERAND (arg0, 0),
10227 TREE_OPERAND (arg1, 0), 0)
10228 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10229 TYPE_UNSIGNED (rtype))
10230 /* Only create rotates in complete modes. Other cases are not
10231 expanded properly. */
10232 && (element_precision (rtype)
10233 == element_precision (TYPE_MODE (rtype))))
10234 {
10235 tree tree01, tree11;
10236 enum tree_code code01, code11;
10237
10238 tree01 = TREE_OPERAND (arg0, 1);
10239 tree11 = TREE_OPERAND (arg1, 1);
10240 STRIP_NOPS (tree01);
10241 STRIP_NOPS (tree11);
10242 code01 = TREE_CODE (tree01);
10243 code11 = TREE_CODE (tree11);
10244 if (code01 == INTEGER_CST
10245 && code11 == INTEGER_CST
10246 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10247 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10248 {
10249 tem = build2_loc (loc, LROTATE_EXPR,
10250 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10251 TREE_OPERAND (arg0, 0),
10252 code0 == LSHIFT_EXPR ? tree01 : tree11);
10253 return fold_convert_loc (loc, type, tem);
10254 }
10255 else if (code11 == MINUS_EXPR)
10256 {
10257 tree tree110, tree111;
10258 tree110 = TREE_OPERAND (tree11, 0);
10259 tree111 = TREE_OPERAND (tree11, 1);
10260 STRIP_NOPS (tree110);
10261 STRIP_NOPS (tree111);
10262 if (TREE_CODE (tree110) == INTEGER_CST
10263 && 0 == compare_tree_int (tree110,
10264 element_precision
10265 (TREE_TYPE (TREE_OPERAND
10266 (arg0, 0))))
10267 && operand_equal_p (tree01, tree111, 0))
10268 return
10269 fold_convert_loc (loc, type,
10270 build2 ((code0 == LSHIFT_EXPR
10271 ? LROTATE_EXPR
10272 : RROTATE_EXPR),
10273 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10274 TREE_OPERAND (arg0, 0), tree01));
10275 }
10276 else if (code01 == MINUS_EXPR)
10277 {
10278 tree tree010, tree011;
10279 tree010 = TREE_OPERAND (tree01, 0);
10280 tree011 = TREE_OPERAND (tree01, 1);
10281 STRIP_NOPS (tree010);
10282 STRIP_NOPS (tree011);
10283 if (TREE_CODE (tree010) == INTEGER_CST
10284 && 0 == compare_tree_int (tree010,
10285 element_precision
10286 (TREE_TYPE (TREE_OPERAND
10287 (arg0, 0))))
10288 && operand_equal_p (tree11, tree011, 0))
10289 return fold_convert_loc
10290 (loc, type,
10291 build2 ((code0 != LSHIFT_EXPR
10292 ? LROTATE_EXPR
10293 : RROTATE_EXPR),
10294 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10295 TREE_OPERAND (arg0, 0), tree11));
10296 }
10297 }
10298 }
10299
10300 associate:
10301 /* In most languages, we can't associate operations on floats through
10302 parentheses. Rather than remember where the parentheses were, we
10303 don't associate floats at all, unless the user has specified
10304 -fassociative-math.
10305 And, we need to make sure type is not saturating. */
10306
10307 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10308 && !TYPE_SATURATING (type))
10309 {
10310 tree var0, con0, lit0, minus_lit0;
10311 tree var1, con1, lit1, minus_lit1;
10312 tree atype = type;
10313 bool ok = true;
10314
10315 /* Split both trees into variables, constants, and literals. Then
10316 associate each group together, the constants with literals,
10317 then the result with variables. This increases the chances of
10318 literals being recombined later and of generating relocatable
10319 expressions for the sum of a constant and literal. */
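      /* For example, (x + 3) + (y + 5) splits into variables x, y and
	 literals 3, 5, and recombines as (x + y) + 8.  */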
10320 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10321 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10322 code == MINUS_EXPR);
10323
10324 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10325 if (code == MINUS_EXPR)
10326 code = PLUS_EXPR;
10327
10328 /* With undefined overflow prefer doing association in a type
10329 which wraps on overflow, if that is one of the operand types. */
10330 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10331 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10332 {
10333 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10334 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10335 atype = TREE_TYPE (arg0);
10336 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10337 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10338 atype = TREE_TYPE (arg1);
10339 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10340 }
10341
10342 /* With undefined overflow we can only associate constants with one
10343 variable, and constants whose association doesn't overflow. */
10344 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10345 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10346 {
10347 if (var0 && var1)
10348 {
10349 tree tmp0 = var0;
10350 tree tmp1 = var1;
10351
10352 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10353 tmp0 = TREE_OPERAND (tmp0, 0);
10354 if (CONVERT_EXPR_P (tmp0)
10355 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10356 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10357 <= TYPE_PRECISION (atype)))
10358 tmp0 = TREE_OPERAND (tmp0, 0);
10359 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10360 tmp1 = TREE_OPERAND (tmp1, 0);
10361 if (CONVERT_EXPR_P (tmp1)
10362 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10363 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10364 <= TYPE_PRECISION (atype)))
10365 tmp1 = TREE_OPERAND (tmp1, 0);
10366 /* The only case we can still associate with two variables
10367 is if they are the same, modulo negation and bit-pattern
10368 preserving conversions. */
10369 if (!operand_equal_p (tmp0, tmp1, 0))
10370 ok = false;
10371 }
10372 }
10373
10374 /* Only do something if we found more than two objects. Otherwise,
10375 nothing has changed and we risk infinite recursion. */
10376 if (ok
10377 && (2 < ((var0 != 0) + (var1 != 0)
10378 + (con0 != 0) + (con1 != 0)
10379 + (lit0 != 0) + (lit1 != 0)
10380 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10381 {
10382 bool any_overflows = false;
10383 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10384 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10385 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10386 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10387 var0 = associate_trees (loc, var0, var1, code, atype);
10388 con0 = associate_trees (loc, con0, con1, code, atype);
10389 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10390 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10391 code, atype);
10392
10393 /* Preserve the MINUS_EXPR if the negative part of the literal is
10394 greater than the positive part. Otherwise, the multiplicative
10395 folding code (i.e. extract_muldiv) may be fooled when unsigned
10396 constants are subtracted, like in the following
10397 example: ((X*2 + 4) - 8U)/2. */
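	  /* Here 4 - 8U would wrap to a huge unsigned constant; keeping
	     the subtraction as (X*2 + 4) - 8U -> X*2 - 4U avoids that.  */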
10398 if (minus_lit0 && lit0)
10399 {
10400 if (TREE_CODE (lit0) == INTEGER_CST
10401 && TREE_CODE (minus_lit0) == INTEGER_CST
10402 && tree_int_cst_lt (lit0, minus_lit0))
10403 {
10404 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10405 MINUS_EXPR, atype);
10406 lit0 = 0;
10407 }
10408 else
10409 {
10410 lit0 = associate_trees (loc, lit0, minus_lit0,
10411 MINUS_EXPR, atype);
10412 minus_lit0 = 0;
10413 }
10414 }
10415
10416 /* Don't introduce overflows through reassociation. */
10417 if (!any_overflows
10418 && ((lit0 && TREE_OVERFLOW_P (lit0))
10419 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10420 return NULL_TREE;
10421
10422 if (minus_lit0)
10423 {
10424 if (con0 == 0)
10425 return
10426 fold_convert_loc (loc, type,
10427 associate_trees (loc, var0, minus_lit0,
10428 MINUS_EXPR, atype));
10429 else
10430 {
10431 con0 = associate_trees (loc, con0, minus_lit0,
10432 MINUS_EXPR, atype);
10433 return
10434 fold_convert_loc (loc, type,
10435 associate_trees (loc, var0, con0,
10436 PLUS_EXPR, atype));
10437 }
10438 }
10439
10440 con0 = associate_trees (loc, con0, lit0, code, atype);
10441 return
10442 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10443 code, atype));
10444 }
10445 }
10446
10447 return NULL_TREE;
10448
10449 case MINUS_EXPR:
10450 /* Pointer simplifications for subtraction, simple reassociations. */
10451 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10452 {
10453 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10454 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10455 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10456 {
10457 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10458 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10459 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10460 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10461 return fold_build2_loc (loc, PLUS_EXPR, type,
10462 fold_build2_loc (loc, MINUS_EXPR, type,
10463 arg00, arg10),
10464 fold_build2_loc (loc, MINUS_EXPR, type,
10465 arg01, arg11));
10466 }
10467 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10468 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10469 {
10470 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10471 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10472 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10473 fold_convert_loc (loc, type, arg1));
10474 if (tmp)
10475 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10476 }
10477 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10478 simplifies. */
10479 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10480 {
10481 tree arg10 = fold_convert_loc (loc, type,
10482 TREE_OPERAND (arg1, 0));
10483 tree arg11 = fold_convert_loc (loc, type,
10484 TREE_OPERAND (arg1, 1));
10485 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10486 fold_convert_loc (loc, type, arg0),
10487 arg10);
10488 if (tmp)
10489 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10490 }
10491 }
10492 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10493 if (TREE_CODE (arg0) == NEGATE_EXPR
10494 && negate_expr_p (arg1)
10495 && reorder_operands_p (arg0, arg1))
10496 return fold_build2_loc (loc, MINUS_EXPR, type,
10497 fold_convert_loc (loc, type,
10498 negate_expr (arg1)),
10499 fold_convert_loc (loc, type,
10500 TREE_OPERAND (arg0, 0)));
10501
10502 /* X - (X / Y) * Y is X % Y. */
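      /* For example, 7 - (7 / 3) * 3 == 7 - 6 == 1 == 7 % 3; the identity
	 holds for truncating division with operands of either sign.  */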
10503 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10504 && TREE_CODE (arg1) == MULT_EXPR
10505 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10506 && operand_equal_p (arg0,
10507 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10508 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10509 TREE_OPERAND (arg1, 1), 0))
10510 return
10511 fold_convert_loc (loc, type,
10512 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10513 arg0, TREE_OPERAND (arg1, 1)));
10514
10515 if (! FLOAT_TYPE_P (type))
10516 {
10517 /* Fold A - (A & B) into ~B & A. */
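	  /* A & B only has bits that are also set in A, so the subtraction
	     just clears those bits: e.g. 13 - (13 & 6) == 9 == ~6 & 13.  */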
10518 if (!TREE_SIDE_EFFECTS (arg0)
10519 && TREE_CODE (arg1) == BIT_AND_EXPR)
10520 {
10521 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10522 {
10523 tree arg10 = fold_convert_loc (loc, type,
10524 TREE_OPERAND (arg1, 0));
10525 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10526 fold_build1_loc (loc, BIT_NOT_EXPR,
10527 type, arg10),
10528 fold_convert_loc (loc, type, arg0));
10529 }
10530 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10531 {
10532 tree arg11 = fold_convert_loc (loc,
10533 type, TREE_OPERAND (arg1, 1));
10534 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10535 fold_build1_loc (loc, BIT_NOT_EXPR,
10536 type, arg11),
10537 fold_convert_loc (loc, type, arg0));
10538 }
10539 }
10540
10541 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10542 any power of 2 minus 1. */
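	  /* E.g. with B == 7 and A == 13: (13 & ~7) - (13 & 7) == 8 - 5 == 3,
	     and (13 ^ 7) - 7 == 10 - 7 == 3.  */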
10543 if (TREE_CODE (arg0) == BIT_AND_EXPR
10544 && TREE_CODE (arg1) == BIT_AND_EXPR
10545 && operand_equal_p (TREE_OPERAND (arg0, 0),
10546 TREE_OPERAND (arg1, 0), 0))
10547 {
10548 tree mask0 = TREE_OPERAND (arg0, 1);
10549 tree mask1 = TREE_OPERAND (arg1, 1);
10550 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10551
10552 if (operand_equal_p (tem, mask1, 0))
10553 {
10554 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10555 TREE_OPERAND (arg0, 0), mask1);
10556 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10557 }
10558 }
10559 }
10560
10561 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10562 __complex__ ( x, -y ). This is not the same for SNaNs or if
10563 signed zeros are involved. */
10564 if (!HONOR_SNANS (element_mode (arg0))
10565 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10566 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10567 {
10568 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10569 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10570 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10571 bool arg0rz = false, arg0iz = false;
10572 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10573 || (arg0i && (arg0iz = real_zerop (arg0i))))
10574 {
10575 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10576 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10577 if (arg0rz && arg1i && real_zerop (arg1i))
10578 {
10579 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10580 arg1r ? arg1r
10581 : build1 (REALPART_EXPR, rtype, arg1));
10582 tree ip = arg0i ? arg0i
10583 : build1 (IMAGPART_EXPR, rtype, arg0);
10584 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10585 }
10586 else if (arg0iz && arg1r && real_zerop (arg1r))
10587 {
10588 tree rp = arg0r ? arg0r
10589 : build1 (REALPART_EXPR, rtype, arg0);
10590 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10591 arg1i ? arg1i
10592 : build1 (IMAGPART_EXPR, rtype, arg1));
10593 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10594 }
10595 }
10596 }
10597
10598 /* A - B -> A + (-B) if B is easily negatable. */
10599 if (negate_expr_p (arg1)
10600 && !TYPE_OVERFLOW_SANITIZED (type)
10601 && ((FLOAT_TYPE_P (type)
10602 /* Avoid this transformation if B is a positive REAL_CST. */
10603 && (TREE_CODE (arg1) != REAL_CST
10604 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10605 || INTEGRAL_TYPE_P (type)))
10606 return fold_build2_loc (loc, PLUS_EXPR, type,
10607 fold_convert_loc (loc, type, arg0),
10608 fold_convert_loc (loc, type,
10609 negate_expr (arg1)));
10610
10611 /* Try folding difference of addresses. */
10612 {
10613 HOST_WIDE_INT diff;
10614
10615 if ((TREE_CODE (arg0) == ADDR_EXPR
10616 || TREE_CODE (arg1) == ADDR_EXPR)
10617 && ptr_difference_const (arg0, arg1, &diff))
10618 return build_int_cst_type (type, diff);
10619 }
10620
10621 /* Fold &a[i] - &a[j] to i-j. */
10622 if (TREE_CODE (arg0) == ADDR_EXPR
10623 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10624 && TREE_CODE (arg1) == ADDR_EXPR
10625 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10626 {
10627 tree tem = fold_addr_of_array_ref_difference (loc, type,
10628 TREE_OPERAND (arg0, 0),
10629 TREE_OPERAND (arg1, 0));
10630 if (tem)
10631 return tem;
10632 }
10633
10634 if (FLOAT_TYPE_P (type)
10635 && flag_unsafe_math_optimizations
10636 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10637 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10638 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10639 return tem;
10640
10641 /* Handle (A1 * C1) - (A2 * C2) where A1 and A2, or C1 and C2, are the
10642 same, or where one of them is 1. Make sure the type is not saturating
10643 and has the signedness of the stripped operands, as fold_plusminus_mult_expr will re-associate.
10644 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10645 if ((TREE_CODE (arg0) == MULT_EXPR
10646 || TREE_CODE (arg1) == MULT_EXPR)
10647 && !TYPE_SATURATING (type)
10648 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10649 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10650 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10651 {
10652 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10653 if (tem)
10654 return tem;
10655 }
10656
10657 goto associate;
10658
10659 case MULT_EXPR:
10660 /* (-A) * (-B) -> A * B */
10661 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10662 return fold_build2_loc (loc, MULT_EXPR, type,
10663 fold_convert_loc (loc, type,
10664 TREE_OPERAND (arg0, 0)),
10665 fold_convert_loc (loc, type,
10666 negate_expr (arg1)));
10667 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10668 return fold_build2_loc (loc, MULT_EXPR, type,
10669 fold_convert_loc (loc, type,
10670 negate_expr (arg0)),
10671 fold_convert_loc (loc, type,
10672 TREE_OPERAND (arg1, 0)));
10673
10674 if (! FLOAT_TYPE_P (type))
10675 {
10676 /* Transform x * -C into -x * C if x is easily negatable. */
10677 if (TREE_CODE (arg1) == INTEGER_CST
10678 && tree_int_cst_sgn (arg1) == -1
10679 && negate_expr_p (arg0)
10680 && (tem = negate_expr (arg1)) != arg1
10681 && !TREE_OVERFLOW (tem))
10682 return fold_build2_loc (loc, MULT_EXPR, type,
10683 fold_convert_loc (loc, type,
10684 negate_expr (arg0)),
10685 tem);
10686
10687 /* (a * (1 << b)) is (a << b) */
10688 if (TREE_CODE (arg1) == LSHIFT_EXPR
10689 && integer_onep (TREE_OPERAND (arg1, 0)))
10690 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10691 TREE_OPERAND (arg1, 1));
10692 if (TREE_CODE (arg0) == LSHIFT_EXPR
10693 && integer_onep (TREE_OPERAND (arg0, 0)))
10694 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10695 TREE_OPERAND (arg0, 1));
10696
10697 /* (A + A) * C -> A * 2 * C */
10698 if (TREE_CODE (arg0) == PLUS_EXPR
10699 && TREE_CODE (arg1) == INTEGER_CST
10700 && operand_equal_p (TREE_OPERAND (arg0, 0),
10701 TREE_OPERAND (arg0, 1), 0))
10702 return fold_build2_loc (loc, MULT_EXPR, type,
10703 omit_one_operand_loc (loc, type,
10704 TREE_OPERAND (arg0, 0),
10705 TREE_OPERAND (arg0, 1)),
10706 fold_build2_loc (loc, MULT_EXPR, type,
10707 build_int_cst (type, 2) , arg1));
10708
10709 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10710 sign-changing only. */
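	  /* E.g. ((unsigned) (x /[ex] 4)) * 4 folds to (unsigned) x; the
	     exact division asserts that x is a multiple of 4.  */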
10711 if (TREE_CODE (arg1) == INTEGER_CST
10712 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10713 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10714 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10715
10716 strict_overflow_p = false;
10717 if (TREE_CODE (arg1) == INTEGER_CST
10718 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10719 &strict_overflow_p)))
10720 {
10721 if (strict_overflow_p)
10722 fold_overflow_warning (("assuming signed overflow does not "
10723 "occur when simplifying "
10724 "multiplication"),
10725 WARN_STRICT_OVERFLOW_MISC);
10726 return fold_convert_loc (loc, type, tem);
10727 }
10728
10729 /* Optimize z * conj(z) for integer complex numbers. */
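	  /* (a + bi) * (a - bi) == a*a + b*b, with a zero imaginary part.  */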
10730 if (TREE_CODE (arg0) == CONJ_EXPR
10731 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10732 return fold_mult_zconjz (loc, type, arg1);
10733 if (TREE_CODE (arg1) == CONJ_EXPR
10734 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10735 return fold_mult_zconjz (loc, type, arg0);
10736 }
10737 else
10738 {
10739 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10740 the result for floating-point types due to rounding, so it is applied
10741 only if -fassociative-math was specified. */
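	  /* E.g. (2.0/x) * 3.0 becomes 6.0/x; the constant multiplication
	     is folded away.  */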
10742 if (flag_associative_math
10743 && TREE_CODE (arg0) == RDIV_EXPR
10744 && TREE_CODE (arg1) == REAL_CST
10745 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10746 {
10747 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10748 arg1);
10749 if (tem)
10750 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10751 TREE_OPERAND (arg0, 1));
10752 }
10753
10754 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10755 if (operand_equal_p (arg0, arg1, 0))
10756 {
10757 tree tem = fold_strip_sign_ops (arg0);
10758 if (tem != NULL_TREE)
10759 {
10760 tem = fold_convert_loc (loc, type, tem);
10761 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10762 }
10763 }
10764
10765 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10766 This is not the same for NaNs or if signed zeros are
10767 involved. */
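	  /* Multiplying by I rotates the complex value a quarter turn:
	     (a + bi) * I == -b + ai, and (a + bi) * -I == b - ai.  */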
10768 if (!HONOR_NANS (arg0)
10769 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10770 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10771 && TREE_CODE (arg1) == COMPLEX_CST
10772 && real_zerop (TREE_REALPART (arg1)))
10773 {
10774 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10775 if (real_onep (TREE_IMAGPART (arg1)))
10776 return
10777 fold_build2_loc (loc, COMPLEX_EXPR, type,
10778 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10779 rtype, arg0)),
10780 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10781 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10782 return
10783 fold_build2_loc (loc, COMPLEX_EXPR, type,
10784 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10785 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10786 rtype, arg0)));
10787 }
10788
10789 /* Optimize z * conj(z) for floating point complex numbers.
10790 Guarded by flag_unsafe_math_optimizations as non-finite
10791 imaginary components don't produce scalar results. */
10792 if (flag_unsafe_math_optimizations
10793 && TREE_CODE (arg0) == CONJ_EXPR
10794 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10795 return fold_mult_zconjz (loc, type, arg1);
10796 if (flag_unsafe_math_optimizations
10797 && TREE_CODE (arg1) == CONJ_EXPR
10798 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10799 return fold_mult_zconjz (loc, type, arg0);
10800
10801 if (flag_unsafe_math_optimizations)
10802 {
10803 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10804 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10805
10806 /* Optimizations of root(...)*root(...). */
10807 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10808 {
10809 tree rootfn, arg;
10810 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10811 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10812
10813 /* Optimize sqrt(x)*sqrt(x) as x. */
10814 if (BUILTIN_SQRT_P (fcode0)
10815 && operand_equal_p (arg00, arg10, 0)
10816 && ! HONOR_SNANS (element_mode (type)))
10817 return arg00;
10818
10819 /* Optimize root(x)*root(y) as root(x*y). */
10820 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10821 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10822 return build_call_expr_loc (loc, rootfn, 1, arg);
10823 }
10824
10825 /* Optimize expN(x)*expN(y) as expN(x+y). */
10826 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10827 {
10828 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10829 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10830 CALL_EXPR_ARG (arg0, 0),
10831 CALL_EXPR_ARG (arg1, 0));
10832 return build_call_expr_loc (loc, expfn, 1, arg);
10833 }
10834
10835 /* Optimizations of pow(...)*pow(...). */
10836 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10837 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10838 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10839 {
10840 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10841 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10842 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10843 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10844
10845 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10846 if (operand_equal_p (arg01, arg11, 0))
10847 {
10848 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10849 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10850 arg00, arg10);
10851 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10852 }
10853
10854 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10855 if (operand_equal_p (arg00, arg10, 0))
10856 {
10857 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10858 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10859 arg01, arg11);
10860 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10861 }
10862 }
10863
10864 /* Optimize tan(x)*cos(x) as sin(x). */
10865 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10866 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10867 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10868 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10869 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10870 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10871 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10872 CALL_EXPR_ARG (arg1, 0), 0))
10873 {
10874 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10875
10876 if (sinfn != NULL_TREE)
10877 return build_call_expr_loc (loc, sinfn, 1,
10878 CALL_EXPR_ARG (arg0, 0));
10879 }
10880
10881 /* Optimize x*pow(x,c) as pow(x,c+1). */
10882 if (fcode1 == BUILT_IN_POW
10883 || fcode1 == BUILT_IN_POWF
10884 || fcode1 == BUILT_IN_POWL)
10885 {
10886 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10887 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10888 if (TREE_CODE (arg11) == REAL_CST
10889 && !TREE_OVERFLOW (arg11)
10890 && operand_equal_p (arg0, arg10, 0))
10891 {
10892 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10893 REAL_VALUE_TYPE c;
10894 tree arg;
10895
10896 c = TREE_REAL_CST (arg11);
10897 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10898 arg = build_real (type, c);
10899 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10900 }
10901 }
10902
10903 /* Optimize pow(x,c)*x as pow(x,c+1). */
10904 if (fcode0 == BUILT_IN_POW
10905 || fcode0 == BUILT_IN_POWF
10906 || fcode0 == BUILT_IN_POWL)
10907 {
10908 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10909 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10910 if (TREE_CODE (arg01) == REAL_CST
10911 && !TREE_OVERFLOW (arg01)
10912 && operand_equal_p (arg1, arg00, 0))
10913 {
10914 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10915 REAL_VALUE_TYPE c;
10916 tree arg;
10917
10918 c = TREE_REAL_CST (arg01);
10919 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10920 arg = build_real (type, c);
10921 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10922 }
10923 }
10924
10925 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10926 if (!in_gimple_form
10927 && optimize
10928 && operand_equal_p (arg0, arg1, 0))
10929 {
10930 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10931
10932 if (powfn)
10933 {
10934 tree arg = build_real (type, dconst2);
10935 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10936 }
10937 }
10938 }
10939 }
10940 goto associate;
10941
10942 case BIT_IOR_EXPR:
10943 bit_ior:
10944 /* ~X | X is -1. */
10945 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10946 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10947 {
10948 t1 = build_zero_cst (type);
10949 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10950 return omit_one_operand_loc (loc, type, t1, arg1);
10951 }
10952
10953 /* X | ~X is -1. */
10954 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10955 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10956 {
10957 t1 = build_zero_cst (type);
10958 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10959 return omit_one_operand_loc (loc, type, t1, arg0);
10960 }
10961
10962 /* Canonicalize (X & C1) | C2. */
10963 if (TREE_CODE (arg0) == BIT_AND_EXPR
10964 && TREE_CODE (arg1) == INTEGER_CST
10965 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10966 {
10967 int width = TYPE_PRECISION (type), w;
10968 wide_int c1 = TREE_OPERAND (arg0, 1);
10969 wide_int c2 = arg1;
10970
10971 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10972 if ((c1 & c2) == c1)
10973 return omit_one_operand_loc (loc, type, arg1,
10974 TREE_OPERAND (arg0, 0));
10975
10976 wide_int msk = wi::mask (width, false,
10977 TYPE_PRECISION (TREE_TYPE (arg1)));
10978
10979 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10980 if (msk.and_not (c1 | c2) == 0)
10981 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10982 TREE_OPERAND (arg0, 0), arg1);
10983
10984 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10985 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10986 mode which allows further optimizations. */
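	  /* E.g. (x & 0x0F) | 0x05 becomes (x & 0x0A) | 0x05, but
	     (x & 0xFF) | 0x0F is left alone because 0xFF is a byte mask.  */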
10987 c1 &= msk;
10988 c2 &= msk;
10989 wide_int c3 = c1.and_not (c2);
10990 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10991 {
10992 wide_int mask = wi::mask (w, false,
10993 TYPE_PRECISION (type));
10994 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10995 {
10996 c3 = mask;
10997 break;
10998 }
10999 }
11000
11001 if (c3 != c1)
11002 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11003 fold_build2_loc (loc, BIT_AND_EXPR, type,
11004 TREE_OPERAND (arg0, 0),
11005 wide_int_to_tree (type,
11006 c3)),
11007 arg1);
11008 }
11009
11010 /* (X & ~Y) | (~X & Y) is X ^ Y */
11011 if (TREE_CODE (arg0) == BIT_AND_EXPR
11012 && TREE_CODE (arg1) == BIT_AND_EXPR)
11013 {
11014 tree a0, a1, l0, l1, n0, n1;
11015
11016 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11017 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11018
11019 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11020 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11021
11022 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11023 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11024
11025 if ((operand_equal_p (n0, a0, 0)
11026 && operand_equal_p (n1, a1, 0))
11027 || (operand_equal_p (n0, a1, 0)
11028 && operand_equal_p (n1, a0, 0)))
11029 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11030 }
11031
11032 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11033 if (t1 != NULL_TREE)
11034 return t1;
11035
11036 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11037
11038 This results in more efficient code for machines without a NAND
11039 instruction. Combine will canonicalize to the first form
11040 which will allow use of NAND instructions provided by the
11041 backend if they exist. */
11042 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11043 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11044 {
11045 return
11046 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11047 build2 (BIT_AND_EXPR, type,
11048 fold_convert_loc (loc, type,
11049 TREE_OPERAND (arg0, 0)),
11050 fold_convert_loc (loc, type,
11051 TREE_OPERAND (arg1, 0))));
11052 }
11053
11054 /* See if this can be simplified into a rotate first. If that
11055 is unsuccessful, continue in the association code. */
11056 goto bit_rotate;
11057
11058 case BIT_XOR_EXPR:
11059 /* ~X ^ X is -1. */
11060 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11061 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11062 {
11063 t1 = build_zero_cst (type);
11064 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11065 return omit_one_operand_loc (loc, type, t1, arg1);
11066 }
11067
11068 /* X ^ ~X is -1. */
11069 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11070 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11071 {
11072 t1 = build_zero_cst (type);
11073 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11074 return omit_one_operand_loc (loc, type, t1, arg0);
11075 }
11076
11077 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11078 with a constant, and the two constants have no bits in common,
11079 we should treat this as a BIT_IOR_EXPR since this may produce more
11080 simplifications. */
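      /* E.g. (x & 0x0F) ^ (y & 0xF0): the masked values share no bits, so
	 XOR and IOR compute the same result.  */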
11081 if (TREE_CODE (arg0) == BIT_AND_EXPR
11082 && TREE_CODE (arg1) == BIT_AND_EXPR
11083 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11084 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11085 && wi::bit_and (TREE_OPERAND (arg0, 1),
11086 TREE_OPERAND (arg1, 1)) == 0)
11087 {
11088 code = BIT_IOR_EXPR;
11089 goto bit_ior;
11090 }
11091
11092 /* (X | Y) ^ X -> Y & ~X. */
11093 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11094 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11095 {
11096 tree t2 = TREE_OPERAND (arg0, 1);
11097 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11098 arg1);
11099 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11100 fold_convert_loc (loc, type, t2),
11101 fold_convert_loc (loc, type, t1));
11102 return t1;
11103 }
11104
11105 /* (Y | X) ^ X -> Y & ~X. */
11106 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11107 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11108 {
11109 tree t2 = TREE_OPERAND (arg0, 0);
11110 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11111 arg1);
11112 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11113 fold_convert_loc (loc, type, t2),
11114 fold_convert_loc (loc, type, t1));
11115 return t1;
11116 }
11117
11118 /* X ^ (X | Y) -> Y & ~X. */
11119 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11120 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11121 {
11122 tree t2 = TREE_OPERAND (arg1, 1);
11123 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11124 arg0);
11125 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11126 fold_convert_loc (loc, type, t2),
11127 fold_convert_loc (loc, type, t1));
11128 return t1;
11129 }
11130
11131 /* X ^ (Y | X) -> Y & ~X. */
11132 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11133 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11134 {
11135 tree t2 = TREE_OPERAND (arg1, 0);
11136 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11137 arg0);
11138 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11139 fold_convert_loc (loc, type, t2),
11140 fold_convert_loc (loc, type, t1));
11141 return t1;
11142 }
11143
11144 /* Convert ~X ^ ~Y to X ^ Y. */
11145 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11146 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11147 return fold_build2_loc (loc, code, type,
11148 fold_convert_loc (loc, type,
11149 TREE_OPERAND (arg0, 0)),
11150 fold_convert_loc (loc, type,
11151 TREE_OPERAND (arg1, 0)));
11152
11153 /* Convert ~X ^ C to X ^ ~C. */
11154 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11155 && TREE_CODE (arg1) == INTEGER_CST)
11156 return fold_build2_loc (loc, code, type,
11157 fold_convert_loc (loc, type,
11158 TREE_OPERAND (arg0, 0)),
11159 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11160
11161 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11162 if (TREE_CODE (arg0) == BIT_AND_EXPR
11163 && INTEGRAL_TYPE_P (type)
11164 && integer_onep (TREE_OPERAND (arg0, 1))
11165 && integer_onep (arg1))
11166 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11167 build_zero_cst (TREE_TYPE (arg0)));
11168
11169 /* Fold (X & Y) ^ Y as ~X & Y. */
11170 if (TREE_CODE (arg0) == BIT_AND_EXPR
11171 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11172 {
11173 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11174 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11175 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11176 fold_convert_loc (loc, type, arg1));
11177 }
11178 /* Fold (X & Y) ^ X as ~Y & X. */
11179 if (TREE_CODE (arg0) == BIT_AND_EXPR
11180 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11181 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11182 {
11183 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11184 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11185 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11186 fold_convert_loc (loc, type, arg1));
11187 }
11188 /* Fold X ^ (X & Y) as X & ~Y. */
11189 if (TREE_CODE (arg1) == BIT_AND_EXPR
11190 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11191 {
11192 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11193 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11194 fold_convert_loc (loc, type, arg0),
11195 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11196 }
11197 /* Fold X ^ (Y & X) as ~Y & X. */
11198 if (TREE_CODE (arg1) == BIT_AND_EXPR
11199 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11200 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11201 {
11202 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11203 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11204 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11205 fold_convert_loc (loc, type, arg0));
11206 }
11207
11208 /* See if this can be simplified into a rotate first. If that
11209 is unsuccessful, continue in the association code. */
11210 goto bit_rotate;
11211
11212 case BIT_AND_EXPR:
11213 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11214 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11215 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11216 || (TREE_CODE (arg0) == EQ_EXPR
11217 && integer_zerop (TREE_OPERAND (arg0, 1))))
11218 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11219 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11220
11221 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11222 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11223 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11224 || (TREE_CODE (arg1) == EQ_EXPR
11225 && integer_zerop (TREE_OPERAND (arg1, 1))))
11226 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11227 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11228
11229 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11230 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11231 && INTEGRAL_TYPE_P (type)
11232 && integer_onep (TREE_OPERAND (arg0, 1))
11233 && integer_onep (arg1))
11234 {
11235 tree tem2;
11236 tem = TREE_OPERAND (arg0, 0);
11237 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11238 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11239 tem, tem2);
11240 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11241 build_zero_cst (TREE_TYPE (tem)));
11242 }
11243 /* Fold ~X & 1 as (X & 1) == 0. */
11244 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11245 && INTEGRAL_TYPE_P (type)
11246 && integer_onep (arg1))
11247 {
11248 tree tem2;
11249 tem = TREE_OPERAND (arg0, 0);
11250 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11251 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11252 tem, tem2);
11253 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11254 build_zero_cst (TREE_TYPE (tem)));
11255 }
11256 /* Fold !X & 1 as X == 0. */
11257 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11258 && integer_onep (arg1))
11259 {
11260 tem = TREE_OPERAND (arg0, 0);
11261 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11262 build_zero_cst (TREE_TYPE (tem)));
11263 }
11264
11265 /* Fold (X ^ Y) & Y as ~X & Y. */
11266 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11267 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11268 {
11269 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11270 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11271 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11272 fold_convert_loc (loc, type, arg1));
11273 }
11274 /* Fold (X ^ Y) & X as ~Y & X. */
11275 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11276 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11277 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11278 {
11279 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11280 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11281 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11282 fold_convert_loc (loc, type, arg1));
11283 }
11284 /* Fold X & (X ^ Y) as X & ~Y. */
11285 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11286 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11287 {
11288 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11289 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11290 fold_convert_loc (loc, type, arg0),
11291 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11292 }
11293 /* Fold X & (Y ^ X) as ~Y & X. */
11294 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11295 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11296 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11297 {
11298 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11299 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11300 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11301 fold_convert_loc (loc, type, arg0));
11302 }
11303
11304 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11305 multiple of 1 << CST. */
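      /* E.g. (x * 8) & -8 folds to x * 8: the product already has its low
	 three bits clear, so the mask is a no-op.  */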
11306 if (TREE_CODE (arg1) == INTEGER_CST)
11307 {
11308 wide_int cst1 = arg1;
11309 wide_int ncst1 = -cst1;
11310 if ((cst1 & ncst1) == ncst1
11311 && multiple_of_p (type, arg0,
11312 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11313 return fold_convert_loc (loc, type, arg0);
11314 }
11315
11316 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11317 bits from CST2. */
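      /* E.g. (x * 4) & 3 folds to 0, and (x * 4) & 7 becomes (x * 4) & 4,
	 since the two low bits of x * 4 are always zero.  */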
11318 if (TREE_CODE (arg1) == INTEGER_CST
11319 && TREE_CODE (arg0) == MULT_EXPR
11320 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11321 {
11322 wide_int warg1 = arg1;
11323 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11324
11325 if (masked == 0)
11326 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11327 arg0, arg1);
11328 else if (masked != warg1)
11329 {
11330 /* Avoid the transform if arg1 is a mask of some
11331 mode which allows further optimizations. */
11332 int pop = wi::popcount (warg1);
11333 if (!(pop >= BITS_PER_UNIT
11334 && exact_log2 (pop) != -1
11335 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11336 return fold_build2_loc (loc, code, type, op0,
11337 wide_int_to_tree (type, masked));
11338 }
11339 }
11340
11341 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11342 ((A & N) + B) & M -> (A + B) & M
11343 Similarly if (N & M) == 0,
11344 ((A | N) + B) & M -> (A + B) & M
11345 and for - instead of + (or unary - instead of +)
11346 and/or ^ instead of |.
11347 If B is constant and (B & M) == 0, fold into A & M. */
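      /* E.g. with M == 0xFF: ((a & 0x1FF) + b) & 0xFF and
	 ((a | 0x100) + b) & 0xFF both become (a + b) & 0xFF.  */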
11348 if (TREE_CODE (arg1) == INTEGER_CST)
11349 {
11350 wide_int cst1 = arg1;
11351 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11352 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11353 && (TREE_CODE (arg0) == PLUS_EXPR
11354 || TREE_CODE (arg0) == MINUS_EXPR
11355 || TREE_CODE (arg0) == NEGATE_EXPR)
11356 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11357 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11358 {
11359 tree pmop[2];
11360 int which = 0;
11361 wide_int cst0;
11362
11363 /* Now we know that arg0 is (C + D) or (C - D) or
11364 -C and arg1 (M) is == (1LL << cst) - 1.
11365 Store C into PMOP[0] and D into PMOP[1]. */
11366 pmop[0] = TREE_OPERAND (arg0, 0);
11367 pmop[1] = NULL;
11368 if (TREE_CODE (arg0) != NEGATE_EXPR)
11369 {
11370 pmop[1] = TREE_OPERAND (arg0, 1);
11371 which = 1;
11372 }
11373
11374 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11375 which = -1;
11376
11377 for (; which >= 0; which--)
11378 switch (TREE_CODE (pmop[which]))
11379 {
11380 case BIT_AND_EXPR:
11381 case BIT_IOR_EXPR:
11382 case BIT_XOR_EXPR:
11383 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11384 != INTEGER_CST)
11385 break;
11386 cst0 = TREE_OPERAND (pmop[which], 1);
11387 cst0 &= cst1;
11388 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11389 {
11390 if (cst0 != cst1)
11391 break;
11392 }
11393 else if (cst0 != 0)
11394 break;
11395 /* If C or D is of the form (A & N) where
11396 (N & M) == M, or of the form (A | N) or
11397 (A ^ N) where (N & M) == 0, replace it with A. */
11398 pmop[which] = TREE_OPERAND (pmop[which], 0);
11399 break;
11400 case INTEGER_CST:
11401 /* If C or D is a constant N where (N & M) == 0, it can be
11402 omitted (assumed 0). */
11403 if ((TREE_CODE (arg0) == PLUS_EXPR
11404 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11405 && (cst1 & pmop[which]) == 0)
11406 pmop[which] = NULL;
11407 break;
11408 default:
11409 break;
11410 }
11411
11412 /* Only build anything new if we optimized one or both arguments
11413 above. */
11414 if (pmop[0] != TREE_OPERAND (arg0, 0)
11415 || (TREE_CODE (arg0) != NEGATE_EXPR
11416 && pmop[1] != TREE_OPERAND (arg0, 1)))
11417 {
11418 tree utype = TREE_TYPE (arg0);
11419 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11420 {
11421 /* Perform the operations in a type that has defined
11422 overflow behavior. */
11423 utype = unsigned_type_for (TREE_TYPE (arg0));
11424 if (pmop[0] != NULL)
11425 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11426 if (pmop[1] != NULL)
11427 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11428 }
11429
11430 if (TREE_CODE (arg0) == NEGATE_EXPR)
11431 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11432 else if (TREE_CODE (arg0) == PLUS_EXPR)
11433 {
11434 if (pmop[0] != NULL && pmop[1] != NULL)
11435 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11436 pmop[0], pmop[1]);
11437 else if (pmop[0] != NULL)
11438 tem = pmop[0];
11439 else if (pmop[1] != NULL)
11440 tem = pmop[1];
11441 else
11442 return build_int_cst (type, 0);
11443 }
11444 else if (pmop[0] == NULL)
11445 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11446 else
11447 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11448 pmop[0], pmop[1]);
11449 /* TEM is now the new binary +, - or unary - replacement. */
11450 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11451 fold_convert_loc (loc, utype, arg1));
11452 return fold_convert_loc (loc, type, tem);
11453 }
11454 }
11455 }
11456
11457 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11458 if (t1 != NULL_TREE)
11459 return t1;
11460 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11461 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11462 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11463 {
11464 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11465
11466 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11467 if (mask == -1)
11468 return
11469 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11470 }
11471
11472 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11473
11474 This results in more efficient code for machines without a NOR
11475 instruction. Combine will canonicalize to the first form
11476 which will allow use of NOR instructions provided by the
11477 backend if they exist. */
11478 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11479 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11480 {
11481 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11482 build2 (BIT_IOR_EXPR, type,
11483 fold_convert_loc (loc, type,
11484 TREE_OPERAND (arg0, 0)),
11485 fold_convert_loc (loc, type,
11486 TREE_OPERAND (arg1, 0))));
11487 }
11488
11489 /* If arg0 is derived from the address of an object or function, we may
11490 be able to fold this expression using the object or function's
11491 alignment. */
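      /* For instance, if arg0 is the address of an object known to be
	 8-byte aligned (modulus 8, residue 0), then addr & 7 folds to 0.  */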
11492 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11493 {
11494 unsigned HOST_WIDE_INT modulus, residue;
11495 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11496
11497 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11498 integer_onep (arg1));
11499
11500 /* This works because modulus is a power of 2. If this weren't the
11501 case, we'd have to replace it by its greatest power-of-2
11502 divisor: modulus & -modulus. */
11503 if (low < modulus)
11504 return build_int_cst (type, residue & low);
11505 }
11506
11507 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11508 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11509 if the new mask might be further optimized. */
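      /* E.g. for a 32-bit unsigned X, (X << 8) & 0xFF00 becomes
	 (X << 8) & 0xFFFF: the low eight bits are already zero, and the
	 0xFFFF mask may fold further, e.g. into a zero extension.  */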
11510 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11511 || TREE_CODE (arg0) == RSHIFT_EXPR)
11512 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11513 && TREE_CODE (arg1) == INTEGER_CST
11514 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11515 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11516 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11517 < TYPE_PRECISION (TREE_TYPE (arg0))))
11518 {
11519 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11520 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11521 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11522 tree shift_type = TREE_TYPE (arg0);
11523
11524 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11525 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11526 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11527 && TYPE_PRECISION (TREE_TYPE (arg0))
11528 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11529 {
11530 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11531 tree arg00 = TREE_OPERAND (arg0, 0);
11532 /* See if more bits can be proven as zero because of
11533 zero extension. */
11534 if (TREE_CODE (arg00) == NOP_EXPR
11535 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11536 {
11537 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11538 if (TYPE_PRECISION (inner_type)
11539 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11540 && TYPE_PRECISION (inner_type) < prec)
11541 {
11542 prec = TYPE_PRECISION (inner_type);
11543 /* See if we can shorten the right shift. */
11544 if (shiftc < prec)
11545 shift_type = inner_type;
11546 /* Otherwise X >> C1 is all zeros, so we'll optimize
11547 it into (X, 0) later on by making sure zerobits
11548 is all ones. */
11549 }
11550 }
11551 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11552 if (shiftc < prec)
11553 {
11554 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11555 zerobits <<= prec - shiftc;
11556 }
11557 /* For an arithmetic shift, if the sign bit could be set, zerobits
11558 can actually contain sign bits, so no transformation is
11559 possible unless MASK masks them all away. In that
11560 case the shift needs to be converted into a logical shift. */
11561 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11562 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11563 {
11564 if ((mask & zerobits) == 0)
11565 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11566 else
11567 zerobits = 0;
11568 }
11569 }
11570
11571 /* ((X << 16) & 0xff00) is (X, 0). */
11572 if ((mask & zerobits) == mask)
11573 return omit_one_operand_loc (loc, type,
11574 build_int_cst (type, 0), arg0);
11575
11576 newmask = mask | zerobits;
11577 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11578 {
11579 /* Only do the transformation if NEWMASK is some integer
11580 mode's mask. */
11581 for (prec = BITS_PER_UNIT;
11582 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11583 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11584 break;
11585 if (prec < HOST_BITS_PER_WIDE_INT
11586 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11587 {
11588 tree newmaskt;
11589
11590 if (shift_type != TREE_TYPE (arg0))
11591 {
11592 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11593 fold_convert_loc (loc, shift_type,
11594 TREE_OPERAND (arg0, 0)),
11595 TREE_OPERAND (arg0, 1));
11596 tem = fold_convert_loc (loc, type, tem);
11597 }
11598 else
11599 tem = op0;
11600 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11601 if (!tree_int_cst_equal (newmaskt, arg1))
11602 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11603 }
11604 }
11605 }
11606
11607 goto associate;
11608
11609 case RDIV_EXPR:
11610 /* Don't touch a floating-point divide by zero unless the mode
11611 of the constant can represent infinity. */
11612 if (TREE_CODE (arg1) == REAL_CST
11613 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11614 && real_zerop (arg1))
11615 return NULL_TREE;
11616
11617 /* (-A) / (-B) -> A / B */
11618 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11619 return fold_build2_loc (loc, RDIV_EXPR, type,
11620 TREE_OPERAND (arg0, 0),
11621 negate_expr (arg1));
11622 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11623 return fold_build2_loc (loc, RDIV_EXPR, type,
11624 negate_expr (arg0),
11625 TREE_OPERAND (arg1, 0));
11626
11627 /* Convert A/B/C to A/(B*C). */
11628 if (flag_reciprocal_math
11629 && TREE_CODE (arg0) == RDIV_EXPR)
11630 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11631 fold_build2_loc (loc, MULT_EXPR, type,
11632 TREE_OPERAND (arg0, 1), arg1));
11633
11634 /* Convert A/(B/C) to (A/B)*C. */
11635 if (flag_reciprocal_math
11636 && TREE_CODE (arg1) == RDIV_EXPR)
11637 return fold_build2_loc (loc, MULT_EXPR, type,
11638 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11639 TREE_OPERAND (arg1, 0)),
11640 TREE_OPERAND (arg1, 1));
11641
11642 /* Convert C1/(X*C2) into (C1/C2)/X. */
11643 if (flag_reciprocal_math
11644 && TREE_CODE (arg1) == MULT_EXPR
11645 && TREE_CODE (arg0) == REAL_CST
11646 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11647 {
11648 tree tem = const_binop (RDIV_EXPR, arg0,
11649 TREE_OPERAND (arg1, 1));
11650 if (tem)
11651 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11652 TREE_OPERAND (arg1, 0));
11653 }
11654
11655 if (flag_unsafe_math_optimizations)
11656 {
11657 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11658 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11659
11660 /* Optimize sin(x)/cos(x) as tan(x). */
11661 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11662 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11663 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11664 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11665 CALL_EXPR_ARG (arg1, 0), 0))
11666 {
11667 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11668
11669 if (tanfn != NULL_TREE)
11670 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11671 }
11672
11673 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11674 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11675 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11676 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11677 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11678 CALL_EXPR_ARG (arg1, 0), 0))
11679 {
11680 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11681
11682 if (tanfn != NULL_TREE)
11683 {
11684 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11685 CALL_EXPR_ARG (arg0, 0));
11686 return fold_build2_loc (loc, RDIV_EXPR, type,
11687 build_real (type, dconst1), tmp);
11688 }
11689 }
11690
11691 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11692 NaNs or Infinities. */
11693 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11694 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11695 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11696 {
11697 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11698 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11699
11700 if (! HONOR_NANS (arg00)
11701 && ! HONOR_INFINITIES (element_mode (arg00))
11702 && operand_equal_p (arg00, arg01, 0))
11703 {
11704 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11705
11706 if (cosfn != NULL_TREE)
11707 return build_call_expr_loc (loc, cosfn, 1, arg00);
11708 }
11709 }
11710
11711 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11712 NaNs or Infinities. */
11713 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11714 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11715 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11716 {
11717 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11718 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11719
11720 if (! HONOR_NANS (arg00)
11721 && ! HONOR_INFINITIES (element_mode (arg00))
11722 && operand_equal_p (arg00, arg01, 0))
11723 {
11724 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11725
11726 if (cosfn != NULL_TREE)
11727 {
11728 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11729 return fold_build2_loc (loc, RDIV_EXPR, type,
11730 build_real (type, dconst1),
11731 tmp);
11732 }
11733 }
11734 }
11735
11736 /* Optimize pow(x,c)/x as pow(x,c-1). */
11737 if (fcode0 == BUILT_IN_POW
11738 || fcode0 == BUILT_IN_POWF
11739 || fcode0 == BUILT_IN_POWL)
11740 {
11741 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11742 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11743 if (TREE_CODE (arg01) == REAL_CST
11744 && !TREE_OVERFLOW (arg01)
11745 && operand_equal_p (arg1, arg00, 0))
11746 {
11747 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11748 REAL_VALUE_TYPE c;
11749 tree arg;
11750
11751 c = TREE_REAL_CST (arg01);
11752 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11753 arg = build_real (type, c);
11754 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11755 }
11756 }
11757
11758 /* Optimize a/root(b/c) into a*root(c/b). */
11759 if (BUILTIN_ROOT_P (fcode1))
11760 {
11761 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11762
11763 if (TREE_CODE (rootarg) == RDIV_EXPR)
11764 {
11765 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11766 tree b = TREE_OPERAND (rootarg, 0);
11767 tree c = TREE_OPERAND (rootarg, 1);
11768
11769 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11770
11771 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11772 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11773 }
11774 }
11775
11776 /* Optimize x/expN(y) into x*expN(-y). */
11777 if (BUILTIN_EXPONENT_P (fcode1))
11778 {
11779 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11780 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11781 arg1 = build_call_expr_loc (loc,
11782 expfn, 1,
11783 fold_convert_loc (loc, type, arg));
11784 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11785 }
11786
11787 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11788 if (fcode1 == BUILT_IN_POW
11789 || fcode1 == BUILT_IN_POWF
11790 || fcode1 == BUILT_IN_POWL)
11791 {
11792 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11793 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11794 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11795 tree neg11 = fold_convert_loc (loc, type,
11796 negate_expr (arg11));
11797 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11798 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11799 }
11800 }
11801 return NULL_TREE;
11802
11803 case TRUNC_DIV_EXPR:
11804 /* Optimize (X & (-A)) / A where A is a power of 2,
11805 to X >> log2(A). */
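      /* E.g. (x & -8) / 8 becomes x >> 3; the mask clears the low bits, so
	 truncating division and the arithmetic shift agree even for
	 negative x.  */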
11806 if (TREE_CODE (arg0) == BIT_AND_EXPR
11807 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11808 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11809 {
11810 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11811 arg1, TREE_OPERAND (arg0, 1));
11812 if (sum && integer_zerop (sum))
	{
11813 tree pow2 = build_int_cst (integer_type_node,
11814 wi::exact_log2 (arg1));
11815 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11816 TREE_OPERAND (arg0, 0), pow2);
11817 }
11818 }
11819
11820 /* Fall through */
11821
11822 case FLOOR_DIV_EXPR:
11823 /* Simplify A / (B << N) where A and B are positive and B is
11824 a power of 2, to A >> (N + log2(B)). */
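      /* E.g. a / (4 << n) becomes a >> (n + 2) for unsigned a.  */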
11825 strict_overflow_p = false;
11826 if (TREE_CODE (arg1) == LSHIFT_EXPR
11827 && (TYPE_UNSIGNED (type)
11828 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11829 {
11830 tree sval = TREE_OPERAND (arg1, 0);
11831 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11832 {
11833 tree sh_cnt = TREE_OPERAND (arg1, 1);
11834 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11835 wi::exact_log2 (sval));
11836
11837 if (strict_overflow_p)
11838 fold_overflow_warning (("assuming signed overflow does not "
11839 "occur when simplifying A / (B << N)"),
11840 WARN_STRICT_OVERFLOW_MISC);
11841
11842 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11843 sh_cnt, pow2);
11844 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11845 fold_convert_loc (loc, type, arg0), sh_cnt);
11846 }
11847 }
11848
11849 /* Fall through */
11850
11851 case ROUND_DIV_EXPR:
11852 case CEIL_DIV_EXPR:
11853 case EXACT_DIV_EXPR:
11854 if (integer_zerop (arg1))
11855 return NULL_TREE;
11856
11857 /* Convert -A / -B to A / B when the type is signed and overflow is
11858 undefined. */
11859 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11860 && TREE_CODE (arg0) == NEGATE_EXPR
11861 && negate_expr_p (arg1))
11862 {
11863 if (INTEGRAL_TYPE_P (type))
11864 fold_overflow_warning (("assuming signed overflow does not occur "
11865 "when distributing negation across "
11866 "division"),
11867 WARN_STRICT_OVERFLOW_MISC);
11868 return fold_build2_loc (loc, code, type,
11869 fold_convert_loc (loc, type,
11870 TREE_OPERAND (arg0, 0)),
11871 fold_convert_loc (loc, type,
11872 negate_expr (arg1)));
11873 }
11874 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11875 && TREE_CODE (arg1) == NEGATE_EXPR
11876 && negate_expr_p (arg0))
11877 {
11878 if (INTEGRAL_TYPE_P (type))
11879 fold_overflow_warning (("assuming signed overflow does not occur "
11880 "when distributing negation across "
11881 "division"),
11882 WARN_STRICT_OVERFLOW_MISC);
11883 return fold_build2_loc (loc, code, type,
11884 fold_convert_loc (loc, type,
11885 negate_expr (arg0)),
11886 fold_convert_loc (loc, type,
11887 TREE_OPERAND (arg1, 0)));
11888 }
11889
11890 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11891 operation, EXACT_DIV_EXPR.
11892
11893 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11894 	     At one time others generated faster code, but it's not clear whether
11895 	     they still do after the last round of changes to the DIV code in expmed.c. */
11896 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11897 && multiple_of_p (type, arg0, arg1))
11898 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11899
11900 strict_overflow_p = false;
11901 if (TREE_CODE (arg1) == INTEGER_CST
11902 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11903 &strict_overflow_p)))
11904 {
11905 if (strict_overflow_p)
11906 fold_overflow_warning (("assuming signed overflow does not occur "
11907 "when simplifying division"),
11908 WARN_STRICT_OVERFLOW_MISC);
11909 return fold_convert_loc (loc, type, tem);
11910 }
11911
11912 return NULL_TREE;
11913
11914 case CEIL_MOD_EXPR:
11915 case FLOOR_MOD_EXPR:
11916 case ROUND_MOD_EXPR:
11917 case TRUNC_MOD_EXPR:
11918 /* X % -Y is the same as X % Y. */
11919 if (code == TRUNC_MOD_EXPR
11920 && !TYPE_UNSIGNED (type)
11921 && TREE_CODE (arg1) == NEGATE_EXPR
11922 && !TYPE_OVERFLOW_TRAPS (type))
11923 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11924 fold_convert_loc (loc, type,
11925 TREE_OPERAND (arg1, 0)));
11926
11927 strict_overflow_p = false;
11928 if (TREE_CODE (arg1) == INTEGER_CST
11929 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11930 &strict_overflow_p)))
11931 {
11932 if (strict_overflow_p)
11933 fold_overflow_warning (("assuming signed overflow does not occur "
11934 "when simplifying modulus"),
11935 WARN_STRICT_OVERFLOW_MISC);
11936 return fold_convert_loc (loc, type, tem);
11937 }
11938
11939 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11940 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11941 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11942 && (TYPE_UNSIGNED (type)
11943 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11944 {
11945 tree c = arg1;
11946 /* Also optimize A % (C << N) where C is a power of 2,
11947 to A & ((C << N) - 1). */
11948 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11949 c = TREE_OPERAND (arg1, 0);
11950
11951 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11952 {
11953 tree mask
11954 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11955 build_int_cst (TREE_TYPE (arg1), 1));
11956 if (strict_overflow_p)
11957 fold_overflow_warning (("assuming signed overflow does not "
11958 "occur when simplifying "
11959 "X % (power of two)"),
11960 WARN_STRICT_OVERFLOW_MISC);
11961 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11962 fold_convert_loc (loc, type, arg0),
11963 fold_convert_loc (loc, type, mask));
11964 }
11965 }
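	  /* Worked example (annotation, not part of the original source):
	     for unsigned x,
		 x % 16        ==>   x & 15
		 x % (2 << n)  ==>   x & ((2 << n) - 1)
	     the mask arg1 - 1 has all bits set below the single set bit.  */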
11966
11967 return NULL_TREE;
11968
11969 case LROTATE_EXPR:
11970 case RROTATE_EXPR:
11971 case RSHIFT_EXPR:
11972 case LSHIFT_EXPR:
11973 	 /* Since a negative shift count is not well-defined,
11974 don't try to compute it in the compiler. */
11975 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11976 return NULL_TREE;
11977
11978 prec = element_precision (type);
11979
11980 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11981 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11982 && tree_to_uhwi (arg1) < prec
11983 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11984 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11985 {
11986 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11987 + tree_to_uhwi (arg1));
11988
11989 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11990 being well defined. */
11991 if (low >= prec)
11992 {
11993 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11994 low = low % prec;
11995 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11996 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11997 TREE_OPERAND (arg0, 0));
11998 else
11999 low = prec - 1;
12000 }
12001
12002 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12003 build_int_cst (TREE_TYPE (arg1), low));
12004 }
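	  /* Worked examples (annotation, not part of the original source)
	     on a 32-bit type:
		 (x << 3) << 5         ==>   x << 8
		 ror (ror (x, 5), 6)   ==>   ror (x, 11)
	     and an oversized combined count such as (x << 20) << 20 folds
	     to 0 for left shifts, while rotate counts reduce modulo 32.  */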
12005
12006 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12007 into x & ((unsigned)-1 >> c) for unsigned types. */
12008 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12009 || (TYPE_UNSIGNED (type)
12010 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12011 && tree_fits_uhwi_p (arg1)
12012 && tree_to_uhwi (arg1) < prec
12013 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12014 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12015 {
12016 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12017 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12018 tree lshift;
12019 tree arg00;
12020
12021 if (low0 == low1)
12022 {
12023 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12024
12025 lshift = build_minus_one_cst (type);
12026 lshift = const_binop (code, lshift, arg1);
12027
12028 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12029 }
12030 }
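	  /* Worked example (annotation, not part of the original source):
	     for 32-bit unsigned x,
		 (x >> 4) << 4   ==>   x & 0xfffffff0
		 (x << 4) >> 4   ==>   x & 0x0fffffff
	     the mask is -1 shifted by the same count in the same
	     direction.  */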
12031
12032 /* If we have a rotate of a bit operation with the rotate count and
12033 the second operand of the bit operation both constant,
12034 permute the two operations. */
12035 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12036 && (TREE_CODE (arg0) == BIT_AND_EXPR
12037 || TREE_CODE (arg0) == BIT_IOR_EXPR
12038 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12039 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12040 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12041 fold_build2_loc (loc, code, type,
12042 TREE_OPERAND (arg0, 0), arg1),
12043 fold_build2_loc (loc, code, type,
12044 TREE_OPERAND (arg0, 1), arg1));
12045
12046 	 /* Two consecutive rotates adding up to some integer
12047 multiple of the precision of the type can be ignored. */
12048 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12049 && TREE_CODE (arg0) == RROTATE_EXPR
12050 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12051 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12052 prec) == 0)
12053 return TREE_OPERAND (arg0, 0);
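	  /* Worked example (annotation, not part of the original source):
	     on a 32-bit type, ror (ror (x, 8), 24) rotates by 32 in total,
	     a full multiple of the precision, and folds back to x.  */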
12054
12055 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12056 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12057 if the latter can be further optimized. */
12058 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12059 && TREE_CODE (arg0) == BIT_AND_EXPR
12060 && TREE_CODE (arg1) == INTEGER_CST
12061 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12062 {
12063 tree mask = fold_build2_loc (loc, code, type,
12064 fold_convert_loc (loc, type,
12065 TREE_OPERAND (arg0, 1)),
12066 arg1);
12067 tree shift = fold_build2_loc (loc, code, type,
12068 fold_convert_loc (loc, type,
12069 TREE_OPERAND (arg0, 0)),
12070 arg1);
12071 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12072 if (tem)
12073 return tem;
12074 }
12075
12076 return NULL_TREE;
12077
12078 case MIN_EXPR:
12079 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12080 if (tem)
12081 return tem;
12082 goto associate;
12083
12084 case MAX_EXPR:
12085 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12086 if (tem)
12087 return tem;
12088 goto associate;
12089
12090 case TRUTH_ANDIF_EXPR:
12091 /* Note that the operands of this must be ints
12092 and their values must be 0 or 1.
12093 ("true" is a fixed value perhaps depending on the language.) */
12094 /* If first arg is constant zero, return it. */
12095 if (integer_zerop (arg0))
12096 return fold_convert_loc (loc, type, arg0);
12097 case TRUTH_AND_EXPR:
12098 /* If either arg is constant true, drop it. */
12099 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12100 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12101 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12102 /* Preserve sequence points. */
12103 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12104 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12105 /* If second arg is constant zero, result is zero, but first arg
12106 must be evaluated. */
12107 if (integer_zerop (arg1))
12108 return omit_one_operand_loc (loc, type, arg1, arg0);
12109 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12110 case will be handled here. */
12111 if (integer_zerop (arg0))
12112 return omit_one_operand_loc (loc, type, arg0, arg1);
12113
12114 /* !X && X is always false. */
12115 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12116 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12117 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12118 /* X && !X is always false. */
12119 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12120 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12121 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12122
12123 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12124 means A >= Y && A != MAX, but in this case we know that
12125 A < X <= MAX. */
12126
12127 if (!TREE_SIDE_EFFECTS (arg0)
12128 && !TREE_SIDE_EFFECTS (arg1))
12129 {
12130 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12131 if (tem && !operand_equal_p (tem, arg0, 0))
12132 return fold_build2_loc (loc, code, type, tem, arg1);
12133
12134 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12135 if (tem && !operand_equal_p (tem, arg1, 0))
12136 return fold_build2_loc (loc, code, type, arg0, tem);
12137 }
12138
12139 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12140 != NULL_TREE)
12141 return tem;
12142
12143 return NULL_TREE;
12144
12145 case TRUTH_ORIF_EXPR:
12146 /* Note that the operands of this must be ints
12147 and their values must be 0 or true.
12148 ("true" is a fixed value perhaps depending on the language.) */
12149 /* If first arg is constant true, return it. */
12150 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12151 return fold_convert_loc (loc, type, arg0);
12152 case TRUTH_OR_EXPR:
12153 /* If either arg is constant zero, drop it. */
12154 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12155 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12156 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12157 /* Preserve sequence points. */
12158 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12159 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12160 /* If second arg is constant true, result is true, but we must
12161 evaluate first arg. */
12162 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12163 return omit_one_operand_loc (loc, type, arg1, arg0);
12164 /* Likewise for first arg, but note this only occurs here for
12165 TRUTH_OR_EXPR. */
12166 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12167 return omit_one_operand_loc (loc, type, arg0, arg1);
12168
12169 /* !X || X is always true. */
12170 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12171 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12172 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12173 /* X || !X is always true. */
12174 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12175 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12176 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12177
12178 /* (X && !Y) || (!X && Y) is X ^ Y */
12179 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12180 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12181 {
12182 tree a0, a1, l0, l1, n0, n1;
12183
12184 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12185 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12186
12187 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12188 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12189
12190 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12191 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12192
12193 if ((operand_equal_p (n0, a0, 0)
12194 && operand_equal_p (n1, a1, 0))
12195 || (operand_equal_p (n0, a1, 0)
12196 && operand_equal_p (n1, a0, 0)))
12197 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12198 }
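	  /* Worked example (annotation, not part of the original source):
		 (x && !y) || (!x && y)   ==>   x ^ y
	     the match above accepts either pairing of the negated
	     operands.  */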
12199
12200 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12201 != NULL_TREE)
12202 return tem;
12203
12204 return NULL_TREE;
12205
12206 case TRUTH_XOR_EXPR:
12207 /* If the second arg is constant zero, drop it. */
12208 if (integer_zerop (arg1))
12209 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12210 /* If the second arg is constant true, this is a logical inversion. */
12211 if (integer_onep (arg1))
12212 {
12213 tem = invert_truthvalue_loc (loc, arg0);
12214 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12215 }
12216 /* Identical arguments cancel to zero. */
12217 if (operand_equal_p (arg0, arg1, 0))
12218 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12219
12220 /* !X ^ X is always true. */
12221 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12222 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12223 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12224
12225 /* X ^ !X is always true. */
12226 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12227 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12228 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12229
12230 return NULL_TREE;
12231
12232 case EQ_EXPR:
12233 case NE_EXPR:
12234 STRIP_NOPS (arg0);
12235 STRIP_NOPS (arg1);
12236
12237 tem = fold_comparison (loc, code, type, op0, op1);
12238 if (tem != NULL_TREE)
12239 return tem;
12240
12241 /* bool_var != 0 becomes bool_var. */
12242 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12243 && code == NE_EXPR)
12244 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12245
12246 /* bool_var == 1 becomes bool_var. */
12247 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12248 && code == EQ_EXPR)
12249 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12250
12251 /* bool_var != 1 becomes !bool_var. */
12252 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12253 && code == NE_EXPR)
12254 return fold_convert_loc (loc, type,
12255 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12256 TREE_TYPE (arg0), arg0));
12257
12258 /* bool_var == 0 becomes !bool_var. */
12259 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12260 && code == EQ_EXPR)
12261 return fold_convert_loc (loc, type,
12262 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12263 TREE_TYPE (arg0), arg0));
12264
12265 /* !exp != 0 becomes !exp */
12266 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12267 && code == NE_EXPR)
12268 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12269
12270 /* If this is an equality comparison of the address of two non-weak,
12271 unaliased symbols neither of which are extern (since we do not
12272 have access to attributes for externs), then we know the result. */
12273 if (TREE_CODE (arg0) == ADDR_EXPR
12274 && DECL_P (TREE_OPERAND (arg0, 0))
12275 && TREE_CODE (arg1) == ADDR_EXPR
12276 && DECL_P (TREE_OPERAND (arg1, 0)))
12277 {
12278 int equal;
12279
12280 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12281 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12282 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12283 ->equal_address_to (symtab_node::get_create
12284 (TREE_OPERAND (arg1, 0)));
12285 else
12286 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12287 if (equal != 2)
12288 return constant_boolean_node (equal
12289 ? code == EQ_EXPR : code != EQ_EXPR,
12290 type);
12291 }
12292
12293 /* Similarly for a NEGATE_EXPR. */
12294 if (TREE_CODE (arg0) == NEGATE_EXPR
12295 && TREE_CODE (arg1) == INTEGER_CST
12296 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12297 arg1)))
12298 && TREE_CODE (tem) == INTEGER_CST
12299 && !TREE_OVERFLOW (tem))
12300 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12301
12302 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12303 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12304 && TREE_CODE (arg1) == INTEGER_CST
12305 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12306 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12307 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12308 fold_convert_loc (loc,
12309 TREE_TYPE (arg0),
12310 arg1),
12311 TREE_OPERAND (arg0, 1)));
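	  /* Worked example (annotation, not part of the original source):
		 (x ^ 5) == 3   ==>   x == 6
	     XOR-ing both sides with 5 cancels the constant on the left,
	     and 5 ^ 3 == 6.  */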
12312
12313 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12314 if ((TREE_CODE (arg0) == PLUS_EXPR
12315 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12316 || TREE_CODE (arg0) == MINUS_EXPR)
12317 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12318 0)),
12319 arg1, 0)
12320 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12321 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12322 {
12323 tree val = TREE_OPERAND (arg0, 1);
12324 return omit_two_operands_loc (loc, type,
12325 fold_build2_loc (loc, code, type,
12326 val,
12327 build_int_cst (TREE_TYPE (val),
12328 0)),
12329 TREE_OPERAND (arg0, 0), arg1);
12330 }
12331
12332 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12333 if (TREE_CODE (arg0) == MINUS_EXPR
12334 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12335 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12336 1)),
12337 arg1, 0)
12338 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12339 {
12340 return omit_two_operands_loc (loc, type,
12341 code == NE_EXPR
12342 ? boolean_true_node : boolean_false_node,
12343 TREE_OPERAND (arg0, 1), arg1);
12344 }
12345
12346 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12347 if (TREE_CODE (arg0) == ABS_EXPR
12348 && (integer_zerop (arg1) || real_zerop (arg1)))
12349 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12350
12351 /* If this is an EQ or NE comparison with zero and ARG0 is
12352 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12353 two operations, but the latter can be done in one less insn
12354 on machines that have only two-operand insns or on which a
12355 constant cannot be the first operand. */
12356 if (TREE_CODE (arg0) == BIT_AND_EXPR
12357 && integer_zerop (arg1))
12358 {
12359 tree arg00 = TREE_OPERAND (arg0, 0);
12360 tree arg01 = TREE_OPERAND (arg0, 1);
12361 if (TREE_CODE (arg00) == LSHIFT_EXPR
12362 && integer_onep (TREE_OPERAND (arg00, 0)))
12363 {
12364 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12365 arg01, TREE_OPERAND (arg00, 1));
12366 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12367 build_int_cst (TREE_TYPE (arg0), 1));
12368 return fold_build2_loc (loc, code, type,
12369 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12370 arg1);
12371 }
12372 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12373 && integer_onep (TREE_OPERAND (arg01, 0)))
12374 {
12375 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12376 arg00, TREE_OPERAND (arg01, 1));
12377 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12378 build_int_cst (TREE_TYPE (arg0), 1));
12379 return fold_build2_loc (loc, code, type,
12380 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12381 arg1);
12382 }
12383 }
12384
12385 /* If this is an NE or EQ comparison of zero against the result of a
12386 signed MOD operation whose second operand is a power of 2, make
12387 the MOD operation unsigned since it is simpler and equivalent. */
12388 if (integer_zerop (arg1)
12389 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12390 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12391 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12392 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12393 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12394 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12395 {
12396 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12397 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12398 fold_convert_loc (loc, newtype,
12399 TREE_OPERAND (arg0, 0)),
12400 fold_convert_loc (loc, newtype,
12401 TREE_OPERAND (arg0, 1)));
12402
12403 return fold_build2_loc (loc, code, type, newmod,
12404 fold_convert_loc (loc, newtype, arg1));
12405 }
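	  /* Worked example (annotation, not part of the original source):
	     for signed x,
		 x % 4 == 0   ==>   (unsigned) x % 4U == 0
	     the signed and unsigned remainders by a power of two are zero
	     for exactly the same inputs, and the unsigned form can later
	     become the mask test (x & 3) == 0.  */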
12406
12407 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12408 C1 is a valid shift constant, and C2 is a power of two, i.e.
12409 a single bit. */
12410 if (TREE_CODE (arg0) == BIT_AND_EXPR
12411 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12412 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12413 == INTEGER_CST
12414 && integer_pow2p (TREE_OPERAND (arg0, 1))
12415 && integer_zerop (arg1))
12416 {
12417 tree itype = TREE_TYPE (arg0);
12418 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12419 prec = TYPE_PRECISION (itype);
12420
12421 /* Check for a valid shift count. */
12422 if (wi::ltu_p (arg001, prec))
12423 {
12424 tree arg01 = TREE_OPERAND (arg0, 1);
12425 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12426 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12427 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12428 can be rewritten as (X & (C2 << C1)) != 0. */
12429 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12430 {
12431 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12432 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12433 return fold_build2_loc (loc, code, type, tem,
12434 fold_convert_loc (loc, itype, arg1));
12435 }
12436 /* Otherwise, for signed (arithmetic) shifts,
12437 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12438 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12439 else if (!TYPE_UNSIGNED (itype))
12440 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12441 arg000, build_int_cst (itype, 0));
12442 		 /* Otherwise, for unsigned (logical) shifts,
12443 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12444 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12445 else
12446 return omit_one_operand_loc (loc, type,
12447 code == EQ_EXPR ? integer_one_node
12448 : integer_zero_node,
12449 arg000);
12450 }
12451 }
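	  /* Worked example (annotation, not part of the original source):
	     with C1 == 3 and C2 == 4 on a 32-bit type, 4 << 3 == 32 does
	     not overflow, so
		 ((x >> 3) & 4) != 0   ==>   (x & 32) != 0
	     eliminating the shift.  */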
12452
12453 /* If we have (A & C) == C where C is a power of 2, convert this into
12454 (A & C) != 0. Similarly for NE_EXPR. */
12455 if (TREE_CODE (arg0) == BIT_AND_EXPR
12456 && integer_pow2p (TREE_OPERAND (arg0, 1))
12457 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12458 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12459 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12460 integer_zero_node));
12461
12462 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12463 bit, then fold the expression into A < 0 or A >= 0. */
12464 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12465 if (tem)
12466 return tem;
12467
12468 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12469 Similarly for NE_EXPR. */
12470 if (TREE_CODE (arg0) == BIT_AND_EXPR
12471 && TREE_CODE (arg1) == INTEGER_CST
12472 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12473 {
12474 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12475 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12476 TREE_OPERAND (arg0, 1));
12477 tree dandnotc
12478 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12479 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12480 notc);
12481 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12482 if (integer_nonzerop (dandnotc))
12483 return omit_one_operand_loc (loc, type, rslt, arg0);
12484 }
12485
12486 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12487 Similarly for NE_EXPR. */
12488 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12489 && TREE_CODE (arg1) == INTEGER_CST
12490 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12491 {
12492 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12493 tree candnotd
12494 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12495 TREE_OPERAND (arg0, 1),
12496 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12497 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12498 if (integer_nonzerop (candnotd))
12499 return omit_one_operand_loc (loc, type, rslt, arg0);
12500 }
12501
12502 /* If this is a comparison of a field, we may be able to simplify it. */
12503 if ((TREE_CODE (arg0) == COMPONENT_REF
12504 || TREE_CODE (arg0) == BIT_FIELD_REF)
12505 /* Handle the constant case even without -O
12506 to make sure the warnings are given. */
12507 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12508 {
12509 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12510 if (t1)
12511 return t1;
12512 }
12513
12514 /* Optimize comparisons of strlen vs zero to a compare of the
12515 first character of the string vs zero. To wit,
12516 strlen(ptr) == 0 => *ptr == 0
12517 strlen(ptr) != 0 => *ptr != 0
12518 Other cases should reduce to one of these two (or a constant)
12519 due to the return value of strlen being unsigned. */
12520 if (TREE_CODE (arg0) == CALL_EXPR
12521 && integer_zerop (arg1))
12522 {
12523 tree fndecl = get_callee_fndecl (arg0);
12524
12525 if (fndecl
12526 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12527 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12528 && call_expr_nargs (arg0) == 1
12529 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12530 {
12531 tree iref = build_fold_indirect_ref_loc (loc,
12532 CALL_EXPR_ARG (arg0, 0));
12533 return fold_build2_loc (loc, code, type, iref,
12534 build_int_cst (TREE_TYPE (iref), 0));
12535 }
12536 }
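	  /* Worked example (annotation, not part of the original source):
		 strlen (p) == 0   ==>   *p == 0
	     a string has length zero exactly when its first character is
	     the terminating NUL, so the call can be elided.  */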
12537
12538 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12539 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12540 if (TREE_CODE (arg0) == RSHIFT_EXPR
12541 && integer_zerop (arg1)
12542 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12543 {
12544 tree arg00 = TREE_OPERAND (arg0, 0);
12545 tree arg01 = TREE_OPERAND (arg0, 1);
12546 tree itype = TREE_TYPE (arg00);
12547 if (wi::eq_p (arg01, element_precision (itype) - 1))
12548 {
12549 if (TYPE_UNSIGNED (itype))
12550 {
12551 itype = signed_type_for (itype);
12552 arg00 = fold_convert_loc (loc, itype, arg00);
12553 }
12554 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12555 type, arg00, build_zero_cst (itype));
12556 }
12557 }
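	  /* Worked example (annotation, not part of the original source):
	     for 32-bit signed x,
		 (x >> 31) != 0   ==>   x < 0
		 (x >> 31) == 0   ==>   x >= 0
	     an arithmetic shift by width - 1 yields -1 or 0 depending only
	     on the sign bit.  */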
12558
12559 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12560 if (integer_zerop (arg1)
12561 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12562 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12563 TREE_OPERAND (arg0, 1));
12564
12565 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12566 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12567 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12568 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12569 build_zero_cst (TREE_TYPE (arg0)));
12570 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12571 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12572 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12573 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12574 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12575 build_zero_cst (TREE_TYPE (arg0)));
12576
12577 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12578 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12579 && TREE_CODE (arg1) == INTEGER_CST
12580 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12581 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12582 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12583 TREE_OPERAND (arg0, 1), arg1));
12584
12585 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12586 (X & C) == 0 when C is a single bit. */
12587 if (TREE_CODE (arg0) == BIT_AND_EXPR
12588 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12589 && integer_zerop (arg1)
12590 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12591 {
12592 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12593 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12594 TREE_OPERAND (arg0, 1));
12595 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12596 type, tem,
12597 fold_convert_loc (loc, TREE_TYPE (arg0),
12598 arg1));
12599 }
12600
12601 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12602 constant C is a power of two, i.e. a single bit. */
12603 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12604 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12605 && integer_zerop (arg1)
12606 && integer_pow2p (TREE_OPERAND (arg0, 1))
12607 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12608 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12609 {
12610 tree arg00 = TREE_OPERAND (arg0, 0);
12611 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12612 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12613 }
12614
12615 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12616 	 when C is a power of two, i.e. a single bit. */
12617 if (TREE_CODE (arg0) == BIT_AND_EXPR
12618 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12619 && integer_zerop (arg1)
12620 && integer_pow2p (TREE_OPERAND (arg0, 1))
12621 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12622 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12623 {
12624 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12625 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12626 arg000, TREE_OPERAND (arg0, 1));
12627 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12628 tem, build_int_cst (TREE_TYPE (tem), 0));
12629 }
12630
12631 if (integer_zerop (arg1)
12632 && tree_expr_nonzero_p (arg0))
12633 {
12634 tree res = constant_boolean_node (code==NE_EXPR, type);
12635 return omit_one_operand_loc (loc, type, res, arg0);
12636 }
12637
12638 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12639 if (TREE_CODE (arg0) == NEGATE_EXPR
12640 && TREE_CODE (arg1) == NEGATE_EXPR)
12641 return fold_build2_loc (loc, code, type,
12642 TREE_OPERAND (arg0, 0),
12643 fold_convert_loc (loc, TREE_TYPE (arg0),
12644 TREE_OPERAND (arg1, 0)));
12645
12646 	 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12647 if (TREE_CODE (arg0) == BIT_AND_EXPR
12648 && TREE_CODE (arg1) == BIT_AND_EXPR)
12649 {
12650 tree arg00 = TREE_OPERAND (arg0, 0);
12651 tree arg01 = TREE_OPERAND (arg0, 1);
12652 tree arg10 = TREE_OPERAND (arg1, 0);
12653 tree arg11 = TREE_OPERAND (arg1, 1);
12654 tree itype = TREE_TYPE (arg0);
12655
12656 if (operand_equal_p (arg01, arg11, 0))
12657 return fold_build2_loc (loc, code, type,
12658 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12659 fold_build2_loc (loc,
12660 BIT_XOR_EXPR, itype,
12661 arg00, arg10),
12662 arg01),
12663 build_zero_cst (itype));
12664
12665 if (operand_equal_p (arg01, arg10, 0))
12666 return fold_build2_loc (loc, code, type,
12667 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12668 fold_build2_loc (loc,
12669 BIT_XOR_EXPR, itype,
12670 arg00, arg11),
12671 arg01),
12672 build_zero_cst (itype));
12673
12674 if (operand_equal_p (arg00, arg11, 0))
12675 return fold_build2_loc (loc, code, type,
12676 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12677 fold_build2_loc (loc,
12678 BIT_XOR_EXPR, itype,
12679 arg01, arg10),
12680 arg00),
12681 build_zero_cst (itype));
12682
12683 if (operand_equal_p (arg00, arg10, 0))
12684 return fold_build2_loc (loc, code, type,
12685 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12686 fold_build2_loc (loc,
12687 BIT_XOR_EXPR, itype,
12688 arg01, arg11),
12689 arg00),
12690 build_zero_cst (itype));
12691 }
12692
12693 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12694 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12695 {
12696 tree arg00 = TREE_OPERAND (arg0, 0);
12697 tree arg01 = TREE_OPERAND (arg0, 1);
12698 tree arg10 = TREE_OPERAND (arg1, 0);
12699 tree arg11 = TREE_OPERAND (arg1, 1);
12700 tree itype = TREE_TYPE (arg0);
12701
12702 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12703 operand_equal_p guarantees no side-effects so we don't need
12704 to use omit_one_operand on Z. */
12705 if (operand_equal_p (arg01, arg11, 0))
12706 return fold_build2_loc (loc, code, type, arg00,
12707 fold_convert_loc (loc, TREE_TYPE (arg00),
12708 arg10));
12709 if (operand_equal_p (arg01, arg10, 0))
12710 return fold_build2_loc (loc, code, type, arg00,
12711 fold_convert_loc (loc, TREE_TYPE (arg00),
12712 arg11));
12713 if (operand_equal_p (arg00, arg11, 0))
12714 return fold_build2_loc (loc, code, type, arg01,
12715 fold_convert_loc (loc, TREE_TYPE (arg01),
12716 arg10));
12717 if (operand_equal_p (arg00, arg10, 0))
12718 return fold_build2_loc (loc, code, type, arg01,
12719 fold_convert_loc (loc, TREE_TYPE (arg01),
12720 arg11));
12721
12722 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12723 if (TREE_CODE (arg01) == INTEGER_CST
12724 && TREE_CODE (arg11) == INTEGER_CST)
12725 {
12726 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12727 fold_convert_loc (loc, itype, arg11));
12728 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12729 return fold_build2_loc (loc, code, type, tem,
12730 fold_convert_loc (loc, itype, arg10));
12731 }
12732 }
12733
12734 /* Attempt to simplify equality/inequality comparisons of complex
12735 values. Only lower the comparison if the result is known or
12736 can be simplified to a single scalar comparison. */
12737 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12738 || TREE_CODE (arg0) == COMPLEX_CST)
12739 && (TREE_CODE (arg1) == COMPLEX_EXPR
12740 || TREE_CODE (arg1) == COMPLEX_CST))
12741 {
12742 tree real0, imag0, real1, imag1;
12743 tree rcond, icond;
12744
12745 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12746 {
12747 real0 = TREE_OPERAND (arg0, 0);
12748 imag0 = TREE_OPERAND (arg0, 1);
12749 }
12750 else
12751 {
12752 real0 = TREE_REALPART (arg0);
12753 imag0 = TREE_IMAGPART (arg0);
12754 }
12755
12756 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12757 {
12758 real1 = TREE_OPERAND (arg1, 0);
12759 imag1 = TREE_OPERAND (arg1, 1);
12760 }
12761 else
12762 {
12763 real1 = TREE_REALPART (arg1);
12764 imag1 = TREE_IMAGPART (arg1);
12765 }
12766
12767 rcond = fold_binary_loc (loc, code, type, real0, real1);
12768 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12769 {
12770 if (integer_zerop (rcond))
12771 {
12772 if (code == EQ_EXPR)
12773 return omit_two_operands_loc (loc, type, boolean_false_node,
12774 imag0, imag1);
12775 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12776 }
12777 else
12778 {
12779 if (code == NE_EXPR)
12780 return omit_two_operands_loc (loc, type, boolean_true_node,
12781 imag0, imag1);
12782 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12783 }
12784 }
12785
12786 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12787 if (icond && TREE_CODE (icond) == INTEGER_CST)
12788 {
12789 if (integer_zerop (icond))
12790 {
12791 if (code == EQ_EXPR)
12792 return omit_two_operands_loc (loc, type, boolean_false_node,
12793 real0, real1);
12794 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12795 }
12796 else
12797 {
12798 if (code == NE_EXPR)
12799 return omit_two_operands_loc (loc, type, boolean_true_node,
12800 real0, real1);
12801 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12802 }
12803 }
12804 }
12805
12806 return NULL_TREE;
12807
12808 case LT_EXPR:
12809 case GT_EXPR:
12810 case LE_EXPR:
12811 case GE_EXPR:
12812 tem = fold_comparison (loc, code, type, op0, op1);
12813 if (tem != NULL_TREE)
12814 return tem;
12815
12816 /* Transform comparisons of the form X +- C CMP X. */
12817 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12818 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12819 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12820 && !HONOR_SNANS (arg0))
12821 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12822 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12823 {
12824 tree arg01 = TREE_OPERAND (arg0, 1);
12825 enum tree_code code0 = TREE_CODE (arg0);
12826 int is_positive;
12827
12828 if (TREE_CODE (arg01) == REAL_CST)
12829 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12830 else
12831 is_positive = tree_int_cst_sgn (arg01);
12832
12833 /* (X - c) > X becomes false. */
12834 if (code == GT_EXPR
12835 && ((code0 == MINUS_EXPR && is_positive >= 0)
12836 || (code0 == PLUS_EXPR && is_positive <= 0)))
12837 {
12838 if (TREE_CODE (arg01) == INTEGER_CST
12839 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12840 fold_overflow_warning (("assuming signed overflow does not "
12841 "occur when assuming that (X - c) > X "
12842 "is always false"),
12843 WARN_STRICT_OVERFLOW_ALL);
12844 return constant_boolean_node (0, type);
12845 }
12846
12847 /* Likewise (X + c) < X becomes false. */
12848 if (code == LT_EXPR
12849 && ((code0 == PLUS_EXPR && is_positive >= 0)
12850 || (code0 == MINUS_EXPR && is_positive <= 0)))
12851 {
12852 if (TREE_CODE (arg01) == INTEGER_CST
12853 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12854 fold_overflow_warning (("assuming signed overflow does not "
12855 "occur when assuming that "
12856 "(X + c) < X is always false"),
12857 WARN_STRICT_OVERFLOW_ALL);
12858 return constant_boolean_node (0, type);
12859 }
12860
12861 /* Convert (X - c) <= X to true. */
12862 if (!HONOR_NANS (arg1)
12863 && code == LE_EXPR
12864 && ((code0 == MINUS_EXPR && is_positive >= 0)
12865 || (code0 == PLUS_EXPR && is_positive <= 0)))
12866 {
12867 if (TREE_CODE (arg01) == INTEGER_CST
12868 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12869 fold_overflow_warning (("assuming signed overflow does not "
12870 "occur when assuming that "
12871 "(X - c) <= X is always true"),
12872 WARN_STRICT_OVERFLOW_ALL);
12873 return constant_boolean_node (1, type);
12874 }
12875
12876 /* Convert (X + c) >= X to true. */
12877 if (!HONOR_NANS (arg1)
12878 && code == GE_EXPR
12879 && ((code0 == PLUS_EXPR && is_positive >= 0)
12880 || (code0 == MINUS_EXPR && is_positive <= 0)))
12881 {
12882 if (TREE_CODE (arg01) == INTEGER_CST
12883 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12884 fold_overflow_warning (("assuming signed overflow does not "
12885 "occur when assuming that "
12886 "(X + c) >= X is always true"),
12887 WARN_STRICT_OVERFLOW_ALL);
12888 return constant_boolean_node (1, type);
12889 }
12890
12891 if (TREE_CODE (arg01) == INTEGER_CST)
12892 {
12893 /* Convert X + c > X and X - c < X to true for integers. */
12894 if (code == GT_EXPR
12895 && ((code0 == PLUS_EXPR && is_positive > 0)
12896 || (code0 == MINUS_EXPR && is_positive < 0)))
12897 {
12898 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12899 fold_overflow_warning (("assuming signed overflow does "
12900 "not occur when assuming that "
12901 "(X + c) > X is always true"),
12902 WARN_STRICT_OVERFLOW_ALL);
12903 return constant_boolean_node (1, type);
12904 }
12905
12906 if (code == LT_EXPR
12907 && ((code0 == MINUS_EXPR && is_positive > 0)
12908 || (code0 == PLUS_EXPR && is_positive < 0)))
12909 {
12910 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12911 fold_overflow_warning (("assuming signed overflow does "
12912 "not occur when assuming that "
12913 "(X - c) < X is always true"),
12914 WARN_STRICT_OVERFLOW_ALL);
12915 return constant_boolean_node (1, type);
12916 }
12917
12918 /* Convert X + c <= X and X - c >= X to false for integers. */
12919 if (code == LE_EXPR
12920 && ((code0 == PLUS_EXPR && is_positive > 0)
12921 || (code0 == MINUS_EXPR && is_positive < 0)))
12922 {
12923 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12924 fold_overflow_warning (("assuming signed overflow does "
12925 "not occur when assuming that "
12926 "(X + c) <= X is always false"),
12927 WARN_STRICT_OVERFLOW_ALL);
12928 return constant_boolean_node (0, type);
12929 }
12930
12931 if (code == GE_EXPR
12932 && ((code0 == MINUS_EXPR && is_positive > 0)
12933 || (code0 == PLUS_EXPR && is_positive < 0)))
12934 {
12935 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12936 fold_overflow_warning (("assuming signed overflow does "
12937 "not occur when assuming that "
12938 "(X - c) >= X is always false"),
12939 WARN_STRICT_OVERFLOW_ALL);
12940 return constant_boolean_node (0, type);
12941 }
12942 }
12943 }
12944
12945 /* Comparisons with the highest or lowest possible integer of
12946 the specified precision will have known values. */
12947 {
12948 tree arg1_type = TREE_TYPE (arg1);
12949 unsigned int prec = TYPE_PRECISION (arg1_type);
12950
12951 if (TREE_CODE (arg1) == INTEGER_CST
12952 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12953 {
12954 wide_int max = wi::max_value (arg1_type);
12955 wide_int signed_max = wi::max_value (prec, SIGNED);
12956 wide_int min = wi::min_value (arg1_type);
12957
12958 if (wi::eq_p (arg1, max))
12959 switch (code)
12960 {
12961 case GT_EXPR:
12962 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12963
12964 case GE_EXPR:
12965 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12966
12967 case LE_EXPR:
12968 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12969
12970 case LT_EXPR:
12971 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12972
12973 /* The GE_EXPR and LT_EXPR cases above are not normally
12974 reached because of previous transformations. */
12975
12976 default:
12977 break;
12978 }
12979 else if (wi::eq_p (arg1, max - 1))
12980 switch (code)
12981 {
12982 case GT_EXPR:
12983 arg1 = const_binop (PLUS_EXPR, arg1,
12984 build_int_cst (TREE_TYPE (arg1), 1));
12985 return fold_build2_loc (loc, EQ_EXPR, type,
12986 fold_convert_loc (loc,
12987 TREE_TYPE (arg1), arg0),
12988 arg1);
12989 case LE_EXPR:
12990 arg1 = const_binop (PLUS_EXPR, arg1,
12991 build_int_cst (TREE_TYPE (arg1), 1));
12992 return fold_build2_loc (loc, NE_EXPR, type,
12993 fold_convert_loc (loc, TREE_TYPE (arg1),
12994 arg0),
12995 arg1);
12996 default:
12997 break;
12998 }
12999 else if (wi::eq_p (arg1, min))
13000 switch (code)
13001 {
13002 case LT_EXPR:
13003 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13004
13005 case LE_EXPR:
13006 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13007
13008 case GE_EXPR:
13009 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13010
13011 case GT_EXPR:
13012 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13013
13014 default:
13015 break;
13016 }
13017 else if (wi::eq_p (arg1, min + 1))
13018 switch (code)
13019 {
13020 case GE_EXPR:
13021 arg1 = const_binop (MINUS_EXPR, arg1,
13022 build_int_cst (TREE_TYPE (arg1), 1));
13023 return fold_build2_loc (loc, NE_EXPR, type,
13024 fold_convert_loc (loc,
13025 TREE_TYPE (arg1), arg0),
13026 arg1);
13027 case LT_EXPR:
13028 arg1 = const_binop (MINUS_EXPR, arg1,
13029 build_int_cst (TREE_TYPE (arg1), 1));
13030 return fold_build2_loc (loc, EQ_EXPR, type,
13031 fold_convert_loc (loc, TREE_TYPE (arg1),
13032 arg0),
13033 arg1);
13034 default:
13035 break;
13036 }
13037
13038 else if (wi::eq_p (arg1, signed_max)
13039 && TYPE_UNSIGNED (arg1_type)
13040 /* We will flip the signedness of the comparison operator
13041 associated with the mode of arg1, so the sign bit is
13042 specified by this mode. Check that arg1 is the signed
13043 max associated with this sign bit. */
13044 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13045 /* signed_type does not work on pointer types. */
13046 && INTEGRAL_TYPE_P (arg1_type))
13047 {
13048 /* The following case also applies to X < signed_max+1
13049 		 and X >= signed_max+1 because of previous transformations. */
13050 if (code == LE_EXPR || code == GT_EXPR)
13051 {
13052 tree st = signed_type_for (arg1_type);
13053 return fold_build2_loc (loc,
13054 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13055 type, fold_convert_loc (loc, st, arg0),
13056 build_int_cst (st, 0));
13057 }
13058 }
13059 }
13060 }
13061
13062 /* If we are comparing an ABS_EXPR with a constant, we can
13063 convert all the cases into explicit comparisons, but they may
13064 well not be faster than doing the ABS and one comparison.
13065 But ABS (X) <= C is a range comparison, which becomes a subtraction
13066 and a comparison, and is probably faster. */
13067 if (code == LE_EXPR
13068 && TREE_CODE (arg1) == INTEGER_CST
13069 && TREE_CODE (arg0) == ABS_EXPR
13070 && ! TREE_SIDE_EFFECTS (arg0)
13071 && (0 != (tem = negate_expr (arg1)))
13072 && TREE_CODE (tem) == INTEGER_CST
13073 && !TREE_OVERFLOW (tem))
13074 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13075 build2 (GE_EXPR, type,
13076 TREE_OPERAND (arg0, 0), tem),
13077 build2 (LE_EXPR, type,
13078 TREE_OPERAND (arg0, 0), arg1));
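	  /* Worked example (annotation, not part of the original source):
		 abs (x) <= 7   ==>   x >= -7 && x <= 7
	     a range check that avoids computing the absolute value.  */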
13079
13080 /* Convert ABS_EXPR<x> >= 0 to true. */
13081 strict_overflow_p = false;
13082 if (code == GE_EXPR
13083 && (integer_zerop (arg1)
13084 || (! HONOR_NANS (arg0)
13085 && real_zerop (arg1)))
13086 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13087 {
13088 if (strict_overflow_p)
13089 fold_overflow_warning (("assuming signed overflow does not occur "
13090 "when simplifying comparison of "
13091 "absolute value and zero"),
13092 WARN_STRICT_OVERFLOW_CONDITIONAL);
13093 return omit_one_operand_loc (loc, type,
13094 constant_boolean_node (true, type),
13095 arg0);
13096 }
13097
13098 /* Convert ABS_EXPR<x> < 0 to false. */
13099 strict_overflow_p = false;
13100 if (code == LT_EXPR
13101 && (integer_zerop (arg1) || real_zerop (arg1))
13102 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13103 {
13104 if (strict_overflow_p)
13105 fold_overflow_warning (("assuming signed overflow does not occur "
13106 "when simplifying comparison of "
13107 "absolute value and zero"),
13108 WARN_STRICT_OVERFLOW_CONDITIONAL);
13109 return omit_one_operand_loc (loc, type,
13110 constant_boolean_node (false, type),
13111 arg0);
13112 }
13113
13114 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13115 and similarly for >= into !=. */
13116 if ((code == LT_EXPR || code == GE_EXPR)
13117 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13118 && TREE_CODE (arg1) == LSHIFT_EXPR
13119 && integer_onep (TREE_OPERAND (arg1, 0)))
13120 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13121 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13122 TREE_OPERAND (arg1, 1)),
13123 build_zero_cst (TREE_TYPE (arg0)));
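	  /* Worked example (annotation, not part of the original source):
	     for unsigned x,
		 x < (1 << y)    ==>   (x >> y) == 0
		 x >= (1 << y)   ==>   (x >> y) != 0
	     x lies below the single set bit exactly when shifting it right
	     by y leaves nothing.  */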
13124
13125 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13126 otherwise Y might be >= # of bits in X's type and thus e.g.
13127 (unsigned char) (1 << Y) for Y 15 might be 0.
13128 	 (unsigned char) (1 << Y) for Y == 15 might be 0.
13129 otherwise if Y is number of bits in the signed shift type minus 1,
13130 	 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13131 	 Y == 31 might be 0xffffffff80000000. */
13132 if ((code == LT_EXPR || code == GE_EXPR)
13133 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13134 && CONVERT_EXPR_P (arg1)
13135 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13136 && (element_precision (TREE_TYPE (arg1))
13137 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13138 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13139 || (element_precision (TREE_TYPE (arg1))
13140 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13141 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13142 {
13143 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13144 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13145 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13146 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13147 build_zero_cst (TREE_TYPE (arg0)));
13148 }
13149
13150 return NULL_TREE;
13151
13152 case UNORDERED_EXPR:
13153 case ORDERED_EXPR:
13154 case UNLT_EXPR:
13155 case UNLE_EXPR:
13156 case UNGT_EXPR:
13157 case UNGE_EXPR:
13158 case UNEQ_EXPR:
13159 case LTGT_EXPR:
13160 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13161 {
13162 t1 = fold_relational_const (code, type, arg0, arg1);
13163 if (t1 != NULL_TREE)
13164 return t1;
13165 }
13166
13167 /* If the first operand is NaN, the result is constant. */
13168 if (TREE_CODE (arg0) == REAL_CST
13169 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13170 && (code != LTGT_EXPR || ! flag_trapping_math))
13171 {
13172 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13173 ? integer_zero_node
13174 : integer_one_node;
13175 return omit_one_operand_loc (loc, type, t1, arg1);
13176 }
13177
13178 /* If the second operand is NaN, the result is constant. */
13179 if (TREE_CODE (arg1) == REAL_CST
13180 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13181 && (code != LTGT_EXPR || ! flag_trapping_math))
13182 {
13183 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13184 ? integer_zero_node
13185 : integer_one_node;
13186 return omit_one_operand_loc (loc, type, t1, arg0);
13187 }
13188
13189 /* Simplify unordered comparison of something with itself. */
13190 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13191 && operand_equal_p (arg0, arg1, 0))
13192 return constant_boolean_node (1, type);
13193
13194 if (code == LTGT_EXPR
13195 && !flag_trapping_math
13196 && operand_equal_p (arg0, arg1, 0))
13197 return constant_boolean_node (0, type);
13198
13199 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13200 {
13201 tree targ0 = strip_float_extensions (arg0);
13202 tree targ1 = strip_float_extensions (arg1);
13203 tree newtype = TREE_TYPE (targ0);
13204
13205 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13206 newtype = TREE_TYPE (targ1);
13207
13208 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13209 return fold_build2_loc (loc, code, type,
13210 fold_convert_loc (loc, newtype, targ0),
13211 fold_convert_loc (loc, newtype, targ1));
13212 }
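	  /* Worked example (annotation, not part of the original source):
	     if f1 and f2 are floats,
		 (double) f1 < (double) f2   ==>   f1 < f2
	     widening float to double is exact and order-preserving, so the
	     comparison can be done in the narrower type.  */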
13213
13214 return NULL_TREE;
13215
13216 case COMPOUND_EXPR:
13217 /* When pedantic, a compound expression can be neither an lvalue
13218 nor an integer constant expression. */
13219 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13220 return NULL_TREE;
13221 	 /* Don't let (0, 0) be a null pointer constant. */
13222 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13223 : fold_convert_loc (loc, type, arg1);
13224 return pedantic_non_lvalue_loc (loc, tem);
13225
13226 case ASSERT_EXPR:
13227 /* An ASSERT_EXPR should never be passed to fold_binary. */
13228 gcc_unreachable ();
13229
13230 default:
13231 return NULL_TREE;
13232 } /* switch (code) */
13233 }
13234
13235 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13236 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13237 of GOTO_EXPR. */
13238
13239 static tree
13240 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13241 {
13242 switch (TREE_CODE (*tp))
13243 {
13244 case LABEL_EXPR:
13245 return *tp;
13246
13247 case GOTO_EXPR:
13248 *walk_subtrees = 0;
13249
13250 /* ... fall through ... */
13251
13252 default:
13253 return NULL_TREE;
13254 }
13255 }
13256
13257 /* Return whether the sub-tree ST contains a label which is accessible from
13258 outside the sub-tree. */
13259
13260 static bool
13261 contains_label_p (tree st)
13262 {
13263 return
13264 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13265 }
13266
13267 /* Fold a ternary expression of code CODE and type TYPE with operands
13268 OP0, OP1, and OP2. Return the folded expression if folding is
13269 successful. Otherwise, return NULL_TREE. */
13270
13271 tree
13272 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13273 tree op0, tree op1, tree op2)
13274 {
13275 tree tem;
13276 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13277 enum tree_code_class kind = TREE_CODE_CLASS (code);
13278
13279 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13280 && TREE_CODE_LENGTH (code) == 3);
13281
13282 /* If this is a commutative operation, and OP0 is a constant, move it
13283 to OP1 to reduce the number of tests below. */
13284 if (commutative_ternary_tree_code (code)
13285 && tree_swap_operands_p (op0, op1, true))
13286 return fold_build3_loc (loc, code, type, op1, op0, op2);
13287
13288 tem = generic_simplify (loc, code, type, op0, op1, op2);
13289 if (tem)
13290 return tem;
13291
13292 /* Strip any conversions that don't change the mode. This is safe
13293 for every expression, except for a comparison expression because
13294 its signedness is derived from its operands. So, in the latter
13295 case, only strip conversions that don't change the signedness.
13296
13297 Note that this is done as an internal manipulation within the
13298 constant folder, in order to find the simplest representation of
13299 	 the arguments so that their form can be studied. In any case,
13300 the appropriate type conversions should be put back in the tree
13301 that will get out of the constant folder. */
13302 if (op0)
13303 {
13304 arg0 = op0;
13305 STRIP_NOPS (arg0);
13306 }
13307
13308 if (op1)
13309 {
13310 arg1 = op1;
13311 STRIP_NOPS (arg1);
13312 }
13313
13314 if (op2)
13315 {
13316 arg2 = op2;
13317 STRIP_NOPS (arg2);
13318 }
13319
13320 switch (code)
13321 {
13322 case COMPONENT_REF:
13323 if (TREE_CODE (arg0) == CONSTRUCTOR
13324 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13325 {
13326 unsigned HOST_WIDE_INT idx;
13327 tree field, value;
13328 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13329 if (field == arg1)
13330 return value;
13331 }
13332 return NULL_TREE;
13333
13334 case COND_EXPR:
13335 case VEC_COND_EXPR:
13336 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13337 so all simple results must be passed through pedantic_non_lvalue. */
13338 if (TREE_CODE (arg0) == INTEGER_CST)
13339 {
13340 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13341 tem = integer_zerop (arg0) ? op2 : op1;
13342 /* Only optimize constant conditions when the selected branch
13343 has the same type as the COND_EXPR. This avoids optimizing
13344 away "c ? x : throw", where the throw has a void type.
13345 	 Avoid throwing away an operand that contains a label. */
13346 if ((!TREE_SIDE_EFFECTS (unused_op)
13347 || !contains_label_p (unused_op))
13348 && (! VOID_TYPE_P (TREE_TYPE (tem))
13349 || VOID_TYPE_P (type)))
13350 return pedantic_non_lvalue_loc (loc, tem);
13351 return NULL_TREE;
13352 }
13353 else if (TREE_CODE (arg0) == VECTOR_CST)
13354 {
13355 if ((TREE_CODE (arg1) == VECTOR_CST
13356 || TREE_CODE (arg1) == CONSTRUCTOR)
13357 && (TREE_CODE (arg2) == VECTOR_CST
13358 || TREE_CODE (arg2) == CONSTRUCTOR))
13359 {
13360 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13361 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13362 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13363 for (i = 0; i < nelts; i++)
13364 {
13365 tree val = VECTOR_CST_ELT (arg0, i);
13366 if (integer_all_onesp (val))
13367 sel[i] = i;
13368 else if (integer_zerop (val))
13369 sel[i] = nelts + i;
13370 else /* Currently unreachable. */
13371 return NULL_TREE;
13372 }
13373 tree t = fold_vec_perm (type, arg1, arg2, sel);
13374 if (t != NULL_TREE)
13375 return t;
13376 }
13377 }
13378
13379 /* If we have A op B ? A : C, we may be able to convert this to a
13380 simpler expression, depending on the operation and the values
13381 of B and C. Signed zeros prevent all of these transformations,
13382 for reasons given above each one.
13383
13384 Also try swapping the arguments and inverting the conditional. */
13385 if (COMPARISON_CLASS_P (arg0)
13386 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13387 arg1, TREE_OPERAND (arg0, 1))
13388 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13389 {
13390 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13391 if (tem)
13392 return tem;
13393 }
13394
13395 if (COMPARISON_CLASS_P (arg0)
13396 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13397 op2,
13398 TREE_OPERAND (arg0, 1))
13399 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13400 {
13401 location_t loc0 = expr_location_or (arg0, loc);
13402 tem = fold_invert_truthvalue (loc0, arg0);
13403 if (tem && COMPARISON_CLASS_P (tem))
13404 {
13405 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13406 if (tem)
13407 return tem;
13408 }
13409 }
13410
13411 /* If the second operand is simpler than the third, swap them
13412 since that produces better jump optimization results. */
13413 if (truth_value_p (TREE_CODE (arg0))
13414 && tree_swap_operands_p (op1, op2, false))
13415 {
13416 location_t loc0 = expr_location_or (arg0, loc);
13417 /* See if this can be inverted. If it can't, possibly because
13418 it was a floating-point inequality comparison, don't do
13419 anything. */
13420 tem = fold_invert_truthvalue (loc0, arg0);
13421 if (tem)
13422 return fold_build3_loc (loc, code, type, tem, op2, op1);
13423 }
13424
13425 /* Convert A ? 1 : 0 to simply A. */
13426 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13427 : (integer_onep (op1)
13428 && !VECTOR_TYPE_P (type)))
13429 && integer_zerop (op2)
13430 /* If we try to convert OP0 to our type, the
13431 call to fold will try to move the conversion inside
13432 a COND, which will recurse. In that case, the COND_EXPR
13433 is probably the best choice, so leave it alone. */
13434 && type == TREE_TYPE (arg0))
13435 return pedantic_non_lvalue_loc (loc, arg0);
13436
13437 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13438 over COND_EXPR in cases such as floating point comparisons. */
13439 if (integer_zerop (op1)
13440 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13441 : (integer_onep (op2)
13442 && !VECTOR_TYPE_P (type)))
13443 && truth_value_p (TREE_CODE (arg0)))
13444 return pedantic_non_lvalue_loc (loc,
13445 fold_convert_loc (loc, type,
13446 invert_truthvalue_loc (loc,
13447 arg0)));
13448
13449 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
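/* Worked example (illustrative): for a 32-bit int A,
   "A < 0 ? 0x80000000 : 0" selects exactly A's sign bit, so the whole
   conditional folds to the single operation "A & 0x80000000".  */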
13450 if (TREE_CODE (arg0) == LT_EXPR
13451 && integer_zerop (TREE_OPERAND (arg0, 1))
13452 && integer_zerop (op2)
13453 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13454 {
13455 /* sign_bit_p looks through both zero and sign extensions,
13456 but for this optimization only sign extensions are
13457 usable. */
13458 tree tem2 = TREE_OPERAND (arg0, 0);
13459 while (tem != tem2)
13460 {
13461 if (TREE_CODE (tem2) != NOP_EXPR
13462 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13463 {
13464 tem = NULL_TREE;
13465 break;
13466 }
13467 tem2 = TREE_OPERAND (tem2, 0);
13468 }
13469 /* sign_bit_p only checks ARG1 bits within A's precision.
13470 If <sign bit of A> has a wider type than A, the bits outside
13471 of A's precision in <sign bit of A> need to be checked.
13472 If they are all 0, this optimization must be done
13473 in A's unsigned type; if they are all 1, in A's signed type;
13474 otherwise it can't be done. */
13475 if (tem
13476 && TYPE_PRECISION (TREE_TYPE (tem))
13477 < TYPE_PRECISION (TREE_TYPE (arg1))
13478 && TYPE_PRECISION (TREE_TYPE (tem))
13479 < TYPE_PRECISION (type))
13480 {
13481 int inner_width, outer_width;
13482 tree tem_type;
13483
13484 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13485 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13486 if (outer_width > TYPE_PRECISION (type))
13487 outer_width = TYPE_PRECISION (type);
13488
13489 wide_int mask = wi::shifted_mask
13490 (inner_width, outer_width - inner_width, false,
13491 TYPE_PRECISION (TREE_TYPE (arg1)));
13492
13493 wide_int common = mask & arg1;
13494 if (common == mask)
13495 {
13496 tem_type = signed_type_for (TREE_TYPE (tem));
13497 tem = fold_convert_loc (loc, tem_type, tem);
13498 }
13499 else if (common == 0)
13500 {
13501 tem_type = unsigned_type_for (TREE_TYPE (tem));
13502 tem = fold_convert_loc (loc, tem_type, tem);
13503 }
13504 else
13505 tem = NULL;
13506 }
13507
13508 if (tem)
13509 return
13510 fold_convert_loc (loc, type,
13511 fold_build2_loc (loc, BIT_AND_EXPR,
13512 TREE_TYPE (tem), tem,
13513 fold_convert_loc (loc,
13514 TREE_TYPE (tem),
13515 arg1)));
13516 }
13517
13518 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13519 already handled above. */
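/* E.g. (illustrative): "(a >> 3) & 1 ? 8 : 0" folds to "a & 8", since
   bit 3 of A has value 8 whenever it is set.  */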
13520 if (TREE_CODE (arg0) == BIT_AND_EXPR
13521 && integer_onep (TREE_OPERAND (arg0, 1))
13522 && integer_zerop (op2)
13523 && integer_pow2p (arg1))
13524 {
13525 tree tem = TREE_OPERAND (arg0, 0);
13526 STRIP_NOPS (tem);
13527 if (TREE_CODE (tem) == RSHIFT_EXPR
13528 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13529 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13530 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13531 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13532 TREE_OPERAND (tem, 0), arg1);
13533 }
13534
13535 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13536 is probably obsolete because the first operand should be a
13537 truth value (that's why we have the two cases above), but let's
13538 leave it in until we can confirm this for all front-ends. */
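/* E.g. (illustrative): "(a & 4) != 0 ? 4 : 0" is simply "a & 4".  */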
13539 if (integer_zerop (op2)
13540 && TREE_CODE (arg0) == NE_EXPR
13541 && integer_zerop (TREE_OPERAND (arg0, 1))
13542 && integer_pow2p (arg1)
13543 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13544 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13545 arg1, OEP_ONLY_CONST))
13546 return pedantic_non_lvalue_loc (loc,
13547 fold_convert_loc (loc, type,
13548 TREE_OPERAND (arg0, 0)));
13549
13550 /* Disable the transformations below for vectors, since
13551 fold_binary_op_with_conditional_arg may undo them immediately,
13552 yielding an infinite loop. */
13553 if (code == VEC_COND_EXPR)
13554 return NULL_TREE;
13555
13556 /* Convert A ? B : 0 into A && B if A and B are truth values. */
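/* E.g. (illustrative): for boolean a and b, "a ? b : 0" becomes
   "a && b"; for VEC_COND_EXPR the bitwise "a & b" is used instead,
   since vector truth values are all-ones/zero masks.  */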
13557 if (integer_zerop (op2)
13558 && truth_value_p (TREE_CODE (arg0))
13559 && truth_value_p (TREE_CODE (arg1))
13560 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13561 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13562 : TRUTH_ANDIF_EXPR,
13563 type, fold_convert_loc (loc, type, arg0), arg1);
13564
13565 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13566 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13567 && truth_value_p (TREE_CODE (arg0))
13568 && truth_value_p (TREE_CODE (arg1))
13569 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13570 {
13571 location_t loc0 = expr_location_or (arg0, loc);
13572 /* Only perform transformation if ARG0 is easily inverted. */
13573 tem = fold_invert_truthvalue (loc0, arg0);
13574 if (tem)
13575 return fold_build2_loc (loc, code == VEC_COND_EXPR
13576 ? BIT_IOR_EXPR
13577 : TRUTH_ORIF_EXPR,
13578 type, fold_convert_loc (loc, type, tem),
13579 arg1);
13580 }
13581
13582 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13583 if (integer_zerop (arg1)
13584 && truth_value_p (TREE_CODE (arg0))
13585 && truth_value_p (TREE_CODE (op2))
13586 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13587 {
13588 location_t loc0 = expr_location_or (arg0, loc);
13589 /* Only perform transformation if ARG0 is easily inverted. */
13590 tem = fold_invert_truthvalue (loc0, arg0);
13591 if (tem)
13592 return fold_build2_loc (loc, code == VEC_COND_EXPR
13593 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13594 type, fold_convert_loc (loc, type, tem),
13595 op2);
13596 }
13597
13598 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13599 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13600 && truth_value_p (TREE_CODE (arg0))
13601 && truth_value_p (TREE_CODE (op2))
13602 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13603 return fold_build2_loc (loc, code == VEC_COND_EXPR
13604 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13605 type, fold_convert_loc (loc, type, arg0), op2);
13606
13607 return NULL_TREE;
13608
13609 case CALL_EXPR:
13610 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13611 of fold_ternary on them. */
13612 gcc_unreachable ();
13613
13614 case BIT_FIELD_REF:
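/* A BIT_FIELD_REF of a constant vector at an element-aligned position
   can be folded directly.  E.g. (illustrative): taking 64 bits at bit
   offset 64 from a V4SI constant { 1, 2, 3, 4 } yields the V2SI
   constant { 3, 4 }.  */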
13615 if ((TREE_CODE (arg0) == VECTOR_CST
13616 || (TREE_CODE (arg0) == CONSTRUCTOR
13617 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13618 && (type == TREE_TYPE (TREE_TYPE (arg0))
13619 || (TREE_CODE (type) == VECTOR_TYPE
13620 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13621 {
13622 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13623 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13624 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13625 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13626
13627 if (n != 0
13628 && (idx % width) == 0
13629 && (n % width) == 0
13630 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13631 {
13632 idx = idx / width;
13633 n = n / width;
13634
13635 if (TREE_CODE (arg0) == VECTOR_CST)
13636 {
13637 if (n == 1)
13638 return VECTOR_CST_ELT (arg0, idx);
13639
13640 tree *vals = XALLOCAVEC (tree, n);
13641 for (unsigned i = 0; i < n; ++i)
13642 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13643 return build_vector (type, vals);
13644 }
13645
13646 /* Constructor elements can be subvectors. */
13647 unsigned HOST_WIDE_INT k = 1;
13648 if (CONSTRUCTOR_NELTS (arg0) != 0)
13649 {
13650 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13651 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13652 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13653 }
13654
13655 /* We keep an exact subset of the constructor elements. */
13656 if ((idx % k) == 0 && (n % k) == 0)
13657 {
13658 if (CONSTRUCTOR_NELTS (arg0) == 0)
13659 return build_constructor (type, NULL);
13660 idx /= k;
13661 n /= k;
13662 if (n == 1)
13663 {
13664 if (idx < CONSTRUCTOR_NELTS (arg0))
13665 return CONSTRUCTOR_ELT (arg0, idx)->value;
13666 return build_zero_cst (type);
13667 }
13668
13669 vec<constructor_elt, va_gc> *vals;
13670 vec_alloc (vals, n);
13671 for (unsigned i = 0;
13672 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13673 ++i)
13674 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13675 CONSTRUCTOR_ELT
13676 (arg0, idx + i)->value);
13677 return build_constructor (type, vals);
13678 }
13679 /* The bitfield references a single constructor element. */
13680 else if (idx + n <= (idx / k + 1) * k)
13681 {
13682 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13683 return build_zero_cst (type);
13684 else if (n == k)
13685 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13686 else
13687 return fold_build3_loc (loc, code, type,
13688 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13689 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13690 }
13691 }
13692 }
13693
13694 /* A bit-field-ref that references the whole argument can be stripped. */
13695 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13696 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13697 && integer_zerop (op2))
13698 return fold_convert_loc (loc, type, arg0);
13699
13700 /* On constants we can use native encode/interpret to constant
13701 fold (nearly) all BIT_FIELD_REFs. */
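/* Sketch of the idea (illustrative): the constant is serialized into a
   byte buffer by native_encode_expr and the selected bytes are re-read
   as TYPE by native_interpret_expr, so e.g. the low 32 bits of a
   double constant fold to the int constant with the corresponding bit
   pattern in the target's byte order.  */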
13702 if (CONSTANT_CLASS_P (arg0)
13703 && can_native_interpret_type_p (type)
13704 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13705 /* This limitation should not be necessary; we just need to
13706 round this up to the mode size. */
13707 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13708 /* Need bit-shifting of the buffer to relax the following. */
13709 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13710 {
13711 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13712 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13713 unsigned HOST_WIDE_INT clen;
13714 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13715 /* ??? We cannot tell native_encode_expr to start at
13716 some random byte only.  So limit ourselves to a reasonable
13717 amount of work. */
13718 if (clen <= 4096)
13719 {
13720 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13721 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13722 if (len > 0
13723 && len * BITS_PER_UNIT >= bitpos + bitsize)
13724 {
13725 tree v = native_interpret_expr (type,
13726 b + bitpos / BITS_PER_UNIT,
13727 bitsize / BITS_PER_UNIT);
13728 if (v)
13729 return v;
13730 }
13731 }
13732 }
13733
13734 return NULL_TREE;
13735
13736 case FMA_EXPR:
13737 /* For integers we can decompose the FMA if possible. */
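/* E.g. (illustrative): FMA_EXPR <3, 4, x> folds to "12 + x", and
   FMA_EXPR <a, b, 0> degenerates to the plain product "a * b".  */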
13738 if (TREE_CODE (arg0) == INTEGER_CST
13739 && TREE_CODE (arg1) == INTEGER_CST)
13740 return fold_build2_loc (loc, PLUS_EXPR, type,
13741 const_binop (MULT_EXPR, arg0, arg1), arg2);
13742 if (integer_zerop (arg2))
13743 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13744
13745 return fold_fma (loc, type, arg0, arg1, arg2);
13746
13747 case VEC_PERM_EXPR:
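/* E.g. (illustrative): for a V4SI permutation, a constant selector
   element of 9 is reduced to 9 & 7 == 1, and a two-operand permute
   that only reads its second operand is rewritten to read the first,
   with the mask constant rebuilt below.  */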
13748 if (TREE_CODE (arg2) == VECTOR_CST)
13749 {
13750 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13751 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13752 unsigned char *sel2 = sel + nelts;
13753 bool need_mask_canon = false;
13754 bool need_mask_canon2 = false;
13755 bool all_in_vec0 = true;
13756 bool all_in_vec1 = true;
13757 bool maybe_identity = true;
13758 bool single_arg = (op0 == op1);
13759 bool changed = false;
13760
13761 mask2 = 2 * nelts - 1;
13762 mask = single_arg ? (nelts - 1) : mask2;
13763 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13764 for (i = 0; i < nelts; i++)
13765 {
13766 tree val = VECTOR_CST_ELT (arg2, i);
13767 if (TREE_CODE (val) != INTEGER_CST)
13768 return NULL_TREE;
13769
13770 /* Make sure that the perm value is in an acceptable
13771 range. */
13772 wide_int t = val;
13773 need_mask_canon |= wi::gtu_p (t, mask);
13774 need_mask_canon2 |= wi::gtu_p (t, mask2);
13775 sel[i] = t.to_uhwi () & mask;
13776 sel2[i] = t.to_uhwi () & mask2;
13777
13778 if (sel[i] < nelts)
13779 all_in_vec1 = false;
13780 else
13781 all_in_vec0 = false;
13782
13783 if ((sel[i] & (nelts-1)) != i)
13784 maybe_identity = false;
13785 }
13786
13787 if (maybe_identity)
13788 {
13789 if (all_in_vec0)
13790 return op0;
13791 if (all_in_vec1)
13792 return op1;
13793 }
13794
13795 if (all_in_vec0)
13796 op1 = op0;
13797 else if (all_in_vec1)
13798 {
13799 op0 = op1;
13800 for (i = 0; i < nelts; i++)
13801 sel[i] -= nelts;
13802 need_mask_canon = true;
13803 }
13804
13805 if ((TREE_CODE (op0) == VECTOR_CST
13806 || TREE_CODE (op0) == CONSTRUCTOR)
13807 && (TREE_CODE (op1) == VECTOR_CST
13808 || TREE_CODE (op1) == CONSTRUCTOR))
13809 {
13810 tree t = fold_vec_perm (type, op0, op1, sel);
13811 if (t != NULL_TREE)
13812 return t;
13813 }
13814
13815 if (op0 == op1 && !single_arg)
13816 changed = true;
13817
13818 /* Some targets are deficient and fail to expand a single
13819 argument permutation while still allowing an equivalent
13820 2-argument version. */
13821 if (need_mask_canon && arg2 == op2
13822 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13823 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13824 {
13825 need_mask_canon = need_mask_canon2;
13826 sel = sel2;
13827 }
13828
13829 if (need_mask_canon && arg2 == op2)
13830 {
13831 tree *tsel = XALLOCAVEC (tree, nelts);
13832 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13833 for (i = 0; i < nelts; i++)
13834 tsel[i] = build_int_cst (eltype, sel[i]);
13835 op2 = build_vector (TREE_TYPE (arg2), tsel);
13836 changed = true;
13837 }
13838
13839 if (changed)
13840 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13841 }
13842 return NULL_TREE;
13843
13844 default:
13845 return NULL_TREE;
13846 } /* switch (code) */
13847 }
13848
13849 /* Perform constant folding and related simplification of EXPR.
13850 The related simplifications include x*1 => x, x*0 => 0, etc.,
13851 and application of the associative law.
13852 NOP_EXPR conversions may be removed freely (as long as we
13853 are careful not to change the type of the overall expression).
13854 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13855 but we can constant-fold them if they have constant operands. */
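/* Usage sketch (illustrative, not from the original source): callers
   typically build a tree and pass it through fold, keeping whatever
   comes back:

	tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
	sum = fold (sum);

   If nothing simplifies, fold returns its argument unchanged.  */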
13856
13857 #ifdef ENABLE_FOLD_CHECKING
13858 # define fold(x) fold_1 (x)
13859 static tree fold_1 (tree);
13860 static
13861 #endif
13862 tree
13863 fold (tree expr)
13864 {
13865 const tree t = expr;
13866 enum tree_code code = TREE_CODE (t);
13867 enum tree_code_class kind = TREE_CODE_CLASS (code);
13868 tree tem;
13869 location_t loc = EXPR_LOCATION (expr);
13870
13871 /* Return right away if a constant. */
13872 if (kind == tcc_constant)
13873 return t;
13874
13875 /* CALL_EXPR-like objects with variable numbers of operands are
13876 treated specially. */
13877 if (kind == tcc_vl_exp)
13878 {
13879 if (code == CALL_EXPR)
13880 {
13881 tem = fold_call_expr (loc, expr, false);
13882 return tem ? tem : expr;
13883 }
13884 return expr;
13885 }
13886
13887 if (IS_EXPR_CODE_CLASS (kind))
13888 {
13889 tree type = TREE_TYPE (t);
13890 tree op0, op1, op2;
13891
13892 switch (TREE_CODE_LENGTH (code))
13893 {
13894 case 1:
13895 op0 = TREE_OPERAND (t, 0);
13896 tem = fold_unary_loc (loc, code, type, op0);
13897 return tem ? tem : expr;
13898 case 2:
13899 op0 = TREE_OPERAND (t, 0);
13900 op1 = TREE_OPERAND (t, 1);
13901 tem = fold_binary_loc (loc, code, type, op0, op1);
13902 return tem ? tem : expr;
13903 case 3:
13904 op0 = TREE_OPERAND (t, 0);
13905 op1 = TREE_OPERAND (t, 1);
13906 op2 = TREE_OPERAND (t, 2);
13907 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13908 return tem ? tem : expr;
13909 default:
13910 break;
13911 }
13912 }
13913
13914 switch (code)
13915 {
13916 case ARRAY_REF:
13917 {
13918 tree op0 = TREE_OPERAND (t, 0);
13919 tree op1 = TREE_OPERAND (t, 1);
13920
13921 if (TREE_CODE (op1) == INTEGER_CST
13922 && TREE_CODE (op0) == CONSTRUCTOR
13923 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13924 {
13925 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13926 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13927 unsigned HOST_WIDE_INT begin = 0;
13928
13929 /* Find a matching index by means of a binary search. */
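/* Illustrative: for a constructor { [0..9] = 1, [10] = 2 } and index 7,
   the RANGE_EXPR arms of the search land on the [0..9] entry and its
   value 1 is returned.  */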
13930 while (begin != end)
13931 {
13932 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13933 tree index = (*elts)[middle].index;
13934
13935 if (TREE_CODE (index) == INTEGER_CST
13936 && tree_int_cst_lt (index, op1))
13937 begin = middle + 1;
13938 else if (TREE_CODE (index) == INTEGER_CST
13939 && tree_int_cst_lt (op1, index))
13940 end = middle;
13941 else if (TREE_CODE (index) == RANGE_EXPR
13942 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13943 begin = middle + 1;
13944 else if (TREE_CODE (index) == RANGE_EXPR
13945 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13946 end = middle;
13947 else
13948 return (*elts)[middle].value;
13949 }
13950 }
13951
13952 return t;
13953 }
13954
13955 /* Return a VECTOR_CST if possible. */
13956 case CONSTRUCTOR:
13957 {
13958 tree type = TREE_TYPE (t);
13959 if (TREE_CODE (type) != VECTOR_TYPE)
13960 return t;
13961
13962 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13963 unsigned HOST_WIDE_INT idx, pos = 0;
13964 tree value;
13965
13966 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13967 {
13968 if (!CONSTANT_CLASS_P (value))
13969 return t;
13970 if (TREE_CODE (value) == VECTOR_CST)
13971 {
13972 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13973 vec[pos++] = VECTOR_CST_ELT (value, i);
13974 }
13975 else
13976 vec[pos++] = value;
13977 }
13978 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13979 vec[pos] = build_zero_cst (TREE_TYPE (type));
13980
13981 return build_vector (type, vec);
13982 }
13983
13984 case CONST_DECL:
13985 return fold (DECL_INITIAL (t));
13986
13987 default:
13988 return t;
13989 } /* switch (code) */
13990 }
13991
13992 #ifdef ENABLE_FOLD_CHECKING
13993 #undef fold
13994
13995 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13996 hash_table<pointer_hash<const tree_node> > *);
13997 static void fold_check_failed (const_tree, const_tree);
13998 void print_fold_checksum (const_tree);
13999
14000 /* When configured with --enable-checking=fold, compute a digest of
14001 EXPR before and after the actual fold call to verify that fold did
14002 not accidentally change the original EXPR. */
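/* Sketch of the scheme (illustrative):

	md5 (expr)  -> checksum_before
	ret = fold_1 (expr)
	md5 (expr)  -> checksum_after
	checksum_before != checksum_after  =>  internal_error

   fold_checksum_tree hashes the tree recursively, masking out the few
   fields (e.g. type caches) that fold is allowed to touch.  */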
14003
14004 tree
14005 fold (tree expr)
14006 {
14007 tree ret;
14008 struct md5_ctx ctx;
14009 unsigned char checksum_before[16], checksum_after[16];
14010 hash_table<pointer_hash<const tree_node> > ht (32);
14011
14012 md5_init_ctx (&ctx);
14013 fold_checksum_tree (expr, &ctx, &ht);
14014 md5_finish_ctx (&ctx, checksum_before);
14015 ht.empty ();
14016
14017 ret = fold_1 (expr);
14018
14019 md5_init_ctx (&ctx);
14020 fold_checksum_tree (expr, &ctx, &ht);
14021 md5_finish_ctx (&ctx, checksum_after);
14022
14023 if (memcmp (checksum_before, checksum_after, 16))
14024 fold_check_failed (expr, ret);
14025
14026 return ret;
14027 }
14028
14029 void
14030 print_fold_checksum (const_tree expr)
14031 {
14032 struct md5_ctx ctx;
14033 unsigned char checksum[16], cnt;
14034 hash_table<pointer_hash<const tree_node> > ht (32);
14035
14036 md5_init_ctx (&ctx);
14037 fold_checksum_tree (expr, &ctx, &ht);
14038 md5_finish_ctx (&ctx, checksum);
14039 for (cnt = 0; cnt < 16; ++cnt)
14040 fprintf (stderr, "%02x", checksum[cnt]);
14041 putc ('\n', stderr);
14042 }
14043
14044 static void
14045 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14046 {
14047 internal_error ("fold check: original tree changed by fold");
14048 }
14049
14050 static void
14051 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14052 hash_table<pointer_hash <const tree_node> > *ht)
14053 {
14054 const tree_node **slot;
14055 enum tree_code code;
14056 union tree_node buf;
14057 int i, len;
14058
14059 recursive_label:
14060 if (expr == NULL)
14061 return;
14062 slot = ht->find_slot (expr, INSERT);
14063 if (*slot != NULL)
14064 return;
14065 *slot = expr;
14066 code = TREE_CODE (expr);
14067 if (TREE_CODE_CLASS (code) == tcc_declaration
14068 && DECL_ASSEMBLER_NAME_SET_P (expr))
14069 {
14070 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14071 memcpy ((char *) &buf, expr, tree_size (expr));
14072 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14073 expr = (tree) &buf;
14074 }
14075 else if (TREE_CODE_CLASS (code) == tcc_type
14076 && (TYPE_POINTER_TO (expr)
14077 || TYPE_REFERENCE_TO (expr)
14078 || TYPE_CACHED_VALUES_P (expr)
14079 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14080 || TYPE_NEXT_VARIANT (expr)))
14081 {
14082 /* Allow these fields to be modified. */
14083 tree tmp;
14084 memcpy ((char *) &buf, expr, tree_size (expr));
14085 expr = tmp = (tree) &buf;
14086 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14087 TYPE_POINTER_TO (tmp) = NULL;
14088 TYPE_REFERENCE_TO (tmp) = NULL;
14089 TYPE_NEXT_VARIANT (tmp) = NULL;
14090 if (TYPE_CACHED_VALUES_P (tmp))
14091 {
14092 TYPE_CACHED_VALUES_P (tmp) = 0;
14093 TYPE_CACHED_VALUES (tmp) = NULL;
14094 }
14095 }
14096 md5_process_bytes (expr, tree_size (expr), ctx);
14097 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14098 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14099 if (TREE_CODE_CLASS (code) != tcc_type
14100 && TREE_CODE_CLASS (code) != tcc_declaration
14101 && code != TREE_LIST
14102 && code != SSA_NAME
14103 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14104 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14105 switch (TREE_CODE_CLASS (code))
14106 {
14107 case tcc_constant:
14108 switch (code)
14109 {
14110 case STRING_CST:
14111 md5_process_bytes (TREE_STRING_POINTER (expr),
14112 TREE_STRING_LENGTH (expr), ctx);
14113 break;
14114 case COMPLEX_CST:
14115 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14116 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14117 break;
14118 case VECTOR_CST:
14119 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14120 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14121 break;
14122 default:
14123 break;
14124 }
14125 break;
14126 case tcc_exceptional:
14127 switch (code)
14128 {
14129 case TREE_LIST:
14130 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14131 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14132 expr = TREE_CHAIN (expr);
14133 goto recursive_label;
14134 break;
14135 case TREE_VEC:
14136 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14137 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14138 break;
14139 default:
14140 break;
14141 }
14142 break;
14143 case tcc_expression:
14144 case tcc_reference:
14145 case tcc_comparison:
14146 case tcc_unary:
14147 case tcc_binary:
14148 case tcc_statement:
14149 case tcc_vl_exp:
14150 len = TREE_OPERAND_LENGTH (expr);
14151 for (i = 0; i < len; ++i)
14152 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14153 break;
14154 case tcc_declaration:
14155 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14156 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14157 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14158 {
14159 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14160 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14161 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14162 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14163 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14164 }
14165
14166 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14167 {
14168 if (TREE_CODE (expr) == FUNCTION_DECL)
14169 {
14170 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14171 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14172 }
14173 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14174 }
14175 break;
14176 case tcc_type:
14177 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14178 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14179 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14180 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14181 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14182 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14183 if (INTEGRAL_TYPE_P (expr)
14184 || SCALAR_FLOAT_TYPE_P (expr))
14185 {
14186 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14187 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14188 }
14189 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14190 if (TREE_CODE (expr) == RECORD_TYPE
14191 || TREE_CODE (expr) == UNION_TYPE
14192 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14193 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14194 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14195 break;
14196 default:
14197 break;
14198 }
14199 }
14200
14201 /* Helper function for outputting the checksum of a tree T. When
14202 debugging with gdb, you can "define mynext" to be "next" followed
14203 by "call debug_fold_checksum (op0)", then just trace down till the
14204 outputs differ. */
14205
14206 DEBUG_FUNCTION void
14207 debug_fold_checksum (const_tree t)
14208 {
14209 int i;
14210 unsigned char checksum[16];
14211 struct md5_ctx ctx;
14212 hash_table<pointer_hash<const tree_node> > ht (32);
14213
14214 md5_init_ctx (&ctx);
14215 fold_checksum_tree (t, &ctx, &ht);
14216 md5_finish_ctx (&ctx, checksum);
14217 ht.empty ();
14218
14219 for (i = 0; i < 16; i++)
14220 fprintf (stderr, "%d ", checksum[i]);
14221
14222 fprintf (stderr, "\n");
14223 }
14224
14225 #endif
14226
14227 /* Fold a unary tree expression with code CODE of type TYPE with an
14228 operand OP0. LOC is the location of the resulting expression.
14229 Return a folded expression if successful. Otherwise, return a tree
14230 expression with code CODE of type TYPE with an operand OP0. */
14231
14232 tree
14233 fold_build1_stat_loc (location_t loc,
14234 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14235 {
14236 tree tem;
14237 #ifdef ENABLE_FOLD_CHECKING
14238 unsigned char checksum_before[16], checksum_after[16];
14239 struct md5_ctx ctx;
14240 hash_table<pointer_hash<const tree_node> > ht (32);
14241
14242 md5_init_ctx (&ctx);
14243 fold_checksum_tree (op0, &ctx, &ht);
14244 md5_finish_ctx (&ctx, checksum_before);
14245 ht.empty ();
14246 #endif
14247
14248 tem = fold_unary_loc (loc, code, type, op0);
14249 if (!tem)
14250 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14251
14252 #ifdef ENABLE_FOLD_CHECKING
14253 md5_init_ctx (&ctx);
14254 fold_checksum_tree (op0, &ctx, &ht);
14255 md5_finish_ctx (&ctx, checksum_after);
14256
14257 if (memcmp (checksum_before, checksum_after, 16))
14258 fold_check_failed (op0, tem);
14259 #endif
14260 return tem;
14261 }
14262
14263 /* Fold a binary tree expression with code CODE of type TYPE with
14264 operands OP0 and OP1. LOC is the location of the resulting
14265 expression. Return a folded expression if successful. Otherwise,
14266 return a tree expression with code CODE of type TYPE with operands
14267 OP0 and OP1. */
14268
14269 tree
14270 fold_build2_stat_loc (location_t loc,
14271 enum tree_code code, tree type, tree op0, tree op1
14272 MEM_STAT_DECL)
14273 {
14274 tree tem;
14275 #ifdef ENABLE_FOLD_CHECKING
14276 unsigned char checksum_before_op0[16],
14277 checksum_before_op1[16],
14278 checksum_after_op0[16],
14279 checksum_after_op1[16];
14280 struct md5_ctx ctx;
14281 hash_table<pointer_hash<const tree_node> > ht (32);
14282
14283 md5_init_ctx (&ctx);
14284 fold_checksum_tree (op0, &ctx, &ht);
14285 md5_finish_ctx (&ctx, checksum_before_op0);
14286 ht.empty ();
14287
14288 md5_init_ctx (&ctx);
14289 fold_checksum_tree (op1, &ctx, &ht);
14290 md5_finish_ctx (&ctx, checksum_before_op1);
14291 ht.empty ();
14292 #endif
14293
14294 tem = fold_binary_loc (loc, code, type, op0, op1);
14295 if (!tem)
14296 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14297
14298 #ifdef ENABLE_FOLD_CHECKING
14299 md5_init_ctx (&ctx);
14300 fold_checksum_tree (op0, &ctx, &ht);
14301 md5_finish_ctx (&ctx, checksum_after_op0);
14302 ht.empty ();
14303
14304 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14305 fold_check_failed (op0, tem);
14306
14307 md5_init_ctx (&ctx);
14308 fold_checksum_tree (op1, &ctx, &ht);
14309 md5_finish_ctx (&ctx, checksum_after_op1);
14310
14311 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14312 fold_check_failed (op1, tem);
14313 #endif
14314 return tem;
14315 }
14316
14317 /* Fold a ternary tree expression with code CODE of type TYPE with
14318 operands OP0, OP1, and OP2. Return a folded expression if
14319 successful. Otherwise, return a tree expression with code CODE of
14320 type TYPE with operands OP0, OP1, and OP2. */
14321
14322 tree
14323 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14324 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14325 {
14326 tree tem;
14327 #ifdef ENABLE_FOLD_CHECKING
14328 unsigned char checksum_before_op0[16],
14329 checksum_before_op1[16],
14330 checksum_before_op2[16],
14331 checksum_after_op0[16],
14332 checksum_after_op1[16],
14333 checksum_after_op2[16];
14334 struct md5_ctx ctx;
14335 hash_table<pointer_hash<const tree_node> > ht (32);
14336
14337 md5_init_ctx (&ctx);
14338 fold_checksum_tree (op0, &ctx, &ht);
14339 md5_finish_ctx (&ctx, checksum_before_op0);
14340 ht.empty ();
14341
14342 md5_init_ctx (&ctx);
14343 fold_checksum_tree (op1, &ctx, &ht);
14344 md5_finish_ctx (&ctx, checksum_before_op1);
14345 ht.empty ();
14346
14347 md5_init_ctx (&ctx);
14348 fold_checksum_tree (op2, &ctx, &ht);
14349 md5_finish_ctx (&ctx, checksum_before_op2);
14350 ht.empty ();
14351 #endif
14352
14353 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14354 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14355 if (!tem)
14356 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14357
14358 #ifdef ENABLE_FOLD_CHECKING
14359 md5_init_ctx (&ctx);
14360 fold_checksum_tree (op0, &ctx, &ht);
14361 md5_finish_ctx (&ctx, checksum_after_op0);
14362 ht.empty ();
14363
14364 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14365 fold_check_failed (op0, tem);
14366
14367 md5_init_ctx (&ctx);
14368 fold_checksum_tree (op1, &ctx, &ht);
14369 md5_finish_ctx (&ctx, checksum_after_op1);
14370 ht.empty ();
14371
14372 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14373 fold_check_failed (op1, tem);
14374
14375 md5_init_ctx (&ctx);
14376 fold_checksum_tree (op2, &ctx, &ht);
14377 md5_finish_ctx (&ctx, checksum_after_op2);
14378
14379 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14380 fold_check_failed (op2, tem);
14381 #endif
14382 return tem;
14383 }
14384
14385 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14386 arguments in ARGARRAY, and a null static chain.
14387 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14388 of type TYPE from the given operands as constructed by build_call_array. */
14389
14390 tree
14391 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14392 int nargs, tree *argarray)
14393 {
14394 tree tem;
14395 #ifdef ENABLE_FOLD_CHECKING
14396 unsigned char checksum_before_fn[16],
14397 checksum_before_arglist[16],
14398 checksum_after_fn[16],
14399 checksum_after_arglist[16];
14400 struct md5_ctx ctx;
14401 hash_table<pointer_hash<const tree_node> > ht (32);
14402 int i;
14403
14404 md5_init_ctx (&ctx);
14405 fold_checksum_tree (fn, &ctx, &ht);
14406 md5_finish_ctx (&ctx, checksum_before_fn);
14407 ht.empty ();
14408
14409 md5_init_ctx (&ctx);
14410 for (i = 0; i < nargs; i++)
14411 fold_checksum_tree (argarray[i], &ctx, &ht);
14412 md5_finish_ctx (&ctx, checksum_before_arglist);
14413 ht.empty ();
14414 #endif
14415
14416 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14417 if (!tem)
14418 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14419
14420 #ifdef ENABLE_FOLD_CHECKING
14421 md5_init_ctx (&ctx);
14422 fold_checksum_tree (fn, &ctx, &ht);
14423 md5_finish_ctx (&ctx, checksum_after_fn);
14424 ht.empty ();
14425
14426 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14427 fold_check_failed (fn, tem);
14428
14429 md5_init_ctx (&ctx);
14430 for (i = 0; i < nargs; i++)
14431 fold_checksum_tree (argarray[i], &ctx, &ht);
14432 md5_finish_ctx (&ctx, checksum_after_arglist);
14433
14434 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14435 fold_check_failed (NULL_TREE, tem);
14436 #endif
14437 return tem;
14438 }
14439
14440 /* Perform constant folding and related simplification of initializer
14441 expressions. These functions behave identically to "fold_buildN" but
14442 ignore potential run-time traps and exceptions that fold must preserve. */
14443
14444 #define START_FOLD_INIT \
14445 int saved_signaling_nans = flag_signaling_nans;\
14446 int saved_trapping_math = flag_trapping_math;\
14447 int saved_rounding_math = flag_rounding_math;\
14448 int saved_trapv = flag_trapv;\
14449 int saved_folding_initializer = folding_initializer;\
14450 flag_signaling_nans = 0;\
14451 flag_trapping_math = 0;\
14452 flag_rounding_math = 0;\
14453 flag_trapv = 0;\
14454 folding_initializer = 1;
14455
14456 #define END_FOLD_INIT \
14457 flag_signaling_nans = saved_signaling_nans;\
14458 flag_trapping_math = saved_trapping_math;\
14459 flag_rounding_math = saved_rounding_math;\
14460 flag_trapv = saved_trapv;\
14461 folding_initializer = saved_folding_initializer;
14462
14463 tree
14464 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14465 tree type, tree op)
14466 {
14467 tree result;
14468 START_FOLD_INIT;
14469
14470 result = fold_build1_loc (loc, code, type, op);
14471
14472 END_FOLD_INIT;
14473 return result;
14474 }
14475
14476 tree
14477 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14478 tree type, tree op0, tree op1)
14479 {
14480 tree result;
14481 START_FOLD_INIT;
14482
14483 result = fold_build2_loc (loc, code, type, op0, op1);
14484
14485 END_FOLD_INIT;
14486 return result;
14487 }
14488
14489 tree
14490 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14491 int nargs, tree *argarray)
14492 {
14493 tree result;
14494 START_FOLD_INIT;
14495
14496 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14497
14498 END_FOLD_INIT;
14499 return result;
14500 }
14501
14502 #undef START_FOLD_INIT
14503 #undef END_FOLD_INIT
14504
14505 /* Determine if the first argument is a multiple of the second argument.
14506 Return 0 if it is not, or if we cannot easily determine it to be.
14507
14508 An example of the sort of thing we care about (at this point; this routine
14509 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14510 fold cases do now) is discovering that
14511
14512 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14513
14514 is a multiple of
14515
14516 SAVE_EXPR (J * 8)
14517
14518 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14519
14520 This code also handles discovering that
14521
14522 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14523
14524 is a multiple of 8 so we don't have to worry about dealing with a
14525 possible remainder.
14526
14527 Note that we *look* inside a SAVE_EXPR only to determine how it was
14528 calculated; it is not safe for fold to do much of anything else with the
14529 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14530 at run time. For example, the latter example above *cannot* be implemented
14531 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14532 evaluation time of the original SAVE_EXPR is not necessarily the same at
14533 the time the new expression is evaluated. The only optimization of this
14534 sort that would be valid is changing
14535
14536 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14537
14538 divided by 8 to
14539
14540 SAVE_EXPR (I) * SAVE_EXPR (J)
14541
14542 (where the same SAVE_EXPR (J) is used in the original and the
14543 transformed version). */
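/* E.g. (illustrative): multiple_of_p (sizetype, "i * 24", "8") returns
   1 through the MULT_EXPR case, because the constant operand 24 is a
   multiple of 8; "i + 4" against 8 returns 0, because PLUS_EXPR
   requires both operands to be multiples.  */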
14544
14545 int
14546 multiple_of_p (tree type, const_tree top, const_tree bottom)
14547 {
14548 if (operand_equal_p (top, bottom, 0))
14549 return 1;
14550
14551 if (TREE_CODE (type) != INTEGER_TYPE)
14552 return 0;
14553
14554 switch (TREE_CODE (top))
14555 {
14556 case BIT_AND_EXPR:
14557 /* Bitwise AND can only yield a power-of-two multiple: if either
14558 operand is a multiple of the power-of-two BOTTOM, then so is TOP. */
14559 if (!integer_pow2p (bottom))
14560 return 0;
14561 /* FALLTHRU */
14562
14563 case MULT_EXPR:
14564 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14565 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14566
14567 case PLUS_EXPR:
14568 case MINUS_EXPR:
14569 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14570 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14571
14572 case LSHIFT_EXPR:
14573 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14574 {
14575 tree op1, t1;
14576
14577 op1 = TREE_OPERAND (top, 1);
14578 /* const_binop may not detect overflow correctly,
14579 so check for it explicitly here. */
14580 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14581 && 0 != (t1 = fold_convert (type,
14582 const_binop (LSHIFT_EXPR,
14583 size_one_node,
14584 op1)))
14585 && !TREE_OVERFLOW (t1))
14586 return multiple_of_p (type, t1, bottom);
14587 }
14588 return 0;
14589
14590 case NOP_EXPR:
14591 /* Can't handle conversions from non-integral or wider integral type. */
14592 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14593 || (TYPE_PRECISION (type)
14594 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14595 return 0;
14596
14597 /* ... fall through ... */
14598
14599 case SAVE_EXPR:
14600 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14601
14602 case COND_EXPR:
14603 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14604 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14605
14606 case INTEGER_CST:
14607 if (TREE_CODE (bottom) != INTEGER_CST
14608 || integer_zerop (bottom)
14609 || (TYPE_UNSIGNED (type)
14610 && (tree_int_cst_sgn (top) < 0
14611 || tree_int_cst_sgn (bottom) < 0)))
14612 return 0;
14613 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14614 SIGNED);
14615
14616 default:
14617 return 0;
14618 }
14619 }
14620
14621 /* Return true if CODE or TYPE is known to be non-negative. */
14622
14623 static bool
14624 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14625 {
14626 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14627 && truth_value_p (code))
14628 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14629 have a signed:1 type (where the values are -1 and 0). */
14630 return true;
14631 return false;
14632 }
14633
14634 /* Return true if (CODE OP0) is known to be non-negative. If the return
14635 value is based on the assumption that signed overflow is undefined,
14636 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14637 *STRICT_OVERFLOW_P. */
14638
14639 bool
14640 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14641 bool *strict_overflow_p)
14642 {
14643 if (TYPE_UNSIGNED (type))
14644 return true;
14645
14646 switch (code)
14647 {
14648 case ABS_EXPR:
14649 /* We can't return 1 if flag_wrapv is set because
14650 ABS_EXPR<INT_MIN> = INT_MIN. */
14651 if (!INTEGRAL_TYPE_P (type))
14652 return true;
14653 if (TYPE_OVERFLOW_UNDEFINED (type))
14654 {
14655 *strict_overflow_p = true;
14656 return true;
14657 }
14658 break;
14659
14660 case NON_LVALUE_EXPR:
14661 case FLOAT_EXPR:
14662 case FIX_TRUNC_EXPR:
14663 return tree_expr_nonnegative_warnv_p (op0,
14664 strict_overflow_p);
14665
14666 CASE_CONVERT:
14667 {
14668 tree inner_type = TREE_TYPE (op0);
14669 tree outer_type = type;
14670
14671 if (TREE_CODE (outer_type) == REAL_TYPE)
14672 {
14673 if (TREE_CODE (inner_type) == REAL_TYPE)
14674 return tree_expr_nonnegative_warnv_p (op0,
14675 strict_overflow_p);
14676 if (INTEGRAL_TYPE_P (inner_type))
14677 {
14678 if (TYPE_UNSIGNED (inner_type))
14679 return true;
14680 return tree_expr_nonnegative_warnv_p (op0,
14681 strict_overflow_p);
14682 }
14683 }
14684 else if (INTEGRAL_TYPE_P (outer_type))
14685 {
14686 if (TREE_CODE (inner_type) == REAL_TYPE)
14687 return tree_expr_nonnegative_warnv_p (op0,
14688 strict_overflow_p);
14689 if (INTEGRAL_TYPE_P (inner_type))
14690 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14691 && TYPE_UNSIGNED (inner_type);
14692 }
14693 }
14694 break;
14695
14696 default:
14697 return tree_simple_nonnegative_warnv_p (code, type);
14698 }
14699
14700 /* We don't know sign of `t', so be conservative and return false. */
14701 return false;
14702 }
14703
14704 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14705 value is based on the assumption that signed overflow is undefined,
14706 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14707 *STRICT_OVERFLOW_P. */
14708
14709 bool
14710 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14711 tree op1, bool *strict_overflow_p)
14712 {
14713 if (TYPE_UNSIGNED (type))
14714 return true;
14715
14716 switch (code)
14717 {
14718 case POINTER_PLUS_EXPR:
14719 case PLUS_EXPR:
14720 if (FLOAT_TYPE_P (type))
14721 return (tree_expr_nonnegative_warnv_p (op0,
14722 strict_overflow_p)
14723 && tree_expr_nonnegative_warnv_p (op1,
14724 strict_overflow_p));
14725
14726 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14727 both unsigned and at least 2 bits shorter than the result. */
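/* E.g. (illustrative): with 32-bit int,
   "(int) (unsigned char) x + (int) (unsigned char) y" is at most
   255 + 255 = 510, well below the sign bit, so the sum is known
   non-negative.  */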
14728 if (TREE_CODE (type) == INTEGER_TYPE
14729 && TREE_CODE (op0) == NOP_EXPR
14730 && TREE_CODE (op1) == NOP_EXPR)
14731 {
14732 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14733 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14734 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14735 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14736 {
14737 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14738 TYPE_PRECISION (inner2)) + 1;
14739 return prec < TYPE_PRECISION (type);
14740 }
14741 }
14742 break;
14743
14744 case MULT_EXPR:
14745 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14746 {
14747 /* x * x is always non-negative for floating point x, and for
14748 integers when overflow is undefined. */
14749 if (operand_equal_p (op0, op1, 0)
14750 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14751 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14752 {
14753 if (ANY_INTEGRAL_TYPE_P (type)
14754 && TYPE_OVERFLOW_UNDEFINED (type))
14755 *strict_overflow_p = true;
14756 return true;
14757 }
14758 }
14759
14760 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14761 both unsigned and the sum of their precisions is less than the result's. */
14762 if (TREE_CODE (type) == INTEGER_TYPE
14763 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14764 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14765 {
14766 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14767 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14768 : TREE_TYPE (op0);
14769 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14770 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14771 : TREE_TYPE (op1);
14772
14773 bool unsigned0 = TYPE_UNSIGNED (inner0);
14774 bool unsigned1 = TYPE_UNSIGNED (inner1);
14775
14776 if (TREE_CODE (op0) == INTEGER_CST)
14777 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14778
14779 if (TREE_CODE (op1) == INTEGER_CST)
14780 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14781
14782 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14783 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14784 {
14785 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14786 ? tree_int_cst_min_precision (op0, UNSIGNED)
14787 : TYPE_PRECISION (inner0);
14788
14789 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14790 ? tree_int_cst_min_precision (op1, UNSIGNED)
14791 : TYPE_PRECISION (inner1);
14792
14793 return precision0 + precision1 < TYPE_PRECISION (type);
14794 }
14795 }
14796 return false;
14797
14798 case BIT_AND_EXPR:
14799 case MAX_EXPR:
14800 return (tree_expr_nonnegative_warnv_p (op0,
14801 strict_overflow_p)
14802 || tree_expr_nonnegative_warnv_p (op1,
14803 strict_overflow_p));
14804
14805 case BIT_IOR_EXPR:
14806 case BIT_XOR_EXPR:
14807 case MIN_EXPR:
14808 case RDIV_EXPR:
14809 case TRUNC_DIV_EXPR:
14810 case CEIL_DIV_EXPR:
14811 case FLOOR_DIV_EXPR:
14812 case ROUND_DIV_EXPR:
14813 return (tree_expr_nonnegative_warnv_p (op0,
14814 strict_overflow_p)
14815 && tree_expr_nonnegative_warnv_p (op1,
14816 strict_overflow_p));
14817
14818 case TRUNC_MOD_EXPR:
14819 case CEIL_MOD_EXPR:
14820 case FLOOR_MOD_EXPR:
14821 case ROUND_MOD_EXPR:
14822 return tree_expr_nonnegative_warnv_p (op0,
14823 strict_overflow_p);
14824 default:
14825 return tree_simple_nonnegative_warnv_p (code, type);
14826 }
14827
14828 /* We don't know sign of `t', so be conservative and return false. */
14829 return false;
14830 }
14831
14832 /* Return true if T is known to be non-negative. If the return
14833 value is based on the assumption that signed overflow is undefined,
14834 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14835 *STRICT_OVERFLOW_P. */
14836
14837 bool
14838 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14839 {
14840 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14841 return true;
14842
14843 switch (TREE_CODE (t))
14844 {
14845 case INTEGER_CST:
14846 return tree_int_cst_sgn (t) >= 0;
14847
14848 case REAL_CST:
14849 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14850
14851 case FIXED_CST:
14852 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14853
14854 case COND_EXPR:
14855 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14856 strict_overflow_p)
14857 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14858 strict_overflow_p));
14859 default:
14860 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14861 TREE_TYPE (t));
14862 }
14863 /* We don't know sign of `t', so be conservative and return false. */
14864 return false;
14865 }
14866
14867 /* Return true if T is known to be non-negative. If the return
14868 value is based on the assumption that signed overflow is undefined,
14869 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14870 *STRICT_OVERFLOW_P. */
14871
14872 bool
14873 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14874 tree arg0, tree arg1, bool *strict_overflow_p)
14875 {
14876 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14877 switch (DECL_FUNCTION_CODE (fndecl))
14878 {
14879 CASE_FLT_FN (BUILT_IN_ACOS):
14880 CASE_FLT_FN (BUILT_IN_ACOSH):
14881 CASE_FLT_FN (BUILT_IN_CABS):
14882 CASE_FLT_FN (BUILT_IN_COSH):
14883 CASE_FLT_FN (BUILT_IN_ERFC):
14884 CASE_FLT_FN (BUILT_IN_EXP):
14885 CASE_FLT_FN (BUILT_IN_EXP10):
14886 CASE_FLT_FN (BUILT_IN_EXP2):
14887 CASE_FLT_FN (BUILT_IN_FABS):
14888 CASE_FLT_FN (BUILT_IN_FDIM):
14889 CASE_FLT_FN (BUILT_IN_HYPOT):
14890 CASE_FLT_FN (BUILT_IN_POW10):
14891 CASE_INT_FN (BUILT_IN_FFS):
14892 CASE_INT_FN (BUILT_IN_PARITY):
14893 CASE_INT_FN (BUILT_IN_POPCOUNT):
14894 CASE_INT_FN (BUILT_IN_CLZ):
14895 CASE_INT_FN (BUILT_IN_CLRSB):
14896 case BUILT_IN_BSWAP32:
14897 case BUILT_IN_BSWAP64:
14898 /* Always true. */
14899 return true;
14900
14901 CASE_FLT_FN (BUILT_IN_SQRT):
14902 /* sqrt(-0.0) is -0.0. */
14903 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14904 return true;
14905 return tree_expr_nonnegative_warnv_p (arg0,
14906 strict_overflow_p);
14907
14908 CASE_FLT_FN (BUILT_IN_ASINH):
14909 CASE_FLT_FN (BUILT_IN_ATAN):
14910 CASE_FLT_FN (BUILT_IN_ATANH):
14911 CASE_FLT_FN (BUILT_IN_CBRT):
14912 CASE_FLT_FN (BUILT_IN_CEIL):
14913 CASE_FLT_FN (BUILT_IN_ERF):
14914 CASE_FLT_FN (BUILT_IN_EXPM1):
14915 CASE_FLT_FN (BUILT_IN_FLOOR):
14916 CASE_FLT_FN (BUILT_IN_FMOD):
14917 CASE_FLT_FN (BUILT_IN_FREXP):
14918 CASE_FLT_FN (BUILT_IN_ICEIL):
14919 CASE_FLT_FN (BUILT_IN_IFLOOR):
14920 CASE_FLT_FN (BUILT_IN_IRINT):
14921 CASE_FLT_FN (BUILT_IN_IROUND):
14922 CASE_FLT_FN (BUILT_IN_LCEIL):
14923 CASE_FLT_FN (BUILT_IN_LDEXP):
14924 CASE_FLT_FN (BUILT_IN_LFLOOR):
14925 CASE_FLT_FN (BUILT_IN_LLCEIL):
14926 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14927 CASE_FLT_FN (BUILT_IN_LLRINT):
14928 CASE_FLT_FN (BUILT_IN_LLROUND):
14929 CASE_FLT_FN (BUILT_IN_LRINT):
14930 CASE_FLT_FN (BUILT_IN_LROUND):
14931 CASE_FLT_FN (BUILT_IN_MODF):
14932 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14933 CASE_FLT_FN (BUILT_IN_RINT):
14934 CASE_FLT_FN (BUILT_IN_ROUND):
14935 CASE_FLT_FN (BUILT_IN_SCALB):
14936 CASE_FLT_FN (BUILT_IN_SCALBLN):
14937 CASE_FLT_FN (BUILT_IN_SCALBN):
14938 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14939 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14940 CASE_FLT_FN (BUILT_IN_SINH):
14941 CASE_FLT_FN (BUILT_IN_TANH):
14942 CASE_FLT_FN (BUILT_IN_TRUNC):
14943 /* True if the 1st argument is nonnegative. */
14944 return tree_expr_nonnegative_warnv_p (arg0,
14945 strict_overflow_p);
14946
14947 CASE_FLT_FN (BUILT_IN_FMAX):
14948 /* True if the 1st OR 2nd arguments are nonnegative. */
14949 return (tree_expr_nonnegative_warnv_p (arg0,
14950 strict_overflow_p)
14951 || (tree_expr_nonnegative_warnv_p (arg1,
14952 strict_overflow_p)));
14953
14954 CASE_FLT_FN (BUILT_IN_FMIN):
14955 /* True if the 1st AND 2nd arguments are nonnegative. */
14956 return (tree_expr_nonnegative_warnv_p (arg0,
14957 strict_overflow_p)
14958 && (tree_expr_nonnegative_warnv_p (arg1,
14959 strict_overflow_p)));
14960
14961 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14962 /* True if the 2nd argument is nonnegative. */
14963 return tree_expr_nonnegative_warnv_p (arg1,
14964 strict_overflow_p);
14965
14966 CASE_FLT_FN (BUILT_IN_POWI):
14967 /* True if the 1st argument is nonnegative or the second
14968 argument is an even integer. */
14969 if (TREE_CODE (arg1) == INTEGER_CST
14970 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14971 return true;
14972 return tree_expr_nonnegative_warnv_p (arg0,
14973 strict_overflow_p);
14974
14975 CASE_FLT_FN (BUILT_IN_POW):
14976 /* True if the 1st argument is nonnegative or the second
14977 argument is an even integer valued real. */
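/* E.g. (illustrative): pow (x, 2.0) is taken to be non-negative
   regardless of x, while pow (x, 2.5) is known non-negative only when
   x is.  */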
14978 if (TREE_CODE (arg1) == REAL_CST)
14979 {
14980 REAL_VALUE_TYPE c;
14981 HOST_WIDE_INT n;
14982
14983 c = TREE_REAL_CST (arg1);
14984 n = real_to_integer (&c);
14985 if ((n & 1) == 0)
14986 {
14987 REAL_VALUE_TYPE cint;
14988 real_from_integer (&cint, VOIDmode, n, SIGNED);
14989 if (real_identical (&c, &cint))
14990 return true;
14991 }
14992 }
14993 return tree_expr_nonnegative_warnv_p (arg0,
14994 strict_overflow_p);
14995
14996 default:
14997 break;
14998 }
14999 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15000 type);
15001 }
15002
15003 /* Return true if T is known to be non-negative. If the return
15004 value is based on the assumption that signed overflow is undefined,
15005 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15006 *STRICT_OVERFLOW_P. */
15007
15008 static bool
15009 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15010 {
15011 enum tree_code code = TREE_CODE (t);
15012 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15013 return true;
15014
15015 switch (code)
15016 {
15017 case TARGET_EXPR:
15018 {
15019 tree temp = TARGET_EXPR_SLOT (t);
15020 t = TARGET_EXPR_INITIAL (t);
15021
15022 /* If the initializer is non-void, then it's a normal expression
15023 that will be assigned to the slot. */
15024 if (!VOID_TYPE_P (t))
15025 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15026
15027 /* Otherwise, the initializer sets the slot in some way. One common
15028 way is an assignment statement at the end of the initializer. */
15029 while (1)
15030 {
15031 if (TREE_CODE (t) == BIND_EXPR)
15032 t = expr_last (BIND_EXPR_BODY (t));
15033 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15034 || TREE_CODE (t) == TRY_CATCH_EXPR)
15035 t = expr_last (TREE_OPERAND (t, 0));
15036 else if (TREE_CODE (t) == STATEMENT_LIST)
15037 t = expr_last (t);
15038 else
15039 break;
15040 }
15041 if (TREE_CODE (t) == MODIFY_EXPR
15042 && TREE_OPERAND (t, 0) == temp)
15043 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15044 strict_overflow_p);
15045
15046 return false;
15047 }
15048
15049 case CALL_EXPR:
15050 {
15051 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15052 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15053
15054 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15055 get_callee_fndecl (t),
15056 arg0,
15057 arg1,
15058 strict_overflow_p);
15059 }
15060 case COMPOUND_EXPR:
15061 case MODIFY_EXPR:
15062 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15063 strict_overflow_p);
15064 case BIND_EXPR:
15065 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15066 strict_overflow_p);
15067 case SAVE_EXPR:
15068 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15069 strict_overflow_p);
15070
15071 default:
15072 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15073 TREE_TYPE (t));
15074 }
15075
15076 /* We don't know sign of `t', so be conservative and return false. */
15077 return false;
15078 }
15079
15080 /* Return true if T is known to be non-negative. If the return
15081 value is based on the assumption that signed overflow is undefined,
15082 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15083 *STRICT_OVERFLOW_P. */
15084
15085 bool
15086 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15087 {
15088 enum tree_code code;
15089 if (t == error_mark_node)
15090 return false;
15091
15092 code = TREE_CODE (t);
15093 switch (TREE_CODE_CLASS (code))
15094 {
15095 case tcc_binary:
15096 case tcc_comparison:
15097 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15098 TREE_TYPE (t),
15099 TREE_OPERAND (t, 0),
15100 TREE_OPERAND (t, 1),
15101 strict_overflow_p);
15102
15103 case tcc_unary:
15104 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15105 TREE_TYPE (t),
15106 TREE_OPERAND (t, 0),
15107 strict_overflow_p);
15108
15109 case tcc_constant:
15110 case tcc_declaration:
15111 case tcc_reference:
15112 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15113
15114 default:
15115 break;
15116 }
15117
15118 switch (code)
15119 {
15120 case TRUTH_AND_EXPR:
15121 case TRUTH_OR_EXPR:
15122 case TRUTH_XOR_EXPR:
15123 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15124 TREE_TYPE (t),
15125 TREE_OPERAND (t, 0),
15126 TREE_OPERAND (t, 1),
15127 strict_overflow_p);
15128 case TRUTH_NOT_EXPR:
15129 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15130 TREE_TYPE (t),
15131 TREE_OPERAND (t, 0),
15132 strict_overflow_p);
15133
15134 case COND_EXPR:
15135 case CONSTRUCTOR:
15136 case OBJ_TYPE_REF:
15137 case ASSERT_EXPR:
15138 case ADDR_EXPR:
15139 case WITH_SIZE_EXPR:
15140 case SSA_NAME:
15141 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15142
15143 default:
15144 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15145 }
15146 }
15147
15148 /* Return true if `t' is known to be non-negative. Handle warnings
15149 about undefined signed overflow. */
15150
15151 bool
15152 tree_expr_nonnegative_p (tree t)
15153 {
15154 bool ret, strict_overflow_p;
15155
15156 strict_overflow_p = false;
15157 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15158 if (strict_overflow_p)
15159 fold_overflow_warning (("assuming signed overflow does not occur when "
15160 "determining that expression is always "
15161 "non-negative"),
15162 WARN_STRICT_OVERFLOW_MISC);
15163 return ret;
15164 }
15165
15166
15167 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15168 For floating point we further ensure that T is not denormal.
15169 Similar logic is present in nonzero_address_p in rtlanal.c.
15170
15171 If the return value is based on the assumption that signed overflow
15172 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15173 change *STRICT_OVERFLOW_P. */
15174
15175 bool
15176 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15177 bool *strict_overflow_p)
15178 {
15179 switch (code)
15180 {
15181 case ABS_EXPR:
15182 return tree_expr_nonzero_warnv_p (op0,
15183 strict_overflow_p);
15184
15185 case NOP_EXPR:
15186 {
15187 tree inner_type = TREE_TYPE (op0);
15188 tree outer_type = type;
15189
15190 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15191 && tree_expr_nonzero_warnv_p (op0,
15192 strict_overflow_p));
15193 }
15194 break;
15195
15196 case NON_LVALUE_EXPR:
15197 return tree_expr_nonzero_warnv_p (op0,
15198 strict_overflow_p);
15199
15200 default:
15201 break;
15202 }
15203
15204 return false;
15205 }
15206
15207 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15208 For floating point we further ensure that T is not denormal.
15209 Similar logic is present in nonzero_address_p in rtlanal.c.
15210
15211 If the return value is based on the assumption that signed overflow
15212 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15213 change *STRICT_OVERFLOW_P. */
15214
15215 bool
15216 tree_binary_nonzero_warnv_p (enum tree_code code,
15217 tree type,
15218 tree op0,
15219 tree op1, bool *strict_overflow_p)
15220 {
15221 bool sub_strict_overflow_p;
15222 switch (code)
15223 {
15224 case POINTER_PLUS_EXPR:
15225 case PLUS_EXPR:
15226 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15227 {
15228 	  /* In the presence of negative values it is hard to say
15229 	     anything definite.  */
15230 sub_strict_overflow_p = false;
15231 if (!tree_expr_nonnegative_warnv_p (op0,
15232 &sub_strict_overflow_p)
15233 || !tree_expr_nonnegative_warnv_p (op1,
15234 &sub_strict_overflow_p))
15235 return false;
15236 	  /* One of the operands must be positive and the other non-negative.  */
15237 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
15238 	     overflows, on a two's-complement machine the sum of a positive
15239 	     and a non-negative number can never be zero.  */
15240 return (tree_expr_nonzero_warnv_p (op0,
15241 strict_overflow_p)
15242 || tree_expr_nonzero_warnv_p (op1,
15243 strict_overflow_p));
15244 }
15245 break;
15246
15247 case MULT_EXPR:
15248 if (TYPE_OVERFLOW_UNDEFINED (type))
15249 {
15250 if (tree_expr_nonzero_warnv_p (op0,
15251 strict_overflow_p)
15252 && tree_expr_nonzero_warnv_p (op1,
15253 strict_overflow_p))
15254 {
15255 *strict_overflow_p = true;
15256 return true;
15257 }
15258 }
15259 break;
15260
15261 case MIN_EXPR:
15262 sub_strict_overflow_p = false;
15263 if (tree_expr_nonzero_warnv_p (op0,
15264 &sub_strict_overflow_p)
15265 && tree_expr_nonzero_warnv_p (op1,
15266 &sub_strict_overflow_p))
15267 	{
15268 	  if (sub_strict_overflow_p)
15269 	    *strict_overflow_p = true;
	  /* The MIN of two nonzero values is one of them, hence nonzero.  */
	  return true;
15270 	}
15271 break;
15272
15273 case MAX_EXPR:
15274 sub_strict_overflow_p = false;
15275 if (tree_expr_nonzero_warnv_p (op0,
15276 &sub_strict_overflow_p))
15277 {
15278 if (sub_strict_overflow_p)
15279 *strict_overflow_p = true;
15280
15281 	  /* When both operands are nonzero, MAX must be too.  */
15282 if (tree_expr_nonzero_warnv_p (op1,
15283 strict_overflow_p))
15284 return true;
15285
15286 /* MAX where operand 0 is positive is positive. */
15287 return tree_expr_nonnegative_warnv_p (op0,
15288 strict_overflow_p);
15289 }
15290 /* MAX where operand 1 is positive is positive. */
15291 else if (tree_expr_nonzero_warnv_p (op1,
15292 &sub_strict_overflow_p)
15293 && tree_expr_nonnegative_warnv_p (op1,
15294 &sub_strict_overflow_p))
15295 {
15296 if (sub_strict_overflow_p)
15297 *strict_overflow_p = true;
15298 return true;
15299 }
15300 break;
15301
15302 case BIT_IOR_EXPR:
15303 return (tree_expr_nonzero_warnv_p (op1,
15304 strict_overflow_p)
15305 || tree_expr_nonzero_warnv_p (op0,
15306 strict_overflow_p));
15307
15308 default:
15309 break;
15310 }
15311
15312 return false;
15313 }
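/* Editorial illustration of the overflow bookkeeping above: for signed
   X and Y in a type with undefined overflow, X * Y is reported nonzero
   when both factors are nonzero, and *STRICT_OVERFLOW_P is set because
   the conclusion assumes the product does not wrap to zero.  The
   PLUS_EXPR case deliberately leaves *STRICT_OVERFLOW_P untouched: the
   sum of a positive and a non-negative value cannot be zero on a
   two's-complement machine even if it wraps.  */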
15314
15315 /* Return true when T is known to be nonzero.
15316    For floating point we further ensure that T is not denormal.
15317    Similar logic is present in nonzero_address_p in rtlanal.c.
15318
15319 If the return value is based on the assumption that signed overflow
15320 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15321 change *STRICT_OVERFLOW_P. */
15322
15323 bool
15324 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15325 {
15326 bool sub_strict_overflow_p;
15327 switch (TREE_CODE (t))
15328 {
15329 case INTEGER_CST:
15330 return !integer_zerop (t);
15331
15332 case ADDR_EXPR:
15333 {
15334 tree base = TREE_OPERAND (t, 0);
15335
15336 if (!DECL_P (base))
15337 base = get_base_address (base);
15338
15339 if (!base)
15340 return false;
15341
15342 	/* For objects in the symbol table, check whether we know they are
15343 	   non-zero.  Don't do anything for variables and functions before
15344 	   the symtab is built; they may yet be declared weak.  */
15345 if (DECL_P (base) && decl_in_symtab_p (base))
15346 {
15347 struct symtab_node *symbol;
15348
15349 symbol = symtab_node::get_create (base);
15350 if (symbol)
15351 return symbol->nonzero_address ();
15352 else
15353 return false;
15354 }
15355
15356 /* Function local objects are never NULL. */
15357 if (DECL_P (base)
15358 && (DECL_CONTEXT (base)
15359 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15360 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15361 return true;
15362
15363 /* Constants are never weak. */
15364 if (CONSTANT_CLASS_P (base))
15365 return true;
15366
15367 return false;
15368 }
15369
15370 case COND_EXPR:
15371 sub_strict_overflow_p = false;
15372 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15373 &sub_strict_overflow_p)
15374 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15375 &sub_strict_overflow_p))
15376 {
15377 if (sub_strict_overflow_p)
15378 *strict_overflow_p = true;
15379 return true;
15380 }
15381 break;
15382
15383 default:
15384 break;
15385 }
15386 return false;
15387 }
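/* Editorial illustration: inside

     int f (void) { int local; return g (&local); }

   the ADDR_EXPR &local satisfies the "function local objects are never
   NULL" test above, so it folds as nonzero; the address of a global is
   instead referred to symtab_node::nonzero_address, since the global
   may still end up weak (and thus possibly null).  */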
15388
15389 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15390 attempt to fold the expression to a constant without modifying TYPE,
15391 OP0 or OP1.
15392
15393 If the expression could be simplified to a constant, then return
15394 the constant. If the expression would not be simplified to a
15395 constant, then return NULL_TREE. */
15396
15397 tree
15398 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15399 {
15400 tree tem = fold_binary (code, type, op0, op1);
15401 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15402 }
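/* Usage sketch (editorial; the operands are made up for the example):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					 two, three);

   SUM is the INTEGER_CST 5; if folding had not produced a constant,
   the result would be NULL_TREE rather than a PLUS_EXPR node.  */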
15403
15404 /* Given the components of a unary expression CODE, TYPE and OP0,
15405 attempt to fold the expression to a constant without modifying
15406 TYPE or OP0.
15407
15408 If the expression could be simplified to a constant, then return
15409 the constant. If the expression would not be simplified to a
15410 constant, then return NULL_TREE. */
15411
15412 tree
15413 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15414 {
15415 tree tem = fold_unary (code, type, op0);
15416 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15417 }
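/* Likewise, an editorial sketch for the unary case:

     tree neg = fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
					build_int_cst (integer_type_node, 7));

   yields the INTEGER_CST -7, or NULL_TREE when nothing constant
   results.  */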
15418
15419 /* If EXP represents referencing an element in a constant string
15420 (either via pointer arithmetic or array indexing), return the
15421    tree representing the value accessed; otherwise return NULL.  */
15422
15423 tree
15424 fold_read_from_constant_string (tree exp)
15425 {
15426 if ((TREE_CODE (exp) == INDIRECT_REF
15427 || TREE_CODE (exp) == ARRAY_REF)
15428 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15429 {
15430 tree exp1 = TREE_OPERAND (exp, 0);
15431 tree index;
15432 tree string;
15433 location_t loc = EXPR_LOCATION (exp);
15434
15435 if (TREE_CODE (exp) == INDIRECT_REF)
15436 string = string_constant (exp1, &index);
15437 else
15438 {
15439 tree low_bound = array_ref_low_bound (exp);
15440 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15441
15442 /* Optimize the special-case of a zero lower bound.
15443
15444 We convert the low_bound to sizetype to avoid some problems
15445 with constant folding. (E.g. suppose the lower bound is 1,
15446 	     and its mode is QI.  Without the conversion, (ARRAY
15447 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15448 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15449 if (! integer_zerop (low_bound))
15450 index = size_diffop_loc (loc, index,
15451 fold_convert_loc (loc, sizetype, low_bound));
15452
15453 string = exp1;
15454 }
15455
15456 if (string
15457 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15458 && TREE_CODE (string) == STRING_CST
15459 && TREE_CODE (index) == INTEGER_CST
15460 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15461 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15462 == MODE_INT)
15463 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15464 return build_int_cst_type (TREE_TYPE (exp),
15465 (TREE_STRING_POINTER (string)
15466 [TREE_INT_CST_LOW (index)]));
15467 }
15468 return NULL;
15469 }
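/* Editorial illustration: for the C expression "abc"[1], EXP arrives as
   an ARRAY_REF of a STRING_CST with index 1; the checks above confirm
   the index is an INTEGER_CST within the string bounds and that the
   element type has a one-byte integer mode, so the INTEGER_CST 'b' is
   returned in the type of EXP.  */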
15470
15471 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15472 an integer constant, real, or fixed-point constant.
15473
15474 TYPE is the type of the result. */
15475
15476 static tree
15477 fold_negate_const (tree arg0, tree type)
15478 {
15479 tree t = NULL_TREE;
15480
15481 switch (TREE_CODE (arg0))
15482 {
15483 case INTEGER_CST:
15484 {
15485 bool overflow;
15486 wide_int val = wi::neg (arg0, &overflow);
15487 t = force_fit_type (type, val, 1,
15488 (overflow | TREE_OVERFLOW (arg0))
15489 && !TYPE_UNSIGNED (type));
15490 break;
15491 }
15492
15493 case REAL_CST:
15494 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15495 break;
15496
15497 case FIXED_CST:
15498 {
15499 FIXED_VALUE_TYPE f;
15500 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15501 &(TREE_FIXED_CST (arg0)), NULL,
15502 TYPE_SATURATING (type));
15503 t = build_fixed (type, f);
15504 /* Propagate overflow flags. */
15505 if (overflow_p | TREE_OVERFLOW (arg0))
15506 TREE_OVERFLOW (t) = 1;
15507 break;
15508 }
15509
15510 default:
15511 gcc_unreachable ();
15512 }
15513
15514 return t;
15515 }
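/* Editorial illustration: negating the most negative INTEGER_CST of a
   signed type is not representable, so wi::neg reports overflow and the
   result from force_fit_type carries TREE_OVERFLOW; a REAL_CST is
   negated exactly via real_value_negate, with no overflow to track.  */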
15516
15517 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15518 an integer constant or real constant.
15519
15520 TYPE is the type of the result. */
15521
15522 tree
15523 fold_abs_const (tree arg0, tree type)
15524 {
15525 tree t = NULL_TREE;
15526
15527 switch (TREE_CODE (arg0))
15528 {
15529 case INTEGER_CST:
15530 {
15531 /* If the value is unsigned or non-negative, then the absolute value
15532 is the same as the ordinary value. */
15533 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15534 t = arg0;
15535
15536 /* If the value is negative, then the absolute value is
15537 its negation. */
15538 else
15539 {
15540 bool overflow;
15541 wide_int val = wi::neg (arg0, &overflow);
15542 t = force_fit_type (type, val, -1,
15543 overflow | TREE_OVERFLOW (arg0));
15544 }
15545 }
15546 break;
15547
15548 case REAL_CST:
15549 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15550 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15551 else
15552 t = arg0;
15553 break;
15554
15555 default:
15556 gcc_unreachable ();
15557 }
15558
15559 return t;
15560 }
15561
15562 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15563 constant. TYPE is the type of the result. */
15564
15565 static tree
15566 fold_not_const (const_tree arg0, tree type)
15567 {
15568 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15569
15570 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15571 }
15572
15573 /* Given CODE, a relational operator, the target type TYPE, and two
15574    constant operands OP0 and OP1, return the result of the
15575 relational operation. If the result is not a compile time
15576 constant, then return NULL_TREE. */
15577
15578 static tree
15579 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15580 {
15581 int result, invert;
15582
15583 /* From here on, the only cases we handle are when the result is
15584 known to be a constant. */
15585
15586 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15587 {
15588 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15589 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15590
15591 /* Handle the cases where either operand is a NaN. */
15592 if (real_isnan (c0) || real_isnan (c1))
15593 {
15594 switch (code)
15595 {
15596 case EQ_EXPR:
15597 case ORDERED_EXPR:
15598 result = 0;
15599 break;
15600
15601 case NE_EXPR:
15602 case UNORDERED_EXPR:
15603 case UNLT_EXPR:
15604 case UNLE_EXPR:
15605 case UNGT_EXPR:
15606 case UNGE_EXPR:
15607 case UNEQ_EXPR:
15608 result = 1;
15609 break;
15610
15611 case LT_EXPR:
15612 case LE_EXPR:
15613 case GT_EXPR:
15614 case GE_EXPR:
15615 case LTGT_EXPR:
15616 if (flag_trapping_math)
15617 return NULL_TREE;
15618 result = 0;
15619 break;
15620
15621 default:
15622 gcc_unreachable ();
15623 }
15624
15625 return constant_boolean_node (result, type);
15626 }
15627
15628 return constant_boolean_node (real_compare (code, c0, c1), type);
15629 }
15630
15631 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15632 {
15633 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15634 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15635 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15636 }
15637
15638 /* Handle equality/inequality of complex constants. */
15639 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15640 {
15641 tree rcond = fold_relational_const (code, type,
15642 TREE_REALPART (op0),
15643 TREE_REALPART (op1));
15644 tree icond = fold_relational_const (code, type,
15645 TREE_IMAGPART (op0),
15646 TREE_IMAGPART (op1));
15647 if (code == EQ_EXPR)
15648 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15649 else if (code == NE_EXPR)
15650 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15651 else
15652 return NULL_TREE;
15653 }
15654
15655 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15656 {
15657 unsigned count = VECTOR_CST_NELTS (op0);
15658 tree *elts = XALLOCAVEC (tree, count);
15659 gcc_assert (VECTOR_CST_NELTS (op1) == count
15660 && TYPE_VECTOR_SUBPARTS (type) == count);
15661
15662 for (unsigned i = 0; i < count; i++)
15663 {
15664 tree elem_type = TREE_TYPE (type);
15665 tree elem0 = VECTOR_CST_ELT (op0, i);
15666 tree elem1 = VECTOR_CST_ELT (op1, i);
15667
15668 tree tem = fold_relational_const (code, elem_type,
15669 elem0, elem1);
15670
15671 if (tem == NULL_TREE)
15672 return NULL_TREE;
15673
15674 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15675 }
15676
15677 return build_vector (type, elts);
15678 }
15679
15680 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15681
15682 To compute GT, swap the arguments and do LT.
15683 To compute GE, do LT and invert the result.
15684 To compute LE, swap the arguments, do LT and invert the result.
15685 To compute NE, do EQ and invert the result.
15686
15687 Therefore, the code below must handle only EQ and LT. */
15688
15689 if (code == LE_EXPR || code == GT_EXPR)
15690 {
15691 tree tem = op0;
15692 op0 = op1;
15693 op1 = tem;
15694 code = swap_tree_comparison (code);
15695 }
15696
15697 /* Note that it is safe to invert for real values here because we
15698    have already handled the one case where it matters.  */
15699
15700 invert = 0;
15701 if (code == NE_EXPR || code == GE_EXPR)
15702 {
15703 invert = 1;
15704 code = invert_tree_comparison (code, false);
15705 }
15706
15707 /* Compute a result for LT or EQ if args permit;
15708      otherwise return NULL_TREE.  */
15709 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15710 {
15711 if (code == EQ_EXPR)
15712 result = tree_int_cst_equal (op0, op1);
15713 else
15714 result = tree_int_cst_lt (op0, op1);
15715 }
15716 else
15717 return NULL_TREE;
15718
15719 if (invert)
15720 result ^= 1;
15721 return constant_boolean_node (result, type);
15722 }
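/* Editorial illustration: with REAL_CST operands where either is a NaN,
   EQ_EXPR folds to false and NE_EXPR to true, while LT_EXPR and the
   other signaling comparisons are left alone under -ftrapping-math
   because evaluating them may raise an exception.  INTEGER_CST operands
   funnel into the LT/EQ kernel at the end of the function, with the
   swap and invert steps reconstructing the other comparisons.  */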
15723
15724 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15725 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15726 itself. */
15727
15728 tree
15729 fold_build_cleanup_point_expr (tree type, tree expr)
15730 {
15731 /* If the expression does not have side effects then we don't have to wrap
15732 it with a cleanup point expression. */
15733 if (!TREE_SIDE_EFFECTS (expr))
15734 return expr;
15735
15736   /* If the expression is a RETURN_EXPR, check whether the expression it
15737      returns, or the right-hand side of the MODIFY_EXPR it contains, has
15738      side effects.  If neither does, we don't need to wrap the expression
15739      in a cleanup point expression.  Note we don't check the left-hand
15740      side of the modify because it should always be the return decl.  */
15741 if (TREE_CODE (expr) == RETURN_EXPR)
15742 {
15743 tree op = TREE_OPERAND (expr, 0);
15744 if (!op || !TREE_SIDE_EFFECTS (op))
15745 return expr;
15746 op = TREE_OPERAND (op, 1);
15747 if (!TREE_SIDE_EFFECTS (op))
15748 return expr;
15749 }
15750
15751 return build1 (CLEANUP_POINT_EXPR, type, expr);
15752 }
15753
15754 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15755 of an indirection through OP0, or NULL_TREE if no simplification is
15756 possible. */
15757
15758 tree
15759 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15760 {
15761 tree sub = op0;
15762 tree subtype;
15763
15764 STRIP_NOPS (sub);
15765 subtype = TREE_TYPE (sub);
15766 if (!POINTER_TYPE_P (subtype))
15767 return NULL_TREE;
15768
15769 if (TREE_CODE (sub) == ADDR_EXPR)
15770 {
15771 tree op = TREE_OPERAND (sub, 0);
15772 tree optype = TREE_TYPE (op);
15773 /* *&CONST_DECL -> to the value of the const decl. */
15774 if (TREE_CODE (op) == CONST_DECL)
15775 return DECL_INITIAL (op);
15776 /* *&p => p; make sure to handle *&"str"[cst] here. */
15777 if (type == optype)
15778 {
15779 tree fop = fold_read_from_constant_string (op);
15780 if (fop)
15781 return fop;
15782 else
15783 return op;
15784 }
15785 /* *(foo *)&fooarray => fooarray[0] */
15786 else if (TREE_CODE (optype) == ARRAY_TYPE
15787 && type == TREE_TYPE (optype)
15788 && (!in_gimple_form
15789 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15790 {
15791 tree type_domain = TYPE_DOMAIN (optype);
15792 tree min_val = size_zero_node;
15793 if (type_domain && TYPE_MIN_VALUE (type_domain))
15794 min_val = TYPE_MIN_VALUE (type_domain);
15795 if (in_gimple_form
15796 && TREE_CODE (min_val) != INTEGER_CST)
15797 return NULL_TREE;
15798 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15799 NULL_TREE, NULL_TREE);
15800 }
15801 /* *(foo *)&complexfoo => __real__ complexfoo */
15802 else if (TREE_CODE (optype) == COMPLEX_TYPE
15803 && type == TREE_TYPE (optype))
15804 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15805 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15806 else if (TREE_CODE (optype) == VECTOR_TYPE
15807 && type == TREE_TYPE (optype))
15808 {
15809 tree part_width = TYPE_SIZE (type);
15810 tree index = bitsize_int (0);
15811 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15812 }
15813 }
15814
15815 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15816 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15817 {
15818 tree op00 = TREE_OPERAND (sub, 0);
15819 tree op01 = TREE_OPERAND (sub, 1);
15820
15821 STRIP_NOPS (op00);
15822 if (TREE_CODE (op00) == ADDR_EXPR)
15823 {
15824 tree op00type;
15825 op00 = TREE_OPERAND (op00, 0);
15826 op00type = TREE_TYPE (op00);
15827
15828 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15829 if (TREE_CODE (op00type) == VECTOR_TYPE
15830 && type == TREE_TYPE (op00type))
15831 {
15832 HOST_WIDE_INT offset = tree_to_shwi (op01);
15833 tree part_width = TYPE_SIZE (type);
15834 	      unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
15835 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15836 tree index = bitsize_int (indexi);
15837
15838 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15839 return fold_build3_loc (loc,
15840 BIT_FIELD_REF, type, op00,
15841 part_width, index);
15842
15843 }
15844 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15845 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15846 && type == TREE_TYPE (op00type))
15847 {
15848 tree size = TYPE_SIZE_UNIT (type);
15849 if (tree_int_cst_equal (size, op01))
15850 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15851 }
15852 /* ((foo *)&fooarray)[1] => fooarray[1] */
15853 else if (TREE_CODE (op00type) == ARRAY_TYPE
15854 && type == TREE_TYPE (op00type))
15855 {
15856 tree type_domain = TYPE_DOMAIN (op00type);
15857 tree min_val = size_zero_node;
15858 if (type_domain && TYPE_MIN_VALUE (type_domain))
15859 min_val = TYPE_MIN_VALUE (type_domain);
15860 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15861 TYPE_SIZE_UNIT (type));
15862 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15863 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15864 NULL_TREE, NULL_TREE);
15865 }
15866 }
15867 }
15868
15869 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15870 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15871 && type == TREE_TYPE (TREE_TYPE (subtype))
15872 && (!in_gimple_form
15873 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15874 {
15875 tree type_domain;
15876 tree min_val = size_zero_node;
15877 sub = build_fold_indirect_ref_loc (loc, sub);
15878 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15879 if (type_domain && TYPE_MIN_VALUE (type_domain))
15880 min_val = TYPE_MIN_VALUE (type_domain);
15881 if (in_gimple_form
15882 && TREE_CODE (min_val) != INTEGER_CST)
15883 return NULL_TREE;
15884 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15885 NULL_TREE);
15886 }
15887
15888 return NULL_TREE;
15889 }
15890
15891 /* Builds an expression for an indirection through T, simplifying some
15892 cases. */
15893
15894 tree
15895 build_fold_indirect_ref_loc (location_t loc, tree t)
15896 {
15897 tree type = TREE_TYPE (TREE_TYPE (t));
15898 tree sub = fold_indirect_ref_1 (loc, type, t);
15899
15900 if (sub)
15901 return sub;
15902
15903 return build1_loc (loc, INDIRECT_REF, type, t);
15904 }
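/* Usage sketch (editorial; VAR stands for some VAR_DECL in scope):

     tree addr = build_fold_addr_expr_loc (loc, var);
     tree deref = build_fold_indirect_ref_loc (loc, addr);

   Because *&var simplifies to var, DEREF is VAR itself; only when none
   of the patterns in fold_indirect_ref_1 apply is a fresh INDIRECT_REF
   built.  */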
15905
15906 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15907
15908 tree
15909 fold_indirect_ref_loc (location_t loc, tree t)
15910 {
15911 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15912
15913 if (sub)
15914 return sub;
15915 else
15916 return t;
15917 }
15918
15919 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15920 whose result is ignored. The type of the returned tree need not be
15921 the same as the original expression. */
15922
15923 tree
15924 fold_ignored_result (tree t)
15925 {
15926 if (!TREE_SIDE_EFFECTS (t))
15927 return integer_zero_node;
15928
15929 for (;;)
15930 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15931 {
15932 case tcc_unary:
15933 t = TREE_OPERAND (t, 0);
15934 break;
15935
15936 case tcc_binary:
15937 case tcc_comparison:
15938 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15939 t = TREE_OPERAND (t, 0);
15940 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15941 t = TREE_OPERAND (t, 1);
15942 else
15943 return t;
15944 break;
15945
15946 case tcc_expression:
15947 switch (TREE_CODE (t))
15948 {
15949 case COMPOUND_EXPR:
15950 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15951 return t;
15952 t = TREE_OPERAND (t, 0);
15953 break;
15954
15955 case COND_EXPR:
15956 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15957 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15958 return t;
15959 t = TREE_OPERAND (t, 0);
15960 break;
15961
15962 default:
15963 return t;
15964 }
15965 break;
15966
15967 default:
15968 return t;
15969 }
15970 }
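/* Editorial illustration: if the value of "x + f ()" is ignored, the
   tcc_binary case keeps only the operand with side effects, so the call
   to f is returned on its own; an expression with no side effects at
   all collapses to integer_zero_node.  */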
15971
15972 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15973
15974 tree
15975 round_up_loc (location_t loc, tree value, unsigned int divisor)
15976 {
15977 tree div = NULL_TREE;
15978
15979 if (divisor == 1)
15980 return value;
15981
15982   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15983      have to do anything.  Only do this when VALUE is not a constant,
15984      because for a constant this check is more expensive than simply
15985      performing the rounding.  */
15986 if (TREE_CODE (value) != INTEGER_CST)
15987 {
15988 div = build_int_cst (TREE_TYPE (value), divisor);
15989
15990 if (multiple_of_p (TREE_TYPE (value), value, div))
15991 return value;
15992 }
15993
15994 /* If divisor is a power of two, simplify this to bit manipulation. */
15995 if (divisor == (divisor & -divisor))
15996 {
15997 if (TREE_CODE (value) == INTEGER_CST)
15998 {
15999 wide_int val = value;
16000 bool overflow_p;
16001
16002 if ((val & (divisor - 1)) == 0)
16003 return value;
16004
16005 overflow_p = TREE_OVERFLOW (value);
16006 val &= ~(divisor - 1);
16007 val += divisor;
16008 if (val == 0)
16009 overflow_p = true;
16010
16011 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16012 }
16013 else
16014 {
16015 tree t;
16016
16017 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16018 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16019 t = build_int_cst (TREE_TYPE (value), -divisor);
16020 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16021 }
16022 }
16023 else
16024 {
16025 if (!div)
16026 div = build_int_cst (TREE_TYPE (value), divisor);
16027 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16028 value = size_binop_loc (loc, MULT_EXPR, value, div);
16029 }
16030
16031 return value;
16032 }
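/* Worked example (editorial): rounding VALUE = 37 up to a multiple of
   DIVISOR = 8 takes the power-of-two path:

     (37 + 7) & -8  ==  44 & ~7  ==  40

   whereas a divisor such as 12 uses the generic path,
   CEIL_DIV_EXPR (37, 12) * 12 == 4 * 12 == 48.  */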
16033
16034 /* Likewise, but round down. */
16035
16036 tree
16037 round_down_loc (location_t loc, tree value, int divisor)
16038 {
16039 tree div = NULL_TREE;
16040
16041 gcc_assert (divisor > 0);
16042 if (divisor == 1)
16043 return value;
16044
16045   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16046      have to do anything.  Only do this when VALUE is not a constant,
16047      because for a constant this check is more expensive than simply
16048      performing the rounding.  */
16049 if (TREE_CODE (value) != INTEGER_CST)
16050 {
16051 div = build_int_cst (TREE_TYPE (value), divisor);
16052
16053 if (multiple_of_p (TREE_TYPE (value), value, div))
16054 return value;
16055 }
16056
16057 /* If divisor is a power of two, simplify this to bit manipulation. */
16058 if (divisor == (divisor & -divisor))
16059 {
16060 tree t;
16061
16062 t = build_int_cst (TREE_TYPE (value), -divisor);
16063 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16064 }
16065 else
16066 {
16067 if (!div)
16068 div = build_int_cst (TREE_TYPE (value), divisor);
16069 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16070 value = size_binop_loc (loc, MULT_EXPR, value, div);
16071 }
16072
16073 return value;
16074 }
16075
16076 /* Returns the pointer to the base of the object addressed by EXP and
16077 extracts the information about the offset of the access, storing it
16078    in *PBITPOS and *POFFSET.  */
16079
16080 static tree
16081 split_address_to_core_and_offset (tree exp,
16082 HOST_WIDE_INT *pbitpos, tree *poffset)
16083 {
16084 tree core;
16085 machine_mode mode;
16086 int unsignedp, volatilep;
16087 HOST_WIDE_INT bitsize;
16088 location_t loc = EXPR_LOCATION (exp);
16089
16090 if (TREE_CODE (exp) == ADDR_EXPR)
16091 {
16092 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16093 poffset, &mode, &unsignedp, &volatilep,
16094 false);
16095 core = build_fold_addr_expr_loc (loc, core);
16096 }
16097 else
16098 {
16099 core = exp;
16100 *pbitpos = 0;
16101 *poffset = NULL_TREE;
16102 }
16103
16104 return core;
16105 }
16106
16107 /* Returns true if addresses of E1 and E2 differ by a constant, false
16108 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16109
16110 bool
16111 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16112 {
16113 tree core1, core2;
16114 HOST_WIDE_INT bitpos1, bitpos2;
16115 tree toffset1, toffset2, tdiff, type;
16116
16117 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16118 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16119
16120 if (bitpos1 % BITS_PER_UNIT != 0
16121 || bitpos2 % BITS_PER_UNIT != 0
16122 || !operand_equal_p (core1, core2, 0))
16123 return false;
16124
16125 if (toffset1 && toffset2)
16126 {
16127 type = TREE_TYPE (toffset1);
16128 if (type != TREE_TYPE (toffset2))
16129 toffset2 = fold_convert (type, toffset2);
16130
16131 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16132 if (!cst_and_fits_in_hwi (tdiff))
16133 return false;
16134
16135 *diff = int_cst_value (tdiff);
16136 }
16137 else if (toffset1 || toffset2)
16138 {
16139 /* If only one of the offsets is non-constant, the difference cannot
16140 be a constant. */
16141 return false;
16142 }
16143 else
16144 *diff = 0;
16145
16146 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16147 return true;
16148 }
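/* Editorial illustration: for E1 = &a[3] and E2 = &a[1], with A an
   array of 4-byte ints, both addresses split to the same core with
   constant byte offsets 12 and 4, so *DIFF is set to 8 and true is
   returned; if only one side had a variable offset, the difference
   cannot be constant and false is returned.  */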
16149
16150 /* Simplify the floating point expression EXP when the sign of the
16151 result is not significant. Return NULL_TREE if no simplification
16152 is possible. */
16153
16154 tree
16155 fold_strip_sign_ops (tree exp)
16156 {
16157 tree arg0, arg1;
16158 location_t loc = EXPR_LOCATION (exp);
16159
16160 switch (TREE_CODE (exp))
16161 {
16162 case ABS_EXPR:
16163 case NEGATE_EXPR:
16164 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16165 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16166
16167 case MULT_EXPR:
16168 case RDIV_EXPR:
16169 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16170 return NULL_TREE;
16171 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16172 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16173 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16174 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16175 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16176 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16177 break;
16178
16179 case COMPOUND_EXPR:
16180 arg0 = TREE_OPERAND (exp, 0);
16181 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16182 if (arg1)
16183 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16184 break;
16185
16186 case COND_EXPR:
16187 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16188 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16189 if (arg0 || arg1)
16190 return fold_build3_loc (loc,
16191 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16192 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16193 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16194 break;
16195
16196 case CALL_EXPR:
16197 {
16198 const enum built_in_function fcode = builtin_mathfn_code (exp);
16199 switch (fcode)
16200 {
16201 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16202 /* Strip copysign function call, return the 1st argument. */
16203 arg0 = CALL_EXPR_ARG (exp, 0);
16204 arg1 = CALL_EXPR_ARG (exp, 1);
16205 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16206
16207 default:
16208 /* Strip sign ops from the argument of "odd" math functions. */
16209 if (negate_mathfn_p (fcode))
16210 {
16211 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16212 if (arg0)
16213 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16214 }
16215 break;
16216 }
16217 }
16218 break;
16219
16220 default:
16221 break;
16222 }
16223 return NULL_TREE;
16224 }
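/* Editorial illustration: for EXP = -x * y the NEGATE_EXPR is stripped
   from the first operand and the MULT_EXPR is rebuilt as x * y; a call
   to copysign (a, b) reduces to A, with B retained only for its side
   effects via omit_one_operand_loc.  */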
16225
16226 /* Return OFF converted to a pointer offset type suitable as offset for
16227 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16228 tree
16229 convert_to_ptrofftype_loc (location_t loc, tree off)
16230 {
16231 return fold_convert_loc (loc, sizetype, off);
16232 }
16233
16234 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16235 tree
16236 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16237 {
16238 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16239 ptr, convert_to_ptrofftype_loc (loc, off));
16240 }
16241
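/* Usage sketch (editorial; PTR is some pointer-valued tree, OFF any
   integral offset tree):

     tree q = fold_build_pointer_plus_loc (loc, ptr, off);

   OFF is first converted to the pointer offset type and the resulting
   POINTER_PLUS_EXPR is folded where possible; the _hwi variant below
   takes the offset as a plain HOST_WIDE_INT.  */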
16242 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16243 tree
16244 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16245 {
16246 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16247 ptr, size_int (off));
16248 }