/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

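/* Illustrative sketch (an editor's addition, not part of the original
   file): how a middle-end caller might use the entry points above.
   The variable names are hypothetical.

     tree four = size_int (4);
     tree eight = size_int (8);
     tree twelve = size_binop (PLUS_EXPR, four, eight);

   Because both operands are constants, size_binop folds the addition
   at compile time and TWELVE is the sizetype constant 12.  */
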
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "cgraph.h"
#include "generic-match.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
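
/* Worked example of the encoding (an editor's addition): each of the
   four low bits stands for one elementary outcome (LT, EQ, GT, UNORD),
   so combining comparisons is plain bitwise arithmetic.  For instance
   COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE, i.e.
   "a < b || a == b" simplifies to "a <= b", and COMPCODE_TRUE (15)
   is the OR of all four elementary outcomes.  */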

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
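
/* Example (an editor's addition): for INTEGER_CSTs representing 12
   and 4, div_if_zero_remainder returns the constant 3, because 4
   divides 12 exactly; for 12 and 5 it returns NULL_TREE, since the
   remainder is nonzero.  */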
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
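
/* Usage sketch for the deferral machinery above (an editor's
   addition; EXPR and STMT are hypothetical):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     bool used = res != NULL_TREE && TREE_CODE (res) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, stmt, 0);

   Any -Wstrict-overflow warning triggered while folding EXPR is
   buffered and emitted only if the caller actually uses the result.  */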
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
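
/* For example (an editor's addition): sin is odd, so -sin(x) can be
   folded to sin(-x); rint is odd only when the rounding mode is not
   observable, hence the !flag_rounding_math guard above.  cos would
   not qualify, since cos(-x) == cos(x), not -cos(x).  */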

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
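
/* Worked example for split_tree (an editor's addition): splitting
   IN = (x - 5) with CODE = PLUS_EXPR finds the literal 5 on the
   subtracted side, so it stores 5 in *MINUS_LITP, leaves *LITP and
   *CONP null, and returns x as the variable part; the caller can
   then recombine the pieces with associate_trees below.  */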

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
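
/* Example (an editor's addition, with hypothetical variables):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = int_const_binop (PLUS_EXPR, two, three);

   SUM is the int constant 5.  If the signed addition had overflowed,
   force_fit_type would have returned a constant carrying the
   TREE_OVERFLOW flag rather than rejecting the fold.  */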

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
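
/* Example (an editor's addition): folding 1.5 + 2.5 in double.  Both
   operands are REAL_CSTs, so real_arithmetic computes 4.0 exactly and
   const_binop returns the REAL_CST 4.0.  If the same addition were
   inexact and -frounding-math were in effect, the guards above would
   make const_binop return NULL_TREE and leave the operation for run
   time.  */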

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
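
/* Example (an editor's addition): size_diffop on the sizetype
   constants 4 and 8 cannot simply fold 4 - 8 in the unsigned sizetype,
   so it computes 8 - 4 = 4 in sizetype, converts to ssizetype, and
   subtracts that from zero, yielding the ssizetype constant -4 with
   no spurious overflow flag.  */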
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
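
/* Examples of the saturating behavior above (an editor's addition):
   folding (int) 1.0e30 yields INT_MAX with TREE_OVERFLOW set, folding
   (int) of a NaN constant yields 0 with TREE_OVERFLOW set, and plain
   (int) 42.9 truncates to 42 with no overflow.  */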
1648
1649 /* A subroutine of fold_convert_const handling conversions of a
1650 FIXED_CST to an integer type. */
1651
1652 static tree
1653 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1654 {
1655 tree t;
1656 double_int temp, temp_trunc;
1657 unsigned int mode;
1658
1659 /* Right shift FIXED_CST to temp by fbit. */
1660 temp = TREE_FIXED_CST (arg1).data;
1661 mode = TREE_FIXED_CST (arg1).mode;
1662 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1663 {
1664 temp = temp.rshift (GET_MODE_FBIT (mode),
1665 HOST_BITS_PER_DOUBLE_INT,
1666 SIGNED_FIXED_POINT_MODE_P (mode));
1667
1668 /* Left shift temp to temp_trunc by fbit. */
1669 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1670 HOST_BITS_PER_DOUBLE_INT,
1671 SIGNED_FIXED_POINT_MODE_P (mode));
1672 }
1673 else
1674 {
1675 temp = double_int_zero;
1676 temp_trunc = double_int_zero;
1677 }
1678
1679 /* If FIXED_CST is negative, we need to round the value toward 0.
1680 By checking if the fractional bits are not zero to add 1 to temp. */
1681 if (SIGNED_FIXED_POINT_MODE_P (mode)
1682 && temp_trunc.is_negative ()
1683 && TREE_FIXED_CST (arg1).data != temp_trunc)
1684 temp += double_int_one;
1685
1686 /* Given a fixed-point constant, make new constant with new type,
1687 appropriately sign-extended or truncated. */
1688 t = force_fit_type (type, temp, -1,
1689 (temp.is_negative ()
1690 && (TYPE_UNSIGNED (type)
1691 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1692 | TREE_OVERFLOW (arg1));
1693
1694 return t;
1695 }
1696
1697 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1698 to another floating point type. */
1699
1700 static tree
1701 fold_convert_const_real_from_real (tree type, const_tree arg1)
1702 {
1703 REAL_VALUE_TYPE value;
1704 tree t;
1705
1706 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1707 t = build_real (type, value);
1708
1709 /* If converting an infinity or NAN to a representation that doesn't
1710 have one, set the overflow bit so that we can produce some kind of
1711 error message at the appropriate point if necessary. It's not the
1712 most user-friendly message, but it's better than nothing. */
1713 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1714 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1715 TREE_OVERFLOW (t) = 1;
1716 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1717 && !MODE_HAS_NANS (TYPE_MODE (type)))
1718 TREE_OVERFLOW (t) = 1;
1719 /* Regular overflow, conversion produced an infinity in a mode that
1720 can't represent them. */
1721 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1722 && REAL_VALUE_ISINF (value)
1723 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1724 TREE_OVERFLOW (t) = 1;
1725 else
1726 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1727 return t;
1728 }
1729
1730 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1731 to a floating point type. */
1732
1733 static tree
1734 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1735 {
1736 REAL_VALUE_TYPE value;
1737 tree t;
1738
1739 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1740 t = build_real (type, value);
1741
1742 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1743 return t;
1744 }
1745
1746 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1747 to another fixed-point type. */
1748
1749 static tree
1750 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1751 {
1752 FIXED_VALUE_TYPE value;
1753 tree t;
1754 bool overflow_p;
1755
1756 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1757 TYPE_SATURATING (type));
1758 t = build_fixed (type, value);
1759
1760 /* Propagate overflow flags. */
1761 if (overflow_p | TREE_OVERFLOW (arg1))
1762 TREE_OVERFLOW (t) = 1;
1763 return t;
1764 }
1765
1766 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1767 to a fixed-point type. */
1768
1769 static tree
1770 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1771 {
1772 FIXED_VALUE_TYPE value;
1773 tree t;
1774 bool overflow_p;
1775 double_int di;
1776
1777 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1778
1779 di.low = TREE_INT_CST_ELT (arg1, 0);
1780 if (TREE_INT_CST_NUNITS (arg1) == 1)
1781 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1782 else
1783 di.high = TREE_INT_CST_ELT (arg1, 1);
1784
1785 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1786 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1787 TYPE_SATURATING (type));
1788 t = build_fixed (type, value);
1789
1790 /* Propagate overflow flags. */
1791 if (overflow_p | TREE_OVERFLOW (arg1))
1792 TREE_OVERFLOW (t) = 1;
1793 return t;
1794 }
1795
1796 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1797 to a fixed-point type. */
1798
1799 static tree
1800 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1801 {
1802 FIXED_VALUE_TYPE value;
1803 tree t;
1804 bool overflow_p;
1805
1806 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1807 &TREE_REAL_CST (arg1),
1808 TYPE_SATURATING (type));
1809 t = build_fixed (type, value);
1810
1811 /* Propagate overflow flags. */
1812 if (overflow_p | TREE_OVERFLOW (arg1))
1813 TREE_OVERFLOW (t) = 1;
1814 return t;
1815 }
1816
1817 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1818 type TYPE. If no simplification can be done return NULL_TREE. */
1819
1820 static tree
1821 fold_convert_const (enum tree_code code, tree type, tree arg1)
1822 {
1823 if (TREE_TYPE (arg1) == type)
1824 return arg1;
1825
1826 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1827 || TREE_CODE (type) == OFFSET_TYPE)
1828 {
1829 if (TREE_CODE (arg1) == INTEGER_CST)
1830 return fold_convert_const_int_from_int (type, arg1);
1831 else if (TREE_CODE (arg1) == REAL_CST)
1832 return fold_convert_const_int_from_real (code, type, arg1);
1833 else if (TREE_CODE (arg1) == FIXED_CST)
1834 return fold_convert_const_int_from_fixed (type, arg1);
1835 }
1836 else if (TREE_CODE (type) == REAL_TYPE)
1837 {
1838 if (TREE_CODE (arg1) == INTEGER_CST)
1839 return build_real_from_int_cst (type, arg1);
1840 else if (TREE_CODE (arg1) == REAL_CST)
1841 return fold_convert_const_real_from_real (type, arg1);
1842 else if (TREE_CODE (arg1) == FIXED_CST)
1843 return fold_convert_const_real_from_fixed (type, arg1);
1844 }
1845 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1846 {
1847 if (TREE_CODE (arg1) == FIXED_CST)
1848 return fold_convert_const_fixed_from_fixed (type, arg1);
1849 else if (TREE_CODE (arg1) == INTEGER_CST)
1850 return fold_convert_const_fixed_from_int (type, arg1);
1851 else if (TREE_CODE (arg1) == REAL_CST)
1852 return fold_convert_const_fixed_from_real (type, arg1);
1853 }
1854 return NULL_TREE;
1855 }
1856
1857 /* Construct a vector of zero elements of vector type TYPE. */
1858
1859 static tree
1860 build_zero_vector (tree type)
1861 {
1862 tree t;
1863
1864 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1865 return build_vector_from_val (type, t);
1866 }
1867
1868 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
1869
1870 bool
1871 fold_convertible_p (const_tree type, const_tree arg)
1872 {
1873 tree orig = TREE_TYPE (arg);
1874
1875 if (type == orig)
1876 return true;
1877
1878 if (TREE_CODE (arg) == ERROR_MARK
1879 || TREE_CODE (type) == ERROR_MARK
1880 || TREE_CODE (orig) == ERROR_MARK)
1881 return false;
1882
1883 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1884 return true;
1885
1886 switch (TREE_CODE (type))
1887 {
1888 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1889 case POINTER_TYPE: case REFERENCE_TYPE:
1890 case OFFSET_TYPE:
1891 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1892 || TREE_CODE (orig) == OFFSET_TYPE)
1893 return true;
1894 return (TREE_CODE (orig) == VECTOR_TYPE
1895 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1896
1897 case REAL_TYPE:
1898 case FIXED_POINT_TYPE:
1899 case COMPLEX_TYPE:
1900 case VECTOR_TYPE:
1901 case VOID_TYPE:
1902 return TREE_CODE (type) == TREE_CODE (orig);
1903
1904 default:
1905 return false;
1906 }
1907 }
1908
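/* Illustrative sketch (hypothetical use, not upstream code): callers
   typically use fold_convertible_p to guard a direct NOP_EXPR, e.g.

     if (fold_convertible_p (long_integer_type_node, arg))
       arg = fold_build1 (NOP_EXPR, long_integer_type_node, arg);

   Integral, pointer and offset types are freely inter-convertible
   here, whereas a REAL_TYPE target accepts only a REAL_TYPE source,
   so the guard above rejects a double-typed ARG.  */
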
1909 /* Convert expression ARG to type TYPE. Used by the middle-end for
1910 simple conversions in preference to calling the front-end's convert. */
1911
1912 tree
1913 fold_convert_loc (location_t loc, tree type, tree arg)
1914 {
1915 tree orig = TREE_TYPE (arg);
1916 tree tem;
1917
1918 if (type == orig)
1919 return arg;
1920
1921 if (TREE_CODE (arg) == ERROR_MARK
1922 || TREE_CODE (type) == ERROR_MARK
1923 || TREE_CODE (orig) == ERROR_MARK)
1924 return error_mark_node;
1925
1926 switch (TREE_CODE (type))
1927 {
1928 case POINTER_TYPE:
1929 case REFERENCE_TYPE:
1930 /* Handle conversions between pointers to different address spaces. */
1931 if (POINTER_TYPE_P (orig)
1932 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1933 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1934 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1935 /* fall through */
1936
1937 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1938 case OFFSET_TYPE:
1939 if (TREE_CODE (arg) == INTEGER_CST)
1940 {
1941 tem = fold_convert_const (NOP_EXPR, type, arg);
1942 if (tem != NULL_TREE)
1943 return tem;
1944 }
1945 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1946 || TREE_CODE (orig) == OFFSET_TYPE)
1947 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1948 if (TREE_CODE (orig) == COMPLEX_TYPE)
1949 return fold_convert_loc (loc, type,
1950 fold_build1_loc (loc, REALPART_EXPR,
1951 TREE_TYPE (orig), arg));
1952 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1953 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1954 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1955
1956 case REAL_TYPE:
1957 if (TREE_CODE (arg) == INTEGER_CST)
1958 {
1959 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1960 if (tem != NULL_TREE)
1961 return tem;
1962 }
1963 else if (TREE_CODE (arg) == REAL_CST)
1964 {
1965 tem = fold_convert_const (NOP_EXPR, type, arg);
1966 if (tem != NULL_TREE)
1967 return tem;
1968 }
1969 else if (TREE_CODE (arg) == FIXED_CST)
1970 {
1971 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1972 if (tem != NULL_TREE)
1973 return tem;
1974 }
1975
1976 switch (TREE_CODE (orig))
1977 {
1978 case INTEGER_TYPE:
1979 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1980 case POINTER_TYPE: case REFERENCE_TYPE:
1981 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1982
1983 case REAL_TYPE:
1984 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1985
1986 case FIXED_POINT_TYPE:
1987 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1988
1989 case COMPLEX_TYPE:
1990 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1991 return fold_convert_loc (loc, type, tem);
1992
1993 default:
1994 gcc_unreachable ();
1995 }
1996
1997 case FIXED_POINT_TYPE:
1998 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1999 || TREE_CODE (arg) == REAL_CST)
2000 {
2001 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2002 if (tem != NULL_TREE)
2003 goto fold_convert_exit;
2004 }
2005
2006 switch (TREE_CODE (orig))
2007 {
2008 case FIXED_POINT_TYPE:
2009 case INTEGER_TYPE:
2010 case ENUMERAL_TYPE:
2011 case BOOLEAN_TYPE:
2012 case REAL_TYPE:
2013 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2014
2015 case COMPLEX_TYPE:
2016 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2017 return fold_convert_loc (loc, type, tem);
2018
2019 default:
2020 gcc_unreachable ();
2021 }
2022
2023 case COMPLEX_TYPE:
2024 switch (TREE_CODE (orig))
2025 {
2026 case INTEGER_TYPE:
2027 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2028 case POINTER_TYPE: case REFERENCE_TYPE:
2029 case REAL_TYPE:
2030 case FIXED_POINT_TYPE:
2031 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2032 fold_convert_loc (loc, TREE_TYPE (type), arg),
2033 fold_convert_loc (loc, TREE_TYPE (type),
2034 integer_zero_node));
2035 case COMPLEX_TYPE:
2036 {
2037 tree rpart, ipart;
2038
2039 if (TREE_CODE (arg) == COMPLEX_EXPR)
2040 {
2041 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2042 TREE_OPERAND (arg, 0));
2043 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2044 TREE_OPERAND (arg, 1));
2045 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2046 }
2047
2048 arg = save_expr (arg);
2049 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2050 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2051 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2052 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2053 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2054 }
2055
2056 default:
2057 gcc_unreachable ();
2058 }
2059
2060 case VECTOR_TYPE:
2061 if (integer_zerop (arg))
2062 return build_zero_vector (type);
2063 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2064 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2065 || TREE_CODE (orig) == VECTOR_TYPE);
2066 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2067
2068 case VOID_TYPE:
2069 tem = fold_ignored_result (arg);
2070 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2071
2072 default:
2073 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2074 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2075 gcc_unreachable ();
2076 }
2077 fold_convert_exit:
2078 protected_set_expr_location_unshare (tem, loc);
2079 return tem;
2080 }
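
/* Illustrative sketch (hypothetical Z, not upstream code): the
   COMPLEX_TYPE arms above mean that narrowing a complex value to a
   scalar keeps only its real part, so with Z of type complex double

     tree d = fold_convert_loc (loc, double_type_node, z);

   folds to REALPART_EXPR <z> rather than a plain NOP_EXPR, and the
   SAVE_EXPR in the complex-to-complex arm keeps Z from being
   evaluated twice when both parts are extracted.  */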
2081 \f
2082 /* Return false if expr can be assumed not to be an lvalue, true
2083 otherwise. */
2084
2085 static bool
2086 maybe_lvalue_p (const_tree x)
2087 {
2088 /* We only need to wrap lvalue tree codes. */
2089 switch (TREE_CODE (x))
2090 {
2091 case VAR_DECL:
2092 case PARM_DECL:
2093 case RESULT_DECL:
2094 case LABEL_DECL:
2095 case FUNCTION_DECL:
2096 case SSA_NAME:
2097
2098 case COMPONENT_REF:
2099 case MEM_REF:
2100 case INDIRECT_REF:
2101 case ARRAY_REF:
2102 case ARRAY_RANGE_REF:
2103 case BIT_FIELD_REF:
2104 case OBJ_TYPE_REF:
2105
2106 case REALPART_EXPR:
2107 case IMAGPART_EXPR:
2108 case PREINCREMENT_EXPR:
2109 case PREDECREMENT_EXPR:
2110 case SAVE_EXPR:
2111 case TRY_CATCH_EXPR:
2112 case WITH_CLEANUP_EXPR:
2113 case COMPOUND_EXPR:
2114 case MODIFY_EXPR:
2115 case TARGET_EXPR:
2116 case COND_EXPR:
2117 case BIND_EXPR:
2118 break;
2119
2120 default:
2121 /* Assume the worst for front-end tree codes. */
2122 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2123 break;
2124 return false;
2125 }
2126
2127 return true;
2128 }
2129
2130 /* Return an expr equal to X but certainly not valid as an lvalue. */
2131
2132 tree
2133 non_lvalue_loc (location_t loc, tree x)
2134 {
2135 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2136 us. */
2137 if (in_gimple_form)
2138 return x;
2139
2140 if (! maybe_lvalue_p (x))
2141 return x;
2142 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2143 }
2144
2145 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2146 Zero means allow extended lvalues. */
2147
2148 int pedantic_lvalues;
2149
2150 /* When pedantic, return an expr equal to X but certainly not valid as a
2151 pedantic lvalue. Otherwise, return X. */
2152
2153 static tree
2154 pedantic_non_lvalue_loc (location_t loc, tree x)
2155 {
2156 if (pedantic_lvalues)
2157 return non_lvalue_loc (loc, x);
2158
2159 return protected_set_expr_location_unshare (x, loc);
2160 }
2161 \f
2162 /* Given a tree comparison code, return the code that is the logical inverse.
2163 It is generally not safe to do this for floating-point comparisons, except
2164 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2165 ERROR_MARK in this case. */
2166
2167 enum tree_code
2168 invert_tree_comparison (enum tree_code code, bool honor_nans)
2169 {
2170 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2171 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2172 return ERROR_MARK;
2173
2174 switch (code)
2175 {
2176 case EQ_EXPR:
2177 return NE_EXPR;
2178 case NE_EXPR:
2179 return EQ_EXPR;
2180 case GT_EXPR:
2181 return honor_nans ? UNLE_EXPR : LE_EXPR;
2182 case GE_EXPR:
2183 return honor_nans ? UNLT_EXPR : LT_EXPR;
2184 case LT_EXPR:
2185 return honor_nans ? UNGE_EXPR : GE_EXPR;
2186 case LE_EXPR:
2187 return honor_nans ? UNGT_EXPR : GT_EXPR;
2188 case LTGT_EXPR:
2189 return UNEQ_EXPR;
2190 case UNEQ_EXPR:
2191 return LTGT_EXPR;
2192 case UNGT_EXPR:
2193 return LE_EXPR;
2194 case UNGE_EXPR:
2195 return LT_EXPR;
2196 case UNLT_EXPR:
2197 return GE_EXPR;
2198 case UNLE_EXPR:
2199 return GT_EXPR;
2200 case ORDERED_EXPR:
2201 return UNORDERED_EXPR;
2202 case UNORDERED_EXPR:
2203 return ORDERED_EXPR;
2204 default:
2205 gcc_unreachable ();
2206 }
2207 }
2208
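/* Worked example (illustrative note, not upstream text): without
   NaNs, invert_tree_comparison (LT_EXPR, false) is GE_EXPR.  When
   NaNs are honored it is UNGE_EXPR instead, since !(x < y) must also
   hold for unordered operands.  With NaNs honored and
   flag_trapping_math set, LT_EXPR yields ERROR_MARK, because the
   unordered replacement would not trap where the original does.  */
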
2209 /* Similar, but return the comparison that results if the operands are
2210 swapped. This is safe for floating-point. */
2211
2212 enum tree_code
2213 swap_tree_comparison (enum tree_code code)
2214 {
2215 switch (code)
2216 {
2217 case EQ_EXPR:
2218 case NE_EXPR:
2219 case ORDERED_EXPR:
2220 case UNORDERED_EXPR:
2221 case LTGT_EXPR:
2222 case UNEQ_EXPR:
2223 return code;
2224 case GT_EXPR:
2225 return LT_EXPR;
2226 case GE_EXPR:
2227 return LE_EXPR;
2228 case LT_EXPR:
2229 return GT_EXPR;
2230 case LE_EXPR:
2231 return GE_EXPR;
2232 case UNGT_EXPR:
2233 return UNLT_EXPR;
2234 case UNGE_EXPR:
2235 return UNLE_EXPR;
2236 case UNLT_EXPR:
2237 return UNGT_EXPR;
2238 case UNLE_EXPR:
2239 return UNGE_EXPR;
2240 default:
2241 gcc_unreachable ();
2242 }
2243 }
2244
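/* Illustrative note (not upstream text): swapping rewrites operand
   order rather than truth value, so x < y becomes y > x via
   swap_tree_comparison (LT_EXPR) == GT_EXPR, while the symmetric
   codes (EQ, NE, ORDERED, UNORDERED, LTGT, UNEQ) map to themselves.
   No unordered code is introduced or removed, which is why this is
   safe for IEEE floating point where inversion is not.  */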
2245
2246 /* Convert a comparison tree code from an enum tree_code representation
2247 into a compcode bit-based encoding. This function is the inverse of
2248 compcode_to_comparison. */
2249
2250 static enum comparison_code
2251 comparison_to_compcode (enum tree_code code)
2252 {
2253 switch (code)
2254 {
2255 case LT_EXPR:
2256 return COMPCODE_LT;
2257 case EQ_EXPR:
2258 return COMPCODE_EQ;
2259 case LE_EXPR:
2260 return COMPCODE_LE;
2261 case GT_EXPR:
2262 return COMPCODE_GT;
2263 case NE_EXPR:
2264 return COMPCODE_NE;
2265 case GE_EXPR:
2266 return COMPCODE_GE;
2267 case ORDERED_EXPR:
2268 return COMPCODE_ORD;
2269 case UNORDERED_EXPR:
2270 return COMPCODE_UNORD;
2271 case UNLT_EXPR:
2272 return COMPCODE_UNLT;
2273 case UNEQ_EXPR:
2274 return COMPCODE_UNEQ;
2275 case UNLE_EXPR:
2276 return COMPCODE_UNLE;
2277 case UNGT_EXPR:
2278 return COMPCODE_UNGT;
2279 case LTGT_EXPR:
2280 return COMPCODE_LTGT;
2281 case UNGE_EXPR:
2282 return COMPCODE_UNGE;
2283 default:
2284 gcc_unreachable ();
2285 }
2286 }
2287
2288 /* Convert a compcode bit-based encoding of a comparison operator back
2289 to GCC's enum tree_code representation. This function is the
2290 inverse of comparison_to_compcode. */
2291
2292 static enum tree_code
2293 compcode_to_comparison (enum comparison_code code)
2294 {
2295 switch (code)
2296 {
2297 case COMPCODE_LT:
2298 return LT_EXPR;
2299 case COMPCODE_EQ:
2300 return EQ_EXPR;
2301 case COMPCODE_LE:
2302 return LE_EXPR;
2303 case COMPCODE_GT:
2304 return GT_EXPR;
2305 case COMPCODE_NE:
2306 return NE_EXPR;
2307 case COMPCODE_GE:
2308 return GE_EXPR;
2309 case COMPCODE_ORD:
2310 return ORDERED_EXPR;
2311 case COMPCODE_UNORD:
2312 return UNORDERED_EXPR;
2313 case COMPCODE_UNLT:
2314 return UNLT_EXPR;
2315 case COMPCODE_UNEQ:
2316 return UNEQ_EXPR;
2317 case COMPCODE_UNLE:
2318 return UNLE_EXPR;
2319 case COMPCODE_UNGT:
2320 return UNGT_EXPR;
2321 case COMPCODE_LTGT:
2322 return LTGT_EXPR;
2323 case COMPCODE_UNGE:
2324 return UNGE_EXPR;
2325 default:
2326 gcc_unreachable ();
2327 }
2328 }
2329
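/* Worked example of the encoding (illustrative, not upstream text):
   the three low bits stand for LT, EQ and GT and the fourth for
   UNORDERED, so for instance

     COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ              (1|2 == 3)
     COMPCODE_NE == COMPCODE_LT | COMPCODE_GT
		    | COMPCODE_UNORD                     (1|4|8 == 13)

   and ANDing or ORing two encodings computes the conjunction or
   disjunction of the predicates directly, which is what
   combine_comparisons below relies on.  */
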
2330 /* Return a tree for the comparison which is the combination of
2331 doing the AND or OR (depending on CODE) of the two operations LCODE
2332 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2333 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2334 if this makes the transformation invalid. */
2335
2336 tree
2337 combine_comparisons (location_t loc,
2338 enum tree_code code, enum tree_code lcode,
2339 enum tree_code rcode, tree truth_type,
2340 tree ll_arg, tree lr_arg)
2341 {
2342 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2343 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2344 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2345 int compcode;
2346
2347 switch (code)
2348 {
2349 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2350 compcode = lcompcode & rcompcode;
2351 break;
2352
2353 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2354 compcode = lcompcode | rcompcode;
2355 break;
2356
2357 default:
2358 return NULL_TREE;
2359 }
2360
2361 if (!honor_nans)
2362 {
2363 /* Eliminate unordered comparisons, as well as LTGT and ORD
2364 which are not used unless the mode has NaNs. */
2365 compcode &= ~COMPCODE_UNORD;
2366 if (compcode == COMPCODE_LTGT)
2367 compcode = COMPCODE_NE;
2368 else if (compcode == COMPCODE_ORD)
2369 compcode = COMPCODE_TRUE;
2370 }
2371 else if (flag_trapping_math)
2372 {
2373 /* Check that the original operation and the optimized ones will trap
2374 under the same condition. */
2375 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2376 && (lcompcode != COMPCODE_EQ)
2377 && (lcompcode != COMPCODE_ORD);
2378 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2379 && (rcompcode != COMPCODE_EQ)
2380 && (rcompcode != COMPCODE_ORD);
2381 bool trap = (compcode & COMPCODE_UNORD) == 0
2382 && (compcode != COMPCODE_EQ)
2383 && (compcode != COMPCODE_ORD);
2384
2385 /* In a short-circuited boolean expression the LHS might be
2386 such that the RHS, if evaluated, will never trap. For
2387 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2388 if neither x nor y is NaN. (This is a mixed blessing: for
2389 example, the expression above will never trap, hence
2390 optimizing it to x < y would be invalid). */
2391 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2392 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2393 rtrap = false;
2394
2395 /* If the comparison was short-circuited, and only the RHS
2396 trapped, we may now generate a spurious trap. */
2397 if (rtrap && !ltrap
2398 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2399 return NULL_TREE;
2400
2401 /* If we changed the conditions that cause a trap, we lose. */
2402 if ((ltrap || rtrap) != trap)
2403 return NULL_TREE;
2404 }
2405
2406 if (compcode == COMPCODE_TRUE)
2407 return constant_boolean_node (true, truth_type);
2408 else if (compcode == COMPCODE_FALSE)
2409 return constant_boolean_node (false, truth_type);
2410 else
2411 {
2412 enum tree_code tcode;
2413
2414 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2415 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2416 }
2417 }
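
/* Illustrative sketch (hypothetical X and Y, not upstream code):
   combining the two halves of x < y || x == y,

     tree t = combine_comparisons (loc, TRUTH_ORIF_EXPR,
				   LT_EXPR, EQ_EXPR,
				   boolean_type_node, x, y);

   ORs the compcodes into COMPCODE_LE and returns the tree x <= y.
   When NaNs are honored and -ftrapping-math is in effect, the
   trap-consistency checks above can make this kind of call return
   NULL_TREE instead.  */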
2418 \f
2419 /* Return nonzero if two operands (typically of the same tree node)
2420 are necessarily equal. If either argument has side-effects this
2421 function returns zero. FLAGS modifies behavior as follows:
2422
2423 If OEP_ONLY_CONST is set, only return nonzero for constants.
2424 This function tests whether the operands are indistinguishable;
2425 it does not test whether they are equal using C's == operation.
2426 The distinction is important for IEEE floating point, because
2427 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2428 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2429
2430 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2431 even though it may hold multiple values during a function.
2432 This is because a GCC tree node guarantees that nothing else is
2433 executed between the evaluation of its "operands" (which may often
2434 be evaluated in arbitrary order). Hence if the operands themselves
2435    have no side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2436 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2437 unset means assuming isochronic (or instantaneous) tree equivalence.
2438 Unless comparing arbitrary expression trees, such as from different
2439 statements, this flag can usually be left unset.
2440
2441 If OEP_PURE_SAME is set, then pure functions with identical arguments
2442 are considered the same. It is used when the caller has other ways
2443 to ensure that global memory is unchanged in between. */
2444
2445 int
2446 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2447 {
2448 /* If either is ERROR_MARK, they aren't equal. */
2449 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2450 || TREE_TYPE (arg0) == error_mark_node
2451 || TREE_TYPE (arg1) == error_mark_node)
2452 return 0;
2453
2454   /* Similarly, if either does not have a type (like a released SSA name),
2455 they aren't equal. */
2456 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2457 return 0;
2458
2459 /* Check equality of integer constants before bailing out due to
2460 precision differences. */
2461 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2462 return tree_int_cst_equal (arg0, arg1);
2463
2464 /* If both types don't have the same signedness, then we can't consider
2465 them equal. We must check this before the STRIP_NOPS calls
2466 because they may change the signedness of the arguments. As pointers
2467 strictly don't have a signedness, require either two pointers or
2468 two non-pointers as well. */
2469 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2470 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2471 return 0;
2472
2473 /* We cannot consider pointers to different address space equal. */
2474 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2475 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2476 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2477 return 0;
2478
2479 /* If both types don't have the same precision, then it is not safe
2480 to strip NOPs. */
2481 if (element_precision (TREE_TYPE (arg0))
2482 != element_precision (TREE_TYPE (arg1)))
2483 return 0;
2484
2485 STRIP_NOPS (arg0);
2486 STRIP_NOPS (arg1);
2487
2488 /* In case both args are comparisons but with different comparison
2489 code, try to swap the comparison operands of one arg to produce
2490 a match and compare that variant. */
2491 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2492 && COMPARISON_CLASS_P (arg0)
2493 && COMPARISON_CLASS_P (arg1))
2494 {
2495 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2496
2497 if (TREE_CODE (arg0) == swap_code)
2498 return operand_equal_p (TREE_OPERAND (arg0, 0),
2499 TREE_OPERAND (arg1, 1), flags)
2500 && operand_equal_p (TREE_OPERAND (arg0, 1),
2501 TREE_OPERAND (arg1, 0), flags);
2502 }
2503
2504 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2505 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2506 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2507 return 0;
2508
2509 /* This is needed for conversions and for COMPONENT_REF.
2510 Might as well play it safe and always test this. */
2511 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2512 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2513 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2514 return 0;
2515
2516 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2517 We don't care about side effects in that case because the SAVE_EXPR
2518 takes care of that for us. In all other cases, two expressions are
2519 equal if they have no side effects. If we have two identical
2520 expressions with side effects that should be treated the same due
2521 to the only side effects being identical SAVE_EXPR's, that will
2522 be detected in the recursive calls below.
2523 If we are taking an invariant address of two identical objects
2524 they are necessarily equal as well. */
2525 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2526 && (TREE_CODE (arg0) == SAVE_EXPR
2527 || (flags & OEP_CONSTANT_ADDRESS_OF)
2528 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2529 return 1;
2530
2531 /* Next handle constant cases, those for which we can return 1 even
2532 if ONLY_CONST is set. */
2533 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2534 switch (TREE_CODE (arg0))
2535 {
2536 case INTEGER_CST:
2537 return tree_int_cst_equal (arg0, arg1);
2538
2539 case FIXED_CST:
2540 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2541 TREE_FIXED_CST (arg1));
2542
2543 case REAL_CST:
2544 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2545 TREE_REAL_CST (arg1)))
2546 return 1;
2547
2549 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2550 {
2551 /* If we do not distinguish between signed and unsigned zero,
2552 consider them equal. */
2553 if (real_zerop (arg0) && real_zerop (arg1))
2554 return 1;
2555 }
2556 return 0;
2557
2558 case VECTOR_CST:
2559 {
2560 unsigned i;
2561
2562 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2563 return 0;
2564
2565 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2566 {
2567 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2568 VECTOR_CST_ELT (arg1, i), flags))
2569 return 0;
2570 }
2571 return 1;
2572 }
2573
2574 case COMPLEX_CST:
2575 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2576 flags)
2577 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2578 flags));
2579
2580 case STRING_CST:
2581 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2582 && ! memcmp (TREE_STRING_POINTER (arg0),
2583 TREE_STRING_POINTER (arg1),
2584 TREE_STRING_LENGTH (arg0)));
2585
2586 case ADDR_EXPR:
2587 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2588 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2589 ? OEP_CONSTANT_ADDRESS_OF : 0);
2590 default:
2591 break;
2592 }
2593
2594 if (flags & OEP_ONLY_CONST)
2595 return 0;
2596
2597 /* Define macros to test an operand from arg0 and arg1 for equality and a
2598 variant that allows null and views null as being different from any
2599    non-null value.  In the latter case, if either is null, then both
2600    must be; otherwise, do the normal comparison.  */
2601 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2602 TREE_OPERAND (arg1, N), flags)
2603
2604 #define OP_SAME_WITH_NULL(N) \
2605 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2606 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2607
2608 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2609 {
2610 case tcc_unary:
2611 /* Two conversions are equal only if signedness and modes match. */
2612 switch (TREE_CODE (arg0))
2613 {
2614 CASE_CONVERT:
2615 case FIX_TRUNC_EXPR:
2616 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2617 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2618 return 0;
2619 break;
2620 default:
2621 break;
2622 }
2623
2624 return OP_SAME (0);
2625
2627 case tcc_comparison:
2628 case tcc_binary:
2629 if (OP_SAME (0) && OP_SAME (1))
2630 return 1;
2631
2632 /* For commutative ops, allow the other order. */
2633 return (commutative_tree_code (TREE_CODE (arg0))
2634 && operand_equal_p (TREE_OPERAND (arg0, 0),
2635 TREE_OPERAND (arg1, 1), flags)
2636 && operand_equal_p (TREE_OPERAND (arg0, 1),
2637 TREE_OPERAND (arg1, 0), flags));
2638
2639 case tcc_reference:
2640 /* If either of the pointer (or reference) expressions we are
2641 dereferencing contain a side effect, these cannot be equal,
2642 but their addresses can be. */
2643 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2644 && (TREE_SIDE_EFFECTS (arg0)
2645 || TREE_SIDE_EFFECTS (arg1)))
2646 return 0;
2647
2648 switch (TREE_CODE (arg0))
2649 {
2650 case INDIRECT_REF:
2651 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2652 return OP_SAME (0);
2653
2654 case REALPART_EXPR:
2655 case IMAGPART_EXPR:
2656 return OP_SAME (0);
2657
2658 case TARGET_MEM_REF:
2659 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2660 /* Require equal extra operands and then fall through to MEM_REF
2661 handling of the two common operands. */
2662 if (!OP_SAME_WITH_NULL (2)
2663 || !OP_SAME_WITH_NULL (3)
2664 || !OP_SAME_WITH_NULL (4))
2665 return 0;
2666 /* Fallthru. */
2667 case MEM_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 /* Require equal access sizes, and similar pointer types.
2670 We can have incomplete types for array references of
2671 variable-sized arrays from the Fortran frontend
2672 though. Also verify the types are compatible. */
2673 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2674 || (TYPE_SIZE (TREE_TYPE (arg0))
2675 && TYPE_SIZE (TREE_TYPE (arg1))
2676 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2677 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2678 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2679 && alias_ptr_types_compatible_p
2680 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2681 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2682 && OP_SAME (0) && OP_SAME (1));
2683
2684 case ARRAY_REF:
2685 case ARRAY_RANGE_REF:
2686 /* Operands 2 and 3 may be null.
2687	     Compare the array index by value first if it is constant, as we
2688	     may have different types but the same value here.  */
2689 if (!OP_SAME (0))
2690 return 0;
2691 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2692 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2693 TREE_OPERAND (arg1, 1))
2694 || OP_SAME (1))
2695 && OP_SAME_WITH_NULL (2)
2696 && OP_SAME_WITH_NULL (3));
2697
2698 case COMPONENT_REF:
2699 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2700 may be NULL when we're called to compare MEM_EXPRs. */
2701 if (!OP_SAME_WITH_NULL (0)
2702 || !OP_SAME (1))
2703 return 0;
2704 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2705 return OP_SAME_WITH_NULL (2);
2706
2707 case BIT_FIELD_REF:
2708 if (!OP_SAME (0))
2709 return 0;
2710 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2711 return OP_SAME (1) && OP_SAME (2);
2712
2713 default:
2714 return 0;
2715 }
2716
2717 case tcc_expression:
2718 switch (TREE_CODE (arg0))
2719 {
2720 case ADDR_EXPR:
2721 case TRUTH_NOT_EXPR:
2722 return OP_SAME (0);
2723
2724 case TRUTH_ANDIF_EXPR:
2725 case TRUTH_ORIF_EXPR:
2726 return OP_SAME (0) && OP_SAME (1);
2727
2728 case FMA_EXPR:
2729 case WIDEN_MULT_PLUS_EXPR:
2730 case WIDEN_MULT_MINUS_EXPR:
2731 if (!OP_SAME (2))
2732 return 0;
2733	  /* The multiplication operands are commutative.  */
2734 /* FALLTHRU */
2735
2736 case TRUTH_AND_EXPR:
2737 case TRUTH_OR_EXPR:
2738 case TRUTH_XOR_EXPR:
2739 if (OP_SAME (0) && OP_SAME (1))
2740 return 1;
2741
2742 /* Otherwise take into account this is a commutative operation. */
2743 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2744 TREE_OPERAND (arg1, 1), flags)
2745 && operand_equal_p (TREE_OPERAND (arg0, 1),
2746 TREE_OPERAND (arg1, 0), flags));
2747
2748 case COND_EXPR:
2749 case VEC_COND_EXPR:
2750 case DOT_PROD_EXPR:
2751 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2752
2753 default:
2754 return 0;
2755 }
2756
2757 case tcc_vl_exp:
2758 switch (TREE_CODE (arg0))
2759 {
2760 case CALL_EXPR:
2761 /* If the CALL_EXPRs call different functions, then they
2762	     clearly cannot be equal.  */
2763 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2764 flags))
2765 return 0;
2766
2767 {
2768 unsigned int cef = call_expr_flags (arg0);
2769 if (flags & OEP_PURE_SAME)
2770 cef &= ECF_CONST | ECF_PURE;
2771 else
2772 cef &= ECF_CONST;
2773 if (!cef)
2774 return 0;
2775 }
2776
2777 /* Now see if all the arguments are the same. */
2778 {
2779 const_call_expr_arg_iterator iter0, iter1;
2780 const_tree a0, a1;
2781 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2782 a1 = first_const_call_expr_arg (arg1, &iter1);
2783 a0 && a1;
2784 a0 = next_const_call_expr_arg (&iter0),
2785 a1 = next_const_call_expr_arg (&iter1))
2786 if (! operand_equal_p (a0, a1, flags))
2787 return 0;
2788
2789 /* If we get here and both argument lists are exhausted
2790 then the CALL_EXPRs are equal. */
2791 return ! (a0 || a1);
2792 }
2793 default:
2794 return 0;
2795 }
2796
2797 case tcc_declaration:
2798 /* Consider __builtin_sqrt equal to sqrt. */
2799 return (TREE_CODE (arg0) == FUNCTION_DECL
2800 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2801 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2802 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2803
2804 default:
2805 return 0;
2806 }
2807
2808 #undef OP_SAME
2809 #undef OP_SAME_WITH_NULL
2810 }
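
/* Illustrative usage (hypothetical trees A and B, not upstream
   code): operand_equal_p (a, b, 0) asks whether A and B certainly
   compute the same value, and operand_equal_p (a, b, OEP_ONLY_CONST)
   answers 1 only for indistinguishable constants.  Two calls f ()
   and f () with identical arguments compare unequal unless F is
   ECF_CONST, or ECF_PURE with OEP_PURE_SAME passed, because an
   arbitrary function may observe changed global memory between the
   two evaluations.  */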
2811 \f
2812 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2813 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2814
2815 When in doubt, return 0. */
2816
2817 static int
2818 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2819 {
2820 int unsignedp1, unsignedpo;
2821 tree primarg0, primarg1, primother;
2822 unsigned int correct_width;
2823
2824 if (operand_equal_p (arg0, arg1, 0))
2825 return 1;
2826
2827 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2828 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2829 return 0;
2830
2831 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2832 and see if the inner values are the same. This removes any
2833 signedness comparison, which doesn't matter here. */
2834 primarg0 = arg0, primarg1 = arg1;
2835 STRIP_NOPS (primarg0);
2836 STRIP_NOPS (primarg1);
2837 if (operand_equal_p (primarg0, primarg1, 0))
2838 return 1;
2839
2840 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2841 actual comparison operand, ARG0.
2842
2843 First throw away any conversions to wider types
2844 already present in the operands. */
2845
2846 primarg1 = get_narrower (arg1, &unsignedp1);
2847 primother = get_narrower (other, &unsignedpo);
2848
2849 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2850 if (unsignedp1 == unsignedpo
2851 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2852 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2853 {
2854 tree type = TREE_TYPE (arg0);
2855
2856 /* Make sure shorter operand is extended the right way
2857 to match the longer operand. */
2858 primarg1 = fold_convert (signed_or_unsigned_type_for
2859 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2860
2861 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2862 return 1;
2863 }
2864
2865 return 0;
2866 }
2867 \f
2868 /* See if ARG is an expression that is either a comparison or is performing
2869 arithmetic on comparisons. The comparisons must only be comparing
2870 two different values, which will be stored in *CVAL1 and *CVAL2; if
2871 they are nonzero it means that some operands have already been found.
2872 No variables may be used anywhere else in the expression except in the
2873 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2874 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2875
2876 If this is true, return 1. Otherwise, return zero. */
2877
2878 static int
2879 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2880 {
2881 enum tree_code code = TREE_CODE (arg);
2882 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2883
2884 /* We can handle some of the tcc_expression cases here. */
2885 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2886 tclass = tcc_unary;
2887 else if (tclass == tcc_expression
2888 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2889 || code == COMPOUND_EXPR))
2890 tclass = tcc_binary;
2891
2892 else if (tclass == tcc_expression && code == SAVE_EXPR
2893 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2894 {
2895 /* If we've already found a CVAL1 or CVAL2, this expression is
2896	 too complex to handle.  */
2897 if (*cval1 || *cval2)
2898 return 0;
2899
2900 tclass = tcc_unary;
2901 *save_p = 1;
2902 }
2903
2904 switch (tclass)
2905 {
2906 case tcc_unary:
2907 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2908
2909 case tcc_binary:
2910 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2911 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2912 cval1, cval2, save_p));
2913
2914 case tcc_constant:
2915 return 1;
2916
2917 case tcc_expression:
2918 if (code == COND_EXPR)
2919 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2920 cval1, cval2, save_p)
2921 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2922 cval1, cval2, save_p)
2923 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2924 cval1, cval2, save_p));
2925 return 0;
2926
2927 case tcc_comparison:
2928 /* First see if we can handle the first operand, then the second. For
2929 the second operand, we know *CVAL1 can't be zero. It must be that
2930 one side of the comparison is each of the values; test for the
2931 case where this isn't true by failing if the two operands
2932 are the same. */
2933
2934 if (operand_equal_p (TREE_OPERAND (arg, 0),
2935 TREE_OPERAND (arg, 1), 0))
2936 return 0;
2937
2938 if (*cval1 == 0)
2939 *cval1 = TREE_OPERAND (arg, 0);
2940 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2941 ;
2942 else if (*cval2 == 0)
2943 *cval2 = TREE_OPERAND (arg, 0);
2944 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2945 ;
2946 else
2947 return 0;
2948
2949 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2950 ;
2951 else if (*cval2 == 0)
2952 *cval2 = TREE_OPERAND (arg, 1);
2953 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2954 ;
2955 else
2956 return 0;
2957
2958 return 1;
2959
2960 default:
2961 return 0;
2962 }
2963 }
2964 \f
2965 /* ARG is a tree that is known to contain just arithmetic operations and
2966 comparisons. Evaluate the operations in the tree substituting NEW0 for
2967 any occurrence of OLD0 as an operand of a comparison and likewise for
2968 NEW1 and OLD1. */
2969
2970 static tree
2971 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2972 tree old1, tree new1)
2973 {
2974 tree type = TREE_TYPE (arg);
2975 enum tree_code code = TREE_CODE (arg);
2976 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2977
2978 /* We can handle some of the tcc_expression cases here. */
2979 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2980 tclass = tcc_unary;
2981 else if (tclass == tcc_expression
2982 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2983 tclass = tcc_binary;
2984
2985 switch (tclass)
2986 {
2987 case tcc_unary:
2988 return fold_build1_loc (loc, code, type,
2989 eval_subst (loc, TREE_OPERAND (arg, 0),
2990 old0, new0, old1, new1));
2991
2992 case tcc_binary:
2993 return fold_build2_loc (loc, code, type,
2994 eval_subst (loc, TREE_OPERAND (arg, 0),
2995 old0, new0, old1, new1),
2996 eval_subst (loc, TREE_OPERAND (arg, 1),
2997 old0, new0, old1, new1));
2998
2999 case tcc_expression:
3000 switch (code)
3001 {
3002 case SAVE_EXPR:
3003 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3004 old1, new1);
3005
3006 case COMPOUND_EXPR:
3007 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3008 old1, new1);
3009
3010 case COND_EXPR:
3011 return fold_build3_loc (loc, code, type,
3012 eval_subst (loc, TREE_OPERAND (arg, 0),
3013 old0, new0, old1, new1),
3014 eval_subst (loc, TREE_OPERAND (arg, 1),
3015 old0, new0, old1, new1),
3016 eval_subst (loc, TREE_OPERAND (arg, 2),
3017 old0, new0, old1, new1));
3018 default:
3019 break;
3020 }
3021 /* Fall through - ??? */
3022
3023 case tcc_comparison:
3024 {
3025 tree arg0 = TREE_OPERAND (arg, 0);
3026 tree arg1 = TREE_OPERAND (arg, 1);
3027
3028 /* We need to check both for exact equality and tree equality. The
3029 former will be true if the operand has a side-effect. In that
3030 case, we know the operand occurred exactly once. */
3031
3032 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3033 arg0 = new0;
3034 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3035 arg0 = new1;
3036
3037 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3038 arg1 = new0;
3039 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3040 arg1 = new1;
3041
3042 return fold_build2_loc (loc, code, type, arg0, arg1);
3043 }
3044
3045 default:
3046 return arg;
3047 }
3048 }
3049 \f
3050 /* Return a tree for the case when the result of an expression is RESULT
3051 converted to TYPE and OMITTED was previously an operand of the expression
3052 but is now not needed (e.g., we folded OMITTED * 0).
3053
3054 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3055 the conversion of RESULT to TYPE. */
3056
3057 tree
3058 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3059 {
3060 tree t = fold_convert_loc (loc, type, result);
3061
3062 /* If the resulting operand is an empty statement, just return the omitted
3063      statement cast to void.  */
3064 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3065 return build1_loc (loc, NOP_EXPR, void_type_node,
3066 fold_ignored_result (omitted));
3067
3068 if (TREE_SIDE_EFFECTS (omitted))
3069 return build2_loc (loc, COMPOUND_EXPR, type,
3070 fold_ignored_result (omitted), t);
3071
3072 return non_lvalue_loc (loc, t);
3073 }
3074
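/* Illustrative sketch (hypothetical X, not upstream code): when
   folding x * 0 the operand X must still be evaluated if it has side
   effects, so a caller writes

     return omit_one_operand_loc (loc, type, integer_zero_node, x);

   which yields a plain 0 for a side-effect-free X, but the pair
   COMPOUND_EXPR <x, 0> when X is, say, a call.  */
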
3075 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3076
3077 static tree
3078 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3079 tree omitted)
3080 {
3081 tree t = fold_convert_loc (loc, type, result);
3082
3083 /* If the resulting operand is an empty statement, just return the omitted
3084      statement cast to void.  */
3085 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3086 return build1_loc (loc, NOP_EXPR, void_type_node,
3087 fold_ignored_result (omitted));
3088
3089 if (TREE_SIDE_EFFECTS (omitted))
3090 return build2_loc (loc, COMPOUND_EXPR, type,
3091 fold_ignored_result (omitted), t);
3092
3093 return pedantic_non_lvalue_loc (loc, t);
3094 }
3095
3096 /* Return a tree for the case when the result of an expression is RESULT
3097 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3098 of the expression but are now not needed.
3099
3100 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3101 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3102 evaluated before OMITTED2. Otherwise, if neither has side effects,
3103 just do the conversion of RESULT to TYPE. */
3104
3105 tree
3106 omit_two_operands_loc (location_t loc, tree type, tree result,
3107 tree omitted1, tree omitted2)
3108 {
3109 tree t = fold_convert_loc (loc, type, result);
3110
3111 if (TREE_SIDE_EFFECTS (omitted2))
3112 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3113 if (TREE_SIDE_EFFECTS (omitted1))
3114 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3115
3116 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3117 }
3118
3119 \f
3120 /* Return a simplified tree node for the truth-negation of ARG. This
3121 never alters ARG itself. We assume that ARG is an operation that
3122 returns a truth value (0 or 1).
3123
3124 FIXME: one would think we would fold the result, but it causes
3125 problems with the dominator optimizer. */
3126
3127 static tree
3128 fold_truth_not_expr (location_t loc, tree arg)
3129 {
3130 tree type = TREE_TYPE (arg);
3131 enum tree_code code = TREE_CODE (arg);
3132 location_t loc1, loc2;
3133
3134 /* If this is a comparison, we can simply invert it, except for
3135 floating-point non-equality comparisons, in which case we just
3136 enclose a TRUTH_NOT_EXPR around what we have. */
3137
3138 if (TREE_CODE_CLASS (code) == tcc_comparison)
3139 {
3140 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3141 if (FLOAT_TYPE_P (op_type)
3142 && flag_trapping_math
3143 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3144 && code != NE_EXPR && code != EQ_EXPR)
3145 return NULL_TREE;
3146
3147 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3148 if (code == ERROR_MARK)
3149 return NULL_TREE;
3150
3151 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3152 TREE_OPERAND (arg, 1));
3153 }
3154
3155 switch (code)
3156 {
3157 case INTEGER_CST:
3158 return constant_boolean_node (integer_zerop (arg), type);
3159
3160 case TRUTH_AND_EXPR:
3161 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3162 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3163 return build2_loc (loc, TRUTH_OR_EXPR, type,
3164 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3165 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3166
3167 case TRUTH_OR_EXPR:
3168 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3169 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3170 return build2_loc (loc, TRUTH_AND_EXPR, type,
3171 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3172 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3173
3174 case TRUTH_XOR_EXPR:
3175 /* Here we can invert either operand. We invert the first operand
3176 unless the second operand is a TRUTH_NOT_EXPR in which case our
3177 result is the XOR of the first operand with the inside of the
3178 negation of the second operand. */
3179
3180 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3181 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3182 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3183 else
3184 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3185 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3186 TREE_OPERAND (arg, 1));
3187
3188 case TRUTH_ANDIF_EXPR:
3189 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3190 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3191 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3192 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3193 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3194
3195 case TRUTH_ORIF_EXPR:
3196 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3197 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3198 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3199 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3200 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3201
3202 case TRUTH_NOT_EXPR:
3203 return TREE_OPERAND (arg, 0);
3204
3205 case COND_EXPR:
3206 {
3207 tree arg1 = TREE_OPERAND (arg, 1);
3208 tree arg2 = TREE_OPERAND (arg, 2);
3209
3210 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3211 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3212
3213 /* A COND_EXPR may have a throw as one operand, which
3214 then has void type. Just leave void operands
3215 as they are. */
3216 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3217 VOID_TYPE_P (TREE_TYPE (arg1))
3218 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3219 VOID_TYPE_P (TREE_TYPE (arg2))
3220 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3221 }
3222
3223 case COMPOUND_EXPR:
3224 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3225 return build2_loc (loc, COMPOUND_EXPR, type,
3226 TREE_OPERAND (arg, 0),
3227 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3228
3229 case NON_LVALUE_EXPR:
3230 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3231 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3232
3233 CASE_CONVERT:
3234 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3235 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3236
3237 /* ... fall through ... */
3238
3239 case FLOAT_EXPR:
3240 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3241 return build1_loc (loc, TREE_CODE (arg), type,
3242 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3243
3244 case BIT_AND_EXPR:
3245 if (!integer_onep (TREE_OPERAND (arg, 1)))
3246 return NULL_TREE;
3247 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3248
3249 case SAVE_EXPR:
3250 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3251
3252 case CLEANUP_POINT_EXPR:
3253 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3254 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3255 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3256
3257 default:
3258 return NULL_TREE;
3259 }
3260 }
3261
3262 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3263 assume that ARG is an operation that returns a truth value (0 or 1
3264 for scalars, 0 or -1 for vectors). Return the folded expression if
3265 folding is successful. Otherwise, return NULL_TREE. */
3266
3267 static tree
3268 fold_invert_truthvalue (location_t loc, tree arg)
3269 {
3270 tree type = TREE_TYPE (arg);
3271 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3272 ? BIT_NOT_EXPR
3273 : TRUTH_NOT_EXPR,
3274 type, arg);
3275 }
3276
3277 /* Return a simplified tree node for the truth-negation of ARG. This
3278 never alters ARG itself. We assume that ARG is an operation that
3279 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3280
3281 tree
3282 invert_truthvalue_loc (location_t loc, tree arg)
3283 {
3284 if (TREE_CODE (arg) == ERROR_MARK)
3285 return arg;
3286
3287 tree type = TREE_TYPE (arg);
3288 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3289 ? BIT_NOT_EXPR
3290 : TRUTH_NOT_EXPR,
3291 type, arg);
3292 }
3293
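/* Illustrative note (not upstream text): inversion distributes by
   De Morgan's laws through the TRUTH_*_EXPR arms of
   fold_truth_not_expr above, so for hypothetical A and B

     invert_truthvalue_loc (loc, a && b)  -->  !a || !b

   and a vector truth value is inverted with BIT_NOT_EXPR because
   vector comparisons encode true as all-ones (-1), not 1.  */
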
3294 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3295 operands are another bit-wise operation with a common input. If so,
3296 distribute the bit operations to save an operation and possibly two if
3297 constants are involved. For example, convert
3298 (A | B) & (A | C) into A | (B & C)
3299 Further simplification will occur if B and C are constants.
3300
3301 If this optimization cannot be done, 0 will be returned. */
3302
3303 static tree
3304 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3305 tree arg0, tree arg1)
3306 {
3307 tree common;
3308 tree left, right;
3309
3310 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3311 || TREE_CODE (arg0) == code
3312 || (TREE_CODE (arg0) != BIT_AND_EXPR
3313 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3314 return 0;
3315
3316 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3317 {
3318 common = TREE_OPERAND (arg0, 0);
3319 left = TREE_OPERAND (arg0, 1);
3320 right = TREE_OPERAND (arg1, 1);
3321 }
3322 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3323 {
3324 common = TREE_OPERAND (arg0, 0);
3325 left = TREE_OPERAND (arg0, 1);
3326 right = TREE_OPERAND (arg1, 0);
3327 }
3328 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3329 {
3330 common = TREE_OPERAND (arg0, 1);
3331 left = TREE_OPERAND (arg0, 0);
3332 right = TREE_OPERAND (arg1, 1);
3333 }
3334 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3335 {
3336 common = TREE_OPERAND (arg0, 1);
3337 left = TREE_OPERAND (arg0, 0);
3338 right = TREE_OPERAND (arg1, 0);
3339 }
3340 else
3341 return 0;
3342
3343 common = fold_convert_loc (loc, type, common);
3344 left = fold_convert_loc (loc, type, left);
3345 right = fold_convert_loc (loc, type, right);
3346 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3347 fold_build2_loc (loc, code, type, left, right));
3348 }
3349
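/* Worked example (illustrative, hypothetical A, B and C):

     (a | b) & (a | c)  -->  a | (b & c)

   saves one bit operation outright, and when B and C are both
   INTEGER_CSTs the inner (b & c) folds to a single constant, saving
   two.  The dual form (a & b) | (a & c) --> a & (b | c) falls out of
   the same common-operand matching below.  */
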
3350 /* Knowing that ARG0 and ARG1 are each a MULT_EXPR or an RDIV_EXPR, simplify
3351    a binary operation with code CODE.  This optimization is unsafe.  */
3352 static tree
3353 distribute_real_division (location_t loc, enum tree_code code, tree type,
3354 tree arg0, tree arg1)
3355 {
3356 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3357 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3358
3359 /* (A / C) +- (B / C) -> (A +- B) / C. */
3360 if (mul0 == mul1
3361 && operand_equal_p (TREE_OPERAND (arg0, 1),
3362 TREE_OPERAND (arg1, 1), 0))
3363 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3364 fold_build2_loc (loc, code, type,
3365 TREE_OPERAND (arg0, 0),
3366 TREE_OPERAND (arg1, 0)),
3367 TREE_OPERAND (arg0, 1));
3368
3369 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3370 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3371 TREE_OPERAND (arg1, 0), 0)
3372 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3373 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3374 {
3375 REAL_VALUE_TYPE r0, r1;
3376 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3377 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3378 if (!mul0)
3379 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3380 if (!mul1)
3381 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3382 real_arithmetic (&r0, code, &r0, &r1);
3383 return fold_build2_loc (loc, MULT_EXPR, type,
3384 TREE_OPERAND (arg0, 0),
3385 build_real (type, r0));
3386 }
3387
3388 return NULL_TREE;
3389 }
3390 \f
3391 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3392 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3393
3394 static tree
3395 make_bit_field_ref (location_t loc, tree inner, tree type,
3396 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3397 {
3398 tree result, bftype;
3399
3400 if (bitpos == 0)
3401 {
3402 tree size = TYPE_SIZE (TREE_TYPE (inner));
3403 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3404 || POINTER_TYPE_P (TREE_TYPE (inner)))
3405 && tree_fits_shwi_p (size)
3406 && tree_to_shwi (size) == bitsize)
3407 return fold_convert_loc (loc, type, inner);
3408 }
3409
3410 bftype = type;
3411 if (TYPE_PRECISION (bftype) != bitsize
3412 || TYPE_UNSIGNED (bftype) == !unsignedp)
3413 bftype = build_nonstandard_integer_type (bitsize, 0);
3414
3415 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3416 size_int (bitsize), bitsize_int (bitpos));
3417
3418 if (bftype != type)
3419 result = fold_convert_loc (loc, type, result);
3420
3421 return result;
3422 }
3423
3424 /* Optimize a bit-field compare.
3425
3426 There are two cases: First is a compare against a constant and the
3427 second is a comparison of two items where the fields are at the same
3428 bit position relative to the start of a chunk (byte, halfword, word)
3429 large enough to contain it. In these cases we can avoid the shift
3430 implicit in bitfield extractions.
3431
3432 For constants, we emit a compare of the shifted constant with the
3433 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3434 compared. For two fields at the same position, we do the ANDs with the
3435 similar mask and compare the result of the ANDs.
3436
3437 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3438 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3439 are the left and right operands of the comparison, respectively.
3440
3441 If the optimization described above can be done, we return the resulting
3442 tree. Otherwise we return zero. */
3443
3444 static tree
3445 optimize_bit_field_compare (location_t loc, enum tree_code code,
3446 tree compare_type, tree lhs, tree rhs)
3447 {
3448 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3449 tree type = TREE_TYPE (lhs);
3450 tree unsigned_type;
3451 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3452 enum machine_mode lmode, rmode, nmode;
3453 int lunsignedp, runsignedp;
3454 int lvolatilep = 0, rvolatilep = 0;
3455 tree linner, rinner = NULL_TREE;
3456 tree mask;
3457 tree offset;
3458
3459 /* Get all the information about the extractions being done. If the bit size
3460    is the same as the size of the underlying object, we aren't doing an
3461 extraction at all and so can do nothing. We also don't want to
3462 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3463 then will no longer be able to replace it. */
3464 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3465 &lunsignedp, &lvolatilep, false);
3466 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3467 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3468 return 0;
3469
3470 if (!const_p)
3471 {
3472 /* If this is not a constant, we can only do something if bit positions,
3473 sizes, and signedness are the same. */
3474 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3475 &runsignedp, &rvolatilep, false);
3476
3477 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3478 || lunsignedp != runsignedp || offset != 0
3479 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3480 return 0;
3481 }
3482
3483 /* See if we can find a mode to refer to this field. We should be able to,
3484 but fail if we can't. */
3485 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3486 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3487 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3488 TYPE_ALIGN (TREE_TYPE (rinner))),
3489 word_mode, false);
3490 if (nmode == VOIDmode)
3491 return 0;
3492
3493 /* Set signed and unsigned types of the precision of this mode for the
3494 shifts below. */
3495 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3496
3497 /* Compute the bit position and size for the new reference and our offset
3498 within it. If the new reference is the same size as the original, we
3499 won't optimize anything, so return zero. */
3500 nbitsize = GET_MODE_BITSIZE (nmode);
3501 nbitpos = lbitpos & ~ (nbitsize - 1);
3502 lbitpos -= nbitpos;
3503 if (nbitsize == lbitsize)
3504 return 0;
3505
3506 if (BYTES_BIG_ENDIAN)
3507 lbitpos = nbitsize - lbitsize - lbitpos;
3508
3509 /* Make the mask to be used against the extracted field. */
3510 mask = build_int_cst_type (unsigned_type, -1);
3511 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3512 mask = const_binop (RSHIFT_EXPR, mask,
3513 size_int (nbitsize - lbitsize - lbitpos));
3514
3515 if (! const_p)
3516 /* If not comparing with constant, just rework the comparison
3517 and return. */
3518 return fold_build2_loc (loc, code, compare_type,
3519 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3520 make_bit_field_ref (loc, linner,
3521 unsigned_type,
3522 nbitsize, nbitpos,
3523 1),
3524 mask),
3525 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3526 make_bit_field_ref (loc, rinner,
3527 unsigned_type,
3528 nbitsize, nbitpos,
3529 1),
3530 mask));
3531
3532 /* Otherwise, we are handling the constant case. See if the constant is too
3533    big for the field.  Warn and return a tree for 0 (false) if so.  We do
3534 this not only for its own sake, but to avoid having to test for this
3535 error case below. If we didn't, we might generate wrong code.
3536
3537 For unsigned fields, the constant shifted right by the field length should
3538 be all zero. For signed fields, the high-order bits should agree with
3539 the sign bit. */
3540
3541 if (lunsignedp)
3542 {
3543 if (wi::lrshift (rhs, lbitsize) != 0)
3544 {
3545 warning (0, "comparison is always %d due to width of bit-field",
3546 code == NE_EXPR);
3547 return constant_boolean_node (code == NE_EXPR, compare_type);
3548 }
3549 }
3550 else
3551 {
3552 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3553 if (tem != 0 && tem != -1)
3554 {
3555 warning (0, "comparison is always %d due to width of bit-field",
3556 code == NE_EXPR);
3557 return constant_boolean_node (code == NE_EXPR, compare_type);
3558 }
3559 }
3560
3561 /* Single-bit compares should always be against zero. */
3562 if (lbitsize == 1 && ! integer_zerop (rhs))
3563 {
3564 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3565 rhs = build_int_cst (type, 0);
3566 }
3567
3568 /* Make a new bitfield reference, shift the constant over the
3569 appropriate number of bits and mask it with the computed mask
3570 (in case this was a signed field). If we changed it, make a new one. */
3571 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3572
3573 rhs = const_binop (BIT_AND_EXPR,
3574 const_binop (LSHIFT_EXPR,
3575 fold_convert_loc (loc, unsigned_type, rhs),
3576 size_int (lbitpos)),
3577 mask);
3578
3579 lhs = build2_loc (loc, code, compare_type,
3580 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3581 return lhs;
3582 }
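
/* Worked example (illustrative, hypothetical struct): given

     struct s { unsigned f : 3; } x;  ...  x.f == 5

   the containing chunk is loaded once, the mask of 3 ones built from
   the field width is shifted to the field position, and the test
   becomes

     (chunk & (7 << pos)) == (5 << pos)

   avoiding the extraction shift.  A constant that cannot fit, such
   as x.f == 9 for the 3-bit field, folds straight to false with the
   "comparison is always 0" warning emitted above.  */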
3583 \f
3584 /* Subroutine for fold_truth_andor_1: decode a field reference.
3585
3586 If EXP is a comparison reference, we return the innermost reference.
3587
3588 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3589 set to the starting bit number.
3590
3591 If the innermost field can be completely contained in a mode-sized
3592 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3593
3594    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3595 otherwise it is not changed.
3596
3597 *PUNSIGNEDP is set to the signedness of the field.
3598
3599 *PMASK is set to the mask used. This is either contained in a
3600 BIT_AND_EXPR or derived from the width of the field.
3601
3602 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3603
3604 Return 0 if this is not a component reference or is one that we can't
3605 do anything with. */
3606
3607 static tree
3608 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3609 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3610 int *punsignedp, int *pvolatilep,
3611 tree *pmask, tree *pand_mask)
3612 {
3613 tree outer_type = 0;
3614 tree and_mask = 0;
3615 tree mask, inner, offset;
3616 tree unsigned_type;
3617 unsigned int precision;
3618
3619 /* All the optimizations using this function assume integer fields.
3620 There are problems with FP fields since the type_for_size call
3621 below can fail for, e.g., XFmode. */
3622 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3623 return 0;
3624
3625 /* We are interested in the bare arrangement of bits, so strip everything
3626 that doesn't affect the machine mode. However, record the type of the
3627 outermost expression if it may matter below. */
3628 if (CONVERT_EXPR_P (exp)
3629 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3630 outer_type = TREE_TYPE (exp);
3631 STRIP_NOPS (exp);
3632
3633 if (TREE_CODE (exp) == BIT_AND_EXPR)
3634 {
3635 and_mask = TREE_OPERAND (exp, 1);
3636 exp = TREE_OPERAND (exp, 0);
3637 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3638 if (TREE_CODE (and_mask) != INTEGER_CST)
3639 return 0;
3640 }
3641
3642 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3643 punsignedp, pvolatilep, false);
3644 if ((inner == exp && and_mask == 0)
3645 || *pbitsize < 0 || offset != 0
3646 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3647 return 0;
3648
3649 /* If the number of bits in the reference is the same as the bitsize of
3650 the outer type, then the outer type gives the signedness. Otherwise
3651 (in case of a small bitfield) the signedness is unchanged. */
3652 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3653 *punsignedp = TYPE_UNSIGNED (outer_type);
3654
3655 /* Compute the mask to access the bitfield. */
3656 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3657 precision = TYPE_PRECISION (unsigned_type);
3658
3659 mask = build_int_cst_type (unsigned_type, -1);
3660
3661 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3662 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3663
3664 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3665 if (and_mask != 0)
3666 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3667 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3668
3669 *pmask = mask;
3670 *pand_mask = and_mask;
3671 return inner;
3672 }
3673
3674 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3675 bit positions and the type of MASK is signed. */
3676
3677 static int
3678 all_ones_mask_p (const_tree mask, unsigned int size)
3679 {
3680 tree type = TREE_TYPE (mask);
3681 unsigned int precision = TYPE_PRECISION (type);
3682
3683 /* If this function returns true when the type of the mask is
3684 UNSIGNED, then there will be errors. In particular see
3685 gcc.c-torture/execute/990326-1.c. There does not appear to be
3686 any documentation paper trail as to why this is so. But the
3687 pre-wide-int code worked with that restriction and it has been
3688 preserved here. */
3689 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3690 return false;
3691
3692 return wi::mask (size, false, precision) == mask;
3693 }
3694
3695 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3696 represents the sign bit of EXP's type. If EXP represents a sign
3697 or zero extension, also test VAL against the unextended type.
3698 The return value is the (sub)expression whose sign bit is VAL,
3699 or NULL_TREE otherwise. */
3700
3701 static tree
3702 sign_bit_p (tree exp, const_tree val)
3703 {
3704 int width;
3705 tree t;
3706
3707 /* Tree EXP must have an integral type. */
3708 t = TREE_TYPE (exp);
3709 if (! INTEGRAL_TYPE_P (t))
3710 return NULL_TREE;
3711
3712 /* Tree VAL must be an integer constant. */
3713 if (TREE_CODE (val) != INTEGER_CST
3714 || TREE_OVERFLOW (val))
3715 return NULL_TREE;
3716
3717 width = TYPE_PRECISION (t);
3718 if (wi::only_sign_bit_p (val, width))
3719 return exp;
3720
3721 /* Handle extension from a narrower type. */
3722 if (TREE_CODE (exp) == NOP_EXPR
3723 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3724 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3725
3726 return NULL_TREE;
3727 }
3728
3729 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3730 to be evaluated unconditionally. */
3731
3732 static int
3733 simple_operand_p (const_tree exp)
3734 {
3735 /* Strip any conversions that don't change the machine mode. */
3736 STRIP_NOPS (exp);
3737
3738 return (CONSTANT_CLASS_P (exp)
3739 || TREE_CODE (exp) == SSA_NAME
3740 || (DECL_P (exp)
3741 && ! TREE_ADDRESSABLE (exp)
3742 && ! TREE_THIS_VOLATILE (exp)
3743 && ! DECL_NONLOCAL (exp)
3744 /* Don't regard global variables as simple. They may be
3745 allocated in ways unknown to the compiler (shared memory,
3746 #pragma weak, etc). */
3747 && ! TREE_PUBLIC (exp)
3748 && ! DECL_EXTERNAL (exp)
3749 /* Weakrefs are not safe to be read, since they can be NULL.
3750 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3751 have DECL_WEAK flag set. */
3752 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3753 /* Loading a static variable is unduly expensive, but global
3754 registers aren't expensive. */
3755 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3756 }
3757
3758 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3759 to be evaluated unconditionally.
3760 In addition to simple_operand_p, we assume that comparisons, conversions,
3761 and logic-not operations are simple, if their operands are simple, too. */
3762
3763 static bool
3764 simple_operand_p_2 (tree exp)
3765 {
3766 enum tree_code code;
3767
3768 if (TREE_SIDE_EFFECTS (exp)
3769 || tree_could_trap_p (exp))
3770 return false;
3771
3772 while (CONVERT_EXPR_P (exp))
3773 exp = TREE_OPERAND (exp, 0);
3774
3775 code = TREE_CODE (exp);
3776
3777 if (TREE_CODE_CLASS (code) == tcc_comparison)
3778 return (simple_operand_p (TREE_OPERAND (exp, 0))
3779 && simple_operand_p (TREE_OPERAND (exp, 1)));
3780
3781 if (code == TRUTH_NOT_EXPR)
3782 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3783
3784 return simple_operand_p (exp);
3785 }
3786
3787 \f
3788 /* The following functions are subroutines to fold_range_test and allow it to
3789 try to change a logical combination of comparisons into a range test.
3790
3791 For example, both
3792 X == 2 || X == 3 || X == 4 || X == 5
3793 and
3794 X >= 2 && X <= 5
3795 are converted to
3796 (unsigned) (X - 2) <= 3
3797
3798 We describe each set of comparisons as being either inside or outside
3799 a range, using a variable named like IN_P, and then describe the
3800 range with a lower and upper bound. If one of the bounds is omitted,
3801 it represents either the highest or lowest value of the type.
3802
3803 In the comments below, we represent a range by two numbers in brackets
3804 preceded by a "+" to designate being inside that range, or a "-" to
3805 designate being outside that range, so the condition can be inverted by
3806 flipping the prefix. An omitted bound is represented by a "-". For
3807 example, "- [-, 10]" means being outside the range starting at the lowest
3808 possible value and ending at 10, in other words, being greater than 10.
3809 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3810 always false.
3811
3812 We set up things so that the missing bounds are handled in a consistent
3813 manner so neither a missing bound nor "true" and "false" need to be
3814 handled using a special case. */
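/* Illustrative sketch (not part of GCC; names are hypothetical): the
   equivalence behind the example above, checked over a window of
   values.  */

static int range_test_slow (int x) { return x == 2 || x == 3 || x == 4 || x == 5; }
static int range_test_fast (int x) { return (unsigned) (x - 2) <= 3u; }

static int
check_range_test (void)
{
  int x, ok = 1;
  for (x = -1000; x <= 1000; x++)
    ok &= range_test_slow (x) == range_test_fast (x);
  return ok;  /* 1: both forms agree on the whole window.  */
}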
3815
3816 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3817 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3818 and UPPER1_P are nonzero if the respective argument is an upper bound
3819 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3820 must be specified for a comparison. ARG1 will be converted to ARG0's
3821 type if both are specified. */
3822
3823 static tree
3824 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3825 tree arg1, int upper1_p)
3826 {
3827 tree tem;
3828 int result;
3829 int sgn0, sgn1;
3830
3831 /* If neither arg represents infinity, do the normal operation.
3832 Else, if not a comparison, return infinity. Else handle the special
3833 comparison rules. Note that most of the cases below won't occur, but
3834 are handled for consistency. */
3835
3836 if (arg0 != 0 && arg1 != 0)
3837 {
3838 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3839 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3840 STRIP_NOPS (tem);
3841 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3842 }
3843
3844 if (TREE_CODE_CLASS (code) != tcc_comparison)
3845 return 0;
3846
3847 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3848 for neither. In real maths, we cannot assume open-ended ranges are
3849 the same. But this is computer arithmetic, where numbers are finite.
3850 We can therefore represent any unbounded range by a value Z, with Z
3851 greater than any representable number, which permits
3852 us to treat unbounded ranges as equal. */
3853 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3854 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3855 switch (code)
3856 {
3857 case EQ_EXPR:
3858 result = sgn0 == sgn1;
3859 break;
3860 case NE_EXPR:
3861 result = sgn0 != sgn1;
3862 break;
3863 case LT_EXPR:
3864 result = sgn0 < sgn1;
3865 break;
3866 case LE_EXPR:
3867 result = sgn0 <= sgn1;
3868 break;
3869 case GT_EXPR:
3870 result = sgn0 > sgn1;
3871 break;
3872 case GE_EXPR:
3873 result = sgn0 >= sgn1;
3874 break;
3875 default:
3876 gcc_unreachable ();
3877 }
3878
3879 return constant_boolean_node (result, type);
3880 }
3881 \f
3882 /* Helper routine for make_range. Perform one step for it, return
3883 new expression if the loop should continue or NULL_TREE if it should
3884 stop. */
3885
3886 tree
3887 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3888 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3889 bool *strict_overflow_p)
3890 {
3891 tree arg0_type = TREE_TYPE (arg0);
3892 tree n_low, n_high, low = *p_low, high = *p_high;
3893 int in_p = *p_in_p, n_in_p;
3894
3895 switch (code)
3896 {
3897 case TRUTH_NOT_EXPR:
3898 /* We can only do something if the range is testing for zero. */
3899 if (low == NULL_TREE || high == NULL_TREE
3900 || ! integer_zerop (low) || ! integer_zerop (high))
3901 return NULL_TREE;
3902 *p_in_p = ! in_p;
3903 return arg0;
3904
3905 case EQ_EXPR: case NE_EXPR:
3906 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3907 /* We can only do something if the range is testing for zero
3908 and if the second operand is an integer constant. Note that
3909 saying something is "in" the range we make is done by
3910 complementing IN_P since it will be set in the initial case of
3911 being not equal to zero; "out" is leaving it alone. */
3912 if (low == NULL_TREE || high == NULL_TREE
3913 || ! integer_zerop (low) || ! integer_zerop (high)
3914 || TREE_CODE (arg1) != INTEGER_CST)
3915 return NULL_TREE;
3916
3917 switch (code)
3918 {
3919 case NE_EXPR: /* - [c, c] */
3920 low = high = arg1;
3921 break;
3922 case EQ_EXPR: /* + [c, c] */
3923 in_p = ! in_p, low = high = arg1;
3924 break;
3925 case GT_EXPR: /* - [-, c] */
3926 low = 0, high = arg1;
3927 break;
3928 case GE_EXPR: /* + [c, -] */
3929 in_p = ! in_p, low = arg1, high = 0;
3930 break;
3931 case LT_EXPR: /* - [c, -] */
3932 low = arg1, high = 0;
3933 break;
3934 case LE_EXPR: /* + [-, c] */
3935 in_p = ! in_p, low = 0, high = arg1;
3936 break;
3937 default:
3938 gcc_unreachable ();
3939 }
3940
3941 /* If this is an unsigned comparison, we also know that EXP is
3942 greater than or equal to zero. We base the range tests we make
3943 on that fact, so we record it here so we can parse existing
3944 range tests. We test arg0_type since often the return type
3945 of, e.g. EQ_EXPR, is boolean. */
3946 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3947 {
3948 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3949 in_p, low, high, 1,
3950 build_int_cst (arg0_type, 0),
3951 NULL_TREE))
3952 return NULL_TREE;
3953
3954 in_p = n_in_p, low = n_low, high = n_high;
3955
3956 /* If the high bound is missing, but we have a nonzero low
3957 bound, reverse the range so it goes from zero to the low bound
3958 minus 1. */
3959 if (high == 0 && low && ! integer_zerop (low))
3960 {
3961 in_p = ! in_p;
3962 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3963 build_int_cst (TREE_TYPE (low), 1), 0);
3964 low = build_int_cst (arg0_type, 0);
3965 }
3966 }
3967
3968 *p_low = low;
3969 *p_high = high;
3970 *p_in_p = in_p;
3971 return arg0;
3972
3973 case NEGATE_EXPR:
3974 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3975 low and high are non-NULL, then normalize will DTRT. */
3976 if (!TYPE_UNSIGNED (arg0_type)
3977 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3978 {
3979 if (low == NULL_TREE)
3980 low = TYPE_MIN_VALUE (arg0_type);
3981 if (high == NULL_TREE)
3982 high = TYPE_MAX_VALUE (arg0_type);
3983 }
3984
3985 /* (-x) IN [a,b] -> x in [-b, -a] */
3986 n_low = range_binop (MINUS_EXPR, exp_type,
3987 build_int_cst (exp_type, 0),
3988 0, high, 1);
3989 n_high = range_binop (MINUS_EXPR, exp_type,
3990 build_int_cst (exp_type, 0),
3991 0, low, 0);
3992 if (n_high != 0 && TREE_OVERFLOW (n_high))
3993 return NULL_TREE;
3994 goto normalize;
3995
3996 case BIT_NOT_EXPR:
3997 /* ~ X -> -X - 1 */
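/* (Two's complement identity: -X == ~X + 1, hence ~X == -X - 1;
   the MINUS_EXPR built below computes exactly that.)  */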
3998 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3999 build_int_cst (exp_type, 1));
4000
4001 case PLUS_EXPR:
4002 case MINUS_EXPR:
4003 if (TREE_CODE (arg1) != INTEGER_CST)
4004 return NULL_TREE;
4005
4006 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4007 move a constant to the other side. */
4008 if (!TYPE_UNSIGNED (arg0_type)
4009 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4010 return NULL_TREE;
4011
4012 /* If EXP is signed, any overflow in the computation is undefined,
4013 so we don't worry about it so long as our computations on
4014 the bounds don't overflow. For unsigned, overflow is defined
4015 and this is exactly the right thing. */
4016 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4017 arg0_type, low, 0, arg1, 0);
4018 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4019 arg0_type, high, 1, arg1, 0);
4020 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4021 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4022 return NULL_TREE;
4023
4024 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4025 *strict_overflow_p = true;
4026
4027 normalize:
4028 /* Check for an unsigned range which has wrapped around the maximum
4029 value thus making n_high < n_low, and normalize it. */
4030 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4031 {
4032 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4033 build_int_cst (TREE_TYPE (n_high), 1), 0);
4034 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4035 build_int_cst (TREE_TYPE (n_low), 1), 0);
4036
4037 /* If the range is of the form +/- [ x+1, x ], we won't
4038 be able to normalize it. But then, it represents the
4039 whole range or the empty set, so make it
4040 +/- [ -, - ]. */
4041 if (tree_int_cst_equal (n_low, low)
4042 && tree_int_cst_equal (n_high, high))
4043 low = high = 0;
4044 else
4045 in_p = ! in_p;
4046 }
4047 else
4048 low = n_low, high = n_high;
4049
4050 *p_low = low;
4051 *p_high = high;
4052 *p_in_p = in_p;
4053 return arg0;
4054
4055 CASE_CONVERT:
4056 case NON_LVALUE_EXPR:
4057 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4058 return NULL_TREE;
4059
4060 if (! INTEGRAL_TYPE_P (arg0_type)
4061 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4062 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4063 return NULL_TREE;
4064
4065 n_low = low, n_high = high;
4066
4067 if (n_low != 0)
4068 n_low = fold_convert_loc (loc, arg0_type, n_low);
4069
4070 if (n_high != 0)
4071 n_high = fold_convert_loc (loc, arg0_type, n_high);
4072
4073 /* If we're converting arg0 from an unsigned type to exp's
4074 signed type, we will be doing the comparison as unsigned.
4075 The tests above have already verified that LOW and HIGH
4076 are both positive.
4077
4078 So we have to ensure that we will handle large unsigned
4079 values the same way that the current signed bounds treat
4080 negative values. */
4081
4082 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4083 {
4084 tree high_positive;
4085 tree equiv_type;
4086 /* For fixed-point modes, we need to pass the saturating flag
4087 as the 2nd parameter. */
4088 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4089 equiv_type
4090 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4091 TYPE_SATURATING (arg0_type));
4092 else
4093 equiv_type
4094 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4095
4096 /* A range without an upper bound is, naturally, unbounded.
4097 Since convert would have cropped a very large value, use
4098 the max value for the destination type. */
4099 high_positive
4100 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4101 : TYPE_MAX_VALUE (arg0_type);
4102
4103 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4104 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4105 fold_convert_loc (loc, arg0_type,
4106 high_positive),
4107 build_int_cst (arg0_type, 1));
4108
4109 /* If the low bound is specified, "and" the range with the
4110 range for which the original unsigned value will be
4111 positive. */
4112 if (low != 0)
4113 {
4114 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4115 1, fold_convert_loc (loc, arg0_type,
4116 integer_zero_node),
4117 high_positive))
4118 return NULL_TREE;
4119
4120 in_p = (n_in_p == in_p);
4121 }
4122 else
4123 {
4124 /* Otherwise, "or" the range with the range of the input
4125 that will be interpreted as negative. */
4126 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4127 1, fold_convert_loc (loc, arg0_type,
4128 integer_zero_node),
4129 high_positive))
4130 return NULL_TREE;
4131
4132 in_p = (in_p != n_in_p);
4133 }
4134 }
4135
4136 *p_low = n_low;
4137 *p_high = n_high;
4138 *p_in_p = in_p;
4139 return arg0;
4140
4141 default:
4142 return NULL_TREE;
4143 }
4144 }
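/* Illustrative sketch (not part of GCC; names are hypothetical): the
   wrapped-range normalization above, for unsigned char.  The test
   (unsigned char) (x + 3) <= 10 is the range [0,10] on x + 3; shifting
   the bounds gives the wrapped range [253,7] on x, which the normalize
   step turns into "not in [8,252]".  */

static int range_on_sum (unsigned char x) { return (unsigned char) (x + 3) <= 10; }
static int normalized_range (unsigned char x) { return ! (x >= 8 && x <= 252); }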
4145
4146 /* Given EXP, a logical expression, set the range it is testing into
4147 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4148 actually being tested. *PLOW and *PHIGH will be made of the same
4149 type as the returned expression. If EXP is not a comparison, we
4150 will most likely not be returning a useful value and range. Set
4151 *STRICT_OVERFLOW_P to true if the return value is only valid
4152 because signed overflow is undefined; otherwise, do not change
4153 *STRICT_OVERFLOW_P. */
4154
4155 tree
4156 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4157 bool *strict_overflow_p)
4158 {
4159 enum tree_code code;
4160 tree arg0, arg1 = NULL_TREE;
4161 tree exp_type, nexp;
4162 int in_p;
4163 tree low, high;
4164 location_t loc = EXPR_LOCATION (exp);
4165
4166 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4167 and see if we can refine the range. Some of the cases below may not
4168 happen, but it doesn't seem worth worrying about this. We "continue"
4169 the outer loop when we've changed something; otherwise we "break"
4170 the switch, which will "break" the while. */
4171
4172 in_p = 0;
4173 low = high = build_int_cst (TREE_TYPE (exp), 0);
4174
4175 while (1)
4176 {
4177 code = TREE_CODE (exp);
4178 exp_type = TREE_TYPE (exp);
4179 arg0 = NULL_TREE;
4180
4181 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4182 {
4183 if (TREE_OPERAND_LENGTH (exp) > 0)
4184 arg0 = TREE_OPERAND (exp, 0);
4185 if (TREE_CODE_CLASS (code) == tcc_binary
4186 || TREE_CODE_CLASS (code) == tcc_comparison
4187 || (TREE_CODE_CLASS (code) == tcc_expression
4188 && TREE_OPERAND_LENGTH (exp) > 1))
4189 arg1 = TREE_OPERAND (exp, 1);
4190 }
4191 if (arg0 == NULL_TREE)
4192 break;
4193
4194 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4195 &high, &in_p, strict_overflow_p);
4196 if (nexp == NULL_TREE)
4197 break;
4198 exp = nexp;
4199 }
4200
4201 /* If EXP is a constant, we can evaluate whether this is true or false. */
4202 if (TREE_CODE (exp) == INTEGER_CST)
4203 {
4204 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4205 exp, 0, low, 0))
4206 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4207 exp, 1, high, 1)));
4208 low = high = 0;
4209 exp = 0;
4210 }
4211
4212 *pin_p = in_p, *plow = low, *phigh = high;
4213 return exp;
4214 }
4215 \f
4216 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4217 type, TYPE, return an expression to test if EXP is in (or out of, depending
4218 on IN_P) the range. Return 0 if the test couldn't be created. */
4219
4220 tree
4221 build_range_check (location_t loc, tree type, tree exp, int in_p,
4222 tree low, tree high)
4223 {
4224 tree etype = TREE_TYPE (exp), value;
4225
4226 #ifdef HAVE_canonicalize_funcptr_for_compare
4227 /* Disable this optimization for function pointer expressions
4228 on targets that require function pointer canonicalization. */
4229 if (HAVE_canonicalize_funcptr_for_compare
4230 && TREE_CODE (etype) == POINTER_TYPE
4231 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4232 return NULL_TREE;
4233 #endif
4234
4235 if (! in_p)
4236 {
4237 value = build_range_check (loc, type, exp, 1, low, high);
4238 if (value != 0)
4239 return invert_truthvalue_loc (loc, value);
4240
4241 return 0;
4242 }
4243
4244 if (low == 0 && high == 0)
4245 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4246
4247 if (low == 0)
4248 return fold_build2_loc (loc, LE_EXPR, type, exp,
4249 fold_convert_loc (loc, etype, high));
4250
4251 if (high == 0)
4252 return fold_build2_loc (loc, GE_EXPR, type, exp,
4253 fold_convert_loc (loc, etype, low));
4254
4255 if (operand_equal_p (low, high, 0))
4256 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4257 fold_convert_loc (loc, etype, low));
4258
4259 if (integer_zerop (low))
4260 {
4261 if (! TYPE_UNSIGNED (etype))
4262 {
4263 etype = unsigned_type_for (etype);
4264 high = fold_convert_loc (loc, etype, high);
4265 exp = fold_convert_loc (loc, etype, exp);
4266 }
4267 return build_range_check (loc, type, exp, 1, 0, high);
4268 }
4269
4270 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4271 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4272 {
4273 int prec = TYPE_PRECISION (etype);
4274
4275 if (wi::mask (prec - 1, false, prec) == high)
4276 {
4277 if (TYPE_UNSIGNED (etype))
4278 {
4279 tree signed_etype = signed_type_for (etype);
4280 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4281 etype
4282 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4283 else
4284 etype = signed_etype;
4285 exp = fold_convert_loc (loc, etype, exp);
4286 }
4287 return fold_build2_loc (loc, GT_EXPR, type, exp,
4288 build_int_cst (etype, 0));
4289 }
4290 }
4291
4292 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4293 This requires wrap-around arithmetic for the type of the expression.
4294 First make sure that arithmetic in this type is valid, then make sure
4295 that it wraps around. */
4296 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4297 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4298 TYPE_UNSIGNED (etype));
4299
4300 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4301 {
4302 tree utype, minv, maxv;
4303
4304 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4305 for the type in question, as we rely on this here. */
4306 utype = unsigned_type_for (etype);
4307 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4308 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4309 build_int_cst (TREE_TYPE (maxv), 1), 1);
4310 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4311
4312 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4313 minv, 1, maxv, 1)))
4314 etype = utype;
4315 else
4316 return 0;
4317 }
4318
4319 high = fold_convert_loc (loc, etype, high);
4320 low = fold_convert_loc (loc, etype, low);
4321 exp = fold_convert_loc (loc, etype, exp);
4322
4323 value = const_binop (MINUS_EXPR, high, low);
4324
4326 if (POINTER_TYPE_P (etype))
4327 {
4328 if (value != 0 && !TREE_OVERFLOW (value))
4329 {
4330 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4331 return build_range_check (loc, type,
4332 fold_build_pointer_plus_loc (loc, exp, low),
4333 1, build_int_cst (etype, 0), value);
4334 }
4335 return 0;
4336 }
4337
4338 if (value != 0 && !TREE_OVERFLOW (value))
4339 return build_range_check (loc, type,
4340 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4341 1, build_int_cst (etype, 0), value);
4342
4343 return 0;
4344 }
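/* Illustrative sketch (not part of GCC; names are hypothetical): the
   special case noted above, where (c >= 1 && c <= 127) on an unsigned
   char becomes a single signed comparison.  Assumes 8-bit chars and
   two's complement conversion behavior.  */

static int range_form (unsigned char c)  { return c >= 1 && c <= 127; }
static int signed_form (unsigned char c) { return (signed char) c > 0; }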
4345 \f
4346 /* Return the predecessor of VAL in its type, handling the infinite case. */
4347
4348 static tree
4349 range_predecessor (tree val)
4350 {
4351 tree type = TREE_TYPE (val);
4352
4353 if (INTEGRAL_TYPE_P (type)
4354 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4355 return 0;
4356 else
4357 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4358 build_int_cst (TREE_TYPE (val), 1), 0);
4359 }
4360
4361 /* Return the successor of VAL in its type, handling the infinite case. */
4362
4363 static tree
4364 range_successor (tree val)
4365 {
4366 tree type = TREE_TYPE (val);
4367
4368 if (INTEGRAL_TYPE_P (type)
4369 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4370 return 0;
4371 else
4372 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4373 build_int_cst (TREE_TYPE (val), 1), 0);
4374 }
4375
4376 /* Given two ranges, see if we can merge them into one. Return 1 if we
4377 can, 0 if we can't. Set the output range into the specified parameters. */
4378
4379 bool
4380 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4381 tree high0, int in1_p, tree low1, tree high1)
4382 {
4383 int no_overlap;
4384 int subset;
4385 int temp;
4386 tree tem;
4387 int in_p;
4388 tree low, high;
4389 int lowequal = ((low0 == 0 && low1 == 0)
4390 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4391 low0, 0, low1, 0)));
4392 int highequal = ((high0 == 0 && high1 == 0)
4393 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4394 high0, 1, high1, 1)));
4395
4396 /* Make range 0 be the range that starts first, or ends last if they
4397 start at the same value. Swap them if it isn't. */
4398 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4399 low0, 0, low1, 0))
4400 || (lowequal
4401 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4402 high1, 1, high0, 1))))
4403 {
4404 temp = in0_p, in0_p = in1_p, in1_p = temp;
4405 tem = low0, low0 = low1, low1 = tem;
4406 tem = high0, high0 = high1, high1 = tem;
4407 }
4408
4409 /* Now flag two cases, whether the ranges are disjoint or whether the
4410 second range is totally subsumed in the first. Note that the tests
4411 below are simplified by the ones above. */
4412 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4413 high0, 1, low1, 0));
4414 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4415 high1, 1, high0, 1));
4416
4417 /* We now have four cases, depending on whether we are including or
4418 excluding the two ranges. */
4419 if (in0_p && in1_p)
4420 {
4421 /* If they don't overlap, the result is false. If the second range
4422 is a subset it is the result. Otherwise, the range is from the start
4423 of the second to the end of the first. */
4424 if (no_overlap)
4425 in_p = 0, low = high = 0;
4426 else if (subset)
4427 in_p = 1, low = low1, high = high1;
4428 else
4429 in_p = 1, low = low1, high = high0;
4430 }
4431
4432 else if (in0_p && ! in1_p)
4433 {
4434 /* If they don't overlap, the result is the first range. If they are
4435 equal, the result is false. If the second range is a subset of the
4436 first, and the ranges begin at the same place, we go from just after
4437 the end of the second range to the end of the first. If the second
4438 range is not a subset of the first, or if it is a subset and both
4439 ranges end at the same place, the range starts at the start of the
4440 first range and ends just before the second range.
4441 Otherwise, we can't describe this as a single range. */
4442 if (no_overlap)
4443 in_p = 1, low = low0, high = high0;
4444 else if (lowequal && highequal)
4445 in_p = 0, low = high = 0;
4446 else if (subset && lowequal)
4447 {
4448 low = range_successor (high1);
4449 high = high0;
4450 in_p = 1;
4451 if (low == 0)
4452 {
4453 /* We are in the weird situation where high0 > high1 but
4454 high1 has no successor. Punt. */
4455 return 0;
4456 }
4457 }
4458 else if (! subset || highequal)
4459 {
4460 low = low0;
4461 high = range_predecessor (low1);
4462 in_p = 1;
4463 if (high == 0)
4464 {
4465 /* low0 < low1 but low1 has no predecessor. Punt. */
4466 return 0;
4467 }
4468 }
4469 else
4470 return 0;
4471 }
4472
4473 else if (! in0_p && in1_p)
4474 {
4475 /* If they don't overlap, the result is the second range. If the second
4476 is a subset of the first, the result is false. Otherwise,
4477 the range starts just after the first range and ends at the
4478 end of the second. */
4479 if (no_overlap)
4480 in_p = 1, low = low1, high = high1;
4481 else if (subset || highequal)
4482 in_p = 0, low = high = 0;
4483 else
4484 {
4485 low = range_successor (high0);
4486 high = high1;
4487 in_p = 1;
4488 if (low == 0)
4489 {
4490 /* high1 > high0 but high0 has no successor. Punt. */
4491 return 0;
4492 }
4493 }
4494 }
4495
4496 else
4497 {
4498 /* The case where we are excluding both ranges. Here the complex case
4499 is if they don't overlap. In that case, the only time we have a
4500 range is if they are adjacent. If the second is a subset of the
4501 first, the result is the first. Otherwise, the range to exclude
4502 starts at the beginning of the first range and ends at the end of the
4503 second. */
4504 if (no_overlap)
4505 {
4506 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4507 range_successor (high0),
4508 1, low1, 0)))
4509 in_p = 0, low = low0, high = high1;
4510 else
4511 {
4512 /* Canonicalize - [min, x] into - [-, x]. */
4513 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4514 switch (TREE_CODE (TREE_TYPE (low0)))
4515 {
4516 case ENUMERAL_TYPE:
4517 if (TYPE_PRECISION (TREE_TYPE (low0))
4518 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4519 break;
4520 /* FALLTHROUGH */
4521 case INTEGER_TYPE:
4522 if (tree_int_cst_equal (low0,
4523 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4524 low0 = 0;
4525 break;
4526 case POINTER_TYPE:
4527 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4528 && integer_zerop (low0))
4529 low0 = 0;
4530 break;
4531 default:
4532 break;
4533 }
4534
4535 /* Canonicalize - [x, max] into - [x, -]. */
4536 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4537 switch (TREE_CODE (TREE_TYPE (high1)))
4538 {
4539 case ENUMERAL_TYPE:
4540 if (TYPE_PRECISION (TREE_TYPE (high1))
4541 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4542 break;
4543 /* FALLTHROUGH */
4544 case INTEGER_TYPE:
4545 if (tree_int_cst_equal (high1,
4546 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4547 high1 = 0;
4548 break;
4549 case POINTER_TYPE:
4550 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4551 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4552 high1, 1,
4553 build_int_cst (TREE_TYPE (high1), 1),
4554 1)))
4555 high1 = 0;
4556 break;
4557 default:
4558 break;
4559 }
4560
4561 /* The ranges might be also adjacent between the maximum and
4562 minimum values of the given type. For
4563 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4564 return + [x + 1, y - 1]. */
4565 if (low0 == 0 && high1 == 0)
4566 {
4567 low = range_successor (high0);
4568 high = range_predecessor (low1);
4569 if (low == 0 || high == 0)
4570 return 0;
4571
4572 in_p = 1;
4573 }
4574 else
4575 return 0;
4576 }
4577 }
4578 else if (subset)
4579 in_p = 0, low = low0, high = high0;
4580 else
4581 in_p = 0, low = low0, high = high1;
4582 }
4583
4584 *pin_p = in_p, *plow = low, *phigh = high;
4585 return 1;
4586 }
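/* Illustrative sketch (not part of GCC; names are hypothetical): one
   concrete instance of merge_ranges.  ANDing the "in" ranges [2,9] and
   [4,12] overlaps without subsumption, so the result runs from the
   start of the second range to the end of the first: [4,9].  */

static int two_range_tests (int x)   { return (x >= 2 && x <= 9) && (x >= 4 && x <= 12); }
static int merged_range_test (int x) { return x >= 4 && x <= 9; }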
4587 \f
4588
4589 /* Subroutine of fold, looking inside expressions of the form
4590 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4591 of the COND_EXPR. This function is being used also to optimize
4592 A op B ? C : A, by reversing the comparison first.
4593
4594 Return a folded expression whose code is not a COND_EXPR
4595 anymore, or NULL_TREE if no folding opportunity is found. */
4596
4597 static tree
4598 fold_cond_expr_with_comparison (location_t loc, tree type,
4599 tree arg0, tree arg1, tree arg2)
4600 {
4601 enum tree_code comp_code = TREE_CODE (arg0);
4602 tree arg00 = TREE_OPERAND (arg0, 0);
4603 tree arg01 = TREE_OPERAND (arg0, 1);
4604 tree arg1_type = TREE_TYPE (arg1);
4605 tree tem;
4606
4607 STRIP_NOPS (arg1);
4608 STRIP_NOPS (arg2);
4609
4610 /* If we have A op 0 ? A : -A, consider applying the following
4611 transformations:
4612
4613 A == 0? A : -A same as -A
4614 A != 0? A : -A same as A
4615 A >= 0? A : -A same as abs (A)
4616 A > 0? A : -A same as abs (A)
4617 A <= 0? A : -A same as -abs (A)
4618 A < 0? A : -A same as -abs (A)
4619
4620 None of these transformations work for modes with signed
4621 zeros. If A is +/-0, the first two transformations will
4622 change the sign of the result (from +0 to -0, or vice
4623 versa). The last four will fix the sign of the result,
4624 even though the original expressions could be positive or
4625 negative, depending on the sign of A.
4626
4627 Note that all these transformations are correct if A is
4628 NaN, since the two alternatives (A and -A) are also NaNs. */
4629 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4630 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4631 ? real_zerop (arg01)
4632 : integer_zerop (arg01))
4633 && ((TREE_CODE (arg2) == NEGATE_EXPR
4634 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4635 /* In the case that A is of the form X-Y, '-A' (arg2) may
4636 have already been folded to Y-X, check for that. */
4637 || (TREE_CODE (arg1) == MINUS_EXPR
4638 && TREE_CODE (arg2) == MINUS_EXPR
4639 && operand_equal_p (TREE_OPERAND (arg1, 0),
4640 TREE_OPERAND (arg2, 1), 0)
4641 && operand_equal_p (TREE_OPERAND (arg1, 1),
4642 TREE_OPERAND (arg2, 0), 0))))
4643 switch (comp_code)
4644 {
4645 case EQ_EXPR:
4646 case UNEQ_EXPR:
4647 tem = fold_convert_loc (loc, arg1_type, arg1);
4648 return pedantic_non_lvalue_loc (loc,
4649 fold_convert_loc (loc, type,
4650 negate_expr (tem)));
4651 case NE_EXPR:
4652 case LTGT_EXPR:
4653 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4654 case UNGE_EXPR:
4655 case UNGT_EXPR:
4656 if (flag_trapping_math)
4657 break;
4658 /* Fall through. */
4659 case GE_EXPR:
4660 case GT_EXPR:
4661 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4662 arg1 = fold_convert_loc (loc, signed_type_for
4663 (TREE_TYPE (arg1)), arg1);
4664 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4665 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4666 case UNLE_EXPR:
4667 case UNLT_EXPR:
4668 if (flag_trapping_math)
4669 break;
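/* Fall through. */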
4670 case LE_EXPR:
4671 case LT_EXPR:
4672 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4673 arg1 = fold_convert_loc (loc, signed_type_for
4674 (TREE_TYPE (arg1)), arg1);
4675 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4676 return negate_expr (fold_convert_loc (loc, type, tem));
4677 default:
4678 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4679 break;
4680 }
4681
4682 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4683 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4684 both transformations are correct when A is NaN: A != 0
4685 is then true, and A == 0 is false. */
4686
4687 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4688 && integer_zerop (arg01) && integer_zerop (arg2))
4689 {
4690 if (comp_code == NE_EXPR)
4691 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4692 else if (comp_code == EQ_EXPR)
4693 return build_zero_cst (type);
4694 }
4695
4696 /* Try some transformations of A op B ? A : B.
4697
4698 A == B? A : B same as B
4699 A != B? A : B same as A
4700 A >= B? A : B same as max (A, B)
4701 A > B? A : B same as max (B, A)
4702 A <= B? A : B same as min (A, B)
4703 A < B? A : B same as min (B, A)
4704
4705 As above, these transformations don't work in the presence
4706 of signed zeros. For example, if A and B are zeros of
4707 opposite sign, the first two transformations will change
4708 the sign of the result. In the last four, the original
4709 expressions give different results for (A=+0, B=-0) and
4710 (A=-0, B=+0), but the transformed expressions do not.
4711
4712 The first two transformations are correct if either A or B
4713 is a NaN. In the first transformation, the condition will
4714 be false, and B will indeed be chosen. In the case of the
4715 second transformation, the condition A != B will be true,
4716 and A will be chosen.
4717
4718 The conversions to max() and min() are not correct if B is
4719 a number and A is not. The conditions in the original
4720 expressions will be false, so all four give B. The min()
4721 and max() versions would give a NaN instead. */
4722 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4723 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4724 /* Avoid these transformations if the COND_EXPR may be used
4725 as an lvalue in the C++ front-end. PR c++/19199. */
4726 && (in_gimple_form
4727 || VECTOR_TYPE_P (type)
4728 || (strcmp (lang_hooks.name, "GNU C++") != 0
4729 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4730 || ! maybe_lvalue_p (arg1)
4731 || ! maybe_lvalue_p (arg2)))
4732 {
4733 tree comp_op0 = arg00;
4734 tree comp_op1 = arg01;
4735 tree comp_type = TREE_TYPE (comp_op0);
4736
4737 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4738 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4739 {
4740 comp_type = type;
4741 comp_op0 = arg1;
4742 comp_op1 = arg2;
4743 }
4744
4745 switch (comp_code)
4746 {
4747 case EQ_EXPR:
4748 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4749 case NE_EXPR:
4750 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4751 case LE_EXPR:
4752 case LT_EXPR:
4753 case UNLE_EXPR:
4754 case UNLT_EXPR:
4755 /* In C++ a ?: expression can be an lvalue, so put the
4756 operand which will be used if they are equal first
4757 so that we can convert this back to the
4758 corresponding COND_EXPR. */
4759 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4760 {
4761 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4762 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4763 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4764 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4765 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4766 comp_op1, comp_op0);
4767 return pedantic_non_lvalue_loc (loc,
4768 fold_convert_loc (loc, type, tem));
4769 }
4770 break;
4771 case GE_EXPR:
4772 case GT_EXPR:
4773 case UNGE_EXPR:
4774 case UNGT_EXPR:
4775 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4776 {
4777 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4778 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4779 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4780 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4781 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4782 comp_op1, comp_op0);
4783 return pedantic_non_lvalue_loc (loc,
4784 fold_convert_loc (loc, type, tem));
4785 }
4786 break;
4787 case UNEQ_EXPR:
4788 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4789 return pedantic_non_lvalue_loc (loc,
4790 fold_convert_loc (loc, type, arg2));
4791 break;
4792 case LTGT_EXPR:
4793 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4794 return pedantic_non_lvalue_loc (loc,
4795 fold_convert_loc (loc, type, arg1));
4796 break;
4797 default:
4798 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4799 break;
4800 }
4801 }
4802
4803 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4804 we might still be able to simplify this. For example,
4805 if C1 is one less or one more than C2, this might have started
4806 out as a MIN or MAX and been transformed by this function.
4807 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4808
4809 if (INTEGRAL_TYPE_P (type)
4810 && TREE_CODE (arg01) == INTEGER_CST
4811 && TREE_CODE (arg2) == INTEGER_CST)
4812 switch (comp_code)
4813 {
4814 case EQ_EXPR:
4815 if (TREE_CODE (arg1) == INTEGER_CST)
4816 break;
4817 /* We can replace A with C1 in this case. */
4818 arg1 = fold_convert_loc (loc, type, arg01);
4819 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4820
4821 case LT_EXPR:
4822 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4823 MIN_EXPR, to preserve the signedness of the comparison. */
4824 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4825 OEP_ONLY_CONST)
4826 && operand_equal_p (arg01,
4827 const_binop (PLUS_EXPR, arg2,
4828 build_int_cst (type, 1)),
4829 OEP_ONLY_CONST))
4830 {
4831 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4832 fold_convert_loc (loc, TREE_TYPE (arg00),
4833 arg2));
4834 return pedantic_non_lvalue_loc (loc,
4835 fold_convert_loc (loc, type, tem));
4836 }
4837 break;
4838
4839 case LE_EXPR:
4840 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4841 as above. */
4842 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4843 OEP_ONLY_CONST)
4844 && operand_equal_p (arg01,
4845 const_binop (MINUS_EXPR, arg2,
4846 build_int_cst (type, 1)),
4847 OEP_ONLY_CONST))
4848 {
4849 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4850 fold_convert_loc (loc, TREE_TYPE (arg00),
4851 arg2));
4852 return pedantic_non_lvalue_loc (loc,
4853 fold_convert_loc (loc, type, tem));
4854 }
4855 break;
4856
4857 case GT_EXPR:
4858 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4859 MAX_EXPR, to preserve the signedness of the comparison. */
4860 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4861 OEP_ONLY_CONST)
4862 && operand_equal_p (arg01,
4863 const_binop (MINUS_EXPR, arg2,
4864 build_int_cst (type, 1)),
4865 OEP_ONLY_CONST))
4866 {
4867 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4868 fold_convert_loc (loc, TREE_TYPE (arg00),
4869 arg2));
4870 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4871 }
4872 break;
4873
4874 case GE_EXPR:
4875 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4876 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4877 OEP_ONLY_CONST)
4878 && operand_equal_p (arg01,
4879 const_binop (PLUS_EXPR, arg2,
4880 build_int_cst (type, 1)),
4881 OEP_ONLY_CONST))
4882 {
4883 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4884 fold_convert_loc (loc, TREE_TYPE (arg00),
4885 arg2));
4886 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4887 }
4888 break;
4889 case NE_EXPR:
4890 break;
4891 default:
4892 gcc_unreachable ();
4893 }
4894
4895 return NULL_TREE;
4896 }
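/* Illustrative sketch (not part of GCC; names are hypothetical): the
   integer rows of the A op 0 ? A : -A table above.  With no signed
   zeros to preserve, each conditional collapses as shown (ignoring
   INT_MIN, where the negation overflows in either form).  */

static int abs_from_cond (int a)  { return a > 0 ? a : -a; }  /* abs (a) */
static int nabs_from_cond (int a) { return a < 0 ? a : -a; }  /* -abs (a) */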
4897
4898
4899 \f
4900 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4901 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4902 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4903 false) >= 2)
4904 #endif
4905
4906 /* EXP is some logical combination of boolean tests. See if we can
4907 merge it into some range test. Return the new tree if so. */
4908
4909 static tree
4910 fold_range_test (location_t loc, enum tree_code code, tree type,
4911 tree op0, tree op1)
4912 {
4913 int or_op = (code == TRUTH_ORIF_EXPR
4914 || code == TRUTH_OR_EXPR);
4915 int in0_p, in1_p, in_p;
4916 tree low0, low1, low, high0, high1, high;
4917 bool strict_overflow_p = false;
4918 tree tem, lhs, rhs;
4919 const char * const warnmsg = G_("assuming signed overflow does not occur "
4920 "when simplifying range test");
4921
4922 if (!INTEGRAL_TYPE_P (type))
4923 return 0;
4924
4925 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4926 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4927
4928 /* If this is an OR operation, invert both sides; we will invert
4929 again at the end. */
4930 if (or_op)
4931 in0_p = ! in0_p, in1_p = ! in1_p;
4932
4933 /* If both expressions are the same, if we can merge the ranges, and we
4934 can build the range test, return it or it inverted. If one of the
4935 ranges is always true or always false, consider it to be the same
4936 expression as the other. */
4937 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4938 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4939 in1_p, low1, high1)
4940 && 0 != (tem = (build_range_check (loc, type,
4941 lhs != 0 ? lhs
4942 : rhs != 0 ? rhs : integer_zero_node,
4943 in_p, low, high))))
4944 {
4945 if (strict_overflow_p)
4946 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4947 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4948 }
4949
4950 /* On machines where the branch cost is expensive, if this is a
4951 short-circuited branch and the underlying object on both sides
4952 is the same, make a non-short-circuit operation. */
4953 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4954 && lhs != 0 && rhs != 0
4955 && (code == TRUTH_ANDIF_EXPR
4956 || code == TRUTH_ORIF_EXPR)
4957 && operand_equal_p (lhs, rhs, 0))
4958 {
4959 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4960 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4961 which cases we can't do this. */
4962 if (simple_operand_p (lhs))
4963 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4964 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4965 type, op0, op1);
4966
4967 else if (!lang_hooks.decls.global_bindings_p ()
4968 && !CONTAINS_PLACEHOLDER_P (lhs))
4969 {
4970 tree common = save_expr (lhs);
4971
4972 if (0 != (lhs = build_range_check (loc, type, common,
4973 or_op ? ! in0_p : in0_p,
4974 low0, high0))
4975 && (0 != (rhs = build_range_check (loc, type, common,
4976 or_op ? ! in1_p : in1_p,
4977 low1, high1))))
4978 {
4979 if (strict_overflow_p)
4980 fold_overflow_warning (warnmsg,
4981 WARN_STRICT_OVERFLOW_COMPARISON);
4982 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4983 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4984 type, lhs, rhs);
4985 }
4986 }
4987 }
4988
4989 return 0;
4990 }
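/* Illustrative sketch (not part of GCC; names are hypothetical): the
   classic payoff of the machinery above.  Both tests are equivalent;
   the second is the branch-free range form that fold_range_test and
   build_range_check aim for.  */

static int is_digit_branchy (int c)    { return c >= '0' && c <= '9'; }
static int is_digit_branchfree (int c) { return (unsigned) (c - '0') <= 9u; }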
4991 \f
4992 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4993 bit value. Arrange things so the extra bits will be set to zero if and
4994 only if C is sign-extended to its full width. If MASK is nonzero,
4995 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4996
4997 static tree
4998 unextend (tree c, int p, int unsignedp, tree mask)
4999 {
5000 tree type = TREE_TYPE (c);
5001 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5002 tree temp;
5003
5004 if (p == modesize || unsignedp)
5005 return c;
5006
5007 /* We work by getting just the sign bit into the low-order bit, then
5008 into the high-order bit, then sign-extend. We then XOR that value
5009 with C. */
5010 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5011
5012 /* We must use a signed type in order to get an arithmetic right shift.
5013 However, we must also avoid introducing accidental overflows, so that
5014 a subsequent call to integer_zerop will work. Hence we must
5015 do the type conversion here. At this point, the constant is either
5016 zero or one, and the conversion to a signed type can never overflow.
5017 We could get an overflow if this conversion is done anywhere else. */
5018 if (TYPE_UNSIGNED (type))
5019 temp = fold_convert (signed_type_for (type), temp);
5020
5021 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5022 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5023 if (mask != 0)
5024 temp = const_binop (BIT_AND_EXPR, temp,
5025 fold_convert (TREE_TYPE (c), mask));
5026 /* If necessary, convert the type back to match the type of C. */
5027 if (TYPE_UNSIGNED (type))
5028 temp = fold_convert (type, temp);
5029
5030 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5031 }
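/* Illustrative sketch (not part of GCC; names are hypothetical): the
   classic XOR/subtract idiom for sign-extending a P-bit field, a
   plain-integer analogue of the shift trick unextend performs on tree
   constants.  Assumes two's complement, 32-bit unsigned int, and
   1 <= p <= 31.  */

static int
sign_extend_p_bits (unsigned int c, int p)
{
  unsigned int m = 1u << (p - 1);       /* Sign bit of the P-bit field.  */
  unsigned int x = c & ((m << 1) - 1u); /* Keep only the low P bits.  */
  return (int) ((x ^ m) - m);           /* Flip and subtract to extend.  */
}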
5032 \f
5033 /* For an expression that has the form
5034 (A && B) || ~B
5035 or
5036 (A || B) && ~B,
5037 we can drop one of the inner expressions and simplify to
5038 A || ~B
5039 or
5040 A && ~B
5041 LOC is the location of the resulting expression. OP is the inner
5042 logical operation; the left-hand side in the examples above, while CMPOP
5043 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5044 removing a condition that guards another, as in
5045 (A != NULL && A->...) || A == NULL
5046 which we must not transform. If RHS_ONLY is true, only eliminate the
5047 right-most operand of the inner logical operation. */
5048
5049 static tree
5050 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5051 bool rhs_only)
5052 {
5053 tree type = TREE_TYPE (cmpop);
5054 enum tree_code code = TREE_CODE (cmpop);
5055 enum tree_code truthop_code = TREE_CODE (op);
5056 tree lhs = TREE_OPERAND (op, 0);
5057 tree rhs = TREE_OPERAND (op, 1);
5058 tree orig_lhs = lhs, orig_rhs = rhs;
5059 enum tree_code rhs_code = TREE_CODE (rhs);
5060 enum tree_code lhs_code = TREE_CODE (lhs);
5061 enum tree_code inv_code;
5062
5063 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5064 return NULL_TREE;
5065
5066 if (TREE_CODE_CLASS (code) != tcc_comparison)
5067 return NULL_TREE;
5068
5069 if (rhs_code == truthop_code)
5070 {
5071 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5072 if (newrhs != NULL_TREE)
5073 {
5074 rhs = newrhs;
5075 rhs_code = TREE_CODE (rhs);
5076 }
5077 }
5078 if (lhs_code == truthop_code && !rhs_only)
5079 {
5080 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5081 if (newlhs != NULL_TREE)
5082 {
5083 lhs = newlhs;
5084 lhs_code = TREE_CODE (lhs);
5085 }
5086 }
5087
5088 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5089 if (inv_code == rhs_code
5090 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5091 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5092 return lhs;
5093 if (!rhs_only && inv_code == lhs_code
5094 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5095 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5096 return rhs;
5097 if (rhs != orig_rhs || lhs != orig_lhs)
5098 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5099 lhs, rhs);
5100 return NULL_TREE;
5101 }
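/* Illustrative sketch (not part of GCC; names are hypothetical): an
   exhaustive boolean check of the simplification described above,
   (A && B) || !B having the same truth table as A || !B.  */

static int
check_opposite_arm_merge (void)
{
  int a, b, ok = 1;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      ok &= ((a && b) || !b) == (a || !b);
  return ok;  /* 1: the truth tables match.  */
}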
5102
5103 /* Find ways of folding logical expressions of LHS and RHS:
5104 Try to merge two comparisons to the same innermost item.
5105 Look for range tests like "ch >= '0' && ch <= '9'".
5106 Look for combinations of simple terms on machines with expensive branches
5107 and evaluate the RHS unconditionally.
5108
5109 For example, if we have p->a == 2 && p->b == 4 and we can make an
5110 object large enough to span both A and B, we can do this with a comparison
5111 against the object ANDed with the a mask.
5112
5113 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5114 operations to do this with one comparison.
5115
5116 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5117 function and the one above.
5118
5119 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5120 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5121
5122 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5123 two operands.
5124
5125 We return the simplified tree or 0 if no optimization is possible. */
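/* Illustrative sketch (not part of GCC; names are hypothetical): the
   bit-field merge described above, at the source level.  Assumes the
   two 4-bit fields are allocated low-to-high within a single byte, so
   both tests reduce to one compare of the containing byte.  */

struct two_fields { unsigned a : 4; unsigned b : 4; };

static int
two_compares (const struct two_fields *p)
{
  return p->a == 2 && p->b == 4;     /* Candidate for merging.  */
}

static int
one_compare (unsigned char byte)     /* The byte containing both fields.  */
{
  return byte == ((4u << 4) | 2u);   /* b in the high nibble, a in the low.  */
}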
5126
5127 static tree
5128 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5129 tree lhs, tree rhs)
5130 {
5131 /* If this is the "or" of two comparisons, we can do something if
5132 the comparisons are NE_EXPR. If this is the "and", we can do something
5133 if the comparisons are EQ_EXPR. I.e.,
5134 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5135
5136 WANTED_CODE is this operation code. For single bit fields, we can
5137 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5138 comparison for one-bit fields. */
5139
5140 enum tree_code wanted_code;
5141 enum tree_code lcode, rcode;
5142 tree ll_arg, lr_arg, rl_arg, rr_arg;
5143 tree ll_inner, lr_inner, rl_inner, rr_inner;
5144 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5145 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5146 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5147 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5148 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5149 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5150 enum machine_mode lnmode, rnmode;
5151 tree ll_mask, lr_mask, rl_mask, rr_mask;
5152 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5153 tree l_const, r_const;
5154 tree lntype, rntype, result;
5155 HOST_WIDE_INT first_bit, end_bit;
5156 int volatilep;
5157
5158 /* Start by getting the comparison codes. Fail if anything is volatile.
5159 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5160 it were surrounded with a NE_EXPR. */
5161
5162 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5163 return 0;
5164
5165 lcode = TREE_CODE (lhs);
5166 rcode = TREE_CODE (rhs);
5167
5168 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5169 {
5170 lhs = build2 (NE_EXPR, truth_type, lhs,
5171 build_int_cst (TREE_TYPE (lhs), 0));
5172 lcode = NE_EXPR;
5173 }
5174
5175 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5176 {
5177 rhs = build2 (NE_EXPR, truth_type, rhs,
5178 build_int_cst (TREE_TYPE (rhs), 0));
5179 rcode = NE_EXPR;
5180 }
5181
5182 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5183 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5184 return 0;
5185
5186 ll_arg = TREE_OPERAND (lhs, 0);
5187 lr_arg = TREE_OPERAND (lhs, 1);
5188 rl_arg = TREE_OPERAND (rhs, 0);
5189 rr_arg = TREE_OPERAND (rhs, 1);
5190
5191 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5192 if (simple_operand_p (ll_arg)
5193 && simple_operand_p (lr_arg))
5194 {
5195 if (operand_equal_p (ll_arg, rl_arg, 0)
5196 && operand_equal_p (lr_arg, rr_arg, 0))
5197 {
5198 result = combine_comparisons (loc, code, lcode, rcode,
5199 truth_type, ll_arg, lr_arg);
5200 if (result)
5201 return result;
5202 }
5203 else if (operand_equal_p (ll_arg, rr_arg, 0)
5204 && operand_equal_p (lr_arg, rl_arg, 0))
5205 {
5206 result = combine_comparisons (loc, code, lcode,
5207 swap_tree_comparison (rcode),
5208 truth_type, ll_arg, lr_arg);
5209 if (result)
5210 return result;
5211 }
5212 }
5213
5214 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5215 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5216
5217 /* If the RHS can be evaluated unconditionally and its operands are
5218 simple, it wins to evaluate the RHS unconditionally on machines
5219 with expensive branches. In this case, this isn't a comparison
5220 that can be merged. */
5221
5222 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5223 false) >= 2
5224 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5225 && simple_operand_p (rl_arg)
5226 && simple_operand_p (rr_arg))
5227 {
5228 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5229 if (code == TRUTH_OR_EXPR
5230 && lcode == NE_EXPR && integer_zerop (lr_arg)
5231 && rcode == NE_EXPR && integer_zerop (rr_arg)
5232 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5233 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5234 return build2_loc (loc, NE_EXPR, truth_type,
5235 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5236 ll_arg, rl_arg),
5237 build_int_cst (TREE_TYPE (ll_arg), 0));
5238
5239 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5240 if (code == TRUTH_AND_EXPR
5241 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5242 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5243 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5244 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5245 return build2_loc (loc, EQ_EXPR, truth_type,
5246 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5247 ll_arg, rl_arg),
5248 build_int_cst (TREE_TYPE (ll_arg), 0));
5249 }
5250
5251 /* See if the comparisons can be merged. Then get all the parameters for
5252 each side. */
5253
5254 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5255 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5256 return 0;
5257
5258 volatilep = 0;
5259 ll_inner = decode_field_reference (loc, ll_arg,
5260 &ll_bitsize, &ll_bitpos, &ll_mode,
5261 &ll_unsignedp, &volatilep, &ll_mask,
5262 &ll_and_mask);
5263 lr_inner = decode_field_reference (loc, lr_arg,
5264 &lr_bitsize, &lr_bitpos, &lr_mode,
5265 &lr_unsignedp, &volatilep, &lr_mask,
5266 &lr_and_mask);
5267 rl_inner = decode_field_reference (loc, rl_arg,
5268 &rl_bitsize, &rl_bitpos, &rl_mode,
5269 &rl_unsignedp, &volatilep, &rl_mask,
5270 &rl_and_mask);
5271 rr_inner = decode_field_reference (loc, rr_arg,
5272 &rr_bitsize, &rr_bitpos, &rr_mode,
5273 &rr_unsignedp, &volatilep, &rr_mask,
5274 &rr_and_mask);
5275
5276 /* The inner operation on the lhs of each comparison must be the
5277 same if we are to be able to do anything.  Then see if we have
5278 constants.  If not, the same must be true for
5279 the rhs's.  */
5280 if (volatilep || ll_inner == 0 || rl_inner == 0
5281 || ! operand_equal_p (ll_inner, rl_inner, 0))
5282 return 0;
5283
5284 if (TREE_CODE (lr_arg) == INTEGER_CST
5285 && TREE_CODE (rr_arg) == INTEGER_CST)
5286 l_const = lr_arg, r_const = rr_arg;
5287 else if (lr_inner == 0 || rr_inner == 0
5288 || ! operand_equal_p (lr_inner, rr_inner, 0))
5289 return 0;
5290 else
5291 l_const = r_const = 0;
5292
5293 /* If either comparison code is not correct for our logical operation,
5294 fail. However, we can convert a one-bit comparison against zero into
5295 the opposite comparison against that bit being set in the field. */
5296
5297 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5298 if (lcode != wanted_code)
5299 {
5300 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5301 {
5302 /* Make the left operand unsigned, since we are only interested
5303 in the value of one bit. Otherwise we are doing the wrong
5304 thing below. */
5305 ll_unsignedp = 1;
5306 l_const = ll_mask;
5307 }
5308 else
5309 return 0;
5310 }
5311
5312 /* This is analogous to the code for l_const above. */
5313 if (rcode != wanted_code)
5314 {
5315 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5316 {
5317 rl_unsignedp = 1;
5318 r_const = rl_mask;
5319 }
5320 else
5321 return 0;
5322 }
5323
5324 /* See if we can find a mode that contains both fields being compared on
5325 the left. If we can't, fail. Otherwise, update all constants and masks
5326 to be relative to a field of that size. */
5327 first_bit = MIN (ll_bitpos, rl_bitpos);
5328 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5329 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5330 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5331 volatilep);
5332 if (lnmode == VOIDmode)
5333 return 0;
5334
5335 lnbitsize = GET_MODE_BITSIZE (lnmode);
5336 lnbitpos = first_bit & ~ (lnbitsize - 1);
5337 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5338 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5339
5340 if (BYTES_BIG_ENDIAN)
5341 {
5342 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5343 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5344 }
5345
5346 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5347 size_int (xll_bitpos));
5348 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5349 size_int (xrl_bitpos));
5350
5351 if (l_const)
5352 {
5353 l_const = fold_convert_loc (loc, lntype, l_const);
5354 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5355 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5356 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5357 fold_build1_loc (loc, BIT_NOT_EXPR,
5358 lntype, ll_mask))))
5359 {
5360 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5361
5362 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5363 }
5364 }
5365 if (r_const)
5366 {
5367 r_const = fold_convert_loc (loc, lntype, r_const);
5368 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5369 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5370 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5371 fold_build1_loc (loc, BIT_NOT_EXPR,
5372 lntype, rl_mask))))
5373 {
5374 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5375
5376 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5377 }
5378 }
5379
5380 /* If the right sides are not constant, do the same for them.  Also,
5381 disallow this optimization if a size or signedness mismatch occurs
5382 between the left and right sides. */
5383 if (l_const == 0)
5384 {
5385 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5386 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5387 /* Make sure the two fields on the right
5388 correspond to the left without being swapped. */
5389 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5390 return 0;
5391
5392 first_bit = MIN (lr_bitpos, rr_bitpos);
5393 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5394 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5395 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5396 volatilep);
5397 if (rnmode == VOIDmode)
5398 return 0;
5399
5400 rnbitsize = GET_MODE_BITSIZE (rnmode);
5401 rnbitpos = first_bit & ~ (rnbitsize - 1);
5402 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5403 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5404
5405 if (BYTES_BIG_ENDIAN)
5406 {
5407 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5408 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5409 }
5410
5411 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5412 rntype, lr_mask),
5413 size_int (xlr_bitpos));
5414 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5415 rntype, rr_mask),
5416 size_int (xrr_bitpos));
5417
5418 /* Make a mask that corresponds to both fields being compared.
5419 Do this for both items being compared. If the operands are the
5420 same size and the bits being compared are in the same position
5421 then we can do this by masking both and comparing the masked
5422 results. */
5423 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5424 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5425 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5426 {
5427 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5428 ll_unsignedp || rl_unsignedp);
5429 if (! all_ones_mask_p (ll_mask, lnbitsize))
5430 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5431
5432 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5433 lr_unsignedp || rr_unsignedp);
5434 if (! all_ones_mask_p (lr_mask, rnbitsize))
5435 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5436
5437 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5438 }
5439
5440 /* There is still another way we can do something: If both pairs of
5441 fields being compared are adjacent, we may be able to make a wider
5442 field containing them both.
5443
5444 Note that we still must mask the lhs/rhs expressions. Furthermore,
5445 the mask must be shifted to account for the shift done by
5446 make_bit_field_ref. */
5447 if ((ll_bitsize + ll_bitpos == rl_bitpos
5448 && lr_bitsize + lr_bitpos == rr_bitpos)
5449 || (ll_bitpos == rl_bitpos + rl_bitsize
5450 && lr_bitpos == rr_bitpos + rr_bitsize))
5451 {
5452 tree type;
5453
5454 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5455 ll_bitsize + rl_bitsize,
5456 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5457 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5458 lr_bitsize + rr_bitsize,
5459 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5460
5461 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5462 size_int (MIN (xll_bitpos, xrl_bitpos)));
5463 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5464 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5465
5466 /* Convert to the smaller type before masking out unwanted bits. */
5467 type = lntype;
5468 if (lntype != rntype)
5469 {
5470 if (lnbitsize > rnbitsize)
5471 {
5472 lhs = fold_convert_loc (loc, rntype, lhs);
5473 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5474 type = rntype;
5475 }
5476 else if (lnbitsize < rnbitsize)
5477 {
5478 rhs = fold_convert_loc (loc, lntype, rhs);
5479 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5480 type = lntype;
5481 }
5482 }
5483
5484 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5485 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5486
5487 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5488 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5489
5490 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5491 }
5492
5493 return 0;
5494 }
5495
5496 /* Handle the case of comparisons with constants. If there is something in
5497 common between the masks, those bits of the constants must be the same.
5498 If not, the condition is always false. Test for this to avoid generating
5499 incorrect code below. */
5500 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5501 if (! integer_zerop (result)
5502 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5503 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5504 {
5505 if (wanted_code == NE_EXPR)
5506 {
5507 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5508 return constant_boolean_node (true, truth_type);
5509 }
5510 else
5511 {
5512 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5513 return constant_boolean_node (false, truth_type);
5514 }
5515 }
5516
5517 /* Construct the expression we will return. First get the component
5518 reference we will make. Unless the mask is all ones the width of
5519 that field, perform the mask operation. Then compare with the
5520 merged constant. */
5521 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5522 ll_unsignedp || rl_unsignedp);
5523
5524 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5525 if (! all_ones_mask_p (ll_mask, lnbitsize))
5526 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5527
5528 return build2_loc (loc, wanted_code, truth_type, result,
5529 const_binop (BIT_IOR_EXPR, l_const, r_const));
5530 }
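/* As a worked illustration of the constant case above (exact masks and
   bit positions are target- and endian-dependent, so treat the numbers
   as a sketch): given

	struct s { unsigned a : 4; unsigned b : 4; } x;

   the test `x.a == 2 && x.b == 3' references the byte containing both
   fields once, ORs the shifted masks and the shifted constants, and on
   a little-endian target folds to roughly

	(*(unsigned char *) &x & 0xff) == 0x32

   i.e. one load, one mask and one comparison in place of two of each.  */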
5531 \f
5532 /* Optimize the comparison OP0 CODE OP1, where OP0 is a MIN_EXPR or
5533    MAX_EXPR and OP1 is a constant.  The result has type TYPE.  */
5534
5535 static tree
5536 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5537 tree op0, tree op1)
5538 {
5539 tree arg0 = op0;
5540 enum tree_code op_code;
5541 tree comp_const;
5542 tree minmax_const;
5543 int consts_equal, consts_lt;
5544 tree inner;
5545
5546 STRIP_SIGN_NOPS (arg0);
5547
5548 op_code = TREE_CODE (arg0);
5549 minmax_const = TREE_OPERAND (arg0, 1);
5550 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5551 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5552 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5553 inner = TREE_OPERAND (arg0, 0);
5554
5555 /* If something does not permit us to optimize, return NULL_TREE.  */
5556 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5557 || TREE_CODE (comp_const) != INTEGER_CST
5558 || TREE_OVERFLOW (comp_const)
5559 || TREE_CODE (minmax_const) != INTEGER_CST
5560 || TREE_OVERFLOW (minmax_const))
5561 return NULL_TREE;
5562
5563 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5564 and GT_EXPR, doing the rest with recursive calls using logical
5565 simplifications. */
5566 switch (code)
5567 {
5568 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5569 {
5570 tree tem
5571 = optimize_minmax_comparison (loc,
5572 invert_tree_comparison (code, false),
5573 type, op0, op1);
5574 if (tem)
5575 return invert_truthvalue_loc (loc, tem);
5576 return NULL_TREE;
5577 }
5578
5579 case GE_EXPR:
5580 return
5581 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5582 optimize_minmax_comparison
5583 (loc, EQ_EXPR, type, arg0, comp_const),
5584 optimize_minmax_comparison
5585 (loc, GT_EXPR, type, arg0, comp_const));
5586
5587 case EQ_EXPR:
5588 if (op_code == MAX_EXPR && consts_equal)
5589 /* MAX (X, 0) == 0 -> X <= 0 */
5590 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5591
5592 else if (op_code == MAX_EXPR && consts_lt)
5593 /* MAX (X, 0) == 5 -> X == 5 */
5594 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5595
5596 else if (op_code == MAX_EXPR)
5597 /* MAX (X, 0) == -1 -> false */
5598 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5599
5600 else if (consts_equal)
5601 /* MIN (X, 0) == 0 -> X >= 0 */
5602 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5603
5604 else if (consts_lt)
5605 /* MIN (X, 0) == 5 -> false */
5606 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5607
5608 else
5609 /* MIN (X, 0) == -1 -> X == -1 */
5610 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5611
5612 case GT_EXPR:
5613 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5614 /* MAX (X, 0) > 0 -> X > 0
5615 MAX (X, 0) > 5 -> X > 5 */
5616 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5617
5618 else if (op_code == MAX_EXPR)
5619 /* MAX (X, 0) > -1 -> true */
5620 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5621
5622 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5623 /* MIN (X, 0) > 0 -> false
5624 MIN (X, 0) > 5 -> false */
5625 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5626
5627 else
5628 /* MIN (X, 0) > -1 -> X > -1 */
5629 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5630
5631 default:
5632 return NULL_TREE;
5633 }
5634 }
5635 \f
5636 /* T is an integer expression that is being multiplied, divided, or taken a
5637 modulus (CODE says which and what kind of divide or modulus) by a
5638 constant C. See if we can eliminate that operation by folding it with
5639 other operations already in T. WIDE_TYPE, if non-null, is a type that
5640 should be used for the computation if wider than our type.
5641
5642 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5643 (X * 2) + (Y * 4). We must, however, be assured that either the original
5644 expression would not overflow or that overflow is undefined for the type
5645 in the language in question.
5646
5647 If we return a non-null expression, it is an equivalent form of the
5648 original computation, but need not be in the original type.
5649
5650 We set *STRICT_OVERFLOW_P to true if the return value depends on
5651 signed overflow being undefined. Otherwise we do not change
5652 *STRICT_OVERFLOW_P. */
5653
5654 static tree
5655 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5656 bool *strict_overflow_p)
5657 {
5658 /* To avoid exponential search depth, refuse to allow recursion past
5659 three levels. Beyond that (1) it's highly unlikely that we'll find
5660 something interesting and (2) we've probably processed it before
5661 when we built the inner expression. */
5662
5663 static int depth;
5664 tree ret;
5665
5666 if (depth > 3)
5667 return NULL;
5668
5669 depth++;
5670 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5671 depth--;
5672
5673 return ret;
5674 }
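/* For instance (a sketch, with trees written as C expressions): calling
   extract_muldiv on T = (X * 8) + (Y * 16) with C = 4 and CODE =
   TRUNC_DIV_EXPR yields (X * 2) + (Y * 4), provided the original
   expression cannot overflow or overflow is undefined for its type.  */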
5675
5676 static tree
5677 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5678 bool *strict_overflow_p)
5679 {
5680 tree type = TREE_TYPE (t);
5681 enum tree_code tcode = TREE_CODE (t);
5682 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5683 > GET_MODE_SIZE (TYPE_MODE (type)))
5684 ? wide_type : type);
5685 tree t1, t2;
5686 int same_p = tcode == code;
5687 tree op0 = NULL_TREE, op1 = NULL_TREE;
5688 bool sub_strict_overflow_p;
5689
5690 /* Don't deal with constants of zero here; they confuse the code below. */
5691 if (integer_zerop (c))
5692 return NULL_TREE;
5693
5694 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5695 op0 = TREE_OPERAND (t, 0);
5696
5697 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5698 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5699
5700 /* Note that we need not handle conditional operations here since fold
5701 already handles those cases. So just do arithmetic here. */
5702 switch (tcode)
5703 {
5704 case INTEGER_CST:
5705 /* For a constant, we can always simplify if we are a multiply
5706 or (for divide and modulus) if it is a multiple of our constant. */
5707 if (code == MULT_EXPR
5708 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5709 return const_binop (code, fold_convert (ctype, t),
5710 fold_convert (ctype, c));
5711 break;
5712
5713 CASE_CONVERT: case NON_LVALUE_EXPR:
5714 /* If op0 is an expression ... */
5715 if ((COMPARISON_CLASS_P (op0)
5716 || UNARY_CLASS_P (op0)
5717 || BINARY_CLASS_P (op0)
5718 || VL_EXP_CLASS_P (op0)
5719 || EXPRESSION_CLASS_P (op0))
5720 /* ... and has wrapping overflow, and its type is smaller
5721 than ctype, then we cannot pass through as widening. */
5722 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5723 && (TYPE_PRECISION (ctype)
5724 > TYPE_PRECISION (TREE_TYPE (op0))))
5725 /* ... or this is a truncation (t is narrower than op0),
5726 then we cannot pass through this narrowing. */
5727 || (TYPE_PRECISION (type)
5728 < TYPE_PRECISION (TREE_TYPE (op0)))
5729 /* ... or signedness changes for division or modulus,
5730 then we cannot pass through this conversion. */
5731 || (code != MULT_EXPR
5732 && (TYPE_UNSIGNED (ctype)
5733 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5734 /* ... or has undefined overflow while the converted to
5735 type has not, we cannot do the operation in the inner type
5736 as that would introduce undefined overflow. */
5737 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5738 && !TYPE_OVERFLOW_UNDEFINED (type))))
5739 break;
5740
5741 /* Pass the constant down and see if we can make a simplification. If
5742 we can, replace this expression with the inner simplification for
5743 possible later conversion to our or some other type. */
5744 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5745 && TREE_CODE (t2) == INTEGER_CST
5746 && !TREE_OVERFLOW (t2)
5747 && (0 != (t1 = extract_muldiv (op0, t2, code,
5748 code == MULT_EXPR
5749 ? ctype : NULL_TREE,
5750 strict_overflow_p))))
5751 return t1;
5752 break;
5753
5754 case ABS_EXPR:
5755 /* If widening the type changes it from signed to unsigned, then we
5756 must avoid building ABS_EXPR itself as unsigned. */
5757 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5758 {
5759 tree cstype = (*signed_type_for) (ctype);
5760 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5761 != 0)
5762 {
5763 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5764 return fold_convert (ctype, t1);
5765 }
5766 break;
5767 }
5768 /* If the constant is negative, we cannot simplify this. */
5769 if (tree_int_cst_sgn (c) == -1)
5770 break;
5771 /* FALLTHROUGH */
5772 case NEGATE_EXPR:
5773 /* For division and modulus, type can't be unsigned, as e.g.
5774 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5775 For signed types, even with wrapping overflow, this is fine. */
5776 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5777 break;
5778 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5779 != 0)
5780 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5781 break;
5782
5783 case MIN_EXPR: case MAX_EXPR:
5784 /* If widening the type changes the signedness, then we can't perform
5785 this optimization as that changes the result. */
5786 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5787 break;
5788
5789 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5790 sub_strict_overflow_p = false;
5791 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5792 &sub_strict_overflow_p)) != 0
5793 && (t2 = extract_muldiv (op1, c, code, wide_type,
5794 &sub_strict_overflow_p)) != 0)
5795 {
5796 if (tree_int_cst_sgn (c) < 0)
5797 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5798 if (sub_strict_overflow_p)
5799 *strict_overflow_p = true;
5800 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5801 fold_convert (ctype, t2));
5802 }
5803 break;
5804
5805 case LSHIFT_EXPR: case RSHIFT_EXPR:
5806 /* If the second operand is constant, this is a multiplication
5807 or floor division by a power of two, so we can treat it that
5808 way unless the multiplier or divisor overflows. Signed
5809 left-shift overflow is implementation-defined rather than
5810 undefined in C90, so do not convert signed left shift into
5811 multiplication. */
5812 if (TREE_CODE (op1) == INTEGER_CST
5813 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5814 /* const_binop may not detect overflow correctly,
5815 so check for it explicitly here. */
5816 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5817 && 0 != (t1 = fold_convert (ctype,
5818 const_binop (LSHIFT_EXPR,
5819 size_one_node,
5820 op1)))
5821 && !TREE_OVERFLOW (t1))
5822 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5823 ? MULT_EXPR : FLOOR_DIV_EXPR,
5824 ctype,
5825 fold_convert (ctype, op0),
5826 t1),
5827 c, code, wide_type, strict_overflow_p);
5828 break;
5829
5830 case PLUS_EXPR: case MINUS_EXPR:
5831 /* See if we can eliminate the operation on both sides. If we can, we
5832 can return a new PLUS or MINUS. If we can't, the only remaining
5833 cases where we can do anything are if the second operand is a
5834 constant. */
5835 sub_strict_overflow_p = false;
5836 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5837 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5838 if (t1 != 0 && t2 != 0
5839 && (code == MULT_EXPR
5840 /* If not multiplication, we can only do this if both operands
5841 are divisible by c. */
5842 || (multiple_of_p (ctype, op0, c)
5843 && multiple_of_p (ctype, op1, c))))
5844 {
5845 if (sub_strict_overflow_p)
5846 *strict_overflow_p = true;
5847 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5848 fold_convert (ctype, t2));
5849 }
5850
5851 /* If this was a subtraction, negate OP1 and set it to be an addition.
5852 This simplifies the logic below. */
5853 if (tcode == MINUS_EXPR)
5854 {
5855 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5856 /* If OP1 was not easily negatable, the constant may be OP0. */
5857 if (TREE_CODE (op0) == INTEGER_CST)
5858 {
5859 tree tem = op0;
5860 op0 = op1;
5861 op1 = tem;
5862 tem = t1;
5863 t1 = t2;
5864 t2 = tem;
5865 }
5866 }
5867
5868 if (TREE_CODE (op1) != INTEGER_CST)
5869 break;
5870
5871 /* If either OP1 or C is negative, this optimization is not safe for
5872 some of the division and remainder types while for others we need
5873 to change the code. */
5874 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5875 {
5876 if (code == CEIL_DIV_EXPR)
5877 code = FLOOR_DIV_EXPR;
5878 else if (code == FLOOR_DIV_EXPR)
5879 code = CEIL_DIV_EXPR;
5880 else if (code != MULT_EXPR
5881 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5882 break;
5883 }
5884
5885 /* If it's a multiply or a division/modulus operation of a multiple
5886 of our constant, do the operation and verify it doesn't overflow. */
5887 if (code == MULT_EXPR
5888 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5889 {
5890 op1 = const_binop (code, fold_convert (ctype, op1),
5891 fold_convert (ctype, c));
5892 /* We allow the constant to overflow with wrapping semantics. */
5893 if (op1 == 0
5894 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5895 break;
5896 }
5897 else
5898 break;
5899
5900 /* If we have an unsigned type, we cannot widen the operation since it
5901 will change the result if the original computation overflowed. */
5902 if (TYPE_UNSIGNED (ctype) && ctype != type)
5903 break;
5904
5905 /* If we were able to eliminate our operation from the first side,
5906 apply our operation to the second side and reform the PLUS. */
5907 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5908 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5909
5910 /* The last case is if we are a multiply. In that case, we can
5911 apply the distributive law to commute the multiply and addition
5912 if the multiplication of the constants doesn't overflow
5913 and overflow is defined. With undefined overflow
5914 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5915 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5916 return fold_build2 (tcode, ctype,
5917 fold_build2 (code, ctype,
5918 fold_convert (ctype, op0),
5919 fold_convert (ctype, c)),
5920 op1);
5921
5922 break;
5923
5924 case MULT_EXPR:
5925 /* We have a special case here if we are doing something like
5926 (C * 8) % 4 since we know that's zero. */
5927 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5928 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5929 /* If the multiplication can overflow we cannot optimize this. */
5930 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5931 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5932 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5933 {
5934 *strict_overflow_p = true;
5935 return omit_one_operand (type, integer_zero_node, op0);
5936 }
5937
5938 /* ... fall through ... */
5939
5940 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5941 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5942 /* If we can extract our operation from the LHS, do so and return a
5943 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5944 do something only if the second operand is a constant. */
5945 if (same_p
5946 && (t1 = extract_muldiv (op0, c, code, wide_type,
5947 strict_overflow_p)) != 0)
5948 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5949 fold_convert (ctype, op1));
5950 else if (tcode == MULT_EXPR && code == MULT_EXPR
5951 && (t1 = extract_muldiv (op1, c, code, wide_type,
5952 strict_overflow_p)) != 0)
5953 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5954 fold_convert (ctype, t1));
5955 else if (TREE_CODE (op1) != INTEGER_CST)
5956 return 0;
5957
5958 /* If these are the same operation types, we can associate them
5959 assuming no overflow. */
5960 if (tcode == code)
5961 {
5962 bool overflow_p = false;
5963 bool overflow_mul_p;
5964 signop sign = TYPE_SIGN (ctype);
5965 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5966 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5967 if (overflow_mul_p
5968 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5969 overflow_p = true;
5970 if (!overflow_p)
5971 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5972 wide_int_to_tree (ctype, mul));
5973 }
5974
5975 /* If these operations "cancel" each other, we have the main
5976 optimizations of this pass, which occur when either constant is a
5977 multiple of the other, in which case we replace this with either an
5978 operation of CODE or TCODE.
5979
5980 If we have an unsigned type, we cannot do this since it will change
5981 the result if the original computation overflowed. */
5982 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5983 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5984 || (tcode == MULT_EXPR
5985 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5986 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5987 && code != MULT_EXPR)))
5988 {
5989 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5990 {
5991 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5992 *strict_overflow_p = true;
5993 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5994 fold_convert (ctype,
5995 const_binop (TRUNC_DIV_EXPR,
5996 op1, c)));
5997 }
5998 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
5999 {
6000 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6001 *strict_overflow_p = true;
6002 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6003 fold_convert (ctype,
6004 const_binop (TRUNC_DIV_EXPR,
6005 c, op1)));
6006 }
6007 }
6008 break;
6009
6010 default:
6011 break;
6012 }
6013
6014 return 0;
6015 }
6016 \f
6017 /* Return a node which has the indicated constant VALUE (either 0 or
6018 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6019 and is of the indicated TYPE. */
6020
6021 tree
6022 constant_boolean_node (bool value, tree type)
6023 {
6024 if (type == integer_type_node)
6025 return value ? integer_one_node : integer_zero_node;
6026 else if (type == boolean_type_node)
6027 return value ? boolean_true_node : boolean_false_node;
6028 else if (TREE_CODE (type) == VECTOR_TYPE)
6029 return build_vector_from_val (type,
6030 build_int_cst (TREE_TYPE (type),
6031 value ? -1 : 0));
6032 else
6033 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6034 }
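/* E.g. constant_boolean_node (true, boolean_type_node) is
   boolean_true_node, while for a vector type the "true" value is a
   vector with all elements -1, matching the convention that vector
   comparisons produce all-ones elements for true lanes.  */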
6035
6036
6037 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6038 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6039 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6040 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6041 COND is the first argument to CODE; otherwise (as in the example
6042 given here), it is the second argument. TYPE is the type of the
6043 original expression. Return NULL_TREE if no simplification is
6044 possible. */
6045
6046 static tree
6047 fold_binary_op_with_conditional_arg (location_t loc,
6048 enum tree_code code,
6049 tree type, tree op0, tree op1,
6050 tree cond, tree arg, int cond_first_p)
6051 {
6052 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6053 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6054 tree test, true_value, false_value;
6055 tree lhs = NULL_TREE;
6056 tree rhs = NULL_TREE;
6057 enum tree_code cond_code = COND_EXPR;
6058
6059 if (TREE_CODE (cond) == COND_EXPR
6060 || TREE_CODE (cond) == VEC_COND_EXPR)
6061 {
6062 test = TREE_OPERAND (cond, 0);
6063 true_value = TREE_OPERAND (cond, 1);
6064 false_value = TREE_OPERAND (cond, 2);
6065 /* If this operand is a throw or another expression of void type,
6066 it does not make sense to try to perform a logical or arithmetic
6067 operation involving it.  */
6068 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6069 lhs = true_value;
6070 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6071 rhs = false_value;
6072 }
6073 else
6074 {
6075 tree testtype = TREE_TYPE (cond);
6076 test = cond;
6077 true_value = constant_boolean_node (true, testtype);
6078 false_value = constant_boolean_node (false, testtype);
6079 }
6080
6081 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6082 cond_code = VEC_COND_EXPR;
6083
6084 /* This transformation is only worthwhile if we don't have to wrap ARG
6085 in a SAVE_EXPR and the operation can be simplified without recursing
6086 on at least one of the branches once it's pushed inside the COND_EXPR.  */
6087 if (!TREE_CONSTANT (arg)
6088 && (TREE_SIDE_EFFECTS (arg)
6089 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6090 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6091 return NULL_TREE;
6092
6093 arg = fold_convert_loc (loc, arg_type, arg);
6094 if (lhs == 0)
6095 {
6096 true_value = fold_convert_loc (loc, cond_type, true_value);
6097 if (cond_first_p)
6098 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6099 else
6100 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6101 }
6102 if (rhs == 0)
6103 {
6104 false_value = fold_convert_loc (loc, cond_type, false_value);
6105 if (cond_first_p)
6106 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6107 else
6108 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6109 }
6110
6111 /* Check that we have simplified at least one of the branches. */
6112 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6113 return NULL_TREE;
6114
6115 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6116 }
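/* A small illustration: folding `(b ? 2 : 3) + 1' with CODE equal to
   PLUS_EXPR pushes the addition into both arms; each arm then folds to
   a constant, so the whole expression becomes `b ? 3 : 4'.  */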
6117
6118 \f
6119 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6120
6121 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6122 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6123 ADDEND is the same as X.
6124
6125 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6126 and finite. The problematic cases are when X is zero, and its mode
6127 has signed zeros. In the case of rounding towards -infinity,
6128 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6129 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6130
6131 bool
6132 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6133 {
6134 if (!real_zerop (addend))
6135 return false;
6136
6137 /* Don't allow the fold with -fsignaling-nans. */
6138 if (HONOR_SNANS (TYPE_MODE (type)))
6139 return false;
6140
6141 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6142 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6143 return true;
6144
6145 /* In a vector or complex, we would need to check the sign of all zeros. */
6146 if (TREE_CODE (addend) != REAL_CST)
6147 return false;
6148
6149 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6150 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6151 negate = !negate;
6152
6153 /* The mode has signed zeros, and we have to honor their sign.
6154 In this situation, there is only one case we can return true for.
6155 X - 0 is the same as X unless rounding towards -infinity is
6156 supported. */
6157 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6158 }
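/* Concretely: with X = -0.0, X + 0.0 evaluates to +0.0, so the fold
   X + 0.0 -> X is invalid once signed zeros are honored, whereas
   X - 0.0 stays -0.0 and that fold remains valid unless rounding
   towards -infinity can turn 0.0 - 0.0 into -0.0.  */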
6159
6160 /* Subroutine of fold() that checks comparisons of built-in math
6161 functions against real constants.
6162
6163 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6164 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6165 is the type of the result and ARG0 and ARG1 are the operands of the
6166 comparison. ARG1 must be a TREE_REAL_CST.
6167
6168 The function returns the constant folded tree if a simplification
6169 can be made, and NULL_TREE otherwise. */
6170
6171 static tree
6172 fold_mathfn_compare (location_t loc,
6173 enum built_in_function fcode, enum tree_code code,
6174 tree type, tree arg0, tree arg1)
6175 {
6176 REAL_VALUE_TYPE c;
6177
6178 if (BUILTIN_SQRT_P (fcode))
6179 {
6180 tree arg = CALL_EXPR_ARG (arg0, 0);
6181 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6182
6183 c = TREE_REAL_CST (arg1);
6184 if (REAL_VALUE_NEGATIVE (c))
6185 {
6186 /* sqrt(x) ==, < or <= y is always false, if y is negative.  */
6187 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6188 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6189
6190 /* sqrt(x) > y is always true, if y is negative and we
6191 don't care about NaNs, i.e. negative values of x. */
6192 if (code == NE_EXPR || !HONOR_NANS (mode))
6193 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6194
6195 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6196 return fold_build2_loc (loc, GE_EXPR, type, arg,
6197 build_real (TREE_TYPE (arg), dconst0));
6198 }
6199 else if (code == GT_EXPR || code == GE_EXPR)
6200 {
6201 REAL_VALUE_TYPE c2;
6202
6203 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6204 real_convert (&c2, mode, &c2);
6205
6206 if (REAL_VALUE_ISINF (c2))
6207 {
6208 /* sqrt(x) > y is x == +Inf, when y is very large. */
6209 if (HONOR_INFINITIES (mode))
6210 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6211 build_real (TREE_TYPE (arg), c2));
6212
6213 /* sqrt(x) > y is always false, when y is very large
6214 and we don't care about infinities. */
6215 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6216 }
6217
6218 /* sqrt(x) > c is the same as x > c*c. */
6219 return fold_build2_loc (loc, code, type, arg,
6220 build_real (TREE_TYPE (arg), c2));
6221 }
6222 else if (code == LT_EXPR || code == LE_EXPR)
6223 {
6224 REAL_VALUE_TYPE c2;
6225
6226 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6227 real_convert (&c2, mode, &c2);
6228
6229 if (REAL_VALUE_ISINF (c2))
6230 {
6231 /* sqrt(x) < y is always true, when y is a very large
6232 value and we don't care about NaNs or Infinities. */
6233 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6234 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6235
6236 /* sqrt(x) < y is x != +Inf when y is very large and we
6237 don't care about NaNs. */
6238 if (! HONOR_NANS (mode))
6239 return fold_build2_loc (loc, NE_EXPR, type, arg,
6240 build_real (TREE_TYPE (arg), c2));
6241
6242 /* sqrt(x) < y is x >= 0 when y is very large and we
6243 don't care about Infinities. */
6244 if (! HONOR_INFINITIES (mode))
6245 return fold_build2_loc (loc, GE_EXPR, type, arg,
6246 build_real (TREE_TYPE (arg), dconst0));
6247
6248 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6249 arg = save_expr (arg);
6250 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6251 fold_build2_loc (loc, GE_EXPR, type, arg,
6252 build_real (TREE_TYPE (arg),
6253 dconst0)),
6254 fold_build2_loc (loc, NE_EXPR, type, arg,
6255 build_real (TREE_TYPE (arg),
6256 c2)));
6257 }
6258
6259 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6260 if (! HONOR_NANS (mode))
6261 return fold_build2_loc (loc, code, type, arg,
6262 build_real (TREE_TYPE (arg), c2));
6263
6264 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6265 arg = save_expr (arg);
6266 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6267 fold_build2_loc (loc, GE_EXPR, type, arg,
6268 build_real (TREE_TYPE (arg),
6269 dconst0)),
6270 fold_build2_loc (loc, code, type, arg,
6271 build_real (TREE_TYPE (arg),
6272 c2)));
6273 }
6274 }
6275
6276 return NULL_TREE;
6277 }
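/* For example, given the guards above: `sqrt (x) > 3.0' folds to
   `x > 9.0', and `sqrt (x) < -1.0' folds to constant false, since
   sqrt never returns a negative value.  */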
6278
6279 /* Subroutine of fold() that optimizes comparisons against Infinities,
6280 either +Inf or -Inf.
6281
6282 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6283 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6284 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6285
6286 The function returns the constant folded tree if a simplification
6287 can be made, and NULL_TREE otherwise. */
6288
6289 static tree
6290 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6291 tree arg0, tree arg1)
6292 {
6293 enum machine_mode mode;
6294 REAL_VALUE_TYPE max;
6295 tree temp;
6296 bool neg;
6297
6298 mode = TYPE_MODE (TREE_TYPE (arg0));
6299
6300 /* For negative infinity swap the sense of the comparison. */
6301 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6302 if (neg)
6303 code = swap_tree_comparison (code);
6304
6305 switch (code)
6306 {
6307 case GT_EXPR:
6308 /* x > +Inf is always false, if we ignore sNaNs.  */
6309 if (HONOR_SNANS (mode))
6310 return NULL_TREE;
6311 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6312
6313 case LE_EXPR:
6314 /* x <= +Inf is always true, if we don't care about NaNs.  */
6315 if (! HONOR_NANS (mode))
6316 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6317
6318 /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
6319 arg0 = save_expr (arg0);
6320 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6321
6322 case EQ_EXPR:
6323 case GE_EXPR:
6324 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6325 real_maxval (&max, neg, mode);
6326 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6327 arg0, build_real (TREE_TYPE (arg0), max));
6328
6329 case LT_EXPR:
6330 /* x < +Inf is always equal to x <= DBL_MAX. */
6331 real_maxval (&max, neg, mode);
6332 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6333 arg0, build_real (TREE_TYPE (arg0), max));
6334
6335 case NE_EXPR:
6336 /* x != +Inf is always equal to !(x > DBL_MAX). */
6337 real_maxval (&max, neg, mode);
6338 if (! HONOR_NANS (mode))
6339 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6340 arg0, build_real (TREE_TYPE (arg0), max));
6341
6342 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6343 arg0, build_real (TREE_TYPE (arg0), max));
6344 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6345
6346 default:
6347 break;
6348 }
6349
6350 return NULL_TREE;
6351 }
6352
6353 /* Subroutine of fold() that optimizes comparisons of a division by
6354 a nonzero integer constant against an integer constant, i.e.
6355 X/C1 op C2.
6356
6357 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6358 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6359 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6360
6361 The function returns the constant folded tree if a simplification
6362 can be made, and NULL_TREE otherwise. */
6363
6364 static tree
6365 fold_div_compare (location_t loc,
6366 enum tree_code code, tree type, tree arg0, tree arg1)
6367 {
6368 tree prod, tmp, hi, lo;
6369 tree arg00 = TREE_OPERAND (arg0, 0);
6370 tree arg01 = TREE_OPERAND (arg0, 1);
6371 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6372 bool neg_overflow = false;
6373 bool overflow;
6374
6375 /* We have to do this the hard way to detect unsigned overflow.
6376 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6377 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6378 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6379 neg_overflow = false;
6380
6381 if (sign == UNSIGNED)
6382 {
6383 tmp = int_const_binop (MINUS_EXPR, arg01,
6384 build_int_cst (TREE_TYPE (arg01), 1));
6385 lo = prod;
6386
6387 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6388 val = wi::add (prod, tmp, sign, &overflow);
6389 hi = force_fit_type (TREE_TYPE (arg00), val,
6390 -1, overflow | TREE_OVERFLOW (prod));
6391 }
6392 else if (tree_int_cst_sgn (arg01) >= 0)
6393 {
6394 tmp = int_const_binop (MINUS_EXPR, arg01,
6395 build_int_cst (TREE_TYPE (arg01), 1));
6396 switch (tree_int_cst_sgn (arg1))
6397 {
6398 case -1:
6399 neg_overflow = true;
6400 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6401 hi = prod;
6402 break;
6403
6404 case 0:
6405 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6406 hi = tmp;
6407 break;
6408
6409 case 1:
6410 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6411 lo = prod;
6412 break;
6413
6414 default:
6415 gcc_unreachable ();
6416 }
6417 }
6418 else
6419 {
6420 /* A negative divisor reverses the relational operators. */
6421 code = swap_tree_comparison (code);
6422
6423 tmp = int_const_binop (PLUS_EXPR, arg01,
6424 build_int_cst (TREE_TYPE (arg01), 1));
6425 switch (tree_int_cst_sgn (arg1))
6426 {
6427 case -1:
6428 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6429 lo = prod;
6430 break;
6431
6432 case 0:
6433 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6434 lo = tmp;
6435 break;
6436
6437 case 1:
6438 neg_overflow = true;
6439 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6440 hi = prod;
6441 break;
6442
6443 default:
6444 gcc_unreachable ();
6445 }
6446 }
6447
6448 switch (code)
6449 {
6450 case EQ_EXPR:
6451 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6452 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6453 if (TREE_OVERFLOW (hi))
6454 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6455 if (TREE_OVERFLOW (lo))
6456 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6457 return build_range_check (loc, type, arg00, 1, lo, hi);
6458
6459 case NE_EXPR:
6460 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6461 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6462 if (TREE_OVERFLOW (hi))
6463 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6464 if (TREE_OVERFLOW (lo))
6465 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6466 return build_range_check (loc, type, arg00, 0, lo, hi);
6467
6468 case LT_EXPR:
6469 if (TREE_OVERFLOW (lo))
6470 {
6471 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6472 return omit_one_operand_loc (loc, type, tmp, arg00);
6473 }
6474 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6475
6476 case LE_EXPR:
6477 if (TREE_OVERFLOW (hi))
6478 {
6479 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6480 return omit_one_operand_loc (loc, type, tmp, arg00);
6481 }
6482 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6483
6484 case GT_EXPR:
6485 if (TREE_OVERFLOW (hi))
6486 {
6487 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6488 return omit_one_operand_loc (loc, type, tmp, arg00);
6489 }
6490 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6491
6492 case GE_EXPR:
6493 if (TREE_OVERFLOW (lo))
6494 {
6495 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6496 return omit_one_operand_loc (loc, type, tmp, arg00);
6497 }
6498 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6499
6500 default:
6501 break;
6502 }
6503
6504 return NULL_TREE;
6505 }
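/* Worked example: for unsigned X, `X / 3 == 2' gives prod = 6 and
   tmp = 2, hence [lo, hi] = [6, 8], and the comparison folds to the
   range check 6 <= X && X <= 8.  A negative (signed) divisor first
   swaps the relational operator, as noted above.  */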
6506
6507
6508 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6509 equality/inequality test, then return a simplified form of the test
6510 using a sign test.  Otherwise return NULL.  TYPE is the desired
6511 result type. */
6512
6513 static tree
6514 fold_single_bit_test_into_sign_test (location_t loc,
6515 enum tree_code code, tree arg0, tree arg1,
6516 tree result_type)
6517 {
6518 /* If this is testing a single bit, we can optimize the test. */
6519 if ((code == NE_EXPR || code == EQ_EXPR)
6520 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6521 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6522 {
6523 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6524 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6525 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6526
6527 if (arg00 != NULL_TREE
6528 /* This is only a win if casting to a signed type is cheap,
6529 i.e. when arg00's type is not a partial mode. */
6530 && TYPE_PRECISION (TREE_TYPE (arg00))
6531 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6532 {
6533 tree stype = signed_type_for (TREE_TYPE (arg00));
6534 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6535 result_type,
6536 fold_convert_loc (loc, stype, arg00),
6537 build_int_cst (stype, 0));
6538 }
6539 }
6540
6541 return NULL_TREE;
6542 }
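/* For instance, if A has a 32-bit type, `(A & 0x80000000) != 0' tests
   exactly the sign bit and becomes `(signed) A < 0', with the cast to
   the corresponding signed type supplied by signed_type_for.  */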
6543
6544 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6545 equality/inequality test, then return a simplified form of
6546 the test using shifts and logical operations. Otherwise return
6547 NULL. TYPE is the desired result type. */
6548
6549 tree
6550 fold_single_bit_test (location_t loc, enum tree_code code,
6551 tree arg0, tree arg1, tree result_type)
6552 {
6553 /* If this is testing a single bit, we can optimize the test. */
6554 if ((code == NE_EXPR || code == EQ_EXPR)
6555 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6556 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6557 {
6558 tree inner = TREE_OPERAND (arg0, 0);
6559 tree type = TREE_TYPE (arg0);
6560 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6561 enum machine_mode operand_mode = TYPE_MODE (type);
6562 int ops_unsigned;
6563 tree signed_type, unsigned_type, intermediate_type;
6564 tree tem, one;
6565
6566 /* First, see if we can fold the single bit test into a sign-bit
6567 test. */
6568 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6569 result_type);
6570 if (tem)
6571 return tem;
6572
6573 /* Otherwise we have (A & C) != 0 where C is a single bit,
6574 convert that into ((A >> C2) & 1), where C2 = log2(C).
6575 Similarly for (A & C) == 0. */
6576
6577 /* If INNER is a right shift by a constant and it plus BITNUM does
6578 not overflow, adjust BITNUM and INNER. */
6579 if (TREE_CODE (inner) == RSHIFT_EXPR
6580 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6581 && bitnum < TYPE_PRECISION (type)
6582 && wi::ltu_p (TREE_OPERAND (inner, 1),
6583 TYPE_PRECISION (type) - bitnum))
6584 {
6585 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6586 inner = TREE_OPERAND (inner, 0);
6587 }
6588
6589 /* If we are going to be able to omit the AND below, we must do our
6590 operations as unsigned. If we must use the AND, we have a choice.
6591 Normally unsigned is faster, but for some machines signed is. */
6592 #ifdef LOAD_EXTEND_OP
6593 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6594 && !flag_syntax_only) ? 0 : 1;
6595 #else
6596 ops_unsigned = 1;
6597 #endif
6598
6599 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6600 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6601 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6602 inner = fold_convert_loc (loc, intermediate_type, inner);
6603
6604 if (bitnum != 0)
6605 inner = build2 (RSHIFT_EXPR, intermediate_type,
6606 inner, size_int (bitnum));
6607
6608 one = build_int_cst (intermediate_type, 1);
6609
6610 if (code == EQ_EXPR)
6611 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6612
6613 /* Put the AND last so it can combine with more things. */
6614 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6615
6616 /* Make sure to return the proper type. */
6617 inner = fold_convert_loc (loc, result_type, inner);
6618
6619 return inner;
6620 }
6621 return NULL_TREE;
6622 }
6623
6624 /* Check whether we are allowed to reorder operands arg0 and arg1,
6625 such that the evaluation of arg1 occurs before arg0. */
6626
6627 static bool
6628 reorder_operands_p (const_tree arg0, const_tree arg1)
6629 {
6630 if (! flag_evaluation_order)
6631 return true;
6632 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6633 return true;
6634 return ! TREE_SIDE_EFFECTS (arg0)
6635 && ! TREE_SIDE_EFFECTS (arg1);
6636 }
6637
6638 /* Test whether it is preferable to swap two operands, ARG0 and
6639 ARG1, for example because ARG0 is an integer constant and ARG1
6640 isn't. If REORDER is true, only recommend swapping if we can
6641 evaluate the operands in reverse order. */
6642
6643 bool
6644 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6645 {
6646 if (CONSTANT_CLASS_P (arg1))
6647 return 0;
6648 if (CONSTANT_CLASS_P (arg0))
6649 return 1;
6650
6651 STRIP_SIGN_NOPS (arg0);
6652 STRIP_SIGN_NOPS (arg1);
6653
6654 if (TREE_CONSTANT (arg1))
6655 return 0;
6656 if (TREE_CONSTANT (arg0))
6657 return 1;
6658
6659 if (reorder && flag_evaluation_order
6660 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6661 return 0;
6662
6663 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6664 for commutative and comparison operators. Ensuring a canonical
6665 form allows the optimizers to find additional redundancies without
6666 having to explicitly check for both orderings. */
6667 if (TREE_CODE (arg0) == SSA_NAME
6668 && TREE_CODE (arg1) == SSA_NAME
6669 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6670 return 1;
6671
6672 /* Put SSA_NAMEs last. */
6673 if (TREE_CODE (arg1) == SSA_NAME)
6674 return 0;
6675 if (TREE_CODE (arg0) == SSA_NAME)
6676 return 1;
6677
6678 /* Put variables last. */
6679 if (DECL_P (arg1))
6680 return 0;
6681 if (DECL_P (arg0))
6682 return 1;
6683
6684 return 0;
6685 }
6686
6687 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6688 ARG0 is extended to a wider type. */
6689
6690 static tree
6691 fold_widened_comparison (location_t loc, enum tree_code code,
6692 tree type, tree arg0, tree arg1)
6693 {
6694 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6695 tree arg1_unw;
6696 tree shorter_type, outer_type;
6697 tree min, max;
6698 bool above, below;
6699
6700 if (arg0_unw == arg0)
6701 return NULL_TREE;
6702 shorter_type = TREE_TYPE (arg0_unw);
6703
6704 #ifdef HAVE_canonicalize_funcptr_for_compare
6705 /* Disable this optimization if we're casting a function pointer
6706 type on targets that require function pointer canonicalization. */
6707 if (HAVE_canonicalize_funcptr_for_compare
6708 && TREE_CODE (shorter_type) == POINTER_TYPE
6709 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6710 return NULL_TREE;
6711 #endif
6712
6713 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6714 return NULL_TREE;
6715
6716 arg1_unw = get_unwidened (arg1, NULL_TREE);
6717
6718 /* If possible, express the comparison in the shorter mode. */
6719 if ((code == EQ_EXPR || code == NE_EXPR
6720 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6721 && (TREE_TYPE (arg1_unw) == shorter_type
6722 || ((TYPE_PRECISION (shorter_type)
6723 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6724 && (TYPE_UNSIGNED (shorter_type)
6725 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6726 || (TREE_CODE (arg1_unw) == INTEGER_CST
6727 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6728 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6729 && int_fits_type_p (arg1_unw, shorter_type))))
6730 return fold_build2_loc (loc, code, type, arg0_unw,
6731 fold_convert_loc (loc, shorter_type, arg1_unw));
6732
6733 if (TREE_CODE (arg1_unw) != INTEGER_CST
6734 || TREE_CODE (shorter_type) != INTEGER_TYPE
6735 || int_fits_type_p (arg1_unw, shorter_type))
6736 return NULL_TREE;
6737
6738 /* If we are comparing with an integer that does not fit into the range
6739 of the shorter type, the result is known. */
6740 outer_type = TREE_TYPE (arg1_unw);
6741 min = lower_bound_in_type (outer_type, shorter_type);
6742 max = upper_bound_in_type (outer_type, shorter_type);
6743
6744 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6745 max, arg1_unw));
6746 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6747 arg1_unw, min));
6748
6749 switch (code)
6750 {
6751 case EQ_EXPR:
6752 if (above || below)
6753 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6754 break;
6755
6756 case NE_EXPR:
6757 if (above || below)
6758 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6759 break;
6760
6761 case LT_EXPR:
6762 case LE_EXPR:
6763 if (above)
6764 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6765 else if (below)
6766 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;
6767 
6768 case GT_EXPR:
6769 case GE_EXPR:
6770 if (above)
6771 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6772 else if (below)
6773 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;
6774 
6775 default:
6776 break;
6777 }
6778
6779 return NULL_TREE;
6780 }
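/* Example: for `signed char c', the comparison `(int) c < 300' cannot
   be done in the narrower type, but 300 exceeds the maximum of signed
   char, so ABOVE is true and the result folds to constant 1 while
   preserving any side effects of the operand.  */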
6781
6782 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6783 ARG0 just the signedness is changed. */
6784
6785 static tree
6786 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6787 tree arg0, tree arg1)
6788 {
6789 tree arg0_inner;
6790 tree inner_type, outer_type;
6791
6792 if (!CONVERT_EXPR_P (arg0))
6793 return NULL_TREE;
6794
6795 outer_type = TREE_TYPE (arg0);
6796 arg0_inner = TREE_OPERAND (arg0, 0);
6797 inner_type = TREE_TYPE (arg0_inner);
6798
6799 #ifdef HAVE_canonicalize_funcptr_for_compare
6800 /* Disable this optimization if we're casting a function pointer
6801 type on targets that require function pointer canonicalization. */
6802 if (HAVE_canonicalize_funcptr_for_compare
6803 && TREE_CODE (inner_type) == POINTER_TYPE
6804 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6805 return NULL_TREE;
6806 #endif
6807
6808 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6809 return NULL_TREE;
6810
6811 if (TREE_CODE (arg1) != INTEGER_CST
6812 && !(CONVERT_EXPR_P (arg1)
6813 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6814 return NULL_TREE;
6815
6816 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6817 && code != NE_EXPR
6818 && code != EQ_EXPR)
6819 return NULL_TREE;
6820
6821 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6822 return NULL_TREE;
6823
6824 if (TREE_CODE (arg1) == INTEGER_CST)
6825 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6826 TREE_OVERFLOW (arg1));
6827 else
6828 arg1 = fold_convert_loc (loc, inner_type, arg1);
6829
6830 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6831 }
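/* E.g. with `unsigned int u', the comparison `(int) u == 5' changes
   only the sign, not the precision, of the operand, so it folds to
   `u == 5U' with the constant refitted into the inner type.  */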
6832
6833
6834 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6835 means A >= Y && A != MAX, but in this case we know that
6836 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6837
6838 static tree
6839 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6840 {
6841 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6842
6843 if (TREE_CODE (bound) == LT_EXPR)
6844 a = TREE_OPERAND (bound, 0);
6845 else if (TREE_CODE (bound) == GT_EXPR)
6846 a = TREE_OPERAND (bound, 1);
6847 else
6848 return NULL_TREE;
6849
6850 typea = TREE_TYPE (a);
6851 if (!INTEGRAL_TYPE_P (typea)
6852 && !POINTER_TYPE_P (typea))
6853 return NULL_TREE;
6854
6855 if (TREE_CODE (ineq) == LT_EXPR)
6856 {
6857 a1 = TREE_OPERAND (ineq, 1);
6858 y = TREE_OPERAND (ineq, 0);
6859 }
6860 else if (TREE_CODE (ineq) == GT_EXPR)
6861 {
6862 a1 = TREE_OPERAND (ineq, 0);
6863 y = TREE_OPERAND (ineq, 1);
6864 }
6865 else
6866 return NULL_TREE;
6867
6868 if (TREE_TYPE (a1) != typea)
6869 return NULL_TREE;
6870
6871 if (POINTER_TYPE_P (typea))
6872 {
6873 /* Convert the pointer types to integers before taking the difference.  */
6874 tree ta = fold_convert_loc (loc, ssizetype, a);
6875 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6876 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6877 }
6878 else
6879 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6880
6881 if (!diff || !integer_onep (diff))
6882 return NULL_TREE;
6883
6884 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6885 }
6886
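/* Illustrative example for fold_to_nonsharp_ineq_using_bound (an
   editorial sketch, not part of the original source): for the
   expression  a < x && a + 1 > y  with integral A, the caller passes
   INEQ = "a + 1 > y" and BOUND = "a < x".  The routine checks that
   the difference between "a + 1" and "a" folds to exactly one and
   returns "a >= y", replacing the sharp inequality by a non-sharp
   one.  */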
6887 /* Fold a sum or difference of at least one multiplication.
6888 Returns the folded tree or NULL if no simplification could be made. */
6889
6890 static tree
6891 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6892 tree arg0, tree arg1)
6893 {
6894 tree arg00, arg01, arg10, arg11;
6895 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6896
6897 /* (A * C) +- (B * C) -> (A+-B) * C.
6898 (A * C) +- A -> A * (C+-1).
6899 We are most concerned about the case where C is a constant,
6900 but other combinations show up during loop reduction. Since
6901 it is not difficult, try all four possibilities. */
6902
6903 if (TREE_CODE (arg0) == MULT_EXPR)
6904 {
6905 arg00 = TREE_OPERAND (arg0, 0);
6906 arg01 = TREE_OPERAND (arg0, 1);
6907 }
6908 else if (TREE_CODE (arg0) == INTEGER_CST)
6909 {
6910 arg00 = build_one_cst (type);
6911 arg01 = arg0;
6912 }
6913 else
6914 {
6915 /* We cannot generate constant 1 for fract. */
6916 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6917 return NULL_TREE;
6918 arg00 = arg0;
6919 arg01 = build_one_cst (type);
6920 }
6921 if (TREE_CODE (arg1) == MULT_EXPR)
6922 {
6923 arg10 = TREE_OPERAND (arg1, 0);
6924 arg11 = TREE_OPERAND (arg1, 1);
6925 }
6926 else if (TREE_CODE (arg1) == INTEGER_CST)
6927 {
6928 arg10 = build_one_cst (type);
6929 	      /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6930 		 the purpose of this canonicalization. */
6931 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6932 && negate_expr_p (arg1)
6933 && code == PLUS_EXPR)
6934 {
6935 arg11 = negate_expr (arg1);
6936 code = MINUS_EXPR;
6937 }
6938 else
6939 arg11 = arg1;
6940 }
6941 else
6942 {
6943 /* We cannot generate constant 1 for fract. */
6944 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6945 return NULL_TREE;
6946 arg10 = arg1;
6947 arg11 = build_one_cst (type);
6948 }
6949 same = NULL_TREE;
6950
6951 if (operand_equal_p (arg01, arg11, 0))
6952 same = arg01, alt0 = arg00, alt1 = arg10;
6953 else if (operand_equal_p (arg00, arg10, 0))
6954 same = arg00, alt0 = arg01, alt1 = arg11;
6955 else if (operand_equal_p (arg00, arg11, 0))
6956 same = arg00, alt0 = arg01, alt1 = arg10;
6957 else if (operand_equal_p (arg01, arg10, 0))
6958 same = arg01, alt0 = arg00, alt1 = arg11;
6959
6960 /* No identical multiplicands; see if we can find a common
6961 power-of-two factor in non-power-of-two multiplies. This
6962 can help in multi-dimensional array access. */
6963 else if (tree_fits_shwi_p (arg01)
6964 && tree_fits_shwi_p (arg11))
6965 {
6966 HOST_WIDE_INT int01, int11, tmp;
6967 bool swap = false;
6968 tree maybe_same;
6969 int01 = tree_to_shwi (arg01);
6970 int11 = tree_to_shwi (arg11);
6971
6972 /* Move min of absolute values to int11. */
6973 if (absu_hwi (int01) < absu_hwi (int11))
6974 {
6975 tmp = int01, int01 = int11, int11 = tmp;
6976 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6977 maybe_same = arg01;
6978 swap = true;
6979 }
6980 else
6981 maybe_same = arg11;
6982
6983 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6984 	      /* The remainder should not be a constant; otherwise we
6985 		 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
6986 		 increases the number of multiplications necessary. */
6987 && TREE_CODE (arg10) != INTEGER_CST)
6988 {
6989 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6990 build_int_cst (TREE_TYPE (arg00),
6991 int01 / int11));
6992 alt1 = arg10;
6993 same = maybe_same;
6994 if (swap)
6995 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6996 }
6997 }
6998
6999 if (same)
7000 return fold_build2_loc (loc, MULT_EXPR, type,
7001 fold_build2_loc (loc, code, type,
7002 fold_convert_loc (loc, type, alt0),
7003 fold_convert_loc (loc, type, alt1)),
7004 fold_convert_loc (loc, type, same));
7005
7006 return NULL_TREE;
7007 }
7008
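/* A few illustrative inputs and results for the folding above (an
   editorial sketch, not part of the original source):

     a * 4 + b * 4    ->  (a + b) * 4      identical multiplicands
     a * 4 + a        ->  a * 5            the (A * C) +- A case
     i * 12 + j * 4   ->  (i * 3 + j) * 4  common power-of-two factor  */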
7009 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7010 specified by EXPR into the buffer PTR of length LEN bytes.
7011 Return the number of bytes placed in the buffer, or zero
7012 upon failure. */
7013
7014 static int
7015 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7016 {
7017 tree type = TREE_TYPE (expr);
7018 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7019 int byte, offset, word, words;
7020 unsigned char value;
7021
7022 if ((off == -1 && total_bytes > len)
7023 || off >= total_bytes)
7024 return 0;
7025 if (off == -1)
7026 off = 0;
7027 words = total_bytes / UNITS_PER_WORD;
7028
7029 for (byte = 0; byte < total_bytes; byte++)
7030 {
7031 int bitpos = byte * BITS_PER_UNIT;
7032 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7033 number of bytes. */
7034 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7035
7036 if (total_bytes > UNITS_PER_WORD)
7037 {
7038 word = byte / UNITS_PER_WORD;
7039 if (WORDS_BIG_ENDIAN)
7040 word = (words - 1) - word;
7041 offset = word * UNITS_PER_WORD;
7042 if (BYTES_BIG_ENDIAN)
7043 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7044 else
7045 offset += byte % UNITS_PER_WORD;
7046 }
7047 else
7048 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7049 if (offset >= off
7050 && offset - off < len)
7051 ptr[offset - off] = value;
7052 }
7053 return MIN (len, total_bytes - off);
7054 }
7055
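/* Byte-layout example for native_encode_int (an editorial sketch,
   assuming a 32-bit int no wider than the target word): encoding the
   constant 0x01020304 yields the buffer { 0x04, 0x03, 0x02, 0x01 }
   when BYTES_BIG_ENDIAN is false and { 0x01, 0x02, 0x03, 0x04 } when
   it is true, i.e. the buffer holds the constant's target memory
   image.  */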
7056
7057 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7058 specified by EXPR into the buffer PTR of length LEN bytes.
7059 Return the number of bytes placed in the buffer, or zero
7060 upon failure. */
7061
7062 static int
7063 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7064 {
7065 tree type = TREE_TYPE (expr);
7066 enum machine_mode mode = TYPE_MODE (type);
7067 int total_bytes = GET_MODE_SIZE (mode);
7068 FIXED_VALUE_TYPE value;
7069 tree i_value, i_type;
7070
7071 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7072 return 0;
7073
7074 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7075
7076 if (NULL_TREE == i_type
7077 	      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7078 return 0;
7079
7080 value = TREE_FIXED_CST (expr);
7081 i_value = double_int_to_tree (i_type, value.data);
7082
7083 return native_encode_int (i_value, ptr, len, off);
7084 }
7085
7086
7087 /* Subroutine of native_encode_expr. Encode the REAL_CST
7088 specified by EXPR into the buffer PTR of length LEN bytes.
7089 Return the number of bytes placed in the buffer, or zero
7090 upon failure. */
7091
7092 static int
7093 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7094 {
7095 tree type = TREE_TYPE (expr);
7096 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7097 int byte, offset, word, words, bitpos;
7098 unsigned char value;
7099
7100 /* There are always 32 bits in each long, no matter the size of
7101 	     the host's long. We handle floating point representations with
7102 up to 192 bits. */
7103 long tmp[6];
7104
7105 if ((off == -1 && total_bytes > len)
7106 || off >= total_bytes)
7107 return 0;
7108 if (off == -1)
7109 off = 0;
7110 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7111
7112 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7113
7114 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7115 bitpos += BITS_PER_UNIT)
7116 {
7117 byte = (bitpos / BITS_PER_UNIT) & 3;
7118 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7119
7120 if (UNITS_PER_WORD < 4)
7121 {
7122 word = byte / UNITS_PER_WORD;
7123 if (WORDS_BIG_ENDIAN)
7124 word = (words - 1) - word;
7125 offset = word * UNITS_PER_WORD;
7126 if (BYTES_BIG_ENDIAN)
7127 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7128 else
7129 offset += byte % UNITS_PER_WORD;
7130 }
7131 else
7132 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7133 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7134 if (offset >= off
7135 && offset - off < len)
7136 ptr[offset - off] = value;
7137 }
7138 return MIN (len, total_bytes - off);
7139 }
7140
7141 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7142 specified by EXPR into the buffer PTR of length LEN bytes.
7143 Return the number of bytes placed in the buffer, or zero
7144 upon failure. */
7145
7146 static int
7147 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7148 {
7149 int rsize, isize;
7150 tree part;
7151
7152 part = TREE_REALPART (expr);
7153 rsize = native_encode_expr (part, ptr, len, off);
7154 if (off == -1
7155 && rsize == 0)
7156 return 0;
7157 part = TREE_IMAGPART (expr);
7158 if (off != -1)
7159 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7160 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7161 if (off == -1
7162 && isize != rsize)
7163 return 0;
7164 return rsize + isize;
7165 }
7166
7167
7168 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7169 specified by EXPR into the buffer PTR of length LEN bytes.
7170 Return the number of bytes placed in the buffer, or zero
7171 upon failure. */
7172
7173 static int
7174 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7175 {
7176 unsigned i, count;
7177 int size, offset;
7178 tree itype, elem;
7179
7180 offset = 0;
7181 count = VECTOR_CST_NELTS (expr);
7182 itype = TREE_TYPE (TREE_TYPE (expr));
7183 size = GET_MODE_SIZE (TYPE_MODE (itype));
7184 for (i = 0; i < count; i++)
7185 {
7186 if (off >= size)
7187 {
7188 off -= size;
7189 continue;
7190 }
7191 elem = VECTOR_CST_ELT (expr, i);
7192 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7193 if ((off == -1 && res != size)
7194 || res == 0)
7195 return 0;
7196 offset += res;
7197 if (offset >= len)
7198 return offset;
7199 if (off != -1)
7200 off = 0;
7201 }
7202 return offset;
7203 }
7204
7205
7206 /* Subroutine of native_encode_expr. Encode the STRING_CST
7207 specified by EXPR into the buffer PTR of length LEN bytes.
7208 Return the number of bytes placed in the buffer, or zero
7209 upon failure. */
7210
7211 static int
7212 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7213 {
7214 tree type = TREE_TYPE (expr);
7215 HOST_WIDE_INT total_bytes;
7216
7217 if (TREE_CODE (type) != ARRAY_TYPE
7218 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7219 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7220 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7221 return 0;
7222 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7223 if ((off == -1 && total_bytes > len)
7224 || off >= total_bytes)
7225 return 0;
7226 if (off == -1)
7227 off = 0;
7228 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7229 {
7230 int written = 0;
7231 if (off < TREE_STRING_LENGTH (expr))
7232 {
7233 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7234 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7235 }
7236 memset (ptr + written, 0,
7237 MIN (total_bytes - written, len - written));
7238 }
7239 else
7240 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7241 return MIN (total_bytes - off, len);
7242 }
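/* Illustrative example for native_encode_string (an editorial sketch,
   not part of the original source): for the initializer "ab" stored
   in a char[4], TREE_STRING_LENGTH is typically 3 (counting the
   terminating NUL), so encoding with OFF == -1 and LEN >= 4 copies
   { 'a', 'b', 0 } and zero-pads the rest, producing
   { 'a', 'b', 0, 0 }.  */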
7243
7244
7245 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7246 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7247 buffer PTR of length LEN bytes. If OFF is not -1 then start
7248 the encoding at byte offset OFF and encode at most LEN bytes.
7249 Return the number of bytes placed in the buffer, or zero upon failure. */
7250
7251 int
7252 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7253 {
7254 switch (TREE_CODE (expr))
7255 {
7256 case INTEGER_CST:
7257 return native_encode_int (expr, ptr, len, off);
7258
7259 case REAL_CST:
7260 return native_encode_real (expr, ptr, len, off);
7261
7262 case FIXED_CST:
7263 return native_encode_fixed (expr, ptr, len, off);
7264
7265 case COMPLEX_CST:
7266 return native_encode_complex (expr, ptr, len, off);
7267
7268 case VECTOR_CST:
7269 return native_encode_vector (expr, ptr, len, off);
7270
7271 case STRING_CST:
7272 return native_encode_string (expr, ptr, len, off);
7273
7274 default:
7275 return 0;
7276 }
7277 }
7278
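#if 0
/* A minimal usage sketch for the native encoding routines (editorial
   addition, not part of the original source; the helper name is
   hypothetical and only demonstrates the calling convention).  It
   encodes a constant into its target memory image and reinterprets
   the bytes as another type, much as fold_view_convert_expr does
   below.  */
static tree
example_native_round_trip (void)
{
  unsigned char buf[64];
  tree cst = build_int_cst (integer_type_node, 0x01020304);
  /* OFF == -1 requests the whole constant; a non-negative OFF would
     start the encoding at that byte offset instead.  */
  int len = native_encode_expr (cst, buf, sizeof (buf), -1);
  if (len == 0)
    return NULL_TREE;
  /* Reinterpret the encoded bytes as a single-precision float.  */
  return native_interpret_expr (float_type_node, buf, len);
}
#endif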
7279
7280 /* Subroutine of native_interpret_expr. Interpret the contents of
7281 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7282 If the buffer cannot be interpreted, return NULL_TREE. */
7283
7284 static tree
7285 native_interpret_int (tree type, const unsigned char *ptr, int len)
7286 {
7287 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7288
7289 if (total_bytes > len
7290 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7291 return NULL_TREE;
7292
7293 wide_int result = wi::from_buffer (ptr, total_bytes);
7294
7295 return wide_int_to_tree (type, result);
7296 }
7297
7298
7299 /* Subroutine of native_interpret_expr. Interpret the contents of
7300 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7301 If the buffer cannot be interpreted, return NULL_TREE. */
7302
7303 static tree
7304 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7305 {
7306 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7307 double_int result;
7308 FIXED_VALUE_TYPE fixed_value;
7309
7310 if (total_bytes > len
7311 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7312 return NULL_TREE;
7313
7314 result = double_int::from_buffer (ptr, total_bytes);
7315 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7316
7317 return build_fixed (type, fixed_value);
7318 }
7319
7320
7321 /* Subroutine of native_interpret_expr. Interpret the contents of
7322 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7323 If the buffer cannot be interpreted, return NULL_TREE. */
7324
7325 static tree
7326 native_interpret_real (tree type, const unsigned char *ptr, int len)
7327 {
7328 enum machine_mode mode = TYPE_MODE (type);
7329 int total_bytes = GET_MODE_SIZE (mode);
7330 int byte, offset, word, words, bitpos;
7331 unsigned char value;
7332 /* There are always 32 bits in each long, no matter the size of
7333 	     the host's long. We handle floating point representations with
7334 up to 192 bits. */
7335 REAL_VALUE_TYPE r;
7336 long tmp[6];
7337
7339 if (total_bytes > len || total_bytes > 24)
7340 return NULL_TREE;
7341 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7342
7343 memset (tmp, 0, sizeof (tmp));
7344 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7345 bitpos += BITS_PER_UNIT)
7346 {
7347 byte = (bitpos / BITS_PER_UNIT) & 3;
7348 if (UNITS_PER_WORD < 4)
7349 {
7350 word = byte / UNITS_PER_WORD;
7351 if (WORDS_BIG_ENDIAN)
7352 word = (words - 1) - word;
7353 offset = word * UNITS_PER_WORD;
7354 if (BYTES_BIG_ENDIAN)
7355 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7356 else
7357 offset += byte % UNITS_PER_WORD;
7358 }
7359 else
7360 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7361 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7362
7363 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7364 }
7365
7366 real_from_target (&r, tmp, mode);
7367 return build_real (type, r);
7368 }
7369
7370
7371 /* Subroutine of native_interpret_expr. Interpret the contents of
7372 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7373 If the buffer cannot be interpreted, return NULL_TREE. */
7374
7375 static tree
7376 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7377 {
7378 tree etype, rpart, ipart;
7379 int size;
7380
7381 etype = TREE_TYPE (type);
7382 size = GET_MODE_SIZE (TYPE_MODE (etype));
7383 if (size * 2 > len)
7384 return NULL_TREE;
7385 rpart = native_interpret_expr (etype, ptr, size);
7386 if (!rpart)
7387 return NULL_TREE;
7388 ipart = native_interpret_expr (etype, ptr+size, size);
7389 if (!ipart)
7390 return NULL_TREE;
7391 return build_complex (type, rpart, ipart);
7392 }
7393
7394
7395 /* Subroutine of native_interpret_expr. Interpret the contents of
7396 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7397 If the buffer cannot be interpreted, return NULL_TREE. */
7398
7399 static tree
7400 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7401 {
7402 tree etype, elem;
7403 int i, size, count;
7404 tree *elements;
7405
7406 etype = TREE_TYPE (type);
7407 size = GET_MODE_SIZE (TYPE_MODE (etype));
7408 count = TYPE_VECTOR_SUBPARTS (type);
7409 if (size * count > len)
7410 return NULL_TREE;
7411
7412 elements = XALLOCAVEC (tree, count);
7413 for (i = count - 1; i >= 0; i--)
7414 {
7415 elem = native_interpret_expr (etype, ptr+(i*size), size);
7416 if (!elem)
7417 return NULL_TREE;
7418 elements[i] = elem;
7419 }
7420 return build_vector (type, elements);
7421 }
7422
7423
7424 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7425 the buffer PTR of length LEN as a constant of type TYPE. For
7426 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7427 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7428 return NULL_TREE. */
7429
7430 tree
7431 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7432 {
7433 switch (TREE_CODE (type))
7434 {
7435 case INTEGER_TYPE:
7436 case ENUMERAL_TYPE:
7437 case BOOLEAN_TYPE:
7438 case POINTER_TYPE:
7439 case REFERENCE_TYPE:
7440 return native_interpret_int (type, ptr, len);
7441
7442 case REAL_TYPE:
7443 return native_interpret_real (type, ptr, len);
7444
7445 case FIXED_POINT_TYPE:
7446 return native_interpret_fixed (type, ptr, len);
7447
7448 case COMPLEX_TYPE:
7449 return native_interpret_complex (type, ptr, len);
7450
7451 case VECTOR_TYPE:
7452 return native_interpret_vector (type, ptr, len);
7453
7454 default:
7455 return NULL_TREE;
7456 }
7457 }
7458
7459 /* Returns true if we can interpret the contents of a native encoding
7460 as TYPE. */
7461
7462 static bool
7463 can_native_interpret_type_p (tree type)
7464 {
7465 switch (TREE_CODE (type))
7466 {
7467 case INTEGER_TYPE:
7468 case ENUMERAL_TYPE:
7469 case BOOLEAN_TYPE:
7470 case POINTER_TYPE:
7471 case REFERENCE_TYPE:
7472 case FIXED_POINT_TYPE:
7473 case REAL_TYPE:
7474 case COMPLEX_TYPE:
7475 case VECTOR_TYPE:
7476 return true;
7477 default:
7478 return false;
7479 }
7480 }
7481
7482 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7483 TYPE at compile-time. If we're unable to perform the conversion
7484 return NULL_TREE. */
7485
7486 static tree
7487 fold_view_convert_expr (tree type, tree expr)
7488 {
7489 /* We support up to 512-bit values (for V8DFmode). */
7490 unsigned char buffer[64];
7491 int len;
7492
7493 /* Check that the host and target are sane. */
7494 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7495 return NULL_TREE;
7496
7497 len = native_encode_expr (expr, buffer, sizeof (buffer));
7498 if (len == 0)
7499 return NULL_TREE;
7500
7501 return native_interpret_expr (type, buffer, len);
7502 }
7503
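/* Illustrative example (editorial, not part of the original source):
   for an IEEE single-precision float target, fold_view_convert_expr
   folds VIEW_CONVERT_EXPR<int>(1.0f) to the integer constant
   0x3f800000, the bit pattern of 1.0f, entirely at compile time.  */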
7504 /* Build an expression for the address of T. Folds away INDIRECT_REF
7505 to avoid confusing the gimplify process. */
7506
7507 tree
7508 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7509 {
7510 /* The size of the object is not relevant when talking about its address. */
7511 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7512 t = TREE_OPERAND (t, 0);
7513
7514 if (TREE_CODE (t) == INDIRECT_REF)
7515 {
7516 t = TREE_OPERAND (t, 0);
7517
7518 if (TREE_TYPE (t) != ptrtype)
7519 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7520 }
7521 else if (TREE_CODE (t) == MEM_REF
7522 && integer_zerop (TREE_OPERAND (t, 1)))
7523 return TREE_OPERAND (t, 0);
7524 else if (TREE_CODE (t) == MEM_REF
7525 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7526 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7527 TREE_OPERAND (t, 0),
7528 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7529 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7530 {
7531 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7532
7533 if (TREE_TYPE (t) != ptrtype)
7534 t = fold_convert_loc (loc, ptrtype, t);
7535 }
7536 else
7537 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7538
7539 return t;
7540 }
7541
7542 /* Build an expression for the address of T. */
7543
7544 tree
7545 build_fold_addr_expr_loc (location_t loc, tree t)
7546 {
7547 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7548
7549 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7550 }
7551
7552 static bool vec_cst_ctor_to_array (tree, tree *);
7553
7554 /* Fold a unary expression of code CODE and type TYPE with operand
7555 OP0. Return the folded expression if folding is successful.
7556 Otherwise, return NULL_TREE. */
7557
7558 tree
7559 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7560 {
7561 tree tem;
7562 tree arg0;
7563 enum tree_code_class kind = TREE_CODE_CLASS (code);
7564
7565 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7566 && TREE_CODE_LENGTH (code) == 1);
7567
7568 tem = generic_simplify (loc, code, type, op0);
7569 if (tem)
7570 return tem;
7571
7572 arg0 = op0;
7573 if (arg0)
7574 {
7575 if (CONVERT_EXPR_CODE_P (code)
7576 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7577 {
7578 /* Don't use STRIP_NOPS, because signedness of argument type
7579 matters. */
7580 STRIP_SIGN_NOPS (arg0);
7581 }
7582 else
7583 {
7584 /* Strip any conversions that don't change the mode. This
7585 is safe for every expression, except for a comparison
7586 expression because its signedness is derived from its
7587 operands.
7588
7589 Note that this is done as an internal manipulation within
7590 the constant folder, in order to find the simplest
7591 representation of the arguments so that their form can be
7592 	     studied. In any case, the appropriate type conversions
7593 should be put back in the tree that will get out of the
7594 constant folder. */
7595 STRIP_NOPS (arg0);
7596 }
7597 }
7598
7599 if (TREE_CODE_CLASS (code) == tcc_unary)
7600 {
7601 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7602 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7603 fold_build1_loc (loc, code, type,
7604 fold_convert_loc (loc, TREE_TYPE (op0),
7605 TREE_OPERAND (arg0, 1))));
7606 else if (TREE_CODE (arg0) == COND_EXPR)
7607 {
7608 tree arg01 = TREE_OPERAND (arg0, 1);
7609 tree arg02 = TREE_OPERAND (arg0, 2);
7610 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7611 arg01 = fold_build1_loc (loc, code, type,
7612 fold_convert_loc (loc,
7613 TREE_TYPE (op0), arg01));
7614 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7615 arg02 = fold_build1_loc (loc, code, type,
7616 fold_convert_loc (loc,
7617 TREE_TYPE (op0), arg02));
7618 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7619 arg01, arg02);
7620
7621 	  /* If this was a conversion, and all we did was to move it
7622 	     inside the COND_EXPR, bring it back out. But leave it if
7623 it is a conversion from integer to integer and the
7624 result precision is no wider than a word since such a
7625 conversion is cheap and may be optimized away by combine,
7626 while it couldn't if it were outside the COND_EXPR. Then return
7627 so we don't get into an infinite recursion loop taking the
7628 conversion out and then back in. */
7629
7630 if ((CONVERT_EXPR_CODE_P (code)
7631 || code == NON_LVALUE_EXPR)
7632 && TREE_CODE (tem) == COND_EXPR
7633 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7634 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7635 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7636 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7637 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7638 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7639 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7640 && (INTEGRAL_TYPE_P
7641 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7642 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7643 || flag_syntax_only))
7644 tem = build1_loc (loc, code, type,
7645 build3 (COND_EXPR,
7646 TREE_TYPE (TREE_OPERAND
7647 (TREE_OPERAND (tem, 1), 0)),
7648 TREE_OPERAND (tem, 0),
7649 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7650 TREE_OPERAND (TREE_OPERAND (tem, 2),
7651 0)));
7652 return tem;
7653 }
7654 }
7655
7656 switch (code)
7657 {
7658 case PAREN_EXPR:
7659 /* Re-association barriers around constants and other re-association
7660 barriers can be removed. */
7661 if (CONSTANT_CLASS_P (op0)
7662 || TREE_CODE (op0) == PAREN_EXPR)
7663 return fold_convert_loc (loc, type, op0);
7664 return NULL_TREE;
7665
7666 case NON_LVALUE_EXPR:
7667 if (!maybe_lvalue_p (op0))
7668 return fold_convert_loc (loc, type, op0);
7669 return NULL_TREE;
7670
7671 CASE_CONVERT:
7672 case FLOAT_EXPR:
7673 case FIX_TRUNC_EXPR:
7674 if (TREE_TYPE (op0) == type)
7675 return op0;
7676
7677 if (COMPARISON_CLASS_P (op0))
7678 {
7679 /* If we have (type) (a CMP b) and type is an integral type, return
7680 new expression involving the new type. Canonicalize
7681 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7682 non-integral type.
7683 	       Do not fold the result as that would not simplify further;
7684 	       folding it again would only recurse. */
7685 if (TREE_CODE (type) == BOOLEAN_TYPE)
7686 return build2_loc (loc, TREE_CODE (op0), type,
7687 TREE_OPERAND (op0, 0),
7688 TREE_OPERAND (op0, 1));
7689 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7690 && TREE_CODE (type) != VECTOR_TYPE)
7691 return build3_loc (loc, COND_EXPR, type, op0,
7692 constant_boolean_node (true, type),
7693 constant_boolean_node (false, type));
7694 }
7695
7696 /* Handle cases of two conversions in a row. */
7697 if (CONVERT_EXPR_P (op0))
7698 {
7699 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7700 tree inter_type = TREE_TYPE (op0);
7701 int inside_int = INTEGRAL_TYPE_P (inside_type);
7702 int inside_ptr = POINTER_TYPE_P (inside_type);
7703 int inside_float = FLOAT_TYPE_P (inside_type);
7704 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7705 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7706 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7707 int inter_int = INTEGRAL_TYPE_P (inter_type);
7708 int inter_ptr = POINTER_TYPE_P (inter_type);
7709 int inter_float = FLOAT_TYPE_P (inter_type);
7710 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7711 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7712 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7713 int final_int = INTEGRAL_TYPE_P (type);
7714 int final_ptr = POINTER_TYPE_P (type);
7715 int final_float = FLOAT_TYPE_P (type);
7716 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7717 unsigned int final_prec = TYPE_PRECISION (type);
7718 int final_unsignedp = TYPE_UNSIGNED (type);
7719
7720 /* In addition to the cases of two conversions in a row
7721 handled below, if we are converting something to its own
7722 type via an object of identical or wider precision, neither
7723 conversion is needed. */
7724 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7725 && (((inter_int || inter_ptr) && final_int)
7726 || (inter_float && final_float))
7727 && inter_prec >= final_prec)
7728 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7729
7730 /* Likewise, if the intermediate and initial types are either both
7731 float or both integer, we don't need the middle conversion if the
7732 former is wider than the latter and doesn't change the signedness
7733 (for integers). Avoid this if the final type is a pointer since
7734 then we sometimes need the middle conversion. Likewise if the
7735 final type has a precision not equal to the size of its mode. */
7736 if (((inter_int && inside_int)
7737 || (inter_float && inside_float)
7738 || (inter_vec && inside_vec))
7739 && inter_prec >= inside_prec
7740 && (inter_float || inter_vec
7741 || inter_unsignedp == inside_unsignedp)
7742 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7743 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7744 && ! final_ptr
7745 && (! final_vec || inter_prec == inside_prec))
7746 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7747
7748 /* If we have a sign-extension of a zero-extended value, we can
7749 replace that by a single zero-extension. Likewise if the
7750 final conversion does not change precision we can drop the
7751 intermediate conversion. */
7752 if (inside_int && inter_int && final_int
7753 && ((inside_prec < inter_prec && inter_prec < final_prec
7754 && inside_unsignedp && !inter_unsignedp)
7755 || final_prec == inter_prec))
7756 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7757
7758 /* Two conversions in a row are not needed unless:
7759 - some conversion is floating-point (overstrict for now), or
7760 - some conversion is a vector (overstrict for now), or
7761 - the intermediate type is narrower than both initial and
7762 final, or
7763 - the intermediate type and innermost type differ in signedness,
7764 and the outermost type is wider than the intermediate, or
7765 - the initial type is a pointer type and the precisions of the
7766 intermediate and final types differ, or
7767 - the final type is a pointer type and the precisions of the
7768 initial and intermediate types differ. */
7769 if (! inside_float && ! inter_float && ! final_float
7770 && ! inside_vec && ! inter_vec && ! final_vec
7771 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7772 && ! (inside_int && inter_int
7773 && inter_unsignedp != inside_unsignedp
7774 && inter_prec < final_prec)
7775 && ((inter_unsignedp && inter_prec > inside_prec)
7776 == (final_unsignedp && final_prec > inter_prec))
7777 && ! (inside_ptr && inter_prec != final_prec)
7778 && ! (final_ptr && inside_prec != inter_prec)
7779 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7780 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7781 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7782 }
7783
7784 /* Handle (T *)&A.B.C for A being of type T and B and C
7785 living at offset zero. This occurs frequently in
7786 C++ upcasting and then accessing the base. */
7787 if (TREE_CODE (op0) == ADDR_EXPR
7788 && POINTER_TYPE_P (type)
7789 && handled_component_p (TREE_OPERAND (op0, 0)))
7790 {
7791 HOST_WIDE_INT bitsize, bitpos;
7792 tree offset;
7793 enum machine_mode mode;
7794 int unsignedp, volatilep;
7795 tree base = TREE_OPERAND (op0, 0);
7796 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7797 &mode, &unsignedp, &volatilep, false);
7798 /* If the reference was to a (constant) zero offset, we can use
7799 the address of the base if it has the same base type
7800 as the result type and the pointer type is unqualified. */
7801 if (! offset && bitpos == 0
7802 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7803 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7804 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7805 return fold_convert_loc (loc, type,
7806 build_fold_addr_expr_loc (loc, base));
7807 }
7808
7809 if (TREE_CODE (op0) == MODIFY_EXPR
7810 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7811 /* Detect assigning a bitfield. */
7812 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7813 && DECL_BIT_FIELD
7814 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7815 {
7816 /* Don't leave an assignment inside a conversion
7817 unless assigning a bitfield. */
7818 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7819 /* First do the assignment, then return converted constant. */
7820 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7821 TREE_NO_WARNING (tem) = 1;
7822 TREE_USED (tem) = 1;
7823 return tem;
7824 }
7825
7826 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7827 	     constant (if x has signed type, the sign bit cannot be set
7828 in c). This folds extension into the BIT_AND_EXPR.
7829 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7830 very likely don't have maximal range for their precision and this
7831 transformation effectively doesn't preserve non-maximal ranges. */
7832 if (TREE_CODE (type) == INTEGER_TYPE
7833 && TREE_CODE (op0) == BIT_AND_EXPR
7834 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7835 {
7836 tree and_expr = op0;
7837 tree and0 = TREE_OPERAND (and_expr, 0);
7838 tree and1 = TREE_OPERAND (and_expr, 1);
7839 int change = 0;
7840
7841 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7842 || (TYPE_PRECISION (type)
7843 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7844 change = 1;
7845 else if (TYPE_PRECISION (TREE_TYPE (and1))
7846 <= HOST_BITS_PER_WIDE_INT
7847 && tree_fits_uhwi_p (and1))
7848 {
7849 unsigned HOST_WIDE_INT cst;
7850
7851 cst = tree_to_uhwi (and1);
7852 cst &= HOST_WIDE_INT_M1U
7853 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7854 change = (cst == 0);
7855 #ifdef LOAD_EXTEND_OP
7856 if (change
7857 && !flag_syntax_only
7858 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7859 == ZERO_EXTEND))
7860 {
7861 tree uns = unsigned_type_for (TREE_TYPE (and0));
7862 and0 = fold_convert_loc (loc, uns, and0);
7863 and1 = fold_convert_loc (loc, uns, and1);
7864 }
7865 #endif
7866 }
7867 if (change)
7868 {
7869 tem = force_fit_type (type, wi::to_widest (and1), 0,
7870 TREE_OVERFLOW (and1));
7871 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7872 fold_convert_loc (loc, type, and0), tem);
7873 }
7874 }
7875
7876 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7877 when one of the new casts will fold away. Conservatively we assume
7878 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7879 if (POINTER_TYPE_P (type)
7880 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7881 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7882 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7883 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7884 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7885 {
7886 tree arg00 = TREE_OPERAND (arg0, 0);
7887 tree arg01 = TREE_OPERAND (arg0, 1);
7888
7889 return fold_build_pointer_plus_loc
7890 (loc, fold_convert_loc (loc, type, arg00), arg01);
7891 }
7892
7893 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7894 of the same precision, and X is an integer type not narrower than
7895 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7896 if (INTEGRAL_TYPE_P (type)
7897 && TREE_CODE (op0) == BIT_NOT_EXPR
7898 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7899 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7900 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7901 {
7902 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7903 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7904 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7905 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7906 fold_convert_loc (loc, type, tem));
7907 }
7908
7909 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7910 type of X and Y (integer types only). */
7911 if (INTEGRAL_TYPE_P (type)
7912 && TREE_CODE (op0) == MULT_EXPR
7913 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7914 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7915 {
7916 /* Be careful not to introduce new overflows. */
7917 tree mult_type;
7918 if (TYPE_OVERFLOW_WRAPS (type))
7919 mult_type = type;
7920 else
7921 mult_type = unsigned_type_for (type);
7922
7923 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7924 {
7925 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7926 fold_convert_loc (loc, mult_type,
7927 TREE_OPERAND (op0, 0)),
7928 fold_convert_loc (loc, mult_type,
7929 TREE_OPERAND (op0, 1)));
7930 return fold_convert_loc (loc, type, tem);
7931 }
7932 }
7933
7934 tem = fold_convert_const (code, type, arg0);
7935 return tem ? tem : NULL_TREE;
7936
7937 case ADDR_SPACE_CONVERT_EXPR:
7938 if (integer_zerop (arg0))
7939 return fold_convert_const (code, type, arg0);
7940 return NULL_TREE;
7941
7942 case FIXED_CONVERT_EXPR:
7943 tem = fold_convert_const (code, type, arg0);
7944 return tem ? tem : NULL_TREE;
7945
7946 case VIEW_CONVERT_EXPR:
7947 if (TREE_TYPE (op0) == type)
7948 return op0;
7949 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7950 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7951 type, TREE_OPERAND (op0, 0));
7952 if (TREE_CODE (op0) == MEM_REF)
7953 return fold_build2_loc (loc, MEM_REF, type,
7954 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7955
7956 /* For integral conversions with the same precision or pointer
7957 conversions use a NOP_EXPR instead. */
7958 if ((INTEGRAL_TYPE_P (type)
7959 || POINTER_TYPE_P (type))
7960 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7961 || POINTER_TYPE_P (TREE_TYPE (op0)))
7962 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7963 return fold_convert_loc (loc, type, op0);
7964
7965 /* Strip inner integral conversions that do not change the precision. */
7966 if (CONVERT_EXPR_P (op0)
7967 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7968 || POINTER_TYPE_P (TREE_TYPE (op0)))
7969 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7970 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7971 && (TYPE_PRECISION (TREE_TYPE (op0))
7972 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7973 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7974 type, TREE_OPERAND (op0, 0));
7975
7976 return fold_view_convert_expr (type, op0);
7977
7978 case NEGATE_EXPR:
7979 tem = fold_negate_expr (loc, arg0);
7980 if (tem)
7981 return fold_convert_loc (loc, type, tem);
7982 return NULL_TREE;
7983
7984 case ABS_EXPR:
7985 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7986 return fold_abs_const (arg0, type);
7987 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7988 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7989 /* Convert fabs((double)float) into (double)fabsf(float). */
7990 else if (TREE_CODE (arg0) == NOP_EXPR
7991 && TREE_CODE (type) == REAL_TYPE)
7992 {
7993 tree targ0 = strip_float_extensions (arg0);
7994 if (targ0 != arg0)
7995 return fold_convert_loc (loc, type,
7996 fold_build1_loc (loc, ABS_EXPR,
7997 TREE_TYPE (targ0),
7998 targ0));
7999 }
8000 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8001 else if (TREE_CODE (arg0) == ABS_EXPR)
8002 return arg0;
8003 else if (tree_expr_nonnegative_p (arg0))
8004 return arg0;
8005
8006 /* Strip sign ops from argument. */
8007 if (TREE_CODE (type) == REAL_TYPE)
8008 {
8009 tem = fold_strip_sign_ops (arg0);
8010 if (tem)
8011 return fold_build1_loc (loc, ABS_EXPR, type,
8012 fold_convert_loc (loc, type, tem));
8013 }
8014 return NULL_TREE;
8015
8016 case CONJ_EXPR:
8017 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8018 return fold_convert_loc (loc, type, arg0);
8019 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8020 {
8021 tree itype = TREE_TYPE (type);
8022 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8023 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8024 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8025 negate_expr (ipart));
8026 }
8027 if (TREE_CODE (arg0) == COMPLEX_CST)
8028 {
8029 tree itype = TREE_TYPE (type);
8030 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8031 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8032 return build_complex (type, rpart, negate_expr (ipart));
8033 }
8034 if (TREE_CODE (arg0) == CONJ_EXPR)
8035 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8036 return NULL_TREE;
8037
8038 case BIT_NOT_EXPR:
8039 if (TREE_CODE (arg0) == INTEGER_CST)
8040 return fold_not_const (arg0, type);
8041 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8042 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8043 /* Convert ~ (-A) to A - 1. */
8044 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8045 return fold_build2_loc (loc, MINUS_EXPR, type,
8046 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8047 build_int_cst (type, 1));
8048 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8049 else if (INTEGRAL_TYPE_P (type)
8050 && ((TREE_CODE (arg0) == MINUS_EXPR
8051 && integer_onep (TREE_OPERAND (arg0, 1)))
8052 || (TREE_CODE (arg0) == PLUS_EXPR
8053 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8054 return fold_build1_loc (loc, NEGATE_EXPR, type,
8055 fold_convert_loc (loc, type,
8056 TREE_OPERAND (arg0, 0)));
8057 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8058 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8059 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8060 fold_convert_loc (loc, type,
8061 TREE_OPERAND (arg0, 0)))))
8062 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8063 fold_convert_loc (loc, type,
8064 TREE_OPERAND (arg0, 1)));
8065 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8066 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8067 fold_convert_loc (loc, type,
8068 TREE_OPERAND (arg0, 1)))))
8069 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8070 fold_convert_loc (loc, type,
8071 TREE_OPERAND (arg0, 0)), tem);
8072 /* Perform BIT_NOT_EXPR on each element individually. */
8073 else if (TREE_CODE (arg0) == VECTOR_CST)
8074 {
8075 tree *elements;
8076 tree elem;
8077 unsigned count = VECTOR_CST_NELTS (arg0), i;
8078
8079 elements = XALLOCAVEC (tree, count);
8080 for (i = 0; i < count; i++)
8081 {
8082 elem = VECTOR_CST_ELT (arg0, i);
8083 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8084 if (elem == NULL_TREE)
8085 break;
8086 elements[i] = elem;
8087 }
8088 if (i == count)
8089 return build_vector (type, elements);
8090 }
8091 else if (COMPARISON_CLASS_P (arg0)
8092 && (VECTOR_TYPE_P (type)
8093 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8094 {
8095 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8096 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8097 HONOR_NANS (TYPE_MODE (op_type)));
8098 if (subcode != ERROR_MARK)
8099 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8100 TREE_OPERAND (arg0, 1));
8101 }
8102
8103
8104 return NULL_TREE;
8105
8106 case TRUTH_NOT_EXPR:
8107 /* Note that the operand of this must be an int
8108 and its values must be 0 or 1.
8109 ("true" is a fixed value perhaps depending on the language,
8110 but we don't handle values other than 1 correctly yet.) */
8111 tem = fold_truth_not_expr (loc, arg0);
8112 if (!tem)
8113 return NULL_TREE;
8114 return fold_convert_loc (loc, type, tem);
8115
8116 case REALPART_EXPR:
8117 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8118 return fold_convert_loc (loc, type, arg0);
8119 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8120 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8121 TREE_OPERAND (arg0, 1));
8122 if (TREE_CODE (arg0) == COMPLEX_CST)
8123 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8124 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8125 {
8126 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8127 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8128 fold_build1_loc (loc, REALPART_EXPR, itype,
8129 TREE_OPERAND (arg0, 0)),
8130 fold_build1_loc (loc, REALPART_EXPR, itype,
8131 TREE_OPERAND (arg0, 1)));
8132 return fold_convert_loc (loc, type, tem);
8133 }
8134 if (TREE_CODE (arg0) == CONJ_EXPR)
8135 {
8136 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8137 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8138 TREE_OPERAND (arg0, 0));
8139 return fold_convert_loc (loc, type, tem);
8140 }
8141 if (TREE_CODE (arg0) == CALL_EXPR)
8142 {
8143 tree fn = get_callee_fndecl (arg0);
8144 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8145 switch (DECL_FUNCTION_CODE (fn))
8146 {
8147 CASE_FLT_FN (BUILT_IN_CEXPI):
8148 fn = mathfn_built_in (type, BUILT_IN_COS);
8149 if (fn)
8150 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8151 break;
8152
8153 default:
8154 break;
8155 }
8156 }
8157 return NULL_TREE;
8158
8159 case IMAGPART_EXPR:
8160 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8161 return build_zero_cst (type);
8162 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8163 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8164 TREE_OPERAND (arg0, 0));
8165 if (TREE_CODE (arg0) == COMPLEX_CST)
8166 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8167 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8168 {
8169 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8170 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8171 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8172 TREE_OPERAND (arg0, 0)),
8173 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8174 TREE_OPERAND (arg0, 1)));
8175 return fold_convert_loc (loc, type, tem);
8176 }
8177 if (TREE_CODE (arg0) == CONJ_EXPR)
8178 {
8179 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8180 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8181 return fold_convert_loc (loc, type, negate_expr (tem));
8182 }
8183 if (TREE_CODE (arg0) == CALL_EXPR)
8184 {
8185 tree fn = get_callee_fndecl (arg0);
8186 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8187 switch (DECL_FUNCTION_CODE (fn))
8188 {
8189 CASE_FLT_FN (BUILT_IN_CEXPI):
8190 fn = mathfn_built_in (type, BUILT_IN_SIN);
8191 if (fn)
8192 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8193 break;
8194
8195 default:
8196 break;
8197 }
8198 }
8199 return NULL_TREE;
8200
8201 case INDIRECT_REF:
8202 /* Fold *&X to X if X is an lvalue. */
8203 if (TREE_CODE (op0) == ADDR_EXPR)
8204 {
8205 tree op00 = TREE_OPERAND (op0, 0);
8206 if ((TREE_CODE (op00) == VAR_DECL
8207 || TREE_CODE (op00) == PARM_DECL
8208 || TREE_CODE (op00) == RESULT_DECL)
8209 && !TREE_READONLY (op00))
8210 return op00;
8211 }
8212 return NULL_TREE;
8213
8214 case VEC_UNPACK_LO_EXPR:
8215 case VEC_UNPACK_HI_EXPR:
8216 case VEC_UNPACK_FLOAT_LO_EXPR:
8217 case VEC_UNPACK_FLOAT_HI_EXPR:
8218 {
8219 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8220 tree *elts;
8221 enum tree_code subcode;
8222
8223 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8224 if (TREE_CODE (arg0) != VECTOR_CST)
8225 return NULL_TREE;
8226
8227 elts = XALLOCAVEC (tree, nelts * 2);
8228 if (!vec_cst_ctor_to_array (arg0, elts))
8229 return NULL_TREE;
8230
8231 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8232 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8233 elts += nelts;
8234
8235 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8236 subcode = NOP_EXPR;
8237 else
8238 subcode = FLOAT_EXPR;
8239
8240 for (i = 0; i < nelts; i++)
8241 {
8242 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8243 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8244 return NULL_TREE;
8245 }
8246
8247 return build_vector (type, elts);
8248 }
8249
8250 case REDUC_MIN_EXPR:
8251 case REDUC_MAX_EXPR:
8252 case REDUC_PLUS_EXPR:
8253 {
8254 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8255 tree *elts;
8256 enum tree_code subcode;
8257
8258 if (TREE_CODE (op0) != VECTOR_CST)
8259 return NULL_TREE;
8260
8261 elts = XALLOCAVEC (tree, nelts);
8262 if (!vec_cst_ctor_to_array (op0, elts))
8263 return NULL_TREE;
8264
8265 switch (code)
8266 {
8267 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8268 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8269 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8270 default: gcc_unreachable ();
8271 }
8272
8273 for (i = 1; i < nelts; i++)
8274 {
8275 elts[0] = const_binop (subcode, elts[0], elts[i]);
8276 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8277 return NULL_TREE;
8278 elts[i] = build_zero_cst (TREE_TYPE (type));
8279 }
8280
8281 return build_vector (type, elts);
8282 }
8283
8284 default:
8285 return NULL_TREE;
8286 } /* switch (code) */
8287 }
8288
8289
8290 /* If the operation was a conversion, do _not_ mark a resulting constant
8291 with TREE_OVERFLOW if the original constant was not. These conversions
8292 have implementation defined behavior and retaining the TREE_OVERFLOW
8293 flag here would confuse later passes such as VRP. */
8294 tree
8295 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8296 tree type, tree op0)
8297 {
8298 tree res = fold_unary_loc (loc, code, type, op0);
8299 if (res
8300 && TREE_CODE (res) == INTEGER_CST
8301 && TREE_CODE (op0) == INTEGER_CST
8302 && CONVERT_EXPR_CODE_P (code))
8303 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8304
8305 return res;
8306 }
8307
8308 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8309 operands OP0 and OP1. LOC is the location of the resulting expression.
8310    ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8311 Return the folded expression if folding is successful. Otherwise,
8312 return NULL_TREE. */
8313 static tree
8314 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8315 tree arg0, tree arg1, tree op0, tree op1)
8316 {
8317 tree tem;
8318
8319 /* We only do these simplifications if we are optimizing. */
8320 if (!optimize)
8321 return NULL_TREE;
8322
8323 /* Check for things like (A || B) && (A || C). We can convert this
8324 to A || (B && C). Note that either operator can be any of the four
8325 truth and/or operations and the transformation will still be
8326 valid. Also note that we only care about order for the
8327 ANDIF and ORIF operators. If B contains side effects, this
8328 might change the truth-value of A. */
8329 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8330 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8331 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8332 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8333 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8334 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8335 {
8336 tree a00 = TREE_OPERAND (arg0, 0);
8337 tree a01 = TREE_OPERAND (arg0, 1);
8338 tree a10 = TREE_OPERAND (arg1, 0);
8339 tree a11 = TREE_OPERAND (arg1, 1);
8340 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8341 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8342 && (code == TRUTH_AND_EXPR
8343 || code == TRUTH_OR_EXPR));
8344
8345 if (operand_equal_p (a00, a10, 0))
8346 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8347 fold_build2_loc (loc, code, type, a01, a11));
8348 else if (commutative && operand_equal_p (a00, a11, 0))
8349 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8350 fold_build2_loc (loc, code, type, a01, a10));
8351 else if (commutative && operand_equal_p (a01, a10, 0))
8352 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8353 fold_build2_loc (loc, code, type, a00, a11));
8354
8355       /* This case is tricky because we must either have commutative
8356 operators or else A10 must not have side-effects. */
8357
8358 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8359 && operand_equal_p (a01, a11, 0))
8360 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8361 fold_build2_loc (loc, code, type, a00, a10),
8362 a01);
8363 }
8364
8365 /* See if we can build a range comparison. */
8366 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8367 return tem;
8368
8369 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8370 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8371 {
8372 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8373 if (tem)
8374 return fold_build2_loc (loc, code, type, tem, arg1);
8375 }
8376
8377 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8378 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8379 {
8380 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8381 if (tem)
8382 return fold_build2_loc (loc, code, type, arg0, tem);
8383 }
8384
8385 /* Check for the possibility of merging component references. If our
8386 lhs is another similar operation, try to merge its rhs with our
8387 rhs. Then try to merge our lhs and rhs. */
8388 if (TREE_CODE (arg0) == code
8389 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8390 TREE_OPERAND (arg0, 1), arg1)))
8391 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8392
8393 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8394 return tem;
8395
8396 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8397 && (code == TRUTH_AND_EXPR
8398 || code == TRUTH_ANDIF_EXPR
8399 || code == TRUTH_OR_EXPR
8400 || code == TRUTH_ORIF_EXPR))
8401 {
8402 enum tree_code ncode, icode;
8403
8404 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8405 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8406 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8407
8408 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8409 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8410 	 We don't want to pack more than two leaves into a non-IF AND/OR
8411 	 expression.
8412 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
8413 	 and isn't equal to IF-CODE, then we don't want to add the right-hand
8414 	 operand. If the inner right-hand side of the left-hand operand
8415 	 has side effects, or isn't simple, then we can't add to it, as
8416 	 otherwise we might destroy the if-sequence. */
8417 if (TREE_CODE (arg0) == icode
8418 && simple_operand_p_2 (arg1)
8419 	     /* Needed for sequence points, to handle traps and
8420 		side effects. */
8421 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8422 {
8423 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8424 arg1);
8425 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8426 tem);
8427 }
8428       /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8429 	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8430 else if (TREE_CODE (arg1) == icode
8431 && simple_operand_p_2 (arg0)
8432 	     /* Needed for sequence points, to handle traps and
8433 		side effects. */
8434 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8435 {
8436 tem = fold_build2_loc (loc, ncode, type,
8437 arg0, TREE_OPERAND (arg1, 0));
8438 return fold_build2_loc (loc, icode, type, tem,
8439 TREE_OPERAND (arg1, 1));
8440 }
8441 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8442 into (A OR B).
8443 	 For sequence point consistency, we need to check for trapping
8444 	 and side effects. */
8445 else if (code == icode && simple_operand_p_2 (arg0)
8446 && simple_operand_p_2 (arg1))
8447 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8448 }
8449
8450 return NULL_TREE;
8451 }
8452
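/* Illustrative example of the non-short-circuit packing above
   (editorial, not part of the original source): with
   LOGICAL_OP_NON_SHORT_CIRCUIT enabled and simple operands a, b, c,

       (a && b) && c    becomes    a && (b & c)

   i.e. the inner pair is rewritten as a non-IF TRUTH_AND_EXPR so the
   whole expression needs fewer conditional jumps.  */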
8453 /* Fold a binary expression of code CODE and type TYPE with operands
8454 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8455 Return the folded expression if folding is successful. Otherwise,
8456 return NULL_TREE. */
8457
8458 static tree
8459 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8460 {
8461 enum tree_code compl_code;
8462
8463 if (code == MIN_EXPR)
8464 compl_code = MAX_EXPR;
8465 else if (code == MAX_EXPR)
8466 compl_code = MIN_EXPR;
8467 else
8468 gcc_unreachable ();
8469
8470 /* MIN (MAX (a, b), b) == b. */
8471 if (TREE_CODE (op0) == compl_code
8472 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8473 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8474
8475 /* MIN (MAX (b, a), b) == b. */
8476 if (TREE_CODE (op0) == compl_code
8477 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8478 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8479 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8480
8481 /* MIN (a, MAX (a, b)) == a. */
8482 if (TREE_CODE (op1) == compl_code
8483 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8484 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8485 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8486
8487 /* MIN (a, MAX (b, a)) == a. */
8488 if (TREE_CODE (op1) == compl_code
8489 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8490 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8491 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8492
8493 return NULL_TREE;
8494 }
8495
8496 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8497 by changing CODE to reduce the magnitude of constants involved in
8498 ARG0 of the comparison.
8499 Returns a canonicalized comparison tree if a simplification was
8500 possible, otherwise returns NULL_TREE.
8501 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8502 valid if signed overflow is undefined. */
8503
8504 static tree
8505 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8506 tree arg0, tree arg1,
8507 bool *strict_overflow_p)
8508 {
8509 enum tree_code code0 = TREE_CODE (arg0);
8510 tree t, cst0 = NULL_TREE;
8511 int sgn0;
8512 bool swap = false;
8513
8514 /* Match A +- CST code arg1 and CST code arg1. We can change the
8515 first form only if overflow is undefined. */
8516 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8517 /* In principle pointers also have undefined overflow behavior,
8518 but that causes problems elsewhere. */
8519 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8520 && (code0 == MINUS_EXPR
8521 || code0 == PLUS_EXPR)
8522 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8523 || code0 == INTEGER_CST))
8524 return NULL_TREE;
8525
8526 /* Identify the constant in arg0 and its sign. */
8527 if (code0 == INTEGER_CST)
8528 cst0 = arg0;
8529 else
8530 cst0 = TREE_OPERAND (arg0, 1);
8531 sgn0 = tree_int_cst_sgn (cst0);
8532
8533 /* Overflowed constants and zero will cause problems. */
8534 if (integer_zerop (cst0)
8535 || TREE_OVERFLOW (cst0))
8536 return NULL_TREE;
8537
8538 /* See if we can reduce the magnitude of the constant in
8539 arg0 by changing the comparison code. */
8540 if (code0 == INTEGER_CST)
8541 {
8542 /* CST <= arg1 -> CST-1 < arg1. */
8543 if (code == LE_EXPR && sgn0 == 1)
8544 code = LT_EXPR;
8545 /* -CST < arg1 -> -CST-1 <= arg1. */
8546 else if (code == LT_EXPR && sgn0 == -1)
8547 code = LE_EXPR;
8548 /* CST > arg1 -> CST-1 >= arg1. */
8549 else if (code == GT_EXPR && sgn0 == 1)
8550 code = GE_EXPR;
8551 /* -CST >= arg1 -> -CST-1 > arg1. */
8552 else if (code == GE_EXPR && sgn0 == -1)
8553 code = GT_EXPR;
8554 else
8555 return NULL_TREE;
8556 /* arg1 code' CST' might be more canonical. */
8557 swap = true;
8558 }
8559 else
8560 {
8561 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8562 if (code == LT_EXPR
8563 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8564 code = LE_EXPR;
8565 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8566 else if (code == GT_EXPR
8567 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8568 code = GE_EXPR;
8569 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8570 else if (code == LE_EXPR
8571 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8572 code = LT_EXPR;
8573 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8574 else if (code == GE_EXPR
8575 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8576 code = GT_EXPR;
8577 else
8578 return NULL_TREE;
8579 *strict_overflow_p = true;
8580 }
8581
8582 /* Now build the constant reduced in magnitude. But not if that
8583 would produce one outside of its type's range. */
8584 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8585 && ((sgn0 == 1
8586 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8587 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8588 || (sgn0 == -1
8589 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8590 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8591 /* We cannot swap the comparison here as that would cause us to
8592 endlessly recurse. */
8593 return NULL_TREE;
8594
8595 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8596 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8597 if (code0 != INTEGER_CST)
8598 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8599 t = fold_convert (TREE_TYPE (arg1), t);
8600
8601 /* If swapping might yield a more canonical form, do so. */
8602 if (swap)
8603 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8604 else
8605 return fold_build2_loc (loc, code, type, t, arg1);
8606 }
8607
8608 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8609 overflow further. Try to decrease the magnitude of constants involved
8610 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8611 and put sole constants at the second argument position.
8612 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8613
8614 static tree
8615 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8616 tree arg0, tree arg1)
8617 {
8618 tree t;
8619 bool strict_overflow_p;
8620 const char * const warnmsg = G_("assuming signed overflow does not occur "
8621 "when reducing constant in comparison");
8622
8623 /* Try canonicalization by simplifying arg0. */
8624 strict_overflow_p = false;
8625 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8626 &strict_overflow_p);
8627 if (t)
8628 {
8629 if (strict_overflow_p)
8630 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8631 return t;
8632 }
8633
8634 /* Try canonicalization by simplifying arg1 using the swapped
8635 comparison. */
8636 code = swap_tree_comparison (code);
8637 strict_overflow_p = false;
8638 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8639 &strict_overflow_p);
8640 if (t && strict_overflow_p)
8641 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8642 return t;
8643 }
8644
8645 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8646 space. This is used to avoid issuing overflow warnings for
8647 expressions like &p->x which cannot wrap. */
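/* Continuing the &p->x example (a sketch, not the only caller
   pattern): BASE is P, OFFSET is NULL_TREE and BITPOS is the bit
   offset of member X. That byte offset necessarily stays below
   sizeof (*P), so we return false; an unknown, negative or
   overflowed offset conservatively returns true.  */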
8648
8649 static bool
8650 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8651 {
8652 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8653 return true;
8654
8655 if (bitpos < 0)
8656 return true;
8657
8658 wide_int wi_offset;
8659 int precision = TYPE_PRECISION (TREE_TYPE (base));
8660 if (offset == NULL_TREE)
8661 wi_offset = wi::zero (precision);
8662 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8663 return true;
8664 else
8665 wi_offset = offset;
8666
8667 bool overflow;
8668 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8669 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8670 if (overflow)
8671 return true;
8672
8673 if (!wi::fits_uhwi_p (total))
8674 return true;
8675
8676 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8677 if (size <= 0)
8678 return true;
8679
8680 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8681 array. */
8682 if (TREE_CODE (base) == ADDR_EXPR)
8683 {
8684 HOST_WIDE_INT base_size;
8685
8686 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8687 if (base_size > 0 && size < base_size)
8688 size = base_size;
8689 }
8690
8691 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8692 }
8693
8694 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8695 of sizetype kind. This makes sure to properly sign-extend the
8696 constant. */
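/* For example (hypothetical 16-bit precision on a 64-bit host): a
   constant with all 16 bits set yields -1 rather than 65535,
   because the low PREC bits are sign-extended.  */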
8697
8698 static HOST_WIDE_INT
8699 size_low_cst (const_tree t)
8700 {
8701 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8702 int prec = TYPE_PRECISION (TREE_TYPE (t));
8703 if (prec < HOST_BITS_PER_WIDE_INT)
8704 return sext_hwi (w, prec);
8705 return w;
8706 }
8707
8708 /* Subroutine of fold_binary. This routine performs all of the
8709 transformations that are common to the equality/inequality
8710 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8711 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8712 fold_binary should go through fold_binary rather than calling this
8713 directly. Fold a comparison with tree code CODE and type TYPE with
8714 operands OP0 and OP1. Return the folded comparison or NULL_TREE. */
8715
8716 static tree
8717 fold_comparison (location_t loc, enum tree_code code, tree type,
8718 tree op0, tree op1)
8719 {
8720 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8721 tree arg0, arg1, tem;
8722
8723 arg0 = op0;
8724 arg1 = op1;
8725
8726 STRIP_SIGN_NOPS (arg0);
8727 STRIP_SIGN_NOPS (arg1);
8728
8729 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
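/* E.g. x + 9 == 12 becomes x == 3 and x - 7 > 20 becomes x > 27
   (hypothetical values; the non-equality forms additionally
   require signed overflow to be undefined).  */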
8730 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8731 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8732 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8733 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8734 && TREE_CODE (arg1) == INTEGER_CST
8735 && !TREE_OVERFLOW (arg1))
8736 {
8737 const enum tree_code
8738 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8739 tree const1 = TREE_OPERAND (arg0, 1);
8740 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8741 tree variable = TREE_OPERAND (arg0, 0);
8742 tree new_const = int_const_binop (reverse_op, const2, const1);
8743
8744 /* If the constant operation overflowed this can be
8745 simplified as a comparison against INT_MAX/INT_MIN. */
8746 if (TREE_OVERFLOW (new_const))
8747 {
8748 int const1_sgn = tree_int_cst_sgn (const1);
8749 enum tree_code code2 = code;
8750
8751 /* Get the sign of the constant on the lhs if the
8752 operation were VARIABLE + CONST1. */
8753 if (TREE_CODE (arg0) == MINUS_EXPR)
8754 const1_sgn = -const1_sgn;
8755
8756 /* The sign of the constant determines if we overflowed
8757 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8758 Canonicalize to the INT_MIN overflow by swapping the comparison
8759 if necessary. */
8760 if (const1_sgn == -1)
8761 code2 = swap_tree_comparison (code);
8762
8763 /* We now can look at the canonicalized case
8764 VARIABLE + 1 CODE2 INT_MIN
8765 and decide on the result. */
8766 switch (code2)
8767 {
8768 case EQ_EXPR:
8769 case LT_EXPR:
8770 case LE_EXPR:
8771 return
8772 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8773
8774 case NE_EXPR:
8775 case GE_EXPR:
8776 case GT_EXPR:
8777 return
8778 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8779
8780 default:
8781 gcc_unreachable ();
8782 }
8783 }
8784 else
8785 {
8786 if (!equality_code)
8787 fold_overflow_warning ("assuming signed overflow does not occur "
8788 "when changing X +- C1 cmp C2 to "
8789 "X cmp C2 -+ C1",
8790 WARN_STRICT_OVERFLOW_COMPARISON);
8791 return fold_build2_loc (loc, code, type, variable, new_const);
8792 }
8793 }
8794
8795 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8796 if (TREE_CODE (arg0) == MINUS_EXPR
8797 && equality_code
8798 && integer_zerop (arg1))
8799 {
8800 /* ??? The transformation is valid for the other operators if overflow
8801 is undefined for the type, but performing it here badly interacts
8802 with the transformation in fold_cond_expr_with_comparison which
8803 attempts to synthesize ABS_EXPR. */
8804 if (!equality_code)
8805 fold_overflow_warning ("assuming signed overflow does not occur "
8806 "when changing X - Y cmp 0 to X cmp Y",
8807 WARN_STRICT_OVERFLOW_COMPARISON);
8808 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8809 TREE_OPERAND (arg0, 1));
8810 }
8811
8812 /* For comparisons of pointers we can decompose it to a compile time
8813 comparison of the base objects and the offsets into the object.
8814 This requires at least one operand being an ADDR_EXPR or a
8815 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8816 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8817 && (TREE_CODE (arg0) == ADDR_EXPR
8818 || TREE_CODE (arg1) == ADDR_EXPR
8819 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8820 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8821 {
8822 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8823 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8824 enum machine_mode mode;
8825 int volatilep, unsignedp;
8826 bool indirect_base0 = false, indirect_base1 = false;
8827
8828 /* Get base and offset for the access. Strip ADDR_EXPR for
8829 get_inner_reference, but put it back by stripping INDIRECT_REF
8830 off the base object if possible. indirect_baseN will be true
8831 if baseN is not an address but refers to the object itself. */
8832 base0 = arg0;
8833 if (TREE_CODE (arg0) == ADDR_EXPR)
8834 {
8835 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8836 &bitsize, &bitpos0, &offset0, &mode,
8837 &unsignedp, &volatilep, false);
8838 if (TREE_CODE (base0) == INDIRECT_REF)
8839 base0 = TREE_OPERAND (base0, 0);
8840 else
8841 indirect_base0 = true;
8842 }
8843 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8844 {
8845 base0 = TREE_OPERAND (arg0, 0);
8846 STRIP_SIGN_NOPS (base0);
8847 if (TREE_CODE (base0) == ADDR_EXPR)
8848 {
8849 base0 = TREE_OPERAND (base0, 0);
8850 indirect_base0 = true;
8851 }
8852 offset0 = TREE_OPERAND (arg0, 1);
8853 if (tree_fits_shwi_p (offset0))
8854 {
8855 HOST_WIDE_INT off = size_low_cst (offset0);
8856 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8857 * BITS_PER_UNIT)
8858 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8859 {
8860 bitpos0 = off * BITS_PER_UNIT;
8861 offset0 = NULL_TREE;
8862 }
8863 }
8864 }
8865
8866 base1 = arg1;
8867 if (TREE_CODE (arg1) == ADDR_EXPR)
8868 {
8869 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8870 &bitsize, &bitpos1, &offset1, &mode,
8871 &unsignedp, &volatilep, false);
8872 if (TREE_CODE (base1) == INDIRECT_REF)
8873 base1 = TREE_OPERAND (base1, 0);
8874 else
8875 indirect_base1 = true;
8876 }
8877 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8878 {
8879 base1 = TREE_OPERAND (arg1, 0);
8880 STRIP_SIGN_NOPS (base1);
8881 if (TREE_CODE (base1) == ADDR_EXPR)
8882 {
8883 base1 = TREE_OPERAND (base1, 0);
8884 indirect_base1 = true;
8885 }
8886 offset1 = TREE_OPERAND (arg1, 1);
8887 if (tree_fits_shwi_p (offset1))
8888 {
8889 HOST_WIDE_INT off = size_low_cst (offset1);
8890 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8891 * BITS_PER_UNIT)
8892 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8893 {
8894 bitpos1 = off * BITS_PER_UNIT;
8895 offset1 = NULL_TREE;
8896 }
8897 }
8898 }
8899
8900 /* A local variable can never be pointed to by
8901 the default SSA name of an incoming parameter. */
8902 if ((TREE_CODE (arg0) == ADDR_EXPR
8903 && indirect_base0
8904 && TREE_CODE (base0) == VAR_DECL
8905 && auto_var_in_fn_p (base0, current_function_decl)
8906 && !indirect_base1
8907 && TREE_CODE (base1) == SSA_NAME
8908 && SSA_NAME_IS_DEFAULT_DEF (base1)
8909 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8910 || (TREE_CODE (arg1) == ADDR_EXPR
8911 && indirect_base1
8912 && TREE_CODE (base1) == VAR_DECL
8913 && auto_var_in_fn_p (base1, current_function_decl)
8914 && !indirect_base0
8915 && TREE_CODE (base0) == SSA_NAME
8916 && SSA_NAME_IS_DEFAULT_DEF (base0)
8917 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8918 {
8919 if (code == NE_EXPR)
8920 return constant_boolean_node (1, type);
8921 else if (code == EQ_EXPR)
8922 return constant_boolean_node (0, type);
8923 }
8924 /* If we have equivalent bases we might be able to simplify. */
8925 else if (indirect_base0 == indirect_base1
8926 && operand_equal_p (base0, base1, 0))
8927 {
8928 /* We can fold this expression to a constant if the non-constant
8929 offset parts are equal. */
8930 if ((offset0 == offset1
8931 || (offset0 && offset1
8932 && operand_equal_p (offset0, offset1, 0)))
8933 && (code == EQ_EXPR
8934 || code == NE_EXPR
8935 || (indirect_base0 && DECL_P (base0))
8936 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8937
8938 {
8939 if (!equality_code
8940 && bitpos0 != bitpos1
8941 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8942 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8943 fold_overflow_warning (("assuming pointer wraparound does not "
8944 "occur when comparing P +- C1 with "
8945 "P +- C2"),
8946 WARN_STRICT_OVERFLOW_CONDITIONAL);
8947
8948 switch (code)
8949 {
8950 case EQ_EXPR:
8951 return constant_boolean_node (bitpos0 == bitpos1, type);
8952 case NE_EXPR:
8953 return constant_boolean_node (bitpos0 != bitpos1, type);
8954 case LT_EXPR:
8955 return constant_boolean_node (bitpos0 < bitpos1, type);
8956 case LE_EXPR:
8957 return constant_boolean_node (bitpos0 <= bitpos1, type);
8958 case GE_EXPR:
8959 return constant_boolean_node (bitpos0 >= bitpos1, type);
8960 case GT_EXPR:
8961 return constant_boolean_node (bitpos0 > bitpos1, type);
8962 default:;
8963 }
8964 }
8965 /* We can simplify the comparison to a comparison of the variable
8966 offset parts if the constant offset parts are equal.
8967 Be careful to use signed sizetype here because otherwise we
8968 mess with array offsets in the wrong way. This is possible
8969 because pointer arithmetic is restricted to remain within an
8970 object and overflow on pointer differences is undefined as of
8971 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8972 else if (bitpos0 == bitpos1
8973 && (equality_code
8974 || (indirect_base0 && DECL_P (base0))
8975 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8976 {
8977 /* By converting to signed sizetype we cover middle-end pointer
8978 arithmetic which operates on unsigned pointer types of size
8979 type size and ARRAY_REF offsets which are properly sign or
8980 zero extended from their type in case it is narrower than
8981 sizetype. */
8982 if (offset0 == NULL_TREE)
8983 offset0 = build_int_cst (ssizetype, 0);
8984 else
8985 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8986 if (offset1 == NULL_TREE)
8987 offset1 = build_int_cst (ssizetype, 0);
8988 else
8989 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8990
8991 if (!equality_code
8992 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8993 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8994 fold_overflow_warning (("assuming pointer wraparound does not "
8995 "occur when comparing P +- C1 with "
8996 "P +- C2"),
8997 WARN_STRICT_OVERFLOW_COMPARISON);
8998
8999 return fold_build2_loc (loc, code, type, offset0, offset1);
9000 }
9001 }
9002 /* For non-equal bases we can simplify if they are addresses
9003 of local binding decls or constants. */
9004 else if (indirect_base0 && indirect_base1
9005 /* We know that !operand_equal_p (base0, base1, 0)
9006 because the if condition was false. But make
9007 sure two decls are not the same. */
9008 && base0 != base1
9009 && TREE_CODE (arg0) == ADDR_EXPR
9010 && TREE_CODE (arg1) == ADDR_EXPR
9011 && (((TREE_CODE (base0) == VAR_DECL
9012 || TREE_CODE (base0) == PARM_DECL)
9013 && (targetm.binds_local_p (base0)
9014 || CONSTANT_CLASS_P (base1)))
9015 || CONSTANT_CLASS_P (base0))
9016 && (((TREE_CODE (base1) == VAR_DECL
9017 || TREE_CODE (base1) == PARM_DECL)
9018 && (targetm.binds_local_p (base1)
9019 || CONSTANT_CLASS_P (base0)))
9020 || CONSTANT_CLASS_P (base1)))
9021 {
9022 if (code == EQ_EXPR)
9023 return omit_two_operands_loc (loc, type, boolean_false_node,
9024 arg0, arg1);
9025 else if (code == NE_EXPR)
9026 return omit_two_operands_loc (loc, type, boolean_true_node,
9027 arg0, arg1);
9028 }
9029 /* For equal offsets we can simplify to a comparison of the
9030 base addresses. */
9031 else if (bitpos0 == bitpos1
9032 && (indirect_base0
9033 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9034 && (indirect_base1
9035 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9036 && ((offset0 == offset1)
9037 || (offset0 && offset1
9038 && operand_equal_p (offset0, offset1, 0))))
9039 {
9040 if (indirect_base0)
9041 base0 = build_fold_addr_expr_loc (loc, base0);
9042 if (indirect_base1)
9043 base1 = build_fold_addr_expr_loc (loc, base1);
9044 return fold_build2_loc (loc, code, type, base0, base1);
9045 }
9046 }
9047
9048 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9049 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9050 the resulting offset is smaller in absolute value than the
9051 original one and has the same sign. */
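/* A hypothetical instance: x + 4 < y + 6 becomes x < y + 2. The
   combined constant 2 is smaller in magnitude than 6 and has the
   same sign, so the rewrite is accepted.  */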
9052 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9053 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9054 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9055 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9056 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9057 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9058 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9059 {
9060 tree const1 = TREE_OPERAND (arg0, 1);
9061 tree const2 = TREE_OPERAND (arg1, 1);
9062 tree variable1 = TREE_OPERAND (arg0, 0);
9063 tree variable2 = TREE_OPERAND (arg1, 0);
9064 tree cst;
9065 const char * const warnmsg = G_("assuming signed overflow does not "
9066 "occur when combining constants around "
9067 "a comparison");
9068
9069 /* Put the constant on the side where it doesn't overflow and is
9070 of lower absolute value and of the same sign as before. */
9071 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9072 ? MINUS_EXPR : PLUS_EXPR,
9073 const2, const1);
9074 if (!TREE_OVERFLOW (cst)
9075 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9076 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9077 {
9078 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9079 return fold_build2_loc (loc, code, type,
9080 variable1,
9081 fold_build2_loc (loc, TREE_CODE (arg1),
9082 TREE_TYPE (arg1),
9083 variable2, cst));
9084 }
9085
9086 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9087 ? MINUS_EXPR : PLUS_EXPR,
9088 const1, const2);
9089 if (!TREE_OVERFLOW (cst)
9090 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9091 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9092 {
9093 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9094 return fold_build2_loc (loc, code, type,
9095 fold_build2_loc (loc, TREE_CODE (arg0),
9096 TREE_TYPE (arg0),
9097 variable1, cst),
9098 variable2);
9099 }
9100 }
9101
9102 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9103 signed arithmetic case. That form is created by the compiler
9104 often enough for folding it to be of value. One example is in
9105 computing loop trip counts after Operator Strength Reduction. */
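/* For example, with signed i: i * 4 > 0 becomes i > 0, while
   i * -4 > 0 swaps the comparison and becomes i < 0.  */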
9106 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9107 && TREE_CODE (arg0) == MULT_EXPR
9108 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9109 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9110 && integer_zerop (arg1))
9111 {
9112 tree const1 = TREE_OPERAND (arg0, 1);
9113 tree const2 = arg1; /* zero */
9114 tree variable1 = TREE_OPERAND (arg0, 0);
9115 enum tree_code cmp_code = code;
9116
9117 /* Handle unfolded multiplication by zero. */
9118 if (integer_zerop (const1))
9119 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9120
9121 fold_overflow_warning (("assuming signed overflow does not occur when "
9122 "eliminating multiplication in comparison "
9123 "with zero"),
9124 WARN_STRICT_OVERFLOW_COMPARISON);
9125
9126 /* If const1 is negative we swap the sense of the comparison. */
9127 if (tree_int_cst_sgn (const1) < 0)
9128 cmp_code = swap_tree_comparison (cmp_code);
9129
9130 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9131 }
9132
9133 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9134 if (tem)
9135 return tem;
9136
9137 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9138 {
9139 tree targ0 = strip_float_extensions (arg0);
9140 tree targ1 = strip_float_extensions (arg1);
9141 tree newtype = TREE_TYPE (targ0);
9142
9143 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9144 newtype = TREE_TYPE (targ1);
9145
9146 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9147 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9148 return fold_build2_loc (loc, code, type,
9149 fold_convert_loc (loc, newtype, targ0),
9150 fold_convert_loc (loc, newtype, targ1));
9151
9152 /* (-a) CMP (-b) -> b CMP a */
9153 if (TREE_CODE (arg0) == NEGATE_EXPR
9154 && TREE_CODE (arg1) == NEGATE_EXPR)
9155 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9156 TREE_OPERAND (arg0, 0));
9157
9158 if (TREE_CODE (arg1) == REAL_CST)
9159 {
9160 REAL_VALUE_TYPE cst;
9161 cst = TREE_REAL_CST (arg1);
9162
9163 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9164 if (TREE_CODE (arg0) == NEGATE_EXPR)
9165 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9166 TREE_OPERAND (arg0, 0),
9167 build_real (TREE_TYPE (arg1),
9168 real_value_negate (&cst)));
9169
9170 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9171 /* a CMP (-0) -> a CMP 0 */
9172 if (REAL_VALUE_MINUS_ZERO (cst))
9173 return fold_build2_loc (loc, code, type, arg0,
9174 build_real (TREE_TYPE (arg1), dconst0));
9175
9176 /* x != NaN is always true, other ops are always false. */
9177 if (REAL_VALUE_ISNAN (cst)
9178 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9179 {
9180 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9181 return omit_one_operand_loc (loc, type, tem, arg0);
9182 }
9183
9184 /* Fold comparisons against infinity. */
9185 if (REAL_VALUE_ISINF (cst)
9186 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9187 {
9188 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9189 if (tem != NULL_TREE)
9190 return tem;
9191 }
9192 }
9193
9194 /* If this is a comparison of a real constant with a PLUS_EXPR
9195 or a MINUS_EXPR of a real constant, we can convert it into a
9196 comparison with a revised real constant as long as no overflow
9197 occurs when unsafe_math_optimizations are enabled. */
9198 if (flag_unsafe_math_optimizations
9199 && TREE_CODE (arg1) == REAL_CST
9200 && (TREE_CODE (arg0) == PLUS_EXPR
9201 || TREE_CODE (arg0) == MINUS_EXPR)
9202 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9203 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9204 ? MINUS_EXPR : PLUS_EXPR,
9205 arg1, TREE_OPERAND (arg0, 1)))
9206 && !TREE_OVERFLOW (tem))
9207 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9208
9209 /* Likewise, we can simplify a comparison of a real constant with
9210 a MINUS_EXPR whose first operand is also a real constant, i.e.
9211 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9212 floating-point types only if -fassociative-math is set. */
9213 if (flag_associative_math
9214 && TREE_CODE (arg1) == REAL_CST
9215 && TREE_CODE (arg0) == MINUS_EXPR
9216 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9217 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9218 arg1))
9219 && !TREE_OVERFLOW (tem))
9220 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9221 TREE_OPERAND (arg0, 1), tem);
9222
9223 /* Fold comparisons against built-in math functions. */
9224 if (TREE_CODE (arg1) == REAL_CST
9225 && flag_unsafe_math_optimizations
9226 && ! flag_errno_math)
9227 {
9228 enum built_in_function fcode = builtin_mathfn_code (arg0);
9229
9230 if (fcode != END_BUILTINS)
9231 {
9232 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9233 if (tem != NULL_TREE)
9234 return tem;
9235 }
9236 }
9237 }
9238
9239 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9240 && CONVERT_EXPR_P (arg0))
9241 {
9242 /* If we are widening one operand of an integer comparison,
9243 see if the other operand is similarly being widened. Perhaps we
9244 can do the comparison in the narrower type. */
9245 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9246 if (tem)
9247 return tem;
9248
9249 /* Or if we are changing signedness. */
9250 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9251 if (tem)
9252 return tem;
9253 }
9254
9255 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9256 constant, we can simplify it. */
9257 if (TREE_CODE (arg1) == INTEGER_CST
9258 && (TREE_CODE (arg0) == MIN_EXPR
9259 || TREE_CODE (arg0) == MAX_EXPR)
9260 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9261 {
9262 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9263 if (tem)
9264 return tem;
9265 }
9266
9267 /* Simplify comparison of something with itself. (For IEEE
9268 floating-point, we can only do some of these simplifications.) */
9269 if (operand_equal_p (arg0, arg1, 0))
9270 {
9271 switch (code)
9272 {
9273 case EQ_EXPR:
9274 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9275 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9276 return constant_boolean_node (1, type);
9277 break;
9278
9279 case GE_EXPR:
9280 case LE_EXPR:
9281 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9282 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9283 return constant_boolean_node (1, type);
9284 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9285
9286 case NE_EXPR:
9287 /* For NE, we can only do this simplification for integer types
9288 or when we don't honor IEEE floating-point NaNs. */
9289 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9290 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9291 break;
9292 /* ... fall through ... */
9293 case GT_EXPR:
9294 case LT_EXPR:
9295 return constant_boolean_node (0, type);
9296 default:
9297 gcc_unreachable ();
9298 }
9299 }
9300
9301 /* If we are comparing an expression that just has comparisons
9302 of two integer values, arithmetic expressions of those comparisons,
9303 and constants, we can simplify it. There are only three cases
9304 to check: the two values can either be equal, the first can be
9305 greater, or the second can be greater. Fold the expression for
9306 those three values. Since each value must be 0 or 1, we have
9307 eight possibilities, each of which corresponds to the constant 0
9308 or 1 or one of the six possible comparisons.
9309
9310 This handles common cases like (a > b) == 0 but also handles
9311 expressions like ((x > y) - (y > x)) > 0, which supposedly
9312 occur in macroized code. */
9313
9314 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9315 {
9316 tree cval1 = 0, cval2 = 0;
9317 int save_p = 0;
9318
9319 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9320 /* Don't handle degenerate cases here; they should already
9321 have been handled anyway. */
9322 && cval1 != 0 && cval2 != 0
9323 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9324 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9325 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9326 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9327 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9328 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9329 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9330 {
9331 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9332 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9333
9334 /* We can't just pass T to eval_subst in case cval1 or cval2
9335 was the same as ARG1. */
9336
9337 tree high_result
9338 = fold_build2_loc (loc, code, type,
9339 eval_subst (loc, arg0, cval1, maxval,
9340 cval2, minval),
9341 arg1);
9342 tree equal_result
9343 = fold_build2_loc (loc, code, type,
9344 eval_subst (loc, arg0, cval1, maxval,
9345 cval2, maxval),
9346 arg1);
9347 tree low_result
9348 = fold_build2_loc (loc, code, type,
9349 eval_subst (loc, arg0, cval1, minval,
9350 cval2, maxval),
9351 arg1);
9352
9353 /* All three of these results should be 0 or 1. Confirm they are.
9354 Then use those values to select the proper code to use. */
9355
9356 if (TREE_CODE (high_result) == INTEGER_CST
9357 && TREE_CODE (equal_result) == INTEGER_CST
9358 && TREE_CODE (low_result) == INTEGER_CST)
9359 {
9360 /* Make a 3-bit mask with the high-order bit being the
9361 value for `>', the next for '=', and the low for '<'. */
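/* E.g. a mask of 5 (binary 101) means the expression is true for
   `>' and `<' but false for `=', which is exactly NE_EXPR, and a
   mask of 3 (binary 011) selects LE_EXPR.  */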
9362 switch ((integer_onep (high_result) * 4)
9363 + (integer_onep (equal_result) * 2)
9364 + integer_onep (low_result))
9365 {
9366 case 0:
9367 /* Always false. */
9368 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9369 case 1:
9370 code = LT_EXPR;
9371 break;
9372 case 2:
9373 code = EQ_EXPR;
9374 break;
9375 case 3:
9376 code = LE_EXPR;
9377 break;
9378 case 4:
9379 code = GT_EXPR;
9380 break;
9381 case 5:
9382 code = NE_EXPR;
9383 break;
9384 case 6:
9385 code = GE_EXPR;
9386 break;
9387 case 7:
9388 /* Always true. */
9389 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9390 }
9391
9392 if (save_p)
9393 {
9394 tem = save_expr (build2 (code, type, cval1, cval2));
9395 SET_EXPR_LOCATION (tem, loc);
9396 return tem;
9397 }
9398 return fold_build2_loc (loc, code, type, cval1, cval2);
9399 }
9400 }
9401 }
9402
9403 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9404 into a single range test. */
9405 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9406 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9407 && TREE_CODE (arg1) == INTEGER_CST
9408 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9409 && !integer_zerop (TREE_OPERAND (arg0, 1))
9410 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9411 && !TREE_OVERFLOW (arg1))
9412 {
9413 tem = fold_div_compare (loc, code, type, arg0, arg1);
9414 if (tem != NULL_TREE)
9415 return tem;
9416 }
9417
9418 /* Fold ~X op ~Y as Y op X. */
9419 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9420 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9421 {
9422 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9423 return fold_build2_loc (loc, code, type,
9424 fold_convert_loc (loc, cmp_type,
9425 TREE_OPERAND (arg1, 0)),
9426 TREE_OPERAND (arg0, 0));
9427 }
9428
9429 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9430 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9431 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9432 {
9433 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9434 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9435 TREE_OPERAND (arg0, 0),
9436 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9437 fold_convert_loc (loc, cmp_type, arg1)));
9438 }
9439
9440 return NULL_TREE;
9441 }
9442
9443
9444 /* Subroutine of fold_binary. Optimize complex multiplications of the
9445 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9446 argument EXPR represents the expression "z" of type TYPE. */
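/* The underlying identity, for z = a + b*i:

     (a + b*i) * (a - b*i) = a*a + b*b + 0*i

   so the result is built as COMPLEX_EXPR <a*a + b*b, 0>.  */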
9447
9448 static tree
9449 fold_mult_zconjz (location_t loc, tree type, tree expr)
9450 {
9451 tree itype = TREE_TYPE (type);
9452 tree rpart, ipart, tem;
9453
9454 if (TREE_CODE (expr) == COMPLEX_EXPR)
9455 {
9456 rpart = TREE_OPERAND (expr, 0);
9457 ipart = TREE_OPERAND (expr, 1);
9458 }
9459 else if (TREE_CODE (expr) == COMPLEX_CST)
9460 {
9461 rpart = TREE_REALPART (expr);
9462 ipart = TREE_IMAGPART (expr);
9463 }
9464 else
9465 {
9466 expr = save_expr (expr);
9467 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9468 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9469 }
9470
9471 rpart = save_expr (rpart);
9472 ipart = save_expr (ipart);
9473 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9474 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9475 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9476 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9477 build_zero_cst (itype));
9478 }
9479
9480
9481 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9482 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9483 guarantees that P and N have the same least significant log2(M) bits.
9484 N is not otherwise constrained. In particular, N is not normalized to
9485 0 <= N < M as is common. In general, the precise value of P is unknown.
9486 M is chosen as large as possible such that constant N can be determined.
9487
9488 Returns M and sets *RESIDUE to N.
9489
9490 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9491 account. This is not always possible due to PR 35705.
9492 */
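/* One illustrative case, assuming a hypothetical array A known to
   be 16-byte aligned: for EXPR = &a[0] p+ i * 4 p+ 2 we return
   M = 4 and set *RESIDUE to 2, since the pointer value is
   congruent to 2 modulo 4 whatever the value of i.  */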
9493
9494 static unsigned HOST_WIDE_INT
9495 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9496 bool allow_func_align)
9497 {
9498 enum tree_code code;
9499
9500 *residue = 0;
9501
9502 code = TREE_CODE (expr);
9503 if (code == ADDR_EXPR)
9504 {
9505 unsigned int bitalign;
9506 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9507 *residue /= BITS_PER_UNIT;
9508 return bitalign / BITS_PER_UNIT;
9509 }
9510 else if (code == POINTER_PLUS_EXPR)
9511 {
9512 tree op0, op1;
9513 unsigned HOST_WIDE_INT modulus;
9514 enum tree_code inner_code;
9515
9516 op0 = TREE_OPERAND (expr, 0);
9517 STRIP_NOPS (op0);
9518 modulus = get_pointer_modulus_and_residue (op0, residue,
9519 allow_func_align);
9520
9521 op1 = TREE_OPERAND (expr, 1);
9522 STRIP_NOPS (op1);
9523 inner_code = TREE_CODE (op1);
9524 if (inner_code == INTEGER_CST)
9525 {
9526 *residue += TREE_INT_CST_LOW (op1);
9527 return modulus;
9528 }
9529 else if (inner_code == MULT_EXPR)
9530 {
9531 op1 = TREE_OPERAND (op1, 1);
9532 if (TREE_CODE (op1) == INTEGER_CST)
9533 {
9534 unsigned HOST_WIDE_INT align;
9535
9536 /* Compute the greatest power-of-2 divisor of op1. */
9537 align = TREE_INT_CST_LOW (op1);
9538 align &= -align;
9539
9540 /* If align is non-zero and less than modulus, replace
9541 modulus with align. If align is 0, then either op1 is 0
9542 or the greatest power-of-2 divisor of op1 doesn't fit in an
9543 unsigned HOST_WIDE_INT. In either case, no additional
9544 constraint is imposed. */
9545 if (align)
9546 modulus = MIN (modulus, align);
9547
9548 return modulus;
9549 }
9550 }
9551 }
9552
9553 /* If we get here, we were unable to determine anything useful about the
9554 expression. */
9555 return 1;
9556 }
9557
9558 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9559 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9560
9561 static bool
9562 vec_cst_ctor_to_array (tree arg, tree *elts)
9563 {
9564 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9565
9566 if (TREE_CODE (arg) == VECTOR_CST)
9567 {
9568 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9569 elts[i] = VECTOR_CST_ELT (arg, i);
9570 }
9571 else if (TREE_CODE (arg) == CONSTRUCTOR)
9572 {
9573 constructor_elt *elt;
9574
9575 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9576 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9577 return false;
9578 else
9579 elts[i] = elt->value;
9580 }
9581 else
9582 return false;
9583 for (; i < nelts; i++)
9584 elts[i]
9585 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9586 return true;
9587 }
9588
9589 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9590 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9591 NULL_TREE otherwise. */
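/* A hypothetical example with four-element vectors: SEL = { 0, 5, 2, 7 }
   builds the result from elements 0 and 2 of ARG0 and elements 1 and 3
   of ARG1; selector values of NELTS and above index into ARG1.  */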
9592
9593 static tree
9594 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9595 {
9596 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9597 tree *elts;
9598 bool need_ctor = false;
9599
9600 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9601 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9602 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9603 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9604 return NULL_TREE;
9605
9606 elts = XALLOCAVEC (tree, nelts * 3);
9607 if (!vec_cst_ctor_to_array (arg0, elts)
9608 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9609 return NULL_TREE;
9610
9611 for (i = 0; i < nelts; i++)
9612 {
9613 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9614 need_ctor = true;
9615 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9616 }
9617
9618 if (need_ctor)
9619 {
9620 vec<constructor_elt, va_gc> *v;
9621 vec_alloc (v, nelts);
9622 for (i = 0; i < nelts; i++)
9623 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9624 return build_constructor (type, v);
9625 }
9626 else
9627 return build_vector (type, &elts[2 * nelts]);
9628 }
9629
9630 /* Try to fold a pointer difference of type TYPE between two address
9631 expressions of array references AREF0 and AREF1 using location LOC.
9632 Return a simplified expression for the difference or NULL_TREE. */
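/* E.g. (assuming matching element types) &a[i] - &a[j] folds to
   (i - j) * sizeof (a[0]); for multi-dimensional accesses the
   ARRAY_REF bases are handled by the recursion below.  */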
9633
9634 static tree
9635 fold_addr_of_array_ref_difference (location_t loc, tree type,
9636 tree aref0, tree aref1)
9637 {
9638 tree base0 = TREE_OPERAND (aref0, 0);
9639 tree base1 = TREE_OPERAND (aref1, 0);
9640 tree base_offset = build_int_cst (type, 0);
9641
9642 /* If the bases are array references as well, recurse. If the bases
9643 are pointer indirections compute the difference of the pointers.
9644 If the bases are equal, we are set. */
9645 if ((TREE_CODE (base0) == ARRAY_REF
9646 && TREE_CODE (base1) == ARRAY_REF
9647 && (base_offset
9648 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9649 || (INDIRECT_REF_P (base0)
9650 && INDIRECT_REF_P (base1)
9651 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9652 TREE_OPERAND (base0, 0),
9653 TREE_OPERAND (base1, 0))))
9654 || operand_equal_p (base0, base1, 0))
9655 {
9656 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9657 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9658 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9659 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9660 return fold_build2_loc (loc, PLUS_EXPR, type,
9661 base_offset,
9662 fold_build2_loc (loc, MULT_EXPR, type,
9663 diff, esz));
9664 }
9665 return NULL_TREE;
9666 }
9667
9668 /* If the real or vector real constant CST of type TYPE has an exact
9669 inverse, return it, else return NULL. */
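/* For instance, 0.25 has the exact inverse 4.0, so a division by it
   can later become a multiplication, whereas 3.0 has none (1/3 is
   not representable in binary floating point) and yields NULL_TREE.  */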
9670
9671 static tree
9672 exact_inverse (tree type, tree cst)
9673 {
9674 REAL_VALUE_TYPE r;
9675 tree unit_type, *elts;
9676 enum machine_mode mode;
9677 unsigned vec_nelts, i;
9678
9679 switch (TREE_CODE (cst))
9680 {
9681 case REAL_CST:
9682 r = TREE_REAL_CST (cst);
9683
9684 if (exact_real_inverse (TYPE_MODE (type), &r))
9685 return build_real (type, r);
9686
9687 return NULL_TREE;
9688
9689 case VECTOR_CST:
9690 vec_nelts = VECTOR_CST_NELTS (cst);
9691 elts = XALLOCAVEC (tree, vec_nelts);
9692 unit_type = TREE_TYPE (type);
9693 mode = TYPE_MODE (unit_type);
9694
9695 for (i = 0; i < vec_nelts; i++)
9696 {
9697 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9698 if (!exact_real_inverse (mode, &r))
9699 return NULL_TREE;
9700 elts[i] = build_real (unit_type, r);
9701 }
9702
9703 return build_vector (type, elts);
9704
9705 default:
9706 return NULL_TREE;
9707 }
9708 }
9709
9710 /* Mask out the tz least significant bits of X of type TYPE where
9711 tz is the number of trailing zeroes in Y. */
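/* E.g. if Y is 24 (binary 11000, three trailing zeroes), the result
   is X & ~7, i.e. X with its three low bits cleared.  */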
9712 static wide_int
9713 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9714 {
9715 int tz = wi::ctz (y);
9716 if (tz > 0)
9717 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9718 return x;
9719 }
9720
9721 /* Return true when T is an address and is known to be nonzero.
9722 For floating point we further ensure that T is not denormal.
9723 Similar logic is present in nonzero_address in rtlanal.h.
9724
9725 If the return value is based on the assumption that signed overflow
9726 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9727 change *STRICT_OVERFLOW_P. */
9728
9729 static bool
9730 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9731 {
9732 tree type = TREE_TYPE (t);
9733 enum tree_code code;
9734
9735 /* Doing something useful for floating point would need more work. */
9736 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9737 return false;
9738
9739 code = TREE_CODE (t);
9740 switch (TREE_CODE_CLASS (code))
9741 {
9742 case tcc_unary:
9743 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9744 strict_overflow_p);
9745 case tcc_binary:
9746 case tcc_comparison:
9747 return tree_binary_nonzero_warnv_p (code, type,
9748 TREE_OPERAND (t, 0),
9749 TREE_OPERAND (t, 1),
9750 strict_overflow_p);
9751 case tcc_constant:
9752 case tcc_declaration:
9753 case tcc_reference:
9754 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9755
9756 default:
9757 break;
9758 }
9759
9760 switch (code)
9761 {
9762 case TRUTH_NOT_EXPR:
9763 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9764 strict_overflow_p);
9765
9766 case TRUTH_AND_EXPR:
9767 case TRUTH_OR_EXPR:
9768 case TRUTH_XOR_EXPR:
9769 return tree_binary_nonzero_warnv_p (code, type,
9770 TREE_OPERAND (t, 0),
9771 TREE_OPERAND (t, 1),
9772 strict_overflow_p);
9773
9774 case COND_EXPR:
9775 case CONSTRUCTOR:
9776 case OBJ_TYPE_REF:
9777 case ASSERT_EXPR:
9778 case ADDR_EXPR:
9779 case WITH_SIZE_EXPR:
9780 case SSA_NAME:
9781 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9782
9783 case COMPOUND_EXPR:
9784 case MODIFY_EXPR:
9785 case BIND_EXPR:
9786 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9787 strict_overflow_p);
9788
9789 case SAVE_EXPR:
9790 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9791 strict_overflow_p);
9792
9793 case CALL_EXPR:
9794 {
9795 tree fndecl = get_callee_fndecl (t);
9796 if (!fndecl) return false;
9797 if (flag_delete_null_pointer_checks && !flag_check_new
9798 && DECL_IS_OPERATOR_NEW (fndecl)
9799 && !TREE_NOTHROW (fndecl))
9800 return true;
9801 if (flag_delete_null_pointer_checks
9802 && lookup_attribute ("returns_nonnull",
9803 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9804 return true;
9805 return alloca_call_p (t);
9806 }
9807
9808 default:
9809 break;
9810 }
9811 return false;
9812 }
9813
9814 /* Return true when T is an address and is known to be nonzero.
9815 Handle warnings about undefined signed overflow. */
9816
9817 static bool
9818 tree_expr_nonzero_p (tree t)
9819 {
9820 bool ret, strict_overflow_p;
9821
9822 strict_overflow_p = false;
9823 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9824 if (strict_overflow_p)
9825 fold_overflow_warning (("assuming signed overflow does not occur when "
9826 "determining that expression is always "
9827 "non-zero"),
9828 WARN_STRICT_OVERFLOW_MISC);
9829 return ret;
9830 }
9831
9832 /* Fold a binary expression of code CODE and type TYPE with operands
9833 OP0 and OP1. LOC is the location of the resulting expression.
9834 Return the folded expression if folding is successful. Otherwise,
9835 return NULL_TREE. */
9836
9837 tree
9838 fold_binary_loc (location_t loc,
9839 enum tree_code code, tree type, tree op0, tree op1)
9840 {
9841 enum tree_code_class kind = TREE_CODE_CLASS (code);
9842 tree arg0, arg1, tem;
9843 tree t1 = NULL_TREE;
9844 bool strict_overflow_p;
9845 unsigned int prec;
9846
9847 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9848 && TREE_CODE_LENGTH (code) == 2
9849 && op0 != NULL_TREE
9850 && op1 != NULL_TREE);
9851
9852 arg0 = op0;
9853 arg1 = op1;
9854
9855 /* Strip any conversions that don't change the mode. This is
9856 safe for every expression, except for a comparison expression
9857 because its signedness is derived from its operands. So, in
9858 the latter case, only strip conversions that don't change the
9859 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9860 preserved.
9861
9862 Note that this is done as an internal manipulation within the
9863 constant folder, in order to find the simplest representation
9864 of the arguments so that their form can be studied. In any
9865 case, the appropriate type conversions should be put back in
9866 the tree that will get out of the constant folder. */
9867
9868 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9869 {
9870 STRIP_SIGN_NOPS (arg0);
9871 STRIP_SIGN_NOPS (arg1);
9872 }
9873 else
9874 {
9875 STRIP_NOPS (arg0);
9876 STRIP_NOPS (arg1);
9877 }
9878
9879 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9880 constant but we can't do arithmetic on them. */
9881 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9882 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9883 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9884 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9885 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9886 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9887 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9888 {
9889 if (kind == tcc_binary)
9890 {
9891 /* Make sure type and arg0 have the same saturating flag. */
9892 gcc_assert (TYPE_SATURATING (type)
9893 == TYPE_SATURATING (TREE_TYPE (arg0)));
9894 tem = const_binop (code, arg0, arg1);
9895 }
9896 else if (kind == tcc_comparison)
9897 tem = fold_relational_const (code, type, arg0, arg1);
9898 else
9899 tem = NULL_TREE;
9900
9901 if (tem != NULL_TREE)
9902 {
9903 if (TREE_TYPE (tem) != type)
9904 tem = fold_convert_loc (loc, type, tem);
9905 return tem;
9906 }
9907 }
9908
9909 /* If this is a commutative operation, and ARG0 is a constant, move it
9910 to ARG1 to reduce the number of tests below. */
9911 if (commutative_tree_code (code)
9912 && tree_swap_operands_p (arg0, arg1, true))
9913 return fold_build2_loc (loc, code, type, op1, op0);
9914
9915 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9916 to ARG1 to reduce the number of tests below. */
9917 if (kind == tcc_comparison
9918 && tree_swap_operands_p (arg0, arg1, true))
9919 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9920
9921 tem = generic_simplify (loc, code, type, op0, op1);
9922 if (tem)
9923 return tem;
9924
9925 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9926
9927 First check for cases where an arithmetic operation is applied to a
9928 compound, conditional, or comparison operation. Push the arithmetic
9929 operation inside the compound or conditional to see if any folding
9930 can then be done. Convert comparison to conditional for this purpose.
9931 This also optimizes non-constant cases that used to be done in
9932 expand_expr.
9933
9934 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9935 one of the operands is a comparison and the other is a comparison, a
9936 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9937 code below would make the expression more complex. Change it to a
9938 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9939 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9940
9941 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9942 || code == EQ_EXPR || code == NE_EXPR)
9943 && TREE_CODE (type) != VECTOR_TYPE
9944 && ((truth_value_p (TREE_CODE (arg0))
9945 && (truth_value_p (TREE_CODE (arg1))
9946 || (TREE_CODE (arg1) == BIT_AND_EXPR
9947 && integer_onep (TREE_OPERAND (arg1, 1)))))
9948 || (truth_value_p (TREE_CODE (arg1))
9949 && (truth_value_p (TREE_CODE (arg0))
9950 || (TREE_CODE (arg0) == BIT_AND_EXPR
9951 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9952 {
9953 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9954 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9955 : TRUTH_XOR_EXPR,
9956 boolean_type_node,
9957 fold_convert_loc (loc, boolean_type_node, arg0),
9958 fold_convert_loc (loc, boolean_type_node, arg1));
9959
9960 if (code == EQ_EXPR)
9961 tem = invert_truthvalue_loc (loc, tem);
9962
9963 return fold_convert_loc (loc, type, tem);
9964 }
9965
9966 if (TREE_CODE_CLASS (code) == tcc_binary
9967 || TREE_CODE_CLASS (code) == tcc_comparison)
9968 {
9969 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9970 {
9971 tem = fold_build2_loc (loc, code, type,
9972 fold_convert_loc (loc, TREE_TYPE (op0),
9973 TREE_OPERAND (arg0, 1)), op1);
9974 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9975 tem);
9976 }
9977 if (TREE_CODE (arg1) == COMPOUND_EXPR
9978 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9979 {
9980 tem = fold_build2_loc (loc, code, type, op0,
9981 fold_convert_loc (loc, TREE_TYPE (op1),
9982 TREE_OPERAND (arg1, 1)));
9983 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9984 tem);
9985 }
9986
9987 if (TREE_CODE (arg0) == COND_EXPR
9988 || TREE_CODE (arg0) == VEC_COND_EXPR
9989 || COMPARISON_CLASS_P (arg0))
9990 {
9991 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9992 arg0, arg1,
9993 /*cond_first_p=*/1);
9994 if (tem != NULL_TREE)
9995 return tem;
9996 }
9997
9998 if (TREE_CODE (arg1) == COND_EXPR
9999 || TREE_CODE (arg1) == VEC_COND_EXPR
10000 || COMPARISON_CLASS_P (arg1))
10001 {
10002 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10003 arg1, arg0,
10004 /*cond_first_p=*/0);
10005 if (tem != NULL_TREE)
10006 return tem;
10007 }
10008 }
10009
10010 switch (code)
10011 {
10012 case MEM_REF:
10013 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10014 if (TREE_CODE (arg0) == ADDR_EXPR
10015 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10016 {
10017 tree iref = TREE_OPERAND (arg0, 0);
10018 return fold_build2 (MEM_REF, type,
10019 TREE_OPERAND (iref, 0),
10020 int_const_binop (PLUS_EXPR, arg1,
10021 TREE_OPERAND (iref, 1)));
10022 }
10023
10024 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10025 if (TREE_CODE (arg0) == ADDR_EXPR
10026 && handled_component_p (TREE_OPERAND (arg0, 0)))
10027 {
10028 tree base;
10029 HOST_WIDE_INT coffset;
10030 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10031 &coffset);
10032 if (!base)
10033 return NULL_TREE;
10034 return fold_build2 (MEM_REF, type,
10035 build_fold_addr_expr (base),
10036 int_const_binop (PLUS_EXPR, arg1,
10037 size_int (coffset)));
10038 }
10039
10040 return NULL_TREE;
10041
10042 case POINTER_PLUS_EXPR:
10043 /* 0 +p index -> (type)index */
10044 if (integer_zerop (arg0))
10045 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10046
10047 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10048 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10049 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10050 return fold_convert_loc (loc, type,
10051 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10052 fold_convert_loc (loc, sizetype,
10053 arg1),
10054 fold_convert_loc (loc, sizetype,
10055 arg0)));
10056
10057 /* (PTR +p B) +p A -> PTR +p (B + A) */
10058 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10059 {
10060 tree inner;
10061 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10062 tree arg00 = TREE_OPERAND (arg0, 0);
10063 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10064 arg01, fold_convert_loc (loc, sizetype, arg1));
10065 return fold_convert_loc (loc, type,
10066 fold_build_pointer_plus_loc (loc,
10067 arg00, inner));
10068 }
10069
10070 /* PTR_CST +p CST -> CST1 */
10071 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10072 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10073 fold_convert_loc (loc, type, arg1));
10074
10075 return NULL_TREE;
10076
10077 case PLUS_EXPR:
10078 /* A + (-B) -> A - B */
10079 if (TREE_CODE (arg1) == NEGATE_EXPR
10080 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10081 return fold_build2_loc (loc, MINUS_EXPR, type,
10082 fold_convert_loc (loc, type, arg0),
10083 fold_convert_loc (loc, type,
10084 TREE_OPERAND (arg1, 0)));
10085 /* (-A) + B -> B - A */
10086 if (TREE_CODE (arg0) == NEGATE_EXPR
10087 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10088 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10089 return fold_build2_loc (loc, MINUS_EXPR, type,
10090 fold_convert_loc (loc, type, arg1),
10091 fold_convert_loc (loc, type,
10092 TREE_OPERAND (arg0, 0)));
10093
10094 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10095 {
10096 /* Convert ~A + 1 to -A. */
10097 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10098 && integer_each_onep (arg1))
10099 return fold_build1_loc (loc, NEGATE_EXPR, type,
10100 fold_convert_loc (loc, type,
10101 TREE_OPERAND (arg0, 0)));
10102
10103 /* ~X + X is -1. */
10104 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10105 && !TYPE_OVERFLOW_TRAPS (type))
10106 {
10107 tree tem = TREE_OPERAND (arg0, 0);
10108
10109 STRIP_NOPS (tem);
10110 if (operand_equal_p (tem, arg1, 0))
10111 {
10112 t1 = build_all_ones_cst (type);
10113 return omit_one_operand_loc (loc, type, t1, arg1);
10114 }
10115 }
10116
10117 /* X + ~X is -1. */
10118 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10119 && !TYPE_OVERFLOW_TRAPS (type))
10120 {
10121 tree tem = TREE_OPERAND (arg1, 0);
10122
10123 STRIP_NOPS (tem);
10124 if (operand_equal_p (arg0, tem, 0))
10125 {
10126 t1 = build_all_ones_cst (type);
10127 return omit_one_operand_loc (loc, type, t1, arg0);
10128 }
10129 }
10130
10131 /* X + (X / CST) * -CST is X % CST. */
10132 if (TREE_CODE (arg1) == MULT_EXPR
10133 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10134 && operand_equal_p (arg0,
10135 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10136 {
10137 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10138 tree cst1 = TREE_OPERAND (arg1, 1);
10139 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10140 cst1, cst0);
10141 if (sum && integer_zerop (sum))
10142 return fold_convert_loc (loc, type,
10143 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10144 TREE_TYPE (arg0), arg0,
10145 cst0));
10146 }
10147 }
10148
10149 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10150 one. Make sure the type is not saturating and has the signedness of
10151 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10152 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10153 if ((TREE_CODE (arg0) == MULT_EXPR
10154 || TREE_CODE (arg1) == MULT_EXPR)
10155 && !TYPE_SATURATING (type)
10156 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10157 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10158 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10159 {
10160 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10161 if (tem)
10162 return tem;
10163 }
10164
10165 if (! FLOAT_TYPE_P (type))
10166 {
10167 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10168 with a constant, and the two constants have no bits in common,
10169 we should treat this as a BIT_IOR_EXPR since this may produce more
10170 simplifications. */
10171 if (TREE_CODE (arg0) == BIT_AND_EXPR
10172 && TREE_CODE (arg1) == BIT_AND_EXPR
10173 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10174 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10175 && wi::bit_and (TREE_OPERAND (arg0, 1),
10176 TREE_OPERAND (arg1, 1)) == 0)
10177 {
10178 code = BIT_IOR_EXPR;
10179 goto bit_ior;
10180 }
10181
10182 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10183 (plus (plus (mult) (mult)) (foo)) so that we can
10184 take advantage of the factoring cases below. */
10185 if (TYPE_OVERFLOW_WRAPS (type)
10186 && (((TREE_CODE (arg0) == PLUS_EXPR
10187 || TREE_CODE (arg0) == MINUS_EXPR)
10188 && TREE_CODE (arg1) == MULT_EXPR)
10189 || ((TREE_CODE (arg1) == PLUS_EXPR
10190 || TREE_CODE (arg1) == MINUS_EXPR)
10191 && TREE_CODE (arg0) == MULT_EXPR)))
10192 {
10193 tree parg0, parg1, parg, marg;
10194 enum tree_code pcode;
10195
10196 if (TREE_CODE (arg1) == MULT_EXPR)
10197 parg = arg0, marg = arg1;
10198 else
10199 parg = arg1, marg = arg0;
10200 pcode = TREE_CODE (parg);
10201 parg0 = TREE_OPERAND (parg, 0);
10202 parg1 = TREE_OPERAND (parg, 1);
10203 STRIP_NOPS (parg0);
10204 STRIP_NOPS (parg1);
10205
10206 if (TREE_CODE (parg0) == MULT_EXPR
10207 && TREE_CODE (parg1) != MULT_EXPR)
10208 return fold_build2_loc (loc, pcode, type,
10209 fold_build2_loc (loc, PLUS_EXPR, type,
10210 fold_convert_loc (loc, type,
10211 parg0),
10212 fold_convert_loc (loc, type,
10213 marg)),
10214 fold_convert_loc (loc, type, parg1));
10215 if (TREE_CODE (parg0) != MULT_EXPR
10216 && TREE_CODE (parg1) == MULT_EXPR)
10217 return
10218 fold_build2_loc (loc, PLUS_EXPR, type,
10219 fold_convert_loc (loc, type, parg0),
10220 fold_build2_loc (loc, pcode, type,
10221 fold_convert_loc (loc, type, marg),
10222 fold_convert_loc (loc, type,
10223 parg1)));
10224 }
10225 }
10226 else
10227 {
10228 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10229 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10230 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10231
10232 /* Likewise if the operands are reversed. */
10233 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10234 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10235
10236 /* Convert X + -C into X - C. */
10237 if (TREE_CODE (arg1) == REAL_CST
10238 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10239 {
10240 tem = fold_negate_const (arg1, type);
10241 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10242 return fold_build2_loc (loc, MINUS_EXPR, type,
10243 fold_convert_loc (loc, type, arg0),
10244 fold_convert_loc (loc, type, tem));
10245 }
10246
10247 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10248 to __complex__ ( x, y ). This is not the same for SNaNs or
10249 if signed zeros are involved. */
10250 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10251 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10252 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10253 {
10254 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10255 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10256 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10257 bool arg0rz = false, arg0iz = false;
10258 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10259 || (arg0i && (arg0iz = real_zerop (arg0i))))
10260 {
10261 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10262 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10263 if (arg0rz && arg1i && real_zerop (arg1i))
10264 {
10265 tree rp = arg1r ? arg1r
10266 : build1 (REALPART_EXPR, rtype, arg1);
10267 tree ip = arg0i ? arg0i
10268 : build1 (IMAGPART_EXPR, rtype, arg0);
10269 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10270 }
10271 else if (arg0iz && arg1r && real_zerop (arg1r))
10272 {
10273 tree rp = arg0r ? arg0r
10274 : build1 (REALPART_EXPR, rtype, arg0);
10275 tree ip = arg1i ? arg1i
10276 : build1 (IMAGPART_EXPR, rtype, arg1);
10277 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10278 }
10279 }
10280 }
10281
10282 if (flag_unsafe_math_optimizations
10283 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10284 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10285 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10286 return tem;
10287
10288 /* Convert x+x into x*2.0. */
10289 if (operand_equal_p (arg0, arg1, 0)
10290 && SCALAR_FLOAT_TYPE_P (type))
10291 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10292 build_real (type, dconst2));
10293
10294 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10295 We associate floats only if the user has specified
10296 -fassociative-math. */
10297 if (flag_associative_math
10298 && TREE_CODE (arg1) == PLUS_EXPR
10299 && TREE_CODE (arg0) != MULT_EXPR)
10300 {
10301 tree tree10 = TREE_OPERAND (arg1, 0);
10302 tree tree11 = TREE_OPERAND (arg1, 1);
10303 if (TREE_CODE (tree11) == MULT_EXPR
10304 && TREE_CODE (tree10) == MULT_EXPR)
10305 {
10306 tree tree0;
10307 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10308 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10309 }
10310 }
10311 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10312 We associate floats only if the user has specified
10313 -fassociative-math. */
10314 if (flag_associative_math
10315 && TREE_CODE (arg0) == PLUS_EXPR
10316 && TREE_CODE (arg1) != MULT_EXPR)
10317 {
10318 tree tree00 = TREE_OPERAND (arg0, 0);
10319 tree tree01 = TREE_OPERAND (arg0, 1);
10320 if (TREE_CODE (tree01) == MULT_EXPR
10321 && TREE_CODE (tree00) == MULT_EXPR)
10322 {
10323 tree tree0;
10324 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10325 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10326 }
10327 }
10328 }
10329
10330 bit_rotate:
10331 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
10332 is a rotate of A by C1 bits. */
10333 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
10334 is a rotate of A by B bits. */
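  /* E.g. for a 32-bit unsigned A, (A << 3) + (A >> 29) folds to an
     LROTATE_EXPR of A by 3, and (A << B) + (A >> (32 - B)) to a rotate
     of A by the variable amount B. */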
10335 {
10336 enum tree_code code0, code1;
10337 tree rtype;
10338 code0 = TREE_CODE (arg0);
10339 code1 = TREE_CODE (arg1);
10340 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10341 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10342 && operand_equal_p (TREE_OPERAND (arg0, 0),
10343 TREE_OPERAND (arg1, 0), 0)
10344 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10345 TYPE_UNSIGNED (rtype))
10346 /* Only create rotates in complete modes. Other cases are not
10347 expanded properly. */
10348 && (element_precision (rtype)
10349 == element_precision (TYPE_MODE (rtype))))
10350 {
10351 tree tree01, tree11;
10352 enum tree_code code01, code11;
10353
10354 tree01 = TREE_OPERAND (arg0, 1);
10355 tree11 = TREE_OPERAND (arg1, 1);
10356 STRIP_NOPS (tree01);
10357 STRIP_NOPS (tree11);
10358 code01 = TREE_CODE (tree01);
10359 code11 = TREE_CODE (tree11);
10360 if (code01 == INTEGER_CST
10361 && code11 == INTEGER_CST
10362 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10363 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10364 {
10365 tem = build2_loc (loc, LROTATE_EXPR,
10366 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10367 TREE_OPERAND (arg0, 0),
10368 code0 == LSHIFT_EXPR ? tree01 : tree11);
10369 return fold_convert_loc (loc, type, tem);
10370 }
10371 else if (code11 == MINUS_EXPR)
10372 {
10373 tree tree110, tree111;
10374 tree110 = TREE_OPERAND (tree11, 0);
10375 tree111 = TREE_OPERAND (tree11, 1);
10376 STRIP_NOPS (tree110);
10377 STRIP_NOPS (tree111);
10378 if (TREE_CODE (tree110) == INTEGER_CST
10379 && 0 == compare_tree_int (tree110,
10380 element_precision
10381 (TREE_TYPE (TREE_OPERAND
10382 (arg0, 0))))
10383 && operand_equal_p (tree01, tree111, 0))
10384 return
10385 fold_convert_loc (loc, type,
10386 build2 ((code0 == LSHIFT_EXPR
10387 ? LROTATE_EXPR
10388 : RROTATE_EXPR),
10389 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10390 TREE_OPERAND (arg0, 0), tree01));
10391 }
10392 else if (code01 == MINUS_EXPR)
10393 {
10394 tree tree010, tree011;
10395 tree010 = TREE_OPERAND (tree01, 0);
10396 tree011 = TREE_OPERAND (tree01, 1);
10397 STRIP_NOPS (tree010);
10398 STRIP_NOPS (tree011);
10399 if (TREE_CODE (tree010) == INTEGER_CST
10400 && 0 == compare_tree_int (tree010,
10401 element_precision
10402 (TREE_TYPE (TREE_OPERAND
10403 (arg0, 0))))
10404 && operand_equal_p (tree11, tree011, 0))
10405 return fold_convert_loc
10406 (loc, type,
10407 build2 ((code0 != LSHIFT_EXPR
10408 ? LROTATE_EXPR
10409 : RROTATE_EXPR),
10410 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10411 TREE_OPERAND (arg0, 0), tree11));
10412 }
10413 }
10414 }
10415
10416 associate:
10417 /* In most languages, we can't associate operations on floats through
10418 parentheses. Rather than remember where the parentheses were, we
10419 don't associate floats at all, unless the user has specified
10420 -fassociative-math.
10421 We also need to make sure the type is not saturating. */
10422
10423 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10424 && !TYPE_SATURATING (type))
10425 {
10426 tree var0, con0, lit0, minus_lit0;
10427 tree var1, con1, lit1, minus_lit1;
10428 tree atype = type;
10429 bool ok = true;
10430
10431 /* Split both trees into variables, constants, and literals. Then
10432 associate each group together, the constants with literals,
10433 then the result with variables. This increases the chances of
10434 literals being recombined later and of generating relocatable
10435 expressions for the sum of a constant and literal. */
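	 /* E.g. when folding (x + 1) + (y + 2), the splits yield
	    var0 = x, lit0 = 1, var1 = y, lit1 = 2; the literals
	    associate to 3 and the sum is rebuilt as (x + y) + 3. */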
10436 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10437 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10438 code == MINUS_EXPR);
10439
10440 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10441 if (code == MINUS_EXPR)
10442 code = PLUS_EXPR;
10443
10444 /* With undefined overflow prefer doing association in a type
10445 which wraps on overflow, if that is one of the operand types. */
10446 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10447 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10448 {
10449 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10450 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10451 atype = TREE_TYPE (arg0);
10452 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10453 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10454 atype = TREE_TYPE (arg1);
10455 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10456 }
10457
10458 /* With undefined overflow we can only associate constants with one
10459 variable, and constants whose association doesn't overflow. */
10460 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10461 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10462 {
10463 if (var0 && var1)
10464 {
10465 tree tmp0 = var0;
10466 tree tmp1 = var1;
10467
10468 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10469 tmp0 = TREE_OPERAND (tmp0, 0);
10470 if (CONVERT_EXPR_P (tmp0)
10471 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10472 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10473 <= TYPE_PRECISION (atype)))
10474 tmp0 = TREE_OPERAND (tmp0, 0);
10475 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10476 tmp1 = TREE_OPERAND (tmp1, 0);
10477 if (CONVERT_EXPR_P (tmp1)
10478 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10479 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10480 <= TYPE_PRECISION (atype)))
10481 tmp1 = TREE_OPERAND (tmp1, 0);
10482 /* The only case we can still associate with two variables
10483 is if they are the same, modulo negation and bit-pattern
10484 preserving conversions. */
10485 if (!operand_equal_p (tmp0, tmp1, 0))
10486 ok = false;
10487 }
10488 }
10489
10490 /* Only do something if we found more than two objects. Otherwise,
10491 nothing has changed and we risk infinite recursion. */
10492 if (ok
10493 && (2 < ((var0 != 0) + (var1 != 0)
10494 + (con0 != 0) + (con1 != 0)
10495 + (lit0 != 0) + (lit1 != 0)
10496 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10497 {
10498 bool any_overflows = false;
10499 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10500 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10501 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10502 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10503 var0 = associate_trees (loc, var0, var1, code, atype);
10504 con0 = associate_trees (loc, con0, con1, code, atype);
10505 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10506 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10507 code, atype);
10508
10509 /* Preserve the MINUS_EXPR if the negative part of the literal is
10510 greater than the positive part. Otherwise, the multiplicative
10511 folding code (i.e. extract_muldiv) may be fooled when
10512 unsigned constants are subtracted, as in the following
10513 example: ((X*2 + 4) - 8U)/2. */
10514 if (minus_lit0 && lit0)
10515 {
10516 if (TREE_CODE (lit0) == INTEGER_CST
10517 && TREE_CODE (minus_lit0) == INTEGER_CST
10518 && tree_int_cst_lt (lit0, minus_lit0))
10519 {
10520 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10521 MINUS_EXPR, atype);
10522 lit0 = 0;
10523 }
10524 else
10525 {
10526 lit0 = associate_trees (loc, lit0, minus_lit0,
10527 MINUS_EXPR, atype);
10528 minus_lit0 = 0;
10529 }
10530 }
10531
10532 /* Don't introduce overflows through reassociation. */
10533 if (!any_overflows
10534 && ((lit0 && TREE_OVERFLOW (lit0))
10535 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10536 return NULL_TREE;
10537
10538 if (minus_lit0)
10539 {
10540 if (con0 == 0)
10541 return
10542 fold_convert_loc (loc, type,
10543 associate_trees (loc, var0, minus_lit0,
10544 MINUS_EXPR, atype));
10545 else
10546 {
10547 con0 = associate_trees (loc, con0, minus_lit0,
10548 MINUS_EXPR, atype);
10549 return
10550 fold_convert_loc (loc, type,
10551 associate_trees (loc, var0, con0,
10552 PLUS_EXPR, atype));
10553 }
10554 }
10555
10556 con0 = associate_trees (loc, con0, lit0, code, atype);
10557 return
10558 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10559 code, atype));
10560 }
10561 }
10562
10563 return NULL_TREE;
10564
10565 case MINUS_EXPR:
10566 /* Pointer simplifications for subtraction, simple reassociations. */
10567 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10568 {
10569 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10570 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10571 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10572 {
10573 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10574 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10575 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10576 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10577 return fold_build2_loc (loc, PLUS_EXPR, type,
10578 fold_build2_loc (loc, MINUS_EXPR, type,
10579 arg00, arg10),
10580 fold_build2_loc (loc, MINUS_EXPR, type,
10581 arg01, arg11));
10582 }
10583 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10584 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10585 {
10586 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10587 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10588 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10589 fold_convert_loc (loc, type, arg1));
10590 if (tmp)
10591 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10592 }
10593 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10594 simplifies. */
10595 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10596 {
10597 tree arg10 = fold_convert_loc (loc, type,
10598 TREE_OPERAND (arg1, 0));
10599 tree arg11 = fold_convert_loc (loc, type,
10600 TREE_OPERAND (arg1, 1));
10601 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10602 fold_convert_loc (loc, type, arg0),
10603 arg10);
10604 if (tmp)
10605 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10606 }
10607 }
10608 /* A - (-B) -> A + B */
10609 if (TREE_CODE (arg1) == NEGATE_EXPR)
10610 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10611 fold_convert_loc (loc, type,
10612 TREE_OPERAND (arg1, 0)));
10613 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10614 if (TREE_CODE (arg0) == NEGATE_EXPR
10615 && negate_expr_p (arg1)
10616 && reorder_operands_p (arg0, arg1))
10617 return fold_build2_loc (loc, MINUS_EXPR, type,
10618 fold_convert_loc (loc, type,
10619 negate_expr (arg1)),
10620 fold_convert_loc (loc, type,
10621 TREE_OPERAND (arg0, 0)));
10622 /* Convert -A - 1 to ~A. */
10623 if (TREE_CODE (arg0) == NEGATE_EXPR
10624 && integer_each_onep (arg1)
10625 && !TYPE_OVERFLOW_TRAPS (type))
10626 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10627 fold_convert_loc (loc, type,
10628 TREE_OPERAND (arg0, 0)));
10629
10630 /* Convert -1 - A to ~A. */
10631 if (TREE_CODE (type) != COMPLEX_TYPE
10632 && integer_all_onesp (arg0))
10633 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10634
10635
10636 /* X - (X / Y) * Y is X % Y. */
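      /* E.g. with X == 7 and Y == 3: 7 - (7 / 3) * 3 == 7 - 6 == 1,
	 which is 7 % 3; TRUNC_MOD_EXPR matches the truncating
	 TRUNC_DIV_EXPR required by the pattern. */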
10637 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10638 && TREE_CODE (arg1) == MULT_EXPR
10639 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10640 && operand_equal_p (arg0,
10641 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10642 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10643 TREE_OPERAND (arg1, 1), 0))
10644 return
10645 fold_convert_loc (loc, type,
10646 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10647 arg0, TREE_OPERAND (arg1, 1)));
10648
10649 if (! FLOAT_TYPE_P (type))
10650 {
10651 if (integer_zerop (arg0))
10652 return negate_expr (fold_convert_loc (loc, type, arg1));
10653
10654 /* Fold A - (A & B) into ~B & A. */
10655 if (!TREE_SIDE_EFFECTS (arg0)
10656 && TREE_CODE (arg1) == BIT_AND_EXPR)
10657 {
10658 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10659 {
10660 tree arg10 = fold_convert_loc (loc, type,
10661 TREE_OPERAND (arg1, 0));
10662 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10663 fold_build1_loc (loc, BIT_NOT_EXPR,
10664 type, arg10),
10665 fold_convert_loc (loc, type, arg0));
10666 }
10667 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10668 {
10669 tree arg11 = fold_convert_loc (loc,
10670 type, TREE_OPERAND (arg1, 1));
10671 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10672 fold_build1_loc (loc, BIT_NOT_EXPR,
10673 type, arg11),
10674 fold_convert_loc (loc, type, arg0));
10675 }
10676 }
10677
10678 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10679 any power of 2 minus 1. */
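	  /* E.g. with B == 7 (i.e. 2^3 - 1):
	     (A & ~7) - (A & 7) folds to (A ^ 7) - 7. */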
10680 if (TREE_CODE (arg0) == BIT_AND_EXPR
10681 && TREE_CODE (arg1) == BIT_AND_EXPR
10682 && operand_equal_p (TREE_OPERAND (arg0, 0),
10683 TREE_OPERAND (arg1, 0), 0))
10684 {
10685 tree mask0 = TREE_OPERAND (arg0, 1);
10686 tree mask1 = TREE_OPERAND (arg1, 1);
10687 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10688
10689 if (operand_equal_p (tem, mask1, 0))
10690 {
10691 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10692 TREE_OPERAND (arg0, 0), mask1);
10693 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10694 }
10695 }
10696 }
10697
10698 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10699 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10700 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10701
10702 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10703 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10704 (-ARG1 + ARG0) reduces to -ARG1. */
10705 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10706 return negate_expr (fold_convert_loc (loc, type, arg1));
10707
10708 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10709 __complex__ ( x, -y ). This is not the same for SNaNs or if
10710 signed zeros are involved. */
10711 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10712 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10713 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10714 {
10715 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10716 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10717 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10718 bool arg0rz = false, arg0iz = false;
10719 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10720 || (arg0i && (arg0iz = real_zerop (arg0i))))
10721 {
10722 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10723 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10724 if (arg0rz && arg1i && real_zerop (arg1i))
10725 {
10726 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10727 arg1r ? arg1r
10728 : build1 (REALPART_EXPR, rtype, arg1));
10729 tree ip = arg0i ? arg0i
10730 : build1 (IMAGPART_EXPR, rtype, arg0);
10731 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10732 }
10733 else if (arg0iz && arg1r && real_zerop (arg1r))
10734 {
10735 tree rp = arg0r ? arg0r
10736 : build1 (REALPART_EXPR, rtype, arg0);
10737 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10738 arg1i ? arg1i
10739 : build1 (IMAGPART_EXPR, rtype, arg1));
10740 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10741 }
10742 }
10743 }
10744
10745 /* A - B -> A + (-B) if B is easily negatable. */
10746 if (negate_expr_p (arg1)
10747 && ((FLOAT_TYPE_P (type)
10748 /* Avoid this transformation if B is a positive REAL_CST. */
10749 && (TREE_CODE (arg1) != REAL_CST
10750 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10751 || INTEGRAL_TYPE_P (type)))
10752 return fold_build2_loc (loc, PLUS_EXPR, type,
10753 fold_convert_loc (loc, type, arg0),
10754 fold_convert_loc (loc, type,
10755 negate_expr (arg1)));
10756
10757 /* Try folding difference of addresses. */
10758 {
10759 HOST_WIDE_INT diff;
10760
10761 if ((TREE_CODE (arg0) == ADDR_EXPR
10762 || TREE_CODE (arg1) == ADDR_EXPR)
10763 && ptr_difference_const (arg0, arg1, &diff))
10764 return build_int_cst_type (type, diff);
10765 }
10766
10767 /* Fold &a[i] - &a[j] to i-j. */
10768 if (TREE_CODE (arg0) == ADDR_EXPR
10769 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10770 && TREE_CODE (arg1) == ADDR_EXPR
10771 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10772 {
10773 tree tem = fold_addr_of_array_ref_difference (loc, type,
10774 TREE_OPERAND (arg0, 0),
10775 TREE_OPERAND (arg1, 0));
10776 if (tem)
10777 return tem;
10778 }
10779
10780 if (FLOAT_TYPE_P (type)
10781 && flag_unsafe_math_optimizations
10782 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10783 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10784 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10785 return tem;
10786
10787 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same, or
10788 one of them being 1. Make sure the type is not saturating and has the signedness of
10789 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10790 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10791 if ((TREE_CODE (arg0) == MULT_EXPR
10792 || TREE_CODE (arg1) == MULT_EXPR)
10793 && !TYPE_SATURATING (type)
10794 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10795 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10796 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10797 {
10798 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10799 if (tem)
10800 return tem;
10801 }
10802
10803 goto associate;
10804
10805 case MULT_EXPR:
10806 /* (-A) * (-B) -> A * B */
10807 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10808 return fold_build2_loc (loc, MULT_EXPR, type,
10809 fold_convert_loc (loc, type,
10810 TREE_OPERAND (arg0, 0)),
10811 fold_convert_loc (loc, type,
10812 negate_expr (arg1)));
10813 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10814 return fold_build2_loc (loc, MULT_EXPR, type,
10815 fold_convert_loc (loc, type,
10816 negate_expr (arg0)),
10817 fold_convert_loc (loc, type,
10818 TREE_OPERAND (arg1, 0)));
10819
10820 if (! FLOAT_TYPE_P (type))
10821 {
10822 /* Transform x * -1 into -x. Make sure to do the negation
10823 on the original operand with conversions not stripped
10824 because we can only strip non-sign-changing conversions. */
10825 if (integer_minus_onep (arg1))
10826 return fold_convert_loc (loc, type, negate_expr (op0));
10827 /* Transform x * -C into -x * C if x is easily negatable. */
10828 if (TREE_CODE (arg1) == INTEGER_CST
10829 && tree_int_cst_sgn (arg1) == -1
10830 && negate_expr_p (arg0)
10831 && (tem = negate_expr (arg1)) != arg1
10832 && !TREE_OVERFLOW (tem))
10833 return fold_build2_loc (loc, MULT_EXPR, type,
10834 fold_convert_loc (loc, type,
10835 negate_expr (arg0)),
10836 tem);
10837
10838 /* (a * (1 << b)) is (a << b) */
10839 if (TREE_CODE (arg1) == LSHIFT_EXPR
10840 && integer_onep (TREE_OPERAND (arg1, 0)))
10841 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10842 TREE_OPERAND (arg1, 1));
10843 if (TREE_CODE (arg0) == LSHIFT_EXPR
10844 && integer_onep (TREE_OPERAND (arg0, 0)))
10845 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10846 TREE_OPERAND (arg0, 1));
10847
10848 /* (A + A) * C -> A * 2 * C */
10849 if (TREE_CODE (arg0) == PLUS_EXPR
10850 && TREE_CODE (arg1) == INTEGER_CST
10851 && operand_equal_p (TREE_OPERAND (arg0, 0),
10852 TREE_OPERAND (arg0, 1), 0))
10853 return fold_build2_loc (loc, MULT_EXPR, type,
10854 omit_one_operand_loc (loc, type,
10855 TREE_OPERAND (arg0, 0),
10856 TREE_OPERAND (arg0, 1)),
10857 fold_build2_loc (loc, MULT_EXPR, type,
10858 build_int_cst (type, 2), arg1));
10859
10860 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10861 sign-changing only. */
10862 if (TREE_CODE (arg1) == INTEGER_CST
10863 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10864 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10865 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10866
10867 strict_overflow_p = false;
10868 if (TREE_CODE (arg1) == INTEGER_CST
10869 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10870 &strict_overflow_p)))
10871 {
10872 if (strict_overflow_p)
10873 fold_overflow_warning (("assuming signed overflow does not "
10874 "occur when simplifying "
10875 "multiplication"),
10876 WARN_STRICT_OVERFLOW_MISC);
10877 return fold_convert_loc (loc, type, tem);
10878 }
10879
10880 /* Optimize z * conj(z) for integer complex numbers. */
10881 if (TREE_CODE (arg0) == CONJ_EXPR
10882 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10883 return fold_mult_zconjz (loc, type, arg1);
10884 if (TREE_CODE (arg1) == CONJ_EXPR
10885 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10886 return fold_mult_zconjz (loc, type, arg0);
10887 }
10888 else
10889 {
10890 /* Maybe fold x * 0 to 0. The expressions aren't the same
10891 when x is NaN, since x * 0 is also NaN. Nor are they the
10892 same in modes with signed zeros, since multiplying a
10893 negative value by 0 gives -0, not +0. */
10894 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10895 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10896 && real_zerop (arg1))
10897 return omit_one_operand_loc (loc, type, arg1, arg0);
10898 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10899 Likewise for complex arithmetic with signed zeros. */
10900 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10901 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10902 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10903 && real_onep (arg1))
10904 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10905
10906 /* Transform x * -1.0 into -x. */
10907 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10908 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10909 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10910 && real_minus_onep (arg1))
10911 return fold_convert_loc (loc, type, negate_expr (arg0));
10912
10913 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10914 the result for floating point types due to rounding, so it is applied
10915 only if -fassociative-math was specified. */
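	  /* E.g. (2.0 / x) * 3.0 becomes 6.0 / x; const_binop folds the
	     2.0 * 3.0 product at compile time. */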
10916 if (flag_associative_math
10917 && TREE_CODE (arg0) == RDIV_EXPR
10918 && TREE_CODE (arg1) == REAL_CST
10919 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10920 {
10921 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10922 arg1);
10923 if (tem)
10924 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10925 TREE_OPERAND (arg0, 1));
10926 }
10927
10928 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10929 if (operand_equal_p (arg0, arg1, 0))
10930 {
10931 tree tem = fold_strip_sign_ops (arg0);
10932 if (tem != NULL_TREE)
10933 {
10934 tem = fold_convert_loc (loc, type, tem);
10935 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10936 }
10937 }
10938
10939 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10940 This is not the same for NaNs or if signed zeros are
10941 involved. */
10942 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10943 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10944 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10945 && TREE_CODE (arg1) == COMPLEX_CST
10946 && real_zerop (TREE_REALPART (arg1)))
10947 {
10948 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10949 if (real_onep (TREE_IMAGPART (arg1)))
10950 return
10951 fold_build2_loc (loc, COMPLEX_EXPR, type,
10952 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10953 rtype, arg0)),
10954 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10955 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10956 return
10957 fold_build2_loc (loc, COMPLEX_EXPR, type,
10958 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10959 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10960 rtype, arg0)));
10961 }
10962
10963 /* Optimize z * conj(z) for floating point complex numbers.
10964 Guarded by flag_unsafe_math_optimizations as non-finite
10965 imaginary components don't produce scalar results. */
10966 if (flag_unsafe_math_optimizations
10967 && TREE_CODE (arg0) == CONJ_EXPR
10968 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10969 return fold_mult_zconjz (loc, type, arg1);
10970 if (flag_unsafe_math_optimizations
10971 && TREE_CODE (arg1) == CONJ_EXPR
10972 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10973 return fold_mult_zconjz (loc, type, arg0);
10974
10975 if (flag_unsafe_math_optimizations)
10976 {
10977 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10978 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10979
10980 /* Optimizations of root(...)*root(...). */
10981 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10982 {
10983 tree rootfn, arg;
10984 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10985 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10986
10987 /* Optimize sqrt(x)*sqrt(x) as x. */
10988 if (BUILTIN_SQRT_P (fcode0)
10989 && operand_equal_p (arg00, arg10, 0)
10990 && ! HONOR_SNANS (TYPE_MODE (type)))
10991 return arg00;
10992
10993 /* Optimize root(x)*root(y) as root(x*y). */
10994 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10995 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10996 return build_call_expr_loc (loc, rootfn, 1, arg);
10997 }
10998
10999 /* Optimize expN(x)*expN(y) as expN(x+y). */
11000 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11001 {
11002 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11003 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11004 CALL_EXPR_ARG (arg0, 0),
11005 CALL_EXPR_ARG (arg1, 0));
11006 return build_call_expr_loc (loc, expfn, 1, arg);
11007 }
11008
11009 /* Optimizations of pow(...)*pow(...). */
11010 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11011 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11012 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11013 {
11014 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11015 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11016 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11017 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11018
11019 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11020 if (operand_equal_p (arg01, arg11, 0))
11021 {
11022 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11023 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11024 arg00, arg10);
11025 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11026 }
11027
11028 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11029 if (operand_equal_p (arg00, arg10, 0))
11030 {
11031 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11032 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11033 arg01, arg11);
11034 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11035 }
11036 }
11037
11038 /* Optimize tan(x)*cos(x) as sin(x). */
11039 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11040 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11041 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11042 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11043 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11044 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11045 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11046 CALL_EXPR_ARG (arg1, 0), 0))
11047 {
11048 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11049
11050 if (sinfn != NULL_TREE)
11051 return build_call_expr_loc (loc, sinfn, 1,
11052 CALL_EXPR_ARG (arg0, 0));
11053 }
11054
11055 /* Optimize x*pow(x,c) as pow(x,c+1). */
11056 if (fcode1 == BUILT_IN_POW
11057 || fcode1 == BUILT_IN_POWF
11058 || fcode1 == BUILT_IN_POWL)
11059 {
11060 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11061 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11062 if (TREE_CODE (arg11) == REAL_CST
11063 && !TREE_OVERFLOW (arg11)
11064 && operand_equal_p (arg0, arg10, 0))
11065 {
11066 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11067 REAL_VALUE_TYPE c;
11068 tree arg;
11069
11070 c = TREE_REAL_CST (arg11);
11071 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11072 arg = build_real (type, c);
11073 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11074 }
11075 }
11076
11077 /* Optimize pow(x,c)*x as pow(x,c+1). */
11078 if (fcode0 == BUILT_IN_POW
11079 || fcode0 == BUILT_IN_POWF
11080 || fcode0 == BUILT_IN_POWL)
11081 {
11082 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11083 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11084 if (TREE_CODE (arg01) == REAL_CST
11085 && !TREE_OVERFLOW (arg01)
11086 && operand_equal_p (arg1, arg00, 0))
11087 {
11088 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11089 REAL_VALUE_TYPE c;
11090 tree arg;
11091
11092 c = TREE_REAL_CST (arg01);
11093 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11094 arg = build_real (type, c);
11095 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11096 }
11097 }
11098
11099 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
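	      /* Keeping the canonical pow (x, 2.0) form lets the pow
		 folds above combine it further (e.g. with pow (x, c)
		 into pow (x, c + 2.0)), while expansion of the constant
		 power still produces a plain multiply. */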
11100 if (!in_gimple_form
11101 && optimize
11102 && operand_equal_p (arg0, arg1, 0))
11103 {
11104 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11105
11106 if (powfn)
11107 {
11108 tree arg = build_real (type, dconst2);
11109 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11110 }
11111 }
11112 }
11113 }
11114 goto associate;
11115
11116 case BIT_IOR_EXPR:
11117 bit_ior:
11118 if (operand_equal_p (arg0, arg1, 0))
11119 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11120
11121 /* ~X | X is -1. */
11122 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11123 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11124 {
11125 t1 = build_zero_cst (type);
11126 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11127 return omit_one_operand_loc (loc, type, t1, arg1);
11128 }
11129
11130 /* X | ~X is -1. */
11131 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11132 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11133 {
11134 t1 = build_zero_cst (type);
11135 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11136 return omit_one_operand_loc (loc, type, t1, arg0);
11137 }
11138
11139 /* Canonicalize (X & C1) | C2. */
11140 if (TREE_CODE (arg0) == BIT_AND_EXPR
11141 && TREE_CODE (arg1) == INTEGER_CST
11142 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11143 {
11144 int width = TYPE_PRECISION (type), w;
11145 wide_int c1 = TREE_OPERAND (arg0, 1);
11146 wide_int c2 = arg1;
11147
11148 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
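	  /* E.g. (X & 0x0f) | 0xff: every bit of 0x0f is already in
	     0xff, so the BIT_AND contributes nothing and the result is
	     simply 0xff. */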
11149 if ((c1 & c2) == c1)
11150 return omit_one_operand_loc (loc, type, arg1,
11151 TREE_OPERAND (arg0, 0));
11152
11153 wide_int msk = wi::mask (width, false,
11154 TYPE_PRECISION (TREE_TYPE (arg1)));
11155
11156 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11157 if (msk.and_not (c1 | c2) == 0)
11158 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11159 TREE_OPERAND (arg0, 0), arg1);
11160
11161 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11162 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11163 mode which allows further optimizations. */
11164 c1 &= msk;
11165 c2 &= msk;
11166 wide_int c3 = c1.and_not (c2);
11167 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11168 {
11169 wide_int mask = wi::mask (w, false,
11170 TYPE_PRECISION (type));
11171 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11172 {
11173 c3 = mask;
11174 break;
11175 }
11176 }
11177
11178 if (c3 != c1)
11179 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11180 fold_build2_loc (loc, BIT_AND_EXPR, type,
11181 TREE_OPERAND (arg0, 0),
11182 wide_int_to_tree (type,
11183 c3)),
11184 arg1);
11185 }
11186
11187 /* (X & Y) | Y is (X, Y). */
11188 if (TREE_CODE (arg0) == BIT_AND_EXPR
11189 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11190 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11191 /* (X & Y) | X is (Y, X). */
11192 if (TREE_CODE (arg0) == BIT_AND_EXPR
11193 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11194 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11195 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11196 /* X | (X & Y) is (Y, X). */
11197 if (TREE_CODE (arg1) == BIT_AND_EXPR
11198 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11199 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11200 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11201 /* X | (Y & X) is (Y, X). */
11202 if (TREE_CODE (arg1) == BIT_AND_EXPR
11203 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11204 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11205 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11206
11207 /* (X & ~Y) | (~X & Y) is X ^ Y */
11208 if (TREE_CODE (arg0) == BIT_AND_EXPR
11209 && TREE_CODE (arg1) == BIT_AND_EXPR)
11210 {
11211 tree a0, a1, l0, l1, n0, n1;
11212
11213 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11214 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11215
11216 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11217 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11218
11219 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11220 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11221
11222 if ((operand_equal_p (n0, a0, 0)
11223 && operand_equal_p (n1, a1, 0))
11224 || (operand_equal_p (n0, a1, 0)
11225 && operand_equal_p (n1, a0, 0)))
11226 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11227 }
11228
11229 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11230 if (t1 != NULL_TREE)
11231 return t1;
11232
11233 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11234
11235 This results in more efficient code for machines without a NAND
11236 instruction. Combine will canonicalize to the first form
11237 which will allow use of NAND instructions provided by the
11238 backend if they exist. */
11239 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11240 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11241 {
11242 return
11243 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11244 build2 (BIT_AND_EXPR, type,
11245 fold_convert_loc (loc, type,
11246 TREE_OPERAND (arg0, 0)),
11247 fold_convert_loc (loc, type,
11248 TREE_OPERAND (arg1, 0))));
11249 }
11250
11251 /* See if this can be simplified into a rotate first. If that
11252 is unsuccessful continue in the association code. */
11253 goto bit_rotate;
11254
11255 case BIT_XOR_EXPR:
11256 if (integer_all_onesp (arg1))
11257 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11258
11259 /* ~X ^ X is -1. */
11260 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11261 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11262 {
11263 t1 = build_zero_cst (type);
11264 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11265 return omit_one_operand_loc (loc, type, t1, arg1);
11266 }
11267
11268 /* X ^ ~X is -1. */
11269 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11270 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11271 {
11272 t1 = build_zero_cst (type);
11273 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11274 return omit_one_operand_loc (loc, type, t1, arg0);
11275 }
11276
11277 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11278 with a constant, and the two constants have no bits in common,
11279 we should treat this as a BIT_IOR_EXPR since this may produce more
11280 simplifications. */
11281 if (TREE_CODE (arg0) == BIT_AND_EXPR
11282 && TREE_CODE (arg1) == BIT_AND_EXPR
11283 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11284 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11285 && wi::bit_and (TREE_OPERAND (arg0, 1),
11286 TREE_OPERAND (arg1, 1)) == 0)
11287 {
11288 code = BIT_IOR_EXPR;
11289 goto bit_ior;
11290 }
11291
11292 /* (X | Y) ^ X -> Y & ~X. */
11293 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11294 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11295 {
11296 tree t2 = TREE_OPERAND (arg0, 1);
11297 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11298 arg1);
11299 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11300 fold_convert_loc (loc, type, t2),
11301 fold_convert_loc (loc, type, t1));
11302 return t1;
11303 }
11304
11305 /* (Y | X) ^ X -> Y & ~X. */
11306 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11307 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11308 {
11309 tree t2 = TREE_OPERAND (arg0, 0);
11310 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11311 arg1);
11312 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11313 fold_convert_loc (loc, type, t2),
11314 fold_convert_loc (loc, type, t1));
11315 return t1;
11316 }
11317
11318 /* X ^ (X | Y) -> Y & ~X. */
11319 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11320 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11321 {
11322 tree t2 = TREE_OPERAND (arg1, 1);
11323 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11324 arg0);
11325 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11326 fold_convert_loc (loc, type, t2),
11327 fold_convert_loc (loc, type, t1));
11328 return t1;
11329 }
11330
11331 /* X ^ (Y | X) -> Y & ~X. */
11332 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11333 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11334 {
11335 tree t2 = TREE_OPERAND (arg1, 0);
11336 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11337 arg0);
11338 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11339 fold_convert_loc (loc, type, t2),
11340 fold_convert_loc (loc, type, t1));
11341 return t1;
11342 }
11343
11344 /* Convert ~X ^ ~Y to X ^ Y. */
11345 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11346 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11347 return fold_build2_loc (loc, code, type,
11348 fold_convert_loc (loc, type,
11349 TREE_OPERAND (arg0, 0)),
11350 fold_convert_loc (loc, type,
11351 TREE_OPERAND (arg1, 0)));
11352
11353 /* Convert ~X ^ C to X ^ ~C. */
11354 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11355 && TREE_CODE (arg1) == INTEGER_CST)
11356 return fold_build2_loc (loc, code, type,
11357 fold_convert_loc (loc, type,
11358 TREE_OPERAND (arg0, 0)),
11359 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11360
11361 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11362 if (TREE_CODE (arg0) == BIT_AND_EXPR
11363 && INTEGRAL_TYPE_P (type)
11364 && integer_onep (TREE_OPERAND (arg0, 1))
11365 && integer_onep (arg1))
11366 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11367 build_zero_cst (TREE_TYPE (arg0)));
11368
11369 /* Fold (X & Y) ^ Y as ~X & Y. */
11370 if (TREE_CODE (arg0) == BIT_AND_EXPR
11371 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11372 {
11373 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11374 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11375 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11376 fold_convert_loc (loc, type, arg1));
11377 }
11378 /* Fold (X & Y) ^ X as ~Y & X. */
11379 if (TREE_CODE (arg0) == BIT_AND_EXPR
11380 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11381 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11382 {
11383 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11384 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11385 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11386 fold_convert_loc (loc, type, arg1));
11387 }
11388 /* Fold X ^ (X & Y) as X & ~Y. */
11389 if (TREE_CODE (arg1) == BIT_AND_EXPR
11390 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11391 {
11392 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11393 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11394 fold_convert_loc (loc, type, arg0),
11395 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11396 }
11397 /* Fold X ^ (Y & X) as ~Y & X. */
11398 if (TREE_CODE (arg1) == BIT_AND_EXPR
11399 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11400 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11401 {
11402 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11403 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11404 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11405 fold_convert_loc (loc, type, arg0));
11406 }
11407
11408 /* See if this can be simplified into a rotate first. If that
11409 is unsuccessful continue in the association code. */
11410 goto bit_rotate;
11411
11412 case BIT_AND_EXPR:
11413 if (integer_all_onesp (arg1))
11414 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11415 if (operand_equal_p (arg0, arg1, 0))
11416 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11417
11418 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11419 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11420 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11421 || (TREE_CODE (arg0) == EQ_EXPR
11422 && integer_zerop (TREE_OPERAND (arg0, 1))))
11423 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11424 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11425
11426 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11427 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11428 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11429 || (TREE_CODE (arg1) == EQ_EXPR
11430 && integer_zerop (TREE_OPERAND (arg1, 1))))
11431 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11432 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11433
11434 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11435 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11436 && TREE_CODE (arg1) == INTEGER_CST
11437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11438 {
11439 tree tmp1 = fold_convert_loc (loc, type, arg1);
11440 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11441 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11442 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11443 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11444 return
11445 fold_convert_loc (loc, type,
11446 fold_build2_loc (loc, BIT_IOR_EXPR,
11447 type, tmp2, tmp3));
11448 }
11449
11450 /* (X | Y) & Y is (X, Y). */
11451 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11452 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11453 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11454 /* (X | Y) & X is (Y, X). */
11455 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11456 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11457 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11458 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11459 /* X & (X | Y) is (Y, X). */
11460 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11461 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11462 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11463 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11464 /* X & (Y | X) is (Y, X). */
11465 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11466 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11467 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11468 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11469
11470 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11471 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11472 && INTEGRAL_TYPE_P (type)
11473 && integer_onep (TREE_OPERAND (arg0, 1))
11474 && integer_onep (arg1))
11475 {
11476 tree tem2;
11477 tem = TREE_OPERAND (arg0, 0);
11478 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11479 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11480 tem, tem2);
11481 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11482 build_zero_cst (TREE_TYPE (tem)));
11483 }
11484 /* Fold ~X & 1 as (X & 1) == 0. */
11485 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11486 && INTEGRAL_TYPE_P (type)
11487 && integer_onep (arg1))
11488 {
11489 tree tem2;
11490 tem = TREE_OPERAND (arg0, 0);
11491 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11492 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11493 tem, tem2);
11494 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11495 build_zero_cst (TREE_TYPE (tem)));
11496 }
11497 /* Fold !X & 1 as X == 0. */
11498 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11499 && integer_onep (arg1))
11500 {
11501 tem = TREE_OPERAND (arg0, 0);
11502 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11503 build_zero_cst (TREE_TYPE (tem)));
11504 }
11505
11506 /* Fold (X ^ Y) & Y as ~X & Y. */
11507 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11508 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11509 {
11510 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11511 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11512 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11513 fold_convert_loc (loc, type, arg1));
11514 }
11515 /* Fold (X ^ Y) & X as ~Y & X. */
11516 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11517 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11518 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11519 {
11520 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11521 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11522 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11523 fold_convert_loc (loc, type, arg1));
11524 }
11525 /* Fold X & (X ^ Y) as X & ~Y. */
11526 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11527 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11528 {
11529 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11530 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11531 fold_convert_loc (loc, type, arg0),
11532 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11533 }
11534 /* Fold X & (Y ^ X) as ~Y & X. */
11535 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11536 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11537 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11538 {
11539 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11540 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11541 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11542 fold_convert_loc (loc, type, arg0));
11543 }
11544
11545 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11546 multiple of 1 << CST. */
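      /* E.g. (X * 8) & -8: the product is always a multiple of 8, so
	 the mask -8 (~7) only clears bits that are already zero and the
	 BIT_AND_EXPR can be dropped. */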
11547 if (TREE_CODE (arg1) == INTEGER_CST)
11548 {
11549 wide_int cst1 = arg1;
11550 wide_int ncst1 = -cst1;
11551 if ((cst1 & ncst1) == ncst1
11552 && multiple_of_p (type, arg0,
11553 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11554 return fold_convert_loc (loc, type, arg0);
11555 }
11556
11557 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11558 bits from CST2. */
11559 if (TREE_CODE (arg1) == INTEGER_CST
11560 && TREE_CODE (arg0) == MULT_EXPR
11561 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11562 {
11563 wide_int warg1 = arg1;
11564 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11565
11566 if (masked == 0)
11567 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11568 arg0, arg1);
11569 else if (masked != warg1)
11570 {
11571 /* Avoid the transform if arg1 is a mask of some
11572 mode which allows further optimizations. */
11573 int pop = wi::popcount (warg1);
11574 if (!(pop >= BITS_PER_UNIT
11575 && exact_log2 (pop) != -1
11576 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11577 return fold_build2_loc (loc, code, type, op0,
11578 wide_int_to_tree (type, masked));
11579 }
11580 }
11581
11582 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11583 ((A & N) + B) & M -> (A + B) & M
11584 Similarly if (N & M) == 0,
11585 ((A | N) + B) & M -> (A + B) & M
11586 and for - instead of + (or unary - instead of +)
11587 and/or ^ instead of |.
11588 If B is constant and (B & M) == 0, fold into A & M. */
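      /* E.g. with M == 0xff: ((A & 0x1ff) + B) & 0xff becomes
	 (A + B) & 0xff since 0x1ff & 0xff == 0xff, and likewise
	 ((A | 0x100) + B) & 0xff since 0x100 & 0xff == 0. */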
11589 if (TREE_CODE (arg1) == INTEGER_CST)
11590 {
11591 wide_int cst1 = arg1;
11592 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11593 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11594 && (TREE_CODE (arg0) == PLUS_EXPR
11595 || TREE_CODE (arg0) == MINUS_EXPR
11596 || TREE_CODE (arg0) == NEGATE_EXPR)
11597 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11598 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11599 {
11600 tree pmop[2];
11601 int which = 0;
11602 wide_int cst0;
11603
11604 /* Now we know that arg0 is (C + D) or (C - D) or
11605 -C and arg1 (M) is (1LL << cst) - 1.
11606 Store C into PMOP[0] and D into PMOP[1]. */
11607 pmop[0] = TREE_OPERAND (arg0, 0);
11608 pmop[1] = NULL;
11609 if (TREE_CODE (arg0) != NEGATE_EXPR)
11610 {
11611 pmop[1] = TREE_OPERAND (arg0, 1);
11612 which = 1;
11613 }
11614
11615 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11616 which = -1;
11617
11618 for (; which >= 0; which--)
11619 switch (TREE_CODE (pmop[which]))
11620 {
11621 case BIT_AND_EXPR:
11622 case BIT_IOR_EXPR:
11623 case BIT_XOR_EXPR:
11624 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11625 != INTEGER_CST)
11626 break;
11627 cst0 = TREE_OPERAND (pmop[which], 1);
11628 cst0 &= cst1;
11629 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11630 {
11631 if (cst0 != cst1)
11632 break;
11633 }
11634 else if (cst0 != 0)
11635 break;
11636 /* If C or D is of the form (A & N) where
11637 (N & M) == M, or of the form (A | N) or
11638 (A ^ N) where (N & M) == 0, replace it with A. */
11639 pmop[which] = TREE_OPERAND (pmop[which], 0);
11640 break;
11641 case INTEGER_CST:
11642 /* If C or D is a constant N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (cst1 & pmop[which]) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both arguments
                 above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
                }
            }
        }

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
          if (mask == -1)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                  build2 (BIT_IOR_EXPR, type,
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg0, 0)),
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg1, 0))));
        }

      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }
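      /* For illustration (editor's sketch): if ARG0 is the address of an
         object known to be 16-byte aligned, MODULUS is 16 and RESIDUE is 0,
         so an expression like (ptr & 15) folds to the constant 0.  */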

      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
         (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && TREE_CODE (arg1) == INTEGER_CST
          && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
          && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
          && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
              < TYPE_PRECISION (TREE_TYPE (arg0))))
        {
          unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
          unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
            {
              prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_PRECISION (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                      /* Otherwise X >> C1 is all zeros, so we'll optimize
                         it into (X, 0) later on by making sure zerobits
                         is all ones.  */
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              if (shiftc < prec)
                {
                  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
                  zerobits <<= prec - shiftc;
                }
              /* For an arithmetic shift, if the sign bit could be set,
                 zerobits can actually contain sign bits, so no transformation
                 is possible unless MASK masks them all away.  In that case
                 the shift needs to be converted into a logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }
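          /* Worked example (editor's illustration): for a 32-bit unsigned X,
             X >> 24 has SHIFTC == 24, so bits 8..31 of the result are known
             to be zero and ZEROBITS becomes 0xffffff00.  A mask such as
             0xff00 is then entirely covered by ZEROBITS, and the whole
             expression folds to (X, 0) just below.  */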

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                         build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
                                             fold_convert_loc (loc, shift_type,
                                                               TREE_OPERAND (arg0, 0)),
                                             TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
                }
            }
        }
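      /* Newmask example (editor's illustration): for an 8-bit unsigned X,
         ((X >> 1) & 0x7f) has ZEROBITS == 0x80, so NEWMASK == 0xff, which is
         QImode's mask; the rebuilt BIT_AND with 0xff is then trivially
         removable, leaving just the shift.  */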

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands_loc (loc, type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
            }
        }

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                negate_expr (arg0),
                                TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                      negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (optimize
          && (TREE_CODE (arg1) == REAL_CST
              || (TREE_CODE (arg1) == COMPLEX_CST
                  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
              || (TREE_CODE (arg1) == VECTOR_CST
                  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.
             TODO: Complex reciprocal not implemented.  */
          if (TREE_CODE (arg1) != COMPLEX_CST)
            {
              tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

              if (inverse)
                return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
            }
        }
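      /* For illustration: x / 5.0 becomes x * 0.2 only under
         -freciprocal-math, whereas x / 4.0 can always become x * 0.25,
         since 0.25 is exactly representable and exact_inverse succeeds
         for powers of two.  */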
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                                 TREE_OPERAND (arg1, 0)),
                                TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1));
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg1, 0));
        }

      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                                  CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                          build_real (type, dconst1), tmp);
                }
            }

          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
                      return fold_build2_loc (loc, RDIV_EXPR, type,
                                              build_real (type, dconst1),
                                              tmp);
                    }
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                }
            }
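          /* For illustration: pow (x, 3.0) / x folds to pow (x, 2.0);
             the real exponent constant is simply decremented by one.  */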

          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc,
                                          expfn, 1,
                                          fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
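          /* For illustration: x / exp (y) becomes x * exp (-y), trading the
             division for a cheaper negation and multiplication.  */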

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
         to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
          && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
        {
          tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
                                      arg1, TREE_OPERAND (arg0, 1));
          if (sum && integer_zerop (sum))
            {
              tree pow2 = build_int_cst (integer_type_node,
                                         wi::exact_log2 (arg1));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      TREE_OPERAND (arg0, 0), pow2);
            }
        }
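      /* For illustration: with A == 8, (X & -8) / 8 folds to X >> 3; the
         AND already clears the three bits that the truncating division
         would discard, so the plain arithmetic shift is equivalent.  */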

      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
                                         wi::exact_log2 (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt, pow2);
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0), sh_cnt);
            }
        }
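      /* For illustration: A / (4 << N) becomes A >> (N + 2) when A is known
         to be non-negative.  */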

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && wi::eq_p (arg1, -1))
        return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, but it's not clear whether
         they still do after the last round of changes to the DIV code in
         expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && wi::eq_p (arg1, -1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && TYPE_SIGN (type) == SIGNED
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && wi::neg_p (arg1)
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask
                = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                   build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      fold_convert_loc (loc, type, mask));
            }
        }
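      /* For illustration: for unsigned X, X % 16 folds to X & 15, and
         X % (2 << N) folds to X & ((2 << N) - 1).  */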

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
          && tree_expr_nonnegative_p (arg1))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Prefer vector1 << scalar to vector1 << vector2
         if vector2 is uniform.  */
      if (VECTOR_TYPE_P (TREE_TYPE (arg1))
          && (tem = uniform_vector_p (arg1)) != NULL_TREE)
        return fold_build2_loc (loc, code, type, op0, tem);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
          && tree_to_uhwi (arg1) < prec
          && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
          && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
        {
          unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
                              + tree_to_uhwi (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= prec)
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % prec;
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return omit_one_operand_loc (loc, type, build_zero_cst (type),
                                             TREE_OPERAND (arg0, 0));
              else
                low = prec - 1;
            }

          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                  build_int_cst (TREE_TYPE (arg1), low));
        }
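      /* For illustration: (x << 3) << 5 becomes x << 8.  If the counts sum
         to the precision or more, e.g. (x << 20) << 20 on a 32-bit type,
         the result is simply 0 for unsigned types and left shifts.  */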

      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && tree_fits_uhwi_p (arg1)
          && tree_to_uhwi (arg1) < prec
          && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
          && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
        {
          HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

              lshift = build_minus_one_cst (type);
              lshift = const_binop (code, lshift, arg1);

              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
            }
        }
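      /* For illustration: for a 32-bit unsigned x, (x >> 8) << 8 becomes
         x & 0xffffff00, and (x << 8) >> 8 becomes x & 0x00ffffff.  */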

      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1), prec);
          tem = const_binop (MINUS_EXPR, tem, arg1);
          return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
        }
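      /* For illustration: on a 32-bit type, a rotate-left by 8 is
         canonicalized to a rotate-right by 24.  */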

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 0), arg1),
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to some integer multiple
         of the precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
                             prec) == 0)
        return TREE_OPERAND (arg0, 0);

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2_loc (loc, code, type,
                                       fold_convert_loc (loc, type,
                                                         TREE_OPERAND (arg0, 1)),
                                       arg1);
          tree shift = fold_build2_loc (loc, code, type,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0)),
                                        arg1);
          tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
          && TREE_CODE (arg1) == TRUTH_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which is extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (arg0),
                                                                   arg1),
                                                 TREE_OPERAND (arg0, 1)));

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        0)),
                              arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          return omit_two_operands_loc (loc, type,
                                        fold_build2_loc (loc, code, type,
                                                         val,
                                                         build_int_cst (TREE_TYPE (val),
                                                                        0)),
                                        TREE_OPERAND (arg0, 0), arg1);
        }

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1: since
         C is odd, C - X == X would require 2*X == C, which has no integer
         solution, so equality is always false and inequality always true.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        1)),
                              arg1, 0)
          && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
        {
          return omit_two_operands_loc (loc, type,
                                        code == NE_EXPR
                                        ? boolean_true_node : boolean_false_node,
                                        TREE_OPERAND (arg0, 1), arg1);
        }

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                          arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1), tem),
                                      arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                          arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1), tem),
                                      arg1);
            }
        }

      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 0)),
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 1)));

          return fold_build2_loc (loc, code, type, newmod,
                                  fold_convert_loc (loc, newtype, arg1));
        }
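      /* For illustration: for int X, X % 16 == 0 becomes
         (unsigned) X % 16 == 0, which the power-of-two fold for modulus can
         then turn into a cheap mask test; the two forms agree whenever the
         result is compared against zero.  */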

      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
          prec = TYPE_PRECISION (itype);

          /* Check for a valid shift count.  */
          if (wi::ltu_p (arg001, prec))
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
                  return fold_build2_loc (loc, code, type, tem,
                                          fold_convert_loc (loc, itype, arg1));
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
                                        arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                             code == EQ_EXPR ? integer_one_node
                                             : integer_zero_node,
                                             arg000);
            }
        }
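      /* Worked example (editor's illustration): ((X >> 4) & 2) != 0 becomes
         (X & 0x20) != 0, since 2 << 4 == 0x20 stays in range; for signed X
         on a 32-bit type, ((X >> 30) & 4) != 0 can only test sign-extended
         copies of the sign bit, so it becomes X < 0.  */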

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
        return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                       TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                       TREE_OPERAND (arg0, 1));
          tree dandnotc
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
                               notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
          tree candnotd
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               TREE_OPERAND (arg0, 1),
                               fold_convert_loc (loc, TREE_TYPE (arg0), notd));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }

      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                       CALL_EXPR_ARG (arg0, 0));
              return fold_build2_loc (loc, code, type, iref,
                                      build_int_cst (TREE_TYPE (iref), 0));
            }
        }

      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                      type, arg00, build_zero_cst (itype));
            }
        }
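      /* For illustration: for a 32-bit int X, (X >> 31) != 0 is exactly
         X < 0; for unsigned X the operand is first converted to the
         corresponding signed type so the same sign test applies.  */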

      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_XOR_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                build_zero_cst (TREE_TYPE (arg0)));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
                                build_zero_cst (TREE_TYPE (arg0)));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                                 TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    arg1));
        }

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                  arg00, build_int_cst (TREE_TYPE (arg00), 0));
        }

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                                 arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                  tem, build_int_cst (TREE_TYPE (tem), 0));
        }

      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                fold_convert_loc (loc, TREE_TYPE (arg0),
                                                  TREE_OPERAND (arg1, 0)));

      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
13035 if (TREE_CODE (arg0) == BIT_AND_EXPR
13036 && TREE_CODE (arg1) == BIT_AND_EXPR)
13037 {
13038 tree arg00 = TREE_OPERAND (arg0, 0);
13039 tree arg01 = TREE_OPERAND (arg0, 1);
13040 tree arg10 = TREE_OPERAND (arg1, 0);
13041 tree arg11 = TREE_OPERAND (arg1, 1);
13042 tree itype = TREE_TYPE (arg0);
13043
13044 if (operand_equal_p (arg01, arg11, 0))
13045 return fold_build2_loc (loc, code, type,
13046 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13047 fold_build2_loc (loc,
13048 BIT_XOR_EXPR, itype,
13049 arg00, arg10),
13050 arg01),
13051 build_zero_cst (itype));
13052
13053 if (operand_equal_p (arg01, arg10, 0))
13054 return fold_build2_loc (loc, code, type,
13055 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13056 fold_build2_loc (loc,
13057 BIT_XOR_EXPR, itype,
13058 arg00, arg11),
13059 arg01),
13060 build_zero_cst (itype));
13061
13062 if (operand_equal_p (arg00, arg11, 0))
13063 return fold_build2_loc (loc, code, type,
13064 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13065 fold_build2_loc (loc,
13066 BIT_XOR_EXPR, itype,
13067 arg01, arg10),
13068 arg00),
13069 build_zero_cst (itype));
13070
13071 if (operand_equal_p (arg00, arg10, 0))
13072 return fold_build2_loc (loc, code, type,
13073 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13074 fold_build2_loc (loc,
13075 BIT_XOR_EXPR, itype,
13076 arg01, arg11),
13077 arg00),
13078 build_zero_cst (itype));
13079 }
13080
13081 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13082 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13083 {
13084 tree arg00 = TREE_OPERAND (arg0, 0);
13085 tree arg01 = TREE_OPERAND (arg0, 1);
13086 tree arg10 = TREE_OPERAND (arg1, 0);
13087 tree arg11 = TREE_OPERAND (arg1, 1);
13088 tree itype = TREE_TYPE (arg0);
13089
13090 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13091 operand_equal_p guarantees no side-effects so we don't need
13092 to use omit_one_operand on Z. */
13093 if (operand_equal_p (arg01, arg11, 0))
13094 return fold_build2_loc (loc, code, type, arg00,
13095 fold_convert_loc (loc, TREE_TYPE (arg00),
13096 arg10));
13097 if (operand_equal_p (arg01, arg10, 0))
13098 return fold_build2_loc (loc, code, type, arg00,
13099 fold_convert_loc (loc, TREE_TYPE (arg00),
13100 arg11));
13101 if (operand_equal_p (arg00, arg11, 0))
13102 return fold_build2_loc (loc, code, type, arg01,
13103 fold_convert_loc (loc, TREE_TYPE (arg01),
13104 arg10));
13105 if (operand_equal_p (arg00, arg10, 0))
13106 return fold_build2_loc (loc, code, type, arg01,
13107 fold_convert_loc (loc, TREE_TYPE (arg01),
13108 arg11));
13109
13110 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13111 if (TREE_CODE (arg01) == INTEGER_CST
13112 && TREE_CODE (arg11) == INTEGER_CST)
13113 {
13114 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13115 fold_convert_loc (loc, itype, arg11));
13116 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13117 return fold_build2_loc (loc, code, type, tem,
13118 fold_convert_loc (loc, itype, arg10));
13119 }
13120 }
13121
13122 /* Attempt to simplify equality/inequality comparisons of complex
13123 values. Only lower the comparison if the result is known or
13124 can be simplified to a single scalar comparison. */
13125 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13126 || TREE_CODE (arg0) == COMPLEX_CST)
13127 && (TREE_CODE (arg1) == COMPLEX_EXPR
13128 || TREE_CODE (arg1) == COMPLEX_CST))
13129 {
13130 tree real0, imag0, real1, imag1;
13131 tree rcond, icond;
13132
13133 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13134 {
13135 real0 = TREE_OPERAND (arg0, 0);
13136 imag0 = TREE_OPERAND (arg0, 1);
13137 }
13138 else
13139 {
13140 real0 = TREE_REALPART (arg0);
13141 imag0 = TREE_IMAGPART (arg0);
13142 }
13143
13144 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13145 {
13146 real1 = TREE_OPERAND (arg1, 0);
13147 imag1 = TREE_OPERAND (arg1, 1);
13148 }
13149 else
13150 {
13151 real1 = TREE_REALPART (arg1);
13152 imag1 = TREE_IMAGPART (arg1);
13153 }
13154
13155 rcond = fold_binary_loc (loc, code, type, real0, real1);
13156 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13157 {
13158 if (integer_zerop (rcond))
13159 {
13160 if (code == EQ_EXPR)
13161 return omit_two_operands_loc (loc, type, boolean_false_node,
13162 imag0, imag1);
13163 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13164 }
13165 else
13166 {
13167 if (code == NE_EXPR)
13168 return omit_two_operands_loc (loc, type, boolean_true_node,
13169 imag0, imag1);
13170 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13171 }
13172 }
13173
13174 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13175 if (icond && TREE_CODE (icond) == INTEGER_CST)
13176 {
13177 if (integer_zerop (icond))
13178 {
13179 if (code == EQ_EXPR)
13180 return omit_two_operands_loc (loc, type, boolean_false_node,
13181 real0, real1);
13182 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13183 }
13184 else
13185 {
13186 if (code == NE_EXPR)
13187 return omit_two_operands_loc (loc, type, boolean_true_node,
13188 real0, real1);
13189 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13190 }
13191 }
13192 }
13193
13194 return NULL_TREE;
13195
13196 case LT_EXPR:
13197 case GT_EXPR:
13198 case LE_EXPR:
13199 case GE_EXPR:
13200 tem = fold_comparison (loc, code, type, op0, op1);
13201 if (tem != NULL_TREE)
13202 return tem;
13203
13204 /* Transform comparisons of the form X +- C CMP X. */
13205 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13206 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13207 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13208 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13209 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13210 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13211 {
13212 tree arg01 = TREE_OPERAND (arg0, 1);
13213 enum tree_code code0 = TREE_CODE (arg0);
13214 int is_positive;
13215
13216 if (TREE_CODE (arg01) == REAL_CST)
13217 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13218 else
13219 is_positive = tree_int_cst_sgn (arg01);
13220
13221 /* (X - c) > X becomes false. */
13222 if (code == GT_EXPR
13223 && ((code0 == MINUS_EXPR && is_positive >= 0)
13224 || (code0 == PLUS_EXPR && is_positive <= 0)))
13225 {
13226 if (TREE_CODE (arg01) == INTEGER_CST
13227 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13228 fold_overflow_warning (("assuming signed overflow does not "
13229 "occur when assuming that (X - c) > X "
13230 "is always false"),
13231 WARN_STRICT_OVERFLOW_ALL);
13232 return constant_boolean_node (0, type);
13233 }
13234
13235 /* Likewise (X + c) < X becomes false. */
13236 if (code == LT_EXPR
13237 && ((code0 == PLUS_EXPR && is_positive >= 0)
13238 || (code0 == MINUS_EXPR && is_positive <= 0)))
13239 {
13240 if (TREE_CODE (arg01) == INTEGER_CST
13241 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13242 fold_overflow_warning (("assuming signed overflow does not "
13243 "occur when assuming that "
13244 "(X + c) < X is always false"),
13245 WARN_STRICT_OVERFLOW_ALL);
13246 return constant_boolean_node (0, type);
13247 }
13248
13249 /* Convert (X - c) <= X to true. */
13250 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13251 && code == LE_EXPR
13252 && ((code0 == MINUS_EXPR && is_positive >= 0)
13253 || (code0 == PLUS_EXPR && is_positive <= 0)))
13254 {
13255 if (TREE_CODE (arg01) == INTEGER_CST
13256 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13257 fold_overflow_warning (("assuming signed overflow does not "
13258 "occur when assuming that "
13259 "(X - c) <= X is always true"),
13260 WARN_STRICT_OVERFLOW_ALL);
13261 return constant_boolean_node (1, type);
13262 }
13263
13264 /* Convert (X + c) >= X to true. */
13265 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13266 && code == GE_EXPR
13267 && ((code0 == PLUS_EXPR && is_positive >= 0)
13268 || (code0 == MINUS_EXPR && is_positive <= 0)))
13269 {
13270 if (TREE_CODE (arg01) == INTEGER_CST
13271 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13272 fold_overflow_warning (("assuming signed overflow does not "
13273 "occur when assuming that "
13274 "(X + c) >= X is always true"),
13275 WARN_STRICT_OVERFLOW_ALL);
13276 return constant_boolean_node (1, type);
13277 }
13278
13279 if (TREE_CODE (arg01) == INTEGER_CST)
13280 {
13281 /* Convert X + c > X and X - c < X to true for integers. */
13282 if (code == GT_EXPR
13283 && ((code0 == PLUS_EXPR && is_positive > 0)
13284 || (code0 == MINUS_EXPR && is_positive < 0)))
13285 {
13286 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13287 fold_overflow_warning (("assuming signed overflow does "
13288 "not occur when assuming that "
13289 "(X + c) > X is always true"),
13290 WARN_STRICT_OVERFLOW_ALL);
13291 return constant_boolean_node (1, type);
13292 }
13293
13294 if (code == LT_EXPR
13295 && ((code0 == MINUS_EXPR && is_positive > 0)
13296 || (code0 == PLUS_EXPR && is_positive < 0)))
13297 {
13298 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13299 fold_overflow_warning (("assuming signed overflow does "
13300 "not occur when assuming that "
13301 "(X - c) < X is always true"),
13302 WARN_STRICT_OVERFLOW_ALL);
13303 return constant_boolean_node (1, type);
13304 }
13305
13306 /* Convert X + c <= X and X - c >= X to false for integers. */
13307 if (code == LE_EXPR
13308 && ((code0 == PLUS_EXPR && is_positive > 0)
13309 || (code0 == MINUS_EXPR && is_positive < 0)))
13310 {
13311 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13312 fold_overflow_warning (("assuming signed overflow does "
13313 "not occur when assuming that "
13314 "(X + c) <= X is always false"),
13315 WARN_STRICT_OVERFLOW_ALL);
13316 return constant_boolean_node (0, type);
13317 }
13318
13319 if (code == GE_EXPR
13320 && ((code0 == MINUS_EXPR && is_positive > 0)
13321 || (code0 == PLUS_EXPR && is_positive < 0)))
13322 {
13323 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13324 fold_overflow_warning (("assuming signed overflow does "
13325 "not occur when assuming that "
13326 "(X - c) >= X is always false"),
13327 WARN_STRICT_OVERFLOW_ALL);
13328 return constant_boolean_node (0, type);
13329 }
13330 }
13331 }
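/* Illustrative summary of the X +- C folds above, assuming C > 0; the
   integer rows rely on signed overflow being undefined, and the "true"
   rows additionally require that NaNs need not be honored:

       x - c >  x  -> false        x + c <  x  -> false
       x - c <= x  -> true         x + c >= x  -> true
       x + c >  x  -> true         x - c <  x  -> true
       x + c <= x  -> false        x - c >= x  -> false

   Each integer fold is preceded by a -Wstrict-overflow warning hook.  */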
13332
13333 /* Comparisons with the highest or lowest possible integer of
13334 the specified precision will have known values. */
13335 {
13336 tree arg1_type = TREE_TYPE (arg1);
13337 unsigned int prec = TYPE_PRECISION (arg1_type);
13338
13339 if (TREE_CODE (arg1) == INTEGER_CST
13340 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13341 {
13342 wide_int max = wi::max_value (arg1_type);
13343 wide_int signed_max = wi::max_value (prec, SIGNED);
13344 wide_int min = wi::min_value (arg1_type);
13345
13346 if (wi::eq_p (arg1, max))
13347 switch (code)
13348 {
13349 case GT_EXPR:
13350 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13351
13352 case GE_EXPR:
13353 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13354
13355 case LE_EXPR:
13356 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13357
13358 case LT_EXPR:
13359 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13360
13361 /* The GE_EXPR and LT_EXPR cases above are not normally
13362 reached because of previous transformations. */
13363
13364 default:
13365 break;
13366 }
13367 else if (wi::eq_p (arg1, max - 1))
13368 switch (code)
13369 {
13370 case GT_EXPR:
13371 arg1 = const_binop (PLUS_EXPR, arg1,
13372 build_int_cst (TREE_TYPE (arg1), 1));
13373 return fold_build2_loc (loc, EQ_EXPR, type,
13374 fold_convert_loc (loc,
13375 TREE_TYPE (arg1), arg0),
13376 arg1);
13377 case LE_EXPR:
13378 arg1 = const_binop (PLUS_EXPR, arg1,
13379 build_int_cst (TREE_TYPE (arg1), 1));
13380 return fold_build2_loc (loc, NE_EXPR, type,
13381 fold_convert_loc (loc, TREE_TYPE (arg1),
13382 arg0),
13383 arg1);
13384 default:
13385 break;
13386 }
13387 else if (wi::eq_p (arg1, min))
13388 switch (code)
13389 {
13390 case LT_EXPR:
13391 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13392
13393 case LE_EXPR:
13394 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13395
13396 case GE_EXPR:
13397 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13398
13399 case GT_EXPR:
13400 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13401
13402 default:
13403 break;
13404 }
13405 else if (wi::eq_p (arg1, min + 1))
13406 switch (code)
13407 {
13408 case GE_EXPR:
13409 arg1 = const_binop (MINUS_EXPR, arg1,
13410 build_int_cst (TREE_TYPE (arg1), 1));
13411 return fold_build2_loc (loc, NE_EXPR, type,
13412 fold_convert_loc (loc,
13413 TREE_TYPE (arg1), arg0),
13414 arg1);
13415 case LT_EXPR:
13416 arg1 = const_binop (MINUS_EXPR, arg1,
13417 build_int_cst (TREE_TYPE (arg1), 1));
13418 return fold_build2_loc (loc, EQ_EXPR, type,
13419 fold_convert_loc (loc, TREE_TYPE (arg1),
13420 arg0),
13421 arg1);
13422 default:
13423 break;
13424 }
13425
13426 else if (wi::eq_p (arg1, signed_max)
13427 && TYPE_UNSIGNED (arg1_type)
13428 /* We will flip the signedness of the comparison operator
13429 associated with the mode of arg1, so the sign bit is
13430 specified by this mode. Check that arg1 is the signed
13431 max associated with this sign bit. */
13432 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13433 /* signed_type does not work on pointer types. */
13434 && INTEGRAL_TYPE_P (arg1_type))
13435 {
13436 /* The following case also applies to X < signed_max+1
13437 and X >= signed_max+1 because of previous transformations. */
13438 if (code == LE_EXPR || code == GT_EXPR)
13439 {
13440 tree st = signed_type_for (arg1_type);
13441 return fold_build2_loc (loc,
13442 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13443 type, fold_convert_loc (loc, st, arg0),
13444 build_int_cst (st, 0));
13445 }
13446 }
13447 }
13448 }
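/* For instance (illustrative, unsigned char X, so max == 255):
       X >  255  -> false          X <= 255  -> true
       X >  254  -> X == 255       X <= 254  -> X != 255
   and with X of unsigned 32-bit type, X > 0x7fffffff (the signed
   maximum) is rewritten as (int) X < 0 by flipping the signedness.  */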
13449
13450 /* If we are comparing an ABS_EXPR with a constant, we can
13451 convert all the cases into explicit comparisons, but they may
13452 well not be faster than doing the ABS and one comparison.
13453 But ABS (X) <= C is a range comparison, which becomes a subtraction
13454 and a comparison, and is probably faster. */
13455 if (code == LE_EXPR
13456 && TREE_CODE (arg1) == INTEGER_CST
13457 && TREE_CODE (arg0) == ABS_EXPR
13458 && ! TREE_SIDE_EFFECTS (arg0)
13459 && (0 != (tem = negate_expr (arg1)))
13460 && TREE_CODE (tem) == INTEGER_CST
13461 && !TREE_OVERFLOW (tem))
13462 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13463 build2 (GE_EXPR, type,
13464 TREE_OPERAND (arg0, 0), tem),
13465 build2 (LE_EXPR, type,
13466 TREE_OPERAND (arg0, 0), arg1));
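/* E.g. (illustrative): abs (x) <= 5 folds to x >= -5 && x <= 5,
   built as a TRUTH_ANDIF_EXPR of the two comparisons.  */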
13467
13468 /* Convert ABS_EXPR<x> >= 0 to true. */
13469 strict_overflow_p = false;
13470 if (code == GE_EXPR
13471 && (integer_zerop (arg1)
13472 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13473 && real_zerop (arg1)))
13474 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13475 {
13476 if (strict_overflow_p)
13477 fold_overflow_warning (("assuming signed overflow does not occur "
13478 "when simplifying comparison of "
13479 "absolute value and zero"),
13480 WARN_STRICT_OVERFLOW_CONDITIONAL);
13481 return omit_one_operand_loc (loc, type,
13482 constant_boolean_node (true, type),
13483 arg0);
13484 }
13485
13486 /* Convert ABS_EXPR<x> < 0 to false. */
13487 strict_overflow_p = false;
13488 if (code == LT_EXPR
13489 && (integer_zerop (arg1) || real_zerop (arg1))
13490 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13491 {
13492 if (strict_overflow_p)
13493 fold_overflow_warning (("assuming signed overflow does not occur "
13494 "when simplifying comparison of "
13495 "absolute value and zero"),
13496 WARN_STRICT_OVERFLOW_CONDITIONAL);
13497 return omit_one_operand_loc (loc, type,
13498 constant_boolean_node (false, type),
13499 arg0);
13500 }
13501
13502 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13503 and similarly for >= into !=. */
13504 if ((code == LT_EXPR || code == GE_EXPR)
13505 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13506 && TREE_CODE (arg1) == LSHIFT_EXPR
13507 && integer_onep (TREE_OPERAND (arg1, 0)))
13508 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13509 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13510 TREE_OPERAND (arg1, 1)),
13511 build_zero_cst (TREE_TYPE (arg0)));
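/* Sketch (illustrative) of the transform above for unsigned x:
       x <  (1 << y)  becomes  (x >> y) == 0
       x >= (1 << y)  becomes  (x >> y) != 0
   since x < 2**y exactly when no bit at position y or above is set.  */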
13512
13513 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13514 otherwise Y might be >= the number of bits in X's type and thus e.g.
13515 (unsigned char) (1 << Y) for Y 15 might be 0.
13516 If the cast is widening, then 1 << Y should have unsigned type;
13517 otherwise, if Y is the number of bits in the signed shift type minus 1,
13518 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13519 31 might be 0xffffffff80000000. */
13520 if ((code == LT_EXPR || code == GE_EXPR)
13521 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13522 && CONVERT_EXPR_P (arg1)
13523 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13524 && (TYPE_PRECISION (TREE_TYPE (arg1))
13525 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13526 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13527 || (TYPE_PRECISION (TREE_TYPE (arg1))
13528 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13529 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13530 {
13531 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13532 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13533 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13534 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13535 build_zero_cst (TREE_TYPE (arg0)));
13536 }
13537
13538 return NULL_TREE;
13539
13540 case UNORDERED_EXPR:
13541 case ORDERED_EXPR:
13542 case UNLT_EXPR:
13543 case UNLE_EXPR:
13544 case UNGT_EXPR:
13545 case UNGE_EXPR:
13546 case UNEQ_EXPR:
13547 case LTGT_EXPR:
13548 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13549 {
13550 t1 = fold_relational_const (code, type, arg0, arg1);
13551 if (t1 != NULL_TREE)
13552 return t1;
13553 }
13554
13555 /* If the first operand is NaN, the result is constant. */
13556 if (TREE_CODE (arg0) == REAL_CST
13557 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13558 && (code != LTGT_EXPR || ! flag_trapping_math))
13559 {
13560 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13561 ? integer_zero_node
13562 : integer_one_node;
13563 return omit_one_operand_loc (loc, type, t1, arg1);
13564 }
13565
13566 /* If the second operand is NaN, the result is constant. */
13567 if (TREE_CODE (arg1) == REAL_CST
13568 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13569 && (code != LTGT_EXPR || ! flag_trapping_math))
13570 {
13571 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13572 ? integer_zero_node
13573 : integer_one_node;
13574 return omit_one_operand_loc (loc, type, t1, arg0);
13575 }
13576
13577 /* Simplify unordered comparison of something with itself. */
13578 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13579 && operand_equal_p (arg0, arg1, 0))
13580 return constant_boolean_node (1, type);
13581
13582 if (code == LTGT_EXPR
13583 && !flag_trapping_math
13584 && operand_equal_p (arg0, arg1, 0))
13585 return constant_boolean_node (0, type);
13586
13587 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13588 {
13589 tree targ0 = strip_float_extensions (arg0);
13590 tree targ1 = strip_float_extensions (arg1);
13591 tree newtype = TREE_TYPE (targ0);
13592
13593 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13594 newtype = TREE_TYPE (targ1);
13595
13596 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13597 return fold_build2_loc (loc, code, type,
13598 fold_convert_loc (loc, newtype, targ0),
13599 fold_convert_loc (loc, newtype, targ1));
13600 }
13601
13602 return NULL_TREE;
13603
13604 case COMPOUND_EXPR:
13605 /* When pedantic, a compound expression can be neither an lvalue
13606 nor an integer constant expression. */
13607 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13608 return NULL_TREE;
13609 /* Don't let (0, 0) be a null pointer constant. */
13610 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13611 : fold_convert_loc (loc, type, arg1);
13612 return pedantic_non_lvalue_loc (loc, tem);
13613
13614 case COMPLEX_EXPR:
13615 if ((TREE_CODE (arg0) == REAL_CST
13616 && TREE_CODE (arg1) == REAL_CST)
13617 || (TREE_CODE (arg0) == INTEGER_CST
13618 && TREE_CODE (arg1) == INTEGER_CST))
13619 return build_complex (type, arg0, arg1);
13620 if (TREE_CODE (arg0) == REALPART_EXPR
13621 && TREE_CODE (arg1) == IMAGPART_EXPR
13622 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13623 && operand_equal_p (TREE_OPERAND (arg0, 0),
13624 TREE_OPERAND (arg1, 0), 0))
13625 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13626 TREE_OPERAND (arg1, 0));
13627 return NULL_TREE;
13628
13629 case ASSERT_EXPR:
13630 /* An ASSERT_EXPR should never be passed to fold_binary. */
13631 gcc_unreachable ();
13632
13633 case VEC_PACK_TRUNC_EXPR:
13634 case VEC_PACK_FIX_TRUNC_EXPR:
13635 {
13636 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13637 tree *elts;
13638
13639 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13640 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13641 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13642 return NULL_TREE;
13643
13644 elts = XALLOCAVEC (tree, nelts);
13645 if (!vec_cst_ctor_to_array (arg0, elts)
13646 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13647 return NULL_TREE;
13648
13649 for (i = 0; i < nelts; i++)
13650 {
13651 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13652 ? NOP_EXPR : FIX_TRUNC_EXPR,
13653 TREE_TYPE (type), elts[i]);
13654 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13655 return NULL_TREE;
13656 }
13657
13658 return build_vector (type, elts);
13659 }
13660
13661 case VEC_WIDEN_MULT_LO_EXPR:
13662 case VEC_WIDEN_MULT_HI_EXPR:
13663 case VEC_WIDEN_MULT_EVEN_EXPR:
13664 case VEC_WIDEN_MULT_ODD_EXPR:
13665 {
13666 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13667 unsigned int out, ofs, scale;
13668 tree *elts;
13669
13670 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13671 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13672 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13673 return NULL_TREE;
13674
13675 elts = XALLOCAVEC (tree, nelts * 4);
13676 if (!vec_cst_ctor_to_array (arg0, elts)
13677 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13678 return NULL_TREE;
13679
13680 if (code == VEC_WIDEN_MULT_LO_EXPR)
13681 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13682 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13683 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13684 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13685 scale = 1, ofs = 0;
13686 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13687 scale = 1, ofs = 1;
13688
13689 for (out = 0; out < nelts; out++)
13690 {
13691 unsigned int in1 = (out << scale) + ofs;
13692 unsigned int in2 = in1 + nelts * 2;
13693 tree t1, t2;
13694
13695 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13696 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13697
13698 if (t1 == NULL_TREE || t2 == NULL_TREE)
13699 return NULL_TREE;
13700 elts[out] = const_binop (MULT_EXPR, t1, t2);
13701 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13702 return NULL_TREE;
13703 }
13704
13705 return build_vector (type, elts);
13706 }
13707
13708 default:
13709 return NULL_TREE;
13710 } /* switch (code) */
13711 }
13712
13713 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13714 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13715 of GOTO_EXPR. */
13716
13717 static tree
13718 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13719 {
13720 switch (TREE_CODE (*tp))
13721 {
13722 case LABEL_EXPR:
13723 return *tp;
13724
13725 case GOTO_EXPR:
13726 *walk_subtrees = 0;
13727
13728 /* ... fall through ... */
13729
13730 default:
13731 return NULL_TREE;
13732 }
13733 }
13734
13735 /* Return whether the sub-tree ST contains a label which is accessible from
13736 outside the sub-tree. */
13737
13738 static bool
13739 contains_label_p (tree st)
13740 {
13741 return
13742 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13743 }
13744
13745 /* Fold a ternary expression of code CODE and type TYPE with operands
13746 OP0, OP1, and OP2. Return the folded expression if folding is
13747 successful. Otherwise, return NULL_TREE. */
13748
13749 tree
13750 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13751 tree op0, tree op1, tree op2)
13752 {
13753 tree tem;
13754 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13755 enum tree_code_class kind = TREE_CODE_CLASS (code);
13756
13757 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13758 && TREE_CODE_LENGTH (code) == 3);
13759
13760 /* If this is a commutative operation, and OP0 is a constant, move it
13761 to OP1 to reduce the number of tests below. */
13762 if (commutative_ternary_tree_code (code)
13763 && tree_swap_operands_p (op0, op1, true))
13764 return fold_build3_loc (loc, code, type, op1, op0, op2);
13765
13766 tem = generic_simplify (loc, code, type, op0, op1, op2);
13767 if (tem)
13768 return tem;
13769
13770 /* Strip any conversions that don't change the mode. This is safe
13771 for every expression, except for a comparison expression because
13772 its signedness is derived from its operands. So, in the latter
13773 case, only strip conversions that don't change the signedness.
13774
13775 Note that this is done as an internal manipulation within the
13776 constant folder, in order to find the simplest representation of
13777 the arguments so that their form can be studied. In any case,
13778 the appropriate type conversions should be put back in the tree
13779 that will get out of the constant folder. */
13780 if (op0)
13781 {
13782 arg0 = op0;
13783 STRIP_NOPS (arg0);
13784 }
13785
13786 if (op1)
13787 {
13788 arg1 = op1;
13789 STRIP_NOPS (arg1);
13790 }
13791
13792 if (op2)
13793 {
13794 arg2 = op2;
13795 STRIP_NOPS (arg2);
13796 }
13797
13798 switch (code)
13799 {
13800 case COMPONENT_REF:
13801 if (TREE_CODE (arg0) == CONSTRUCTOR
13802 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13803 {
13804 unsigned HOST_WIDE_INT idx;
13805 tree field, value;
13806 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13807 if (field == arg1)
13808 return value;
13809 }
13810 return NULL_TREE;
13811
13812 case COND_EXPR:
13813 case VEC_COND_EXPR:
13814 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13815 so all simple results must be passed through pedantic_non_lvalue. */
13816 if (TREE_CODE (arg0) == INTEGER_CST)
13817 {
13818 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13819 tem = integer_zerop (arg0) ? op2 : op1;
13820 /* Only optimize constant conditions when the selected branch
13821 has the same type as the COND_EXPR. This avoids optimizing
13822 away "c ? x : throw", where the throw has a void type.
13823 Avoid throwing away the operand that contains a label. */
13824 if ((!TREE_SIDE_EFFECTS (unused_op)
13825 || !contains_label_p (unused_op))
13826 && (! VOID_TYPE_P (TREE_TYPE (tem))
13827 || VOID_TYPE_P (type)))
13828 return pedantic_non_lvalue_loc (loc, tem);
13829 return NULL_TREE;
13830 }
13831 else if (TREE_CODE (arg0) == VECTOR_CST)
13832 {
13833 if (integer_all_onesp (arg0))
13834 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13835 if (integer_zerop (arg0))
13836 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13837
13838 if ((TREE_CODE (arg1) == VECTOR_CST
13839 || TREE_CODE (arg1) == CONSTRUCTOR)
13840 && (TREE_CODE (arg2) == VECTOR_CST
13841 || TREE_CODE (arg2) == CONSTRUCTOR))
13842 {
13843 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13844 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13845 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13846 for (i = 0; i < nelts; i++)
13847 {
13848 tree val = VECTOR_CST_ELT (arg0, i);
13849 if (integer_all_onesp (val))
13850 sel[i] = i;
13851 else if (integer_zerop (val))
13852 sel[i] = nelts + i;
13853 else /* Currently unreachable. */
13854 return NULL_TREE;
13855 }
13856 tree t = fold_vec_perm (type, arg1, arg2, sel);
13857 if (t != NULL_TREE)
13858 return t;
13859 }
13860 }
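/* E.g. (illustrative, 4-element vectors): a constant mask of
   { -1, 0, -1, 0 } produces the selector { 0, 5, 2, 7 }; element i is
   taken from arg1 where the mask element is all ones and from arg2
   where it is zero, and the VEC_COND_EXPR folds via fold_vec_perm.  */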
13861
13862 if (operand_equal_p (arg1, op2, 0))
13863 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13864
13865 /* If we have A op B ? A : C, we may be able to convert this to a
13866 simpler expression, depending on the operation and the values
13867 of B and C. Signed zeros prevent all of these transformations,
13868 for reasons given above each one.
13869
13870 Also try swapping the arguments and inverting the conditional. */
13871 if (COMPARISON_CLASS_P (arg0)
13872 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13873 arg1, TREE_OPERAND (arg0, 1))
13874 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13875 {
13876 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13877 if (tem)
13878 return tem;
13879 }
13880
13881 if (COMPARISON_CLASS_P (arg0)
13882 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13883 op2,
13884 TREE_OPERAND (arg0, 1))
13885 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13886 {
13887 location_t loc0 = expr_location_or (arg0, loc);
13888 tem = fold_invert_truthvalue (loc0, arg0);
13889 if (tem && COMPARISON_CLASS_P (tem))
13890 {
13891 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13892 if (tem)
13893 return tem;
13894 }
13895 }
13896
13897 /* If the second operand is simpler than the third, swap them
13898 since that produces better jump optimization results. */
13899 if (truth_value_p (TREE_CODE (arg0))
13900 && tree_swap_operands_p (op1, op2, false))
13901 {
13902 location_t loc0 = expr_location_or (arg0, loc);
13903 /* See if this can be inverted. If it can't, possibly because
13904 it was a floating-point inequality comparison, don't do
13905 anything. */
13906 tem = fold_invert_truthvalue (loc0, arg0);
13907 if (tem)
13908 return fold_build3_loc (loc, code, type, tem, op2, op1);
13909 }
13910
13911 /* Convert A ? 1 : 0 to simply A. */
13912 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13913 : (integer_onep (op1)
13914 && !VECTOR_TYPE_P (type)))
13915 && integer_zerop (op2)
13916 /* If we try to convert OP0 to our type, the
13917 call to fold will try to move the conversion inside
13918 a COND, which will recurse. In that case, the COND_EXPR
13919 is probably the best choice, so leave it alone. */
13920 && type == TREE_TYPE (arg0))
13921 return pedantic_non_lvalue_loc (loc, arg0);
13922
13923 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13924 over COND_EXPR in cases such as floating point comparisons. */
13925 if (integer_zerop (op1)
13926 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13927 : (integer_onep (op2)
13928 && !VECTOR_TYPE_P (type)))
13929 && truth_value_p (TREE_CODE (arg0)))
13930 return pedantic_non_lvalue_loc (loc,
13931 fold_convert_loc (loc, type,
13932 invert_truthvalue_loc (loc,
13933 arg0)));
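/* Illustrative instances of the two folds above:
       (x > y) ? 1 : 0  ->  x > y
       (x > y) ? 0 : 1  ->  x <= y, via invert_truthvalue_loc, when
   the comparison can be inverted safely.  */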
13934
13935 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13936 if (TREE_CODE (arg0) == LT_EXPR
13937 && integer_zerop (TREE_OPERAND (arg0, 1))
13938 && integer_zerop (op2)
13939 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13940 {
13941 /* sign_bit_p looks through both zero and sign extensions,
13942 but for this optimization only sign extensions are
13943 usable. */
13944 tree tem2 = TREE_OPERAND (arg0, 0);
13945 while (tem != tem2)
13946 {
13947 if (TREE_CODE (tem2) != NOP_EXPR
13948 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13949 {
13950 tem = NULL_TREE;
13951 break;
13952 }
13953 tem2 = TREE_OPERAND (tem2, 0);
13954 }
13955 /* sign_bit_p only checks ARG1 bits within A's precision.
13956 If <sign bit of A> has wider type than A, bits outside
13957 of A's precision in <sign bit of A> need to be checked.
13958 If they are all 0, this optimization needs to be done
13959 in A's unsigned type; if they are all 1, in A's signed type;
13960 otherwise it can't be done. */
13961 if (tem
13962 && TYPE_PRECISION (TREE_TYPE (tem))
13963 < TYPE_PRECISION (TREE_TYPE (arg1))
13964 && TYPE_PRECISION (TREE_TYPE (tem))
13965 < TYPE_PRECISION (type))
13966 {
13967 int inner_width, outer_width;
13968 tree tem_type;
13969
13970 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13971 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13972 if (outer_width > TYPE_PRECISION (type))
13973 outer_width = TYPE_PRECISION (type);
13974
13975 wide_int mask = wi::shifted_mask
13976 (inner_width, outer_width - inner_width, false,
13977 TYPE_PRECISION (TREE_TYPE (arg1)));
13978
13979 wide_int common = mask & arg1;
13980 if (common == mask)
13981 {
13982 tem_type = signed_type_for (TREE_TYPE (tem));
13983 tem = fold_convert_loc (loc, tem_type, tem);
13984 }
13985 else if (common == 0)
13986 {
13987 tem_type = unsigned_type_for (TREE_TYPE (tem));
13988 tem = fold_convert_loc (loc, tem_type, tem);
13989 }
13990 else
13991 tem = NULL;
13992 }
13993
13994 if (tem)
13995 return
13996 fold_convert_loc (loc, type,
13997 fold_build2_loc (loc, BIT_AND_EXPR,
13998 TREE_TYPE (tem), tem,
13999 fold_convert_loc (loc,
14000 TREE_TYPE (tem),
14001 arg1)));
14002 }
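/* E.g. (illustrative, 32-bit int a):
       a < 0 ? INT_MIN : 0   folds to   a & INT_MIN
   once any intervening conversions are known to be sign extensions and
   the out-of-precision mask bits are consistent.  */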
14003
14004 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14005 already handled above. */
14006 if (TREE_CODE (arg0) == BIT_AND_EXPR
14007 && integer_onep (TREE_OPERAND (arg0, 1))
14008 && integer_zerop (op2)
14009 && integer_pow2p (arg1))
14010 {
14011 tree tem = TREE_OPERAND (arg0, 0);
14012 STRIP_NOPS (tem);
14013 if (TREE_CODE (tem) == RSHIFT_EXPR
14014 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14015 && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
14016 == tree_to_uhwi (TREE_OPERAND (tem, 1))))
14017 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14018 TREE_OPERAND (tem, 0), arg1);
14019 }
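/* E.g. (illustrative): ((a >> 3) & 1) ? 8 : 0 folds to a & 8, because
   tree_log2 (8) == 3 matches the shift count.  */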
14020
14021 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14022 is probably obsolete because the first operand should be a
14023 truth value (that's why we have the two cases above), but let's
14024 leave it in until we can confirm this for all front-ends. */
14025 if (integer_zerop (op2)
14026 && TREE_CODE (arg0) == NE_EXPR
14027 && integer_zerop (TREE_OPERAND (arg0, 1))
14028 && integer_pow2p (arg1)
14029 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14030 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14031 arg1, OEP_ONLY_CONST))
14032 return pedantic_non_lvalue_loc (loc,
14033 fold_convert_loc (loc, type,
14034 TREE_OPERAND (arg0, 0)));
14035
14036 /* Disable the transformations below for vectors, since
14037 fold_binary_op_with_conditional_arg may undo them immediately,
14038 yielding an infinite loop. */
14039 if (code == VEC_COND_EXPR)
14040 return NULL_TREE;
14041
14042 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14043 if (integer_zerop (op2)
14044 && truth_value_p (TREE_CODE (arg0))
14045 && truth_value_p (TREE_CODE (arg1))
14046 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14047 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14048 : TRUTH_ANDIF_EXPR,
14049 type, fold_convert_loc (loc, type, arg0), arg1);
14050
14051 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14052 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14053 && truth_value_p (TREE_CODE (arg0))
14054 && truth_value_p (TREE_CODE (arg1))
14055 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14056 {
14057 location_t loc0 = expr_location_or (arg0, loc);
14058 /* Only perform transformation if ARG0 is easily inverted. */
14059 tem = fold_invert_truthvalue (loc0, arg0);
14060 if (tem)
14061 return fold_build2_loc (loc, code == VEC_COND_EXPR
14062 ? BIT_IOR_EXPR
14063 : TRUTH_ORIF_EXPR,
14064 type, fold_convert_loc (loc, type, tem),
14065 arg1);
14066 }
14067
14068 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14069 if (integer_zerop (arg1)
14070 && truth_value_p (TREE_CODE (arg0))
14071 && truth_value_p (TREE_CODE (op2))
14072 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14073 {
14074 location_t loc0 = expr_location_or (arg0, loc);
14075 /* Only perform transformation if ARG0 is easily inverted. */
14076 tem = fold_invert_truthvalue (loc0, arg0);
14077 if (tem)
14078 return fold_build2_loc (loc, code == VEC_COND_EXPR
14079 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14080 type, fold_convert_loc (loc, type, tem),
14081 op2);
14082 }
14083
14084 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14085 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14086 && truth_value_p (TREE_CODE (arg0))
14087 && truth_value_p (TREE_CODE (op2))
14088 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14089 return fold_build2_loc (loc, code == VEC_COND_EXPR
14090 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14091 type, fold_convert_loc (loc, type, arg0), op2);
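/* Recap of the four truth-value folds above (A, B truth values; the
   VEC_COND_EXPR variants use BIT_AND_EXPR/BIT_IOR_EXPR instead):
       A ? B : 0  ->   A && B          A ? B : 1  ->  !A || B
       A ? 0 : B  ->  !A && B          A ? 1 : B  ->   A || B  */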
14092
14093 return NULL_TREE;
14094
14095 case CALL_EXPR:
14096 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14097 of fold_ternary on them. */
14098 gcc_unreachable ();
14099
14100 case BIT_FIELD_REF:
14101 if ((TREE_CODE (arg0) == VECTOR_CST
14102 || (TREE_CODE (arg0) == CONSTRUCTOR
14103 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14104 && (type == TREE_TYPE (TREE_TYPE (arg0))
14105 || (TREE_CODE (type) == VECTOR_TYPE
14106 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14107 {
14108 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14109 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14110 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14111 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14112
14113 if (n != 0
14114 && (idx % width) == 0
14115 && (n % width) == 0
14116 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14117 {
14118 idx = idx / width;
14119 n = n / width;
14120
14121 if (TREE_CODE (arg0) == VECTOR_CST)
14122 {
14123 if (n == 1)
14124 return VECTOR_CST_ELT (arg0, idx);
14125
14126 tree *vals = XALLOCAVEC (tree, n);
14127 for (unsigned i = 0; i < n; ++i)
14128 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14129 return build_vector (type, vals);
14130 }
14131
14132 /* Constructor elements can be subvectors. */
14133 unsigned HOST_WIDE_INT k = 1;
14134 if (CONSTRUCTOR_NELTS (arg0) != 0)
14135 {
14136 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14137 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14138 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14139 }
14140
14141 /* We keep an exact subset of the constructor elements. */
14142 if ((idx % k) == 0 && (n % k) == 0)
14143 {
14144 if (CONSTRUCTOR_NELTS (arg0) == 0)
14145 return build_constructor (type, NULL);
14146 idx /= k;
14147 n /= k;
14148 if (n == 1)
14149 {
14150 if (idx < CONSTRUCTOR_NELTS (arg0))
14151 return CONSTRUCTOR_ELT (arg0, idx)->value;
14152 return build_zero_cst (type);
14153 }
14154
14155 vec<constructor_elt, va_gc> *vals;
14156 vec_alloc (vals, n);
14157 for (unsigned i = 0;
14158 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14159 ++i)
14160 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14161 CONSTRUCTOR_ELT
14162 (arg0, idx + i)->value);
14163 return build_constructor (type, vals);
14164 }
14165 /* The bitfield references a single constructor element. */
14166 else if (idx + n <= (idx / k + 1) * k)
14167 {
14168 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14169 return build_zero_cst (type);
14170 else if (n == k)
14171 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14172 else
14173 return fold_build3_loc (loc, code, type,
14174 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14175 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14176 }
14177 }
14178 }
14179
14180 /* A bit-field-ref that references the full argument can be stripped. */
14181 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14182 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14183 && integer_zerop (op2))
14184 return fold_convert_loc (loc, type, arg0);
14185
14186 /* On constants we can use native encode/interpret to constant
14187 fold (nearly) all BIT_FIELD_REFs. */
14188 if (CONSTANT_CLASS_P (arg0)
14189 && can_native_interpret_type_p (type)
14190 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14191 /* This limitation should not be necessary; we just need to
14192 round this up to mode size. */
14193 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14194 /* Need bit-shifting of the buffer to relax the following. */
14195 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14196 {
14197 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14198 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14199 unsigned HOST_WIDE_INT clen;
14200 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14201 /* ??? We cannot tell native_encode_expr to start at
14202 some random byte only. So limit ourselves to a reasonable amount
14203 of work. */
14204 if (clen <= 4096)
14205 {
14206 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14207 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14208 if (len > 0
14209 && len * BITS_PER_UNIT >= bitpos + bitsize)
14210 {
14211 tree v = native_interpret_expr (type,
14212 b + bitpos / BITS_PER_UNIT,
14213 bitsize / BITS_PER_UNIT);
14214 if (v)
14215 return v;
14216 }
14217 }
14218 }
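/* Sketch of the mechanism above (illustrative): the constant operand is
   serialized into a byte buffer with native_encode_expr and the
   addressed bits are re-read as TYPE with native_interpret_expr; e.g.
   the second float of a V4SF VECTOR_CST is recovered from bytes 4-7.  */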
14219
14220 return NULL_TREE;
14221
14222 case FMA_EXPR:
14223 /* For integers we can decompose the FMA if possible. */
14224 if (TREE_CODE (arg0) == INTEGER_CST
14225 && TREE_CODE (arg1) == INTEGER_CST)
14226 return fold_build2_loc (loc, PLUS_EXPR, type,
14227 const_binop (MULT_EXPR, arg0, arg1), arg2);
14228 if (integer_zerop (arg2))
14229 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14230
14231 return fold_fma (loc, type, arg0, arg1, arg2);
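/* E.g. (illustrative): FMA_EXPR <5, 7, c> folds to 35 + c here, and
   FMA_EXPR <a, b, 0> degenerates to a * b; everything else is left to
   fold_fma.  */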
14232
14233 case VEC_PERM_EXPR:
14234 if (TREE_CODE (arg2) == VECTOR_CST)
14235 {
14236 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14237 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14238 bool need_mask_canon = false;
14239 bool all_in_vec0 = true;
14240 bool all_in_vec1 = true;
14241 bool maybe_identity = true;
14242 bool single_arg = (op0 == op1);
14243 bool changed = false;
14244
14245 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14246 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14247 for (i = 0; i < nelts; i++)
14248 {
14249 tree val = VECTOR_CST_ELT (arg2, i);
14250 if (TREE_CODE (val) != INTEGER_CST)
14251 return NULL_TREE;
14252
14253 /* Make sure that the perm value is in an acceptable
14254 range. */
14255 wide_int t = val;
14256 if (wi::gtu_p (t, mask))
14257 {
14258 need_mask_canon = true;
14259 sel[i] = t.to_uhwi () & mask;
14260 }
14261 else
14262 sel[i] = t.to_uhwi ();
14263
14264 if (sel[i] < nelts)
14265 all_in_vec1 = false;
14266 else
14267 all_in_vec0 = false;
14268
14269 if ((sel[i] & (nelts-1)) != i)
14270 maybe_identity = false;
14271 }
14272
14273 if (maybe_identity)
14274 {
14275 if (all_in_vec0)
14276 return op0;
14277 if (all_in_vec1)
14278 return op1;
14279 }
14280
14281 if (all_in_vec0)
14282 op1 = op0;
14283 else if (all_in_vec1)
14284 {
14285 op0 = op1;
14286 for (i = 0; i < nelts; i++)
14287 sel[i] -= nelts;
14288 need_mask_canon = true;
14289 }
14290
14291 if ((TREE_CODE (op0) == VECTOR_CST
14292 || TREE_CODE (op0) == CONSTRUCTOR)
14293 && (TREE_CODE (op1) == VECTOR_CST
14294 || TREE_CODE (op1) == CONSTRUCTOR))
14295 {
14296 tree t = fold_vec_perm (type, op0, op1, sel);
14297 if (t != NULL_TREE)
14298 return t;
14299 }
14300
14301 if (op0 == op1 && !single_arg)
14302 changed = true;
14303
14304 if (need_mask_canon && arg2 == op2)
14305 {
14306 tree *tsel = XALLOCAVEC (tree, nelts);
14307 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14308 for (i = 0; i < nelts; i++)
14309 tsel[i] = build_int_cst (eltype, sel[i]);
14310 op2 = build_vector (TREE_TYPE (arg2), tsel);
14311 changed = true;
14312 }
14313
14314 if (changed)
14315 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14316 }
14317 return NULL_TREE;
14318
14319 default:
14320 return NULL_TREE;
14321 } /* switch (code) */
14322 }
14323
14324 /* Perform constant folding and related simplification of EXPR.
14325 The related simplifications include x*1 => x, x*0 => 0, etc.,
14326 and application of the associative law.
14327 NOP_EXPR conversions may be removed freely (as long as we
14328 are careful not to change the type of the overall expression).
14329 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14330 but we can constant-fold them if they have constant operands. */
14331
14332 #ifdef ENABLE_FOLD_CHECKING
14333 # define fold(x) fold_1 (x)
14334 static tree fold_1 (tree);
14335 static
14336 #endif
14337 tree
14338 fold (tree expr)
14339 {
14340 const tree t = expr;
14341 enum tree_code code = TREE_CODE (t);
14342 enum tree_code_class kind = TREE_CODE_CLASS (code);
14343 tree tem;
14344 location_t loc = EXPR_LOCATION (expr);
14345
14346 /* Return right away if a constant. */
14347 if (kind == tcc_constant)
14348 return t;
14349
14350 /* CALL_EXPR-like objects with variable numbers of operands are
14351 treated specially. */
14352 if (kind == tcc_vl_exp)
14353 {
14354 if (code == CALL_EXPR)
14355 {
14356 tem = fold_call_expr (loc, expr, false);
14357 return tem ? tem : expr;
14358 }
14359 return expr;
14360 }
14361
14362 if (IS_EXPR_CODE_CLASS (kind))
14363 {
14364 tree type = TREE_TYPE (t);
14365 tree op0, op1, op2;
14366
14367 switch (TREE_CODE_LENGTH (code))
14368 {
14369 case 1:
14370 op0 = TREE_OPERAND (t, 0);
14371 tem = fold_unary_loc (loc, code, type, op0);
14372 return tem ? tem : expr;
14373 case 2:
14374 op0 = TREE_OPERAND (t, 0);
14375 op1 = TREE_OPERAND (t, 1);
14376 tem = fold_binary_loc (loc, code, type, op0, op1);
14377 return tem ? tem : expr;
14378 case 3:
14379 op0 = TREE_OPERAND (t, 0);
14380 op1 = TREE_OPERAND (t, 1);
14381 op2 = TREE_OPERAND (t, 2);
14382 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14383 return tem ? tem : expr;
14384 default:
14385 break;
14386 }
14387 }
14388
14389 switch (code)
14390 {
14391 case ARRAY_REF:
14392 {
14393 tree op0 = TREE_OPERAND (t, 0);
14394 tree op1 = TREE_OPERAND (t, 1);
14395
14396 if (TREE_CODE (op1) == INTEGER_CST
14397 && TREE_CODE (op0) == CONSTRUCTOR
14398 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14399 {
14400 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14401 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14402 unsigned HOST_WIDE_INT begin = 0;
14403
14404 /* Find a matching index by means of a binary search. */
14405 while (begin != end)
14406 {
14407 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14408 tree index = (*elts)[middle].index;
14409
14410 if (TREE_CODE (index) == INTEGER_CST
14411 && tree_int_cst_lt (index, op1))
14412 begin = middle + 1;
14413 else if (TREE_CODE (index) == INTEGER_CST
14414 && tree_int_cst_lt (op1, index))
14415 end = middle;
14416 else if (TREE_CODE (index) == RANGE_EXPR
14417 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14418 begin = middle + 1;
14419 else if (TREE_CODE (index) == RANGE_EXPR
14420 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14421 end = middle;
14422 else
14423 return (*elts)[middle].value;
14424 }
14425 }
14426
14427 return t;
14428 }
14429
14430 /* Return a VECTOR_CST if possible. */
14431 case CONSTRUCTOR:
14432 {
14433 tree type = TREE_TYPE (t);
14434 if (TREE_CODE (type) != VECTOR_TYPE)
14435 return t;
14436
14437 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14438 unsigned HOST_WIDE_INT idx, pos = 0;
14439 tree value;
14440
14441 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14442 {
14443 if (!CONSTANT_CLASS_P (value))
14444 return t;
14445 if (TREE_CODE (value) == VECTOR_CST)
14446 {
14447 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14448 vec[pos++] = VECTOR_CST_ELT (value, i);
14449 }
14450 else
14451 vec[pos++] = value;
14452 }
14453 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14454 vec[pos] = build_zero_cst (TREE_TYPE (type));
14455
14456 return build_vector (type, vec);
14457 }
14458
14459 case CONST_DECL:
14460 return fold (DECL_INITIAL (t));
14461
14462 default:
14463 return t;
14464 } /* switch (code) */
14465 }
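/* A minimal usage sketch (illustrative; assumes GENERIC trees built
   elsewhere):

       tree sum = build2 (PLUS_EXPR, integer_type_node,
                          build_int_cst (integer_type_node, 1),
                          build_int_cst (integer_type_node, 2));
       tree folded = fold (sum);

   yields an INTEGER_CST of value 3.  fold dispatches on
   TREE_CODE_LENGTH to fold_unary_loc, fold_binary_loc or
   fold_ternary_loc, and hands back EXPR unchanged when no
   simplification applies.  */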
14466
14467 #ifdef ENABLE_FOLD_CHECKING
14468 #undef fold
14469
14470 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14471 hash_table<pointer_hash<const tree_node> > *);
14472 static void fold_check_failed (const_tree, const_tree);
14473 void print_fold_checksum (const_tree);
14474
14475 /* When --enable-checking=fold, compute a digest of EXPR before
14476 and after the actual fold call, to verify that fold did not
14477 accidentally change the original expr. */
14478
14479 tree
14480 fold (tree expr)
14481 {
14482 tree ret;
14483 struct md5_ctx ctx;
14484 unsigned char checksum_before[16], checksum_after[16];
14485 hash_table<pointer_hash<const tree_node> > ht (32);
14486
14487 md5_init_ctx (&ctx);
14488 fold_checksum_tree (expr, &ctx, &ht);
14489 md5_finish_ctx (&ctx, checksum_before);
14490 ht.empty ();
14491
14492 ret = fold_1 (expr);
14493
14494 md5_init_ctx (&ctx);
14495 fold_checksum_tree (expr, &ctx, &ht);
14496 md5_finish_ctx (&ctx, checksum_after);
14497
14498 if (memcmp (checksum_before, checksum_after, 16))
14499 fold_check_failed (expr, ret);
14500
14501 return ret;
14502 }
14503
14504 void
14505 print_fold_checksum (const_tree expr)
14506 {
14507 struct md5_ctx ctx;
14508 unsigned char checksum[16], cnt;
14509 hash_table<pointer_hash<const tree_node> > ht (32);
14510
14511 md5_init_ctx (&ctx);
14512 fold_checksum_tree (expr, &ctx, &ht);
14513 md5_finish_ctx (&ctx, checksum);
14514 for (cnt = 0; cnt < 16; ++cnt)
14515 fprintf (stderr, "%02x", checksum[cnt]);
14516 putc ('\n', stderr);
14517 }
14518
14519 static void
14520 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14521 {
14522 internal_error ("fold check: original tree changed by fold");
14523 }
14524
14525 static void
14526 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14527 hash_table<pointer_hash <const tree_node> > *ht)
14528 {
14529 const tree_node **slot;
14530 enum tree_code code;
14531 union tree_node buf;
14532 int i, len;
14533
14534 recursive_label:
14535 if (expr == NULL)
14536 return;
14537 slot = ht->find_slot (expr, INSERT);
14538 if (*slot != NULL)
14539 return;
14540 *slot = expr;
14541 code = TREE_CODE (expr);
14542 if (TREE_CODE_CLASS (code) == tcc_declaration
14543 && DECL_ASSEMBLER_NAME_SET_P (expr))
14544 {
14545 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14546 memcpy ((char *) &buf, expr, tree_size (expr));
14547 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14548 expr = (tree) &buf;
14549 }
14550 else if (TREE_CODE_CLASS (code) == tcc_type
14551 && (TYPE_POINTER_TO (expr)
14552 || TYPE_REFERENCE_TO (expr)
14553 || TYPE_CACHED_VALUES_P (expr)
14554 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14555 || TYPE_NEXT_VARIANT (expr)))
14556 {
14557 /* Allow these fields to be modified. */
14558 tree tmp;
14559 memcpy ((char *) &buf, expr, tree_size (expr));
14560 expr = tmp = (tree) &buf;
14561 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14562 TYPE_POINTER_TO (tmp) = NULL;
14563 TYPE_REFERENCE_TO (tmp) = NULL;
14564 TYPE_NEXT_VARIANT (tmp) = NULL;
14565 if (TYPE_CACHED_VALUES_P (tmp))
14566 {
14567 TYPE_CACHED_VALUES_P (tmp) = 0;
14568 TYPE_CACHED_VALUES (tmp) = NULL;
14569 }
14570 }
14571 md5_process_bytes (expr, tree_size (expr), ctx);
14572 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14573 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14574 if (TREE_CODE_CLASS (code) != tcc_type
14575 && TREE_CODE_CLASS (code) != tcc_declaration
14576 && code != TREE_LIST
14577 && code != SSA_NAME
14578 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14579 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14580 switch (TREE_CODE_CLASS (code))
14581 {
14582 case tcc_constant:
14583 switch (code)
14584 {
14585 case STRING_CST:
14586 md5_process_bytes (TREE_STRING_POINTER (expr),
14587 TREE_STRING_LENGTH (expr), ctx);
14588 break;
14589 case COMPLEX_CST:
14590 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14591 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14592 break;
14593 case VECTOR_CST:
14594 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14595 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14596 break;
14597 default:
14598 break;
14599 }
14600 break;
14601 case tcc_exceptional:
14602 switch (code)
14603 {
14604 case TREE_LIST:
14605 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14606 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14607 expr = TREE_CHAIN (expr);
14608 goto recursive_label;
14609 break;
14610 case TREE_VEC:
14611 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14612 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14613 break;
14614 default:
14615 break;
14616 }
14617 break;
14618 case tcc_expression:
14619 case tcc_reference:
14620 case tcc_comparison:
14621 case tcc_unary:
14622 case tcc_binary:
14623 case tcc_statement:
14624 case tcc_vl_exp:
14625 len = TREE_OPERAND_LENGTH (expr);
14626 for (i = 0; i < len; ++i)
14627 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14628 break;
14629 case tcc_declaration:
14630 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14631 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14632 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14633 {
14634 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14635 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14636 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14637 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14638 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14639 }
14640
14641 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14642 {
14643 if (TREE_CODE (expr) == FUNCTION_DECL)
14644 {
14645 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14646 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14647 }
14648 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14649 }
14650 break;
14651 case tcc_type:
14652 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14653 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14654 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14655 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14656 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14657 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14658 if (INTEGRAL_TYPE_P (expr)
14659 || SCALAR_FLOAT_TYPE_P (expr))
14660 {
14661 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14662 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14663 }
14664 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14665 if (TREE_CODE (expr) == RECORD_TYPE
14666 || TREE_CODE (expr) == UNION_TYPE
14667 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14668 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14669 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14670 break;
14671 default:
14672 break;
14673 }
14674 }
14675
14676 /* Helper function for outputting the checksum of a tree T. When
14677 debugging with gdb, you can "define mynext" to be "next" followed
14678 by "call debug_fold_checksum (op0)", then just trace down till the
14679 outputs differ. */
14680
14681 DEBUG_FUNCTION void
14682 debug_fold_checksum (const_tree t)
14683 {
14684 int i;
14685 unsigned char checksum[16];
14686 struct md5_ctx ctx;
14687 hash_table<pointer_hash<const tree_node> > ht (32);
14688
14689 md5_init_ctx (&ctx);
14690 fold_checksum_tree (t, &ctx, &ht);
14691 md5_finish_ctx (&ctx, checksum);
14692 ht.empty ();
14693
14694 for (i = 0; i < 16; i++)
14695 fprintf (stderr, "%d ", checksum[i]);
14696
14697 fprintf (stderr, "\n");
14698 }
14699
14700 #endif
14701
14702 /* Fold a unary tree expression with code CODE of type TYPE with an
14703 operand OP0. LOC is the location of the resulting expression.
14704 Return a folded expression if successful. Otherwise, return a tree
14705 expression with code CODE of type TYPE with an operand OP0. */
14706
14707 tree
14708 fold_build1_stat_loc (location_t loc,
14709 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14710 {
14711 tree tem;
14712 #ifdef ENABLE_FOLD_CHECKING
14713 unsigned char checksum_before[16], checksum_after[16];
14714 struct md5_ctx ctx;
14715 hash_table<pointer_hash<const tree_node> > ht (32);
14716
14717 md5_init_ctx (&ctx);
14718 fold_checksum_tree (op0, &ctx, &ht);
14719 md5_finish_ctx (&ctx, checksum_before);
14720 ht.empty ();
14721 #endif
14722
14723 tem = fold_unary_loc (loc, code, type, op0);
14724 if (!tem)
14725 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14726
14727 #ifdef ENABLE_FOLD_CHECKING
14728 md5_init_ctx (&ctx);
14729 fold_checksum_tree (op0, &ctx, &ht);
14730 md5_finish_ctx (&ctx, checksum_after);
14731
14732 if (memcmp (checksum_before, checksum_after, 16))
14733 fold_check_failed (op0, tem);
14734 #endif
14735 return tem;
14736 }
14737
14738 /* Fold a binary tree expression with code CODE of type TYPE with
14739 operands OP0 and OP1. LOC is the location of the resulting
14740 expression. Return a folded expression if successful. Otherwise,
14741 return a tree expression with code CODE of type TYPE with operands
14742 OP0 and OP1. */
14743
14744 tree
14745 fold_build2_stat_loc (location_t loc,
14746 enum tree_code code, tree type, tree op0, tree op1
14747 MEM_STAT_DECL)
14748 {
14749 tree tem;
14750 #ifdef ENABLE_FOLD_CHECKING
14751 unsigned char checksum_before_op0[16],
14752 checksum_before_op1[16],
14753 checksum_after_op0[16],
14754 checksum_after_op1[16];
14755 struct md5_ctx ctx;
14756 hash_table<pointer_hash<const tree_node> > ht (32);
14757
14758 md5_init_ctx (&ctx);
14759 fold_checksum_tree (op0, &ctx, &ht);
14760 md5_finish_ctx (&ctx, checksum_before_op0);
14761 ht.empty ();
14762
14763 md5_init_ctx (&ctx);
14764 fold_checksum_tree (op1, &ctx, &ht);
14765 md5_finish_ctx (&ctx, checksum_before_op1);
14766 ht.empty ();
14767 #endif
14768
14769 tem = fold_binary_loc (loc, code, type, op0, op1);
14770 if (!tem)
14771 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14772
14773 #ifdef ENABLE_FOLD_CHECKING
14774 md5_init_ctx (&ctx);
14775 fold_checksum_tree (op0, &ctx, &ht);
14776 md5_finish_ctx (&ctx, checksum_after_op0);
14777 ht.empty ();
14778
14779 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14780 fold_check_failed (op0, tem);
14781
14782 md5_init_ctx (&ctx);
14783 fold_checksum_tree (op1, &ctx, &ht);
14784 md5_finish_ctx (&ctx, checksum_after_op1);
14785
14786 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14787 fold_check_failed (op1, tem);
14788 #endif
14789 return tem;
14790 }
14791
14792 /* Fold a ternary tree expression with code CODE of type TYPE with
14793 operands OP0, OP1, and OP2. Return a folded expression if
14794 successful. Otherwise, return a tree expression with code CODE of
14795 type TYPE with operands OP0, OP1, and OP2. */
14796
14797 tree
14798 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14799 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14800 {
14801 tree tem;
14802 #ifdef ENABLE_FOLD_CHECKING
14803 unsigned char checksum_before_op0[16],
14804 checksum_before_op1[16],
14805 checksum_before_op2[16],
14806 checksum_after_op0[16],
14807 checksum_after_op1[16],
14808 checksum_after_op2[16];
14809 struct md5_ctx ctx;
14810 hash_table<pointer_hash<const tree_node> > ht (32);
14811
14812 md5_init_ctx (&ctx);
14813 fold_checksum_tree (op0, &ctx, &ht);
14814 md5_finish_ctx (&ctx, checksum_before_op0);
14815 ht.empty ();
14816
14817 md5_init_ctx (&ctx);
14818 fold_checksum_tree (op1, &ctx, &ht);
14819 md5_finish_ctx (&ctx, checksum_before_op1);
14820 ht.empty ();
14821
14822 md5_init_ctx (&ctx);
14823 fold_checksum_tree (op2, &ctx, &ht);
14824 md5_finish_ctx (&ctx, checksum_before_op2);
14825 ht.empty ();
14826 #endif
14827
14828 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14829 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14830 if (!tem)
14831 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14832
14833 #ifdef ENABLE_FOLD_CHECKING
14834 md5_init_ctx (&ctx);
14835 fold_checksum_tree (op0, &ctx, &ht);
14836 md5_finish_ctx (&ctx, checksum_after_op0);
14837 ht.empty ();
14838
14839 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14840 fold_check_failed (op0, tem);
14841
14842 md5_init_ctx (&ctx);
14843 fold_checksum_tree (op1, &ctx, &ht);
14844 md5_finish_ctx (&ctx, checksum_after_op1);
14845 ht.empty ();
14846
14847 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14848 fold_check_failed (op1, tem);
14849
14850 md5_init_ctx (&ctx);
14851 fold_checksum_tree (op2, &ctx, &ht);
14852 md5_finish_ctx (&ctx, checksum_after_op2);
14853
14854 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14855 fold_check_failed (op2, tem);
14856 #endif
14857 return tem;
14858 }
14859
14860 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
14861 arguments in ARGARRAY, and a null static chain.
14862 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14863 of type TYPE from the given operands as constructed by build_call_array. */
14864
14865 tree
14866 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14867 int nargs, tree *argarray)
14868 {
14869 tree tem;
14870 #ifdef ENABLE_FOLD_CHECKING
14871 unsigned char checksum_before_fn[16],
14872 checksum_before_arglist[16],
14873 checksum_after_fn[16],
14874 checksum_after_arglist[16];
14875 struct md5_ctx ctx;
14876 hash_table<pointer_hash<const tree_node> > ht (32);
14877 int i;
14878
14879 md5_init_ctx (&ctx);
14880 fold_checksum_tree (fn, &ctx, &ht);
14881 md5_finish_ctx (&ctx, checksum_before_fn);
14882 ht.empty ();
14883
14884 md5_init_ctx (&ctx);
14885 for (i = 0; i < nargs; i++)
14886 fold_checksum_tree (argarray[i], &ctx, &ht);
14887 md5_finish_ctx (&ctx, checksum_before_arglist);
14888 ht.empty ();
14889 #endif
14890
14891 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14892
14893 #ifdef ENABLE_FOLD_CHECKING
14894 md5_init_ctx (&ctx);
14895 fold_checksum_tree (fn, &ctx, &ht);
14896 md5_finish_ctx (&ctx, checksum_after_fn);
14897 ht.empty ();
14898
14899 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14900 fold_check_failed (fn, tem);
14901
14902 md5_init_ctx (&ctx);
14903 for (i = 0; i < nargs; i++)
14904 fold_checksum_tree (argarray[i], &ctx, &ht);
14905 md5_finish_ctx (&ctx, checksum_after_arglist);
14906
14907 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14908 fold_check_failed (NULL_TREE, tem);
14909 #endif
14910 return tem;
14911 }
14912
14913 /* Perform constant folding and related simplification of an initializer
14914 expression. These behave identically to "fold_buildN" but ignore
14915 potential run-time traps and exceptions that fold must preserve. */
14916
14917 #define START_FOLD_INIT \
14918 int saved_signaling_nans = flag_signaling_nans;\
14919 int saved_trapping_math = flag_trapping_math;\
14920 int saved_rounding_math = flag_rounding_math;\
14921 int saved_trapv = flag_trapv;\
14922 int saved_folding_initializer = folding_initializer;\
14923 flag_signaling_nans = 0;\
14924 flag_trapping_math = 0;\
14925 flag_rounding_math = 0;\
14926 flag_trapv = 0;\
14927 folding_initializer = 1;
14928
14929 #define END_FOLD_INIT \
14930 flag_signaling_nans = saved_signaling_nans;\
14931 flag_trapping_math = saved_trapping_math;\
14932 flag_rounding_math = saved_rounding_math;\
14933 flag_trapv = saved_trapv;\
14934 folding_initializer = saved_folding_initializer;
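/* E.g. (illustrative): fold_build2_initializer_loc (loc, RDIV_EXPR,
   double_type_node, x, y) may constant-fold even under -ftrapping-math
   or -frounding-math, where plain fold_build2_loc has to preserve the
   run-time operation.  */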
14935
14936 tree
14937 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14938 tree type, tree op)
14939 {
14940 tree result;
14941 START_FOLD_INIT;
14942
14943 result = fold_build1_loc (loc, code, type, op);
14944
14945 END_FOLD_INIT;
14946 return result;
14947 }
14948
14949 tree
14950 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14951 tree type, tree op0, tree op1)
14952 {
14953 tree result;
14954 START_FOLD_INIT;
14955
14956 result = fold_build2_loc (loc, code, type, op0, op1);
14957
14958 END_FOLD_INIT;
14959 return result;
14960 }
14961
14962 tree
14963 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14964 int nargs, tree *argarray)
14965 {
14966 tree result;
14967 START_FOLD_INIT;
14968
14969 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14970
14971 END_FOLD_INIT;
14972 return result;
14973 }
14974
14975 #undef START_FOLD_INIT
14976 #undef END_FOLD_INIT
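/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   The two macros above implement a save/clear/restore idiom for global
   folding flags.  A minimal rendition of the same pattern, with made-up
   flag and function names, guarded by #if 0 so it stays out of this
   translation unit:  */
#if 0
#include <stdio.h>

static int flag_trapping = 1;		/* stand-in for flag_trapping_math */
static int in_initializer = 0;		/* stand-in for folding_initializer */

#define START_INIT \
  int saved_trapping = flag_trapping;\
  int saved_in_initializer = in_initializer;\
  flag_trapping = 0;\
  in_initializer = 1;

#define END_INIT \
  flag_trapping = saved_trapping;\
  in_initializer = saved_in_initializer;

static int
fold_in_initializer_context (int x)
{
  int result;
  START_INIT;
  /* Folding here may ignore run-time traps, as in the _initializer
     wrappers above.  */
  result = x * 2;
  END_INIT;
  return result;
}

int
main (void)
{
  printf ("%d (flags restored: %d %d)\n",
	  fold_in_initializer_context (21), flag_trapping, in_initializer);
  return 0;
}
#endif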
14977
14978 /* Determine if first argument is a multiple of second argument. Return 0 if
14979    it is not, or we cannot easily determine it to be.
14980
14981 An example of the sort of thing we care about (at this point; this routine
14982 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14983 fold cases do now) is discovering that
14984
14985 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14986
14987 is a multiple of
14988
14989 SAVE_EXPR (J * 8)
14990
14991 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14992
14993 This code also handles discovering that
14994
14995 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14996
14997 is a multiple of 8 so we don't have to worry about dealing with a
14998 possible remainder.
14999
15000 Note that we *look* inside a SAVE_EXPR only to determine how it was
15001 calculated; it is not safe for fold to do much of anything else with the
15002 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15003 at run time. For example, the latter example above *cannot* be implemented
15004 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15005 evaluation time of the original SAVE_EXPR is not necessarily the same at
15006 the time the new expression is evaluated. The only optimization of this
15007 sort that would be valid is changing
15008
15009 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15010
15011 divided by 8 to
15012
15013 SAVE_EXPR (I) * SAVE_EXPR (J)
15014
15015 (where the same SAVE_EXPR (J) is used in the original and the
15016 transformed version). */
15017
15018 int
15019 multiple_of_p (tree type, const_tree top, const_tree bottom)
15020 {
15021 if (operand_equal_p (top, bottom, 0))
15022 return 1;
15023
15024 if (TREE_CODE (type) != INTEGER_TYPE)
15025 return 0;
15026
15027 switch (TREE_CODE (top))
15028 {
15029 case BIT_AND_EXPR:
15030       /* A bitwise AND can only clear bits, so if BOTTOM is a power of
15031          two and either operand is a multiple of BOTTOM, the result is too. */
15032 if (!integer_pow2p (bottom))
15033 return 0;
15034 /* FALLTHRU */
15035
15036 case MULT_EXPR:
15037 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15038 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15039
15040 case PLUS_EXPR:
15041 case MINUS_EXPR:
15042 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15043 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15044
15045 case LSHIFT_EXPR:
15046 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15047 {
15048 tree op1, t1;
15049
15050 op1 = TREE_OPERAND (top, 1);
15051 /* const_binop may not detect overflow correctly,
15052 so check for it explicitly here. */
15053 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15054 && 0 != (t1 = fold_convert (type,
15055 const_binop (LSHIFT_EXPR,
15056 size_one_node,
15057 op1)))
15058 && !TREE_OVERFLOW (t1))
15059 return multiple_of_p (type, t1, bottom);
15060 }
15061 return 0;
15062
15063 case NOP_EXPR:
15064 /* Can't handle conversions from non-integral or wider integral type. */
15065 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15066 || (TYPE_PRECISION (type)
15067 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15068 return 0;
15069
15070       /* ... fall through ... */
15071
15072 case SAVE_EXPR:
15073 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15074
15075 case COND_EXPR:
15076 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15077 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15078
15079 case INTEGER_CST:
15080 if (TREE_CODE (bottom) != INTEGER_CST
15081 || integer_zerop (bottom)
15082 || (TYPE_UNSIGNED (type)
15083 && (tree_int_cst_sgn (top) < 0
15084 || tree_int_cst_sgn (bottom) < 0)))
15085 return 0;
15086 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15087 SIGNED);
15088
15089 default:
15090 return 0;
15091 }
15092 }
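/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   The recursion above rests on a few arithmetic identities: a sum or
   difference of two multiples of BOTTOM is a multiple of BOTTOM; a
   product is a multiple when either factor is; and, for a power-of-two
   BOTTOM, a bitwise AND is a multiple when either operand is, since AND
   can only clear bits.  An exhaustive check over small values:  */
#if 0
#include <assert.h>
#include <stdio.h>

int
main (void)
{
  const unsigned b = 8;		/* a power-of-two BOTTOM */
  unsigned a, c;

  for (a = 0; a < 256; a++)
    for (c = 0; c < 256; c++)
      {
	if (a % b == 0 && c % b == 0)
	  {
	    assert ((a + c) % b == 0);	/* PLUS_EXPR rule */
	    assert ((a - c) % b == 0);	/* MINUS_EXPR rule, modulo 2^N */
	  }
	if (a % b == 0 || c % b == 0)
	  {
	    assert ((a * c) % b == 0);	/* MULT_EXPR rule */
	    assert ((a & c) % b == 0);	/* BIT_AND_EXPR rule */
	  }
      }
  printf ("multiple_of_p-style identities hold\n");
  return 0;
}
#endif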
15093
15094 /* Return true if CODE or TYPE is known to be non-negative. */
15095
15096 static bool
15097 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15098 {
15099 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15100 && truth_value_p (code))
15101 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15102        have a signed:1 type (where the values are -1 and 0). */
15103 return true;
15104 return false;
15105 }
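/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   The signed:1 caveat above is observable in C: a one-bit signed
   bit-field can hold only 0 and -1, so a truth value stored in one may
   compare negative.  The stored value is implementation-defined; GCC
   wraps 1 to -1 on two's-complement targets.  */
#if 0
#include <stdio.h>

struct s { signed int b : 1; };

int
main (void)
{
  struct s x;
  x.b = 1;			/* implementation-defined: becomes -1 */
  printf ("%d\n", (int) x.b);	/* typically prints -1 */
  return 0;
}
#endif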
15106
15107 /* Return true if (CODE OP0) is known to be non-negative. If the return
15108 value is based on the assumption that signed overflow is undefined,
15109 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15110 *STRICT_OVERFLOW_P. */
15111
15112 bool
15113 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15114 bool *strict_overflow_p)
15115 {
15116 if (TYPE_UNSIGNED (type))
15117 return true;
15118
15119 switch (code)
15120 {
15121 case ABS_EXPR:
15122 /* We can't return 1 if flag_wrapv is set because
15123 ABS_EXPR<INT_MIN> = INT_MIN. */
15124 if (!INTEGRAL_TYPE_P (type))
15125 return true;
15126 if (TYPE_OVERFLOW_UNDEFINED (type))
15127 {
15128 *strict_overflow_p = true;
15129 return true;
15130 }
15131 break;
15132
15133 case NON_LVALUE_EXPR:
15134 case FLOAT_EXPR:
15135 case FIX_TRUNC_EXPR:
15136 return tree_expr_nonnegative_warnv_p (op0,
15137 strict_overflow_p);
15138
15139 case NOP_EXPR:
15140 {
15141 tree inner_type = TREE_TYPE (op0);
15142 tree outer_type = type;
15143
15144 if (TREE_CODE (outer_type) == REAL_TYPE)
15145 {
15146 if (TREE_CODE (inner_type) == REAL_TYPE)
15147 return tree_expr_nonnegative_warnv_p (op0,
15148 strict_overflow_p);
15149 if (INTEGRAL_TYPE_P (inner_type))
15150 {
15151 if (TYPE_UNSIGNED (inner_type))
15152 return true;
15153 return tree_expr_nonnegative_warnv_p (op0,
15154 strict_overflow_p);
15155 }
15156 }
15157 else if (INTEGRAL_TYPE_P (outer_type))
15158 {
15159 if (TREE_CODE (inner_type) == REAL_TYPE)
15160 return tree_expr_nonnegative_warnv_p (op0,
15161 strict_overflow_p);
15162 if (INTEGRAL_TYPE_P (inner_type))
15163 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15164 && TYPE_UNSIGNED (inner_type);
15165 }
15166 }
15167 break;
15168
15169 default:
15170 return tree_simple_nonnegative_warnv_p (code, type);
15171 }
15172
15173   /* We don't know the sign, so be conservative and return false. */
15174 return false;
15175 }
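/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   The ABS_EXPR caveat above: under wrapping (two's-complement) overflow,
   negating INT_MIN yields INT_MIN again, so an absolute value is only
   provably non-negative when signed overflow is undefined.  The wrap is
   reproduced below with well-defined unsigned arithmetic:  */
#if 0
#include <limits.h>
#include <stdio.h>

int
main (void)
{
  /* Negate INT_MIN the way a wrapping machine would.  */
  unsigned int u = (unsigned int) INT_MIN;
  int wrapped_neg = (int) (0u - u);	/* implementation-defined conversion */
  printf ("wrapped -INT_MIN = %d (negative: %d)\n",
	  wrapped_neg, wrapped_neg < 0);
  return 0;
}
#endif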
15176
15177 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15178 value is based on the assumption that signed overflow is undefined,
15179 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15180 *STRICT_OVERFLOW_P. */
15181
15182 bool
15183 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15184 tree op1, bool *strict_overflow_p)
15185 {
15186 if (TYPE_UNSIGNED (type))
15187 return true;
15188
15189 switch (code)
15190 {
15191 case POINTER_PLUS_EXPR:
15192 case PLUS_EXPR:
15193 if (FLOAT_TYPE_P (type))
15194 return (tree_expr_nonnegative_warnv_p (op0,
15195 strict_overflow_p)
15196 && tree_expr_nonnegative_warnv_p (op1,
15197 strict_overflow_p));
15198
15199 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15200 both unsigned and at least 2 bits shorter than the result. */
15201 if (TREE_CODE (type) == INTEGER_TYPE
15202 && TREE_CODE (op0) == NOP_EXPR
15203 && TREE_CODE (op1) == NOP_EXPR)
15204 {
15205 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15206 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15207 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15208 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15209 {
15210 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15211 TYPE_PRECISION (inner2)) + 1;
15212 return prec < TYPE_PRECISION (type);
15213 }
15214 }
15215 break;
15216
15217 case MULT_EXPR:
15218 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15219 {
15220	  /* x * x is always non-negative for floating point x,
15221	     or when signed overflow is undefined. */
15222 if (operand_equal_p (op0, op1, 0)
15223 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15224 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15225 {
15226 if (TYPE_OVERFLOW_UNDEFINED (type))
15227 *strict_overflow_p = true;
15228 return true;
15229 }
15230 }
15231
15232       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15233          both unsigned and their combined precision is less than the result's. */
15234 if (TREE_CODE (type) == INTEGER_TYPE
15235 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15236 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15237 {
15238 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15239 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15240 : TREE_TYPE (op0);
15241 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15242 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15243 : TREE_TYPE (op1);
15244
15245 bool unsigned0 = TYPE_UNSIGNED (inner0);
15246 bool unsigned1 = TYPE_UNSIGNED (inner1);
15247
15248 if (TREE_CODE (op0) == INTEGER_CST)
15249 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15250
15251 if (TREE_CODE (op1) == INTEGER_CST)
15252 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15253
15254 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15255 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15256 {
15257 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15258 ? tree_int_cst_min_precision (op0, UNSIGNED)
15259 : TYPE_PRECISION (inner0);
15260
15261 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15262 ? tree_int_cst_min_precision (op1, UNSIGNED)
15263 : TYPE_PRECISION (inner1);
15264
15265 return precision0 + precision1 < TYPE_PRECISION (type);
15266 }
15267 }
15268 return false;
15269
15270 case BIT_AND_EXPR:
15271 case MAX_EXPR:
15272 return (tree_expr_nonnegative_warnv_p (op0,
15273 strict_overflow_p)
15274 || tree_expr_nonnegative_warnv_p (op1,
15275 strict_overflow_p));
15276
15277 case BIT_IOR_EXPR:
15278 case BIT_XOR_EXPR:
15279 case MIN_EXPR:
15280 case RDIV_EXPR:
15281 case TRUNC_DIV_EXPR:
15282 case CEIL_DIV_EXPR:
15283 case FLOOR_DIV_EXPR:
15284 case ROUND_DIV_EXPR:
15285 return (tree_expr_nonnegative_warnv_p (op0,
15286 strict_overflow_p)
15287 && tree_expr_nonnegative_warnv_p (op1,
15288 strict_overflow_p));
15289
15290 case TRUNC_MOD_EXPR:
15291 case CEIL_MOD_EXPR:
15292 case FLOOR_MOD_EXPR:
15293 case ROUND_MOD_EXPR:
15294 return tree_expr_nonnegative_warnv_p (op0,
15295 strict_overflow_p);
15296 default:
15297 return tree_simple_nonnegative_warnv_p (code, type);
15298 }
15299
15300   /* We don't know the sign, so be conservative and return false. */
15301 return false;
15302 }
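/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   The two precision arguments above, verified exhaustively for 8-bit
   operands widened to 32-bit int: for the sum, max(prec0, prec1) + 1 = 9
   bits; for the product, prec0 + prec1 = 16 bits; both fit well below
   the sign bit, so neither result can be negative.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  unsigned x, y;
  for (x = 0; x <= 255; x++)
    for (y = 0; y <= 255; y++)
      {
	int sum  = (int) (uint8_t) x + (int) (uint8_t) y;
	int prod = (int) (uint8_t) x * (int) (uint8_t) y;
	assert (sum >= 0 && prod >= 0);
      }
  printf ("zero-extended sums and products stay non-negative\n");
  return 0;
}
#endif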
15303
15304 /* Return true if T is known to be non-negative. If the return
15305 value is based on the assumption that signed overflow is undefined,
15306 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15307 *STRICT_OVERFLOW_P. */
15308
15309 bool
15310 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15311 {
15312 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15313 return true;
15314
15315 switch (TREE_CODE (t))
15316 {
15317 case INTEGER_CST:
15318 return tree_int_cst_sgn (t) >= 0;
15319
15320 case REAL_CST:
15321 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15322
15323 case FIXED_CST:
15324 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15325
15326 case COND_EXPR:
15327 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15328 strict_overflow_p)
15329 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15330 strict_overflow_p));
15331 default:
15332 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15333 TREE_TYPE (t));
15334 }
15335   /* We don't know the sign of `t', so be conservative and return false. */
15336 return false;
15337 }
15338
15339 /* Return true if T is known to be non-negative. If the return
15340 value is based on the assumption that signed overflow is undefined,
15341 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15342 *STRICT_OVERFLOW_P. */
15343
15344 bool
15345 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15346 tree arg0, tree arg1, bool *strict_overflow_p)
15347 {
15348 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15349 switch (DECL_FUNCTION_CODE (fndecl))
15350 {
15351 CASE_FLT_FN (BUILT_IN_ACOS):
15352 CASE_FLT_FN (BUILT_IN_ACOSH):
15353 CASE_FLT_FN (BUILT_IN_CABS):
15354 CASE_FLT_FN (BUILT_IN_COSH):
15355 CASE_FLT_FN (BUILT_IN_ERFC):
15356 CASE_FLT_FN (BUILT_IN_EXP):
15357 CASE_FLT_FN (BUILT_IN_EXP10):
15358 CASE_FLT_FN (BUILT_IN_EXP2):
15359 CASE_FLT_FN (BUILT_IN_FABS):
15360 CASE_FLT_FN (BUILT_IN_FDIM):
15361 CASE_FLT_FN (BUILT_IN_HYPOT):
15362 CASE_FLT_FN (BUILT_IN_POW10):
15363 CASE_INT_FN (BUILT_IN_FFS):
15364 CASE_INT_FN (BUILT_IN_PARITY):
15365 CASE_INT_FN (BUILT_IN_POPCOUNT):
15366 CASE_INT_FN (BUILT_IN_CLZ):
15367 CASE_INT_FN (BUILT_IN_CLRSB):
15368 case BUILT_IN_BSWAP32:
15369 case BUILT_IN_BSWAP64:
15370 /* Always true. */
15371 return true;
15372
15373 CASE_FLT_FN (BUILT_IN_SQRT):
15374 /* sqrt(-0.0) is -0.0. */
15375 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15376 return true;
15377 return tree_expr_nonnegative_warnv_p (arg0,
15378 strict_overflow_p);
15379
15380 CASE_FLT_FN (BUILT_IN_ASINH):
15381 CASE_FLT_FN (BUILT_IN_ATAN):
15382 CASE_FLT_FN (BUILT_IN_ATANH):
15383 CASE_FLT_FN (BUILT_IN_CBRT):
15384 CASE_FLT_FN (BUILT_IN_CEIL):
15385 CASE_FLT_FN (BUILT_IN_ERF):
15386 CASE_FLT_FN (BUILT_IN_EXPM1):
15387 CASE_FLT_FN (BUILT_IN_FLOOR):
15388 CASE_FLT_FN (BUILT_IN_FMOD):
15389 CASE_FLT_FN (BUILT_IN_FREXP):
15390 CASE_FLT_FN (BUILT_IN_ICEIL):
15391 CASE_FLT_FN (BUILT_IN_IFLOOR):
15392 CASE_FLT_FN (BUILT_IN_IRINT):
15393 CASE_FLT_FN (BUILT_IN_IROUND):
15394 CASE_FLT_FN (BUILT_IN_LCEIL):
15395 CASE_FLT_FN (BUILT_IN_LDEXP):
15396 CASE_FLT_FN (BUILT_IN_LFLOOR):
15397 CASE_FLT_FN (BUILT_IN_LLCEIL):
15398 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15399 CASE_FLT_FN (BUILT_IN_LLRINT):
15400 CASE_FLT_FN (BUILT_IN_LLROUND):
15401 CASE_FLT_FN (BUILT_IN_LRINT):
15402 CASE_FLT_FN (BUILT_IN_LROUND):
15403 CASE_FLT_FN (BUILT_IN_MODF):
15404 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15405 CASE_FLT_FN (BUILT_IN_RINT):
15406 CASE_FLT_FN (BUILT_IN_ROUND):
15407 CASE_FLT_FN (BUILT_IN_SCALB):
15408 CASE_FLT_FN (BUILT_IN_SCALBLN):
15409 CASE_FLT_FN (BUILT_IN_SCALBN):
15410 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15411 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15412 CASE_FLT_FN (BUILT_IN_SINH):
15413 CASE_FLT_FN (BUILT_IN_TANH):
15414 CASE_FLT_FN (BUILT_IN_TRUNC):
15415 /* True if the 1st argument is nonnegative. */
15416 return tree_expr_nonnegative_warnv_p (arg0,
15417 strict_overflow_p);
15418
15419 CASE_FLT_FN (BUILT_IN_FMAX):
15420 /* True if the 1st OR 2nd arguments are nonnegative. */
15421 return (tree_expr_nonnegative_warnv_p (arg0,
15422 strict_overflow_p)
15423 || (tree_expr_nonnegative_warnv_p (arg1,
15424 strict_overflow_p)));
15425
15426 CASE_FLT_FN (BUILT_IN_FMIN):
15427 /* True if the 1st AND 2nd arguments are nonnegative. */
15428 return (tree_expr_nonnegative_warnv_p (arg0,
15429 strict_overflow_p)
15430 && (tree_expr_nonnegative_warnv_p (arg1,
15431 strict_overflow_p)));
15432
15433 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15434 /* True if the 2nd argument is nonnegative. */
15435 return tree_expr_nonnegative_warnv_p (arg1,
15436 strict_overflow_p);
15437
15438 CASE_FLT_FN (BUILT_IN_POWI):
15439 /* True if the 1st argument is nonnegative or the second
15440 argument is an even integer. */
15441 if (TREE_CODE (arg1) == INTEGER_CST
15442 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15443 return true;
15444 return tree_expr_nonnegative_warnv_p (arg0,
15445 strict_overflow_p);
15446
15447 CASE_FLT_FN (BUILT_IN_POW):
15448 /* True if the 1st argument is nonnegative or the second
15449 argument is an even integer valued real. */
15450 if (TREE_CODE (arg1) == REAL_CST)
15451 {
15452 REAL_VALUE_TYPE c;
15453 HOST_WIDE_INT n;
15454
15455 c = TREE_REAL_CST (arg1);
15456 n = real_to_integer (&c);
15457 if ((n & 1) == 0)
15458 {
15459 REAL_VALUE_TYPE cint;
15460 real_from_integer (&cint, VOIDmode, n, SIGNED);
15461 if (real_identical (&c, &cint))
15462 return true;
15463 }
15464 }
15465 return tree_expr_nonnegative_warnv_p (arg0,
15466 strict_overflow_p);
15467
15468 default:
15469 break;
15470 }
15471 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15472 type);
15473 }
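/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   The POW/POWI rule above: x**n is non-negative whenever n is an even
   integer, whatever the sign of x, since (-x)**(2k) == (x*x)**k.
   Compile with -lm:  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x;
  for (x = -3.0; x <= 3.0; x += 0.5)
    printf ("pow(% .1f, 4) = %g (non-negative: %d)\n",
	    x, pow (x, 4.0), pow (x, 4.0) >= 0.0);
  return 0;
}
#endif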
15474
15475 /* Return true if T is known to be non-negative. If the return
15476 value is based on the assumption that signed overflow is undefined,
15477 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15478 *STRICT_OVERFLOW_P. */
15479
15480 static bool
15481 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15482 {
15483 enum tree_code code = TREE_CODE (t);
15484 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15485 return true;
15486
15487 switch (code)
15488 {
15489 case TARGET_EXPR:
15490 {
15491 tree temp = TARGET_EXPR_SLOT (t);
15492 t = TARGET_EXPR_INITIAL (t);
15493
15494 /* If the initializer is non-void, then it's a normal expression
15495 that will be assigned to the slot. */
15496 if (!VOID_TYPE_P (t))
15497 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15498
15499 /* Otherwise, the initializer sets the slot in some way. One common
15500 way is an assignment statement at the end of the initializer. */
15501 while (1)
15502 {
15503 if (TREE_CODE (t) == BIND_EXPR)
15504 t = expr_last (BIND_EXPR_BODY (t));
15505 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15506 || TREE_CODE (t) == TRY_CATCH_EXPR)
15507 t = expr_last (TREE_OPERAND (t, 0));
15508 else if (TREE_CODE (t) == STATEMENT_LIST)
15509 t = expr_last (t);
15510 else
15511 break;
15512 }
15513 if (TREE_CODE (t) == MODIFY_EXPR
15514 && TREE_OPERAND (t, 0) == temp)
15515 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15516 strict_overflow_p);
15517
15518 return false;
15519 }
15520
15521 case CALL_EXPR:
15522 {
15523 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15524 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15525
15526 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15527 get_callee_fndecl (t),
15528 arg0,
15529 arg1,
15530 strict_overflow_p);
15531 }
15532 case COMPOUND_EXPR:
15533 case MODIFY_EXPR:
15534 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15535 strict_overflow_p);
15536 case BIND_EXPR:
15537 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15538 strict_overflow_p);
15539 case SAVE_EXPR:
15540 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15541 strict_overflow_p);
15542
15543 default:
15544 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15545 TREE_TYPE (t));
15546 }
15547
15548   /* We don't know the sign of `t', so be conservative and return false. */
15549 return false;
15550 }
15551
15552 /* Return true if T is known to be non-negative. If the return
15553 value is based on the assumption that signed overflow is undefined,
15554 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15555 *STRICT_OVERFLOW_P. */
15556
15557 bool
15558 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15559 {
15560 enum tree_code code;
15561 if (t == error_mark_node)
15562 return false;
15563
15564 code = TREE_CODE (t);
15565 switch (TREE_CODE_CLASS (code))
15566 {
15567 case tcc_binary:
15568 case tcc_comparison:
15569 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15570 TREE_TYPE (t),
15571 TREE_OPERAND (t, 0),
15572 TREE_OPERAND (t, 1),
15573 strict_overflow_p);
15574
15575 case tcc_unary:
15576 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15577 TREE_TYPE (t),
15578 TREE_OPERAND (t, 0),
15579 strict_overflow_p);
15580
15581 case tcc_constant:
15582 case tcc_declaration:
15583 case tcc_reference:
15584 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15585
15586 default:
15587 break;
15588 }
15589
15590 switch (code)
15591 {
15592 case TRUTH_AND_EXPR:
15593 case TRUTH_OR_EXPR:
15594 case TRUTH_XOR_EXPR:
15595 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15596 TREE_TYPE (t),
15597 TREE_OPERAND (t, 0),
15598 TREE_OPERAND (t, 1),
15599 strict_overflow_p);
15600 case TRUTH_NOT_EXPR:
15601 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15602 TREE_TYPE (t),
15603 TREE_OPERAND (t, 0),
15604 strict_overflow_p);
15605
15606 case COND_EXPR:
15607 case CONSTRUCTOR:
15608 case OBJ_TYPE_REF:
15609 case ASSERT_EXPR:
15610 case ADDR_EXPR:
15611 case WITH_SIZE_EXPR:
15612 case SSA_NAME:
15613 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15614
15615 default:
15616 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15617 }
15618 }
15619
15620 /* Return true if `t' is known to be non-negative. Handle warnings
15621 about undefined signed overflow. */
15622
15623 bool
15624 tree_expr_nonnegative_p (tree t)
15625 {
15626 bool ret, strict_overflow_p;
15627
15628 strict_overflow_p = false;
15629 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15630 if (strict_overflow_p)
15631 fold_overflow_warning (("assuming signed overflow does not occur when "
15632 "determining that expression is always "
15633 "non-negative"),
15634 WARN_STRICT_OVERFLOW_MISC);
15635 return ret;
15636 }
15637
15638
15639 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15640 For floating point we further ensure that T is not denormal.
15641    Similar logic is present in nonzero_address_p in rtlanal.c.
15642
15643 If the return value is based on the assumption that signed overflow
15644 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15645 change *STRICT_OVERFLOW_P. */
15646
15647 bool
15648 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15649 bool *strict_overflow_p)
15650 {
15651 switch (code)
15652 {
15653 case ABS_EXPR:
15654 return tree_expr_nonzero_warnv_p (op0,
15655 strict_overflow_p);
15656
15657 case NOP_EXPR:
15658 {
15659 tree inner_type = TREE_TYPE (op0);
15660 tree outer_type = type;
15661
15662 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15663 && tree_expr_nonzero_warnv_p (op0,
15664 strict_overflow_p));
15665 }
15666 break;
15667
15668 case NON_LVALUE_EXPR:
15669 return tree_expr_nonzero_warnv_p (op0,
15670 strict_overflow_p);
15671
15672 default:
15673 break;
15674 }
15675
15676 return false;
15677 }
15678
15679 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15680 For floating point we further ensure that T is not denormal.
15681    Similar logic is present in nonzero_address_p in rtlanal.c.
15682
15683 If the return value is based on the assumption that signed overflow
15684 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15685 change *STRICT_OVERFLOW_P. */
15686
15687 bool
15688 tree_binary_nonzero_warnv_p (enum tree_code code,
15689 tree type,
15690 tree op0,
15691 tree op1, bool *strict_overflow_p)
15692 {
15693 bool sub_strict_overflow_p;
15694 switch (code)
15695 {
15696 case POINTER_PLUS_EXPR:
15697 case PLUS_EXPR:
15698 if (TYPE_OVERFLOW_UNDEFINED (type))
15699 {
15700	  /* In the presence of negative values it is hard
15701	     to say anything definite. */
15702 sub_strict_overflow_p = false;
15703 if (!tree_expr_nonnegative_warnv_p (op0,
15704 &sub_strict_overflow_p)
15705 || !tree_expr_nonnegative_warnv_p (op1,
15706 &sub_strict_overflow_p))
15707 return false;
15708	  /* Both operands are known non-negative; the sum is nonzero if at least one of them is positive. */
15709 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15710 overflows, on a twos-complement machine the sum of two
15711 nonnegative numbers can never be zero. */
15712 return (tree_expr_nonzero_warnv_p (op0,
15713 strict_overflow_p)
15714 || tree_expr_nonzero_warnv_p (op1,
15715 strict_overflow_p));
15716 }
15717 break;
15718
15719 case MULT_EXPR:
15720 if (TYPE_OVERFLOW_UNDEFINED (type))
15721 {
15722 if (tree_expr_nonzero_warnv_p (op0,
15723 strict_overflow_p)
15724 && tree_expr_nonzero_warnv_p (op1,
15725 strict_overflow_p))
15726 {
15727 *strict_overflow_p = true;
15728 return true;
15729 }
15730 }
15731 break;
15732
15733 case MIN_EXPR:
15734 sub_strict_overflow_p = false;
15735 if (tree_expr_nonzero_warnv_p (op0,
15736 &sub_strict_overflow_p)
15737 && tree_expr_nonzero_warnv_p (op1,
15738 &sub_strict_overflow_p))
15739 {
15740 if (sub_strict_overflow_p)
15741 *strict_overflow_p = true;
15742 }
15743 break;
15744
15745 case MAX_EXPR:
15746 sub_strict_overflow_p = false;
15747 if (tree_expr_nonzero_warnv_p (op0,
15748 &sub_strict_overflow_p))
15749 {
15750 if (sub_strict_overflow_p)
15751 *strict_overflow_p = true;
15752
15753 /* When both operands are nonzero, then MAX must be too. */
15754 if (tree_expr_nonzero_warnv_p (op1,
15755 strict_overflow_p))
15756 return true;
15757
15758 /* MAX where operand 0 is positive is positive. */
15759 return tree_expr_nonnegative_warnv_p (op0,
15760 strict_overflow_p);
15761 }
15762 /* MAX where operand 1 is positive is positive. */
15763 else if (tree_expr_nonzero_warnv_p (op1,
15764 &sub_strict_overflow_p)
15765 && tree_expr_nonnegative_warnv_p (op1,
15766 &sub_strict_overflow_p))
15767 {
15768 if (sub_strict_overflow_p)
15769 *strict_overflow_p = true;
15770 return true;
15771 }
15772 break;
15773
15774 case BIT_IOR_EXPR:
15775 return (tree_expr_nonzero_warnv_p (op1,
15776 strict_overflow_p)
15777 || tree_expr_nonzero_warnv_p (op0,
15778 strict_overflow_p));
15779
15780 default:
15781 break;
15782 }
15783
15784 return false;
15785 }
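/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   The PLUS_EXPR argument above: two non-negative 32-bit values are each
   at most INT_MAX, so their true sum is at most 2*INT_MAX = 2^32 - 2 and
   can never wrap all the way back to zero unless both addends are zero.
   Spot-checked with wrapping unsigned arithmetic:  */
#if 0
#include <assert.h>
#include <limits.h>
#include <stdio.h>

int
main (void)
{
  unsigned a = (unsigned) INT_MAX;	/* largest non-negative int */
  unsigned b;

  for (b = 1; b <= (unsigned) INT_MAX; b += 65537u)
    assert (a + b != 0u);		/* the wrapping sum is never zero */
  assert (a + (unsigned) INT_MAX == UINT_MAX - 1);
  printf ("non-negative + nonzero non-negative is never zero\n");
  return 0;
}
#endif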
15786
15787 /* Return true when T is an address and is known to be nonzero.
15788 For floating point we further ensure that T is not denormal.
15789    Similar logic is present in nonzero_address_p in rtlanal.c.
15790
15791 If the return value is based on the assumption that signed overflow
15792 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15793 change *STRICT_OVERFLOW_P. */
15794
15795 bool
15796 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15797 {
15798 bool sub_strict_overflow_p;
15799 switch (TREE_CODE (t))
15800 {
15801 case INTEGER_CST:
15802 return !integer_zerop (t);
15803
15804 case ADDR_EXPR:
15805 {
15806 tree base = TREE_OPERAND (t, 0);
15807
15808 if (!DECL_P (base))
15809 base = get_base_address (base);
15810
15811 if (!base)
15812 return false;
15813
15814	 /* For objects in the symbol table, check whether we know they are non-zero.
15815 Don't do anything for variables and functions before symtab is built;
15816 it is quite possible that they will be declared weak later. */
15817 if (DECL_P (base) && decl_in_symtab_p (base))
15818 {
15819 struct symtab_node *symbol;
15820
15821 symbol = symtab_node::get_create (base);
15822 if (symbol)
15823 return symbol->nonzero_address ();
15824 else
15825 return false;
15826 }
15827
15828 /* Function local objects are never NULL. */
15829 if (DECL_P (base)
15830 && (DECL_CONTEXT (base)
15831 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15832 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15833 return true;
15834
15835 /* Constants are never weak. */
15836 if (CONSTANT_CLASS_P (base))
15837 return true;
15838
15839 return false;
15840 }
15841
15842 case COND_EXPR:
15843 sub_strict_overflow_p = false;
15844 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15845 &sub_strict_overflow_p)
15846 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15847 &sub_strict_overflow_p))
15848 {
15849 if (sub_strict_overflow_p)
15850 *strict_overflow_p = true;
15851 return true;
15852 }
15853 break;
15854
15855 default:
15856 break;
15857 }
15858 return false;
15859 }
15860
15861 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15862 attempt to fold the expression to a constant without modifying TYPE,
15863 OP0 or OP1.
15864
15865 If the expression could be simplified to a constant, then return
15866 the constant. If the expression would not be simplified to a
15867 constant, then return NULL_TREE. */
15868
15869 tree
15870 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15871 {
15872 tree tem = fold_binary (code, type, op0, op1);
15873 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15874 }
15875
15876 /* Given the components of a unary expression CODE, TYPE and OP0,
15877 attempt to fold the expression to a constant without modifying
15878 TYPE or OP0.
15879
15880 If the expression could be simplified to a constant, then return
15881 the constant. If the expression would not be simplified to a
15882 constant, then return NULL_TREE. */
15883
15884 tree
15885 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15886 {
15887 tree tem = fold_unary (code, type, op0);
15888 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15889 }
15890
15891 /* If EXP represents referencing an element in a constant string
15892 (either via pointer arithmetic or array indexing), return the
15893 tree representing the value accessed, otherwise return NULL. */
15894
15895 tree
15896 fold_read_from_constant_string (tree exp)
15897 {
15898 if ((TREE_CODE (exp) == INDIRECT_REF
15899 || TREE_CODE (exp) == ARRAY_REF)
15900 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15901 {
15902 tree exp1 = TREE_OPERAND (exp, 0);
15903 tree index;
15904 tree string;
15905 location_t loc = EXPR_LOCATION (exp);
15906
15907 if (TREE_CODE (exp) == INDIRECT_REF)
15908 string = string_constant (exp1, &index);
15909 else
15910 {
15911 tree low_bound = array_ref_low_bound (exp);
15912 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15913
15914 /* Optimize the special-case of a zero lower bound.
15915
15916 We convert the low_bound to sizetype to avoid some problems
15917 with constant folding. (E.g. suppose the lower bound is 1,
15918	     and its mode is QI. Without the conversion, (ARRAY
15919 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15920 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15921 if (! integer_zerop (low_bound))
15922 index = size_diffop_loc (loc, index,
15923 fold_convert_loc (loc, sizetype, low_bound));
15924
15925 string = exp1;
15926 }
15927
15928 if (string
15929 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15930 && TREE_CODE (string) == STRING_CST
15931 && TREE_CODE (index) == INTEGER_CST
15932 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15933 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15934 == MODE_INT)
15935 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15936 return build_int_cst_type (TREE_TYPE (exp),
15937 (TREE_STRING_POINTER (string)
15938 [TREE_INT_CST_LOW (index)]));
15939 }
15940 return NULL;
15941 }
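/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   The two access forms recognized above, array indexing and pointer
   arithmetic into a string literal, denote the same character and are
   what this routine folds to a character constant:  */
#if 0
#include <assert.h>
#include <stdio.h>

int
main (void)
{
  assert ("hello"[1] == 'e');		/* ARRAY_REF form */
  assert (*("hello" + 1) == 'e');	/* INDIRECT_REF form */
  printf ("element read from a constant string: %c\n", "hello"[1]);
  return 0;
}
#endif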
15942
15943 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15944 an integer constant, real, or fixed-point constant.
15945
15946 TYPE is the type of the result. */
15947
15948 static tree
15949 fold_negate_const (tree arg0, tree type)
15950 {
15951 tree t = NULL_TREE;
15952
15953 switch (TREE_CODE (arg0))
15954 {
15955 case INTEGER_CST:
15956 {
15957 bool overflow;
15958 wide_int val = wi::neg (arg0, &overflow);
15959 t = force_fit_type (type, val, 1,
15960 (overflow | TREE_OVERFLOW (arg0))
15961 && !TYPE_UNSIGNED (type));
15962 break;
15963 }
15964
15965 case REAL_CST:
15966 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15967 break;
15968
15969 case FIXED_CST:
15970 {
15971 FIXED_VALUE_TYPE f;
15972 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15973 &(TREE_FIXED_CST (arg0)), NULL,
15974 TYPE_SATURATING (type));
15975 t = build_fixed (type, f);
15976 /* Propagate overflow flags. */
15977 if (overflow_p | TREE_OVERFLOW (arg0))
15978 TREE_OVERFLOW (t) = 1;
15979 break;
15980 }
15981
15982 default:
15983 gcc_unreachable ();
15984 }
15985
15986 return t;
15987 }
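/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   The overflow flag computed above corresponds to the single problematic
   case for two's-complement integer negation: -TYPE_MIN does not fit.
   A standalone predicate (negate_overflows_p is a made-up name):  */
#if 0
#include <limits.h>
#include <stdio.h>

static int
negate_overflows_p (int v)
{
  return v == INT_MIN;	/* the only int whose negation overflows */
}

int
main (void)
{
  printf ("negate -5: overflow %d\n", negate_overflows_p (-5));
  printf ("negate INT_MIN: overflow %d\n", negate_overflows_p (INT_MIN));
  return 0;
}
#endif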
15988
15989 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15990 an integer constant or real constant.
15991
15992 TYPE is the type of the result. */
15993
15994 tree
15995 fold_abs_const (tree arg0, tree type)
15996 {
15997 tree t = NULL_TREE;
15998
15999 switch (TREE_CODE (arg0))
16000 {
16001 case INTEGER_CST:
16002 {
16003 /* If the value is unsigned or non-negative, then the absolute value
16004 is the same as the ordinary value. */
16005 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
16006 t = arg0;
16007
16008 /* If the value is negative, then the absolute value is
16009 its negation. */
16010 else
16011 {
16012 bool overflow;
16013 wide_int val = wi::neg (arg0, &overflow);
16014 t = force_fit_type (type, val, -1,
16015 overflow | TREE_OVERFLOW (arg0));
16016 }
16017 }
16018 break;
16019
16020 case REAL_CST:
16021 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16022 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16023 else
16024 t = arg0;
16025 break;
16026
16027 default:
16028 gcc_unreachable ();
16029 }
16030
16031 return t;
16032 }
16033
16034 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16035 constant. TYPE is the type of the result. */
16036
16037 static tree
16038 fold_not_const (const_tree arg0, tree type)
16039 {
16040 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16041
16042 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
16043 }
16044
16045 /* Given CODE, a relational operator, the target type, TYPE and two
16046 constant operands OP0 and OP1, return the result of the
16047 relational operation. If the result is not a compile time
16048 constant, then return NULL_TREE. */
16049
16050 static tree
16051 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16052 {
16053 int result, invert;
16054
16055 /* From here on, the only cases we handle are when the result is
16056 known to be a constant. */
16057
16058 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16059 {
16060 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16061 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16062
16063 /* Handle the cases where either operand is a NaN. */
16064 if (real_isnan (c0) || real_isnan (c1))
16065 {
16066 switch (code)
16067 {
16068 case EQ_EXPR:
16069 case ORDERED_EXPR:
16070 result = 0;
16071 break;
16072
16073 case NE_EXPR:
16074 case UNORDERED_EXPR:
16075 case UNLT_EXPR:
16076 case UNLE_EXPR:
16077 case UNGT_EXPR:
16078 case UNGE_EXPR:
16079 case UNEQ_EXPR:
16080 result = 1;
16081 break;
16082
16083 case LT_EXPR:
16084 case LE_EXPR:
16085 case GT_EXPR:
16086 case GE_EXPR:
16087 case LTGT_EXPR:
16088 if (flag_trapping_math)
16089 return NULL_TREE;
16090 result = 0;
16091 break;
16092
16093 default:
16094 gcc_unreachable ();
16095 }
16096
16097 return constant_boolean_node (result, type);
16098 }
16099
16100 return constant_boolean_node (real_compare (code, c0, c1), type);
16101 }
16102
16103 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16104 {
16105 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16106 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16107 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16108 }
16109
16110 /* Handle equality/inequality of complex constants. */
16111 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16112 {
16113 tree rcond = fold_relational_const (code, type,
16114 TREE_REALPART (op0),
16115 TREE_REALPART (op1));
16116 tree icond = fold_relational_const (code, type,
16117 TREE_IMAGPART (op0),
16118 TREE_IMAGPART (op1));
16119 if (code == EQ_EXPR)
16120 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16121 else if (code == NE_EXPR)
16122 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16123 else
16124 return NULL_TREE;
16125 }
16126
16127 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16128 {
16129 unsigned count = VECTOR_CST_NELTS (op0);
16130 tree *elts = XALLOCAVEC (tree, count);
16131 gcc_assert (VECTOR_CST_NELTS (op1) == count
16132 && TYPE_VECTOR_SUBPARTS (type) == count);
16133
16134 for (unsigned i = 0; i < count; i++)
16135 {
16136 tree elem_type = TREE_TYPE (type);
16137 tree elem0 = VECTOR_CST_ELT (op0, i);
16138 tree elem1 = VECTOR_CST_ELT (op1, i);
16139
16140 tree tem = fold_relational_const (code, elem_type,
16141 elem0, elem1);
16142
16143 if (tem == NULL_TREE)
16144 return NULL_TREE;
16145
16146 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16147 }
16148
16149 return build_vector (type, elts);
16150 }
16151
16152 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16153
16154 To compute GT, swap the arguments and do LT.
16155 To compute GE, do LT and invert the result.
16156 To compute LE, swap the arguments, do LT and invert the result.
16157 To compute NE, do EQ and invert the result.
16158
16159 Therefore, the code below must handle only EQ and LT. */
16160
16161 if (code == LE_EXPR || code == GT_EXPR)
16162 {
16163 tree tem = op0;
16164 op0 = op1;
16165 op1 = tem;
16166 code = swap_tree_comparison (code);
16167 }
16168
16169 /* Note that it is safe to invert for real values here because we
16170      have already handled the one case where it matters (NaN comparisons). */
16171
16172 invert = 0;
16173 if (code == NE_EXPR || code == GE_EXPR)
16174 {
16175 invert = 1;
16176 code = invert_tree_comparison (code, false);
16177 }
16178
16179   /* Compute a result for LT or EQ if args permit;
16180      otherwise return NULL_TREE. */
16181 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16182 {
16183 if (code == EQ_EXPR)
16184 result = tree_int_cst_equal (op0, op1);
16185 else
16186 result = tree_int_cst_lt (op0, op1);
16187 }
16188 else
16189 return NULL_TREE;
16190
16191 if (invert)
16192 result ^= 1;
16193 return constant_boolean_node (result, type);
16194 }
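/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   Two facts the routine above relies on: with a NaN operand, == and the
   ordered <, <=, >, >= are all false while != is true; and once NaNs are
   excluded, GT/GE/LE reduce to LT by swapping operands and/or inverting
   the result, exactly as done above.  */
#if 0
#include <assert.h>
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double n = NAN;
  assert (!(n == n) && (n != n) && !(n < 1.0) && !(n >= 1.0));

  double a = 2.0, b = 3.0;		/* ordered, NaN-free operands */
  assert ((a > b) == (b < a));		/* GT: swap, then LT */
  assert ((a >= b) == !(a < b));	/* GE: LT, inverted */
  assert ((a <= b) == !(b < a));	/* LE: swap, LT, inverted */
  printf ("comparison reductions hold\n");
  return 0;
}
#endif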
16195
16196 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16197 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16198 itself. */
16199
16200 tree
16201 fold_build_cleanup_point_expr (tree type, tree expr)
16202 {
16203 /* If the expression does not have side effects then we don't have to wrap
16204 it with a cleanup point expression. */
16205 if (!TREE_SIDE_EFFECTS (expr))
16206 return expr;
16207
16208   /* If the expression is a return, check whether the expression inside
16209      the return, or the right-hand side of the modify expression inside
16210      the return, has side effects.  If either of them has none, we don't
16211      need to wrap the expression in a cleanup point expression.  Note we
16212      don't check the left-hand side of the modify because it should always be a return decl. */
16213 if (TREE_CODE (expr) == RETURN_EXPR)
16214 {
16215 tree op = TREE_OPERAND (expr, 0);
16216 if (!op || !TREE_SIDE_EFFECTS (op))
16217 return expr;
16218 op = TREE_OPERAND (op, 1);
16219 if (!TREE_SIDE_EFFECTS (op))
16220 return expr;
16221 }
16222
16223 return build1 (CLEANUP_POINT_EXPR, type, expr);
16224 }
16225
16226 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16227 of an indirection through OP0, or NULL_TREE if no simplification is
16228 possible. */
16229
16230 tree
16231 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16232 {
16233 tree sub = op0;
16234 tree subtype;
16235
16236 STRIP_NOPS (sub);
16237 subtype = TREE_TYPE (sub);
16238 if (!POINTER_TYPE_P (subtype))
16239 return NULL_TREE;
16240
16241 if (TREE_CODE (sub) == ADDR_EXPR)
16242 {
16243 tree op = TREE_OPERAND (sub, 0);
16244 tree optype = TREE_TYPE (op);
16245 /* *&CONST_DECL -> to the value of the const decl. */
16246 if (TREE_CODE (op) == CONST_DECL)
16247 return DECL_INITIAL (op);
16248 /* *&p => p; make sure to handle *&"str"[cst] here. */
16249 if (type == optype)
16250 {
16251 tree fop = fold_read_from_constant_string (op);
16252 if (fop)
16253 return fop;
16254 else
16255 return op;
16256 }
16257 /* *(foo *)&fooarray => fooarray[0] */
16258 else if (TREE_CODE (optype) == ARRAY_TYPE
16259 && type == TREE_TYPE (optype)
16260 && (!in_gimple_form
16261 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16262 {
16263 tree type_domain = TYPE_DOMAIN (optype);
16264 tree min_val = size_zero_node;
16265 if (type_domain && TYPE_MIN_VALUE (type_domain))
16266 min_val = TYPE_MIN_VALUE (type_domain);
16267 if (in_gimple_form
16268 && TREE_CODE (min_val) != INTEGER_CST)
16269 return NULL_TREE;
16270 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16271 NULL_TREE, NULL_TREE);
16272 }
16273 /* *(foo *)&complexfoo => __real__ complexfoo */
16274 else if (TREE_CODE (optype) == COMPLEX_TYPE
16275 && type == TREE_TYPE (optype))
16276 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16277 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16278 else if (TREE_CODE (optype) == VECTOR_TYPE
16279 && type == TREE_TYPE (optype))
16280 {
16281 tree part_width = TYPE_SIZE (type);
16282 tree index = bitsize_int (0);
16283 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16284 }
16285 }
16286
16287 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16288 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16289 {
16290 tree op00 = TREE_OPERAND (sub, 0);
16291 tree op01 = TREE_OPERAND (sub, 1);
16292
16293 STRIP_NOPS (op00);
16294 if (TREE_CODE (op00) == ADDR_EXPR)
16295 {
16296 tree op00type;
16297 op00 = TREE_OPERAND (op00, 0);
16298 op00type = TREE_TYPE (op00);
16299
16300 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16301 if (TREE_CODE (op00type) == VECTOR_TYPE
16302 && type == TREE_TYPE (op00type))
16303 {
16304 HOST_WIDE_INT offset = tree_to_shwi (op01);
16305 tree part_width = TYPE_SIZE (type);
16306 	      unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
16307 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16308 tree index = bitsize_int (indexi);
16309
16310 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16311 return fold_build3_loc (loc,
16312 BIT_FIELD_REF, type, op00,
16313 part_width, index);
16314
16315 }
16316 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16317 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16318 && type == TREE_TYPE (op00type))
16319 {
16320 tree size = TYPE_SIZE_UNIT (type);
16321 if (tree_int_cst_equal (size, op01))
16322 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16323 }
16324 /* ((foo *)&fooarray)[1] => fooarray[1] */
16325 else if (TREE_CODE (op00type) == ARRAY_TYPE
16326 && type == TREE_TYPE (op00type))
16327 {
16328 tree type_domain = TYPE_DOMAIN (op00type);
16329 tree min_val = size_zero_node;
16330 if (type_domain && TYPE_MIN_VALUE (type_domain))
16331 min_val = TYPE_MIN_VALUE (type_domain);
16332 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16333 TYPE_SIZE_UNIT (type));
16334 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16335 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16336 NULL_TREE, NULL_TREE);
16337 }
16338 }
16339 }
16340
16341 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16342 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16343 && type == TREE_TYPE (TREE_TYPE (subtype))
16344 && (!in_gimple_form
16345 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16346 {
16347 tree type_domain;
16348 tree min_val = size_zero_node;
16349 sub = build_fold_indirect_ref_loc (loc, sub);
16350 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16351 if (type_domain && TYPE_MIN_VALUE (type_domain))
16352 min_val = TYPE_MIN_VALUE (type_domain);
16353 if (in_gimple_form
16354 && TREE_CODE (min_val) != INTEGER_CST)
16355 return NULL_TREE;
16356 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16357 NULL_TREE);
16358 }
16359
16360 return NULL_TREE;
16361 }
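/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   The source-level equivalences behind several folds above: *(T *)&array
   is the first element, ((T *)&array)[1] the second, and C99 guarantees
   a complex value is laid out as two consecutive elements, matching the
   REALPART_EXPR/IMAGPART_EXPR folds.  */
#if 0
#include <assert.h>
#include <complex.h>
#include <stdio.h>

int
main (void)
{
  int fooarray[3] = { 10, 20, 30 };
  assert (*(int *) &fooarray == fooarray[0]);	  /* *(foo *)&fooarray */
  assert (((int *) &fooarray)[1] == fooarray[1]); /* ((foo *)&fooarray)[1] */

  double _Complex z = 1.0 + 2.0 * I;
  assert (((double *) &z)[0] == 1.0);	/* __real__ z */
  assert (((double *) &z)[1] == 2.0);	/* __imag__ z */
  printf ("indirection folds agree with direct access\n");
  return 0;
}
#endif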
16362
16363 /* Builds an expression for an indirection through T, simplifying some
16364 cases. */
16365
16366 tree
16367 build_fold_indirect_ref_loc (location_t loc, tree t)
16368 {
16369 tree type = TREE_TYPE (TREE_TYPE (t));
16370 tree sub = fold_indirect_ref_1 (loc, type, t);
16371
16372 if (sub)
16373 return sub;
16374
16375 return build1_loc (loc, INDIRECT_REF, type, t);
16376 }
16377
16378 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16379
16380 tree
16381 fold_indirect_ref_loc (location_t loc, tree t)
16382 {
16383 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16384
16385 if (sub)
16386 return sub;
16387 else
16388 return t;
16389 }
16390
16391 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16392 whose result is ignored. The type of the returned tree need not be
16393    the same as that of the original expression. */
16394
16395 tree
16396 fold_ignored_result (tree t)
16397 {
16398 if (!TREE_SIDE_EFFECTS (t))
16399 return integer_zero_node;
16400
16401 for (;;)
16402 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16403 {
16404 case tcc_unary:
16405 t = TREE_OPERAND (t, 0);
16406 break;
16407
16408 case tcc_binary:
16409 case tcc_comparison:
16410 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16411 t = TREE_OPERAND (t, 0);
16412 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16413 t = TREE_OPERAND (t, 1);
16414 else
16415 return t;
16416 break;
16417
16418 case tcc_expression:
16419 switch (TREE_CODE (t))
16420 {
16421 case COMPOUND_EXPR:
16422 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16423 return t;
16424 t = TREE_OPERAND (t, 0);
16425 break;
16426
16427 case COND_EXPR:
16428 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16429 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16430 return t;
16431 t = TREE_OPERAND (t, 0);
16432 break;
16433
16434 default:
16435 return t;
16436 }
16437 break;
16438
16439 default:
16440 return t;
16441 }
16442 }
16443
16444 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16445
16446 tree
16447 round_up_loc (location_t loc, tree value, unsigned int divisor)
16448 {
16449 tree div = NULL_TREE;
16450
16451 if (divisor == 1)
16452 return value;
16453
16454 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16455 have to do anything. Only do this when we are not given a const,
16456      because in that case, this check is more expensive than just
16457      doing the rounding. */
16458 if (TREE_CODE (value) != INTEGER_CST)
16459 {
16460 div = build_int_cst (TREE_TYPE (value), divisor);
16461
16462 if (multiple_of_p (TREE_TYPE (value), value, div))
16463 return value;
16464 }
16465
16466 /* If divisor is a power of two, simplify this to bit manipulation. */
16467 if (divisor == (divisor & -divisor))
16468 {
16469 if (TREE_CODE (value) == INTEGER_CST)
16470 {
16471 wide_int val = value;
16472 bool overflow_p;
16473
16474 if ((val & (divisor - 1)) == 0)
16475 return value;
16476
16477 overflow_p = TREE_OVERFLOW (value);
16478 val &= ~(divisor - 1);
16479 val += divisor;
16480 if (val == 0)
16481 overflow_p = true;
16482
16483 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16484 }
16485 else
16486 {
16487 tree t;
16488
16489 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16490 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16491 t = build_int_cst (TREE_TYPE (value), -divisor);
16492 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16493 }
16494 }
16495 else
16496 {
16497 if (!div)
16498 div = build_int_cst (TREE_TYPE (value), divisor);
16499 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16500 value = size_binop_loc (loc, MULT_EXPR, value, div);
16501 }
16502
16503 return value;
16504 }
16505
16506 /* Likewise, but round down. */
16507
16508 tree
16509 round_down_loc (location_t loc, tree value, int divisor)
16510 {
16511 tree div = NULL_TREE;
16512
16513 gcc_assert (divisor > 0);
16514 if (divisor == 1)
16515 return value;
16516
16517 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16518 have to do anything. Only do this when we are not given a const,
16519      because in that case, this check is more expensive than just
16520      doing the rounding. */
16521 if (TREE_CODE (value) != INTEGER_CST)
16522 {
16523 div = build_int_cst (TREE_TYPE (value), divisor);
16524
16525 if (multiple_of_p (TREE_TYPE (value), value, div))
16526 return value;
16527 }
16528
16529 /* If divisor is a power of two, simplify this to bit manipulation. */
16530 if (divisor == (divisor & -divisor))
16531 {
16532 tree t;
16533
16534 t = build_int_cst (TREE_TYPE (value), -divisor);
16535 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16536 }
16537 else
16538 {
16539 if (!div)
16540 div = build_int_cst (TREE_TYPE (value), divisor);
16541 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16542 value = size_binop_loc (loc, MULT_EXPR, value, div);
16543 }
16544
16545 return value;
16546 }
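/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   For a power-of-two divisor the two routines above reduce to bit
   manipulation: rounding up is (value + divisor - 1) & -divisor and
   rounding down is value & -divisor, because -divisor is a mask with the
   low log2(divisor) bits clear.  Helper names below are made up:  */
#if 0
#include <assert.h>
#include <stdio.h>

static unsigned
round_up_pow2 (unsigned value, unsigned divisor)
{
  return (value + divisor - 1) & -divisor;
}

static unsigned
round_down_pow2 (unsigned value, unsigned divisor)
{
  return value & -divisor;
}

int
main (void)
{
  unsigned v;
  for (v = 0; v < 1000; v++)
    {
      assert (round_up_pow2 (v, 8) == (v + 7) / 8 * 8);
      assert (round_down_pow2 (v, 8) == v / 8 * 8);
    }
  printf ("bit-twiddled rounding matches division\n");
  return 0;
}
#endif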
16547
16548 /* Returns a pointer to the base of the object addressed by EXP and
16549    extracts the information about the offset of the access, storing it
16550    in *PBITPOS and *POFFSET. */
16551
16552 static tree
16553 split_address_to_core_and_offset (tree exp,
16554 HOST_WIDE_INT *pbitpos, tree *poffset)
16555 {
16556 tree core;
16557 enum machine_mode mode;
16558 int unsignedp, volatilep;
16559 HOST_WIDE_INT bitsize;
16560 location_t loc = EXPR_LOCATION (exp);
16561
16562 if (TREE_CODE (exp) == ADDR_EXPR)
16563 {
16564 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16565 poffset, &mode, &unsignedp, &volatilep,
16566 false);
16567 core = build_fold_addr_expr_loc (loc, core);
16568 }
16569 else
16570 {
16571 core = exp;
16572 *pbitpos = 0;
16573 *poffset = NULL_TREE;
16574 }
16575
16576 return core;
16577 }
16578
16579 /* Returns true if addresses of E1 and E2 differ by a constant, false
16580 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16581
16582 bool
16583 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16584 {
16585 tree core1, core2;
16586 HOST_WIDE_INT bitpos1, bitpos2;
16587 tree toffset1, toffset2, tdiff, type;
16588
16589 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16590 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16591
16592 if (bitpos1 % BITS_PER_UNIT != 0
16593 || bitpos2 % BITS_PER_UNIT != 0
16594 || !operand_equal_p (core1, core2, 0))
16595 return false;
16596
16597 if (toffset1 && toffset2)
16598 {
16599 type = TREE_TYPE (toffset1);
16600 if (type != TREE_TYPE (toffset2))
16601 toffset2 = fold_convert (type, toffset2);
16602
16603 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16604 if (!cst_and_fits_in_hwi (tdiff))
16605 return false;
16606
16607 *diff = int_cst_value (tdiff);
16608 }
16609 else if (toffset1 || toffset2)
16610 {
16611 /* If only one of the offsets is non-constant, the difference cannot
16612 be a constant. */
16613 return false;
16614 }
16615 else
16616 *diff = 0;
16617
16618 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16619 return true;
16620 }
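/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   What a constant pointer difference means at the source level: two
   addresses based on the same core object differ by a compile-time byte
   count, here 3 * sizeof (int):  */
#if 0
#include <assert.h>
#include <stdio.h>

int
main (void)
{
  int a[10];
  char *p1 = (char *) &a[5];
  char *p2 = (char *) &a[2];

  /* Same core (the array a); only the constant offsets differ.  */
  assert (p1 - p2 == 3 * (long) sizeof (int));
  printf ("difference: %ld bytes\n", (long) (p1 - p2));
  return 0;
}
#endif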
16621
16622 /* Simplify the floating point expression EXP when the sign of the
16623 result is not significant. Return NULL_TREE if no simplification
16624 is possible. */
16625
16626 tree
16627 fold_strip_sign_ops (tree exp)
16628 {
16629 tree arg0, arg1;
16630 location_t loc = EXPR_LOCATION (exp);
16631
16632 switch (TREE_CODE (exp))
16633 {
16634 case ABS_EXPR:
16635 case NEGATE_EXPR:
16636 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16637 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16638
16639 case MULT_EXPR:
16640 case RDIV_EXPR:
16641 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16642 return NULL_TREE;
16643 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16644 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16645 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16646 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16647 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16648 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16649 break;
16650
16651 case COMPOUND_EXPR:
16652 arg0 = TREE_OPERAND (exp, 0);
16653 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16654 if (arg1)
16655 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16656 break;
16657
16658 case COND_EXPR:
16659 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16660 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16661 if (arg0 || arg1)
16662 return fold_build3_loc (loc,
16663 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16664 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16665 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16666 break;
16667
16668 case CALL_EXPR:
16669 {
16670 const enum built_in_function fcode = builtin_mathfn_code (exp);
16671 switch (fcode)
16672 {
16673 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16674 /* Strip copysign function call, return the 1st argument. */
16675 arg0 = CALL_EXPR_ARG (exp, 0);
16676 arg1 = CALL_EXPR_ARG (exp, 1);
16677 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16678
16679 default:
16680 /* Strip sign ops from the argument of "odd" math functions. */
16681 if (negate_mathfn_p (fcode))
16682 {
16683 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16684 if (arg0)
16685 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16686 }
16687 break;
16688 }
16689 }
16690 break;
16691
16692 default:
16693 break;
16694 }
16695 return NULL_TREE;
16696 }
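/* [Editor's note: illustrative, standalone sketch, not part of GCC.]
   The sign-stripping idea above: when only the magnitude of a result
   matters, ABS/NEGATE wrappers on the operands of a multiplication or
   division may be dropped, e.g. (-x) * fabs (y) has the same magnitude
   as x * y.  Compile with -lm:  */
#if 0
#include <assert.h>
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = -3.0, y = 2.5;
  double with_signs = (-x) * fabs (y);	/* sign ops present */
  double stripped   = x * y;		/* sign ops stripped */
  assert (fabs (with_signs) == fabs (stripped));
  printf ("|%g| == |%g|\n", with_signs, stripped);
  return 0;
}
#endif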