1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "basic-block.h"
62 #include "tree-ssa-alias.h"
63 #include "internal-fn.h"
64 #include "tree-eh.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
71 #include "builtins.h"
72 #include "cgraph.h"
73
74 /* Nonzero if we are folding constants inside an initializer; zero
75 otherwise. */
76 int folding_initializer = 0;
77
78 /* The following constants represent a bit based encoding of GCC's
79 comparison operators. This encoding simplifies transformations
80 on relational comparison operators, such as AND and OR. */
81 enum comparison_code {
82 COMPCODE_FALSE = 0,
83 COMPCODE_LT = 1,
84 COMPCODE_EQ = 2,
85 COMPCODE_LE = 3,
86 COMPCODE_GT = 4,
87 COMPCODE_LTGT = 5,
88 COMPCODE_GE = 6,
89 COMPCODE_ORD = 7,
90 COMPCODE_UNORD = 8,
91 COMPCODE_UNLT = 9,
92 COMPCODE_UNEQ = 10,
93 COMPCODE_UNLE = 11,
94 COMPCODE_UNGT = 12,
95 COMPCODE_NE = 13,
96 COMPCODE_UNGE = 14,
97 COMPCODE_TRUE = 15
98 };
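/* Editorial note: a minimal sketch (not part of GCC) showing why this
   encoding makes combining comparisons trivial: bit 0 means "less",
   bit 1 "equal", bit 2 "greater" and bit 3 "unordered", so ANDing or
   ORing two codes intersects or unions the corresponding relations.  */
#if 0
#include <assert.h>

static void
compcode_encoding_demo (void)
{
  /* a <= b is exactly (a < b) || (a == b).  */
  assert (COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ));
  /* a != b means less, greater, or unordered (relevant with NaNs).  */
  assert (COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD));
  /* (a < b) && (a <= b) simplifies to a < b by intersecting the sets.  */
  assert ((COMPCODE_LT & COMPCODE_LE) == COMPCODE_LT);
}
#endif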
99
100 static bool negate_mathfn_p (enum built_in_function);
101 static bool negate_expr_p (tree);
102 static tree negate_expr (tree);
103 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
104 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
105 static tree const_binop (enum tree_code, tree, tree);
106 static enum comparison_code comparison_to_compcode (enum tree_code);
107 static enum tree_code compcode_to_comparison (enum comparison_code);
108 static int operand_equal_for_comparison_p (tree, tree, tree);
109 static int twoval_comparison_p (tree, tree *, tree *, int *);
110 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
111 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
112 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
113 static tree make_bit_field_ref (location_t, tree, tree,
114 HOST_WIDE_INT, HOST_WIDE_INT, int);
115 static tree optimize_bit_field_compare (location_t, enum tree_code,
116 tree, tree, tree);
117 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
118 HOST_WIDE_INT *,
119 enum machine_mode *, int *, int *,
120 tree *, tree *);
121 static tree sign_bit_p (tree, const_tree);
122 static int simple_operand_p (const_tree);
123 static bool simple_operand_p_2 (tree);
124 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
125 static tree range_predecessor (tree);
126 static tree range_successor (tree);
127 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree optimize_minmax_comparison (location_t, enum tree_code,
131 tree, tree, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (location_t,
135 enum tree_code, tree,
136 tree, tree,
137 tree, tree, int);
138 static tree fold_mathfn_compare (location_t,
139 enum built_in_function, enum tree_code,
140 tree, tree, tree);
141 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
142 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
143 static bool reorder_operands_p (const_tree, const_tree);
144 static tree fold_negate_const (tree, tree);
145 static tree fold_not_const (const_tree, tree);
146 static tree fold_relational_const (enum tree_code, tree, tree, tree);
147 static tree fold_convert_const (enum tree_code, tree, tree);
148
149 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
150 Otherwise, return LOC. */
151
152 static location_t
153 expr_location_or (tree t, location_t loc)
154 {
155 location_t tloc = EXPR_LOCATION (t);
156 return tloc == UNKNOWN_LOCATION ? loc : tloc;
157 }
158
159 /* Similar to protected_set_expr_location, but never modify X in place;
160 if the location can and needs to be set, unshare X first. */
161
162 static inline tree
163 protected_set_expr_location_unshare (tree x, location_t loc)
164 {
165 if (CAN_HAVE_LOCATION_P (x)
166 && EXPR_LOCATION (x) != loc
167 && !(TREE_CODE (x) == SAVE_EXPR
168 || TREE_CODE (x) == TARGET_EXPR
169 || TREE_CODE (x) == BIND_EXPR))
170 {
171 x = copy_node (x);
172 SET_EXPR_LOCATION (x, loc);
173 }
174 return x;
175 }
176 \f
177 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
178 division and returns the quotient. Otherwise returns
179 NULL_TREE. */
180
181 tree
182 div_if_zero_remainder (const_tree arg1, const_tree arg2)
183 {
184 widest_int quo;
185
186 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
187 SIGNED, &quo))
188 return wide_int_to_tree (TREE_TYPE (arg1), quo);
189
190 return NULL_TREE;
191 }
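/* Editorial sketch (hypothetical helper, not GCC code): the same
   "divide only when the remainder is zero" contract as
   div_if_zero_remainder, expressed on plain integers instead of
   wide_ints.  */
#if 0
#include <stdbool.h>

/* Store A / B in *QUO and return true iff B divides A exactly.  */
static bool
div_if_zero_remainder_demo (long a, long b, long *quo)
{
  if (b == 0 || a % b != 0)
    return false;
  *quo = a / b;
  return true;
}
#endif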
192 \f
193 /* This is nonzero if we should defer warnings about undefined
194 overflow. This facility exists because these warnings are a
195 special case. The code to estimate loop iterations does not want
196 to issue any warnings, since it works with expressions which do not
197 occur in user code. Various bits of cleanup code call fold(), but
198 only use the result if it has certain characteristics (e.g., is a
199 constant); that code only wants to issue a warning if the result is
200 used. */
201
202 static int fold_deferring_overflow_warnings;
203
204 /* If a warning about undefined overflow is deferred, this is the
205 warning. Note that this may cause us to turn two warnings into
206 one, but that is fine since it is sufficient to only give one
207 warning per expression. */
208
209 static const char* fold_deferred_overflow_warning;
210
211 /* If a warning about undefined overflow is deferred, this is the
212 level at which the warning should be emitted. */
213
214 static enum warn_strict_overflow_code fold_deferred_overflow_code;
215
216 /* Start deferring overflow warnings. We could use a stack here to
217 permit nested calls, but at present it is not necessary. */
218
219 void
220 fold_defer_overflow_warnings (void)
221 {
222 ++fold_deferring_overflow_warnings;
223 }
224
225 /* Stop deferring overflow warnings. If there is a pending warning,
226 and ISSUE is true, then issue the warning if appropriate. STMT is
227 the statement with which the warning should be associated (used for
228 location information); STMT may be NULL. CODE is the level of the
229 warning--a warn_strict_overflow_code value. This function will use
230 the smaller of CODE and the deferred code when deciding whether to
231 issue the warning. CODE may be zero to mean to always use the
232 deferred code. */
233
234 void
235 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
236 {
237 const char *warnmsg;
238 location_t locus;
239
240 gcc_assert (fold_deferring_overflow_warnings > 0);
241 --fold_deferring_overflow_warnings;
242 if (fold_deferring_overflow_warnings > 0)
243 {
244 if (fold_deferred_overflow_warning != NULL
245 && code != 0
246 && code < (int) fold_deferred_overflow_code)
247 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
248 return;
249 }
250
251 warnmsg = fold_deferred_overflow_warning;
252 fold_deferred_overflow_warning = NULL;
253
254 if (!issue || warnmsg == NULL)
255 return;
256
257 if (gimple_no_warning_p (stmt))
258 return;
259
260 /* Use the smallest code level when deciding to issue the
261 warning. */
262 if (code == 0 || code > (int) fold_deferred_overflow_code)
263 code = fold_deferred_overflow_code;
264
265 if (!issue_strict_overflow_warning (code))
266 return;
267
268 if (stmt == NULL)
269 locus = input_location;
270 else
271 locus = gimple_location (stmt);
272 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
273 }
274
275 /* Stop deferring overflow warnings, ignoring any deferred
276 warnings. */
277
278 void
279 fold_undefer_and_ignore_overflow_warnings (void)
280 {
281 fold_undefer_overflow_warnings (false, NULL, 0);
282 }
283
284 /* Whether we are deferring overflow warnings. */
285
286 bool
287 fold_deferring_overflow_warnings_p (void)
288 {
289 return fold_deferring_overflow_warnings > 0;
290 }
291
292 /* This is called when we fold something based on the fact that signed
293 overflow is undefined. */
294
295 static void
296 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
297 {
298 if (fold_deferring_overflow_warnings > 0)
299 {
300 if (fold_deferred_overflow_warning == NULL
301 || wc < fold_deferred_overflow_code)
302 {
303 fold_deferred_overflow_warning = gmsgid;
304 fold_deferred_overflow_code = wc;
305 }
306 }
307 else if (issue_strict_overflow_warning (wc))
308 warning (OPT_Wstrict_overflow, gmsgid);
309 }
310 \f
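/* Editorial sketch (hypothetical names, not GCC code): the deferral
   machinery above reduces to a depth counter plus one saved message.
   A numerically smaller warning code wins because it corresponds to a
   lower -Wstrict-overflow level and is therefore issued more often.  */
#if 0
static int demo_defer_depth;
static const char *demo_pending_msg;
static int demo_pending_code;

static void
demo_overflow_warning (const char *msg, int code)
{
  if (demo_defer_depth > 0)
    {
      /* Remember only the most significant pending warning.  */
      if (demo_pending_msg == 0 || code < demo_pending_code)
        {
          demo_pending_msg = msg;
          demo_pending_code = code;
        }
    }
  /* else: issue the warning immediately, as fold_overflow_warning
     does via warning (OPT_Wstrict_overflow, ...).  */
}
#endif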
311 /* Return true if the built-in mathematical function specified by CODE
312 is odd, i.e. -f(x) == f(-x). */
313
314 static bool
315 negate_mathfn_p (enum built_in_function code)
316 {
317 switch (code)
318 {
319 CASE_FLT_FN (BUILT_IN_ASIN):
320 CASE_FLT_FN (BUILT_IN_ASINH):
321 CASE_FLT_FN (BUILT_IN_ATAN):
322 CASE_FLT_FN (BUILT_IN_ATANH):
323 CASE_FLT_FN (BUILT_IN_CASIN):
324 CASE_FLT_FN (BUILT_IN_CASINH):
325 CASE_FLT_FN (BUILT_IN_CATAN):
326 CASE_FLT_FN (BUILT_IN_CATANH):
327 CASE_FLT_FN (BUILT_IN_CBRT):
328 CASE_FLT_FN (BUILT_IN_CPROJ):
329 CASE_FLT_FN (BUILT_IN_CSIN):
330 CASE_FLT_FN (BUILT_IN_CSINH):
331 CASE_FLT_FN (BUILT_IN_CTAN):
332 CASE_FLT_FN (BUILT_IN_CTANH):
333 CASE_FLT_FN (BUILT_IN_ERF):
334 CASE_FLT_FN (BUILT_IN_LLROUND):
335 CASE_FLT_FN (BUILT_IN_LROUND):
336 CASE_FLT_FN (BUILT_IN_ROUND):
337 CASE_FLT_FN (BUILT_IN_SIN):
338 CASE_FLT_FN (BUILT_IN_SINH):
339 CASE_FLT_FN (BUILT_IN_TAN):
340 CASE_FLT_FN (BUILT_IN_TANH):
341 CASE_FLT_FN (BUILT_IN_TRUNC):
342 return true;
343
344 CASE_FLT_FN (BUILT_IN_LLRINT):
345 CASE_FLT_FN (BUILT_IN_LRINT):
346 CASE_FLT_FN (BUILT_IN_NEARBYINT):
347 CASE_FLT_FN (BUILT_IN_RINT):
348 return !flag_rounding_math;
349
350 default:
351 break;
352 }
353 return false;
354 }
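/* Editorial note: "odd" is meant in the usual mathematical sense,
   -f(x) == f(-x), which is what allows fold to push a negation into
   the call's argument.  A minimal sketch (not GCC code):  */
#if 0
#include <assert.h>
#include <math.h>

static void
odd_mathfn_demo (void)
{
  /* round is odd-symmetric, so folding -round(x) to round(-x) is
     exact; an even function such as cos is absent from the list.  */
  assert (-round (2.5) == round (-2.5));
}
#endif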
355
356 /* Check whether we may negate an integer constant T without causing
357 overflow. */
358
359 bool
360 may_negate_without_overflow_p (const_tree t)
361 {
362 tree type;
363
364 gcc_assert (TREE_CODE (t) == INTEGER_CST);
365
366 type = TREE_TYPE (t);
367 if (TYPE_UNSIGNED (type))
368 return false;
369
370 return !wi::only_sign_bit_p (t);
371 }
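/* Editorial sketch (not GCC code): in two's complement the only value
   whose negation overflows is the type minimum -- the value with just
   the sign bit set, which is what wi::only_sign_bit_p tests above.  */
#if 0
#include <limits.h>
#include <stdbool.h>

static bool
may_negate_int_demo (int v)
{
  /* -INT_MIN does not fit in int; every other value negates safely.  */
  return v != INT_MIN;
}
#endif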
372
373 /* Determine whether an expression T can be cheaply negated using
374 the function negate_expr without introducing undefined overflow. */
375
376 static bool
377 negate_expr_p (tree t)
378 {
379 tree type;
380
381 if (t == 0)
382 return false;
383
384 type = TREE_TYPE (t);
385
386 STRIP_SIGN_NOPS (t);
387 switch (TREE_CODE (t))
388 {
389 case INTEGER_CST:
390 if (TYPE_OVERFLOW_WRAPS (type))
391 return true;
392
393 /* Check that -CST will not overflow type. */
394 return may_negate_without_overflow_p (t);
395 case BIT_NOT_EXPR:
396 return (INTEGRAL_TYPE_P (type)
397 && TYPE_OVERFLOW_WRAPS (type));
398
399 case FIXED_CST:
400 case NEGATE_EXPR:
401 return true;
402
403 case REAL_CST:
404 /* We want to canonicalize to positive real constants. Pretend
405 that only negative ones can be easily negated. */
406 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
407
408 case COMPLEX_CST:
409 return negate_expr_p (TREE_REALPART (t))
410 && negate_expr_p (TREE_IMAGPART (t));
411
412 case VECTOR_CST:
413 {
414 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
415 return true;
416
417 int count = TYPE_VECTOR_SUBPARTS (type), i;
418
419 for (i = 0; i < count; i++)
420 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
421 return false;
422
423 return true;
424 }
425
426 case COMPLEX_EXPR:
427 return negate_expr_p (TREE_OPERAND (t, 0))
428 && negate_expr_p (TREE_OPERAND (t, 1));
429
430 case CONJ_EXPR:
431 return negate_expr_p (TREE_OPERAND (t, 0));
432
433 case PLUS_EXPR:
434 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
435 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
436 return false;
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t, 1))
439 && reorder_operands_p (TREE_OPERAND (t, 0),
440 TREE_OPERAND (t, 1)))
441 return true;
442 /* -(A + B) -> (-A) - B. */
443 return negate_expr_p (TREE_OPERAND (t, 0));
444
445 case MINUS_EXPR:
446 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
447 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
448 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
449 && reorder_operands_p (TREE_OPERAND (t, 0),
450 TREE_OPERAND (t, 1));
451
452 case MULT_EXPR:
453 if (TYPE_UNSIGNED (TREE_TYPE (t)))
454 break;
455
456 /* Fall through. */
457
458 case RDIV_EXPR:
459 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
460 return negate_expr_p (TREE_OPERAND (t, 1))
461 || negate_expr_p (TREE_OPERAND (t, 0));
462 break;
463
464 case TRUNC_DIV_EXPR:
465 case ROUND_DIV_EXPR:
466 case EXACT_DIV_EXPR:
467 /* In general we can't negate A / B, because if A is INT_MIN and
468 B is 1, we may turn this into INT_MIN / -1 which is undefined
469 and actually traps on some architectures. But if overflow is
470 undefined, we can negate, because - (INT_MIN / 1) is an
471 overflow. */
472 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
473 {
474 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
475 break;
476 /* If overflow is undefined then we have to be careful because
477 we ask whether it's ok to associate the negate with the
478 division which is not ok for example for
479 -((a - b) / c) where (-(a - b)) / c may invoke undefined
480 overflow because of negating INT_MIN. So do not use
481 negate_expr_p here but open-code the two important cases. */
482 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
483 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
484 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
485 return true;
486 }
487 else if (negate_expr_p (TREE_OPERAND (t, 0)))
488 return true;
489 return negate_expr_p (TREE_OPERAND (t, 1));
490
491 case NOP_EXPR:
492 /* Negate -((double)float) as (double)(-float). */
493 if (TREE_CODE (type) == REAL_TYPE)
494 {
495 tree tem = strip_float_extensions (t);
496 if (tem != t)
497 return negate_expr_p (tem);
498 }
499 break;
500
501 case CALL_EXPR:
502 /* Negate -f(x) as f(-x). */
503 if (negate_mathfn_p (builtin_mathfn_code (t)))
504 return negate_expr_p (CALL_EXPR_ARG (t, 0));
505 break;
506
507 case RSHIFT_EXPR:
508 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
509 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
510 {
511 tree op1 = TREE_OPERAND (t, 1);
512 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
513 return true;
514 }
515 break;
516
517 default:
518 break;
519 }
520 return false;
521 }
522
523 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
524 simplification is possible.
525 If negate_expr_p would return true for T, NULL_TREE will never be
526 returned. */
527
528 static tree
529 fold_negate_expr (location_t loc, tree t)
530 {
531 tree type = TREE_TYPE (t);
532 tree tem;
533
534 switch (TREE_CODE (t))
535 {
536 /* Convert - (~A) to A + 1. */
537 case BIT_NOT_EXPR:
538 if (INTEGRAL_TYPE_P (type))
539 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
540 build_one_cst (type));
541 break;
542
543 case INTEGER_CST:
544 tem = fold_negate_const (t, type);
545 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
546 || !TYPE_OVERFLOW_TRAPS (type))
547 return tem;
548 break;
549
550 case REAL_CST:
551 tem = fold_negate_const (t, type);
552 /* Two's complement FP formats, such as c4x, may overflow. */
553 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
554 return tem;
555 break;
556
557 case FIXED_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
560
561 case COMPLEX_CST:
562 {
563 tree rpart = negate_expr (TREE_REALPART (t));
564 tree ipart = negate_expr (TREE_IMAGPART (t));
565
566 if ((TREE_CODE (rpart) == REAL_CST
567 && TREE_CODE (ipart) == REAL_CST)
568 || (TREE_CODE (rpart) == INTEGER_CST
569 && TREE_CODE (ipart) == INTEGER_CST))
570 return build_complex (type, rpart, ipart);
571 }
572 break;
573
574 case VECTOR_CST:
575 {
576 int count = TYPE_VECTOR_SUBPARTS (type), i;
577 tree *elts = XALLOCAVEC (tree, count);
578
579 for (i = 0; i < count; i++)
580 {
581 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
582 if (elts[i] == NULL_TREE)
583 return NULL_TREE;
584 }
585
586 return build_vector (type, elts);
587 }
588
589 case COMPLEX_EXPR:
590 if (negate_expr_p (t))
591 return fold_build2_loc (loc, COMPLEX_EXPR, type,
592 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
593 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
594 break;
595
596 case CONJ_EXPR:
597 if (negate_expr_p (t))
598 return fold_build1_loc (loc, CONJ_EXPR, type,
599 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
600 break;
601
602 case NEGATE_EXPR:
603 return TREE_OPERAND (t, 0);
604
605 case PLUS_EXPR:
606 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
607 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
608 {
609 /* -(A + B) -> (-B) - A. */
610 if (negate_expr_p (TREE_OPERAND (t, 1))
611 && reorder_operands_p (TREE_OPERAND (t, 0),
612 TREE_OPERAND (t, 1)))
613 {
614 tem = negate_expr (TREE_OPERAND (t, 1));
615 return fold_build2_loc (loc, MINUS_EXPR, type,
616 tem, TREE_OPERAND (t, 0));
617 }
618
619 /* -(A + B) -> (-A) - B. */
620 if (negate_expr_p (TREE_OPERAND (t, 0)))
621 {
622 tem = negate_expr (TREE_OPERAND (t, 0));
623 return fold_build2_loc (loc, MINUS_EXPR, type,
624 tem, TREE_OPERAND (t, 1));
625 }
626 }
627 break;
628
629 case MINUS_EXPR:
630 /* - (A - B) -> B - A */
631 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
632 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
633 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
634 return fold_build2_loc (loc, MINUS_EXPR, type,
635 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
636 break;
637
638 case MULT_EXPR:
639 if (TYPE_UNSIGNED (type))
640 break;
641
642 /* Fall through. */
643
644 case RDIV_EXPR:
645 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
646 {
647 tem = TREE_OPERAND (t, 1);
648 if (negate_expr_p (tem))
649 return fold_build2_loc (loc, TREE_CODE (t), type,
650 TREE_OPERAND (t, 0), negate_expr (tem));
651 tem = TREE_OPERAND (t, 0);
652 if (negate_expr_p (tem))
653 return fold_build2_loc (loc, TREE_CODE (t), type,
654 negate_expr (tem), TREE_OPERAND (t, 1));
655 }
656 break;
657
658 case TRUNC_DIV_EXPR:
659 case ROUND_DIV_EXPR:
660 case EXACT_DIV_EXPR:
661 /* In general we can't negate A / B, because if A is INT_MIN and
662 B is 1, we may turn this into INT_MIN / -1 which is undefined
663 and actually traps on some architectures. But if overflow is
664 undefined, we can negate, because - (INT_MIN / 1) is an
665 overflow. */
666 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
667 {
668 const char * const warnmsg = G_("assuming signed overflow does not "
669 "occur when negating a division");
670 tem = TREE_OPERAND (t, 1);
671 if (negate_expr_p (tem))
672 {
673 if (INTEGRAL_TYPE_P (type)
674 && (TREE_CODE (tem) != INTEGER_CST
675 || integer_onep (tem)))
676 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
677 return fold_build2_loc (loc, TREE_CODE (t), type,
678 TREE_OPERAND (t, 0), negate_expr (tem));
679 }
680 /* If overflow is undefined then we have to be careful because
681 we ask whether it's ok to associate the negate with the
682 division which is not ok for example for
683 -((a - b) / c) where (-(a - b)) / c may invoke undefined
684 overflow because of negating INT_MIN. So do not use
685 negate_expr_p here but open-code the two important cases. */
686 tem = TREE_OPERAND (t, 0);
687 if ((INTEGRAL_TYPE_P (type)
688 && (TREE_CODE (tem) == NEGATE_EXPR
689 || (TREE_CODE (tem) == INTEGER_CST
690 && may_negate_without_overflow_p (tem))))
691 || !INTEGRAL_TYPE_P (type))
692 return fold_build2_loc (loc, TREE_CODE (t), type,
693 negate_expr (tem), TREE_OPERAND (t, 1));
694 }
695 break;
696
697 case NOP_EXPR:
698 /* Convert -((double)float) into (double)(-float). */
699 if (TREE_CODE (type) == REAL_TYPE)
700 {
701 tem = strip_float_extensions (t);
702 if (tem != t && negate_expr_p (tem))
703 return fold_convert_loc (loc, type, negate_expr (tem));
704 }
705 break;
706
707 case CALL_EXPR:
708 /* Negate -f(x) as f(-x). */
709 if (negate_mathfn_p (builtin_mathfn_code (t))
710 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
711 {
712 tree fndecl, arg;
713
714 fndecl = get_callee_fndecl (t);
715 arg = negate_expr (CALL_EXPR_ARG (t, 0));
716 return build_call_expr_loc (loc, fndecl, 1, arg);
717 }
718 break;
719
720 case RSHIFT_EXPR:
721 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
722 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
723 {
724 tree op1 = TREE_OPERAND (t, 1);
725 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
726 {
727 tree ntype = TYPE_UNSIGNED (type)
728 ? signed_type_for (type)
729 : unsigned_type_for (type);
730 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
731 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
732 return fold_convert_loc (loc, type, temp);
733 }
734 }
735 break;
736
737 default:
738 break;
739 }
740
741 return NULL_TREE;
742 }
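/* Editorial sketch (not GCC code): the RSHIFT_EXPR case above made
   concrete.  Assuming 32-bit int and the usual arithmetic right shift
   of negative values, (int)x >> 31 is 0 or -1, so its negation is 0
   or 1 -- exactly what the logical shift (unsigned)x >> 31 yields.  */
#if 0
#include <assert.h>

static void
negate_rshift_demo (int x)
{
  assert (-(x >> 31) == (int) ((unsigned) x >> 31));
}
#endif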
743
744 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
745 negated in a simpler way. Also allow T to be NULL_TREE, in which case
746 NULL_TREE is returned. */
747
748 static tree
749 negate_expr (tree t)
750 {
751 tree type, tem;
752 location_t loc;
753
754 if (t == NULL_TREE)
755 return NULL_TREE;
756
757 loc = EXPR_LOCATION (t);
758 type = TREE_TYPE (t);
759 STRIP_SIGN_NOPS (t);
760
761 tem = fold_negate_expr (loc, t);
762 if (!tem)
763 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
764 return fold_convert_loc (loc, type, tem);
765 }
766 \f
767 /* Split a tree IN into constant, literal, and variable parts that could be
768 combined with CODE to make IN. "constant" means an expression with
769 TREE_CONSTANT but that isn't an actual constant. CODE must be a
770 commutative arithmetic operation. Store the constant part into *CONP,
771 the literal in *LITP and return the variable part. If a part isn't
772 present, set it to null. If the tree does not decompose in this way,
773 return the entire tree as the variable part and the other parts as null.
774
775 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
776 case, we negate an operand that was subtracted, except if it is a
777 literal, for which we use *MINUS_LITP instead.
778
779 If NEGATE_P is true, we are negating all of IN, again except a literal
780 for which we use *MINUS_LITP instead.
781
782 If IN is itself a literal or constant, return it as appropriate.
783
784 Note that we do not guarantee that any of the three values will be the
785 same type as IN, but they will have the same signedness and mode. */
786
787 static tree
788 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
789 tree *minus_litp, int negate_p)
790 {
791 tree var = 0;
792
793 *conp = 0;
794 *litp = 0;
795 *minus_litp = 0;
796
797 /* Strip any conversions that don't change the machine mode or signedness. */
798 STRIP_SIGN_NOPS (in);
799
800 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
801 || TREE_CODE (in) == FIXED_CST)
802 *litp = in;
803 else if (TREE_CODE (in) == code
804 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
805 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
806 /* We can associate addition and subtraction together (even
807 though the C standard doesn't say so) for integers because
808 the value is not affected. For reals, the value might be
809 affected, so we can't. */
810 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
811 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
812 {
813 tree op0 = TREE_OPERAND (in, 0);
814 tree op1 = TREE_OPERAND (in, 1);
815 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
816 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
817
818 /* First see if either of the operands is a literal, then a constant. */
819 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
820 || TREE_CODE (op0) == FIXED_CST)
821 *litp = op0, op0 = 0;
822 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
823 || TREE_CODE (op1) == FIXED_CST)
824 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
825
826 if (op0 != 0 && TREE_CONSTANT (op0))
827 *conp = op0, op0 = 0;
828 else if (op1 != 0 && TREE_CONSTANT (op1))
829 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
830
831 /* If we haven't dealt with either operand, this is not a case we can
832 decompose. Otherwise, VAR is either of the ones remaining, if any. */
833 if (op0 != 0 && op1 != 0)
834 var = in;
835 else if (op0 != 0)
836 var = op0;
837 else
838 var = op1, neg_var_p = neg1_p;
839
840 /* Now do any needed negations. */
841 if (neg_litp_p)
842 *minus_litp = *litp, *litp = 0;
843 if (neg_conp_p)
844 *conp = negate_expr (*conp);
845 if (neg_var_p)
846 var = negate_expr (var);
847 }
848 else if (TREE_CODE (in) == BIT_NOT_EXPR
849 && code == PLUS_EXPR)
850 {
851 /* -X - 1 is folded to ~X, undo that here. */
852 *minus_litp = build_one_cst (TREE_TYPE (in));
853 var = negate_expr (TREE_OPERAND (in, 0));
854 }
855 else if (TREE_CONSTANT (in))
856 *conp = in;
857 else
858 var = in;
859
860 if (negate_p)
861 {
862 if (*litp)
863 *minus_litp = *litp, *litp = 0;
864 else if (*minus_litp)
865 *litp = *minus_litp, *minus_litp = 0;
866 *conp = negate_expr (*conp);
867 var = negate_expr (var);
868 }
869
870 return var;
871 }
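/* Editorial note: a hypothetical usage sketch of split_tree (the call
   itself is real, the scenario is illustrative).  Decomposing
   IN == "x - 4" with CODE == PLUS_EXPR:  */
#if 0
tree con, lit, minus_lit;
tree var = split_tree (in /* x - 4 */, PLUS_EXPR,
                       &con, &lit, &minus_lit, /*negate_p=*/0);
/* Result: var == x, con == NULL, lit == NULL, minus_lit == 4, and IN
   recombines as var + con + lit - minus_lit, i.e. x - 4.  */
#endif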
872
873 /* Re-associate trees split by the above function. T1 and T2 are
874 either expressions to associate or null. Return the new
875 expression, if any. LOC is the location of the new expression. If
876 we build an operation, do it in TYPE and with CODE. */
877
878 static tree
879 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
880 {
881 if (t1 == 0)
882 return t2;
883 else if (t2 == 0)
884 return t1;
885
886 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
887 try to fold this since we will have infinite recursion. But do
888 deal with any NEGATE_EXPRs. */
889 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
890 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
891 {
892 if (code == PLUS_EXPR)
893 {
894 if (TREE_CODE (t1) == NEGATE_EXPR)
895 return build2_loc (loc, MINUS_EXPR, type,
896 fold_convert_loc (loc, type, t2),
897 fold_convert_loc (loc, type,
898 TREE_OPERAND (t1, 0)));
899 else if (TREE_CODE (t2) == NEGATE_EXPR)
900 return build2_loc (loc, MINUS_EXPR, type,
901 fold_convert_loc (loc, type, t1),
902 fold_convert_loc (loc, type,
903 TREE_OPERAND (t2, 0)));
904 else if (integer_zerop (t2))
905 return fold_convert_loc (loc, type, t1);
906 }
907 else if (code == MINUS_EXPR)
908 {
909 if (integer_zerop (t2))
910 return fold_convert_loc (loc, type, t1);
911 }
912
913 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
914 fold_convert_loc (loc, type, t2));
915 }
916
917 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
918 fold_convert_loc (loc, type, t2));
919 }
920 \f
921 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
922 for use in int_const_binop, size_binop and size_diffop. */
923
924 static bool
925 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
926 {
927 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
928 return false;
929 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
930 return false;
931
932 switch (code)
933 {
934 case LSHIFT_EXPR:
935 case RSHIFT_EXPR:
936 case LROTATE_EXPR:
937 case RROTATE_EXPR:
938 return true;
939
940 default:
941 break;
942 }
943
944 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
945 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
946 && TYPE_MODE (type1) == TYPE_MODE (type2);
947 }
948
949
950 /* Combine two integer constants ARG1 and ARG2 under operation CODE
951 to produce a new constant. Return NULL_TREE if we don't know how
952 to evaluate CODE at compile-time. */
953
954 static tree
955 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
956 int overflowable)
957 {
958 wide_int res;
959 tree t;
960 tree type = TREE_TYPE (arg1);
961 signop sign = TYPE_SIGN (type);
962 bool overflow = false;
963
964 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
965 TYPE_SIGN (TREE_TYPE (parg2)));
966
967 switch (code)
968 {
969 case BIT_IOR_EXPR:
970 res = wi::bit_or (arg1, arg2);
971 break;
972
973 case BIT_XOR_EXPR:
974 res = wi::bit_xor (arg1, arg2);
975 break;
976
977 case BIT_AND_EXPR:
978 res = wi::bit_and (arg1, arg2);
979 break;
980
981 case RSHIFT_EXPR:
982 case LSHIFT_EXPR:
983 if (wi::neg_p (arg2))
984 {
985 arg2 = -arg2;
986 if (code == RSHIFT_EXPR)
987 code = LSHIFT_EXPR;
988 else
989 code = RSHIFT_EXPR;
990 }
991
992 if (code == RSHIFT_EXPR)
993 /* It's unclear from the C standard whether shifts can overflow.
994 The following code ignores overflow; perhaps a C standard
995 interpretation ruling is needed. */
996 res = wi::rshift (arg1, arg2, sign);
997 else
998 res = wi::lshift (arg1, arg2);
999 break;
1000
1001 case RROTATE_EXPR:
1002 case LROTATE_EXPR:
1003 if (wi::neg_p (arg2))
1004 {
1005 arg2 = -arg2;
1006 if (code == RROTATE_EXPR)
1007 code = LROTATE_EXPR;
1008 else
1009 code = RROTATE_EXPR;
1010 }
1011
1012 if (code == RROTATE_EXPR)
1013 res = wi::rrotate (arg1, arg2);
1014 else
1015 res = wi::lrotate (arg1, arg2);
1016 break;
1017
1018 case PLUS_EXPR:
1019 res = wi::add (arg1, arg2, sign, &overflow);
1020 break;
1021
1022 case MINUS_EXPR:
1023 res = wi::sub (arg1, arg2, sign, &overflow);
1024 break;
1025
1026 case MULT_EXPR:
1027 res = wi::mul (arg1, arg2, sign, &overflow);
1028 break;
1029
1030 case MULT_HIGHPART_EXPR:
1031 res = wi::mul_high (arg1, arg2, sign);
1032 break;
1033
1034 case TRUNC_DIV_EXPR:
1035 case EXACT_DIV_EXPR:
1036 if (arg2 == 0)
1037 return NULL_TREE;
1038 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1039 break;
1040
1041 case FLOOR_DIV_EXPR:
1042 if (arg2 == 0)
1043 return NULL_TREE;
1044 res = wi::div_floor (arg1, arg2, sign, &overflow);
1045 break;
1046
1047 case CEIL_DIV_EXPR:
1048 if (arg2 == 0)
1049 return NULL_TREE;
1050 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1051 break;
1052
1053 case ROUND_DIV_EXPR:
1054 if (arg2 == 0)
1055 return NULL_TREE;
1056 res = wi::div_round (arg1, arg2, sign, &overflow);
1057 break;
1058
1059 case TRUNC_MOD_EXPR:
1060 if (arg2 == 0)
1061 return NULL_TREE;
1062 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1063 break;
1064
1065 case FLOOR_MOD_EXPR:
1066 if (arg2 == 0)
1067 return NULL_TREE;
1068 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1069 break;
1070
1071 case CEIL_MOD_EXPR:
1072 if (arg2 == 0)
1073 return NULL_TREE;
1074 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1075 break;
1076
1077 case ROUND_MOD_EXPR:
1078 if (arg2 == 0)
1079 return NULL_TREE;
1080 res = wi::mod_round (arg1, arg2, sign, &overflow);
1081 break;
1082
1083 case MIN_EXPR:
1084 res = wi::min (arg1, arg2, sign);
1085 break;
1086
1087 case MAX_EXPR:
1088 res = wi::max (arg1, arg2, sign);
1089 break;
1090
1091 default:
1092 return NULL_TREE;
1093 }
1094
1095 t = force_fit_type (type, res, overflowable,
1096 (((sign == SIGNED || overflowable == -1)
1097 && overflow)
1098 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1099
1100 return t;
1101 }
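/* Editorial sketch (not GCC code): the shape of int_const_binop_1 --
   compute the result, capture an overflow flag, and let the caller
   decide how to record it -- mirrored on plain ints with the
   checked-arithmetic builtin available in newer GCC and clang.  */
#if 0
#include <stdbool.h>

static int
checked_add_demo (int a, int b, bool *overflow)
{
  int res;
  *overflow = __builtin_add_overflow (a, b, &res);
  return res;  /* Wrapped result, analogous to force_fit_type.  */
}
#endif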
1102
1103 tree
1104 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1105 {
1106 return int_const_binop_1 (code, arg1, arg2, 1);
1107 }
1108
1109 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1110 constant. We assume ARG1 and ARG2 have the same data type, or at least
1111 are the same kind of constant and the same machine mode. Return zero if
1112 combining the constants is not allowed in the current operating mode. */
1113
1114 static tree
1115 const_binop (enum tree_code code, tree arg1, tree arg2)
1116 {
1117 /* Sanity check for the recursive cases. */
1118 if (!arg1 || !arg2)
1119 return NULL_TREE;
1120
1121 STRIP_NOPS (arg1);
1122 STRIP_NOPS (arg2);
1123
1124 if (TREE_CODE (arg1) == INTEGER_CST)
1125 return int_const_binop (code, arg1, arg2);
1126
1127 if (TREE_CODE (arg1) == REAL_CST)
1128 {
1129 enum machine_mode mode;
1130 REAL_VALUE_TYPE d1;
1131 REAL_VALUE_TYPE d2;
1132 REAL_VALUE_TYPE value;
1133 REAL_VALUE_TYPE result;
1134 bool inexact;
1135 tree t, type;
1136
1137 /* The following codes are handled by real_arithmetic. */
1138 switch (code)
1139 {
1140 case PLUS_EXPR:
1141 case MINUS_EXPR:
1142 case MULT_EXPR:
1143 case RDIV_EXPR:
1144 case MIN_EXPR:
1145 case MAX_EXPR:
1146 break;
1147
1148 default:
1149 return NULL_TREE;
1150 }
1151
1152 d1 = TREE_REAL_CST (arg1);
1153 d2 = TREE_REAL_CST (arg2);
1154
1155 type = TREE_TYPE (arg1);
1156 mode = TYPE_MODE (type);
1157
1158 /* Don't perform operation if we honor signaling NaNs and
1159 either operand is a NaN. */
1160 if (HONOR_SNANS (mode)
1161 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1162 return NULL_TREE;
1163
1164 /* Don't perform operation if it would raise a division
1165 by zero exception. */
1166 if (code == RDIV_EXPR
1167 && REAL_VALUES_EQUAL (d2, dconst0)
1168 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1169 return NULL_TREE;
1170
1171 /* If either operand is a NaN, just return it. Otherwise, set up
1172 for floating-point trap; we return an overflow. */
1173 if (REAL_VALUE_ISNAN (d1))
1174 return arg1;
1175 else if (REAL_VALUE_ISNAN (d2))
1176 return arg2;
1177
1178 inexact = real_arithmetic (&value, code, &d1, &d2);
1179 real_convert (&result, mode, &value);
1180
1181 /* Don't constant fold this floating point operation if
1182 the result has overflowed and flag_trapping_math is set. */
1183 if (flag_trapping_math
1184 && MODE_HAS_INFINITIES (mode)
1185 && REAL_VALUE_ISINF (result)
1186 && !REAL_VALUE_ISINF (d1)
1187 && !REAL_VALUE_ISINF (d2))
1188 return NULL_TREE;
1189
1190 /* Don't constant fold this floating point operation if the
1191 result may depend upon the run-time rounding mode and
1192 flag_rounding_math is set, or if GCC's software emulation
1193 is unable to accurately represent the result. */
1194 if ((flag_rounding_math
1195 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1196 && (inexact || !real_identical (&result, &value)))
1197 return NULL_TREE;
1198
1199 t = build_real (type, result);
1200
1201 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1202 return t;
1203 }
1204
1205 if (TREE_CODE (arg1) == FIXED_CST)
1206 {
1207 FIXED_VALUE_TYPE f1;
1208 FIXED_VALUE_TYPE f2;
1209 FIXED_VALUE_TYPE result;
1210 tree t, type;
1211 int sat_p;
1212 bool overflow_p;
1213
1214 /* The following codes are handled by fixed_arithmetic. */
1215 switch (code)
1216 {
1217 case PLUS_EXPR:
1218 case MINUS_EXPR:
1219 case MULT_EXPR:
1220 case TRUNC_DIV_EXPR:
1221 f2 = TREE_FIXED_CST (arg2);
1222 break;
1223
1224 case LSHIFT_EXPR:
1225 case RSHIFT_EXPR:
1226 {
1227 wide_int w2 = arg2;
1228 f2.data.high = w2.elt (1);
1229 f2.data.low = w2.elt (0);
1230 f2.mode = SImode;
1231 }
1232 break;
1233
1234 default:
1235 return NULL_TREE;
1236 }
1237
1238 f1 = TREE_FIXED_CST (arg1);
1239 type = TREE_TYPE (arg1);
1240 sat_p = TYPE_SATURATING (type);
1241 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1242 t = build_fixed (type, result);
1243 /* Propagate overflow flags. */
1244 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1245 TREE_OVERFLOW (t) = 1;
1246 return t;
1247 }
1248
1249 if (TREE_CODE (arg1) == COMPLEX_CST)
1250 {
1251 tree type = TREE_TYPE (arg1);
1252 tree r1 = TREE_REALPART (arg1);
1253 tree i1 = TREE_IMAGPART (arg1);
1254 tree r2 = TREE_REALPART (arg2);
1255 tree i2 = TREE_IMAGPART (arg2);
1256 tree real, imag;
1257
1258 switch (code)
1259 {
1260 case PLUS_EXPR:
1261 case MINUS_EXPR:
1262 real = const_binop (code, r1, r2);
1263 imag = const_binop (code, i1, i2);
1264 break;
1265
1266 case MULT_EXPR:
1267 if (COMPLEX_FLOAT_TYPE_P (type))
1268 return do_mpc_arg2 (arg1, arg2, type,
1269 /* do_nonfinite= */ folding_initializer,
1270 mpc_mul);
1271
1272 real = const_binop (MINUS_EXPR,
1273 const_binop (MULT_EXPR, r1, r2),
1274 const_binop (MULT_EXPR, i1, i2));
1275 imag = const_binop (PLUS_EXPR,
1276 const_binop (MULT_EXPR, r1, i2),
1277 const_binop (MULT_EXPR, i1, r2));
1278 break;
1279
1280 case RDIV_EXPR:
1281 if (COMPLEX_FLOAT_TYPE_P (type))
1282 return do_mpc_arg2 (arg1, arg2, type,
1283 /* do_nonfinite= */ folding_initializer,
1284 mpc_div);
1285 /* Fallthru ... */
1286 case TRUNC_DIV_EXPR:
1287 case CEIL_DIV_EXPR:
1288 case FLOOR_DIV_EXPR:
1289 case ROUND_DIV_EXPR:
1290 if (flag_complex_method == 0)
1291 {
1292 /* Keep this algorithm in sync with
1293 tree-complex.c:expand_complex_div_straight().
1294
1295 Expand complex division to scalars, straightforward algorithm.
1296 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1297 t = br*br + bi*bi
1298 */
1299 tree magsquared
1300 = const_binop (PLUS_EXPR,
1301 const_binop (MULT_EXPR, r2, r2),
1302 const_binop (MULT_EXPR, i2, i2));
1303 tree t1
1304 = const_binop (PLUS_EXPR,
1305 const_binop (MULT_EXPR, r1, r2),
1306 const_binop (MULT_EXPR, i1, i2));
1307 tree t2
1308 = const_binop (MINUS_EXPR,
1309 const_binop (MULT_EXPR, i1, r2),
1310 const_binop (MULT_EXPR, r1, i2));
1311
1312 real = const_binop (code, t1, magsquared);
1313 imag = const_binop (code, t2, magsquared);
1314 }
1315 else
1316 {
1317 /* Keep this algorithm in sync with
1318 tree-complex.c:expand_complex_div_wide().
1319
1320 Expand complex division to scalars, modified algorithm to minimize
1321 overflow with wide input ranges. */
1322 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1323 fold_abs_const (r2, TREE_TYPE (type)),
1324 fold_abs_const (i2, TREE_TYPE (type)));
1325
1326 if (integer_nonzerop (compare))
1327 {
1328 /* In the TRUE branch, we compute
1329 ratio = br/bi;
1330 div = (br * ratio) + bi;
1331 tr = (ar * ratio) + ai;
1332 ti = (ai * ratio) - ar;
1333 tr = tr / div;
1334 ti = ti / div; */
1335 tree ratio = const_binop (code, r2, i2);
1336 tree div = const_binop (PLUS_EXPR, i2,
1337 const_binop (MULT_EXPR, r2, ratio));
1338 real = const_binop (MULT_EXPR, r1, ratio);
1339 real = const_binop (PLUS_EXPR, real, i1);
1340 real = const_binop (code, real, div);
1341
1342 imag = const_binop (MULT_EXPR, i1, ratio);
1343 imag = const_binop (MINUS_EXPR, imag, r1);
1344 imag = const_binop (code, imag, div);
1345 }
1346 else
1347 {
1348 /* In the FALSE branch, we compute
1349 ratio = d/c;
1350 divisor = (d * ratio) + c;
1351 tr = (b * ratio) + a;
1352 ti = b - (a * ratio);
1353 tr = tr / div;
1354 ti = ti / div; */
1355 tree ratio = const_binop (code, i2, r2);
1356 tree div = const_binop (PLUS_EXPR, r2,
1357 const_binop (MULT_EXPR, i2, ratio));
1358
1359 real = const_binop (MULT_EXPR, i1, ratio);
1360 real = const_binop (PLUS_EXPR, real, r1);
1361 real = const_binop (code, real, div);
1362
1363 imag = const_binop (MULT_EXPR, r1, ratio);
1364 imag = const_binop (MINUS_EXPR, i1, imag);
1365 imag = const_binop (code, imag, div);
1366 }
1367 }
1368 break;
1369
1370 default:
1371 return NULL_TREE;
1372 }
1373
1374 if (real && imag)
1375 return build_complex (type, real, imag);
1376 }
1377
1378 if (TREE_CODE (arg1) == VECTOR_CST
1379 && TREE_CODE (arg2) == VECTOR_CST)
1380 {
1381 tree type = TREE_TYPE (arg1);
1382 int count = TYPE_VECTOR_SUBPARTS (type), i;
1383 tree *elts = XALLOCAVEC (tree, count);
1384
1385 for (i = 0; i < count; i++)
1386 {
1387 tree elem1 = VECTOR_CST_ELT (arg1, i);
1388 tree elem2 = VECTOR_CST_ELT (arg2, i);
1389
1390 elts[i] = const_binop (code, elem1, elem2);
1391
1392 /* It is possible that const_binop cannot handle the given
1393 code and returns NULL_TREE. */
1394 if (elts[i] == NULL_TREE)
1395 return NULL_TREE;
1396 }
1397
1398 return build_vector (type, elts);
1399 }
1400
1401 /* Shifts allow a scalar offset for a vector. */
1402 if (TREE_CODE (arg1) == VECTOR_CST
1403 && TREE_CODE (arg2) == INTEGER_CST)
1404 {
1405 tree type = TREE_TYPE (arg1);
1406 int count = TYPE_VECTOR_SUBPARTS (type), i;
1407 tree *elts = XALLOCAVEC (tree, count);
1408
1409 if (code == VEC_LSHIFT_EXPR
1410 || code == VEC_RSHIFT_EXPR)
1411 {
1412 if (!tree_fits_uhwi_p (arg2))
1413 return NULL_TREE;
1414
1415 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1416 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1417 unsigned HOST_WIDE_INT innerc
1418 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1419 if (shiftc >= outerc || (shiftc % innerc) != 0)
1420 return NULL_TREE;
1421 int offset = shiftc / innerc;
1422 /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
1423 For reductions, the compiler always emits VEC_RSHIFT_EXPR;
1424 for !BYTES_BIG_ENDIAN it picks the first vector element, but
1425 for BYTES_BIG_ENDIAN the last element of the vector. */
1426 if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
1427 offset = -offset;
1428 tree zero = build_zero_cst (TREE_TYPE (type));
1429 for (i = 0; i < count; i++)
1430 {
1431 if (i + offset < 0 || i + offset >= count)
1432 elts[i] = zero;
1433 else
1434 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1435 }
1436 }
1437 else
1438 for (i = 0; i < count; i++)
1439 {
1440 tree elem1 = VECTOR_CST_ELT (arg1, i);
1441
1442 elts[i] = const_binop (code, elem1, arg2);
1443
1444 /* It is possible that const_binop cannot handle the given
1445 code and returns NULL_TREE. */
1446 if (elts[i] == NULL_TREE)
1447 return NULL_TREE;
1448 }
1449
1450 return build_vector (type, elts);
1451 }
1452 return NULL_TREE;
1453 }
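/* Editorial sketch (not GCC code): the flag_complex_method == 0
   "straight" complex division used above, written out on doubles:
   (ar + i*ai) / (br + i*bi)
     = ((ar*br + ai*bi) + i*(ai*br - ar*bi)) / t,  t = br*br + bi*bi.
   The other branch instead scales by the ratio of the divisor's
   components to limit intermediate overflow.  */
#if 0
static void
complex_div_straight_demo (double ar, double ai, double br, double bi,
                           double *tr, double *ti)
{
  double t = br * br + bi * bi;
  *tr = (ar * br + ai * bi) / t;
  *ti = (ai * br - ar * bi) / t;
}
#endif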
1454
1455 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1456 indicates which particular sizetype to create. */
1457
1458 tree
1459 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1460 {
1461 return build_int_cst (sizetype_tab[(int) kind], number);
1462 }
1463 \f
1464 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1465 is a tree code. The type of the result is taken from the operands.
1466 Both must be equivalent integer types, ala int_binop_types_match_p.
1467 If the operands are constant, so is the result. */
1468
1469 tree
1470 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1471 {
1472 tree type = TREE_TYPE (arg0);
1473
1474 if (arg0 == error_mark_node || arg1 == error_mark_node)
1475 return error_mark_node;
1476
1477 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1478 TREE_TYPE (arg1)));
1479
1480 /* Handle the special case of two integer constants faster. */
1481 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1482 {
1483 /* And some specific cases even faster than that. */
1484 if (code == PLUS_EXPR)
1485 {
1486 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1487 return arg1;
1488 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1489 return arg0;
1490 }
1491 else if (code == MINUS_EXPR)
1492 {
1493 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1494 return arg0;
1495 }
1496 else if (code == MULT_EXPR)
1497 {
1498 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1499 return arg1;
1500 }
1501
1502 /* Handle general case of two integer constants. For sizetype
1503 constant calculations we always want to know about overflow,
1504 even in the unsigned case. */
1505 return int_const_binop_1 (code, arg0, arg1, -1);
1506 }
1507
1508 return fold_build2_loc (loc, code, type, arg0, arg1);
1509 }
1510
1511 /* Given two values, either both of sizetype or both of bitsizetype,
1512 compute the difference between the two values. Return the value
1513 in signed type corresponding to the type of the operands. */
1514
1515 tree
1516 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1517 {
1518 tree type = TREE_TYPE (arg0);
1519 tree ctype;
1520
1521 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1522 TREE_TYPE (arg1)));
1523
1524 /* If the type is already signed, just do the simple thing. */
1525 if (!TYPE_UNSIGNED (type))
1526 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1527
1528 if (type == sizetype)
1529 ctype = ssizetype;
1530 else if (type == bitsizetype)
1531 ctype = sbitsizetype;
1532 else
1533 ctype = signed_type_for (type);
1534
1535 /* If either operand is not a constant, do the conversions to the signed
1536 type and subtract. The hardware will do the right thing with any
1537 overflow in the subtraction. */
1538 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1539 return size_binop_loc (loc, MINUS_EXPR,
1540 fold_convert_loc (loc, ctype, arg0),
1541 fold_convert_loc (loc, ctype, arg1));
1542
1543 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1544 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1545 overflow) and negate (which can't either). Special-case a result
1546 of zero while we're here. */
1547 if (tree_int_cst_equal (arg0, arg1))
1548 return build_int_cst (ctype, 0);
1549 else if (tree_int_cst_lt (arg1, arg0))
1550 return fold_convert_loc (loc, ctype,
1551 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1552 else
1553 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1554 fold_convert_loc (loc, ctype,
1555 size_binop_loc (loc,
1556 MINUS_EXPR,
1557 arg1, arg0)));
1558 }
1559 \f
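/* Editorial sketch (not GCC code): the constant case of size_diffop
   on plain size_t values -- subtract the smaller operand from the
   larger so the unsigned subtraction never wraps, then attach the
   sign afterwards.  Assumes the magnitude fits in long, as a
   ssizetype result does for sizetype operands.  */
#if 0
#include <stddef.h>

static long
size_diff_demo (size_t a, size_t b)
{
  return a >= b ? (long) (a - b) : -(long) (b - a);
}
#endif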
1560 /* A subroutine of fold_convert_const handling conversions of an
1561 INTEGER_CST to another integer type. */
1562
1563 static tree
1564 fold_convert_const_int_from_int (tree type, const_tree arg1)
1565 {
1566 /* Given an integer constant, make new constant with new type,
1567 appropriately sign-extended or truncated. Use widest_int
1568 so that any extension is done according to ARG1's type. */
1569 return force_fit_type (type, wi::to_widest (arg1),
1570 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1571 TREE_OVERFLOW (arg1));
1572 }
1573
1574 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1575 to an integer type. */
1576
1577 static tree
1578 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1579 {
1580 bool overflow = false;
1581 tree t;
1582
1583 /* The following code implements the floating point to integer
1584 conversion rules required by the Java Language Specification,
1585 that IEEE NaNs are mapped to zero and values that overflow
1586 the target precision saturate, i.e. values greater than
1587 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1588 are mapped to INT_MIN. These semantics are allowed by the
1589 C and C++ standards that simply state that the behavior of
1590 FP-to-integer conversion is unspecified upon overflow. */
1591
1592 wide_int val;
1593 REAL_VALUE_TYPE r;
1594 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1595
1596 switch (code)
1597 {
1598 case FIX_TRUNC_EXPR:
1599 real_trunc (&r, VOIDmode, &x);
1600 break;
1601
1602 default:
1603 gcc_unreachable ();
1604 }
1605
1606 /* If R is NaN, return zero and show we have an overflow. */
1607 if (REAL_VALUE_ISNAN (r))
1608 {
1609 overflow = true;
1610 val = wi::zero (TYPE_PRECISION (type));
1611 }
1612
1613 /* See if R is less than the lower bound or greater than the
1614 upper bound. */
1615
1616 if (! overflow)
1617 {
1618 tree lt = TYPE_MIN_VALUE (type);
1619 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1620 if (REAL_VALUES_LESS (r, l))
1621 {
1622 overflow = true;
1623 val = lt;
1624 }
1625 }
1626
1627 if (! overflow)
1628 {
1629 tree ut = TYPE_MAX_VALUE (type);
1630 if (ut)
1631 {
1632 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1633 if (REAL_VALUES_LESS (u, r))
1634 {
1635 overflow = true;
1636 val = ut;
1637 }
1638 }
1639 }
1640
1641 if (! overflow)
1642 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1643
1644 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1645 return t;
1646 }
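/* Editorial sketch (not GCC code): the Java-style saturating
   conversion described above, for double -> 32-bit int.  NaN maps to
   zero and out-of-range values clamp to the type bounds.  */
#if 0
#include <limits.h>
#include <math.h>

static int
sat_double_to_int_demo (double r)
{
  if (isnan (r))
    return 0;
  if (r <= (double) INT_MIN)
    return INT_MIN;
  if (r >= (double) INT_MAX)
    return INT_MAX;
  return (int) r;  /* In range: truncate toward zero.  */
}
#endif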
1647
1648 /* A subroutine of fold_convert_const handling conversions of a
1649 FIXED_CST to an integer type. */
1650
1651 static tree
1652 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1653 {
1654 tree t;
1655 double_int temp, temp_trunc;
1656 unsigned int mode;
1657
1658 /* Right shift FIXED_CST to temp by fbit. */
1659 temp = TREE_FIXED_CST (arg1).data;
1660 mode = TREE_FIXED_CST (arg1).mode;
1661 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1662 {
1663 temp = temp.rshift (GET_MODE_FBIT (mode),
1664 HOST_BITS_PER_DOUBLE_INT,
1665 SIGNED_FIXED_POINT_MODE_P (mode));
1666
1667 /* Left shift temp to temp_trunc by fbit. */
1668 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1669 HOST_BITS_PER_DOUBLE_INT,
1670 SIGNED_FIXED_POINT_MODE_P (mode));
1671 }
1672 else
1673 {
1674 temp = double_int_zero;
1675 temp_trunc = double_int_zero;
1676 }
1677
1678 /* If FIXED_CST is negative, we need to round the value toward 0:
1679 check whether the fractional bits are nonzero, and if so add 1 to temp. */
1680 if (SIGNED_FIXED_POINT_MODE_P (mode)
1681 && temp_trunc.is_negative ()
1682 && TREE_FIXED_CST (arg1).data != temp_trunc)
1683 temp += double_int_one;
1684
1685 /* Given a fixed-point constant, make new constant with new type,
1686 appropriately sign-extended or truncated. */
1687 t = force_fit_type (type, temp, -1,
1688 (temp.is_negative ()
1689 && (TYPE_UNSIGNED (type)
1690 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1691 | TREE_OVERFLOW (arg1));
1692
1693 return t;
1694 }
1695
1696 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1697 to another floating point type. */
1698
1699 static tree
1700 fold_convert_const_real_from_real (tree type, const_tree arg1)
1701 {
1702 REAL_VALUE_TYPE value;
1703 tree t;
1704
1705 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1706 t = build_real (type, value);
1707
1708 /* If converting an infinity or NAN to a representation that doesn't
1709 have one, set the overflow bit so that we can produce some kind of
1710 error message at the appropriate point if necessary. It's not the
1711 most user-friendly message, but it's better than nothing. */
1712 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1713 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1714 TREE_OVERFLOW (t) = 1;
1715 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1716 && !MODE_HAS_NANS (TYPE_MODE (type)))
1717 TREE_OVERFLOW (t) = 1;
1718 /* Regular overflow: the conversion produced an infinity in a mode
1719 that can't represent infinities. */
1720 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1721 && REAL_VALUE_ISINF (value)
1722 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1723 TREE_OVERFLOW (t) = 1;
1724 else
1725 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1726 return t;
1727 }
1728
1729 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1730 to a floating point type. */
1731
1732 static tree
1733 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1734 {
1735 REAL_VALUE_TYPE value;
1736 tree t;
1737
1738 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1739 t = build_real (type, value);
1740
1741 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1742 return t;
1743 }
1744
1745 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1746 to another fixed-point type. */
1747
1748 static tree
1749 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1750 {
1751 FIXED_VALUE_TYPE value;
1752 tree t;
1753 bool overflow_p;
1754
1755 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1756 TYPE_SATURATING (type));
1757 t = build_fixed (type, value);
1758
1759 /* Propagate overflow flags. */
1760 if (overflow_p | TREE_OVERFLOW (arg1))
1761 TREE_OVERFLOW (t) = 1;
1762 return t;
1763 }
1764
1765 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1766 to a fixed-point type. */
1767
1768 static tree
1769 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1770 {
1771 FIXED_VALUE_TYPE value;
1772 tree t;
1773 bool overflow_p;
1774 double_int di;
1775
1776 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1777
1778 di.low = TREE_INT_CST_ELT (arg1, 0);
1779 if (TREE_INT_CST_NUNITS (arg1) == 1)
1780 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1781 else
1782 di.high = TREE_INT_CST_ELT (arg1, 1);
1783
1784 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1785 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1786 TYPE_SATURATING (type));
1787 t = build_fixed (type, value);
1788
1789 /* Propagate overflow flags. */
1790 if (overflow_p | TREE_OVERFLOW (arg1))
1791 TREE_OVERFLOW (t) = 1;
1792 return t;
1793 }
1794
1795 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1796 to a fixed-point type. */
1797
1798 static tree
1799 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1800 {
1801 FIXED_VALUE_TYPE value;
1802 tree t;
1803 bool overflow_p;
1804
1805 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1806 &TREE_REAL_CST (arg1),
1807 TYPE_SATURATING (type));
1808 t = build_fixed (type, value);
1809
1810 /* Propagate overflow flags. */
1811 if (overflow_p | TREE_OVERFLOW (arg1))
1812 TREE_OVERFLOW (t) = 1;
1813 return t;
1814 }
1815
1816 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1817 type TYPE. If no simplification can be done return NULL_TREE. */
1818
1819 static tree
1820 fold_convert_const (enum tree_code code, tree type, tree arg1)
1821 {
1822 if (TREE_TYPE (arg1) == type)
1823 return arg1;
1824
1825 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1826 || TREE_CODE (type) == OFFSET_TYPE)
1827 {
1828 if (TREE_CODE (arg1) == INTEGER_CST)
1829 return fold_convert_const_int_from_int (type, arg1);
1830 else if (TREE_CODE (arg1) == REAL_CST)
1831 return fold_convert_const_int_from_real (code, type, arg1);
1832 else if (TREE_CODE (arg1) == FIXED_CST)
1833 return fold_convert_const_int_from_fixed (type, arg1);
1834 }
1835 else if (TREE_CODE (type) == REAL_TYPE)
1836 {
1837 if (TREE_CODE (arg1) == INTEGER_CST)
1838 return build_real_from_int_cst (type, arg1);
1839 else if (TREE_CODE (arg1) == REAL_CST)
1840 return fold_convert_const_real_from_real (type, arg1);
1841 else if (TREE_CODE (arg1) == FIXED_CST)
1842 return fold_convert_const_real_from_fixed (type, arg1);
1843 }
1844 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1845 {
1846 if (TREE_CODE (arg1) == FIXED_CST)
1847 return fold_convert_const_fixed_from_fixed (type, arg1);
1848 else if (TREE_CODE (arg1) == INTEGER_CST)
1849 return fold_convert_const_fixed_from_int (type, arg1);
1850 else if (TREE_CODE (arg1) == REAL_CST)
1851 return fold_convert_const_fixed_from_real (type, arg1);
1852 }
1853 return NULL_TREE;
1854 }
1855
1856 /* Construct a vector of zero elements of vector type TYPE. */
1857
1858 static tree
1859 build_zero_vector (tree type)
1860 {
1861 tree t;
1862
1863 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1864 return build_vector_from_val (type, t);
1865 }
1866
1867 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1868
1869 bool
1870 fold_convertible_p (const_tree type, const_tree arg)
1871 {
1872 tree orig = TREE_TYPE (arg);
1873
1874 if (type == orig)
1875 return true;
1876
1877 if (TREE_CODE (arg) == ERROR_MARK
1878 || TREE_CODE (type) == ERROR_MARK
1879 || TREE_CODE (orig) == ERROR_MARK)
1880 return false;
1881
1882 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1883 return true;
1884
1885 switch (TREE_CODE (type))
1886 {
1887 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1888 case POINTER_TYPE: case REFERENCE_TYPE:
1889 case OFFSET_TYPE:
1890 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1891 || TREE_CODE (orig) == OFFSET_TYPE)
1892 return true;
1893 return (TREE_CODE (orig) == VECTOR_TYPE
1894 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1895
1896 case REAL_TYPE:
1897 case FIXED_POINT_TYPE:
1898 case COMPLEX_TYPE:
1899 case VECTOR_TYPE:
1900 case VOID_TYPE:
1901 return TREE_CODE (type) == TREE_CODE (orig);
1902
1903 default:
1904 return false;
1905 }
1906 }
1907
1908 /* Convert expression ARG to type TYPE. Used by the middle-end for
1909 simple conversions in preference to calling the front-end's convert. */
1910
1911 tree
1912 fold_convert_loc (location_t loc, tree type, tree arg)
1913 {
1914 tree orig = TREE_TYPE (arg);
1915 tree tem;
1916
1917 if (type == orig)
1918 return arg;
1919
1920 if (TREE_CODE (arg) == ERROR_MARK
1921 || TREE_CODE (type) == ERROR_MARK
1922 || TREE_CODE (orig) == ERROR_MARK)
1923 return error_mark_node;
1924
1925 switch (TREE_CODE (type))
1926 {
1927 case POINTER_TYPE:
1928 case REFERENCE_TYPE:
1929 /* Handle conversions between pointers to different address spaces. */
1930 if (POINTER_TYPE_P (orig)
1931 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1932 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1933 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1934 /* fall through */
1935
1936 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1937 case OFFSET_TYPE:
1938 if (TREE_CODE (arg) == INTEGER_CST)
1939 {
1940 tem = fold_convert_const (NOP_EXPR, type, arg);
1941 if (tem != NULL_TREE)
1942 return tem;
1943 }
1944 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1945 || TREE_CODE (orig) == OFFSET_TYPE)
1946 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1947 if (TREE_CODE (orig) == COMPLEX_TYPE)
1948 return fold_convert_loc (loc, type,
1949 fold_build1_loc (loc, REALPART_EXPR,
1950 TREE_TYPE (orig), arg));
1951 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1952 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1953 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1954
1955 case REAL_TYPE:
1956 if (TREE_CODE (arg) == INTEGER_CST)
1957 {
1958 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1959 if (tem != NULL_TREE)
1960 return tem;
1961 }
1962 else if (TREE_CODE (arg) == REAL_CST)
1963 {
1964 tem = fold_convert_const (NOP_EXPR, type, arg);
1965 if (tem != NULL_TREE)
1966 return tem;
1967 }
1968 else if (TREE_CODE (arg) == FIXED_CST)
1969 {
1970 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1971 if (tem != NULL_TREE)
1972 return tem;
1973 }
1974
1975 switch (TREE_CODE (orig))
1976 {
1977 case INTEGER_TYPE:
1978 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1979 case POINTER_TYPE: case REFERENCE_TYPE:
1980 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1981
1982 case REAL_TYPE:
1983 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1984
1985 case FIXED_POINT_TYPE:
1986 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1987
1988 case COMPLEX_TYPE:
1989 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1990 return fold_convert_loc (loc, type, tem);
1991
1992 default:
1993 gcc_unreachable ();
1994 }
1995
1996 case FIXED_POINT_TYPE:
1997 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1998 || TREE_CODE (arg) == REAL_CST)
1999 {
2000 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2001 if (tem != NULL_TREE)
2002 goto fold_convert_exit;
2003 }
2004
2005 switch (TREE_CODE (orig))
2006 {
2007 case FIXED_POINT_TYPE:
2008 case INTEGER_TYPE:
2009 case ENUMERAL_TYPE:
2010 case BOOLEAN_TYPE:
2011 case REAL_TYPE:
2012 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2013
2014 case COMPLEX_TYPE:
2015 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2016 return fold_convert_loc (loc, type, tem);
2017
2018 default:
2019 gcc_unreachable ();
2020 }
2021
2022 case COMPLEX_TYPE:
2023 switch (TREE_CODE (orig))
2024 {
2025 case INTEGER_TYPE:
2026 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2027 case POINTER_TYPE: case REFERENCE_TYPE:
2028 case REAL_TYPE:
2029 case FIXED_POINT_TYPE:
2030 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2031 fold_convert_loc (loc, TREE_TYPE (type), arg),
2032 fold_convert_loc (loc, TREE_TYPE (type),
2033 integer_zero_node));
2034 case COMPLEX_TYPE:
2035 {
2036 tree rpart, ipart;
2037
2038 if (TREE_CODE (arg) == COMPLEX_EXPR)
2039 {
2040 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2041 TREE_OPERAND (arg, 0));
2042 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2043 TREE_OPERAND (arg, 1));
2044 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2045 }
2046
2047 arg = save_expr (arg);
2048 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2049 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2050 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2051 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2052 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2053 }
2054
2055 default:
2056 gcc_unreachable ();
2057 }
2058
2059 case VECTOR_TYPE:
2060 if (integer_zerop (arg))
2061 return build_zero_vector (type);
2062 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2063 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2064 || TREE_CODE (orig) == VECTOR_TYPE);
2065 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2066
2067 case VOID_TYPE:
2068 tem = fold_ignored_result (arg);
2069 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2070
2071 default:
2072 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2073 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2074 gcc_unreachable ();
2075 }
2076 fold_convert_exit:
2077 protected_set_expr_location_unshare (tem, loc);
2078 return tem;
2079 }
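
/* For illustration: converting a scalar to a complex type above pairs the
   converted value with a zero imaginary part,

       (complex double) n  ==>  COMPLEX_EXPR ((double) n, 0.0)

   while a complex-to-complex conversion first wraps ARG in a SAVE_EXPR so
   that the REALPART_EXPR and IMAGPART_EXPR operands do not evaluate ARG
   twice.  */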
2080 \f
2081 /* Return false if expr can be assumed not to be an lvalue, true
2082 otherwise. */
2083
2084 static bool
2085 maybe_lvalue_p (const_tree x)
2086 {
2087 /* We only need to wrap lvalue tree codes. */
2088 switch (TREE_CODE (x))
2089 {
2090 case VAR_DECL:
2091 case PARM_DECL:
2092 case RESULT_DECL:
2093 case LABEL_DECL:
2094 case FUNCTION_DECL:
2095 case SSA_NAME:
2096
2097 case COMPONENT_REF:
2098 case MEM_REF:
2099 case INDIRECT_REF:
2100 case ARRAY_REF:
2101 case ARRAY_RANGE_REF:
2102 case BIT_FIELD_REF:
2103 case OBJ_TYPE_REF:
2104
2105 case REALPART_EXPR:
2106 case IMAGPART_EXPR:
2107 case PREINCREMENT_EXPR:
2108 case PREDECREMENT_EXPR:
2109 case SAVE_EXPR:
2110 case TRY_CATCH_EXPR:
2111 case WITH_CLEANUP_EXPR:
2112 case COMPOUND_EXPR:
2113 case MODIFY_EXPR:
2114 case TARGET_EXPR:
2115 case COND_EXPR:
2116 case BIND_EXPR:
2117 break;
2118
2119 default:
2120 /* Assume the worst for front-end tree codes. */
2121 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2122 break;
2123 return false;
2124 }
2125
2126 return true;
2127 }
2128
2129 /* Return an expr equal to X but certainly not valid as an lvalue. */
2130
2131 tree
2132 non_lvalue_loc (location_t loc, tree x)
2133 {
2134 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2135 us. */
2136 if (in_gimple_form)
2137 return x;
2138
2139 if (! maybe_lvalue_p (x))
2140 return x;
2141 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2142 }
2143
2144 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2145 Zero means allow extended lvalues. */
2146
2147 int pedantic_lvalues;
2148
2149 /* When pedantic, return an expr equal to X but certainly not valid as a
2150 pedantic lvalue. Otherwise, return X. */
2151
2152 static tree
2153 pedantic_non_lvalue_loc (location_t loc, tree x)
2154 {
2155 if (pedantic_lvalues)
2156 return non_lvalue_loc (loc, x);
2157
2158 return protected_set_expr_location_unshare (x, loc);
2159 }
2160 \f
2161 /* Given a tree comparison code, return the code that is the logical inverse.
2162 It is generally not safe to do this for floating-point comparisons, except
2163 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2164 ERROR_MARK in this case. */
2165
2166 enum tree_code
2167 invert_tree_comparison (enum tree_code code, bool honor_nans)
2168 {
2169 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2170 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2171 return ERROR_MARK;
2172
2173 switch (code)
2174 {
2175 case EQ_EXPR:
2176 return NE_EXPR;
2177 case NE_EXPR:
2178 return EQ_EXPR;
2179 case GT_EXPR:
2180 return honor_nans ? UNLE_EXPR : LE_EXPR;
2181 case GE_EXPR:
2182 return honor_nans ? UNLT_EXPR : LT_EXPR;
2183 case LT_EXPR:
2184 return honor_nans ? UNGE_EXPR : GE_EXPR;
2185 case LE_EXPR:
2186 return honor_nans ? UNGT_EXPR : GT_EXPR;
2187 case LTGT_EXPR:
2188 return UNEQ_EXPR;
2189 case UNEQ_EXPR:
2190 return LTGT_EXPR;
2191 case UNGT_EXPR:
2192 return LE_EXPR;
2193 case UNGE_EXPR:
2194 return LT_EXPR;
2195 case UNLT_EXPR:
2196 return GE_EXPR;
2197 case UNLE_EXPR:
2198 return GT_EXPR;
2199 case ORDERED_EXPR:
2200 return UNORDERED_EXPR;
2201 case UNORDERED_EXPR:
2202 return ORDERED_EXPR;
2203 default:
2204 gcc_unreachable ();
2205 }
2206 }
2207
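/* For illustration: when NaNs are honored, the logical inverse of a < b is
   not a >= b (both are false if either operand is NaN) but the unordered
   form:

       invert_tree_comparison (LT_EXPR, true)   ==>  UNGE_EXPR
       invert_tree_comparison (LT_EXPR, false)  ==>  GE_EXPR

   and with flag_trapping_math the ordering comparisons are refused
   (ERROR_MARK), since e.g. UNGE is quiet on NaN operands while LT may
   trap.  */
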
2208 /* Similar, but return the comparison that results if the operands are
2209 swapped. This is safe for floating-point. */
2210
2211 enum tree_code
2212 swap_tree_comparison (enum tree_code code)
2213 {
2214 switch (code)
2215 {
2216 case EQ_EXPR:
2217 case NE_EXPR:
2218 case ORDERED_EXPR:
2219 case UNORDERED_EXPR:
2220 case LTGT_EXPR:
2221 case UNEQ_EXPR:
2222 return code;
2223 case GT_EXPR:
2224 return LT_EXPR;
2225 case GE_EXPR:
2226 return LE_EXPR;
2227 case LT_EXPR:
2228 return GT_EXPR;
2229 case LE_EXPR:
2230 return GE_EXPR;
2231 case UNGT_EXPR:
2232 return UNLT_EXPR;
2233 case UNGE_EXPR:
2234 return UNLE_EXPR;
2235 case UNLT_EXPR:
2236 return UNGT_EXPR;
2237 case UNLE_EXPR:
2238 return UNGE_EXPR;
2239 default:
2240 gcc_unreachable ();
2241 }
2242 }
2243
2244
2245 /* Convert a comparison tree code from an enum tree_code representation
2246 into a compcode bit-based encoding. This function is the inverse of
2247 compcode_to_comparison. */
2248
2249 static enum comparison_code
2250 comparison_to_compcode (enum tree_code code)
2251 {
2252 switch (code)
2253 {
2254 case LT_EXPR:
2255 return COMPCODE_LT;
2256 case EQ_EXPR:
2257 return COMPCODE_EQ;
2258 case LE_EXPR:
2259 return COMPCODE_LE;
2260 case GT_EXPR:
2261 return COMPCODE_GT;
2262 case NE_EXPR:
2263 return COMPCODE_NE;
2264 case GE_EXPR:
2265 return COMPCODE_GE;
2266 case ORDERED_EXPR:
2267 return COMPCODE_ORD;
2268 case UNORDERED_EXPR:
2269 return COMPCODE_UNORD;
2270 case UNLT_EXPR:
2271 return COMPCODE_UNLT;
2272 case UNEQ_EXPR:
2273 return COMPCODE_UNEQ;
2274 case UNLE_EXPR:
2275 return COMPCODE_UNLE;
2276 case UNGT_EXPR:
2277 return COMPCODE_UNGT;
2278 case LTGT_EXPR:
2279 return COMPCODE_LTGT;
2280 case UNGE_EXPR:
2281 return COMPCODE_UNGE;
2282 default:
2283 gcc_unreachable ();
2284 }
2285 }
2286
2287 /* Convert a compcode bit-based encoding of a comparison operator back
2288 to GCC's enum tree_code representation. This function is the
2289 inverse of comparison_to_compcode. */
2290
2291 static enum tree_code
2292 compcode_to_comparison (enum comparison_code code)
2293 {
2294 switch (code)
2295 {
2296 case COMPCODE_LT:
2297 return LT_EXPR;
2298 case COMPCODE_EQ:
2299 return EQ_EXPR;
2300 case COMPCODE_LE:
2301 return LE_EXPR;
2302 case COMPCODE_GT:
2303 return GT_EXPR;
2304 case COMPCODE_NE:
2305 return NE_EXPR;
2306 case COMPCODE_GE:
2307 return GE_EXPR;
2308 case COMPCODE_ORD:
2309 return ORDERED_EXPR;
2310 case COMPCODE_UNORD:
2311 return UNORDERED_EXPR;
2312 case COMPCODE_UNLT:
2313 return UNLT_EXPR;
2314 case COMPCODE_UNEQ:
2315 return UNEQ_EXPR;
2316 case COMPCODE_UNLE:
2317 return UNLE_EXPR;
2318 case COMPCODE_UNGT:
2319 return UNGT_EXPR;
2320 case COMPCODE_LTGT:
2321 return LTGT_EXPR;
2322 case COMPCODE_UNGE:
2323 return UNGE_EXPR;
2324 default:
2325 gcc_unreachable ();
2326 }
2327 }
2328
2329 /* Return a tree for the comparison which is the combination of
2330 doing the AND or OR (depending on CODE) of the two operations LCODE
2331 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2332 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2333 if this makes the transformation invalid. */
2334
2335 tree
2336 combine_comparisons (location_t loc,
2337 enum tree_code code, enum tree_code lcode,
2338 enum tree_code rcode, tree truth_type,
2339 tree ll_arg, tree lr_arg)
2340 {
2341 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2342 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2343 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2344 int compcode;
2345
2346 switch (code)
2347 {
2348 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2349 compcode = lcompcode & rcompcode;
2350 break;
2351
2352 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2353 compcode = lcompcode | rcompcode;
2354 break;
2355
2356 default:
2357 return NULL_TREE;
2358 }
2359
2360 if (!honor_nans)
2361 {
2362 /* Eliminate unordered comparisons, as well as LTGT and ORD
2363 which are not used unless the mode has NaNs. */
2364 compcode &= ~COMPCODE_UNORD;
2365 if (compcode == COMPCODE_LTGT)
2366 compcode = COMPCODE_NE;
2367 else if (compcode == COMPCODE_ORD)
2368 compcode = COMPCODE_TRUE;
2369 }
2370 else if (flag_trapping_math)
2371 {
2372 /* Check that the original operation and the optimized ones will trap
2373 under the same condition. */
2374 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2375 && (lcompcode != COMPCODE_EQ)
2376 && (lcompcode != COMPCODE_ORD);
2377 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2378 && (rcompcode != COMPCODE_EQ)
2379 && (rcompcode != COMPCODE_ORD);
2380 bool trap = (compcode & COMPCODE_UNORD) == 0
2381 && (compcode != COMPCODE_EQ)
2382 && (compcode != COMPCODE_ORD);
2383
2384 /* In a short-circuited boolean expression the LHS might be
2385 such that the RHS, if evaluated, will never trap. For
2386 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2387 if neither x nor y is NaN. (This is a mixed blessing: for
2388 example, the expression above will never trap, hence
2389 optimizing it to x < y would be invalid). */
2390 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2391 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2392 rtrap = false;
2393
2394 /* If the comparison was short-circuited, and only the RHS
2395 trapped, we may now generate a spurious trap. */
2396 if (rtrap && !ltrap
2397 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2398 return NULL_TREE;
2399
2400 /* If we changed the conditions that cause a trap, we lose. */
2401 if ((ltrap || rtrap) != trap)
2402 return NULL_TREE;
2403 }
2404
2405 if (compcode == COMPCODE_TRUE)
2406 return constant_boolean_node (true, truth_type);
2407 else if (compcode == COMPCODE_FALSE)
2408 return constant_boolean_node (false, truth_type);
2409 else
2410 {
2411 enum tree_code tcode;
2412
2413 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2414 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2415 }
2416 }
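
/* Worked example: each comparison_code is the set of primitive outcomes it
   accepts, with bit 0 = LT, bit 1 = EQ, bit 2 = GT and bit 3 = UNORDERED.
   Combining (a <= b) && (a >= b) therefore computes

       COMPCODE_LE & COMPCODE_GE  =  3 & 6  =  2  =  COMPCODE_EQ

   and folds to a == b, while (a < b) || (a == b) computes 1 | 2 = 3 and
   folds to a <= b.  */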
2417 \f
2418 /* Return nonzero if two operands (typically of the same tree node)
2419 are necessarily equal. If either argument has side-effects this
2420 function returns zero. FLAGS modifies behavior as follows:
2421
2422 If OEP_ONLY_CONST is set, only return nonzero for constants.
2423 This function tests whether the operands are indistinguishable;
2424 it does not test whether they are equal using C's == operation.
2425 The distinction is important for IEEE floating point, because
2426 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2427 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2428
2429 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2430 even though it may hold multiple values during a function.
2431 This is because a GCC tree node guarantees that nothing else is
2432 executed between the evaluation of its "operands" (which may often
2433 be evaluated in arbitrary order). Hence if the operands themselves
2434 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2435 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2436 unset means assuming isochronic (or instantaneous) tree equivalence.
2437 Unless comparing arbitrary expression trees, such as from different
2438 statements, this flag can usually be left unset.
2439
2440 If OEP_PURE_SAME is set, then pure functions with identical arguments
2441 are considered the same. It is used when the caller has other ways
2442 to ensure that global memory is unchanged in between. */
2443
2444 int
2445 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2446 {
2447 /* If either is ERROR_MARK, they aren't equal. */
2448 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2449 || TREE_TYPE (arg0) == error_mark_node
2450 || TREE_TYPE (arg1) == error_mark_node)
2451 return 0;
2452
2453 /* Similarly, if either does not have a type (like a released SSA name),
2454 they aren't equal. */
2455 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2456 return 0;
2457
2458 /* Check equality of integer constants before bailing out due to
2459 precision differences. */
2460 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2461 return tree_int_cst_equal (arg0, arg1);
2462
2463 /* If both types don't have the same signedness, then we can't consider
2464 them equal. We must check this before the STRIP_NOPS calls
2465 because they may change the signedness of the arguments. As pointers
2466 strictly don't have a signedness, require either two pointers or
2467 two non-pointers as well. */
2468 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2469 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2470 return 0;
2471
2472 /* We cannot consider pointers to different address space equal. */
2473 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2474 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2475 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2476 return 0;
2477
2478 /* If both types don't have the same precision, then it is not safe
2479 to strip NOPs. */
2480 if (element_precision (TREE_TYPE (arg0))
2481 != element_precision (TREE_TYPE (arg1)))
2482 return 0;
2483
2484 STRIP_NOPS (arg0);
2485 STRIP_NOPS (arg1);
2486
2487 /* In case both args are comparisons but with different comparison
2488 code, try to swap the comparison operands of one arg to produce
2489 a match and compare that variant. */
2490 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2491 && COMPARISON_CLASS_P (arg0)
2492 && COMPARISON_CLASS_P (arg1))
2493 {
2494 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2495
2496 if (TREE_CODE (arg0) == swap_code)
2497 return operand_equal_p (TREE_OPERAND (arg0, 0),
2498 TREE_OPERAND (arg1, 1), flags)
2499 && operand_equal_p (TREE_OPERAND (arg0, 1),
2500 TREE_OPERAND (arg1, 0), flags);
2501 }
2502
2503 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2504 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2505 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2506 return 0;
2507
2508 /* This is needed for conversions and for COMPONENT_REF.
2509 Might as well play it safe and always test this. */
2510 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2511 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2512 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2513 return 0;
2514
2515 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2516 We don't care about side effects in that case because the SAVE_EXPR
2517 takes care of that for us. In all other cases, two expressions are
2518 equal if they have no side effects. If we have two identical
2519 expressions with side effects that should be treated the same due
2520 to the only side effects being identical SAVE_EXPR's, that will
2521 be detected in the recursive calls below.
2522 If we are taking an invariant address of two identical objects
2523 they are necessarily equal as well. */
2524 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2525 && (TREE_CODE (arg0) == SAVE_EXPR
2526 || (flags & OEP_CONSTANT_ADDRESS_OF)
2527 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2528 return 1;
2529
2530 /* Next handle constant cases, those for which we can return 1 even
2531 if ONLY_CONST is set. */
2532 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2533 switch (TREE_CODE (arg0))
2534 {
2535 case INTEGER_CST:
2536 return tree_int_cst_equal (arg0, arg1);
2537
2538 case FIXED_CST:
2539 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2540 TREE_FIXED_CST (arg1));
2541
2542 case REAL_CST:
2543 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2544 TREE_REAL_CST (arg1)))
2545 return 1;
2546
2547
2548 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2549 {
2550 /* If we do not distinguish between signed and unsigned zero,
2551 consider them equal. */
2552 if (real_zerop (arg0) && real_zerop (arg1))
2553 return 1;
2554 }
2555 return 0;
2556
2557 case VECTOR_CST:
2558 {
2559 unsigned i;
2560
2561 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2562 return 0;
2563
2564 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2565 {
2566 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2567 VECTOR_CST_ELT (arg1, i), flags))
2568 return 0;
2569 }
2570 return 1;
2571 }
2572
2573 case COMPLEX_CST:
2574 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2575 flags)
2576 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2577 flags));
2578
2579 case STRING_CST:
2580 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2581 && ! memcmp (TREE_STRING_POINTER (arg0),
2582 TREE_STRING_POINTER (arg1),
2583 TREE_STRING_LENGTH (arg0)));
2584
2585 case ADDR_EXPR:
2586 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2587 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2588 ? OEP_CONSTANT_ADDRESS_OF : 0);
2589 default:
2590 break;
2591 }
2592
2593 if (flags & OEP_ONLY_CONST)
2594 return 0;
2595
2596 /* Define macros to test an operand from arg0 and arg1 for equality and a
2597 variant that allows null and views null as being different from any
2598 non-null value. In the latter case, if either is null, then both
2599 must be; otherwise, do the normal comparison. */
2600 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2601 TREE_OPERAND (arg1, N), flags)
2602
2603 #define OP_SAME_WITH_NULL(N) \
2604 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2605 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2606
2607 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2608 {
2609 case tcc_unary:
2610 /* Two conversions are equal only if signedness and modes match. */
2611 switch (TREE_CODE (arg0))
2612 {
2613 CASE_CONVERT:
2614 case FIX_TRUNC_EXPR:
2615 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2616 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2617 return 0;
2618 break;
2619 default:
2620 break;
2621 }
2622
2623 return OP_SAME (0);
2624
2625
2626 case tcc_comparison:
2627 case tcc_binary:
2628 if (OP_SAME (0) && OP_SAME (1))
2629 return 1;
2630
2631 /* For commutative ops, allow the other order. */
2632 return (commutative_tree_code (TREE_CODE (arg0))
2633 && operand_equal_p (TREE_OPERAND (arg0, 0),
2634 TREE_OPERAND (arg1, 1), flags)
2635 && operand_equal_p (TREE_OPERAND (arg0, 1),
2636 TREE_OPERAND (arg1, 0), flags));
2637
2638 case tcc_reference:
2639 /* If either of the pointer (or reference) expressions we are
2640 dereferencing contain a side effect, these cannot be equal,
2641 but their addresses can be. */
2642 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2643 && (TREE_SIDE_EFFECTS (arg0)
2644 || TREE_SIDE_EFFECTS (arg1)))
2645 return 0;
2646
2647 switch (TREE_CODE (arg0))
2648 {
2649 case INDIRECT_REF:
2650 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2651 return OP_SAME (0);
2652
2653 case REALPART_EXPR:
2654 case IMAGPART_EXPR:
2655 return OP_SAME (0);
2656
2657 case TARGET_MEM_REF:
2658 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2659 /* Require equal extra operands and then fall through to MEM_REF
2660 handling of the two common operands. */
2661 if (!OP_SAME_WITH_NULL (2)
2662 || !OP_SAME_WITH_NULL (3)
2663 || !OP_SAME_WITH_NULL (4))
2664 return 0;
2665 /* Fallthru. */
2666 case MEM_REF:
2667 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2668 /* Require equal access sizes, and similar pointer types.
2669 We can have incomplete types for array references of
2670 variable-sized arrays from the Fortran frontend
2671 though. Also verify the types are compatible. */
2672 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2673 || (TYPE_SIZE (TREE_TYPE (arg0))
2674 && TYPE_SIZE (TREE_TYPE (arg1))
2675 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2676 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2677 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2678 && alias_ptr_types_compatible_p
2679 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2680 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2681 && OP_SAME (0) && OP_SAME (1));
2682
2683 case ARRAY_REF:
2684 case ARRAY_RANGE_REF:
2685 /* Operands 2 and 3 may be null.
2686 Compare the array index by value if it is constant first as we
2687 may have different types but same value here. */
2688 if (!OP_SAME (0))
2689 return 0;
2690 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2691 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2692 TREE_OPERAND (arg1, 1))
2693 || OP_SAME (1))
2694 && OP_SAME_WITH_NULL (2)
2695 && OP_SAME_WITH_NULL (3));
2696
2697 case COMPONENT_REF:
2698 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2699 may be NULL when we're called to compare MEM_EXPRs. */
2700 if (!OP_SAME_WITH_NULL (0)
2701 || !OP_SAME (1))
2702 return 0;
2703 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2704 return OP_SAME_WITH_NULL (2);
2705
2706 case BIT_FIELD_REF:
2707 if (!OP_SAME (0))
2708 return 0;
2709 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2710 return OP_SAME (1) && OP_SAME (2);
2711
2712 default:
2713 return 0;
2714 }
2715
2716 case tcc_expression:
2717 switch (TREE_CODE (arg0))
2718 {
2719 case ADDR_EXPR:
2720 case TRUTH_NOT_EXPR:
2721 return OP_SAME (0);
2722
2723 case TRUTH_ANDIF_EXPR:
2724 case TRUTH_ORIF_EXPR:
2725 return OP_SAME (0) && OP_SAME (1);
2726
2727 case FMA_EXPR:
2728 case WIDEN_MULT_PLUS_EXPR:
2729 case WIDEN_MULT_MINUS_EXPR:
2730 if (!OP_SAME (2))
2731 return 0;
2732 /* The multiplication operands are commutative. */
2733 /* FALLTHRU */
2734
2735 case TRUTH_AND_EXPR:
2736 case TRUTH_OR_EXPR:
2737 case TRUTH_XOR_EXPR:
2738 if (OP_SAME (0) && OP_SAME (1))
2739 return 1;
2740
2741 /* Otherwise take into account this is a commutative operation. */
2742 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2743 TREE_OPERAND (arg1, 1), flags)
2744 && operand_equal_p (TREE_OPERAND (arg0, 1),
2745 TREE_OPERAND (arg1, 0), flags));
2746
2747 case COND_EXPR:
2748 case VEC_COND_EXPR:
2749 case DOT_PROD_EXPR:
2750 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2751
2752 default:
2753 return 0;
2754 }
2755
2756 case tcc_vl_exp:
2757 switch (TREE_CODE (arg0))
2758 {
2759 case CALL_EXPR:
2760 /* If the CALL_EXPRs call different functions, then they
2761 clearly can not be equal. */
2762 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2763 flags))
2764 return 0;
2765
2766 {
2767 unsigned int cef = call_expr_flags (arg0);
2768 if (flags & OEP_PURE_SAME)
2769 cef &= ECF_CONST | ECF_PURE;
2770 else
2771 cef &= ECF_CONST;
2772 if (!cef)
2773 return 0;
2774 }
2775
2776 /* Now see if all the arguments are the same. */
2777 {
2778 const_call_expr_arg_iterator iter0, iter1;
2779 const_tree a0, a1;
2780 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2781 a1 = first_const_call_expr_arg (arg1, &iter1);
2782 a0 && a1;
2783 a0 = next_const_call_expr_arg (&iter0),
2784 a1 = next_const_call_expr_arg (&iter1))
2785 if (! operand_equal_p (a0, a1, flags))
2786 return 0;
2787
2788 /* If we get here and both argument lists are exhausted
2789 then the CALL_EXPRs are equal. */
2790 return ! (a0 || a1);
2791 }
2792 default:
2793 return 0;
2794 }
2795
2796 case tcc_declaration:
2797 /* Consider __builtin_sqrt equal to sqrt. */
2798 return (TREE_CODE (arg0) == FUNCTION_DECL
2799 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2800 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2801 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2802
2803 default:
2804 return 0;
2805 }
2806
2807 #undef OP_SAME
2808 #undef OP_SAME_WITH_NULL
2809 }
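
/* For illustration: operand_equal_p (a + b, b + a, 0) returns 1 through the
   commutativity case under tcc_binary above, while the REAL_CSTs 0.0 and
   -0.0 only compare equal when the mode does not honor signed zeros; with
   OEP_ONLY_CONST set, nothing past the constant cases can return 1.  */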
2810 \f
2811 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2812 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2813
2814 When in doubt, return 0. */
2815
2816 static int
2817 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2818 {
2819 int unsignedp1, unsignedpo;
2820 tree primarg0, primarg1, primother;
2821 unsigned int correct_width;
2822
2823 if (operand_equal_p (arg0, arg1, 0))
2824 return 1;
2825
2826 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2827 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2828 return 0;
2829
2830 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2831 and see if the inner values are the same. This removes any
2832 signedness comparison, which doesn't matter here. */
2833 primarg0 = arg0, primarg1 = arg1;
2834 STRIP_NOPS (primarg0);
2835 STRIP_NOPS (primarg1);
2836 if (operand_equal_p (primarg0, primarg1, 0))
2837 return 1;
2838
2839 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2840 actual comparison operand, ARG0.
2841
2842 First throw away any conversions to wider types
2843 already present in the operands. */
2844
2845 primarg1 = get_narrower (arg1, &unsignedp1);
2846 primother = get_narrower (other, &unsignedpo);
2847
2848 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2849 if (unsignedp1 == unsignedpo
2850 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2851 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2852 {
2853 tree type = TREE_TYPE (arg0);
2854
2855 /* Make sure shorter operand is extended the right way
2856 to match the longer operand. */
2857 primarg1 = fold_convert (signed_or_unsigned_type_for
2858 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2859
2860 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2861 return 1;
2862 }
2863
2864 return 0;
2865 }
2866 \f
2867 /* See if ARG is an expression that is either a comparison or is performing
2868 arithmetic on comparisons. The comparisons must only be comparing
2869 two different values, which will be stored in *CVAL1 and *CVAL2; if
2870 they are nonzero it means that some operands have already been found.
2871 No variables may be used anywhere else in the expression except in the
2872 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2873 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2874
2875 If this is true, return 1. Otherwise, return zero. */
2876
2877 static int
2878 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2879 {
2880 enum tree_code code = TREE_CODE (arg);
2881 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2882
2883 /* We can handle some of the tcc_expression cases here. */
2884 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2885 tclass = tcc_unary;
2886 else if (tclass == tcc_expression
2887 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2888 || code == COMPOUND_EXPR))
2889 tclass = tcc_binary;
2890
2891 else if (tclass == tcc_expression && code == SAVE_EXPR
2892 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2893 {
2894 /* If we've already found a CVAL1 or CVAL2, this expression is
2895 too complex to handle. */
2896 if (*cval1 || *cval2)
2897 return 0;
2898
2899 tclass = tcc_unary;
2900 *save_p = 1;
2901 }
2902
2903 switch (tclass)
2904 {
2905 case tcc_unary:
2906 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2907
2908 case tcc_binary:
2909 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2910 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2911 cval1, cval2, save_p));
2912
2913 case tcc_constant:
2914 return 1;
2915
2916 case tcc_expression:
2917 if (code == COND_EXPR)
2918 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2919 cval1, cval2, save_p)
2920 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2921 cval1, cval2, save_p)
2922 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2923 cval1, cval2, save_p));
2924 return 0;
2925
2926 case tcc_comparison:
2927 /* First see if we can handle the first operand, then the second. For
2928 the second operand, we know *CVAL1 can't be zero. It must be that
2929 one side of the comparison is each of the values; test for the
2930 case where this isn't true by failing if the two operands
2931 are the same. */
2932
2933 if (operand_equal_p (TREE_OPERAND (arg, 0),
2934 TREE_OPERAND (arg, 1), 0))
2935 return 0;
2936
2937 if (*cval1 == 0)
2938 *cval1 = TREE_OPERAND (arg, 0);
2939 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2940 ;
2941 else if (*cval2 == 0)
2942 *cval2 = TREE_OPERAND (arg, 0);
2943 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2944 ;
2945 else
2946 return 0;
2947
2948 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2949 ;
2950 else if (*cval2 == 0)
2951 *cval2 = TREE_OPERAND (arg, 1);
2952 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2953 ;
2954 else
2955 return 0;
2956
2957 return 1;
2958
2959 default:
2960 return 0;
2961 }
2962 }
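
/* For illustration: for ARG = (x < y) || (x == y) the walk above records
   *CVAL1 = x and *CVAL2 = y at the first comparison, verifies that the
   second comparison uses the same pair, and returns 1.  For
   (x < y) || (x == z) the third value z makes it return 0.  */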
2963 \f
2964 /* ARG is a tree that is known to contain just arithmetic operations and
2965 comparisons. Evaluate the operations in the tree substituting NEW0 for
2966 any occurrence of OLD0 as an operand of a comparison and likewise for
2967 NEW1 and OLD1. */
2968
2969 static tree
2970 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2971 tree old1, tree new1)
2972 {
2973 tree type = TREE_TYPE (arg);
2974 enum tree_code code = TREE_CODE (arg);
2975 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2976
2977 /* We can handle some of the tcc_expression cases here. */
2978 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2979 tclass = tcc_unary;
2980 else if (tclass == tcc_expression
2981 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2982 tclass = tcc_binary;
2983
2984 switch (tclass)
2985 {
2986 case tcc_unary:
2987 return fold_build1_loc (loc, code, type,
2988 eval_subst (loc, TREE_OPERAND (arg, 0),
2989 old0, new0, old1, new1));
2990
2991 case tcc_binary:
2992 return fold_build2_loc (loc, code, type,
2993 eval_subst (loc, TREE_OPERAND (arg, 0),
2994 old0, new0, old1, new1),
2995 eval_subst (loc, TREE_OPERAND (arg, 1),
2996 old0, new0, old1, new1));
2997
2998 case tcc_expression:
2999 switch (code)
3000 {
3001 case SAVE_EXPR:
3002 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3003 old1, new1);
3004
3005 case COMPOUND_EXPR:
3006 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3007 old1, new1);
3008
3009 case COND_EXPR:
3010 return fold_build3_loc (loc, code, type,
3011 eval_subst (loc, TREE_OPERAND (arg, 0),
3012 old0, new0, old1, new1),
3013 eval_subst (loc, TREE_OPERAND (arg, 1),
3014 old0, new0, old1, new1),
3015 eval_subst (loc, TREE_OPERAND (arg, 2),
3016 old0, new0, old1, new1));
3017 default:
3018 break;
3019 }
3020 /* Fall through - ??? */
3021
3022 case tcc_comparison:
3023 {
3024 tree arg0 = TREE_OPERAND (arg, 0);
3025 tree arg1 = TREE_OPERAND (arg, 1);
3026
3027 /* We need to check both for exact equality and tree equality. The
3028 former will be true if the operand has a side-effect. In that
3029 case, we know the operand occurred exactly once. */
3030
3031 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3032 arg0 = new0;
3033 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3034 arg0 = new1;
3035
3036 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3037 arg1 = new0;
3038 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3039 arg1 = new1;
3040
3041 return fold_build2_loc (loc, code, type, arg0, arg1);
3042 }
3043
3044 default:
3045 return arg;
3046 }
3047 }
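
/* For illustration: eval_subst (loc, x < y ? e1 : e2, x, a, y, b) rebuilds
   the tree with the comparison operands x and y replaced by a and b,
   giving a < b ? e1' : e2' (the substitution is applied recursively in the
   arms as well); only operands of comparisons are replaced, the
   surrounding structure is merely re-folded.  */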
3048 \f
3049 /* Return a tree for the case when the result of an expression is RESULT
3050 converted to TYPE and OMITTED was previously an operand of the expression
3051 but is now not needed (e.g., we folded OMITTED * 0).
3052
3053 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3054 the conversion of RESULT to TYPE. */
3055
3056 tree
3057 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3058 {
3059 tree t = fold_convert_loc (loc, type, result);
3060
3061 /* If the resulting operand is an empty statement, just return the omitted
3062 statement casted to void. */
3063 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3064 return build1_loc (loc, NOP_EXPR, void_type_node,
3065 fold_ignored_result (omitted));
3066
3067 if (TREE_SIDE_EFFECTS (omitted))
3068 return build2_loc (loc, COMPOUND_EXPR, type,
3069 fold_ignored_result (omitted), t);
3070
3071 return non_lvalue_loc (loc, t);
3072 }
3073
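/* For illustration: when folding f () * 0 the call cannot simply be
   dropped, so omit_one_operand_loc (loc, type, integer_zero_node, call)
   produces the COMPOUND_EXPR (f (), 0): the call is still evaluated for
   its side effects while the value of the whole expression is 0.  */
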
3074 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3075
3076 static tree
3077 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3078 tree omitted)
3079 {
3080 tree t = fold_convert_loc (loc, type, result);
3081
3082 /* If the resulting operand is an empty statement, just return the omitted
3083 statement casted to void. */
3084 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3085 return build1_loc (loc, NOP_EXPR, void_type_node,
3086 fold_ignored_result (omitted));
3087
3088 if (TREE_SIDE_EFFECTS (omitted))
3089 return build2_loc (loc, COMPOUND_EXPR, type,
3090 fold_ignored_result (omitted), t);
3091
3092 return pedantic_non_lvalue_loc (loc, t);
3093 }
3094
3095 /* Return a tree for the case when the result of an expression is RESULT
3096 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3097 of the expression but are now not needed.
3098
3099 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3100 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3101 evaluated before OMITTED2. Otherwise, if neither has side effects,
3102 just do the conversion of RESULT to TYPE. */
3103
3104 tree
3105 omit_two_operands_loc (location_t loc, tree type, tree result,
3106 tree omitted1, tree omitted2)
3107 {
3108 tree t = fold_convert_loc (loc, type, result);
3109
3110 if (TREE_SIDE_EFFECTS (omitted2))
3111 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3112 if (TREE_SIDE_EFFECTS (omitted1))
3113 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3114
3115 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3116 }
3117
3118 \f
3119 /* Return a simplified tree node for the truth-negation of ARG. This
3120 never alters ARG itself. We assume that ARG is an operation that
3121 returns a truth value (0 or 1).
3122
3123 FIXME: one would think we would fold the result, but it causes
3124 problems with the dominator optimizer. */
3125
3126 static tree
3127 fold_truth_not_expr (location_t loc, tree arg)
3128 {
3129 tree type = TREE_TYPE (arg);
3130 enum tree_code code = TREE_CODE (arg);
3131 location_t loc1, loc2;
3132
3133 /* If this is a comparison, we can simply invert it, except for
3134 floating-point non-equality comparisons, in which case we just
3135 enclose a TRUTH_NOT_EXPR around what we have. */
3136
3137 if (TREE_CODE_CLASS (code) == tcc_comparison)
3138 {
3139 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3140 if (FLOAT_TYPE_P (op_type)
3141 && flag_trapping_math
3142 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3143 && code != NE_EXPR && code != EQ_EXPR)
3144 return NULL_TREE;
3145
3146 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3147 if (code == ERROR_MARK)
3148 return NULL_TREE;
3149
3150 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3151 TREE_OPERAND (arg, 1));
3152 }
3153
3154 switch (code)
3155 {
3156 case INTEGER_CST:
3157 return constant_boolean_node (integer_zerop (arg), type);
3158
3159 case TRUTH_AND_EXPR:
3160 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3161 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3162 return build2_loc (loc, TRUTH_OR_EXPR, type,
3163 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3164 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3165
3166 case TRUTH_OR_EXPR:
3167 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3168 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3169 return build2_loc (loc, TRUTH_AND_EXPR, type,
3170 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3171 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3172
3173 case TRUTH_XOR_EXPR:
3174 /* Here we can invert either operand. We invert the first operand
3175 unless the second operand is a TRUTH_NOT_EXPR in which case our
3176 result is the XOR of the first operand with the inside of the
3177 negation of the second operand. */
3178
3179 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3180 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3181 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3182 else
3183 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3184 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3185 TREE_OPERAND (arg, 1));
3186
3187 case TRUTH_ANDIF_EXPR:
3188 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3189 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3190 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3191 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3192 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3193
3194 case TRUTH_ORIF_EXPR:
3195 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3196 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3197 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3198 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3199 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3200
3201 case TRUTH_NOT_EXPR:
3202 return TREE_OPERAND (arg, 0);
3203
3204 case COND_EXPR:
3205 {
3206 tree arg1 = TREE_OPERAND (arg, 1);
3207 tree arg2 = TREE_OPERAND (arg, 2);
3208
3209 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3210 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3211
3212 /* A COND_EXPR may have a throw as one operand, which
3213 then has void type. Just leave void operands
3214 as they are. */
3215 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3216 VOID_TYPE_P (TREE_TYPE (arg1))
3217 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3218 VOID_TYPE_P (TREE_TYPE (arg2))
3219 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3220 }
3221
3222 case COMPOUND_EXPR:
3223 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3224 return build2_loc (loc, COMPOUND_EXPR, type,
3225 TREE_OPERAND (arg, 0),
3226 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3227
3228 case NON_LVALUE_EXPR:
3229 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3230 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3231
3232 CASE_CONVERT:
3233 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3234 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3235
3236 /* ... fall through ... */
3237
3238 case FLOAT_EXPR:
3239 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3240 return build1_loc (loc, TREE_CODE (arg), type,
3241 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3242
3243 case BIT_AND_EXPR:
3244 if (!integer_onep (TREE_OPERAND (arg, 1)))
3245 return NULL_TREE;
3246 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3247
3248 case SAVE_EXPR:
3249 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3250
3251 case CLEANUP_POINT_EXPR:
3252 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3253 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3254 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3255
3256 default:
3257 return NULL_TREE;
3258 }
3259 }
3260
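/* For illustration: the negation above is De Morgan plus a few structural
   cases, e.g.

       !(a && b)      ==>  !a || !b
       !(a ^ !b)      ==>  a ^ b
       !(c ? x : y)   ==>  c ? !x : !y   (void arms are left untouched)

   with each rebuilt operand negated recursively via
   invert_truthvalue_loc.  */
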
3261 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3262 assume that ARG is an operation that returns a truth value (0 or 1
3263 for scalars, 0 or -1 for vectors). Return the folded expression if
3264 folding is successful. Otherwise, return NULL_TREE. */
3265
3266 static tree
3267 fold_invert_truthvalue (location_t loc, tree arg)
3268 {
3269 tree type = TREE_TYPE (arg);
3270 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3271 ? BIT_NOT_EXPR
3272 : TRUTH_NOT_EXPR,
3273 type, arg);
3274 }
3275
3276 /* Return a simplified tree node for the truth-negation of ARG. This
3277 never alters ARG itself. We assume that ARG is an operation that
3278 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3279
3280 tree
3281 invert_truthvalue_loc (location_t loc, tree arg)
3282 {
3283 if (TREE_CODE (arg) == ERROR_MARK)
3284 return arg;
3285
3286 tree type = TREE_TYPE (arg);
3287 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3288 ? BIT_NOT_EXPR
3289 : TRUTH_NOT_EXPR,
3290 type, arg);
3291 }
3292
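/* For illustration: the BIT_NOT_EXPR choice above relies on vector truth
   values being 0 / -1 per lane, where lane-wise bit complement (~0 = -1,
   ~-1 = 0) is exactly logical negation; scalar truth values (0 / 1) need
   TRUTH_NOT_EXPR instead.  */
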
3293 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3294 operands are another bit-wise operation with a common input. If so,
3295 distribute the bit operations to save an operation and possibly two if
3296 constants are involved. For example, convert
3297 (A | B) & (A | C) into A | (B & C)
3298 Further simplification will occur if B and C are constants.
3299
3300 If this optimization cannot be done, 0 will be returned. */
3301
3302 static tree
3303 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3304 tree arg0, tree arg1)
3305 {
3306 tree common;
3307 tree left, right;
3308
3309 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3310 || TREE_CODE (arg0) == code
3311 || (TREE_CODE (arg0) != BIT_AND_EXPR
3312 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3313 return 0;
3314
3315 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3316 {
3317 common = TREE_OPERAND (arg0, 0);
3318 left = TREE_OPERAND (arg0, 1);
3319 right = TREE_OPERAND (arg1, 1);
3320 }
3321 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3322 {
3323 common = TREE_OPERAND (arg0, 0);
3324 left = TREE_OPERAND (arg0, 1);
3325 right = TREE_OPERAND (arg1, 0);
3326 }
3327 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3328 {
3329 common = TREE_OPERAND (arg0, 1);
3330 left = TREE_OPERAND (arg0, 0);
3331 right = TREE_OPERAND (arg1, 1);
3332 }
3333 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3334 {
3335 common = TREE_OPERAND (arg0, 1);
3336 left = TREE_OPERAND (arg0, 0);
3337 right = TREE_OPERAND (arg1, 0);
3338 }
3339 else
3340 return 0;
3341
3342 common = fold_convert_loc (loc, type, common);
3343 left = fold_convert_loc (loc, type, left);
3344 right = fold_convert_loc (loc, type, right);
3345 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3346 fold_build2_loc (loc, code, type, left, right));
3347 }
3348
3349 /* Knowing that ARG0 and ARG1 are each a MULT_EXPR or an RDIV_EXPR, simplify
3350 a binary operation with code CODE. This optimization is unsafe. */
3351 static tree
3352 distribute_real_division (location_t loc, enum tree_code code, tree type,
3353 tree arg0, tree arg1)
3354 {
3355 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3356 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3357
3358 /* (A / C) +- (B / C) -> (A +- B) / C. */
3359 if (mul0 == mul1
3360 && operand_equal_p (TREE_OPERAND (arg0, 1),
3361 TREE_OPERAND (arg1, 1), 0))
3362 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3363 fold_build2_loc (loc, code, type,
3364 TREE_OPERAND (arg0, 0),
3365 TREE_OPERAND (arg1, 0)),
3366 TREE_OPERAND (arg0, 1));
3367
3368 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3369 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3370 TREE_OPERAND (arg1, 0), 0)
3371 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3372 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3373 {
3374 REAL_VALUE_TYPE r0, r1;
3375 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3376 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3377 if (!mul0)
3378 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3379 if (!mul1)
3380 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3381 real_arithmetic (&r0, code, &r0, &r1);
3382 return fold_build2_loc (loc, MULT_EXPR, type,
3383 TREE_OPERAND (arg0, 0),
3384 build_real (type, r0));
3385 }
3386
3387 return NULL_TREE;
3388 }
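
/* Worked example for the second transform: with arg0 = x / 2.0 and
   arg1 = x / 4.0 (neither is a MULT_EXPR, so both reciprocals are formed),

       x / 2.0 + x / 4.0  ==>  x * (1/2.0 + 1/4.0)  ==>  x * 0.75

   The transformation is unsafe for strict IEEE semantics because a
   reciprocal such as 1/3.0 is not exactly representable, so the combined
   constant can round differently from the two original divisions.  */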
3389 \f
3390 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3391 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3392
3393 static tree
3394 make_bit_field_ref (location_t loc, tree inner, tree type,
3395 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3396 {
3397 tree result, bftype;
3398
3399 if (bitpos == 0)
3400 {
3401 tree size = TYPE_SIZE (TREE_TYPE (inner));
3402 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3403 || POINTER_TYPE_P (TREE_TYPE (inner)))
3404 && tree_fits_shwi_p (size)
3405 && tree_to_shwi (size) == bitsize)
3406 return fold_convert_loc (loc, type, inner);
3407 }
3408
3409 bftype = type;
3410 if (TYPE_PRECISION (bftype) != bitsize
3411 || TYPE_UNSIGNED (bftype) == !unsignedp)
3412 bftype = build_nonstandard_integer_type (bitsize, 0);
3413
3414 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3415 size_int (bitsize), bitsize_int (bitpos));
3416
3417 if (bftype != type)
3418 result = fold_convert_loc (loc, type, result);
3419
3420 return result;
3421 }
3422
3423 /* Optimize a bit-field compare.
3424
3425 There are two cases: First is a compare against a constant and the
3426 second is a comparison of two items where the fields are at the same
3427 bit position relative to the start of a chunk (byte, halfword, word)
3428 large enough to contain it. In these cases we can avoid the shift
3429 implicit in bitfield extractions.
3430
3431 For constants, we emit a compare of the shifted constant with the
3432 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3433 compared. For two fields at the same position, we do the ANDs with the
3434 similar mask and compare the result of the ANDs.
3435
3436 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3437 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3438 are the left and right operands of the comparison, respectively.
3439
3440 If the optimization described above can be done, we return the resulting
3441 tree. Otherwise we return zero. */
3442
3443 static tree
3444 optimize_bit_field_compare (location_t loc, enum tree_code code,
3445 tree compare_type, tree lhs, tree rhs)
3446 {
3447 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3448 tree type = TREE_TYPE (lhs);
3449 tree unsigned_type;
3450 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3451 enum machine_mode lmode, rmode, nmode;
3452 int lunsignedp, runsignedp;
3453 int lvolatilep = 0, rvolatilep = 0;
3454 tree linner, rinner = NULL_TREE;
3455 tree mask;
3456 tree offset;
3457
3458 /* Get all the information about the extractions being done. If the bit size
3459 is the same as the size of the underlying object, we aren't doing an
3460 extraction at all and so can do nothing. We also don't want to
3461 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3462 then will no longer be able to replace it. */
3463 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3464 &lunsignedp, &lvolatilep, false);
3465 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3466 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3467 return 0;
3468
3469 if (!const_p)
3470 {
3471 /* If this is not a constant, we can only do something if bit positions,
3472 sizes, and signedness are the same. */
3473 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3474 &runsignedp, &rvolatilep, false);
3475
3476 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3477 || lunsignedp != runsignedp || offset != 0
3478 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3479 return 0;
3480 }
3481
3482 /* See if we can find a mode to refer to this field. We should be able to,
3483 but fail if we can't. */
3484 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3485 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3486 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3487 TYPE_ALIGN (TREE_TYPE (rinner))),
3488 word_mode, false);
3489 if (nmode == VOIDmode)
3490 return 0;
3491
3492 /* Set signed and unsigned types of the precision of this mode for the
3493 shifts below. */
3494 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3495
3496 /* Compute the bit position and size for the new reference and our offset
3497 within it. If the new reference is the same size as the original, we
3498 won't optimize anything, so return zero. */
3499 nbitsize = GET_MODE_BITSIZE (nmode);
3500 nbitpos = lbitpos & ~ (nbitsize - 1);
3501 lbitpos -= nbitpos;
3502 if (nbitsize == lbitsize)
3503 return 0;
3504
3505 if (BYTES_BIG_ENDIAN)
3506 lbitpos = nbitsize - lbitsize - lbitpos;
3507
3508 /* Make the mask to be used against the extracted field. */
3509 mask = build_int_cst_type (unsigned_type, -1);
3510 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3511 mask = const_binop (RSHIFT_EXPR, mask,
3512 size_int (nbitsize - lbitsize - lbitpos));
3513
3514 if (! const_p)
3515 /* If not comparing with constant, just rework the comparison
3516 and return. */
3517 return fold_build2_loc (loc, code, compare_type,
3518 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3519 make_bit_field_ref (loc, linner,
3520 unsigned_type,
3521 nbitsize, nbitpos,
3522 1),
3523 mask),
3524 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3525 make_bit_field_ref (loc, rinner,
3526 unsigned_type,
3527 nbitsize, nbitpos,
3528 1),
3529 mask));
3530
3531 /* Otherwise, we are handling the constant case. See if the constant is too
3532 big for the field. Warn and return a tree for 0 (false) if so. We do
3533 this not only for its own sake, but to avoid having to test for this
3534 error case below. If we didn't, we might generate wrong code.
3535
3536 For unsigned fields, the constant shifted right by the field length should
3537 be all zero. For signed fields, the high-order bits should agree with
3538 the sign bit. */
3539
3540 if (lunsignedp)
3541 {
3542 if (wi::lrshift (rhs, lbitsize) != 0)
3543 {
3544 warning (0, "comparison is always %d due to width of bit-field",
3545 code == NE_EXPR);
3546 return constant_boolean_node (code == NE_EXPR, compare_type);
3547 }
3548 }
3549 else
3550 {
3551 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3552 if (tem != 0 && tem != -1)
3553 {
3554 warning (0, "comparison is always %d due to width of bit-field",
3555 code == NE_EXPR);
3556 return constant_boolean_node (code == NE_EXPR, compare_type);
3557 }
3558 }
3559
3560 /* Single-bit compares should always be against zero. */
3561 if (lbitsize == 1 && ! integer_zerop (rhs))
3562 {
3563 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3564 rhs = build_int_cst (type, 0);
3565 }
3566
3567 /* Make a new bitfield reference, shift the constant over the
3568 appropriate number of bits and mask it with the computed mask
3569 (in case this was a signed field). If we changed it, make a new one. */
3570 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3571
3572 rhs = const_binop (BIT_AND_EXPR,
3573 const_binop (LSHIFT_EXPR,
3574 fold_convert_loc (loc, unsigned_type, rhs),
3575 size_int (lbitpos)),
3576 mask);
3577
3578 lhs = build2_loc (loc, code, compare_type,
3579 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3580 return lhs;
3581 }
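
/* Worked example for the mask construction above: extracting a 4-bit field
   at bit position 8 of a 32-bit word (nbitsize = 32, lbitsize = 4,
   lbitpos = 8 on a little-endian target) computes

       mask = ((unsigned) -1 << 28) >> 20  =  0x00000f00

   so the comparison reduces to a single AND with 0xf00 plus a compare
   against the suitably shifted constant, with no shift of the extracted
   field itself.  */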
3582 \f
3583 /* Subroutine for fold_truth_andor_1: decode a field reference.
3584
3585 If EXP is a comparison reference, we return the innermost reference.
3586
3587 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3588 set to the starting bit number.
3589
3590 If the innermost field can be completely contained in a mode-sized
3591 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3592
3593 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3594 otherwise it is not changed.
3595
3596 *PUNSIGNEDP is set to the signedness of the field.
3597
3598 *PMASK is set to the mask used. This is either contained in a
3599 BIT_AND_EXPR or derived from the width of the field.
3600
3601 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3602
3603 Return 0 if this is not a component reference or is one that we can't
3604 do anything with. */
3605
3606 static tree
3607 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3608 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3609 int *punsignedp, int *pvolatilep,
3610 tree *pmask, tree *pand_mask)
3611 {
3612 tree outer_type = 0;
3613 tree and_mask = 0;
3614 tree mask, inner, offset;
3615 tree unsigned_type;
3616 unsigned int precision;
3617
3618 /* All the optimizations using this function assume integer fields.
3619 There are problems with FP fields since the type_for_size call
3620 below can fail for, e.g., XFmode. */
3621 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3622 return 0;
3623
3624 /* We are interested in the bare arrangement of bits, so strip everything
3625 that doesn't affect the machine mode. However, record the type of the
3626 outermost expression if it may matter below. */
3627 if (CONVERT_EXPR_P (exp)
3628 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3629 outer_type = TREE_TYPE (exp);
3630 STRIP_NOPS (exp);
3631
3632 if (TREE_CODE (exp) == BIT_AND_EXPR)
3633 {
3634 and_mask = TREE_OPERAND (exp, 1);
3635 exp = TREE_OPERAND (exp, 0);
3636 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3637 if (TREE_CODE (and_mask) != INTEGER_CST)
3638 return 0;
3639 }
3640
3641 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3642 punsignedp, pvolatilep, false);
3643 if ((inner == exp && and_mask == 0)
3644 || *pbitsize < 0 || offset != 0
3645 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3646 return 0;
3647
3648 /* If the number of bits in the reference is the same as the bitsize of
3649 the outer type, then the outer type gives the signedness. Otherwise
3650 (in case of a small bitfield) the signedness is unchanged. */
3651 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3652 *punsignedp = TYPE_UNSIGNED (outer_type);
3653
3654 /* Compute the mask to access the bitfield. */
3655 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3656 precision = TYPE_PRECISION (unsigned_type);
3657
3658 mask = build_int_cst_type (unsigned_type, -1);
3659
3660 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3661 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3662
3663 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3664 if (and_mask != 0)
3665 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3666 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3667
3668 *pmask = mask;
3669 *pand_mask = and_mask;
3670 return inner;
3671 }
3672
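/* Illustration only, not part of GCC: the double-shift mask construction
   used in decode_field_reference above, restated on a plain 32-bit
   unsigned int, whose width plays the role of PRECISION.  Assumes
   1 <= bitsize <= 32.  */

static unsigned int
low_order_mask (unsigned int bitsize)
{
  unsigned int mask = ~0u;	/* all ones, like the -1 constant above */
  mask <<= 32 - bitsize;	/* ones in the top BITSIZE positions    */
  mask >>= 32 - bitsize;	/* ones in the low BITSIZE positions    */
  return mask;			/* exactly BITSIZE low-order ones       */
}
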
3673 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3674 bit positions and the type of MASK is signed. */
3675
3676 static int
3677 all_ones_mask_p (const_tree mask, unsigned int size)
3678 {
3679 tree type = TREE_TYPE (mask);
3680 unsigned int precision = TYPE_PRECISION (type);
3681
3682 /* If this function returns true when the type of the mask is
3683 UNSIGNED, then there will be errors. In particular see
3684 gcc.c-torture/execute/990326-1.c. There does not appear to be
3685 any documentation paper trail as to why this is so. But the
3686 pre-wide-int code worked with that restriction and it has been
3687 preserved here. */
3688 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3689 return false;
3690
3691 return wi::mask (size, false, precision) == mask;
3692 }
3693
3694 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3695 represents the sign bit of EXP's type. If EXP represents a sign
3696 or zero extension, also test VAL against the unextended type.
3697 The return value is the (sub)expression whose sign bit is VAL,
3698 or NULL_TREE otherwise. */
3699
3700 static tree
3701 sign_bit_p (tree exp, const_tree val)
3702 {
3703 int width;
3704 tree t;
3705
3706 /* Tree EXP must have an integral type. */
3707 t = TREE_TYPE (exp);
3708 if (! INTEGRAL_TYPE_P (t))
3709 return NULL_TREE;
3710
3711 /* Tree VAL must be an integer constant. */
3712 if (TREE_CODE (val) != INTEGER_CST
3713 || TREE_OVERFLOW (val))
3714 return NULL_TREE;
3715
3716 width = TYPE_PRECISION (t);
3717 if (wi::only_sign_bit_p (val, width))
3718 return exp;
3719
3720 /* Handle extension from a narrower type. */
3721 if (TREE_CODE (exp) == NOP_EXPR
3722 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3723 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3724
3725 return NULL_TREE;
3726 }
3727
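/* Illustration only, not part of GCC: the property wi::only_sign_bit_p
   tests above, restated for a constant held in a 64-bit integer.
   Assumes 1 <= width <= 64; e.g. for width 8 the only accepted value
   is 0x80.  */

static int
is_only_sign_bit (unsigned long long val, int width)
{
  return val == 1ull << (width - 1);
}
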
3728 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3729 to be evaluated unconditionally. */
3730
3731 static int
3732 simple_operand_p (const_tree exp)
3733 {
3734 /* Strip any conversions that don't change the machine mode. */
3735 STRIP_NOPS (exp);
3736
3737 return (CONSTANT_CLASS_P (exp)
3738 || TREE_CODE (exp) == SSA_NAME
3739 || (DECL_P (exp)
3740 && ! TREE_ADDRESSABLE (exp)
3741 && ! TREE_THIS_VOLATILE (exp)
3742 && ! DECL_NONLOCAL (exp)
3743 /* Don't regard global variables as simple. They may be
3744 allocated in ways unknown to the compiler (shared memory,
3745 #pragma weak, etc). */
3746 && ! TREE_PUBLIC (exp)
3747 && ! DECL_EXTERNAL (exp)
3748 /* Weakrefs are not safe to be read, since they can be NULL.
3749 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3750 have DECL_WEAK flag set. */
3751 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3752 /* Loading a static variable is unduly expensive, but global
3753 registers aren't expensive. */
3754 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3755 }
3756
3757 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3758 to be evaluated unconditionally.
3759 In addition to simple_operand_p, we treat comparisons, conversions,
3760 and logical-not operations as simple if their operands are simple, too.
3761
3762 static bool
3763 simple_operand_p_2 (tree exp)
3764 {
3765 enum tree_code code;
3766
3767 if (TREE_SIDE_EFFECTS (exp)
3768 || tree_could_trap_p (exp))
3769 return false;
3770
3771 while (CONVERT_EXPR_P (exp))
3772 exp = TREE_OPERAND (exp, 0);
3773
3774 code = TREE_CODE (exp);
3775
3776 if (TREE_CODE_CLASS (code) == tcc_comparison)
3777 return (simple_operand_p (TREE_OPERAND (exp, 0))
3778 && simple_operand_p (TREE_OPERAND (exp, 1)));
3779
3780 if (code == TRUTH_NOT_EXPR)
3781 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3782
3783 return simple_operand_p (exp);
3784 }
3785
3786 \f
3787 /* The following functions are subroutines to fold_range_test and allow it to
3788 try to change a logical combination of comparisons into a range test.
3789
3790 For example, both
3791 X == 2 || X == 3 || X == 4 || X == 5
3792 and
3793 X >= 2 && X <= 5
3794 are converted to
3795 (unsigned) (X - 2) <= 3
3796
3797 We describe each set of comparisons as being either inside or outside
3798 a range, using a variable named like IN_P, and then describe the
3799 range with a lower and upper bound. If one of the bounds is omitted,
3800 it represents either the highest or lowest value of the type.
3801
3802 In the comments below, we represent a range by two numbers in brackets
3803 preceded by a "+" to designate being inside that range, or a "-" to
3804 designate being outside that range, so the condition can be inverted by
3805 flipping the prefix. An omitted bound is represented by a "-". For
3806 example, "- [-, 10]" means being outside the range starting at the lowest
3807 possible value and ending at 10, in other words, being greater than 10.
3808 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3809 always false.
3810
3811 We set up things so that the missing bounds are handled in a consistent
3812 manner so neither a missing bound nor "true" and "false" need to be
3813 handled using a special case. */
3814
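/* Illustration only, not part of GCC: the rewrite described above, as two
   functions that compute the same truth value.  The folded form subtracts
   in unsigned arithmetic so that values below 2 wrap to large numbers and
   fail the single compare.  Assumes 32-bit int.  */

static int
range_test_source (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
range_test_folded (int x)
{
  return (unsigned int) x - 2u <= 3u;
}
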
3815 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3816 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3817 and UPPER1_P are nonzero if the respective argument is an upper bound
3818 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3819 must be specified for a comparison. ARG1 will be converted to ARG0's
3820 type if both are specified. */
3821
3822 static tree
3823 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3824 tree arg1, int upper1_p)
3825 {
3826 tree tem;
3827 int result;
3828 int sgn0, sgn1;
3829
3830 /* If neither arg represents infinity, do the normal operation.
3831 Else, if not a comparison, return infinity. Else handle the special
3832 comparison rules. Note that most of the cases below won't occur, but
3833 are handled for consistency. */
3834
3835 if (arg0 != 0 && arg1 != 0)
3836 {
3837 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3838 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3839 STRIP_NOPS (tem);
3840 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3841 }
3842
3843 if (TREE_CODE_CLASS (code) != tcc_comparison)
3844 return 0;
3845
3846 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3847 for neither. In real mathematics we cannot assume open-ended ranges
3848 compare equal. But this is computer arithmetic, where numbers are
3849 finite, so we can stand in for any unbounded bound with a value Z
3850 greater than any representable number. This permits us to treat
3851 unbounded ranges as equal. */
3852 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3853 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3854 switch (code)
3855 {
3856 case EQ_EXPR:
3857 result = sgn0 == sgn1;
3858 break;
3859 case NE_EXPR:
3860 result = sgn0 != sgn1;
3861 break;
3862 case LT_EXPR:
3863 result = sgn0 < sgn1;
3864 break;
3865 case LE_EXPR:
3866 result = sgn0 <= sgn1;
3867 break;
3868 case GT_EXPR:
3869 result = sgn0 > sgn1;
3870 break;
3871 case GE_EXPR:
3872 result = sgn0 >= sgn1;
3873 break;
3874 default:
3875 gcc_unreachable ();
3876 }
3877
3878 return constant_boolean_node (result, type);
3879 }
3880 \f
3881 /* Helper routine for make_range. Perform one step for it; return the
3882 new expression if the loop should continue or NULL_TREE if it should
3883 stop. */
3884
3885 tree
3886 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3887 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3888 bool *strict_overflow_p)
3889 {
3890 tree arg0_type = TREE_TYPE (arg0);
3891 tree n_low, n_high, low = *p_low, high = *p_high;
3892 int in_p = *p_in_p, n_in_p;
3893
3894 switch (code)
3895 {
3896 case TRUTH_NOT_EXPR:
3897 /* We can only do something if the range is testing for zero. */
3898 if (low == NULL_TREE || high == NULL_TREE
3899 || ! integer_zerop (low) || ! integer_zerop (high))
3900 return NULL_TREE;
3901 *p_in_p = ! in_p;
3902 return arg0;
3903
3904 case EQ_EXPR: case NE_EXPR:
3905 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3906 /* We can only do something if the range is testing for zero
3907 and if the second operand is an integer constant. Note that
3908 saying something is "in" the range we make is done by
3909 complementing IN_P, since it is set in the initial case of
3910 being not equal to zero; "out" is leaving it alone.
3911 if (low == NULL_TREE || high == NULL_TREE
3912 || ! integer_zerop (low) || ! integer_zerop (high)
3913 || TREE_CODE (arg1) != INTEGER_CST)
3914 return NULL_TREE;
3915
3916 switch (code)
3917 {
3918 case NE_EXPR: /* - [c, c] */
3919 low = high = arg1;
3920 break;
3921 case EQ_EXPR: /* + [c, c] */
3922 in_p = ! in_p, low = high = arg1;
3923 break;
3924 case GT_EXPR: /* - [-, c] */
3925 low = 0, high = arg1;
3926 break;
3927 case GE_EXPR: /* + [c, -] */
3928 in_p = ! in_p, low = arg1, high = 0;
3929 break;
3930 case LT_EXPR: /* - [c, -] */
3931 low = arg1, high = 0;
3932 break;
3933 case LE_EXPR: /* + [-, c] */
3934 in_p = ! in_p, low = 0, high = arg1;
3935 break;
3936 default:
3937 gcc_unreachable ();
3938 }
3939
3940 /* If this is an unsigned comparison, we also know that EXP is
3941 greater than or equal to zero. We base the range tests we make
3942 on that fact, so we record it here so we can parse existing
3943 range tests. We test arg0_type since often the return type
3944 of, e.g. EQ_EXPR, is boolean. */
3945 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3946 {
3947 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3948 in_p, low, high, 1,
3949 build_int_cst (arg0_type, 0),
3950 NULL_TREE))
3951 return NULL_TREE;
3952
3953 in_p = n_in_p, low = n_low, high = n_high;
3954
3955 /* If the high bound is missing, but we have a nonzero low
3956 bound, reverse the range so it goes from zero to the low bound
3957 minus 1. */
3958 if (high == 0 && low && ! integer_zerop (low))
3959 {
3960 in_p = ! in_p;
3961 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3962 build_int_cst (TREE_TYPE (low), 1), 0);
3963 low = build_int_cst (arg0_type, 0);
3964 }
3965 }
3966
3967 *p_low = low;
3968 *p_high = high;
3969 *p_in_p = in_p;
3970 return arg0;
3971
3972 case NEGATE_EXPR:
3973 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3974 low and high are non-NULL, then normalize will DTRT. */
3975 if (!TYPE_UNSIGNED (arg0_type)
3976 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3977 {
3978 if (low == NULL_TREE)
3979 low = TYPE_MIN_VALUE (arg0_type);
3980 if (high == NULL_TREE)
3981 high = TYPE_MAX_VALUE (arg0_type);
3982 }
3983
3984 /* (-x) IN [a,b] -> x in [-b, -a] */
3985 n_low = range_binop (MINUS_EXPR, exp_type,
3986 build_int_cst (exp_type, 0),
3987 0, high, 1);
3988 n_high = range_binop (MINUS_EXPR, exp_type,
3989 build_int_cst (exp_type, 0),
3990 0, low, 0);
3991 if (n_high != 0 && TREE_OVERFLOW (n_high))
3992 return NULL_TREE;
3993 goto normalize;
3994
3995 case BIT_NOT_EXPR:
3996 /* ~ X -> -X - 1 */
3997 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3998 build_int_cst (exp_type, 1));
3999
4000 case PLUS_EXPR:
4001 case MINUS_EXPR:
4002 if (TREE_CODE (arg1) != INTEGER_CST)
4003 return NULL_TREE;
4004
4005 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4006 move a constant to the other side. */
4007 if (!TYPE_UNSIGNED (arg0_type)
4008 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4009 return NULL_TREE;
4010
4011 /* If EXP is signed, any overflow in the computation is undefined,
4012 so we don't worry about it so long as our computations on
4013 the bounds don't overflow. For unsigned, overflow is defined
4014 and this is exactly the right thing. */
4015 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4016 arg0_type, low, 0, arg1, 0);
4017 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4018 arg0_type, high, 1, arg1, 0);
4019 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4020 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4021 return NULL_TREE;
4022
4023 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4024 *strict_overflow_p = true;
4025
4026 normalize:
4027 /* Check for an unsigned range which has wrapped around the maximum
4028 value thus making n_high < n_low, and normalize it. */
4029 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4030 {
4031 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4032 build_int_cst (TREE_TYPE (n_high), 1), 0);
4033 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4034 build_int_cst (TREE_TYPE (n_low), 1), 0);
4035
4036 /* If the range is of the form +/- [ x+1, x ], we won't
4037 be able to normalize it. But then, it represents the
4038 whole range or the empty set, so make it
4039 +/- [ -, - ]. */
4040 if (tree_int_cst_equal (n_low, low)
4041 && tree_int_cst_equal (n_high, high))
4042 low = high = 0;
4043 else
4044 in_p = ! in_p;
4045 }
4046 else
4047 low = n_low, high = n_high;
4048
4049 *p_low = low;
4050 *p_high = high;
4051 *p_in_p = in_p;
4052 return arg0;
4053
4054 CASE_CONVERT:
4055 case NON_LVALUE_EXPR:
4056 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4057 return NULL_TREE;
4058
4059 if (! INTEGRAL_TYPE_P (arg0_type)
4060 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4061 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4062 return NULL_TREE;
4063
4064 n_low = low, n_high = high;
4065
4066 if (n_low != 0)
4067 n_low = fold_convert_loc (loc, arg0_type, n_low);
4068
4069 if (n_high != 0)
4070 n_high = fold_convert_loc (loc, arg0_type, n_high);
4071
4072 /* If we're converting ARG0 from an unsigned type to EXP's signed
4073 type, we will be doing the comparison as unsigned.
4074 The tests above have already verified that LOW and HIGH
4075 are both positive.
4076
4077 So we have to ensure that we will handle large unsigned
4078 values the same way that the current signed bounds treat
4079 negative values. */
4080
4081 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4082 {
4083 tree high_positive;
4084 tree equiv_type;
4085 /* For fixed-point modes, we need to pass the saturating flag
4086 as the 2nd parameter. */
4087 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4088 equiv_type
4089 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4090 TYPE_SATURATING (arg0_type));
4091 else
4092 equiv_type
4093 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4094
4095 /* A range without an upper bound is, naturally, unbounded.
4096 Since convert would have cropped a very large value, use
4097 the max value for the destination type. */
4098 high_positive
4099 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4100 : TYPE_MAX_VALUE (arg0_type);
4101
4102 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4103 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4104 fold_convert_loc (loc, arg0_type,
4105 high_positive),
4106 build_int_cst (arg0_type, 1));
4107
4108 /* If the low bound is specified, "and" the range with the
4109 range for which the original unsigned value will be
4110 positive. */
4111 if (low != 0)
4112 {
4113 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4114 1, fold_convert_loc (loc, arg0_type,
4115 integer_zero_node),
4116 high_positive))
4117 return NULL_TREE;
4118
4119 in_p = (n_in_p == in_p);
4120 }
4121 else
4122 {
4123 /* Otherwise, "or" the range with the range of the input
4124 that will be interpreted as negative. */
4125 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4126 1, fold_convert_loc (loc, arg0_type,
4127 integer_zero_node),
4128 high_positive))
4129 return NULL_TREE;
4130
4131 in_p = (in_p != n_in_p);
4132 }
4133 }
4134
4135 *p_low = n_low;
4136 *p_high = n_high;
4137 *p_in_p = in_p;
4138 return arg0;
4139
4140 default:
4141 return NULL_TREE;
4142 }
4143 }
4144
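/* Illustration only, not part of GCC: the "normalize" step of
   make_range_step above, on a concrete 8-bit example.  Subtracting 250
   from an unsigned char yields the wrapped range [250, 4], which
   normalizes to the complement of [5, 249].  */

static int
wrapped_range_direct (unsigned char x)
{
  return (unsigned char) (x - 250) <= 10;	/* x in {250..255, 0..4} */
}

static int
wrapped_range_normalized (unsigned char x)
{
  return ! (x >= 5 && x <= 249);		/* - [5, 249]: same set  */
}
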
4145 /* Given EXP, a logical expression, set the range it is testing into
4146 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4147 actually being tested. *PLOW and *PHIGH will be made of the same
4148 type as the returned expression. If EXP is not a comparison, we
4149 will most likely not be returning a useful value and range. Set
4150 *STRICT_OVERFLOW_P to true if the return value is only valid
4151 because signed overflow is undefined; otherwise, do not change
4152 *STRICT_OVERFLOW_P. */
4153
4154 tree
4155 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4156 bool *strict_overflow_p)
4157 {
4158 enum tree_code code;
4159 tree arg0, arg1 = NULL_TREE;
4160 tree exp_type, nexp;
4161 int in_p;
4162 tree low, high;
4163 location_t loc = EXPR_LOCATION (exp);
4164
4165 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4166 and see if we can refine the range. Some of the cases below may not
4167 happen, but it doesn't seem worth worrying about this. We keep
4168 looping for as long as make_range_step refines the range, and stop
4169 as soon as it reports that no further refinement is possible. */
4170
4171 in_p = 0;
4172 low = high = build_int_cst (TREE_TYPE (exp), 0);
4173
4174 while (1)
4175 {
4176 code = TREE_CODE (exp);
4177 exp_type = TREE_TYPE (exp);
4178 arg0 = NULL_TREE;
4179
4180 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4181 {
4182 if (TREE_OPERAND_LENGTH (exp) > 0)
4183 arg0 = TREE_OPERAND (exp, 0);
4184 if (TREE_CODE_CLASS (code) == tcc_binary
4185 || TREE_CODE_CLASS (code) == tcc_comparison
4186 || (TREE_CODE_CLASS (code) == tcc_expression
4187 && TREE_OPERAND_LENGTH (exp) > 1))
4188 arg1 = TREE_OPERAND (exp, 1);
4189 }
4190 if (arg0 == NULL_TREE)
4191 break;
4192
4193 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4194 &high, &in_p, strict_overflow_p);
4195 if (nexp == NULL_TREE)
4196 break;
4197 exp = nexp;
4198 }
4199
4200 /* If EXP is a constant, we can evaluate whether this is true or false. */
4201 if (TREE_CODE (exp) == INTEGER_CST)
4202 {
4203 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4204 exp, 0, low, 0))
4205 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4206 exp, 1, high, 1)));
4207 low = high = 0;
4208 exp = 0;
4209 }
4210
4211 *pin_p = in_p, *plow = low, *phigh = high;
4212 return exp;
4213 }
4214 \f
4215 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4216 type, TYPE, return an expression to test if EXP is in (or out of, depending
4217 on IN_P) the range. Return 0 if the test couldn't be created. */
4218
4219 tree
4220 build_range_check (location_t loc, tree type, tree exp, int in_p,
4221 tree low, tree high)
4222 {
4223 tree etype = TREE_TYPE (exp), value;
4224
4225 #ifdef HAVE_canonicalize_funcptr_for_compare
4226 /* Disable this optimization for function pointer expressions
4227 on targets that require function pointer canonicalization. */
4228 if (HAVE_canonicalize_funcptr_for_compare
4229 && TREE_CODE (etype) == POINTER_TYPE
4230 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4231 return NULL_TREE;
4232 #endif
4233
4234 if (! in_p)
4235 {
4236 value = build_range_check (loc, type, exp, 1, low, high);
4237 if (value != 0)
4238 return invert_truthvalue_loc (loc, value);
4239
4240 return 0;
4241 }
4242
4243 if (low == 0 && high == 0)
4244 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4245
4246 if (low == 0)
4247 return fold_build2_loc (loc, LE_EXPR, type, exp,
4248 fold_convert_loc (loc, etype, high));
4249
4250 if (high == 0)
4251 return fold_build2_loc (loc, GE_EXPR, type, exp,
4252 fold_convert_loc (loc, etype, low));
4253
4254 if (operand_equal_p (low, high, 0))
4255 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4256 fold_convert_loc (loc, etype, low));
4257
4258 if (integer_zerop (low))
4259 {
4260 if (! TYPE_UNSIGNED (etype))
4261 {
4262 etype = unsigned_type_for (etype);
4263 high = fold_convert_loc (loc, etype, high);
4264 exp = fold_convert_loc (loc, etype, exp);
4265 }
4266 return build_range_check (loc, type, exp, 1, 0, high);
4267 }
4268
4269 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4270 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4271 {
4272 int prec = TYPE_PRECISION (etype);
4273
4274 if (wi::mask (prec - 1, false, prec) == high)
4275 {
4276 if (TYPE_UNSIGNED (etype))
4277 {
4278 tree signed_etype = signed_type_for (etype);
4279 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4280 etype
4281 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4282 else
4283 etype = signed_etype;
4284 exp = fold_convert_loc (loc, etype, exp);
4285 }
4286 return fold_build2_loc (loc, GT_EXPR, type, exp,
4287 build_int_cst (etype, 0));
4288 }
4289 }
4290
4291 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4292 This requires wrap-around arithmetic for the type of the expression.
4293 First make sure that arithmetic in this type is valid, then make sure
4294 that it wraps around. */
4295 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4296 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4297 TYPE_UNSIGNED (etype));
4298
4299 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4300 {
4301 tree utype, minv, maxv;
4302
4303 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4304 for the type in question, as we rely on this here. */
4305 utype = unsigned_type_for (etype);
4306 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4307 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4308 build_int_cst (TREE_TYPE (maxv), 1), 1);
4309 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4310
4311 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4312 minv, 1, maxv, 1)))
4313 etype = utype;
4314 else
4315 return 0;
4316 }
4317
4318 high = fold_convert_loc (loc, etype, high);
4319 low = fold_convert_loc (loc, etype, low);
4320 exp = fold_convert_loc (loc, etype, exp);
4321
4322 value = const_binop (MINUS_EXPR, high, low);
4323
4324
4325 if (POINTER_TYPE_P (etype))
4326 {
4327 if (value != 0 && !TREE_OVERFLOW (value))
4328 {
4329 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4330 return build_range_check (loc, type,
4331 fold_build_pointer_plus_loc (loc, exp, low),
4332 1, build_int_cst (etype, 0), value);
4333 }
4334 return 0;
4335 }
4336
4337 if (value != 0 && !TREE_OVERFLOW (value))
4338 return build_range_check (loc, type,
4339 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4340 1, build_int_cst (etype, 0), value);
4341
4342 return 0;
4343 }
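/* Illustration only, not part of GCC: the two build_range_check rewrites
   above on concrete 8-bit examples.  The bounds chosen here are
   hypothetical.  */

static int
range_1_to_127 (unsigned char c)
{
  /* (c >= 1 && c <= 127) folds to a sign test in the signed type.  */
  return (signed char) c > 0;
}

static int
range_low_high (unsigned char c)
{
  /* (c >= 'a' && c <= 'z') folds to one compare after shifting by LOW.  */
  return (unsigned char) (c - 'a') <= 'z' - 'a';
}
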
4344 \f
4345 /* Return the predecessor of VAL in its type, handling the infinite case. */
4346
4347 static tree
4348 range_predecessor (tree val)
4349 {
4350 tree type = TREE_TYPE (val);
4351
4352 if (INTEGRAL_TYPE_P (type)
4353 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4354 return 0;
4355 else
4356 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4357 build_int_cst (TREE_TYPE (val), 1), 0);
4358 }
4359
4360 /* Return the successor of VAL in its type, handling the infinite case. */
4361
4362 static tree
4363 range_successor (tree val)
4364 {
4365 tree type = TREE_TYPE (val);
4366
4367 if (INTEGRAL_TYPE_P (type)
4368 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4369 return 0;
4370 else
4371 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4372 build_int_cst (TREE_TYPE (val), 1), 0);
4373 }
4374
4375 /* Given two ranges, see if we can merge them into one. Return 1 if we
4376 can, 0 if we can't. Set the output range into the specified parameters. */
4377
4378 bool
4379 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4380 tree high0, int in1_p, tree low1, tree high1)
4381 {
4382 int no_overlap;
4383 int subset;
4384 int temp;
4385 tree tem;
4386 int in_p;
4387 tree low, high;
4388 int lowequal = ((low0 == 0 && low1 == 0)
4389 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4390 low0, 0, low1, 0)));
4391 int highequal = ((high0 == 0 && high1 == 0)
4392 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4393 high0, 1, high1, 1)));
4394
4395 /* Make range 0 be the range that starts first, or ends last if they
4396 start at the same value. Swap them if it isn't. */
4397 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4398 low0, 0, low1, 0))
4399 || (lowequal
4400 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4401 high1, 1, high0, 1))))
4402 {
4403 temp = in0_p, in0_p = in1_p, in1_p = temp;
4404 tem = low0, low0 = low1, low1 = tem;
4405 tem = high0, high0 = high1, high1 = tem;
4406 }
4407
4408 /* Now flag two cases, whether the ranges are disjoint or whether the
4409 second range is totally subsumed in the first. Note that the tests
4410 below are simplified by the ones above. */
4411 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4412 high0, 1, low1, 0));
4413 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4414 high1, 1, high0, 1));
4415
4416 /* We now have four cases, depending on whether we are including or
4417 excluding the two ranges. */
4418 if (in0_p && in1_p)
4419 {
4420 /* If they don't overlap, the result is false. If the second range
4421 is a subset it is the result. Otherwise, the range is from the start
4422 of the second to the end of the first. */
4423 if (no_overlap)
4424 in_p = 0, low = high = 0;
4425 else if (subset)
4426 in_p = 1, low = low1, high = high1;
4427 else
4428 in_p = 1, low = low1, high = high0;
4429 }
4430
4431 else if (in0_p && ! in1_p)
4432 {
4433 /* If they don't overlap, the result is the first range. If they are
4434 equal, the result is false. If the second range is a subset of the
4435 first, and the ranges begin at the same place, we go from just after
4436 the end of the second range to the end of the first. If the second
4437 range is not a subset of the first, or if it is a subset and both
4438 ranges end at the same place, the range starts at the start of the
4439 first range and ends just before the second range.
4440 Otherwise, we can't describe this as a single range. */
4441 if (no_overlap)
4442 in_p = 1, low = low0, high = high0;
4443 else if (lowequal && highequal)
4444 in_p = 0, low = high = 0;
4445 else if (subset && lowequal)
4446 {
4447 low = range_successor (high1);
4448 high = high0;
4449 in_p = 1;
4450 if (low == 0)
4451 {
4452 /* We are in the weird situation where high0 > high1 but
4453 high1 has no successor. Punt. */
4454 return 0;
4455 }
4456 }
4457 else if (! subset || highequal)
4458 {
4459 low = low0;
4460 high = range_predecessor (low1);
4461 in_p = 1;
4462 if (high == 0)
4463 {
4464 /* low0 < low1 but low1 has no predecessor. Punt. */
4465 return 0;
4466 }
4467 }
4468 else
4469 return 0;
4470 }
4471
4472 else if (! in0_p && in1_p)
4473 {
4474 /* If they don't overlap, the result is the second range. If the second
4475 is a subset of the first, the result is false. Otherwise,
4476 the range starts just after the first range and ends at the
4477 end of the second. */
4478 if (no_overlap)
4479 in_p = 1, low = low1, high = high1;
4480 else if (subset || highequal)
4481 in_p = 0, low = high = 0;
4482 else
4483 {
4484 low = range_successor (high0);
4485 high = high1;
4486 in_p = 1;
4487 if (low == 0)
4488 {
4489 /* high1 > high0 but high0 has no successor. Punt. */
4490 return 0;
4491 }
4492 }
4493 }
4494
4495 else
4496 {
4497 /* The case where we are excluding both ranges. Here the complex case
4498 is if they don't overlap. In that case, the only time we have a
4499 range is if they are adjacent. If the second is a subset of the
4500 first, the result is the first. Otherwise, the range to exclude
4501 starts at the beginning of the first range and ends at the end of the
4502 second. */
4503 if (no_overlap)
4504 {
4505 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4506 range_successor (high0),
4507 1, low1, 0)))
4508 in_p = 0, low = low0, high = high1;
4509 else
4510 {
4511 /* Canonicalize - [min, x] into - [-, x]. */
4512 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4513 switch (TREE_CODE (TREE_TYPE (low0)))
4514 {
4515 case ENUMERAL_TYPE:
4516 if (TYPE_PRECISION (TREE_TYPE (low0))
4517 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4518 break;
4519 /* FALLTHROUGH */
4520 case INTEGER_TYPE:
4521 if (tree_int_cst_equal (low0,
4522 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4523 low0 = 0;
4524 break;
4525 case POINTER_TYPE:
4526 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4527 && integer_zerop (low0))
4528 low0 = 0;
4529 break;
4530 default:
4531 break;
4532 }
4533
4534 /* Canonicalize - [x, max] into - [x, -]. */
4535 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4536 switch (TREE_CODE (TREE_TYPE (high1)))
4537 {
4538 case ENUMERAL_TYPE:
4539 if (TYPE_PRECISION (TREE_TYPE (high1))
4540 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4541 break;
4542 /* FALLTHROUGH */
4543 case INTEGER_TYPE:
4544 if (tree_int_cst_equal (high1,
4545 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4546 high1 = 0;
4547 break;
4548 case POINTER_TYPE:
4549 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4550 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4551 high1, 1,
4552 build_int_cst (TREE_TYPE (high1), 1),
4553 1)))
4554 high1 = 0;
4555 break;
4556 default:
4557 break;
4558 }
4559
4560 /* The ranges might also be adjacent between the maximum and
4561 minimum values of the given type. For
4562 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4563 return + [x + 1, y - 1]. */
4564 if (low0 == 0 && high1 == 0)
4565 {
4566 low = range_successor (high0);
4567 high = range_predecessor (low1);
4568 if (low == 0 || high == 0)
4569 return 0;
4570
4571 in_p = 1;
4572 }
4573 else
4574 return 0;
4575 }
4576 }
4577 else if (subset)
4578 in_p = 0, low = low0, high = high0;
4579 else
4580 in_p = 0, low = low0, high = high1;
4581 }
4582
4583 *pin_p = in_p, *plow = low, *phigh = high;
4584 return 1;
4585 }
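
/* Illustration only, not part of GCC: the min/max adjacency case above.
   Excluding - [-, 10] and - [20, -] leaves the single range + [11, 19],
   which can then be tested with one unsigned compare.  */

static int
two_exclusions (int x)
{
  return x > 10 && x < 20;		/* - [-, 10] and - [20, -] */
}

static int
merged_exclusions (int x)
{
  return (unsigned int) x - 11u <= 8u;	/* + [11, 19] */
}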
4586 \f
4587
4588 /* Subroutine of fold, looking inside expressions of the form
4589 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4590 of the COND_EXPR. This function is being used also to optimize
4591 A op B ? C : A, by reversing the comparison first.
4592
4593 Return a folded expression whose code is not a COND_EXPR
4594 anymore, or NULL_TREE if no folding opportunity is found. */
4595
4596 static tree
4597 fold_cond_expr_with_comparison (location_t loc, tree type,
4598 tree arg0, tree arg1, tree arg2)
4599 {
4600 enum tree_code comp_code = TREE_CODE (arg0);
4601 tree arg00 = TREE_OPERAND (arg0, 0);
4602 tree arg01 = TREE_OPERAND (arg0, 1);
4603 tree arg1_type = TREE_TYPE (arg1);
4604 tree tem;
4605
4606 STRIP_NOPS (arg1);
4607 STRIP_NOPS (arg2);
4608
4609 /* If we have A op 0 ? A : -A, consider applying the following
4610 transformations:
4611
4612 A == 0? A : -A same as -A
4613 A != 0? A : -A same as A
4614 A >= 0? A : -A same as abs (A)
4615 A > 0? A : -A same as abs (A)
4616 A <= 0? A : -A same as -abs (A)
4617 A < 0? A : -A same as -abs (A)
4618
4619 None of these transformations work for modes with signed
4620 zeros. If A is +/-0, the first two transformations will
4621 change the sign of the result (from +0 to -0, or vice
4622 versa). The last four will fix the sign of the result,
4623 even though the original expressions could be positive or
4624 negative, depending on the sign of A.
4625
4626 Note that all these transformations are correct if A is
4627 NaN, since the two alternatives (A and -A) are also NaNs. */
4628 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4629 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4630 ? real_zerop (arg01)
4631 : integer_zerop (arg01))
4632 && ((TREE_CODE (arg2) == NEGATE_EXPR
4633 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4634 /* In the case that A is of the form X-Y, '-A' (arg2) may
4635 have already been folded to Y-X, check for that. */
4636 || (TREE_CODE (arg1) == MINUS_EXPR
4637 && TREE_CODE (arg2) == MINUS_EXPR
4638 && operand_equal_p (TREE_OPERAND (arg1, 0),
4639 TREE_OPERAND (arg2, 1), 0)
4640 && operand_equal_p (TREE_OPERAND (arg1, 1),
4641 TREE_OPERAND (arg2, 0), 0))))
4642 switch (comp_code)
4643 {
4644 case EQ_EXPR:
4645 case UNEQ_EXPR:
4646 tem = fold_convert_loc (loc, arg1_type, arg1);
4647 return pedantic_non_lvalue_loc (loc,
4648 fold_convert_loc (loc, type,
4649 negate_expr (tem)));
4650 case NE_EXPR:
4651 case LTGT_EXPR:
4652 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4653 case UNGE_EXPR:
4654 case UNGT_EXPR:
4655 if (flag_trapping_math)
4656 break;
4657 /* Fall through. */
4658 case GE_EXPR:
4659 case GT_EXPR:
4660 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4661 arg1 = fold_convert_loc (loc, signed_type_for
4662 (TREE_TYPE (arg1)), arg1);
4663 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4664 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4665 case UNLE_EXPR:
4666 case UNLT_EXPR:
4667 if (flag_trapping_math)
4668 break;
4669 case LE_EXPR:
4670 case LT_EXPR:
4671 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4672 arg1 = fold_convert_loc (loc, signed_type_for
4673 (TREE_TYPE (arg1)), arg1);
4674 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4675 return negate_expr (fold_convert_loc (loc, type, tem));
4676 default:
4677 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4678 break;
4679 }
4680
4681 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4682 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4683 both transformations are correct when A is NaN: A != 0
4684 is then true, and A == 0 is false. */
4685
4686 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4687 && integer_zerop (arg01) && integer_zerop (arg2))
4688 {
4689 if (comp_code == NE_EXPR)
4690 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4691 else if (comp_code == EQ_EXPR)
4692 return build_zero_cst (type);
4693 }
4694
4695 /* Try some transformations of A op B ? A : B.
4696
4697 A == B? A : B same as B
4698 A != B? A : B same as A
4699 A >= B? A : B same as max (A, B)
4700 A > B? A : B same as max (B, A)
4701 A <= B? A : B same as min (A, B)
4702 A < B? A : B same as min (B, A)
4703
4704 As above, these transformations don't work in the presence
4705 of signed zeros. For example, if A and B are zeros of
4706 opposite sign, the first two transformations will change
4707 the sign of the result. In the last four, the original
4708 expressions give different results for (A=+0, B=-0) and
4709 (A=-0, B=+0), but the transformed expressions do not.
4710
4711 The first two transformations are correct if either A or B
4712 is a NaN. In the first transformation, the condition will
4713 be false, and B will indeed be chosen. In the case of the
4714 second transformation, the condition A != B will be true,
4715 and A will be chosen.
4716
4717 The conversions to max() and min() are not correct if B is
4718 a number and A is not. The conditions in the original
4719 expressions will be false, so all four give B. The min()
4720 and max() versions would give a NaN instead. */
4721 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4722 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4723 /* Avoid these transformations if the COND_EXPR may be used
4724 as an lvalue in the C++ front-end. PR c++/19199. */
4725 && (in_gimple_form
4726 || VECTOR_TYPE_P (type)
4727 || (strcmp (lang_hooks.name, "GNU C++") != 0
4728 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4729 || ! maybe_lvalue_p (arg1)
4730 || ! maybe_lvalue_p (arg2)))
4731 {
4732 tree comp_op0 = arg00;
4733 tree comp_op1 = arg01;
4734 tree comp_type = TREE_TYPE (comp_op0);
4735
4736 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4737 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4738 {
4739 comp_type = type;
4740 comp_op0 = arg1;
4741 comp_op1 = arg2;
4742 }
4743
4744 switch (comp_code)
4745 {
4746 case EQ_EXPR:
4747 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4748 case NE_EXPR:
4749 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4750 case LE_EXPR:
4751 case LT_EXPR:
4752 case UNLE_EXPR:
4753 case UNLT_EXPR:
4754 /* In C++ a ?: expression can be an lvalue, so put the
4755 operand which will be used if they are equal first
4756 so that we can convert this back to the
4757 corresponding COND_EXPR. */
4758 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4759 {
4760 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4761 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4762 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4763 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4764 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4765 comp_op1, comp_op0);
4766 return pedantic_non_lvalue_loc (loc,
4767 fold_convert_loc (loc, type, tem));
4768 }
4769 break;
4770 case GE_EXPR:
4771 case GT_EXPR:
4772 case UNGE_EXPR:
4773 case UNGT_EXPR:
4774 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4775 {
4776 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4777 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4778 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4779 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4780 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4781 comp_op1, comp_op0);
4782 return pedantic_non_lvalue_loc (loc,
4783 fold_convert_loc (loc, type, tem));
4784 }
4785 break;
4786 case UNEQ_EXPR:
4787 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4788 return pedantic_non_lvalue_loc (loc,
4789 fold_convert_loc (loc, type, arg2));
4790 break;
4791 case LTGT_EXPR:
4792 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4793 return pedantic_non_lvalue_loc (loc,
4794 fold_convert_loc (loc, type, arg1));
4795 break;
4796 default:
4797 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4798 break;
4799 }
4800 }
4801
4802 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4803 we might still be able to simplify this. For example,
4804 if C1 is one less or one more than C2, this might have started
4805 out as a MIN or MAX and been transformed by this function.
4806 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4807
4808 if (INTEGRAL_TYPE_P (type)
4809 && TREE_CODE (arg01) == INTEGER_CST
4810 && TREE_CODE (arg2) == INTEGER_CST)
4811 switch (comp_code)
4812 {
4813 case EQ_EXPR:
4814 if (TREE_CODE (arg1) == INTEGER_CST)
4815 break;
4816 /* We can replace A with C1 in this case. */
4817 arg1 = fold_convert_loc (loc, type, arg01);
4818 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4819
4820 case LT_EXPR:
4821 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4822 MIN_EXPR, to preserve the signedness of the comparison. */
4823 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4824 OEP_ONLY_CONST)
4825 && operand_equal_p (arg01,
4826 const_binop (PLUS_EXPR, arg2,
4827 build_int_cst (type, 1)),
4828 OEP_ONLY_CONST))
4829 {
4830 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4831 fold_convert_loc (loc, TREE_TYPE (arg00),
4832 arg2));
4833 return pedantic_non_lvalue_loc (loc,
4834 fold_convert_loc (loc, type, tem));
4835 }
4836 break;
4837
4838 case LE_EXPR:
4839 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4840 as above. */
4841 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4842 OEP_ONLY_CONST)
4843 && operand_equal_p (arg01,
4844 const_binop (MINUS_EXPR, arg2,
4845 build_int_cst (type, 1)),
4846 OEP_ONLY_CONST))
4847 {
4848 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4849 fold_convert_loc (loc, TREE_TYPE (arg00),
4850 arg2));
4851 return pedantic_non_lvalue_loc (loc,
4852 fold_convert_loc (loc, type, tem));
4853 }
4854 break;
4855
4856 case GT_EXPR:
4857 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4858 MAX_EXPR, to preserve the signedness of the comparison. */
4859 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4860 OEP_ONLY_CONST)
4861 && operand_equal_p (arg01,
4862 const_binop (MINUS_EXPR, arg2,
4863 build_int_cst (type, 1)),
4864 OEP_ONLY_CONST))
4865 {
4866 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4867 fold_convert_loc (loc, TREE_TYPE (arg00),
4868 arg2));
4869 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4870 }
4871 break;
4872
4873 case GE_EXPR:
4874 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4875 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4876 OEP_ONLY_CONST)
4877 && operand_equal_p (arg01,
4878 const_binop (PLUS_EXPR, arg2,
4879 build_int_cst (type, 1)),
4880 OEP_ONLY_CONST))
4881 {
4882 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4883 fold_convert_loc (loc, TREE_TYPE (arg00),
4884 arg2));
4885 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4886 }
4887 break;
4888 case NE_EXPR:
4889 break;
4890 default:
4891 gcc_unreachable ();
4892 }
4893
4894 return NULL_TREE;
4895 }
4896
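/* Illustration only, not part of GCC: why the A >= 0 ? A : -A -> abs (A)
   rewrite above must be guarded by HONOR_SIGNED_ZEROS.  For A == -0.0
   the comparison -0.0 >= 0 is true, so the conditional yields -0.0,
   while fabs (-0.0) yields +0.0; the results differ in their sign bit
   even though they compare equal with ==.  */

static double
cond_abs (double a)
{
  return a >= 0 ? a : -a;	/* returns -0.0 for a == -0.0 */
}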
4897
4898 \f
4899 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4900 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4901 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4902 false) >= 2)
4903 #endif
4904
4905 /* EXP is some logical combination of boolean tests. See if we can
4906 merge it into some range test. Return the new tree if so. */
4907
4908 static tree
4909 fold_range_test (location_t loc, enum tree_code code, tree type,
4910 tree op0, tree op1)
4911 {
4912 int or_op = (code == TRUTH_ORIF_EXPR
4913 || code == TRUTH_OR_EXPR);
4914 int in0_p, in1_p, in_p;
4915 tree low0, low1, low, high0, high1, high;
4916 bool strict_overflow_p = false;
4917 tree tem, lhs, rhs;
4918 const char * const warnmsg = G_("assuming signed overflow does not occur "
4919 "when simplifying range test");
4920
4921 if (!INTEGRAL_TYPE_P (type))
4922 return 0;
4923
4924 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4925 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4926
4927 /* If this is an OR operation, invert both sides; we will invert
4928 again at the end. */
4929 if (or_op)
4930 in0_p = ! in0_p, in1_p = ! in1_p;
4931
4932 /* If both expressions are the same, if we can merge the ranges, and we
4933 can build the range test, return it or its inversion. If one of the
4934 ranges is always true or always false, consider it to be the same
4935 expression as the other. */
4936 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4937 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4938 in1_p, low1, high1)
4939 && 0 != (tem = (build_range_check (loc, type,
4940 lhs != 0 ? lhs
4941 : rhs != 0 ? rhs : integer_zero_node,
4942 in_p, low, high))))
4943 {
4944 if (strict_overflow_p)
4945 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4946 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4947 }
4948
4949 /* On machines where the branch cost is expensive, if this is a
4950 short-circuited branch and the underlying object on both sides
4951 is the same, make a non-short-circuit operation. */
4952 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4953 && lhs != 0 && rhs != 0
4954 && (code == TRUTH_ANDIF_EXPR
4955 || code == TRUTH_ORIF_EXPR)
4956 && operand_equal_p (lhs, rhs, 0))
4957 {
4958 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4959 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4960 which cases we can't do this. */
4961 if (simple_operand_p (lhs))
4962 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4963 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4964 type, op0, op1);
4965
4966 else if (!lang_hooks.decls.global_bindings_p ()
4967 && !CONTAINS_PLACEHOLDER_P (lhs))
4968 {
4969 tree common = save_expr (lhs);
4970
4971 if (0 != (lhs = build_range_check (loc, type, common,
4972 or_op ? ! in0_p : in0_p,
4973 low0, high0))
4974 && (0 != (rhs = build_range_check (loc, type, common,
4975 or_op ? ! in1_p : in1_p,
4976 low1, high1))))
4977 {
4978 if (strict_overflow_p)
4979 fold_overflow_warning (warnmsg,
4980 WARN_STRICT_OVERFLOW_COMPARISON);
4981 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4982 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4983 type, lhs, rhs);
4984 }
4985 }
4986 }
4987
4988 return 0;
4989 }
4990 \f
4991 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4992 bit value. Arrange things so the extra bits will be set to zero if and
4993 only if C is sign-extended to its full width. If MASK is nonzero,
4994 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4995
4996 static tree
4997 unextend (tree c, int p, int unsignedp, tree mask)
4998 {
4999 tree type = TREE_TYPE (c);
5000 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5001 tree temp;
5002
5003 if (p == modesize || unsignedp)
5004 return c;
5005
5006 /* We work by getting just the sign bit into the low-order bit, then
5007 into the high-order bit, then sign-extend. We then XOR that value
5008 with C. */
5009 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5010
5011 /* We must use a signed type in order to get an arithmetic right shift.
5012 However, we must also avoid introducing accidental overflows, so that
5013 a subsequent call to integer_zerop will work. Hence we must
5014 do the type conversion here. At this point, the constant is either
5015 zero or one, and the conversion to a signed type can never overflow.
5016 We could get an overflow if this conversion is done anywhere else. */
5017 if (TYPE_UNSIGNED (type))
5018 temp = fold_convert (signed_type_for (type), temp);
5019
5020 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5021 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5022 if (mask != 0)
5023 temp = const_binop (BIT_AND_EXPR, temp,
5024 fold_convert (TREE_TYPE (c), mask));
5025 /* If necessary, convert the type back to match the type of C. */
5026 if (TYPE_UNSIGNED (type))
5027 temp = fold_convert (type, temp);
5028
5029 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5030 }
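
/* Illustration only, not part of GCC: the sign-bit trick above, worked
   for P == 4 in an 8-bit mode.  Assumes an 8-bit two's-complement
   signed char with arithmetic right shifts, as GCC itself provides.
   For c == 0x0a (the 4-bit value -6) the result is 0xfa, the 8-bit
   sign extension of the field.  */

static signed char
unextend_demo (unsigned char c)
{
  signed char temp = (c >> 3) & 1;	/* the field's sign bit (P - 1)   */
  temp = (signed char) (temp << 7);	/* move it to the high-order bit  */
  temp >>= 8 - 4 - 1;			/* arithmetic shift: 0 or 0xf0    */
  return (signed char) (c ^ temp);	/* XOR in the sign-extension bits */
}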
5031 \f
5032 /* For an expression that has the form
5033 (A && B) || ~B
5034 or
5035 (A || B) && ~B,
5036 we can drop one of the inner expressions and simplify to
5037 A || ~B
5038 or
5039 A && ~B
5040 LOC is the location of the resulting expression. OP is the inner
5041 logical operation; the left-hand side in the examples above, while CMPOP
5042 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5043 removing a condition that guards another, as in
5044 (A != NULL && A->...) || A == NULL
5045 which we must not transform. If RHS_ONLY is true, only eliminate the
5046 right-most operand of the inner logical operation. */
5047
5048 static tree
5049 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5050 bool rhs_only)
5051 {
5052 tree type = TREE_TYPE (cmpop);
5053 enum tree_code code = TREE_CODE (cmpop);
5054 enum tree_code truthop_code = TREE_CODE (op);
5055 tree lhs = TREE_OPERAND (op, 0);
5056 tree rhs = TREE_OPERAND (op, 1);
5057 tree orig_lhs = lhs, orig_rhs = rhs;
5058 enum tree_code rhs_code = TREE_CODE (rhs);
5059 enum tree_code lhs_code = TREE_CODE (lhs);
5060 enum tree_code inv_code;
5061
5062 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5063 return NULL_TREE;
5064
5065 if (TREE_CODE_CLASS (code) != tcc_comparison)
5066 return NULL_TREE;
5067
5068 if (rhs_code == truthop_code)
5069 {
5070 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5071 if (newrhs != NULL_TREE)
5072 {
5073 rhs = newrhs;
5074 rhs_code = TREE_CODE (rhs);
5075 }
5076 }
5077 if (lhs_code == truthop_code && !rhs_only)
5078 {
5079 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5080 if (newlhs != NULL_TREE)
5081 {
5082 lhs = newlhs;
5083 lhs_code = TREE_CODE (lhs);
5084 }
5085 }
5086
5087 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5088 if (inv_code == rhs_code
5089 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5090 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5091 return lhs;
5092 if (!rhs_only && inv_code == lhs_code
5093 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5094 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5095 return rhs;
5096 if (rhs != orig_rhs || lhs != orig_lhs)
5097 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5098 lhs, rhs);
5099 return NULL_TREE;
5100 }
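
/* Illustration only, not part of GCC: the identity above, checked over
   all boolean inputs.  With B reduced to a plain truth value, (A && B)
   || !B equals A || !B, which is why the inner B can be dropped.  */

static int
truthop_identity_holds (void)
{
  int a, b;

  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      if (((a && b) || !b) != (a || !b))
        return 0;
  return 1;
}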
5101
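/* Illustration only, not part of GCC, of the merge performed by
   fold_truth_andor_1 below: with two adjacent bit-fields, the pair of
   compares can become one masked compare of the byte holding both
   fields.  The layout shown is hypothetical and target-dependent.  */

struct demo_flags
{
  unsigned int a : 4;
  unsigned int b : 4;
};

static int
two_field_compares (const struct demo_flags *p)
{
  /* On a little-endian target with A in the low nibble, this can fold
     to a single compare of the containing byte against (4 << 4) | 2.  */
  return p->a == 2 && p->b == 4;
}
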
5102 /* Find ways of folding logical expressions of LHS and RHS:
5103 Try to merge two comparisons to the same innermost item.
5104 Look for range tests like "ch >= '0' && ch <= '9'".
5105 Look for combinations of simple terms on machines with expensive branches
5106 and evaluate the RHS unconditionally.
5107
5108 For example, if we have p->a == 2 && p->b == 4 and we can make an
5109 object large enough to span both A and B, we can do this with a comparison
5110 against the object ANDed with a mask.
5111
5112 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5113 operations to do this with one comparison.
5114
5115 We check for both normal comparisons and the BIT_AND_EXPRs made by
5116 this function and the one above.
5117
5118 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5119 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5120
5121 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5122 two operands.
5123
5124 We return the simplified tree or 0 if no optimization is possible. */
5125
5126 static tree
5127 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5128 tree lhs, tree rhs)
5129 {
5130 /* If this is the "or" of two comparisons, we can do something if
5131 the comparisons are NE_EXPR. If this is the "and", we can do something
5132 if the comparisons are EQ_EXPR. I.e.,
5133 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5134
5135 WANTED_CODE is this operation code. For single bit fields, we can
5136 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5137 comparison for one-bit fields. */
5138
5139 enum tree_code wanted_code;
5140 enum tree_code lcode, rcode;
5141 tree ll_arg, lr_arg, rl_arg, rr_arg;
5142 tree ll_inner, lr_inner, rl_inner, rr_inner;
5143 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5144 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5145 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5146 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5147 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5148 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5149 enum machine_mode lnmode, rnmode;
5150 tree ll_mask, lr_mask, rl_mask, rr_mask;
5151 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5152 tree l_const, r_const;
5153 tree lntype, rntype, result;
5154 HOST_WIDE_INT first_bit, end_bit;
5155 int volatilep;
5156
5157 /* Start by getting the comparison codes. Fail if anything is volatile.
5158 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5159 it were surrounded with a NE_EXPR. */
5160
5161 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5162 return 0;
5163
5164 lcode = TREE_CODE (lhs);
5165 rcode = TREE_CODE (rhs);
5166
5167 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5168 {
5169 lhs = build2 (NE_EXPR, truth_type, lhs,
5170 build_int_cst (TREE_TYPE (lhs), 0));
5171 lcode = NE_EXPR;
5172 }
5173
5174 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5175 {
5176 rhs = build2 (NE_EXPR, truth_type, rhs,
5177 build_int_cst (TREE_TYPE (rhs), 0));
5178 rcode = NE_EXPR;
5179 }
5180
5181 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5182 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5183 return 0;
5184
5185 ll_arg = TREE_OPERAND (lhs, 0);
5186 lr_arg = TREE_OPERAND (lhs, 1);
5187 rl_arg = TREE_OPERAND (rhs, 0);
5188 rr_arg = TREE_OPERAND (rhs, 1);
5189
5190 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5191 if (simple_operand_p (ll_arg)
5192 && simple_operand_p (lr_arg))
5193 {
5194 if (operand_equal_p (ll_arg, rl_arg, 0)
5195 && operand_equal_p (lr_arg, rr_arg, 0))
5196 {
5197 result = combine_comparisons (loc, code, lcode, rcode,
5198 truth_type, ll_arg, lr_arg);
5199 if (result)
5200 return result;
5201 }
5202 else if (operand_equal_p (ll_arg, rr_arg, 0)
5203 && operand_equal_p (lr_arg, rl_arg, 0))
5204 {
5205 result = combine_comparisons (loc, code, lcode,
5206 swap_tree_comparison (rcode),
5207 truth_type, ll_arg, lr_arg);
5208 if (result)
5209 return result;
5210 }
5211 }
5212
5213 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5214 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5215
5216 /* If the RHS can be evaluated unconditionally and its operands are
5217 simple, it wins to evaluate the RHS unconditionally on machines
5218 with expensive branches. In this case, this isn't a comparison
5219 that can be merged. */
5220
5221 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5222 false) >= 2
5223 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5224 && simple_operand_p (rl_arg)
5225 && simple_operand_p (rr_arg))
5226 {
5227 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5228 if (code == TRUTH_OR_EXPR
5229 && lcode == NE_EXPR && integer_zerop (lr_arg)
5230 && rcode == NE_EXPR && integer_zerop (rr_arg)
5231 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5232 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5233 return build2_loc (loc, NE_EXPR, truth_type,
5234 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5235 ll_arg, rl_arg),
5236 build_int_cst (TREE_TYPE (ll_arg), 0));
5237
5238 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5239 if (code == TRUTH_AND_EXPR
5240 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5241 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5242 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5243 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5244 return build2_loc (loc, EQ_EXPR, truth_type,
5245 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5246 ll_arg, rl_arg),
5247 build_int_cst (TREE_TYPE (ll_arg), 0));
5248 }
5249
5250 /* See if the comparisons can be merged. Then get all the parameters for
5251 each side. */
5252
5253 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5254 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5255 return 0;
5256
5257 volatilep = 0;
5258 ll_inner = decode_field_reference (loc, ll_arg,
5259 &ll_bitsize, &ll_bitpos, &ll_mode,
5260 &ll_unsignedp, &volatilep, &ll_mask,
5261 &ll_and_mask);
5262 lr_inner = decode_field_reference (loc, lr_arg,
5263 &lr_bitsize, &lr_bitpos, &lr_mode,
5264 &lr_unsignedp, &volatilep, &lr_mask,
5265 &lr_and_mask);
5266 rl_inner = decode_field_reference (loc, rl_arg,
5267 &rl_bitsize, &rl_bitpos, &rl_mode,
5268 &rl_unsignedp, &volatilep, &rl_mask,
5269 &rl_and_mask);
5270 rr_inner = decode_field_reference (loc, rr_arg,
5271 &rr_bitsize, &rr_bitpos, &rr_mode,
5272 &rr_unsignedp, &volatilep, &rr_mask,
5273 &rr_and_mask);
5274
5275 /* The inner operation on the lhs of each comparison must be the same
5276 if we are to be able to do anything.
5277 Then see if we have constants. If not, the same must be true for
5278 the rhs's. */
5279 if (volatilep || ll_inner == 0 || rl_inner == 0
5280 || ! operand_equal_p (ll_inner, rl_inner, 0))
5281 return 0;
5282
5283 if (TREE_CODE (lr_arg) == INTEGER_CST
5284 && TREE_CODE (rr_arg) == INTEGER_CST)
5285 l_const = lr_arg, r_const = rr_arg;
5286 else if (lr_inner == 0 || rr_inner == 0
5287 || ! operand_equal_p (lr_inner, rr_inner, 0))
5288 return 0;
5289 else
5290 l_const = r_const = 0;
5291
5292 /* If either comparison code is not correct for our logical operation,
5293 fail. However, we can convert a one-bit comparison against zero into
5294 the opposite comparison against that bit being set in the field. */
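/* For example, when WANTED_CODE is EQ_EXPR and the left-hand test is
(x & 8) != 0, we can use the equivalent (x & 8) == 8 by making l_const
the single-bit mask itself. */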
5295
5296 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5297 if (lcode != wanted_code)
5298 {
5299 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5300 {
5301 /* Make the left operand unsigned, since we are only interested
5302 in the value of one bit. Otherwise we are doing the wrong
5303 thing below. */
5304 ll_unsignedp = 1;
5305 l_const = ll_mask;
5306 }
5307 else
5308 return 0;
5309 }
5310
5311 /* This is analogous to the code for l_const above. */
5312 if (rcode != wanted_code)
5313 {
5314 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5315 {
5316 rl_unsignedp = 1;
5317 r_const = rl_mask;
5318 }
5319 else
5320 return 0;
5321 }
5322
5323 /* See if we can find a mode that contains both fields being compared on
5324 the left. If we can't, fail. Otherwise, update all constants and masks
5325 to be relative to a field of that size. */
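/* For instance, two bit-fields that both live in the same byte can
typically be fetched with a single QImode load; get_best_mode decides
whether such a covering mode exists. */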
5326 first_bit = MIN (ll_bitpos, rl_bitpos);
5327 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5328 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5329 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5330 volatilep);
5331 if (lnmode == VOIDmode)
5332 return 0;
5333
5334 lnbitsize = GET_MODE_BITSIZE (lnmode);
5335 lnbitpos = first_bit & ~ (lnbitsize - 1);
5336 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5337 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5338
5339 if (BYTES_BIG_ENDIAN)
5340 {
5341 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5342 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5343 }
5344
5345 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5346 size_int (xll_bitpos));
5347 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5348 size_int (xrl_bitpos));
5349
5350 if (l_const)
5351 {
5352 l_const = fold_convert_loc (loc, lntype, l_const);
5353 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5354 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5355 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5356 fold_build1_loc (loc, BIT_NOT_EXPR,
5357 lntype, ll_mask))))
5358 {
5359 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5360
5361 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5362 }
5363 }
5364 if (r_const)
5365 {
5366 r_const = fold_convert_loc (loc, lntype, r_const);
5367 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5368 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5369 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5370 fold_build1_loc (loc, BIT_NOT_EXPR,
5371 lntype, rl_mask))))
5372 {
5373 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5374
5375 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5376 }
5377 }
5378
5379 /* If the right sides are not constant, do the same for them. Also,
5380 disallow this optimization if a size or signedness mismatch occurs
5381 between the left and right sides. */
5382 if (l_const == 0)
5383 {
5384 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5385 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5386 /* Make sure the two fields on the right
5387 correspond to the left without being swapped. */
5388 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5389 return 0;
5390
5391 first_bit = MIN (lr_bitpos, rr_bitpos);
5392 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5393 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5394 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5395 volatilep);
5396 if (rnmode == VOIDmode)
5397 return 0;
5398
5399 rnbitsize = GET_MODE_BITSIZE (rnmode);
5400 rnbitpos = first_bit & ~ (rnbitsize - 1);
5401 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5402 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5403
5404 if (BYTES_BIG_ENDIAN)
5405 {
5406 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5407 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5408 }
5409
5410 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5411 rntype, lr_mask),
5412 size_int (xlr_bitpos));
5413 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5414 rntype, rr_mask),
5415 size_int (xrr_bitpos));
5416
5417 /* Make a mask that corresponds to both fields being compared.
5418 Do this for both items being compared. If the operands are the
5419 same size and the bits being compared are in the same position
5420 then we can do this by masking both and comparing the masked
5421 results. */
5422 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5423 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5424 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5425 {
5426 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5427 ll_unsignedp || rl_unsignedp);
5428 if (! all_ones_mask_p (ll_mask, lnbitsize))
5429 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5430
5431 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5432 lr_unsignedp || rr_unsignedp);
5433 if (! all_ones_mask_p (lr_mask, rnbitsize))
5434 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5435
5436 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5437 }
5438
5439 /* There is still another way we can do something: If both pairs of
5440 fields being compared are adjacent, we may be able to make a wider
5441 field containing them both.
5442
5443 Note that we still must mask the lhs/rhs expressions. Furthermore,
5444 the mask must be shifted to account for the shift done by
5445 make_bit_field_ref. */
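/* For example, fields at bits 0-7 and 8-15 on the left, with their
right-hand counterparts laid out the same way, can be fetched as one
16-bit field on each side and compared with a single comparison. */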
5446 if ((ll_bitsize + ll_bitpos == rl_bitpos
5447 && lr_bitsize + lr_bitpos == rr_bitpos)
5448 || (ll_bitpos == rl_bitpos + rl_bitsize
5449 && lr_bitpos == rr_bitpos + rr_bitsize))
5450 {
5451 tree type;
5452
5453 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5454 ll_bitsize + rl_bitsize,
5455 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5456 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5457 lr_bitsize + rr_bitsize,
5458 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5459
5460 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5461 size_int (MIN (xll_bitpos, xrl_bitpos)));
5462 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5463 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5464
5465 /* Convert to the smaller type before masking out unwanted bits. */
5466 type = lntype;
5467 if (lntype != rntype)
5468 {
5469 if (lnbitsize > rnbitsize)
5470 {
5471 lhs = fold_convert_loc (loc, rntype, lhs);
5472 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5473 type = rntype;
5474 }
5475 else if (lnbitsize < rnbitsize)
5476 {
5477 rhs = fold_convert_loc (loc, lntype, rhs);
5478 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5479 type = lntype;
5480 }
5481 }
5482
5483 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5484 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5485
5486 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5487 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5488
5489 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5490 }
5491
5492 return 0;
5493 }
5494
5495 /* Handle the case of comparisons with constants. If there is something in
5496 common between the masks, those bits of the constants must be the same.
5497 If not, the whole condition is known: always false for an AND and always
5498 true for an OR. Test for this to avoid generating incorrect code below. */
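/* For instance, (x & 3) == 1 && (x & 5) == 4 would require bit 0 of x to
be both 1 and 0, so the AND of the two tests is always false. */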
5499 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5500 if (! integer_zerop (result)
5501 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5502 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5503 {
5504 if (wanted_code == NE_EXPR)
5505 {
5506 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5507 return constant_boolean_node (true, truth_type);
5508 }
5509 else
5510 {
5511 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5512 return constant_boolean_node (false, truth_type);
5513 }
5514 }
5515
5516 /* Construct the expression we will return. First get the component
5517 reference we will make. Unless the mask is all ones the width of
5518 that field, perform the mask operation. Then compare with the
5519 merged constant. */
5520 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5521 ll_unsignedp || rl_unsignedp);
5522
5523 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5524 if (! all_ones_mask_p (ll_mask, lnbitsize))
5525 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5526
5527 return build2_loc (loc, wanted_code, truth_type, result,
5528 const_binop (BIT_IOR_EXPR, l_const, r_const));
5529 }
5530 \f
5531 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5532 constant. */
5533
5534 static tree
5535 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5536 tree op0, tree op1)
5537 {
5538 tree arg0 = op0;
5539 enum tree_code op_code;
5540 tree comp_const;
5541 tree minmax_const;
5542 int consts_equal, consts_lt;
5543 tree inner;
5544
5545 STRIP_SIGN_NOPS (arg0);
5546
5547 op_code = TREE_CODE (arg0);
5548 minmax_const = TREE_OPERAND (arg0, 1);
5549 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5550 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5551 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5552 inner = TREE_OPERAND (arg0, 0);
5553
5554 /* If something does not permit us to optimize, return NULL_TREE. */
5555 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5556 || TREE_CODE (comp_const) != INTEGER_CST
5557 || TREE_OVERFLOW (comp_const)
5558 || TREE_CODE (minmax_const) != INTEGER_CST
5559 || TREE_OVERFLOW (minmax_const))
5560 return NULL_TREE;
5561
5562 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5563 and GT_EXPR, doing the rest with recursive calls using logical
5564 simplifications. */
5565 switch (code)
5566 {
5567 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5568 {
5569 tree tem
5570 = optimize_minmax_comparison (loc,
5571 invert_tree_comparison (code, false),
5572 type, op0, op1);
5573 if (tem)
5574 return invert_truthvalue_loc (loc, tem);
5575 return NULL_TREE;
5576 }
5577
5578 case GE_EXPR:
5579 return
5580 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5581 optimize_minmax_comparison
5582 (loc, EQ_EXPR, type, arg0, comp_const),
5583 optimize_minmax_comparison
5584 (loc, GT_EXPR, type, arg0, comp_const));
5585
5586 case EQ_EXPR:
5587 if (op_code == MAX_EXPR && consts_equal)
5588 /* MAX (X, 0) == 0 -> X <= 0 */
5589 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5590
5591 else if (op_code == MAX_EXPR && consts_lt)
5592 /* MAX (X, 0) == 5 -> X == 5 */
5593 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5594
5595 else if (op_code == MAX_EXPR)
5596 /* MAX (X, 0) == -1 -> false */
5597 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5598
5599 else if (consts_equal)
5600 /* MIN (X, 0) == 0 -> X >= 0 */
5601 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5602
5603 else if (consts_lt)
5604 /* MIN (X, 0) == 5 -> false */
5605 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5606
5607 else
5608 /* MIN (X, 0) == -1 -> X == -1 */
5609 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5610
5611 case GT_EXPR:
5612 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5613 /* MAX (X, 0) > 0 -> X > 0
5614 MAX (X, 0) > 5 -> X > 5 */
5615 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5616
5617 else if (op_code == MAX_EXPR)
5618 /* MAX (X, 0) > -1 -> true */
5619 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5620
5621 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5622 /* MIN (X, 0) > 0 -> false
5623 MIN (X, 0) > 5 -> false */
5624 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5625
5626 else
5627 /* MIN (X, 0) > -1 -> X > -1 */
5628 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5629
5630 default:
5631 return NULL_TREE;
5632 }
5633 }
5634 \f
5635 /* T is an integer expression that is being multiplied by, divided by, or
5636 taken modulo a constant C (CODE says which operation and what kind of
5637 divide or modulus). See if we can eliminate that operation by folding it with
5638 other operations already in T. WIDE_TYPE, if non-null, is a type that
5639 should be used for the computation if wider than our type.
5640
5641 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5642 (X * 2) + (Y * 4). We must, however, be assured that either the original
5643 expression would not overflow or that overflow is undefined for the type
5644 in the language in question.
5645
5646 If we return a non-null expression, it is an equivalent form of the
5647 original computation, but need not be in the original type.
5648
5649 We set *STRICT_OVERFLOW_P to true if the return value depends on
5650 signed overflow being undefined. Otherwise we do not change
5651 *STRICT_OVERFLOW_P. */
5652
5653 static tree
5654 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5655 bool *strict_overflow_p)
5656 {
5657 /* To avoid exponential search depth, refuse to allow recursion past
5658 three levels. Beyond that (1) it's highly unlikely that we'll find
5659 something interesting and (2) we've probably processed it before
5660 when we built the inner expression. */
5661
5662 static int depth;
5663 tree ret;
5664
5665 if (depth > 3)
5666 return NULL;
5667
5668 depth++;
5669 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5670 depth--;
5671
5672 return ret;
5673 }
5674
5675 static tree
5676 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5677 bool *strict_overflow_p)
5678 {
5679 tree type = TREE_TYPE (t);
5680 enum tree_code tcode = TREE_CODE (t);
5681 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5682 > GET_MODE_SIZE (TYPE_MODE (type)))
5683 ? wide_type : type);
5684 tree t1, t2;
5685 int same_p = tcode == code;
5686 tree op0 = NULL_TREE, op1 = NULL_TREE;
5687 bool sub_strict_overflow_p;
5688
5689 /* Don't deal with constants of zero here; they confuse the code below. */
5690 if (integer_zerop (c))
5691 return NULL_TREE;
5692
5693 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5694 op0 = TREE_OPERAND (t, 0);
5695
5696 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5697 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5698
5699 /* Note that we need not handle conditional operations here since fold
5700 already handles those cases. So just do arithmetic here. */
5701 switch (tcode)
5702 {
5703 case INTEGER_CST:
5704 /* For a constant, we can always simplify if we are a multiply
5705 or (for divide and modulus) if it is a multiple of our constant. */
5706 if (code == MULT_EXPR
5707 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5708 return const_binop (code, fold_convert (ctype, t),
5709 fold_convert (ctype, c));
5710 break;
5711
5712 CASE_CONVERT: case NON_LVALUE_EXPR:
5713 /* If op0 is an expression ... */
5714 if ((COMPARISON_CLASS_P (op0)
5715 || UNARY_CLASS_P (op0)
5716 || BINARY_CLASS_P (op0)
5717 || VL_EXP_CLASS_P (op0)
5718 || EXPRESSION_CLASS_P (op0))
5719 /* ... and has wrapping overflow, and its type is smaller
5720 than ctype, then we cannot pass through as widening. */
5721 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5722 && (TYPE_PRECISION (ctype)
5723 > TYPE_PRECISION (TREE_TYPE (op0))))
5724 /* ... or this is a truncation (t is narrower than op0),
5725 then we cannot pass through this narrowing. */
5726 || (TYPE_PRECISION (type)
5727 < TYPE_PRECISION (TREE_TYPE (op0)))
5728 /* ... or signedness changes for division or modulus,
5729 then we cannot pass through this conversion. */
5730 || (code != MULT_EXPR
5731 && (TYPE_UNSIGNED (ctype)
5732 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5733 /* ... or has undefined overflow while the converted to
5734 type has not, we cannot do the operation in the inner type
5735 as that would introduce undefined overflow. */
5736 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5737 && !TYPE_OVERFLOW_UNDEFINED (type))))
5738 break;
5739
5740 /* Pass the constant down and see if we can make a simplification. If
5741 we can, replace this expression with the inner simplification for
5742 possible later conversion to our type or some other type. */
5743 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5744 && TREE_CODE (t2) == INTEGER_CST
5745 && !TREE_OVERFLOW (t2)
5746 && (0 != (t1 = extract_muldiv (op0, t2, code,
5747 code == MULT_EXPR
5748 ? ctype : NULL_TREE,
5749 strict_overflow_p))))
5750 return t1;
5751 break;
5752
5753 case ABS_EXPR:
5754 /* If widening the type changes it from signed to unsigned, then we
5755 must avoid building ABS_EXPR itself as unsigned. */
5756 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5757 {
5758 tree cstype = (*signed_type_for) (ctype);
5759 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5760 != 0)
5761 {
5762 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5763 return fold_convert (ctype, t1);
5764 }
5765 break;
5766 }
5767 /* If the constant is negative, we cannot simplify this. */
5768 if (tree_int_cst_sgn (c) == -1)
5769 break;
5770 /* FALLTHROUGH */
5771 case NEGATE_EXPR:
5772 /* For division and modulus, type can't be unsigned, as e.g.
5773 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5774 For signed types, even with wrapping overflow, this is fine. */
5775 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5776 break;
5777 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5778 != 0)
5779 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5780 break;
5781
5782 case MIN_EXPR: case MAX_EXPR:
5783 /* If widening the type changes the signedness, then we can't perform
5784 this optimization as that changes the result. */
5785 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5786 break;
5787
5788 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5789 sub_strict_overflow_p = false;
5790 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5791 &sub_strict_overflow_p)) != 0
5792 && (t2 = extract_muldiv (op1, c, code, wide_type,
5793 &sub_strict_overflow_p)) != 0)
5794 {
5795 if (tree_int_cst_sgn (c) < 0)
5796 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5797 if (sub_strict_overflow_p)
5798 *strict_overflow_p = true;
5799 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5800 fold_convert (ctype, t2));
5801 }
5802 break;
5803
5804 case LSHIFT_EXPR: case RSHIFT_EXPR:
5805 /* If the second operand is constant, this is a multiplication
5806 or floor division by a power of two, so we can treat it that
5807 way unless the multiplier or divisor overflows. Signed
5808 left-shift overflow is implementation-defined rather than
5809 undefined in C90, so do not convert signed left shift into
5810 multiplication. */
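/* For example, an unsigned (X << 3) that is being multiplied by 4 is
rewritten here as (X * 8) * 4, which the recursive call then combines
into X * 32. */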
5811 if (TREE_CODE (op1) == INTEGER_CST
5812 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5813 /* const_binop may not detect overflow correctly,
5814 so check for it explicitly here. */
5815 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5816 && 0 != (t1 = fold_convert (ctype,
5817 const_binop (LSHIFT_EXPR,
5818 size_one_node,
5819 op1)))
5820 && !TREE_OVERFLOW (t1))
5821 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5822 ? MULT_EXPR : FLOOR_DIV_EXPR,
5823 ctype,
5824 fold_convert (ctype, op0),
5825 t1),
5826 c, code, wide_type, strict_overflow_p);
5827 break;
5828
5829 case PLUS_EXPR: case MINUS_EXPR:
5830 /* See if we can eliminate the operation on both sides. If we can, we
5831 can return a new PLUS or MINUS. If we can't, the only remaining
5832 cases where we can do anything are if the second operand is a
5833 constant. */
5834 sub_strict_overflow_p = false;
5835 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5836 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5837 if (t1 != 0 && t2 != 0
5838 && (code == MULT_EXPR
5839 /* If not multiplication, we can only do this if both operands
5840 are divisible by c. */
5841 || (multiple_of_p (ctype, op0, c)
5842 && multiple_of_p (ctype, op1, c))))
5843 {
5844 if (sub_strict_overflow_p)
5845 *strict_overflow_p = true;
5846 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5847 fold_convert (ctype, t2));
5848 }
5849
5850 /* If this was a subtraction, negate OP1 and set it to be an addition.
5851 This simplifies the logic below. */
5852 if (tcode == MINUS_EXPR)
5853 {
5854 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5855 /* If OP1 was not easily negatable, the constant may be OP0. */
5856 if (TREE_CODE (op0) == INTEGER_CST)
5857 {
5858 tree tem = op0;
5859 op0 = op1;
5860 op1 = tem;
5861 tem = t1;
5862 t1 = t2;
5863 t2 = tem;
5864 }
5865 }
5866
5867 if (TREE_CODE (op1) != INTEGER_CST)
5868 break;
5869
5870 /* If either OP1 or C are negative, this optimization is not safe for
5871 some of the division and remainder types while for others we need
5872 to change the code. */
5873 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5874 {
5875 if (code == CEIL_DIV_EXPR)
5876 code = FLOOR_DIV_EXPR;
5877 else if (code == FLOOR_DIV_EXPR)
5878 code = CEIL_DIV_EXPR;
5879 else if (code != MULT_EXPR
5880 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5881 break;
5882 }
5883
5884 /* If it's a multiply or a division/modulus operation of a multiple
5885 of our constant, do the operation and verify it doesn't overflow. */
5886 if (code == MULT_EXPR
5887 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5888 {
5889 op1 = const_binop (code, fold_convert (ctype, op1),
5890 fold_convert (ctype, c));
5891 /* We allow the constant to overflow with wrapping semantics. */
5892 if (op1 == 0
5893 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5894 break;
5895 }
5896 else
5897 break;
5898
5899 /* If we have an unsigned type, we cannot widen the operation since it
5900 will change the result if the original computation overflowed. */
5901 if (TYPE_UNSIGNED (ctype) && ctype != type)
5902 break;
5903
5904 /* If we were able to eliminate our operation from the first side,
5905 apply our operation to the second side and reform the PLUS. */
5906 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5907 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5908
5909 /* The last case is if we are a multiply. In that case, we can
5910 apply the distributive law to commute the multiply and addition
5911 if the multiplication of the constants doesn't overflow
5912 and overflow is defined. With undefined overflow
5913 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
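/* For instance, with a wrapping unsigned type, (X + 3) * 4 can be
rewritten here as X * 4 + 12. */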
5914 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5915 return fold_build2 (tcode, ctype,
5916 fold_build2 (code, ctype,
5917 fold_convert (ctype, op0),
5918 fold_convert (ctype, c)),
5919 op1);
5920
5921 break;
5922
5923 case MULT_EXPR:
5924 /* We have a special case here if we are doing something like
5925 (C * 8) % 4 since we know that's zero. */
5926 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5927 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5928 /* If the multiplication can overflow we cannot optimize this. */
5929 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5930 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5931 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5932 {
5933 *strict_overflow_p = true;
5934 return omit_one_operand (type, integer_zero_node, op0);
5935 }
5936
5937 /* ... fall through ... */
5938
5939 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5940 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5941 /* If we can extract our operation from the LHS, do so and return a
5942 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5943 do something only if the second operand is a constant. */
5944 if (same_p
5945 && (t1 = extract_muldiv (op0, c, code, wide_type,
5946 strict_overflow_p)) != 0)
5947 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5948 fold_convert (ctype, op1));
5949 else if (tcode == MULT_EXPR && code == MULT_EXPR
5950 && (t1 = extract_muldiv (op1, c, code, wide_type,
5951 strict_overflow_p)) != 0)
5952 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5953 fold_convert (ctype, t1));
5954 else if (TREE_CODE (op1) != INTEGER_CST)
5955 return 0;
5956
5957 /* If these are the same operation types, we can associate them
5958 assuming no overflow. */
5959 if (tcode == code)
5960 {
5961 bool overflow_p = false;
5962 bool overflow_mul_p;
5963 signop sign = TYPE_SIGN (ctype);
5964 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5965 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5966 if (overflow_mul_p
5967 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5968 overflow_p = true;
5969 if (!overflow_p)
5970 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5971 wide_int_to_tree (ctype, mul));
5972 }
5973
5974 /* If these operations "cancel" each other, we have the main
5975 optimizations of this pass, which occur when either constant is a
5976 multiple of the other, in which case we replace this with an
5977 operation of either CODE or TCODE.
5978
5979 If we have an unsigned type, we cannot do this since it will change
5980 the result if the original computation overflowed. */
5981 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5982 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5983 || (tcode == MULT_EXPR
5984 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5985 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5986 && code != MULT_EXPR)))
5987 {
5988 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5989 {
5990 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5991 *strict_overflow_p = true;
5992 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5993 fold_convert (ctype,
5994 const_binop (TRUNC_DIV_EXPR,
5995 op1, c)));
5996 }
5997 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
5998 {
5999 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6000 *strict_overflow_p = true;
6001 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6002 fold_convert (ctype,
6003 const_binop (TRUNC_DIV_EXPR,
6004 c, op1)));
6005 }
6006 }
6007 break;
6008
6009 default:
6010 break;
6011 }
6012
6013 return 0;
6014 }
6015 \f
6016 /* Return a node which has the indicated constant VALUE (either 0 or
6017 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6018 and is of the indicated TYPE. */
6019
6020 tree
6021 constant_boolean_node (bool value, tree type)
6022 {
6023 if (type == integer_type_node)
6024 return value ? integer_one_node : integer_zero_node;
6025 else if (type == boolean_type_node)
6026 return value ? boolean_true_node : boolean_false_node;
6027 else if (TREE_CODE (type) == VECTOR_TYPE)
6028 return build_vector_from_val (type,
6029 build_int_cst (TREE_TYPE (type),
6030 value ? -1 : 0));
6031 else
6032 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6033 }
6034
6035
6036 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6037 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6038 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6039 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6040 COND is the first argument to CODE; otherwise (as in the example
6041 given here), it is the second argument. TYPE is the type of the
6042 original expression. Return NULL_TREE if no simplification is
6043 possible. */
6044
6045 static tree
6046 fold_binary_op_with_conditional_arg (location_t loc,
6047 enum tree_code code,
6048 tree type, tree op0, tree op1,
6049 tree cond, tree arg, int cond_first_p)
6050 {
6051 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6052 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6053 tree test, true_value, false_value;
6054 tree lhs = NULL_TREE;
6055 tree rhs = NULL_TREE;
6056 enum tree_code cond_code = COND_EXPR;
6057
6058 if (TREE_CODE (cond) == COND_EXPR
6059 || TREE_CODE (cond) == VEC_COND_EXPR)
6060 {
6061 test = TREE_OPERAND (cond, 0);
6062 true_value = TREE_OPERAND (cond, 1);
6063 false_value = TREE_OPERAND (cond, 2);
6064 /* If this operand throws an exception, then it does not make
6065 sense to try to perform a logical or arithmetic operation
6066 involving it. */
6067 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6068 lhs = true_value;
6069 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6070 rhs = false_value;
6071 }
6072 else
6073 {
6074 tree testtype = TREE_TYPE (cond);
6075 test = cond;
6076 true_value = constant_boolean_node (true, testtype);
6077 false_value = constant_boolean_node (false, testtype);
6078 }
6079
6080 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6081 cond_code = VEC_COND_EXPR;
6082
6083 /* This transformation is only worthwhile if we don't have to wrap ARG
6084 in a SAVE_EXPR and the operation can be simplified without recursing
6085 on at least one of the branches once it is pushed inside the COND_EXPR. */
6086 if (!TREE_CONSTANT (arg)
6087 && (TREE_SIDE_EFFECTS (arg)
6088 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6089 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6090 return NULL_TREE;
6091
6092 arg = fold_convert_loc (loc, arg_type, arg);
6093 if (lhs == 0)
6094 {
6095 true_value = fold_convert_loc (loc, cond_type, true_value);
6096 if (cond_first_p)
6097 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6098 else
6099 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6100 }
6101 if (rhs == 0)
6102 {
6103 false_value = fold_convert_loc (loc, cond_type, false_value);
6104 if (cond_first_p)
6105 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6106 else
6107 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6108 }
6109
6110 /* Check that we have simplified at least one of the branches. */
6111 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6112 return NULL_TREE;
6113
6114 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6115 }
6116
6117 \f
6118 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6119
6120 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6121 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6122 ADDEND is the same as X.
6123
6124 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6125 and finite. The problematic cases are when X is zero, and its mode
6126 has signed zeros. In the case of rounding towards -infinity,
6127 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6128 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6129
6130 bool
6131 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6132 {
6133 if (!real_zerop (addend))
6134 return false;
6135
6136 /* Don't allow the fold with -fsignaling-nans. */
6137 if (HONOR_SNANS (TYPE_MODE (type)))
6138 return false;
6139
6140 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6141 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6142 return true;
6143
6144 /* In a vector or complex, we would need to check the sign of all zeros. */
6145 if (TREE_CODE (addend) != REAL_CST)
6146 return false;
6147
6148 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6149 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6150 negate = !negate;
6151
6152 /* The mode has signed zeros, and we have to honor their sign.
6153 In this situation, there is only one case we can return true for.
6154 X - 0 is the same as X unless rounding towards -infinity is
6155 supported. */
6156 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6157 }
6158
6159 /* Subroutine of fold() that checks comparisons of built-in math
6160 functions against real constants.
6161
6162 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6163 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6164 is the type of the result and ARG0 and ARG1 are the operands of the
6165 comparison. ARG1 must be a TREE_REAL_CST.
6166
6167 The function returns the constant folded tree if a simplification
6168 can be made, and NULL_TREE otherwise. */
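/* For example, sqrt(x) > 2.0 can be folded to x > 4.0 (a negative x makes
both sides false), while sqrt(x) < 2.0 generally needs x >= 0 && x < 4.0
unless NaNs can be ignored. */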
6169
6170 static tree
6171 fold_mathfn_compare (location_t loc,
6172 enum built_in_function fcode, enum tree_code code,
6173 tree type, tree arg0, tree arg1)
6174 {
6175 REAL_VALUE_TYPE c;
6176
6177 if (BUILTIN_SQRT_P (fcode))
6178 {
6179 tree arg = CALL_EXPR_ARG (arg0, 0);
6180 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6181
6182 c = TREE_REAL_CST (arg1);
6183 if (REAL_VALUE_NEGATIVE (c))
6184 {
6185 /* sqrt(x) < y is always false, if y is negative. */
6186 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6187 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6188
6189 /* sqrt(x) > y is always true, if y is negative and we
6190 don't care about NaNs, i.e. negative values of x. */
6191 if (code == NE_EXPR || !HONOR_NANS (mode))
6192 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6193
6194 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6195 return fold_build2_loc (loc, GE_EXPR, type, arg,
6196 build_real (TREE_TYPE (arg), dconst0));
6197 }
6198 else if (code == GT_EXPR || code == GE_EXPR)
6199 {
6200 REAL_VALUE_TYPE c2;
6201
6202 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6203 real_convert (&c2, mode, &c2);
6204
6205 if (REAL_VALUE_ISINF (c2))
6206 {
6207 /* sqrt(x) > y is x == +Inf, when y is very large. */
6208 if (HONOR_INFINITIES (mode))
6209 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6210 build_real (TREE_TYPE (arg), c2));
6211
6212 /* sqrt(x) > y is always false, when y is very large
6213 and we don't care about infinities. */
6214 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6215 }
6216
6217 /* sqrt(x) > c is the same as x > c*c. */
6218 return fold_build2_loc (loc, code, type, arg,
6219 build_real (TREE_TYPE (arg), c2));
6220 }
6221 else if (code == LT_EXPR || code == LE_EXPR)
6222 {
6223 REAL_VALUE_TYPE c2;
6224
6225 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6226 real_convert (&c2, mode, &c2);
6227
6228 if (REAL_VALUE_ISINF (c2))
6229 {
6230 /* sqrt(x) < y is always true, when y is a very large
6231 value and we don't care about NaNs or Infinities. */
6232 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6233 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6234
6235 /* sqrt(x) < y is x != +Inf when y is very large and we
6236 don't care about NaNs. */
6237 if (! HONOR_NANS (mode))
6238 return fold_build2_loc (loc, NE_EXPR, type, arg,
6239 build_real (TREE_TYPE (arg), c2));
6240
6241 /* sqrt(x) < y is x >= 0 when y is very large and we
6242 don't care about Infinities. */
6243 if (! HONOR_INFINITIES (mode))
6244 return fold_build2_loc (loc, GE_EXPR, type, arg,
6245 build_real (TREE_TYPE (arg), dconst0));
6246
6247 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6248 arg = save_expr (arg);
6249 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6250 fold_build2_loc (loc, GE_EXPR, type, arg,
6251 build_real (TREE_TYPE (arg),
6252 dconst0)),
6253 fold_build2_loc (loc, NE_EXPR, type, arg,
6254 build_real (TREE_TYPE (arg),
6255 c2)));
6256 }
6257
6258 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6259 if (! HONOR_NANS (mode))
6260 return fold_build2_loc (loc, code, type, arg,
6261 build_real (TREE_TYPE (arg), c2));
6262
6263 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6264 arg = save_expr (arg);
6265 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6266 fold_build2_loc (loc, GE_EXPR, type, arg,
6267 build_real (TREE_TYPE (arg),
6268 dconst0)),
6269 fold_build2_loc (loc, code, type, arg,
6270 build_real (TREE_TYPE (arg),
6271 c2)));
6272 }
6273 }
6274
6275 return NULL_TREE;
6276 }
6277
6278 /* Subroutine of fold() that optimizes comparisons against Infinities,
6279 either +Inf or -Inf.
6280
6281 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6282 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6283 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6284
6285 The function returns the constant folded tree if a simplification
6286 can be made, and NULL_TREE otherwise. */
6287
6288 static tree
6289 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6290 tree arg0, tree arg1)
6291 {
6292 enum machine_mode mode;
6293 REAL_VALUE_TYPE max;
6294 tree temp;
6295 bool neg;
6296
6297 mode = TYPE_MODE (TREE_TYPE (arg0));
6298
6299 /* For negative infinity swap the sense of the comparison. */
6300 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6301 if (neg)
6302 code = swap_tree_comparison (code);
6303
6304 switch (code)
6305 {
6306 case GT_EXPR:
6307 /* x > +Inf is always false, if we ignore sNaNs. */
6308 if (HONOR_SNANS (mode))
6309 return NULL_TREE;
6310 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6311
6312 case LE_EXPR:
6313 /* x <= +Inf is always true, if we don't care about NaNs. */
6314 if (! HONOR_NANS (mode))
6315 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6316
6317 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6318 arg0 = save_expr (arg0);
6319 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6320
6321 case EQ_EXPR:
6322 case GE_EXPR:
6323 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6324 real_maxval (&max, neg, mode);
6325 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6326 arg0, build_real (TREE_TYPE (arg0), max));
6327
6328 case LT_EXPR:
6329 /* x < +Inf is always equal to x <= DBL_MAX. */
6330 real_maxval (&max, neg, mode);
6331 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6332 arg0, build_real (TREE_TYPE (arg0), max));
6333
6334 case NE_EXPR:
6335 /* x != +Inf is always equal to !(x > DBL_MAX). */
6336 real_maxval (&max, neg, mode);
6337 if (! HONOR_NANS (mode))
6338 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6339 arg0, build_real (TREE_TYPE (arg0), max));
6340
6341 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6343 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6344
6345 default:
6346 break;
6347 }
6348
6349 return NULL_TREE;
6350 }
6351
6352 /* Subroutine of fold() that optimizes comparisons of a division by
6353 a nonzero integer constant against an integer constant, i.e.
6354 X/C1 op C2.
6355
6356 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6357 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6358 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6359
6360 The function returns the constant folded tree if a simplification
6361 can be made, and NULL_TREE otherwise. */
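/* For example, for unsigned X, the comparison X / 3 == 2 holds exactly
when X is in [6, 8], so it folds into the range check 6 <= X && X <= 8. */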
6362
6363 static tree
6364 fold_div_compare (location_t loc,
6365 enum tree_code code, tree type, tree arg0, tree arg1)
6366 {
6367 tree prod, tmp, hi, lo;
6368 tree arg00 = TREE_OPERAND (arg0, 0);
6369 tree arg01 = TREE_OPERAND (arg0, 1);
6370 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6371 bool neg_overflow = false;
6372 bool overflow;
6373
6374 /* We have to do this the hard way to detect unsigned overflow.
6375 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6376 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6377 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6378 neg_overflow = false;
6379
6380 if (sign == UNSIGNED)
6381 {
6382 tmp = int_const_binop (MINUS_EXPR, arg01,
6383 build_int_cst (TREE_TYPE (arg01), 1));
6384 lo = prod;
6385
6386 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6387 val = wi::add (prod, tmp, sign, &overflow);
6388 hi = force_fit_type (TREE_TYPE (arg00), val,
6389 -1, overflow | TREE_OVERFLOW (prod));
6390 }
6391 else if (tree_int_cst_sgn (arg01) >= 0)
6392 {
6393 tmp = int_const_binop (MINUS_EXPR, arg01,
6394 build_int_cst (TREE_TYPE (arg01), 1));
6395 switch (tree_int_cst_sgn (arg1))
6396 {
6397 case -1:
6398 neg_overflow = true;
6399 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6400 hi = prod;
6401 break;
6402
6403 case 0:
6404 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6405 hi = tmp;
6406 break;
6407
6408 case 1:
6409 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6410 lo = prod;
6411 break;
6412
6413 default:
6414 gcc_unreachable ();
6415 }
6416 }
6417 else
6418 {
6419 /* A negative divisor reverses the relational operators. */
6420 code = swap_tree_comparison (code);
6421
6422 tmp = int_const_binop (PLUS_EXPR, arg01,
6423 build_int_cst (TREE_TYPE (arg01), 1));
6424 switch (tree_int_cst_sgn (arg1))
6425 {
6426 case -1:
6427 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6428 lo = prod;
6429 break;
6430
6431 case 0:
6432 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6433 lo = tmp;
6434 break;
6435
6436 case 1:
6437 neg_overflow = true;
6438 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6439 hi = prod;
6440 break;
6441
6442 default:
6443 gcc_unreachable ();
6444 }
6445 }
6446
6447 switch (code)
6448 {
6449 case EQ_EXPR:
6450 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6451 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6452 if (TREE_OVERFLOW (hi))
6453 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6454 if (TREE_OVERFLOW (lo))
6455 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6456 return build_range_check (loc, type, arg00, 1, lo, hi);
6457
6458 case NE_EXPR:
6459 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6460 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6461 if (TREE_OVERFLOW (hi))
6462 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6463 if (TREE_OVERFLOW (lo))
6464 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6465 return build_range_check (loc, type, arg00, 0, lo, hi);
6466
6467 case LT_EXPR:
6468 if (TREE_OVERFLOW (lo))
6469 {
6470 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6471 return omit_one_operand_loc (loc, type, tmp, arg00);
6472 }
6473 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6474
6475 case LE_EXPR:
6476 if (TREE_OVERFLOW (hi))
6477 {
6478 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6479 return omit_one_operand_loc (loc, type, tmp, arg00);
6480 }
6481 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6482
6483 case GT_EXPR:
6484 if (TREE_OVERFLOW (hi))
6485 {
6486 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6487 return omit_one_operand_loc (loc, type, tmp, arg00);
6488 }
6489 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6490
6491 case GE_EXPR:
6492 if (TREE_OVERFLOW (lo))
6493 {
6494 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6495 return omit_one_operand_loc (loc, type, tmp, arg00);
6496 }
6497 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6498
6499 default:
6500 break;
6501 }
6502
6503 return NULL_TREE;
6504 }
6505
6506
6507 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6508 equality/inequality test, then return a simplified form of the test
6509 using a sign testing. Otherwise return NULL. TYPE is the desired
6510 result type. */
6511
6512 static tree
6513 fold_single_bit_test_into_sign_test (location_t loc,
6514 enum tree_code code, tree arg0, tree arg1,
6515 tree result_type)
6516 {
6517 /* If this is testing a single bit, we can optimize the test. */
6518 if ((code == NE_EXPR || code == EQ_EXPR)
6519 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6520 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6521 {
6522 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6523 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6524 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6525
6526 if (arg00 != NULL_TREE
6527 /* This is only a win if casting to a signed type is cheap,
6528 i.e. when arg00's type is not a partial mode. */
6529 && TYPE_PRECISION (TREE_TYPE (arg00))
6530 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6531 {
6532 tree stype = signed_type_for (TREE_TYPE (arg00));
6533 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6534 result_type,
6535 fold_convert_loc (loc, stype, arg00),
6536 build_int_cst (stype, 0));
6537 }
6538 }
6539
6540 return NULL_TREE;
6541 }
6542
6543 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6544 equality/inequality test, then return a simplified form of
6545 the test using shifts and logical operations. Otherwise return
6546 NULL. TYPE is the desired result type. */
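/* For example, (x & 4) != 0 can become (x >> 2) & 1, and (x & 4) == 0
can become ((x >> 2) & 1) ^ 1. */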
6547
6548 tree
6549 fold_single_bit_test (location_t loc, enum tree_code code,
6550 tree arg0, tree arg1, tree result_type)
6551 {
6552 /* If this is testing a single bit, we can optimize the test. */
6553 if ((code == NE_EXPR || code == EQ_EXPR)
6554 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6555 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6556 {
6557 tree inner = TREE_OPERAND (arg0, 0);
6558 tree type = TREE_TYPE (arg0);
6559 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6560 enum machine_mode operand_mode = TYPE_MODE (type);
6561 int ops_unsigned;
6562 tree signed_type, unsigned_type, intermediate_type;
6563 tree tem, one;
6564
6565 /* First, see if we can fold the single bit test into a sign-bit
6566 test. */
6567 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6568 result_type);
6569 if (tem)
6570 return tem;
6571
6572 /* Otherwise we have (A & C) != 0 where C is a single bit,
6573 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6574 Similarly for (A & C) == 0. */
6575
6576 /* If INNER is a right shift by a constant and it plus BITNUM does
6577 not overflow, adjust BITNUM and INNER. */
6578 if (TREE_CODE (inner) == RSHIFT_EXPR
6579 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6580 && bitnum < TYPE_PRECISION (type)
6581 && wi::ltu_p (TREE_OPERAND (inner, 1),
6582 TYPE_PRECISION (type) - bitnum))
6583 {
6584 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6585 inner = TREE_OPERAND (inner, 0);
6586 }
6587
6588 /* If we are going to be able to omit the AND below, we must do our
6589 operations as unsigned. If we must use the AND, we have a choice.
6590 Normally unsigned is faster, but for some machines signed is. */
6591 #ifdef LOAD_EXTEND_OP
6592 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6593 && !flag_syntax_only) ? 0 : 1;
6594 #else
6595 ops_unsigned = 1;
6596 #endif
6597
6598 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6599 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6600 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6601 inner = fold_convert_loc (loc, intermediate_type, inner);
6602
6603 if (bitnum != 0)
6604 inner = build2 (RSHIFT_EXPR, intermediate_type,
6605 inner, size_int (bitnum));
6606
6607 one = build_int_cst (intermediate_type, 1);
6608
6609 if (code == EQ_EXPR)
6610 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6611
6612 /* Put the AND last so it can combine with more things. */
6613 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6614
6615 /* Make sure to return the proper type. */
6616 inner = fold_convert_loc (loc, result_type, inner);
6617
6618 return inner;
6619 }
6620 return NULL_TREE;
6621 }
6622
6623 /* Check whether we are allowed to reorder operands arg0 and arg1,
6624 such that the evaluation of arg1 occurs before arg0. */
6625
6626 static bool
6627 reorder_operands_p (const_tree arg0, const_tree arg1)
6628 {
6629 if (! flag_evaluation_order)
6630 return true;
6631 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6632 return true;
6633 return ! TREE_SIDE_EFFECTS (arg0)
6634 && ! TREE_SIDE_EFFECTS (arg1);
6635 }
6636
6637 /* Test whether it is preferable to swap two operands, ARG0 and
6638 ARG1, for example because ARG0 is an integer constant and ARG1
6639 isn't. If REORDER is true, only recommend swapping if we can
6640 evaluate the operands in reverse order. */
6641
6642 bool
6643 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6644 {
6645 if (CONSTANT_CLASS_P (arg1))
6646 return 0;
6647 if (CONSTANT_CLASS_P (arg0))
6648 return 1;
6649
6650 STRIP_NOPS (arg0);
6651 STRIP_NOPS (arg1);
6652
6653 if (TREE_CONSTANT (arg1))
6654 return 0;
6655 if (TREE_CONSTANT (arg0))
6656 return 1;
6657
6658 if (reorder && flag_evaluation_order
6659 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6660 return 0;
6661
6662 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6663 for commutative and comparison operators. Ensuring a canonical
6664 form allows the optimizers to find additional redundancies without
6665 having to explicitly check for both orderings. */
6666 if (TREE_CODE (arg0) == SSA_NAME
6667 && TREE_CODE (arg1) == SSA_NAME
6668 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6669 return 1;
6670
6671 /* Put SSA_NAMEs last. */
6672 if (TREE_CODE (arg1) == SSA_NAME)
6673 return 0;
6674 if (TREE_CODE (arg0) == SSA_NAME)
6675 return 1;
6676
6677 /* Put variables last. */
6678 if (DECL_P (arg1))
6679 return 0;
6680 if (DECL_P (arg0))
6681 return 1;
6682
6683 return 0;
6684 }
6685
6686 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6687 ARG0 is extended to a wider type. */
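/* For example, if C has type short, (int) C == 100000 can be folded to
false outright, while (int) C < 100 can be done as C < (short) 100. */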
6688
6689 static tree
6690 fold_widened_comparison (location_t loc, enum tree_code code,
6691 tree type, tree arg0, tree arg1)
6692 {
6693 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6694 tree arg1_unw;
6695 tree shorter_type, outer_type;
6696 tree min, max;
6697 bool above, below;
6698
6699 if (arg0_unw == arg0)
6700 return NULL_TREE;
6701 shorter_type = TREE_TYPE (arg0_unw);
6702
6703 #ifdef HAVE_canonicalize_funcptr_for_compare
6704 /* Disable this optimization if we're casting a function pointer
6705 type on targets that require function pointer canonicalization. */
6706 if (HAVE_canonicalize_funcptr_for_compare
6707 && TREE_CODE (shorter_type) == POINTER_TYPE
6708 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6709 return NULL_TREE;
6710 #endif
6711
6712 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6713 return NULL_TREE;
6714
6715 arg1_unw = get_unwidened (arg1, NULL_TREE);
6716
6717 /* If possible, express the comparison in the shorter mode. */
6718 if ((code == EQ_EXPR || code == NE_EXPR
6719 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6720 && (TREE_TYPE (arg1_unw) == shorter_type
6721 || ((TYPE_PRECISION (shorter_type)
6722 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6723 && (TYPE_UNSIGNED (shorter_type)
6724 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6725 || (TREE_CODE (arg1_unw) == INTEGER_CST
6726 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6727 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6728 && int_fits_type_p (arg1_unw, shorter_type))))
6729 return fold_build2_loc (loc, code, type, arg0_unw,
6730 fold_convert_loc (loc, shorter_type, arg1_unw));
6731
6732 if (TREE_CODE (arg1_unw) != INTEGER_CST
6733 || TREE_CODE (shorter_type) != INTEGER_TYPE
6734 || !int_fits_type_p (arg1_unw, shorter_type))
6735 return NULL_TREE;
6736
6737 /* If we are comparing with an integer that does not fit into the range
6738 of the shorter type, the result is known. */
6739 outer_type = TREE_TYPE (arg1_unw);
6740 min = lower_bound_in_type (outer_type, shorter_type);
6741 max = upper_bound_in_type (outer_type, shorter_type);
6742
6743 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6744 max, arg1_unw));
6745 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6746 arg1_unw, min));
6747
6748 switch (code)
6749 {
6750 case EQ_EXPR:
6751 if (above || below)
6752 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6753 break;
6754
6755 case NE_EXPR:
6756 if (above || below)
6757 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6758 break;
6759
6760 case LT_EXPR:
6761 case LE_EXPR:
6762 if (above)
6763 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6764 else if (below)
6765 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6766 break;

6767 case GT_EXPR:
6768 case GE_EXPR:
6769 if (above)
6770 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6771 else if (below)
6772 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6773 break;

6774 default:
6775 break;
6776 }
6777
6778 return NULL_TREE;
6779 }
6780
6781 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6782 ARG0 just the signedness is changed. */
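/* For example, (int) u == 5, where u has type unsigned int, becomes
u == 5U, removing the conversion. */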
6783
6784 static tree
6785 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6786 tree arg0, tree arg1)
6787 {
6788 tree arg0_inner;
6789 tree inner_type, outer_type;
6790
6791 if (!CONVERT_EXPR_P (arg0))
6792 return NULL_TREE;
6793
6794 outer_type = TREE_TYPE (arg0);
6795 arg0_inner = TREE_OPERAND (arg0, 0);
6796 inner_type = TREE_TYPE (arg0_inner);
6797
6798 #ifdef HAVE_canonicalize_funcptr_for_compare
6799 /* Disable this optimization if we're casting a function pointer
6800 type on targets that require function pointer canonicalization. */
6801 if (HAVE_canonicalize_funcptr_for_compare
6802 && TREE_CODE (inner_type) == POINTER_TYPE
6803 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6804 return NULL_TREE;
6805 #endif
6806
6807 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6808 return NULL_TREE;
6809
6810 if (TREE_CODE (arg1) != INTEGER_CST
6811 && !(CONVERT_EXPR_P (arg1)
6812 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6813 return NULL_TREE;
6814
6815 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6816 && code != NE_EXPR
6817 && code != EQ_EXPR)
6818 return NULL_TREE;
6819
6820 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6821 return NULL_TREE;
6822
6823 if (TREE_CODE (arg1) == INTEGER_CST)
6824 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6825 TREE_OVERFLOW (arg1));
6826 else
6827 arg1 = fold_convert_loc (loc, inner_type, arg1);
6828
6829 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6830 }
6831
6832 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6833 the step of the array. Reconstructs s and delta in the case of s *
6834 delta being an integer constant (and thus already folded). ADDR is
6835 the address. OP1 is the multiplicative expression. If the
6836 function succeeds, the new address expression is returned.
6837 Otherwise NULL_TREE is returned. LOC is the location of the
6838 resulting expression. */
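/* For example, given int a[10], an address computed as &a[i] p+ d * 4
(with 4 being sizeof (int) on typical targets) can be rewritten as
&a[i + d]. */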
6839
6840 static tree
6841 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6842 {
6843 tree s, delta, step;
6844 tree ref = TREE_OPERAND (addr, 0), pref;
6845 tree ret, pos;
6846 tree itype;
6847 bool mdim = false;
6848
6849 /* Strip the nops that might be added when converting op1 to sizetype. */
6850 STRIP_NOPS (op1);
6851
6852 /* Canonicalize op1 into a possibly non-constant delta
6853 and an INTEGER_CST s. */
6854 if (TREE_CODE (op1) == MULT_EXPR)
6855 {
6856 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6857
6858 STRIP_NOPS (arg0);
6859 STRIP_NOPS (arg1);
6860
6861 if (TREE_CODE (arg0) == INTEGER_CST)
6862 {
6863 s = arg0;
6864 delta = arg1;
6865 }
6866 else if (TREE_CODE (arg1) == INTEGER_CST)
6867 {
6868 s = arg1;
6869 delta = arg0;
6870 }
6871 else
6872 return NULL_TREE;
6873 }
6874 else if (TREE_CODE (op1) == INTEGER_CST)
6875 {
6876 delta = op1;
6877 s = NULL_TREE;
6878 }
6879 else
6880 {
6881       /* Treat op1 as delta * 1.  */
6882 delta = op1;
6883 s = integer_one_node;
6884 }
6885
6886 /* Handle &x.array the same as we would handle &x.array[0]. */
6887 if (TREE_CODE (ref) == COMPONENT_REF
6888 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6889 {
6890 tree domain;
6891
6892 /* Remember if this was a multi-dimensional array. */
6893 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6894 mdim = true;
6895
6896 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6897 if (! domain)
6898 goto cont;
6899 itype = TREE_TYPE (domain);
6900
6901 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6902 if (TREE_CODE (step) != INTEGER_CST)
6903 goto cont;
6904
6905 if (s)
6906 {
6907 if (! tree_int_cst_equal (step, s))
6908 goto cont;
6909 }
6910 else
6911 {
6912 	  /* Check whether delta is a multiple of step.  */
6913 tree tmp = div_if_zero_remainder (op1, step);
6914 if (! tmp)
6915 goto cont;
6916 delta = tmp;
6917 }
6918
6919 /* Only fold here if we can verify we do not overflow one
6920 dimension of a multi-dimensional array. */
6921 if (mdim)
6922 {
6923 tree tmp;
6924
6925 if (!TYPE_MIN_VALUE (domain)
6926 || !TYPE_MAX_VALUE (domain)
6927 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6928 goto cont;
6929
6930 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6931 fold_convert_loc (loc, itype,
6932 TYPE_MIN_VALUE (domain)),
6933 fold_convert_loc (loc, itype, delta));
6934 if (TREE_CODE (tmp) != INTEGER_CST
6935 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6936 goto cont;
6937 }
6938
6939 /* We found a suitable component reference. */
6940
6941 pref = TREE_OPERAND (addr, 0);
6942 ret = copy_node (pref);
6943 SET_EXPR_LOCATION (ret, loc);
6944
6945 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6946 fold_build2_loc
6947 (loc, PLUS_EXPR, itype,
6948 fold_convert_loc (loc, itype,
6949 TYPE_MIN_VALUE
6950 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6951 fold_convert_loc (loc, itype, delta)),
6952 NULL_TREE, NULL_TREE);
6953 return build_fold_addr_expr_loc (loc, ret);
6954 }
6955
6956 cont:
6957
6958 for (;; ref = TREE_OPERAND (ref, 0))
6959 {
6960 if (TREE_CODE (ref) == ARRAY_REF)
6961 {
6962 tree domain;
6963
6964 /* Remember if this was a multi-dimensional array. */
6965 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6966 mdim = true;
6967
6968 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6969 if (! domain)
6970 continue;
6971 itype = TREE_TYPE (domain);
6972
6973 step = array_ref_element_size (ref);
6974 if (TREE_CODE (step) != INTEGER_CST)
6975 continue;
6976
6977 if (s)
6978 {
6979 if (! tree_int_cst_equal (step, s))
6980 continue;
6981 }
6982 else
6983 {
6984 	      /* Check whether delta is a multiple of step.  */
6985 tree tmp = div_if_zero_remainder (op1, step);
6986 if (! tmp)
6987 continue;
6988 delta = tmp;
6989 }
6990
6991 /* Only fold here if we can verify we do not overflow one
6992 dimension of a multi-dimensional array. */
6993 if (mdim)
6994 {
6995 tree tmp;
6996
6997 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6998 || !TYPE_MAX_VALUE (domain)
6999 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7000 continue;
7001
7002 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7003 fold_convert_loc (loc, itype,
7004 TREE_OPERAND (ref, 1)),
7005 fold_convert_loc (loc, itype, delta));
7006 if (!tmp
7007 || TREE_CODE (tmp) != INTEGER_CST
7008 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7009 continue;
7010 }
7011
7012 break;
7013 }
7014 else
7015 mdim = false;
7016
7017 if (!handled_component_p (ref))
7018 return NULL_TREE;
7019 }
7020
7021 /* We found the suitable array reference. So copy everything up to it,
7022 and replace the index. */
7023
7024 pref = TREE_OPERAND (addr, 0);
7025 ret = copy_node (pref);
7026 SET_EXPR_LOCATION (ret, loc);
7027 pos = ret;
7028
7029 while (pref != ref)
7030 {
7031 pref = TREE_OPERAND (pref, 0);
7032 TREE_OPERAND (pos, 0) = copy_node (pref);
7033 pos = TREE_OPERAND (pos, 0);
7034 }
7035
7036 TREE_OPERAND (pos, 1)
7037 = fold_build2_loc (loc, PLUS_EXPR, itype,
7038 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7039 fold_convert_loc (loc, itype, delta));
7040 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7041 }
7042
7043
7044 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7045 means A >= Y && A != MAX, but in this case we know that
7046 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
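/* The "+ 1" is recognized by folding the difference of the two
   occurrences of A and checking that it is the constant 1; pointer
   operands are converted to ssizetype before the subtraction.  */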
7047
7048 static tree
7049 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7050 {
7051 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7052
7053 if (TREE_CODE (bound) == LT_EXPR)
7054 a = TREE_OPERAND (bound, 0);
7055 else if (TREE_CODE (bound) == GT_EXPR)
7056 a = TREE_OPERAND (bound, 1);
7057 else
7058 return NULL_TREE;
7059
7060 typea = TREE_TYPE (a);
7061 if (!INTEGRAL_TYPE_P (typea)
7062 && !POINTER_TYPE_P (typea))
7063 return NULL_TREE;
7064
7065 if (TREE_CODE (ineq) == LT_EXPR)
7066 {
7067 a1 = TREE_OPERAND (ineq, 1);
7068 y = TREE_OPERAND (ineq, 0);
7069 }
7070 else if (TREE_CODE (ineq) == GT_EXPR)
7071 {
7072 a1 = TREE_OPERAND (ineq, 0);
7073 y = TREE_OPERAND (ineq, 1);
7074 }
7075 else
7076 return NULL_TREE;
7077
7078 if (TREE_TYPE (a1) != typea)
7079 return NULL_TREE;
7080
7081 if (POINTER_TYPE_P (typea))
7082 {
7083       /* Convert the pointer types into integers before taking the difference.  */
7084 tree ta = fold_convert_loc (loc, ssizetype, a);
7085 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7086 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7087 }
7088 else
7089 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7090
7091 if (!diff || !integer_onep (diff))
7092 return NULL_TREE;
7093
7094 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7095 }
7096
7097 /* Fold a sum or difference of at least one multiplication.
7098 Returns the folded tree or NULL if no simplification could be made. */
7099
7100 static tree
7101 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7102 tree arg0, tree arg1)
7103 {
7104 tree arg00, arg01, arg10, arg11;
7105 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7106
7107 /* (A * C) +- (B * C) -> (A+-B) * C.
7108 (A * C) +- A -> A * (C+-1).
7109 We are most concerned about the case where C is a constant,
7110 but other combinations show up during loop reduction. Since
7111 it is not difficult, try all four possibilities. */
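  /* For example, X * 4 + X * 12 becomes X * 16, and the power-of-two
     case below rewrites X * 12 + Y * 4 as (X * 3 + Y) * 4.  */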
7112
7113 if (TREE_CODE (arg0) == MULT_EXPR)
7114 {
7115 arg00 = TREE_OPERAND (arg0, 0);
7116 arg01 = TREE_OPERAND (arg0, 1);
7117 }
7118 else if (TREE_CODE (arg0) == INTEGER_CST)
7119 {
7120 arg00 = build_one_cst (type);
7121 arg01 = arg0;
7122 }
7123 else
7124 {
7125 /* We cannot generate constant 1 for fract. */
7126 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7127 return NULL_TREE;
7128 arg00 = arg0;
7129 arg01 = build_one_cst (type);
7130 }
7131 if (TREE_CODE (arg1) == MULT_EXPR)
7132 {
7133 arg10 = TREE_OPERAND (arg1, 0);
7134 arg11 = TREE_OPERAND (arg1, 1);
7135 }
7136 else if (TREE_CODE (arg1) == INTEGER_CST)
7137 {
7138 arg10 = build_one_cst (type);
7139       /* Since we canonicalize A - 2 to A + -2, get rid of that sign for
7140 	 the purpose of this canonicalization.  */
7141 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7142 && negate_expr_p (arg1)
7143 && code == PLUS_EXPR)
7144 {
7145 arg11 = negate_expr (arg1);
7146 code = MINUS_EXPR;
7147 }
7148 else
7149 arg11 = arg1;
7150 }
7151 else
7152 {
7153 /* We cannot generate constant 1 for fract. */
7154 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7155 return NULL_TREE;
7156 arg10 = arg1;
7157 arg11 = build_one_cst (type);
7158 }
7159 same = NULL_TREE;
7160
7161 if (operand_equal_p (arg01, arg11, 0))
7162 same = arg01, alt0 = arg00, alt1 = arg10;
7163 else if (operand_equal_p (arg00, arg10, 0))
7164 same = arg00, alt0 = arg01, alt1 = arg11;
7165 else if (operand_equal_p (arg00, arg11, 0))
7166 same = arg00, alt0 = arg01, alt1 = arg10;
7167 else if (operand_equal_p (arg01, arg10, 0))
7168 same = arg01, alt0 = arg00, alt1 = arg11;
7169
7170 /* No identical multiplicands; see if we can find a common
7171 power-of-two factor in non-power-of-two multiplies. This
7172 can help in multi-dimensional array access. */
7173 else if (tree_fits_shwi_p (arg01)
7174 && tree_fits_shwi_p (arg11))
7175 {
7176 HOST_WIDE_INT int01, int11, tmp;
7177 bool swap = false;
7178 tree maybe_same;
7179 int01 = tree_to_shwi (arg01);
7180 int11 = tree_to_shwi (arg11);
7181
7182 /* Move min of absolute values to int11. */
7183 if (absu_hwi (int01) < absu_hwi (int11))
7184 {
7185 tmp = int01, int01 = int11, int11 = tmp;
7186 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7187 maybe_same = arg01;
7188 swap = true;
7189 }
7190 else
7191 maybe_same = arg11;
7192
7193 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7194 	     /* The remainder should not be a constant, otherwise we
7195 		would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7196 		would increase the number of multiplications necessary.  */
7197 && TREE_CODE (arg10) != INTEGER_CST)
7198 {
7199 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7200 build_int_cst (TREE_TYPE (arg00),
7201 int01 / int11));
7202 alt1 = arg10;
7203 same = maybe_same;
7204 if (swap)
7205 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7206 }
7207 }
7208
7209 if (same)
7210 return fold_build2_loc (loc, MULT_EXPR, type,
7211 fold_build2_loc (loc, code, type,
7212 fold_convert_loc (loc, type, alt0),
7213 fold_convert_loc (loc, type, alt1)),
7214 fold_convert_loc (loc, type, same));
7215
7216 return NULL_TREE;
7217 }
7218
7219 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7220 specified by EXPR into the buffer PTR of length LEN bytes.
7221 Return the number of bytes placed in the buffer, or zero
7222 upon failure. */
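/* Bytes are stored in target order: for each byte of the value the
   loop below computes its position in the target layout, honoring
   BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN.  */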
7223
7224 static int
7225 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7226 {
7227 tree type = TREE_TYPE (expr);
7228 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7229 int byte, offset, word, words;
7230 unsigned char value;
7231
7232 if ((off == -1 && total_bytes > len)
7233 || off >= total_bytes)
7234 return 0;
7235 if (off == -1)
7236 off = 0;
7237 words = total_bytes / UNITS_PER_WORD;
7238
7239 for (byte = 0; byte < total_bytes; byte++)
7240 {
7241 int bitpos = byte * BITS_PER_UNIT;
7242 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7243 number of bytes. */
7244 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7245
7246 if (total_bytes > UNITS_PER_WORD)
7247 {
7248 word = byte / UNITS_PER_WORD;
7249 if (WORDS_BIG_ENDIAN)
7250 word = (words - 1) - word;
7251 offset = word * UNITS_PER_WORD;
7252 if (BYTES_BIG_ENDIAN)
7253 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7254 else
7255 offset += byte % UNITS_PER_WORD;
7256 }
7257 else
7258 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7259 if (offset >= off
7260 && offset - off < len)
7261 ptr[offset - off] = value;
7262 }
7263 return MIN (len, total_bytes - off);
7264 }
7265
7266
7267 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7268 specified by EXPR into the buffer PTR of length LEN bytes.
7269 Return the number of bytes placed in the buffer, or zero
7270 upon failure. */
7271
7272 static int
7273 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7274 {
7275 tree type = TREE_TYPE (expr);
7276 enum machine_mode mode = TYPE_MODE (type);
7277 int total_bytes = GET_MODE_SIZE (mode);
7278 FIXED_VALUE_TYPE value;
7279 tree i_value, i_type;
7280
7281 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7282 return 0;
7283
7284 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7285
7286 if (NULL_TREE == i_type
7287       || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7288 return 0;
7289
7290 value = TREE_FIXED_CST (expr);
7291 i_value = double_int_to_tree (i_type, value.data);
7292
7293 return native_encode_int (i_value, ptr, len, off);
7294 }
7295
7296
7297 /* Subroutine of native_encode_expr. Encode the REAL_CST
7298 specified by EXPR into the buffer PTR of length LEN bytes.
7299 Return the number of bytes placed in the buffer, or zero
7300 upon failure. */
7301
7302 static int
7303 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7304 {
7305 tree type = TREE_TYPE (expr);
7306 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7307 int byte, offset, word, words, bitpos;
7308 unsigned char value;
7309
7310 /* There are always 32 bits in each long, no matter the size of
7311      the host's long.  We handle floating point representations with
7312 up to 192 bits. */
7313 long tmp[6];
7314
7315 if ((off == -1 && total_bytes > len)
7316 || off >= total_bytes)
7317 return 0;
7318 if (off == -1)
7319 off = 0;
7320 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7321
7322 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7323
7324 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7325 bitpos += BITS_PER_UNIT)
7326 {
7327 byte = (bitpos / BITS_PER_UNIT) & 3;
7328 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7329
7330 if (UNITS_PER_WORD < 4)
7331 {
7332 word = byte / UNITS_PER_WORD;
7333 if (WORDS_BIG_ENDIAN)
7334 word = (words - 1) - word;
7335 offset = word * UNITS_PER_WORD;
7336 if (BYTES_BIG_ENDIAN)
7337 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7338 else
7339 offset += byte % UNITS_PER_WORD;
7340 }
7341 else
7342 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7343 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7344 if (offset >= off
7345 && offset - off < len)
7346 ptr[offset - off] = value;
7347 }
7348 return MIN (len, total_bytes - off);
7349 }
7350
7351 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7352 specified by EXPR into the buffer PTR of length LEN bytes.
7353 Return the number of bytes placed in the buffer, or zero
7354 upon failure. */
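/* The real part is encoded first, immediately followed by the
   imaginary part; a non-(-1) OFF is shifted by the size of the real
   part before the imaginary half is encoded.  */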
7355
7356 static int
7357 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7358 {
7359 int rsize, isize;
7360 tree part;
7361
7362 part = TREE_REALPART (expr);
7363 rsize = native_encode_expr (part, ptr, len, off);
7364 if (off == -1
7365 && rsize == 0)
7366 return 0;
7367 part = TREE_IMAGPART (expr);
7368 if (off != -1)
7369 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7370 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7371 if (off == -1
7372 && isize != rsize)
7373 return 0;
7374 return rsize + isize;
7375 }
7376
7377
7378 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7379 specified by EXPR into the buffer PTR of length LEN bytes.
7380 Return the number of bytes placed in the buffer, or zero
7381 upon failure. */
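/* Elements are encoded back-to-back in element order; when OFF is
   not -1, whole leading elements are skipped before encoding
   starts.  */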
7382
7383 static int
7384 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7385 {
7386 unsigned i, count;
7387 int size, offset;
7388 tree itype, elem;
7389
7390 offset = 0;
7391 count = VECTOR_CST_NELTS (expr);
7392 itype = TREE_TYPE (TREE_TYPE (expr));
7393 size = GET_MODE_SIZE (TYPE_MODE (itype));
7394 for (i = 0; i < count; i++)
7395 {
7396 if (off >= size)
7397 {
7398 off -= size;
7399 continue;
7400 }
7401 elem = VECTOR_CST_ELT (expr, i);
7402 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7403 if ((off == -1 && res != size)
7404 || res == 0)
7405 return 0;
7406 offset += res;
7407 if (offset >= len)
7408 return offset;
7409 if (off != -1)
7410 off = 0;
7411 }
7412 return offset;
7413 }
7414
7415
7416 /* Subroutine of native_encode_expr. Encode the STRING_CST
7417 specified by EXPR into the buffer PTR of length LEN bytes.
7418 Return the number of bytes placed in the buffer, or zero
7419 upon failure. */
7420
7421 static int
7422 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7423 {
7424 tree type = TREE_TYPE (expr);
7425 HOST_WIDE_INT total_bytes;
7426
7427 if (TREE_CODE (type) != ARRAY_TYPE
7428 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7429 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7430 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7431 return 0;
7432 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7433 if ((off == -1 && total_bytes > len)
7434 || off >= total_bytes)
7435 return 0;
7436 if (off == -1)
7437 off = 0;
7438 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7439 {
7440 int written = 0;
7441 if (off < TREE_STRING_LENGTH (expr))
7442 {
7443 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7444 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7445 }
7446 memset (ptr + written, 0,
7447 MIN (total_bytes - written, len - written));
7448 }
7449 else
7450 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7451 return MIN (total_bytes - off, len);
7452 }
7453
7454
7455 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7456 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7457 buffer PTR of length LEN bytes. If OFF is not -1 then start
7458 the encoding at byte offset OFF and encode at most LEN bytes.
7459 Return the number of bytes placed in the buffer, or zero upon failure. */
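/* For example, on a little-endian target, encoding the 32-bit
   INTEGER_CST 0x01020304 fills the buffer with the bytes
   04 03 02 01.  */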
7460
7461 int
7462 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7463 {
7464 switch (TREE_CODE (expr))
7465 {
7466 case INTEGER_CST:
7467 return native_encode_int (expr, ptr, len, off);
7468
7469 case REAL_CST:
7470 return native_encode_real (expr, ptr, len, off);
7471
7472 case FIXED_CST:
7473 return native_encode_fixed (expr, ptr, len, off);
7474
7475 case COMPLEX_CST:
7476 return native_encode_complex (expr, ptr, len, off);
7477
7478 case VECTOR_CST:
7479 return native_encode_vector (expr, ptr, len, off);
7480
7481 case STRING_CST:
7482 return native_encode_string (expr, ptr, len, off);
7483
7484 default:
7485 return 0;
7486 }
7487 }
7488
7489
7490 /* Subroutine of native_interpret_expr. Interpret the contents of
7491 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7492 If the buffer cannot be interpreted, return NULL_TREE. */
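/* This is the inverse of native_encode_int; wi::from_buffer reads
   the bytes in target byte order.  */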
7493
7494 static tree
7495 native_interpret_int (tree type, const unsigned char *ptr, int len)
7496 {
7497 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7498
7499 if (total_bytes > len
7500 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7501 return NULL_TREE;
7502
7503 wide_int result = wi::from_buffer (ptr, total_bytes);
7504
7505 return wide_int_to_tree (type, result);
7506 }
7507
7508
7509 /* Subroutine of native_interpret_expr. Interpret the contents of
7510 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7511 If the buffer cannot be interpreted, return NULL_TREE. */
7512
7513 static tree
7514 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7515 {
7516 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7517 double_int result;
7518 FIXED_VALUE_TYPE fixed_value;
7519
7520 if (total_bytes > len
7521 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7522 return NULL_TREE;
7523
7524 result = double_int::from_buffer (ptr, total_bytes);
7525 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7526
7527 return build_fixed (type, fixed_value);
7528 }
7529
7530
7531 /* Subroutine of native_interpret_expr. Interpret the contents of
7532 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7533 If the buffer cannot be interpreted, return NULL_TREE. */
7534
7535 static tree
7536 native_interpret_real (tree type, const unsigned char *ptr, int len)
7537 {
7538 enum machine_mode mode = TYPE_MODE (type);
7539 int total_bytes = GET_MODE_SIZE (mode);
7540 int byte, offset, word, words, bitpos;
7541 unsigned char value;
7542 /* There are always 32 bits in each long, no matter the size of
7543      the host's long.  We handle floating point representations with
7544 up to 192 bits. */
7545 REAL_VALUE_TYPE r;
7546 long tmp[6];
7547
7549 if (total_bytes > len || total_bytes > 24)
7550 return NULL_TREE;
7551 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7552
7553 memset (tmp, 0, sizeof (tmp));
7554 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7555 bitpos += BITS_PER_UNIT)
7556 {
7557 byte = (bitpos / BITS_PER_UNIT) & 3;
7558 if (UNITS_PER_WORD < 4)
7559 {
7560 word = byte / UNITS_PER_WORD;
7561 if (WORDS_BIG_ENDIAN)
7562 word = (words - 1) - word;
7563 offset = word * UNITS_PER_WORD;
7564 if (BYTES_BIG_ENDIAN)
7565 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7566 else
7567 offset += byte % UNITS_PER_WORD;
7568 }
7569 else
7570 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7571 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7572
7573 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7574 }
7575
7576 real_from_target (&r, tmp, mode);
7577 return build_real (type, r);
7578 }
7579
7580
7581 /* Subroutine of native_interpret_expr. Interpret the contents of
7582 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7583 If the buffer cannot be interpreted, return NULL_TREE. */
7584
7585 static tree
7586 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7587 {
7588 tree etype, rpart, ipart;
7589 int size;
7590
7591 etype = TREE_TYPE (type);
7592 size = GET_MODE_SIZE (TYPE_MODE (etype));
7593 if (size * 2 > len)
7594 return NULL_TREE;
7595 rpart = native_interpret_expr (etype, ptr, size);
7596 if (!rpart)
7597 return NULL_TREE;
7598 ipart = native_interpret_expr (etype, ptr+size, size);
7599 if (!ipart)
7600 return NULL_TREE;
7601 return build_complex (type, rpart, ipart);
7602 }
7603
7604
7605 /* Subroutine of native_interpret_expr. Interpret the contents of
7606 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7607 If the buffer cannot be interpreted, return NULL_TREE. */
7608
7609 static tree
7610 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7611 {
7612 tree etype, elem;
7613 int i, size, count;
7614 tree *elements;
7615
7616 etype = TREE_TYPE (type);
7617 size = GET_MODE_SIZE (TYPE_MODE (etype));
7618 count = TYPE_VECTOR_SUBPARTS (type);
7619 if (size * count > len)
7620 return NULL_TREE;
7621
7622 elements = XALLOCAVEC (tree, count);
7623 for (i = count - 1; i >= 0; i--)
7624 {
7625 elem = native_interpret_expr (etype, ptr+(i*size), size);
7626 if (!elem)
7627 return NULL_TREE;
7628 elements[i] = elem;
7629 }
7630 return build_vector (type, elements);
7631 }
7632
7633
7634 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7635 the buffer PTR of length LEN as a constant of type TYPE. For
7636 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7637 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7638 return NULL_TREE. */
7639
7640 tree
7641 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7642 {
7643 switch (TREE_CODE (type))
7644 {
7645 case INTEGER_TYPE:
7646 case ENUMERAL_TYPE:
7647 case BOOLEAN_TYPE:
7648 case POINTER_TYPE:
7649 case REFERENCE_TYPE:
7650 return native_interpret_int (type, ptr, len);
7651
7652 case REAL_TYPE:
7653 return native_interpret_real (type, ptr, len);
7654
7655 case FIXED_POINT_TYPE:
7656 return native_interpret_fixed (type, ptr, len);
7657
7658 case COMPLEX_TYPE:
7659 return native_interpret_complex (type, ptr, len);
7660
7661 case VECTOR_TYPE:
7662 return native_interpret_vector (type, ptr, len);
7663
7664 default:
7665 return NULL_TREE;
7666 }
7667 }
7668
7669 /* Returns true if we can interpret the contents of a native encoding
7670 as TYPE. */
7671
7672 static bool
7673 can_native_interpret_type_p (tree type)
7674 {
7675 switch (TREE_CODE (type))
7676 {
7677 case INTEGER_TYPE:
7678 case ENUMERAL_TYPE:
7679 case BOOLEAN_TYPE:
7680 case POINTER_TYPE:
7681 case REFERENCE_TYPE:
7682 case FIXED_POINT_TYPE:
7683 case REAL_TYPE:
7684 case COMPLEX_TYPE:
7685 case VECTOR_TYPE:
7686 return true;
7687 default:
7688 return false;
7689 }
7690 }
7691
7692 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7693 TYPE at compile-time. If we're unable to perform the conversion
7694 return NULL_TREE. */
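/* For example, assuming the target's float is IEEE single precision,
   a VIEW_CONVERT_EXPR of the 32-bit INTEGER_CST 0x3f800000 to float
   folds to the REAL_CST 1.0f.  */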
7695
7696 static tree
7697 fold_view_convert_expr (tree type, tree expr)
7698 {
7699 /* We support up to 512-bit values (for V8DFmode). */
7700 unsigned char buffer[64];
7701 int len;
7702
7703 /* Check that the host and target are sane. */
7704 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7705 return NULL_TREE;
7706
7707 len = native_encode_expr (expr, buffer, sizeof (buffer));
7708 if (len == 0)
7709 return NULL_TREE;
7710
7711 return native_interpret_expr (type, buffer, len);
7712 }
7713
7714 /* Build an expression for the address of T. Folds away INDIRECT_REF
7715 to avoid confusing the gimplify process. */
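/* For instance, &*p simplifies to p (with a conversion if the pointer
   types differ), and a MEM_REF of p with offset 0 simplifies to p as
   well.  */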
7716
7717 tree
7718 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7719 {
7720 /* The size of the object is not relevant when talking about its address. */
7721 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7722 t = TREE_OPERAND (t, 0);
7723
7724 if (TREE_CODE (t) == INDIRECT_REF)
7725 {
7726 t = TREE_OPERAND (t, 0);
7727
7728 if (TREE_TYPE (t) != ptrtype)
7729 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7730 }
7731 else if (TREE_CODE (t) == MEM_REF
7732 && integer_zerop (TREE_OPERAND (t, 1)))
7733 return TREE_OPERAND (t, 0);
7734 else if (TREE_CODE (t) == MEM_REF
7735 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7736 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7737 TREE_OPERAND (t, 0),
7738 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7739 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7740 {
7741 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7742
7743 if (TREE_TYPE (t) != ptrtype)
7744 t = fold_convert_loc (loc, ptrtype, t);
7745 }
7746 else
7747 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7748
7749 return t;
7750 }
7751
7752 /* Build an expression for the address of T. */
7753
7754 tree
7755 build_fold_addr_expr_loc (location_t loc, tree t)
7756 {
7757 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7758
7759 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7760 }
7761
7762 static bool vec_cst_ctor_to_array (tree, tree *);
7763
7764 /* Fold a unary expression of code CODE and type TYPE with operand
7765 OP0. Return the folded expression if folding is successful.
7766 Otherwise, return NULL_TREE. */
7767
7768 tree
7769 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7770 {
7771 tree tem;
7772 tree arg0;
7773 enum tree_code_class kind = TREE_CODE_CLASS (code);
7774
7775 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7776 && TREE_CODE_LENGTH (code) == 1);
7777
7778 arg0 = op0;
7779 if (arg0)
7780 {
7781 if (CONVERT_EXPR_CODE_P (code)
7782 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7783 {
7784 /* Don't use STRIP_NOPS, because signedness of argument type
7785 matters. */
7786 STRIP_SIGN_NOPS (arg0);
7787 }
7788 else
7789 {
7790 /* Strip any conversions that don't change the mode. This
7791 is safe for every expression, except for a comparison
7792 expression because its signedness is derived from its
7793 operands.
7794
7795 Note that this is done as an internal manipulation within
7796 the constant folder, in order to find the simplest
7797 representation of the arguments so that their form can be
7798 	 studied.  In any case, the appropriate type conversions
7799 should be put back in the tree that will get out of the
7800 constant folder. */
7801 STRIP_NOPS (arg0);
7802 }
7803 }
7804
7805 if (TREE_CODE_CLASS (code) == tcc_unary)
7806 {
7807 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7808 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7809 fold_build1_loc (loc, code, type,
7810 fold_convert_loc (loc, TREE_TYPE (op0),
7811 TREE_OPERAND (arg0, 1))));
7812 else if (TREE_CODE (arg0) == COND_EXPR)
7813 {
7814 tree arg01 = TREE_OPERAND (arg0, 1);
7815 tree arg02 = TREE_OPERAND (arg0, 2);
7816 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7817 arg01 = fold_build1_loc (loc, code, type,
7818 fold_convert_loc (loc,
7819 TREE_TYPE (op0), arg01));
7820 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7821 arg02 = fold_build1_loc (loc, code, type,
7822 fold_convert_loc (loc,
7823 TREE_TYPE (op0), arg02));
7824 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7825 arg01, arg02);
7826
7827 /* If this was a conversion, and all we did was to move into
7828 inside the COND_EXPR, bring it back out. But leave it if
7829 it is a conversion from integer to integer and the
7830 result precision is no wider than a word since such a
7831 conversion is cheap and may be optimized away by combine,
7832 while it couldn't if it were outside the COND_EXPR. Then return
7833 so we don't get into an infinite recursion loop taking the
7834 conversion out and then back in. */
7835
7836 if ((CONVERT_EXPR_CODE_P (code)
7837 || code == NON_LVALUE_EXPR)
7838 && TREE_CODE (tem) == COND_EXPR
7839 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7840 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7841 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7842 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7843 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7844 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7845 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7846 && (INTEGRAL_TYPE_P
7847 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7848 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7849 || flag_syntax_only))
7850 tem = build1_loc (loc, code, type,
7851 build3 (COND_EXPR,
7852 TREE_TYPE (TREE_OPERAND
7853 (TREE_OPERAND (tem, 1), 0)),
7854 TREE_OPERAND (tem, 0),
7855 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7856 TREE_OPERAND (TREE_OPERAND (tem, 2),
7857 0)));
7858 return tem;
7859 }
7860 }
7861
7862 switch (code)
7863 {
7864 case PAREN_EXPR:
7865 /* Re-association barriers around constants and other re-association
7866 barriers can be removed. */
7867 if (CONSTANT_CLASS_P (op0)
7868 || TREE_CODE (op0) == PAREN_EXPR)
7869 return fold_convert_loc (loc, type, op0);
7870 return NULL_TREE;
7871
7872 case NON_LVALUE_EXPR:
7873 if (!maybe_lvalue_p (op0))
7874 return fold_convert_loc (loc, type, op0);
7875 return NULL_TREE;
7876
7877 CASE_CONVERT:
7878 case FLOAT_EXPR:
7879 case FIX_TRUNC_EXPR:
7880 if (TREE_TYPE (op0) == type)
7881 return op0;
7882
7883 if (COMPARISON_CLASS_P (op0))
7884 {
7885 /* If we have (type) (a CMP b) and type is an integral type, return
7886 new expression involving the new type. Canonicalize
7887 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7888 non-integral type.
7889 Do not fold the result as that would not simplify further, also
7890 folding again results in recursions. */
7891 if (TREE_CODE (type) == BOOLEAN_TYPE)
7892 return build2_loc (loc, TREE_CODE (op0), type,
7893 TREE_OPERAND (op0, 0),
7894 TREE_OPERAND (op0, 1));
7895 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7896 && TREE_CODE (type) != VECTOR_TYPE)
7897 return build3_loc (loc, COND_EXPR, type, op0,
7898 constant_boolean_node (true, type),
7899 constant_boolean_node (false, type));
7900 }
7901
7902 /* Handle cases of two conversions in a row. */
7903 if (CONVERT_EXPR_P (op0))
7904 {
7905 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7906 tree inter_type = TREE_TYPE (op0);
7907 int inside_int = INTEGRAL_TYPE_P (inside_type);
7908 int inside_ptr = POINTER_TYPE_P (inside_type);
7909 int inside_float = FLOAT_TYPE_P (inside_type);
7910 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7911 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7912 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7913 int inter_int = INTEGRAL_TYPE_P (inter_type);
7914 int inter_ptr = POINTER_TYPE_P (inter_type);
7915 int inter_float = FLOAT_TYPE_P (inter_type);
7916 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7917 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7918 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7919 int final_int = INTEGRAL_TYPE_P (type);
7920 int final_ptr = POINTER_TYPE_P (type);
7921 int final_float = FLOAT_TYPE_P (type);
7922 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7923 unsigned int final_prec = TYPE_PRECISION (type);
7924 int final_unsignedp = TYPE_UNSIGNED (type);
7925
7926 /* In addition to the cases of two conversions in a row
7927 handled below, if we are converting something to its own
7928 type via an object of identical or wider precision, neither
7929 conversion is needed. */
7930 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7931 && (((inter_int || inter_ptr) && final_int)
7932 || (inter_float && final_float))
7933 && inter_prec >= final_prec)
7934 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7935
7936 /* Likewise, if the intermediate and initial types are either both
7937 float or both integer, we don't need the middle conversion if the
7938 former is wider than the latter and doesn't change the signedness
7939 (for integers). Avoid this if the final type is a pointer since
7940 then we sometimes need the middle conversion. Likewise if the
7941 final type has a precision not equal to the size of its mode. */
7942 if (((inter_int && inside_int)
7943 || (inter_float && inside_float)
7944 || (inter_vec && inside_vec))
7945 && inter_prec >= inside_prec
7946 && (inter_float || inter_vec
7947 || inter_unsignedp == inside_unsignedp)
7948 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7949 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7950 && ! final_ptr
7951 && (! final_vec || inter_prec == inside_prec))
7952 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7953
7954 /* If we have a sign-extension of a zero-extended value, we can
7955 replace that by a single zero-extension. Likewise if the
7956 final conversion does not change precision we can drop the
7957 intermediate conversion. */
7958 if (inside_int && inter_int && final_int
7959 && ((inside_prec < inter_prec && inter_prec < final_prec
7960 && inside_unsignedp && !inter_unsignedp)
7961 || final_prec == inter_prec))
7962 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7963
7964 /* Two conversions in a row are not needed unless:
7965 - some conversion is floating-point (overstrict for now), or
7966 - some conversion is a vector (overstrict for now), or
7967 - the intermediate type is narrower than both initial and
7968 final, or
7969 - the intermediate type and innermost type differ in signedness,
7970 and the outermost type is wider than the intermediate, or
7971 - the initial type is a pointer type and the precisions of the
7972 intermediate and final types differ, or
7973 - the final type is a pointer type and the precisions of the
7974 initial and intermediate types differ. */
7975 if (! inside_float && ! inter_float && ! final_float
7976 && ! inside_vec && ! inter_vec && ! final_vec
7977 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7978 && ! (inside_int && inter_int
7979 && inter_unsignedp != inside_unsignedp
7980 && inter_prec < final_prec)
7981 && ((inter_unsignedp && inter_prec > inside_prec)
7982 == (final_unsignedp && final_prec > inter_prec))
7983 && ! (inside_ptr && inter_prec != final_prec)
7984 && ! (final_ptr && inside_prec != inter_prec)
7985 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7986 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7987 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7988 }
7989
7990 /* Handle (T *)&A.B.C for A being of type T and B and C
7991 living at offset zero. This occurs frequently in
7992 C++ upcasting and then accessing the base. */
7993 if (TREE_CODE (op0) == ADDR_EXPR
7994 && POINTER_TYPE_P (type)
7995 && handled_component_p (TREE_OPERAND (op0, 0)))
7996 {
7997 HOST_WIDE_INT bitsize, bitpos;
7998 tree offset;
7999 enum machine_mode mode;
8000 int unsignedp, volatilep;
8001 tree base = TREE_OPERAND (op0, 0);
8002 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8003 &mode, &unsignedp, &volatilep, false);
8004 /* If the reference was to a (constant) zero offset, we can use
8005 the address of the base if it has the same base type
8006 as the result type and the pointer type is unqualified. */
8007 if (! offset && bitpos == 0
8008 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8009 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8010 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8011 return fold_convert_loc (loc, type,
8012 build_fold_addr_expr_loc (loc, base));
8013 }
8014
8015 if (TREE_CODE (op0) == MODIFY_EXPR
8016 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8017 /* Detect assigning a bitfield. */
8018 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8019 && DECL_BIT_FIELD
8020 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8021 {
8022 /* Don't leave an assignment inside a conversion
8023 unless assigning a bitfield. */
8024 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8025 /* First do the assignment, then return converted constant. */
8026 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8027 TREE_NO_WARNING (tem) = 1;
8028 TREE_USED (tem) = 1;
8029 return tem;
8030 }
8031
8032 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8033 constants (if x has signed type, the sign bit cannot be set
8034 in c). This folds extension into the BIT_AND_EXPR.
8035 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8036 very likely don't have maximal range for their precision and this
8037 transformation effectively doesn't preserve non-maximal ranges. */
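      /* For example, for signed char X, (unsigned int) (X & 0x7f)
	 becomes (unsigned int) X & 0x7f: the mask clears the sign bit,
	 so folding the extension into the AND is safe.  */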
8038 if (TREE_CODE (type) == INTEGER_TYPE
8039 && TREE_CODE (op0) == BIT_AND_EXPR
8040 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8041 {
8042 tree and_expr = op0;
8043 tree and0 = TREE_OPERAND (and_expr, 0);
8044 tree and1 = TREE_OPERAND (and_expr, 1);
8045 int change = 0;
8046
8047 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8048 || (TYPE_PRECISION (type)
8049 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8050 change = 1;
8051 else if (TYPE_PRECISION (TREE_TYPE (and1))
8052 <= HOST_BITS_PER_WIDE_INT
8053 && tree_fits_uhwi_p (and1))
8054 {
8055 unsigned HOST_WIDE_INT cst;
8056
8057 cst = tree_to_uhwi (and1);
8058 cst &= HOST_WIDE_INT_M1U
8059 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8060 change = (cst == 0);
8061 #ifdef LOAD_EXTEND_OP
8062 if (change
8063 && !flag_syntax_only
8064 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8065 == ZERO_EXTEND))
8066 {
8067 tree uns = unsigned_type_for (TREE_TYPE (and0));
8068 and0 = fold_convert_loc (loc, uns, and0);
8069 and1 = fold_convert_loc (loc, uns, and1);
8070 }
8071 #endif
8072 }
8073 if (change)
8074 {
8075 tem = force_fit_type (type, wi::to_widest (and1), 0,
8076 TREE_OVERFLOW (and1));
8077 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8078 fold_convert_loc (loc, type, and0), tem);
8079 }
8080 }
8081
8082 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8083 when one of the new casts will fold away. Conservatively we assume
8084 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8085 if (POINTER_TYPE_P (type)
8086 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8087 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8088 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8089 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8090 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8091 {
8092 tree arg00 = TREE_OPERAND (arg0, 0);
8093 tree arg01 = TREE_OPERAND (arg0, 1);
8094
8095 return fold_build_pointer_plus_loc
8096 (loc, fold_convert_loc (loc, type, arg00), arg01);
8097 }
8098
8099 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8100 of the same precision, and X is an integer type not narrower than
8101 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8102 if (INTEGRAL_TYPE_P (type)
8103 && TREE_CODE (op0) == BIT_NOT_EXPR
8104 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8105 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8106 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8107 {
8108 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8109 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8110 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8111 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8112 fold_convert_loc (loc, type, tem));
8113 }
8114
8115 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8116 type of X and Y (integer types only). */
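      /* For example, (short) (x * y) with int operands can be computed
	 as (short) ((unsigned short) x * (unsigned short) y); the
	 unsigned intermediate type avoids introducing new overflow.  */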
8117 if (INTEGRAL_TYPE_P (type)
8118 && TREE_CODE (op0) == MULT_EXPR
8119 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8120 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8121 {
8122 /* Be careful not to introduce new overflows. */
8123 tree mult_type;
8124 if (TYPE_OVERFLOW_WRAPS (type))
8125 mult_type = type;
8126 else
8127 mult_type = unsigned_type_for (type);
8128
8129 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8130 {
8131 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8132 fold_convert_loc (loc, mult_type,
8133 TREE_OPERAND (op0, 0)),
8134 fold_convert_loc (loc, mult_type,
8135 TREE_OPERAND (op0, 1)));
8136 return fold_convert_loc (loc, type, tem);
8137 }
8138 }
8139
8140 tem = fold_convert_const (code, type, arg0);
8141 return tem ? tem : NULL_TREE;
8142
8143 case ADDR_SPACE_CONVERT_EXPR:
8144 if (integer_zerop (arg0))
8145 return fold_convert_const (code, type, arg0);
8146 return NULL_TREE;
8147
8148 case FIXED_CONVERT_EXPR:
8149 tem = fold_convert_const (code, type, arg0);
8150 return tem ? tem : NULL_TREE;
8151
8152 case VIEW_CONVERT_EXPR:
8153 if (TREE_TYPE (op0) == type)
8154 return op0;
8155 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8156 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8157 type, TREE_OPERAND (op0, 0));
8158 if (TREE_CODE (op0) == MEM_REF)
8159 return fold_build2_loc (loc, MEM_REF, type,
8160 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8161
8162 /* For integral conversions with the same precision or pointer
8163 conversions use a NOP_EXPR instead. */
8164 if ((INTEGRAL_TYPE_P (type)
8165 || POINTER_TYPE_P (type))
8166 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8167 || POINTER_TYPE_P (TREE_TYPE (op0)))
8168 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8169 return fold_convert_loc (loc, type, op0);
8170
8171 /* Strip inner integral conversions that do not change the precision. */
8172 if (CONVERT_EXPR_P (op0)
8173 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8174 || POINTER_TYPE_P (TREE_TYPE (op0)))
8175 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8176 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8177 && (TYPE_PRECISION (TREE_TYPE (op0))
8178 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8179 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8180 type, TREE_OPERAND (op0, 0));
8181
8182 return fold_view_convert_expr (type, op0);
8183
8184 case NEGATE_EXPR:
8185 tem = fold_negate_expr (loc, arg0);
8186 if (tem)
8187 return fold_convert_loc (loc, type, tem);
8188 return NULL_TREE;
8189
8190 case ABS_EXPR:
8191 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8192 return fold_abs_const (arg0, type);
8193 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8194 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8195 /* Convert fabs((double)float) into (double)fabsf(float). */
8196 else if (TREE_CODE (arg0) == NOP_EXPR
8197 && TREE_CODE (type) == REAL_TYPE)
8198 {
8199 tree targ0 = strip_float_extensions (arg0);
8200 if (targ0 != arg0)
8201 return fold_convert_loc (loc, type,
8202 fold_build1_loc (loc, ABS_EXPR,
8203 TREE_TYPE (targ0),
8204 targ0));
8205 }
8206 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8207 else if (TREE_CODE (arg0) == ABS_EXPR)
8208 return arg0;
8209 else if (tree_expr_nonnegative_p (arg0))
8210 return arg0;
8211
8212 /* Strip sign ops from argument. */
8213 if (TREE_CODE (type) == REAL_TYPE)
8214 {
8215 tem = fold_strip_sign_ops (arg0);
8216 if (tem)
8217 return fold_build1_loc (loc, ABS_EXPR, type,
8218 fold_convert_loc (loc, type, tem));
8219 }
8220 return NULL_TREE;
8221
8222 case CONJ_EXPR:
8223 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8224 return fold_convert_loc (loc, type, arg0);
8225 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8226 {
8227 tree itype = TREE_TYPE (type);
8228 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8229 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8230 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8231 negate_expr (ipart));
8232 }
8233 if (TREE_CODE (arg0) == COMPLEX_CST)
8234 {
8235 tree itype = TREE_TYPE (type);
8236 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8237 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8238 return build_complex (type, rpart, negate_expr (ipart));
8239 }
8240 if (TREE_CODE (arg0) == CONJ_EXPR)
8241 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8242 return NULL_TREE;
8243
8244 case BIT_NOT_EXPR:
8245 if (TREE_CODE (arg0) == INTEGER_CST)
8246 return fold_not_const (arg0, type);
8247 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8248 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8249 /* Convert ~ (-A) to A - 1. */
8250 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8251 return fold_build2_loc (loc, MINUS_EXPR, type,
8252 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8253 build_int_cst (type, 1));
8254 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8255 else if (INTEGRAL_TYPE_P (type)
8256 && ((TREE_CODE (arg0) == MINUS_EXPR
8257 && integer_onep (TREE_OPERAND (arg0, 1)))
8258 || (TREE_CODE (arg0) == PLUS_EXPR
8259 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8260 return fold_build1_loc (loc, NEGATE_EXPR, type,
8261 fold_convert_loc (loc, type,
8262 TREE_OPERAND (arg0, 0)));
8263 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8264 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8265 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8266 fold_convert_loc (loc, type,
8267 TREE_OPERAND (arg0, 0)))))
8268 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8269 fold_convert_loc (loc, type,
8270 TREE_OPERAND (arg0, 1)));
8271 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8272 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8273 fold_convert_loc (loc, type,
8274 TREE_OPERAND (arg0, 1)))))
8275 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8276 fold_convert_loc (loc, type,
8277 TREE_OPERAND (arg0, 0)), tem);
8278 /* Perform BIT_NOT_EXPR on each element individually. */
8279 else if (TREE_CODE (arg0) == VECTOR_CST)
8280 {
8281 tree *elements;
8282 tree elem;
8283 unsigned count = VECTOR_CST_NELTS (arg0), i;
8284
8285 elements = XALLOCAVEC (tree, count);
8286 for (i = 0; i < count; i++)
8287 {
8288 elem = VECTOR_CST_ELT (arg0, i);
8289 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8290 if (elem == NULL_TREE)
8291 break;
8292 elements[i] = elem;
8293 }
8294 if (i == count)
8295 return build_vector (type, elements);
8296 }
8297 else if (COMPARISON_CLASS_P (arg0)
8298 && (VECTOR_TYPE_P (type)
8299 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8300 {
8301 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8302 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8303 HONOR_NANS (TYPE_MODE (op_type)));
8304 if (subcode != ERROR_MARK)
8305 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8306 TREE_OPERAND (arg0, 1));
8307 }
8308
8310 return NULL_TREE;
8311
8312 case TRUTH_NOT_EXPR:
8313 /* Note that the operand of this must be an int
8314 and its values must be 0 or 1.
8315 ("true" is a fixed value perhaps depending on the language,
8316 but we don't handle values other than 1 correctly yet.) */
8317 tem = fold_truth_not_expr (loc, arg0);
8318 if (!tem)
8319 return NULL_TREE;
8320 return fold_convert_loc (loc, type, tem);
8321
8322 case REALPART_EXPR:
8323 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8324 return fold_convert_loc (loc, type, arg0);
8325 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8326 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8327 TREE_OPERAND (arg0, 1));
8328 if (TREE_CODE (arg0) == COMPLEX_CST)
8329 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8330 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8331 {
8332 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8333 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8334 fold_build1_loc (loc, REALPART_EXPR, itype,
8335 TREE_OPERAND (arg0, 0)),
8336 fold_build1_loc (loc, REALPART_EXPR, itype,
8337 TREE_OPERAND (arg0, 1)));
8338 return fold_convert_loc (loc, type, tem);
8339 }
8340 if (TREE_CODE (arg0) == CONJ_EXPR)
8341 {
8342 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8343 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8344 TREE_OPERAND (arg0, 0));
8345 return fold_convert_loc (loc, type, tem);
8346 }
8347 if (TREE_CODE (arg0) == CALL_EXPR)
8348 {
8349 tree fn = get_callee_fndecl (arg0);
8350 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8351 switch (DECL_FUNCTION_CODE (fn))
8352 {
8353 CASE_FLT_FN (BUILT_IN_CEXPI):
8354 fn = mathfn_built_in (type, BUILT_IN_COS);
8355 if (fn)
8356 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8357 break;
8358
8359 default:
8360 break;
8361 }
8362 }
8363 return NULL_TREE;
8364
8365 case IMAGPART_EXPR:
8366 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8367 return build_zero_cst (type);
8368 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8369 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8370 TREE_OPERAND (arg0, 0));
8371 if (TREE_CODE (arg0) == COMPLEX_CST)
8372 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8373 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8374 {
8375 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8376 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8377 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8378 TREE_OPERAND (arg0, 0)),
8379 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8380 TREE_OPERAND (arg0, 1)));
8381 return fold_convert_loc (loc, type, tem);
8382 }
8383 if (TREE_CODE (arg0) == CONJ_EXPR)
8384 {
8385 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8386 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8387 return fold_convert_loc (loc, type, negate_expr (tem));
8388 }
8389 if (TREE_CODE (arg0) == CALL_EXPR)
8390 {
8391 tree fn = get_callee_fndecl (arg0);
8392 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8393 switch (DECL_FUNCTION_CODE (fn))
8394 {
8395 CASE_FLT_FN (BUILT_IN_CEXPI):
8396 fn = mathfn_built_in (type, BUILT_IN_SIN);
8397 if (fn)
8398 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8399 break;
8400
8401 default:
8402 break;
8403 }
8404 }
8405 return NULL_TREE;
8406
8407 case INDIRECT_REF:
8408 /* Fold *&X to X if X is an lvalue. */
8409 if (TREE_CODE (op0) == ADDR_EXPR)
8410 {
8411 tree op00 = TREE_OPERAND (op0, 0);
8412 if ((TREE_CODE (op00) == VAR_DECL
8413 || TREE_CODE (op00) == PARM_DECL
8414 || TREE_CODE (op00) == RESULT_DECL)
8415 && !TREE_READONLY (op00))
8416 return op00;
8417 }
8418 return NULL_TREE;
8419
8420 case VEC_UNPACK_LO_EXPR:
8421 case VEC_UNPACK_HI_EXPR:
8422 case VEC_UNPACK_FLOAT_LO_EXPR:
8423 case VEC_UNPACK_FLOAT_HI_EXPR:
8424 {
8425 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8426 tree *elts;
8427 enum tree_code subcode;
8428
8429 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8430 if (TREE_CODE (arg0) != VECTOR_CST)
8431 return NULL_TREE;
8432
8433 elts = XALLOCAVEC (tree, nelts * 2);
8434 if (!vec_cst_ctor_to_array (arg0, elts))
8435 return NULL_TREE;
8436
8437 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8438 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8439 elts += nelts;
8440
8441 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8442 subcode = NOP_EXPR;
8443 else
8444 subcode = FLOAT_EXPR;
8445
8446 for (i = 0; i < nelts; i++)
8447 {
8448 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8449 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8450 return NULL_TREE;
8451 }
8452
8453 return build_vector (type, elts);
8454 }
8455
8456 case REDUC_MIN_EXPR:
8457 case REDUC_MAX_EXPR:
8458 case REDUC_PLUS_EXPR:
8459 {
8460 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8461 tree *elts;
8462 enum tree_code subcode;
8463
8464 if (TREE_CODE (op0) != VECTOR_CST)
8465 return NULL_TREE;
8466
8467 elts = XALLOCAVEC (tree, nelts);
8468 if (!vec_cst_ctor_to_array (op0, elts))
8469 return NULL_TREE;
8470
8471 switch (code)
8472 {
8473 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8474 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8475 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8476 default: gcc_unreachable ();
8477 }
8478
8479 for (i = 1; i < nelts; i++)
8480 {
8481 elts[0] = const_binop (subcode, elts[0], elts[i]);
8482 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8483 return NULL_TREE;
8484 elts[i] = build_zero_cst (TREE_TYPE (type));
8485 }
8486
8487 return build_vector (type, elts);
8488 }
8489
8490 default:
8491 return NULL_TREE;
8492 } /* switch (code) */
8493 }
8494
8495
8496 /* If the operation was a conversion do _not_ mark a resulting constant
8497 with TREE_OVERFLOW if the original constant was not. These conversions
8498 have implementation defined behavior and retaining the TREE_OVERFLOW
8499 flag here would confuse later passes such as VRP. */
8500 tree
8501 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8502 tree type, tree op0)
8503 {
8504 tree res = fold_unary_loc (loc, code, type, op0);
8505 if (res
8506 && TREE_CODE (res) == INTEGER_CST
8507 && TREE_CODE (op0) == INTEGER_CST
8508 && CONVERT_EXPR_CODE_P (code))
8509 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8510
8511 return res;
8512 }
8513
8514 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8515 operands OP0 and OP1. LOC is the location of the resulting expression.
8516    ARG0 and ARG1 are the STRIP_NOPS results of OP0 and OP1.
8517 Return the folded expression if folding is successful. Otherwise,
8518 return NULL_TREE. */
8519 static tree
8520 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8521 tree arg0, tree arg1, tree op0, tree op1)
8522 {
8523 tree tem;
8524
8525 /* We only do these simplifications if we are optimizing. */
8526 if (!optimize)
8527 return NULL_TREE;
8528
8529 /* Check for things like (A || B) && (A || C). We can convert this
8530 to A || (B && C). Note that either operator can be any of the four
8531 truth and/or operations and the transformation will still be
8532 valid. Also note that we only care about order for the
8533 ANDIF and ORIF operators. If B contains side effects, this
8534 might change the truth-value of A. */
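  /* For example, (a || b) && (a || c) becomes a || (b && c); the
     TREE_SIDE_EFFECTS check below restricts the transformation to
     inner operands whose evaluation cannot be observed.  */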
8535 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8536 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8537 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8538 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8539 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8540 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8541 {
8542 tree a00 = TREE_OPERAND (arg0, 0);
8543 tree a01 = TREE_OPERAND (arg0, 1);
8544 tree a10 = TREE_OPERAND (arg1, 0);
8545 tree a11 = TREE_OPERAND (arg1, 1);
8546 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8547 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8548 && (code == TRUTH_AND_EXPR
8549 || code == TRUTH_OR_EXPR));
8550
8551 if (operand_equal_p (a00, a10, 0))
8552 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8553 fold_build2_loc (loc, code, type, a01, a11));
8554 else if (commutative && operand_equal_p (a00, a11, 0))
8555 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8556 fold_build2_loc (loc, code, type, a01, a10));
8557 else if (commutative && operand_equal_p (a01, a10, 0))
8558 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8559 fold_build2_loc (loc, code, type, a00, a11));
8560
8561       /* This case is tricky because we must either have commutative
8562 	 operators or else A10 must not have side-effects.  */
8563
8564 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8565 && operand_equal_p (a01, a11, 0))
8566 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8567 fold_build2_loc (loc, code, type, a00, a10),
8568 a01);
8569 }
8570
8571 /* See if we can build a range comparison. */
8572 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8573 return tem;
8574
8575 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8576 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8577 {
8578 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8579 if (tem)
8580 return fold_build2_loc (loc, code, type, tem, arg1);
8581 }
8582
8583 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8584 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8585 {
8586 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8587 if (tem)
8588 return fold_build2_loc (loc, code, type, arg0, tem);
8589 }
8590
8591 /* Check for the possibility of merging component references. If our
8592 lhs is another similar operation, try to merge its rhs with our
8593 rhs. Then try to merge our lhs and rhs. */
8594 if (TREE_CODE (arg0) == code
8595 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8596 TREE_OPERAND (arg0, 1), arg1)))
8597 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8598
8599 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8600 return tem;
8601
8602 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8603 && (code == TRUTH_AND_EXPR
8604 || code == TRUTH_ANDIF_EXPR
8605 || code == TRUTH_OR_EXPR
8606 || code == TRUTH_ORIF_EXPR))
8607 {
8608 enum tree_code ncode, icode;
8609
8610 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8611 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8612 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8613
8614 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8615 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8616 We don't want to pack more than two leaves into a non-IF AND/OR
8617 expression.
8618 If the tree code of the left-hand operand isn't an AND/OR-IF code
8619 and isn't equal to IF-CODE, then we don't add the right-hand operand.
8620 If the inner right-hand side of the left-hand operand has
8621 side-effects, or isn't simple, then we can't add to it,
8622 as otherwise we might destroy the if-sequence. */
8623 if (TREE_CODE (arg0) == icode
8624 && simple_operand_p_2 (arg1)
8625 /* Needed for sequence points to handle trappings, and
8626 side-effects. */
8627 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8628 {
8629 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8630 arg1);
8631 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8632 tem);
8633 }
8634 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8635 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8636 else if (TREE_CODE (arg1) == icode
8637 && simple_operand_p_2 (arg0)
8638 /* Needed for sequence points to handle trappings, and
8639 side-effects. */
8640 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8641 {
8642 tem = fold_build2_loc (loc, ncode, type,
8643 arg0, TREE_OPERAND (arg1, 0));
8644 return fold_build2_loc (loc, icode, type, tem,
8645 TREE_OPERAND (arg1, 1));
8646 }
8647 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8648 into (A OR B).
8649 For sequence point consistency, we need to check for trapping
8650 and side-effects. */
8651 else if (code == icode && simple_operand_p_2 (arg0)
8652 && simple_operand_p_2 (arg1))
8653 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8654 }
8655
8656 return NULL_TREE;
8657 }
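/* Added illustration, not part of the original source: a minimal,
   self-contained sketch of the distributive identity exploited by
   fold_truth_andor above.  (A || B) && (A || C) is equivalent to
   A || (B && C) provided B and C are free of side effects; the
   function name below is hypothetical.  */

static inline int
truth_andor_example (int a, int b, int c)
{
  /* The folded form evaluates A only once; for all boolean inputs it
     agrees with the unfolded (a || b) && (a || c).  */
  return a || (b && c);
}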
8658
8659 /* Fold a binary expression of code CODE and type TYPE with operands
8660 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8661 Return the folded expression if folding is successful. Otherwise,
8662 return NULL_TREE. */
8663
8664 static tree
8665 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8666 {
8667 enum tree_code compl_code;
8668
8669 if (code == MIN_EXPR)
8670 compl_code = MAX_EXPR;
8671 else if (code == MAX_EXPR)
8672 compl_code = MIN_EXPR;
8673 else
8674 gcc_unreachable ();
8675
8676 /* MIN (MAX (a, b), b) == b. */
8677 if (TREE_CODE (op0) == compl_code
8678 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8679 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8680
8681 /* MIN (MAX (b, a), b) == b. */
8682 if (TREE_CODE (op0) == compl_code
8683 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8684 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8685 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8686
8687 /* MIN (a, MAX (a, b)) == a. */
8688 if (TREE_CODE (op1) == compl_code
8689 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8690 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8691 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8692
8693 /* MIN (a, MAX (b, a)) == a. */
8694 if (TREE_CODE (op1) == compl_code
8695 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8696 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8697 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8698
8699 return NULL_TREE;
8700 }
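/* Added illustration, not part of the original source: a standalone
   sketch of the first identity above, MIN (MAX (a, b), b) == b.
   Since MAX (a, b) >= b, taking MIN with b always yields b; the
   helper name is hypothetical.  */

static inline int
fold_minmax_example (int a, int b)
{
  int mx = a > b ? a : b;   /* MAX (a, b) */
  return mx < b ? mx : b;   /* MIN (MAX (a, b), b), always b.  */
}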
8701
8702 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8703 by changing CODE to reduce the magnitude of constants involved in
8704 ARG0 of the comparison.
8705 Returns a canonicalized comparison tree if a simplification was
8706 possible, otherwise returns NULL_TREE.
8707 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8708 valid if signed overflow is undefined. */
8709
8710 static tree
8711 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8712 tree arg0, tree arg1,
8713 bool *strict_overflow_p)
8714 {
8715 enum tree_code code0 = TREE_CODE (arg0);
8716 tree t, cst0 = NULL_TREE;
8717 int sgn0;
8718 bool swap = false;
8719
8720 /* Match A +- CST code arg1 and CST code arg1. We can change the
8721 first form only if overflow is undefined. */
8722 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8723 /* In principle pointers also have undefined overflow behavior,
8724 but that causes problems elsewhere. */
8725 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8726 && (code0 == MINUS_EXPR
8727 || code0 == PLUS_EXPR)
8728 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8729 || code0 == INTEGER_CST))
8730 return NULL_TREE;
8731
8732 /* Identify the constant in arg0 and its sign. */
8733 if (code0 == INTEGER_CST)
8734 cst0 = arg0;
8735 else
8736 cst0 = TREE_OPERAND (arg0, 1);
8737 sgn0 = tree_int_cst_sgn (cst0);
8738
8739 /* Overflowed constants and zero will cause problems. */
8740 if (integer_zerop (cst0)
8741 || TREE_OVERFLOW (cst0))
8742 return NULL_TREE;
8743
8744 /* See if we can reduce the magnitude of the constant in
8745 arg0 by changing the comparison code. */
8746 if (code0 == INTEGER_CST)
8747 {
8748 /* CST <= arg1 -> CST-1 < arg1. */
8749 if (code == LE_EXPR && sgn0 == 1)
8750 code = LT_EXPR;
8751 /* -CST < arg1 -> -CST-1 <= arg1. */
8752 else if (code == LT_EXPR && sgn0 == -1)
8753 code = LE_EXPR;
8754 /* CST > arg1 -> CST-1 >= arg1. */
8755 else if (code == GT_EXPR && sgn0 == 1)
8756 code = GE_EXPR;
8757 /* -CST >= arg1 -> -CST-1 > arg1. */
8758 else if (code == GE_EXPR && sgn0 == -1)
8759 code = GT_EXPR;
8760 else
8761 return NULL_TREE;
8762 /* arg1 code' CST' might be more canonical. */
8763 swap = true;
8764 }
8765 else
8766 {
8767 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8768 if (code == LT_EXPR
8769 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8770 code = LE_EXPR;
8771 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8772 else if (code == GT_EXPR
8773 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8774 code = GE_EXPR;
8775 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8776 else if (code == LE_EXPR
8777 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8778 code = LT_EXPR;
8779 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8780 else if (code == GE_EXPR
8781 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8782 code = GT_EXPR;
8783 else
8784 return NULL_TREE;
8785 *strict_overflow_p = true;
8786 }
8787
8788 /* Now build the constant reduced in magnitude. But not if that
8789 would produce one outside of its type's range. */
8790 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8791 && ((sgn0 == 1
8792 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8793 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8794 || (sgn0 == -1
8795 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8796 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8797 /* We cannot swap the comparison here as that would cause us to
8798 endlessly recurse. */
8799 return NULL_TREE;
8800
8801 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8802 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8803 if (code0 != INTEGER_CST)
8804 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8805 t = fold_convert (TREE_TYPE (arg1), t);
8806
8807 /* If swapping might yield a more canonical form, do so. */
8808 if (swap)
8809 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8810 else
8811 return fold_build2_loc (loc, code, type, t, arg1);
8812 }
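/* Added illustration, not part of the original source: the magnitude
   reduction above rewrites, e.g., 5 <= x as 4 < x and then swaps it to
   x > 4.  A hypothetical self-check over plain int: */

static inline int
canonicalize_example (int x)
{
  /* Both forms agree for every int value of x, so this returns 1.  */
  return (5 <= x) == (x > 4);
}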
8813
8814 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8815 overflow further. Try to decrease the magnitude of constants involved
8816 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8817 and put sole constants at the second argument position.
8818 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8819
8820 static tree
8821 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8822 tree arg0, tree arg1)
8823 {
8824 tree t;
8825 bool strict_overflow_p;
8826 const char * const warnmsg = G_("assuming signed overflow does not occur "
8827 "when reducing constant in comparison");
8828
8829 /* Try canonicalization by simplifying arg0. */
8830 strict_overflow_p = false;
8831 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8832 &strict_overflow_p);
8833 if (t)
8834 {
8835 if (strict_overflow_p)
8836 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8837 return t;
8838 }
8839
8840 /* Try canonicalization by simplifying arg1 using the swapped
8841 comparison. */
8842 code = swap_tree_comparison (code);
8843 strict_overflow_p = false;
8844 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8845 &strict_overflow_p);
8846 if (t && strict_overflow_p)
8847 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8848 return t;
8849 }
8850
8851 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8852 space. This is used to avoid issuing overflow warnings for
8853 expressions like &p->x which cannot wrap. */
8854
8855 static bool
8856 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8857 {
8858 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8859 return true;
8860
8861 if (bitpos < 0)
8862 return true;
8863
8864 wide_int wi_offset;
8865 int precision = TYPE_PRECISION (TREE_TYPE (base));
8866 if (offset == NULL_TREE)
8867 wi_offset = wi::zero (precision);
8868 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8869 return true;
8870 else
8871 wi_offset = offset;
8872
8873 bool overflow;
8874 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8875 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8876 if (overflow)
8877 return true;
8878
8879 if (!wi::fits_uhwi_p (total))
8880 return true;
8881
8882 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8883 if (size <= 0)
8884 return true;
8885
8886 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8887 array. */
8888 if (TREE_CODE (base) == ADDR_EXPR)
8889 {
8890 HOST_WIDE_INT base_size;
8891
8892 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8893 if (base_size > 0 && size < base_size)
8894 size = base_size;
8895 }
8896
8897 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8898 }
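/* Added illustration, not part of the original source: the core of the
   wraparound test above is an overflow-checked unsigned addition of the
   byte offset and the bit position converted to units, as in this
   hypothetical sketch.  */

static inline int
offset_sum_wraps (unsigned HOST_WIDE_INT off, unsigned HOST_WIDE_INT units)
{
  unsigned HOST_WIDE_INT total = off + units;
  /* Unsigned arithmetic wraps modulo 2^N, so a sum smaller than either
     addend means the addition overflowed.  */
  return total < off;
}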
8899
8900 /* Return the HOST_WIDE_INT least significant bits of T, an
8901 INTEGER_CST of sizetype kind. This properly sign-extends the
8902 constant. */
8903
8904 static HOST_WIDE_INT
8905 size_low_cst (const_tree t)
8906 {
8907 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8908 int prec = TYPE_PRECISION (TREE_TYPE (t));
8909 if (prec < HOST_BITS_PER_WIDE_INT)
8910 return sext_hwi (w, prec);
8911 return w;
8912 }
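/* Added illustration, not part of the original source: sign-extension
   from PREC bits can be sketched with a shift pair, assuming the usual
   arithmetic right shift on signed values.  */

static inline HOST_WIDE_INT
sext_example (unsigned HOST_WIDE_INT w, int prec)
{
  int shift = HOST_BITS_PER_WIDE_INT - prec;
  /* Left-shift as unsigned to place bit PREC-1 at the sign position,
     then shift back arithmetically to replicate it downward.  */
  return (HOST_WIDE_INT) (w << shift) >> shift;
}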
8913
8914 /* Subroutine of fold_binary. This routine performs all of the
8915 transformations that are common to the equality/inequality
8916 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8917 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8918 fold_binary should call fold_binary rather than this routine. Fold a comparison with
8919 tree code CODE and type TYPE with operands OP0 and OP1. Return
8920 the folded comparison or NULL_TREE. */
8921
8922 static tree
8923 fold_comparison (location_t loc, enum tree_code code, tree type,
8924 tree op0, tree op1)
8925 {
8926 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8927 tree arg0, arg1, tem;
8928
8929 arg0 = op0;
8930 arg1 = op1;
8931
8932 STRIP_SIGN_NOPS (arg0);
8933 STRIP_SIGN_NOPS (arg1);
8934
8935 tem = fold_relational_const (code, type, arg0, arg1);
8936 if (tem != NULL_TREE)
8937 return tem;
8938
8939 /* If one arg is a real or integer constant, put it last. */
8940 if (tree_swap_operands_p (arg0, arg1, true))
8941 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8942
8943 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8944 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8945 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8946 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8947 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8948 && TREE_CODE (arg1) == INTEGER_CST
8949 && !TREE_OVERFLOW (arg1))
8950 {
8951 const enum tree_code
8952 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8953 tree const1 = TREE_OPERAND (arg0, 1);
8954 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8955 tree variable = TREE_OPERAND (arg0, 0);
8956 tree new_const = int_const_binop (reverse_op, const2, const1);
8957
8958 /* If the constant operation overflowed this can be
8959 simplified as a comparison against INT_MAX/INT_MIN. */
8960 if (TREE_OVERFLOW (new_const))
8961 {
8962 int const1_sgn = tree_int_cst_sgn (const1);
8963 enum tree_code code2 = code;
8964
8965 /* Get the sign of the constant on the lhs if the
8966 operation were VARIABLE + CONST1. */
8967 if (TREE_CODE (arg0) == MINUS_EXPR)
8968 const1_sgn = -const1_sgn;
8969
8970 /* The sign of the constant determines if we overflowed
8971 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8972 Canonicalize to the INT_MIN overflow by swapping the comparison
8973 if necessary. */
8974 if (const1_sgn == -1)
8975 code2 = swap_tree_comparison (code);
8976
8977 /* We now can look at the canonicalized case
8978 VARIABLE + 1 CODE2 INT_MIN
8979 and decide on the result. */
8980 switch (code2)
8981 {
8982 case EQ_EXPR:
8983 case LT_EXPR:
8984 case LE_EXPR:
8985 return
8986 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8987
8988 case NE_EXPR:
8989 case GE_EXPR:
8990 case GT_EXPR:
8991 return
8992 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8993
8994 default:
8995 gcc_unreachable ();
8996 }
8997 }
8998 else
8999 {
9000 if (!equality_code)
9001 fold_overflow_warning ("assuming signed overflow does not occur "
9002 "when changing X +- C1 cmp C2 to "
9003 "X cmp C2 -+ C1",
9004 WARN_STRICT_OVERFLOW_COMPARISON);
9005 return fold_build2_loc (loc, code, type, variable, new_const);
9006 }
9007 }
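  /* Added illustration, not part of the original source: with 32-bit
     int, X + 10 < 20 folds to X < 10 above.  When the folded constant
     overflows, e.g. X - 1 <= INT_MAX where C2 -+ C1 is INT_MAX + 1,
     the comparison collapses to a constant; here it is always true.  */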
9008
9009 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
9010 if (TREE_CODE (arg0) == MINUS_EXPR
9011 && equality_code
9012 && integer_zerop (arg1))
9013 {
9014 /* ??? The transformation is valid for the other operators if overflow
9015 is undefined for the type, but performing it here badly interacts
9016 with the transformation in fold_cond_expr_with_comparison which
9017 attempts to synthesize ABS_EXPR. */
9018 if (!equality_code)
9019 fold_overflow_warning ("assuming signed overflow does not occur "
9020 "when changing X - Y cmp 0 to X cmp Y",
9021 WARN_STRICT_OVERFLOW_COMPARISON);
9022 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
9023 TREE_OPERAND (arg0, 1));
9024 }
9025
9026 /* For comparisons of pointers we can decompose it to a compile time
9027 comparison of the base objects and the offsets into the object.
9028 This requires at least one operand being an ADDR_EXPR or a
9029 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9030 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9031 && (TREE_CODE (arg0) == ADDR_EXPR
9032 || TREE_CODE (arg1) == ADDR_EXPR
9033 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9034 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9035 {
9036 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9037 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9038 enum machine_mode mode;
9039 int volatilep, unsignedp;
9040 bool indirect_base0 = false, indirect_base1 = false;
9041
9042 /* Get base and offset for the access. Strip ADDR_EXPR for
9043 get_inner_reference, but put it back by stripping INDIRECT_REF
9044 off the base object if possible. indirect_baseN will be true
9045 if baseN is not an address but refers to the object itself. */
9046 base0 = arg0;
9047 if (TREE_CODE (arg0) == ADDR_EXPR)
9048 {
9049 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9050 &bitsize, &bitpos0, &offset0, &mode,
9051 &unsignedp, &volatilep, false);
9052 if (TREE_CODE (base0) == INDIRECT_REF)
9053 base0 = TREE_OPERAND (base0, 0);
9054 else
9055 indirect_base0 = true;
9056 }
9057 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9058 {
9059 base0 = TREE_OPERAND (arg0, 0);
9060 STRIP_SIGN_NOPS (base0);
9061 if (TREE_CODE (base0) == ADDR_EXPR)
9062 {
9063 base0 = TREE_OPERAND (base0, 0);
9064 indirect_base0 = true;
9065 }
9066 offset0 = TREE_OPERAND (arg0, 1);
9067 if (tree_fits_shwi_p (offset0))
9068 {
9069 HOST_WIDE_INT off = size_low_cst (offset0);
9070 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9071 * BITS_PER_UNIT)
9072 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9073 {
9074 bitpos0 = off * BITS_PER_UNIT;
9075 offset0 = NULL_TREE;
9076 }
9077 }
9078 }
9079
9080 base1 = arg1;
9081 if (TREE_CODE (arg1) == ADDR_EXPR)
9082 {
9083 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9084 &bitsize, &bitpos1, &offset1, &mode,
9085 &unsignedp, &volatilep, false);
9086 if (TREE_CODE (base1) == INDIRECT_REF)
9087 base1 = TREE_OPERAND (base1, 0);
9088 else
9089 indirect_base1 = true;
9090 }
9091 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9092 {
9093 base1 = TREE_OPERAND (arg1, 0);
9094 STRIP_SIGN_NOPS (base1);
9095 if (TREE_CODE (base1) == ADDR_EXPR)
9096 {
9097 base1 = TREE_OPERAND (base1, 0);
9098 indirect_base1 = true;
9099 }
9100 offset1 = TREE_OPERAND (arg1, 1);
9101 if (tree_fits_shwi_p (offset1))
9102 {
9103 HOST_WIDE_INT off = size_low_cst (offset1);
9104 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9105 * BITS_PER_UNIT)
9106 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9107 {
9108 bitpos1 = off * BITS_PER_UNIT;
9109 offset1 = NULL_TREE;
9110 }
9111 }
9112 }
9113
9114 /* A local variable can never be pointed to by
9115 the default SSA name of an incoming parameter. */
9116 if ((TREE_CODE (arg0) == ADDR_EXPR
9117 && indirect_base0
9118 && TREE_CODE (base0) == VAR_DECL
9119 && auto_var_in_fn_p (base0, current_function_decl)
9120 && !indirect_base1
9121 && TREE_CODE (base1) == SSA_NAME
9122 && SSA_NAME_IS_DEFAULT_DEF (base1)
9123 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9124 || (TREE_CODE (arg1) == ADDR_EXPR
9125 && indirect_base1
9126 && TREE_CODE (base1) == VAR_DECL
9127 && auto_var_in_fn_p (base1, current_function_decl)
9128 && !indirect_base0
9129 && TREE_CODE (base0) == SSA_NAME
9130 && SSA_NAME_IS_DEFAULT_DEF (base0)
9131 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9132 {
9133 if (code == NE_EXPR)
9134 return constant_boolean_node (1, type);
9135 else if (code == EQ_EXPR)
9136 return constant_boolean_node (0, type);
9137 }
9138 /* If we have equivalent bases we might be able to simplify. */
9139 else if (indirect_base0 == indirect_base1
9140 && operand_equal_p (base0, base1, 0))
9141 {
9142 /* We can fold this expression to a constant if the non-constant
9143 offset parts are equal. */
9144 if ((offset0 == offset1
9145 || (offset0 && offset1
9146 && operand_equal_p (offset0, offset1, 0)))
9147 && (code == EQ_EXPR
9148 || code == NE_EXPR
9149 || (indirect_base0 && DECL_P (base0))
9150 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9151
9152 {
9153 if (!equality_code
9154 && bitpos0 != bitpos1
9155 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9156 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9157 fold_overflow_warning (("assuming pointer wraparound does not "
9158 "occur when comparing P +- C1 with "
9159 "P +- C2"),
9160 WARN_STRICT_OVERFLOW_CONDITIONAL);
9161
9162 switch (code)
9163 {
9164 case EQ_EXPR:
9165 return constant_boolean_node (bitpos0 == bitpos1, type);
9166 case NE_EXPR:
9167 return constant_boolean_node (bitpos0 != bitpos1, type);
9168 case LT_EXPR:
9169 return constant_boolean_node (bitpos0 < bitpos1, type);
9170 case LE_EXPR:
9171 return constant_boolean_node (bitpos0 <= bitpos1, type);
9172 case GE_EXPR:
9173 return constant_boolean_node (bitpos0 >= bitpos1, type);
9174 case GT_EXPR:
9175 return constant_boolean_node (bitpos0 > bitpos1, type);
9176 default:;
9177 }
9178 }
9179 /* We can simplify the comparison to a comparison of the variable
9180 offset parts if the constant offset parts are equal.
9181 Be careful to use signed sizetype here because otherwise we
9182 mess with array offsets in the wrong way. This is possible
9183 because pointer arithmetic is required to remain within an
9184 object and overflow on pointer differences is undefined per
9185 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9186 else if (bitpos0 == bitpos1
9187 && (equality_code
9188 || (indirect_base0 && DECL_P (base0))
9189 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9190 {
9191 /* By converting to signed sizetype we cover middle-end pointer
9192 arithmetic, which operates on unsigned pointer types of sizetype
9193 size, and ARRAY_REF offsets, which are properly sign- or
9194 zero-extended from their type in case it is narrower than
9195 sizetype. */
9196 if (offset0 == NULL_TREE)
9197 offset0 = build_int_cst (ssizetype, 0);
9198 else
9199 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9200 if (offset1 == NULL_TREE)
9201 offset1 = build_int_cst (ssizetype, 0);
9202 else
9203 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9204
9205 if (!equality_code
9206 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9207 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9208 fold_overflow_warning (("assuming pointer wraparound does not "
9209 "occur when comparing P +- C1 with "
9210 "P +- C2"),
9211 WARN_STRICT_OVERFLOW_COMPARISON);
9212
9213 return fold_build2_loc (loc, code, type, offset0, offset1);
9214 }
9215 }
9216 /* For non-equal bases we can simplify if they are addresses
9217 of local binding decls or constants. */
9218 else if (indirect_base0 && indirect_base1
9219 /* We know that !operand_equal_p (base0, base1, 0)
9220 because the if condition was false. But make
9221 sure two decls are not the same. */
9222 && base0 != base1
9223 && TREE_CODE (arg0) == ADDR_EXPR
9224 && TREE_CODE (arg1) == ADDR_EXPR
9225 && (((TREE_CODE (base0) == VAR_DECL
9226 || TREE_CODE (base0) == PARM_DECL)
9227 && (targetm.binds_local_p (base0)
9228 || CONSTANT_CLASS_P (base1)))
9229 || CONSTANT_CLASS_P (base0))
9230 && (((TREE_CODE (base1) == VAR_DECL
9231 || TREE_CODE (base1) == PARM_DECL)
9232 && (targetm.binds_local_p (base1)
9233 || CONSTANT_CLASS_P (base0)))
9234 || CONSTANT_CLASS_P (base1)))
9235 {
9236 if (code == EQ_EXPR)
9237 return omit_two_operands_loc (loc, type, boolean_false_node,
9238 arg0, arg1);
9239 else if (code == NE_EXPR)
9240 return omit_two_operands_loc (loc, type, boolean_true_node,
9241 arg0, arg1);
9242 }
9243 /* For equal offsets we can simplify to a comparison of the
9244 base addresses. */
9245 else if (bitpos0 == bitpos1
9246 && (indirect_base0
9247 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9248 && (indirect_base1
9249 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9250 && ((offset0 == offset1)
9251 || (offset0 && offset1
9252 && operand_equal_p (offset0, offset1, 0))))
9253 {
9254 if (indirect_base0)
9255 base0 = build_fold_addr_expr_loc (loc, base0);
9256 if (indirect_base1)
9257 base1 = build_fold_addr_expr_loc (loc, base1);
9258 return fold_build2_loc (loc, code, type, base0, base1);
9259 }
9260 }
9261
9262 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9263 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9264 the resulting offset is smaller in absolute value than the
9265 original one and has the same sign. */
9266 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9267 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9268 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9269 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9270 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9271 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9272 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9273 {
9274 tree const1 = TREE_OPERAND (arg0, 1);
9275 tree const2 = TREE_OPERAND (arg1, 1);
9276 tree variable1 = TREE_OPERAND (arg0, 0);
9277 tree variable2 = TREE_OPERAND (arg1, 0);
9278 tree cst;
9279 const char * const warnmsg = G_("assuming signed overflow does not "
9280 "occur when combining constants around "
9281 "a comparison");
9282
9283 /* Put the constant on the side where it doesn't overflow and is
9284 of lower absolute value and of the same sign as before. */
9285 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9286 ? MINUS_EXPR : PLUS_EXPR,
9287 const2, const1);
9288 if (!TREE_OVERFLOW (cst)
9289 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9290 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9291 {
9292 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9293 return fold_build2_loc (loc, code, type,
9294 variable1,
9295 fold_build2_loc (loc, TREE_CODE (arg1),
9296 TREE_TYPE (arg1),
9297 variable2, cst));
9298 }
9299
9300 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9301 ? MINUS_EXPR : PLUS_EXPR,
9302 const1, const2);
9303 if (!TREE_OVERFLOW (cst)
9304 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9305 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9306 {
9307 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9308 return fold_build2_loc (loc, code, type,
9309 fold_build2_loc (loc, TREE_CODE (arg0),
9310 TREE_TYPE (arg0),
9311 variable1, cst),
9312 variable2);
9313 }
9314 }
9315
9316 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9317 signed arithmetic case. That form is created by the compiler
9318 often enough for folding it to be of value. One example is in
9319 computing loop trip counts after Operator Strength Reduction. */
9320 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9321 && TREE_CODE (arg0) == MULT_EXPR
9322 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9323 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9324 && integer_zerop (arg1))
9325 {
9326 tree const1 = TREE_OPERAND (arg0, 1);
9327 tree const2 = arg1; /* zero */
9328 tree variable1 = TREE_OPERAND (arg0, 0);
9329 enum tree_code cmp_code = code;
9330
9331 /* Handle unfolded multiplication by zero. */
9332 if (integer_zerop (const1))
9333 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9334
9335 fold_overflow_warning (("assuming signed overflow does not occur when "
9336 "eliminating multiplication in comparison "
9337 "with zero"),
9338 WARN_STRICT_OVERFLOW_COMPARISON);
9339
9340 /* If const1 is negative we swap the sense of the comparison. */
9341 if (tree_int_cst_sgn (const1) < 0)
9342 cmp_code = swap_tree_comparison (cmp_code);
9343
9344 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9345 }
9346
9347 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9348 if (tem)
9349 return tem;
9350
9351 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9352 {
9353 tree targ0 = strip_float_extensions (arg0);
9354 tree targ1 = strip_float_extensions (arg1);
9355 tree newtype = TREE_TYPE (targ0);
9356
9357 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9358 newtype = TREE_TYPE (targ1);
9359
9360 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9361 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9362 return fold_build2_loc (loc, code, type,
9363 fold_convert_loc (loc, newtype, targ0),
9364 fold_convert_loc (loc, newtype, targ1));
9365
9366 /* (-a) CMP (-b) -> b CMP a */
9367 if (TREE_CODE (arg0) == NEGATE_EXPR
9368 && TREE_CODE (arg1) == NEGATE_EXPR)
9369 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9370 TREE_OPERAND (arg0, 0));
9371
9372 if (TREE_CODE (arg1) == REAL_CST)
9373 {
9374 REAL_VALUE_TYPE cst;
9375 cst = TREE_REAL_CST (arg1);
9376
9377 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9378 if (TREE_CODE (arg0) == NEGATE_EXPR)
9379 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9380 TREE_OPERAND (arg0, 0),
9381 build_real (TREE_TYPE (arg1),
9382 real_value_negate (&cst)));
9383
9384 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9385 /* a CMP (-0) -> a CMP 0 */
9386 if (REAL_VALUE_MINUS_ZERO (cst))
9387 return fold_build2_loc (loc, code, type, arg0,
9388 build_real (TREE_TYPE (arg1), dconst0));
9389
9390 /* x != NaN is always true, other ops are always false. */
9391 if (REAL_VALUE_ISNAN (cst)
9392 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9393 {
9394 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9395 return omit_one_operand_loc (loc, type, tem, arg0);
9396 }
9397
9398 /* Fold comparisons against infinity. */
9399 if (REAL_VALUE_ISINF (cst)
9400 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9401 {
9402 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9403 if (tem != NULL_TREE)
9404 return tem;
9405 }
9406 }
9407
9408 /* If this is a comparison of a real constant with a PLUS_EXPR
9409 or a MINUS_EXPR of a real constant, we can convert it into a
9410 comparison with a revised real constant, as long as no overflow
9411 occurs and unsafe_math_optimizations are enabled. */
9412 if (flag_unsafe_math_optimizations
9413 && TREE_CODE (arg1) == REAL_CST
9414 && (TREE_CODE (arg0) == PLUS_EXPR
9415 || TREE_CODE (arg0) == MINUS_EXPR)
9416 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9417 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9418 ? MINUS_EXPR : PLUS_EXPR,
9419 arg1, TREE_OPERAND (arg0, 1)))
9420 && !TREE_OVERFLOW (tem))
9421 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9422
9423 /* Likewise, we can simplify a comparison of a real constant with
9424 a MINUS_EXPR whose first operand is also a real constant, i.e.
9425 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9426 floating-point types only if -fassociative-math is set. */
9427 if (flag_associative_math
9428 && TREE_CODE (arg1) == REAL_CST
9429 && TREE_CODE (arg0) == MINUS_EXPR
9430 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9431 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9432 arg1))
9433 && !TREE_OVERFLOW (tem))
9434 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9435 TREE_OPERAND (arg0, 1), tem);
9436
9437 /* Fold comparisons against built-in math functions. */
9438 if (TREE_CODE (arg1) == REAL_CST
9439 && flag_unsafe_math_optimizations
9440 && ! flag_errno_math)
9441 {
9442 enum built_in_function fcode = builtin_mathfn_code (arg0);
9443
9444 if (fcode != END_BUILTINS)
9445 {
9446 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9447 if (tem != NULL_TREE)
9448 return tem;
9449 }
9450 }
9451 }
9452
9453 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9454 && CONVERT_EXPR_P (arg0))
9455 {
9456 /* If we are widening one operand of an integer comparison,
9457 see if the other operand is similarly being widened. Perhaps we
9458 can do the comparison in the narrower type. */
9459 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9460 if (tem)
9461 return tem;
9462
9463 /* Or if we are changing signedness. */
9464 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9465 if (tem)
9466 return tem;
9467 }
9468
9469 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9470 constant, we can simplify it. */
9471 if (TREE_CODE (arg1) == INTEGER_CST
9472 && (TREE_CODE (arg0) == MIN_EXPR
9473 || TREE_CODE (arg0) == MAX_EXPR)
9474 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9475 {
9476 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9477 if (tem)
9478 return tem;
9479 }
9480
9481 /* Simplify comparison of something with itself. (For IEEE
9482 floating-point, we can only do some of these simplifications.) */
9483 if (operand_equal_p (arg0, arg1, 0))
9484 {
9485 switch (code)
9486 {
9487 case EQ_EXPR:
9488 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9489 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9490 return constant_boolean_node (1, type);
9491 break;
9492
9493 case GE_EXPR:
9494 case LE_EXPR:
9495 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9496 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9497 return constant_boolean_node (1, type);
9498 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9499
9500 case NE_EXPR:
9501 /* For NE, we can only do this simplification if integer
9502 or we don't honor IEEE floating point NaNs. */
9503 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9504 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9505 break;
9506 /* ... fall through ... */
9507 case GT_EXPR:
9508 case LT_EXPR:
9509 return constant_boolean_node (0, type);
9510 default:
9511 gcc_unreachable ();
9512 }
9513 }
9514
9515 /* If we are comparing an expression that just has comparisons
9516 of two integer values, arithmetic expressions of those comparisons,
9517 and constants, we can simplify it. There are only three cases
9518 to check: the two values can either be equal, the first can be
9519 greater, or the second can be greater. Fold the expression for
9520 those three values. Since each value must be 0 or 1, we have
9521 eight possibilities, each of which corresponds to the constant 0
9522 or 1 or one of the six possible comparisons.
9523
9524 This handles common cases like (a > b) == 0 but also handles
9525 expressions like ((x > y) - (y > x)) > 0, which supposedly
9526 occur in macroized code. */
9527
9528 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9529 {
9530 tree cval1 = 0, cval2 = 0;
9531 int save_p = 0;
9532
9533 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9534 /* Don't handle degenerate cases here; they should already
9535 have been handled anyway. */
9536 && cval1 != 0 && cval2 != 0
9537 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9538 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9539 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9540 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9541 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9542 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9543 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9544 {
9545 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9546 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9547
9548 /* We can't just pass T to eval_subst in case cval1 or cval2
9549 was the same as ARG1. */
9550
9551 tree high_result
9552 = fold_build2_loc (loc, code, type,
9553 eval_subst (loc, arg0, cval1, maxval,
9554 cval2, minval),
9555 arg1);
9556 tree equal_result
9557 = fold_build2_loc (loc, code, type,
9558 eval_subst (loc, arg0, cval1, maxval,
9559 cval2, maxval),
9560 arg1);
9561 tree low_result
9562 = fold_build2_loc (loc, code, type,
9563 eval_subst (loc, arg0, cval1, minval,
9564 cval2, maxval),
9565 arg1);
9566
9567 /* All three of these results should be 0 or 1. Confirm they are.
9568 Then use those values to select the proper code to use. */
9569
9570 if (TREE_CODE (high_result) == INTEGER_CST
9571 && TREE_CODE (equal_result) == INTEGER_CST
9572 && TREE_CODE (low_result) == INTEGER_CST)
9573 {
9574 /* Make a 3-bit mask with the high-order bit being the
9575 value for `>', the next for `=', and the low for `<'. */
9576 switch ((integer_onep (high_result) * 4)
9577 + (integer_onep (equal_result) * 2)
9578 + integer_onep (low_result))
9579 {
9580 case 0:
9581 /* Always false. */
9582 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9583 case 1:
9584 code = LT_EXPR;
9585 break;
9586 case 2:
9587 code = EQ_EXPR;
9588 break;
9589 case 3:
9590 code = LE_EXPR;
9591 break;
9592 case 4:
9593 code = GT_EXPR;
9594 break;
9595 case 5:
9596 code = NE_EXPR;
9597 break;
9598 case 6:
9599 code = GE_EXPR;
9600 break;
9601 case 7:
9602 /* Always true. */
9603 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9604 }
9605
9606 if (save_p)
9607 {
9608 tem = save_expr (build2 (code, type, cval1, cval2));
9609 SET_EXPR_LOCATION (tem, loc);
9610 return tem;
9611 }
9612 return fold_build2_loc (loc, code, type, cval1, cval2);
9613 }
9614 }
9615 }
9616
9617 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9618 into a single range test. */
9619 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9620 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9621 && TREE_CODE (arg1) == INTEGER_CST
9622 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9623 && !integer_zerop (TREE_OPERAND (arg0, 1))
9624 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9625 && !TREE_OVERFLOW (arg1))
9626 {
9627 tem = fold_div_compare (loc, code, type, arg0, arg1);
9628 if (tem != NULL_TREE)
9629 return tem;
9630 }
9631
9632 /* Fold ~X op ~Y as Y op X. */
9633 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9634 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9635 {
9636 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9637 return fold_build2_loc (loc, code, type,
9638 fold_convert_loc (loc, cmp_type,
9639 TREE_OPERAND (arg1, 0)),
9640 TREE_OPERAND (arg0, 0));
9641 }
9642
9643 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9644 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9645 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9646 {
9647 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9648 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9649 TREE_OPERAND (arg0, 0),
9650 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9651 fold_convert_loc (loc, cmp_type, arg1)));
9652 }
9653
9654 return NULL_TREE;
9655 }
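/* Added illustration, not part of the original source: the last two
   folds above rely on bitwise NOT reversing order.  On the usual
   two's-complement int, ~x == -x - 1, so ~x < ~y holds exactly when
   y < x.  A hypothetical self-check: */

static inline int
bitnot_compare_example (int x, int y)
{
  return (~x < ~y) == (y < x);   /* always 1 */
}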
9656
9657
9658 /* Subroutine of fold_binary. Optimize complex multiplications of the
9659 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9660 argument EXPR represents the expression "z" of type TYPE. */
9661
9662 static tree
9663 fold_mult_zconjz (location_t loc, tree type, tree expr)
9664 {
9665 tree itype = TREE_TYPE (type);
9666 tree rpart, ipart, tem;
9667
9668 if (TREE_CODE (expr) == COMPLEX_EXPR)
9669 {
9670 rpart = TREE_OPERAND (expr, 0);
9671 ipart = TREE_OPERAND (expr, 1);
9672 }
9673 else if (TREE_CODE (expr) == COMPLEX_CST)
9674 {
9675 rpart = TREE_REALPART (expr);
9676 ipart = TREE_IMAGPART (expr);
9677 }
9678 else
9679 {
9680 expr = save_expr (expr);
9681 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9682 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9683 }
9684
9685 rpart = save_expr (rpart);
9686 ipart = save_expr (ipart);
9687 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9688 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9689 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9690 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9691 build_zero_cst (itype));
9692 }
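/* Added illustration, not part of the original source: for z = re + im*i,
   z * conj(z) equals re*re + im*im with an exactly zero imaginary part,
   which is what the fold above builds.  */

static inline double
zconjz_example (double re, double im)
{
  /* Real part of z * conj(z); the imaginary part cancels exactly.  */
  return re * re + im * im;
}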
9693
9694
9695 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9696 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9697 guarantees that P and N have the same least significant log2(M) bits.
9698 N is not otherwise constrained. In particular, N is not normalized to
9699 0 <= N < M as is common. In general, the precise value of P is unknown.
9700 M is chosen as large as possible such that constant N can be determined.
9701
9702 Returns M and sets *RESIDUE to N.
9703
9704 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9705 account. This is not always possible due to PR 35705.
9706 */
9707
9708 static unsigned HOST_WIDE_INT
9709 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9710 bool allow_func_align)
9711 {
9712 enum tree_code code;
9713
9714 *residue = 0;
9715
9716 code = TREE_CODE (expr);
9717 if (code == ADDR_EXPR)
9718 {
9719 unsigned int bitalign;
9720 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9721 *residue /= BITS_PER_UNIT;
9722 return bitalign / BITS_PER_UNIT;
9723 }
9724 else if (code == POINTER_PLUS_EXPR)
9725 {
9726 tree op0, op1;
9727 unsigned HOST_WIDE_INT modulus;
9728 enum tree_code inner_code;
9729
9730 op0 = TREE_OPERAND (expr, 0);
9731 STRIP_NOPS (op0);
9732 modulus = get_pointer_modulus_and_residue (op0, residue,
9733 allow_func_align);
9734
9735 op1 = TREE_OPERAND (expr, 1);
9736 STRIP_NOPS (op1);
9737 inner_code = TREE_CODE (op1);
9738 if (inner_code == INTEGER_CST)
9739 {
9740 *residue += TREE_INT_CST_LOW (op1);
9741 return modulus;
9742 }
9743 else if (inner_code == MULT_EXPR)
9744 {
9745 op1 = TREE_OPERAND (op1, 1);
9746 if (TREE_CODE (op1) == INTEGER_CST)
9747 {
9748 unsigned HOST_WIDE_INT align;
9749
9750 /* Compute the greatest power-of-2 divisor of op1. */
9751 align = TREE_INT_CST_LOW (op1);
9752 align &= -align;
9753
9754 /* If align is non-zero and less than modulus, replace
9755 modulus with align. If align is 0, then either op1 is 0
9756 or the greatest power-of-2 divisor of op1 doesn't fit in an
9757 unsigned HOST_WIDE_INT. In either case, no additional
9758 constraint is imposed. */
9759 if (align)
9760 modulus = MIN (modulus, align);
9761
9762 return modulus;
9763 }
9764 }
9765 }
9766
9767 /* If we get here, we were unable to determine anything useful about the
9768 expression. */
9769 return 1;
9770 }
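/* Added illustration, not part of the original source: the
   "greatest power-of-2 divisor" computation above uses the classic
   two's-complement trick of isolating the lowest set bit.  */

static inline unsigned HOST_WIDE_INT
pow2_divisor_example (unsigned HOST_WIDE_INT v)
{
  /* v & -v keeps only the lowest set bit; for v == 0 it yields 0,
     meaning no power-of-2 constraint can be derived.  */
  return v & -v;
}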
9771
9772 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9773 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9774
9775 static bool
9776 vec_cst_ctor_to_array (tree arg, tree *elts)
9777 {
9778 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9779
9780 if (TREE_CODE (arg) == VECTOR_CST)
9781 {
9782 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9783 elts[i] = VECTOR_CST_ELT (arg, i);
9784 }
9785 else if (TREE_CODE (arg) == CONSTRUCTOR)
9786 {
9787 constructor_elt *elt;
9788
9789 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9790 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9791 return false;
9792 else
9793 elts[i] = elt->value;
9794 }
9795 else
9796 return false;
9797 for (; i < nelts; i++)
9798 elts[i]
9799 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9800 return true;
9801 }
9802
9803 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9804 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9805 NULL_TREE otherwise. */
9806
9807 static tree
9808 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9809 {
9810 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9811 tree *elts;
9812 bool need_ctor = false;
9813
9814 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9815 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9816 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9817 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9818 return NULL_TREE;
9819
9820 elts = XALLOCAVEC (tree, nelts * 3);
9821 if (!vec_cst_ctor_to_array (arg0, elts)
9822 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9823 return NULL_TREE;
9824
9825 for (i = 0; i < nelts; i++)
9826 {
9827 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9828 need_ctor = true;
9829 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9830 }
9831
9832 if (need_ctor)
9833 {
9834 vec<constructor_elt, va_gc> *v;
9835 vec_alloc (v, nelts);
9836 for (i = 0; i < nelts; i++)
9837 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9838 return build_constructor (type, v);
9839 }
9840 else
9841 return build_vector (type, &elts[2 * nelts]);
9842 }
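/* Added illustration, not part of the original source: a hypothetical
   scalar model of the selection rule folded above.  Element I of the
   result is taken from ARG0 when SEL[I] < NELTS and from ARG1
   otherwise.  */

static void
vec_perm_example (const int *arg0, const int *arg1,
                  const unsigned char *sel, int *out, unsigned int nelts)
{
  unsigned int i;
  for (i = 0; i < nelts; i++)
    out[i] = sel[i] < nelts ? arg0[sel[i]] : arg1[sel[i] - nelts];
}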
9843
9844 /* Try to fold a pointer difference of type TYPE between two address
9845 expressions of array references AREF0 and AREF1 using location LOC.
9846 Return a simplified expression for the difference or NULL_TREE. */
9847
9848 static tree
9849 fold_addr_of_array_ref_difference (location_t loc, tree type,
9850 tree aref0, tree aref1)
9851 {
9852 tree base0 = TREE_OPERAND (aref0, 0);
9853 tree base1 = TREE_OPERAND (aref1, 0);
9854 tree base_offset = build_int_cst (type, 0);
9855
9856 /* If the bases are array references as well, recurse. If the bases
9857 are pointer indirections compute the difference of the pointers.
9858 If the bases are equal, we are set. */
9859 if ((TREE_CODE (base0) == ARRAY_REF
9860 && TREE_CODE (base1) == ARRAY_REF
9861 && (base_offset
9862 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9863 || (INDIRECT_REF_P (base0)
9864 && INDIRECT_REF_P (base1)
9865 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9866 TREE_OPERAND (base0, 0),
9867 TREE_OPERAND (base1, 0))))
9868 || operand_equal_p (base0, base1, 0))
9869 {
9870 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9871 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9872 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9873 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9874 return fold_build2_loc (loc, PLUS_EXPR, type,
9875 base_offset,
9876 fold_build2_loc (loc, MULT_EXPR, type,
9877 diff, esz));
9878 }
9879 return NULL_TREE;
9880 }
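/* Added illustration, not part of the original source: for a flat
   array the fold above reduces to index arithmetic, as C pointer
   subtraction already shows.  */

static inline long
aref_diff_example (int *a, long i, long j)
{
  /* &a[i] - &a[j] simplifies to i - j (in units of the element size),
     assuming both point into the same array object.  */
  return &a[i] - &a[j];
}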
9881
9882 /* If the real or vector real constant CST of type TYPE has an exact
9883 inverse, return it, else return NULL. */
9884
9885 static tree
9886 exact_inverse (tree type, tree cst)
9887 {
9888 REAL_VALUE_TYPE r;
9889 tree unit_type, *elts;
9890 enum machine_mode mode;
9891 unsigned vec_nelts, i;
9892
9893 switch (TREE_CODE (cst))
9894 {
9895 case REAL_CST:
9896 r = TREE_REAL_CST (cst);
9897
9898 if (exact_real_inverse (TYPE_MODE (type), &r))
9899 return build_real (type, r);
9900
9901 return NULL_TREE;
9902
9903 case VECTOR_CST:
9904 vec_nelts = VECTOR_CST_NELTS (cst);
9905 elts = XALLOCAVEC (tree, vec_nelts);
9906 unit_type = TREE_TYPE (type);
9907 mode = TYPE_MODE (unit_type);
9908
9909 for (i = 0; i < vec_nelts; i++)
9910 {
9911 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9912 if (!exact_real_inverse (mode, &r))
9913 return NULL_TREE;
9914 elts[i] = build_real (unit_type, r);
9915 }
9916
9917 return build_vector (type, elts);
9918
9919 default:
9920 return NULL_TREE;
9921 }
9922 }
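/* Added illustration, not part of the original source: only constants
   whose inverse is exactly representable qualify, essentially powers
   of two.  */

static inline int
exact_inverse_example (void)
{
  /* 1/0.25 == 4.0 exactly, so x / 0.25 may become x * 4.0; 1/0.1 has
     no exact binary representation, so that fold is declined.  */
  return 1.0 / 0.25 == 4.0;
}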
9923
9924 /* Mask out the tz least significant bits of X of type TYPE where
9925 tz is the number of trailing zeroes in Y. */
9926 static wide_int
9927 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9928 {
9929 int tz = wi::ctz (y);
9930 if (tz > 0)
9931 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9932 return x;
9933 }
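/* Added illustration, not part of the original source: a hypothetical
   32-bit model of the masking above, leaving the Y == 0 case aside.  */

static inline unsigned int
mask_with_tz_example (unsigned int x, unsigned int y)
{
  /* Clear the TZ low bits of X, where TZ is the number of trailing
     zeros in Y; __builtin_ctz is undefined for a zero argument.  */
  return y ? x & ~((1u << __builtin_ctz (y)) - 1u) : x;
}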
9934
9935 /* Return true when T is an address and is known to be nonzero.
9936 For floating point we further ensure that T is not denormal.
9937 Similar logic is present in nonzero_address in rtlanal.h.
9938
9939 If the return value is based on the assumption that signed overflow
9940 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9941 change *STRICT_OVERFLOW_P. */
9942
9943 static bool
9944 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9945 {
9946 tree type = TREE_TYPE (t);
9947 enum tree_code code;
9948
9949 /* Doing something useful for floating point would need more work. */
9950 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9951 return false;
9952
9953 code = TREE_CODE (t);
9954 switch (TREE_CODE_CLASS (code))
9955 {
9956 case tcc_unary:
9957 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9958 strict_overflow_p);
9959 case tcc_binary:
9960 case tcc_comparison:
9961 return tree_binary_nonzero_warnv_p (code, type,
9962 TREE_OPERAND (t, 0),
9963 TREE_OPERAND (t, 1),
9964 strict_overflow_p);
9965 case tcc_constant:
9966 case tcc_declaration:
9967 case tcc_reference:
9968 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9969
9970 default:
9971 break;
9972 }
9973
9974 switch (code)
9975 {
9976 case TRUTH_NOT_EXPR:
9977 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9978 strict_overflow_p);
9979
9980 case TRUTH_AND_EXPR:
9981 case TRUTH_OR_EXPR:
9982 case TRUTH_XOR_EXPR:
9983 return tree_binary_nonzero_warnv_p (code, type,
9984 TREE_OPERAND (t, 0),
9985 TREE_OPERAND (t, 1),
9986 strict_overflow_p);
9987
9988 case COND_EXPR:
9989 case CONSTRUCTOR:
9990 case OBJ_TYPE_REF:
9991 case ASSERT_EXPR:
9992 case ADDR_EXPR:
9993 case WITH_SIZE_EXPR:
9994 case SSA_NAME:
9995 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9996
9997 case COMPOUND_EXPR:
9998 case MODIFY_EXPR:
9999 case BIND_EXPR:
10000 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10001 strict_overflow_p);
10002
10003 case SAVE_EXPR:
10004 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10005 strict_overflow_p);
10006
10007 case CALL_EXPR:
10008 {
10009 tree fndecl = get_callee_fndecl (t);
10010 if (!fndecl) return false;
10011 if (flag_delete_null_pointer_checks && !flag_check_new
10012 && DECL_IS_OPERATOR_NEW (fndecl)
10013 && !TREE_NOTHROW (fndecl))
10014 return true;
10015 if (flag_delete_null_pointer_checks
10016 && lookup_attribute ("returns_nonnull",
10017 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10018 return true;
10019 return alloca_call_p (t);
10020 }
10021
10022 default:
10023 break;
10024 }
10025 return false;
10026 }
10027
10028 /* Return true when T is an address and is known to be nonzero.
10029 Handle warnings about undefined signed overflow. */
10030
10031 static bool
10032 tree_expr_nonzero_p (tree t)
10033 {
10034 bool ret, strict_overflow_p;
10035
10036 strict_overflow_p = false;
10037 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10038 if (strict_overflow_p)
10039 fold_overflow_warning (("assuming signed overflow does not occur when "
10040 "determining that expression is always "
10041 "non-zero"),
10042 WARN_STRICT_OVERFLOW_MISC);
10043 return ret;
10044 }
10045
10046 /* Fold a binary expression of code CODE and type TYPE with operands
10047 OP0 and OP1. LOC is the location of the resulting expression.
10048 Return the folded expression if folding is successful. Otherwise,
10049 return NULL_TREE. */
10050
10051 tree
10052 fold_binary_loc (location_t loc,
10053 enum tree_code code, tree type, tree op0, tree op1)
10054 {
10055 enum tree_code_class kind = TREE_CODE_CLASS (code);
10056 tree arg0, arg1, tem;
10057 tree t1 = NULL_TREE;
10058 bool strict_overflow_p;
10059 unsigned int prec;
10060
10061 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10062 && TREE_CODE_LENGTH (code) == 2
10063 && op0 != NULL_TREE
10064 && op1 != NULL_TREE);
10065
10066 arg0 = op0;
10067 arg1 = op1;
10068
10069 /* Strip any conversions that don't change the mode. This is
10070 safe for every expression, except for a comparison expression
10071 because its signedness is derived from its operands. So, in
10072 the latter case, only strip conversions that don't change the
10073 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10074 preserved.
10075
10076 Note that this is done as an internal manipulation within the
10077 constant folder, in order to find the simplest representation
10078 of the arguments so that their form can be studied. In any
10079 case, the appropriate type conversions should be put back in
10080 the tree that will get out of the constant folder. */
10081
10082 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10083 {
10084 STRIP_SIGN_NOPS (arg0);
10085 STRIP_SIGN_NOPS (arg1);
10086 }
10087 else
10088 {
10089 STRIP_NOPS (arg0);
10090 STRIP_NOPS (arg1);
10091 }
10092
10093 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10094 constant but we can't do arithmetic on them. */
10095 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10096 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10097 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10098 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10099 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10100 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10101 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10102 {
10103 if (kind == tcc_binary)
10104 {
10105 /* Make sure type and arg0 have the same saturating flag. */
10106 gcc_assert (TYPE_SATURATING (type)
10107 == TYPE_SATURATING (TREE_TYPE (arg0)));
10108 tem = const_binop (code, arg0, arg1);
10109 }
10110 else if (kind == tcc_comparison)
10111 tem = fold_relational_const (code, type, arg0, arg1);
10112 else
10113 tem = NULL_TREE;
10114
10115 if (tem != NULL_TREE)
10116 {
10117 if (TREE_TYPE (tem) != type)
10118 tem = fold_convert_loc (loc, type, tem);
10119 return tem;
10120 }
10121 }
10122
10123 /* If this is a commutative operation, and ARG0 is a constant, move it
10124 to ARG1 to reduce the number of tests below. */
10125 if (commutative_tree_code (code)
10126 && tree_swap_operands_p (arg0, arg1, true))
10127 return fold_build2_loc (loc, code, type, op1, op0);
10128
10129 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10130
10131 First check for cases where an arithmetic operation is applied to a
10132 compound, conditional, or comparison operation. Push the arithmetic
10133 operation inside the compound or conditional to see if any folding
10134 can then be done. Convert comparison to conditional for this purpose.
10135 This also optimizes non-constant cases that used to be done in
10136 expand_expr.
10137
10138 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10139 where one of the operands is a comparison and the other is a comparison, a
10140 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10141 code below would make the expression more complex. Change it to a
10142 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10143 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10144
10145 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10146 || code == EQ_EXPR || code == NE_EXPR)
10147 && TREE_CODE (type) != VECTOR_TYPE
10148 && ((truth_value_p (TREE_CODE (arg0))
10149 && (truth_value_p (TREE_CODE (arg1))
10150 || (TREE_CODE (arg1) == BIT_AND_EXPR
10151 && integer_onep (TREE_OPERAND (arg1, 1)))))
10152 || (truth_value_p (TREE_CODE (arg1))
10153 && (truth_value_p (TREE_CODE (arg0))
10154 || (TREE_CODE (arg0) == BIT_AND_EXPR
10155 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10156 {
10157 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10158 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10159 : TRUTH_XOR_EXPR,
10160 boolean_type_node,
10161 fold_convert_loc (loc, boolean_type_node, arg0),
10162 fold_convert_loc (loc, boolean_type_node, arg1));
10163
10164 if (code == EQ_EXPR)
10165 tem = invert_truthvalue_loc (loc, tem);
10166
10167 return fold_convert_loc (loc, type, tem);
10168 }
10169
10170 if (TREE_CODE_CLASS (code) == tcc_binary
10171 || TREE_CODE_CLASS (code) == tcc_comparison)
10172 {
10173 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10174 {
10175 tem = fold_build2_loc (loc, code, type,
10176 fold_convert_loc (loc, TREE_TYPE (op0),
10177 TREE_OPERAND (arg0, 1)), op1);
10178 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10179 tem);
10180 }
10181 if (TREE_CODE (arg1) == COMPOUND_EXPR
10182 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10183 {
10184 tem = fold_build2_loc (loc, code, type, op0,
10185 fold_convert_loc (loc, TREE_TYPE (op1),
10186 TREE_OPERAND (arg1, 1)));
10187 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10188 tem);
10189 }
10190
10191 if (TREE_CODE (arg0) == COND_EXPR
10192 || TREE_CODE (arg0) == VEC_COND_EXPR
10193 || COMPARISON_CLASS_P (arg0))
10194 {
10195 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10196 arg0, arg1,
10197 /*cond_first_p=*/1);
10198 if (tem != NULL_TREE)
10199 return tem;
10200 }
10201
10202 if (TREE_CODE (arg1) == COND_EXPR
10203 || TREE_CODE (arg1) == VEC_COND_EXPR
10204 || COMPARISON_CLASS_P (arg1))
10205 {
10206 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10207 arg1, arg0,
10208 /*cond_first_p=*/0);
10209 if (tem != NULL_TREE)
10210 return tem;
10211 }
10212 }
10213
10214 switch (code)
10215 {
10216 case MEM_REF:
10217 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10218 if (TREE_CODE (arg0) == ADDR_EXPR
10219 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10220 {
10221 tree iref = TREE_OPERAND (arg0, 0);
10222 return fold_build2 (MEM_REF, type,
10223 TREE_OPERAND (iref, 0),
10224 int_const_binop (PLUS_EXPR, arg1,
10225 TREE_OPERAND (iref, 1)));
10226 }
10227
10228 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10229 if (TREE_CODE (arg0) == ADDR_EXPR
10230 && handled_component_p (TREE_OPERAND (arg0, 0)))
10231 {
10232 tree base;
10233 HOST_WIDE_INT coffset;
10234 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10235 &coffset);
10236 if (!base)
10237 return NULL_TREE;
10238 return fold_build2 (MEM_REF, type,
10239 build_fold_addr_expr (base),
10240 int_const_binop (PLUS_EXPR, arg1,
10241 size_int (coffset)));
10242 }
10243
10244 return NULL_TREE;
10245
10246 case POINTER_PLUS_EXPR:
10247 /* 0 +p index -> (type)index */
10248 if (integer_zerop (arg0))
10249 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10250
10251 /* PTR +p 0 -> PTR */
10252 if (integer_zerop (arg1))
10253 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10254
10255 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10256 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10257 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10258 return fold_convert_loc (loc, type,
10259 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10260 fold_convert_loc (loc, sizetype,
10261 arg1),
10262 fold_convert_loc (loc, sizetype,
10263 arg0)));
10264
10265 /* (PTR +p B) +p A -> PTR +p (B + A) */
10266 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10267 {
10268 tree inner;
10269 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10270 tree arg00 = TREE_OPERAND (arg0, 0);
10271 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10272 arg01, fold_convert_loc (loc, sizetype, arg1));
10273 return fold_convert_loc (loc, type,
10274 fold_build_pointer_plus_loc (loc,
10275 arg00, inner));
10276 }
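	/* For example, ((char *) p + 3) + 5 folds to (char *) p + 8: the
	   two sizetype offsets are added first, leaving a single
	   POINTER_PLUS_EXPR.  */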
10277
10278 /* PTR_CST +p CST -> CST1, i.e. fold to a new constant. */
10279 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10280 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10281 fold_convert_loc (loc, type, arg1));
10282
10283 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the
10284 step of the array. The loop optimizer sometimes produces this
10285 type of expression. */
10286 if (TREE_CODE (arg0) == ADDR_EXPR)
10287 {
10288 tem = try_move_mult_to_index (loc, arg0,
10289 fold_convert_loc (loc,
10290 ssizetype, arg1));
10291 if (tem)
10292 return fold_convert_loc (loc, type, tem);
10293 }
10294
10295 return NULL_TREE;
10296
10297 case PLUS_EXPR:
10298 /* A + (-B) -> A - B */
10299 if (TREE_CODE (arg1) == NEGATE_EXPR
10300 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10301 return fold_build2_loc (loc, MINUS_EXPR, type,
10302 fold_convert_loc (loc, type, arg0),
10303 fold_convert_loc (loc, type,
10304 TREE_OPERAND (arg1, 0)));
10305 /* (-A) + B -> B - A */
10306 if (TREE_CODE (arg0) == NEGATE_EXPR
10307 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10308 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10309 return fold_build2_loc (loc, MINUS_EXPR, type,
10310 fold_convert_loc (loc, type, arg1),
10311 fold_convert_loc (loc, type,
10312 TREE_OPERAND (arg0, 0)));
10313
10314 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10315 {
10316 /* Convert ~A + 1 to -A. */
10317 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10318 && integer_onep (arg1))
10319 return fold_build1_loc (loc, NEGATE_EXPR, type,
10320 fold_convert_loc (loc, type,
10321 TREE_OPERAND (arg0, 0)));
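	    /* This is the two's-complement identity -a == ~a + 1.  Both
	       forms overflow for exactly the same operand (the minimum
	       value of the type), so the rewrite introduces no new
	       overflow.  */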
10322
10323 /* ~X + X is -1. */
10324 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10325 && !TYPE_OVERFLOW_TRAPS (type))
10326 {
10327 tree tem = TREE_OPERAND (arg0, 0);
10328
10329 STRIP_NOPS (tem);
10330 if (operand_equal_p (tem, arg1, 0))
10331 {
10332 t1 = build_all_ones_cst (type);
10333 return omit_one_operand_loc (loc, type, t1, arg1);
10334 }
10335 }
10336
10337 /* X + ~X is -1. */
10338 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10339 && !TYPE_OVERFLOW_TRAPS (type))
10340 {
10341 tree tem = TREE_OPERAND (arg1, 0);
10342
10343 STRIP_NOPS (tem);
10344 if (operand_equal_p (arg0, tem, 0))
10345 {
10346 t1 = build_all_ones_cst (type);
10347 return omit_one_operand_loc (loc, type, t1, arg0);
10348 }
10349 }
10350
10351 /* X + (X / CST) * -CST is X % CST. */
10352 if (TREE_CODE (arg1) == MULT_EXPR
10353 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10354 && operand_equal_p (arg0,
10355 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10356 {
10357 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10358 tree cst1 = TREE_OPERAND (arg1, 1);
10359 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10360 cst1, cst0);
10361 if (sum && integer_zerop (sum))
10362 return fold_convert_loc (loc, type,
10363 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10364 TREE_TYPE (arg0), arg0,
10365 cst0));
10366 }
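	    /* For example, with C's truncating division, x + (x / 8) * -8
	       is x - (x / 8) * 8, i.e. x % 8.  Folding CST0 + CST1 above
	       and checking for zero verifies that the two constants really
	       negate each other.  */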
10367 }
10368
10369 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10370 one. Make sure the type is not saturating and has the signedness of
10371 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10372 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10373 if ((TREE_CODE (arg0) == MULT_EXPR
10374 || TREE_CODE (arg1) == MULT_EXPR)
10375 && !TYPE_SATURATING (type)
10376 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10377 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10378 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10379 {
10380 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10381 if (tem)
10382 return tem;
10383 }
10384
10385 if (! FLOAT_TYPE_P (type))
10386 {
10387 if (integer_zerop (arg1))
10388 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10389
10390 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10391 with a constant, and the two constants have no bits in common,
10392 we should treat this as a BIT_IOR_EXPR since this may produce more
10393 simplifications. */
10394 if (TREE_CODE (arg0) == BIT_AND_EXPR
10395 && TREE_CODE (arg1) == BIT_AND_EXPR
10396 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10397 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10398 && wi::bit_and (TREE_OPERAND (arg0, 1),
10399 TREE_OPERAND (arg1, 1)) == 0)
10400 {
10401 code = BIT_IOR_EXPR;
10402 goto bit_ior;
10403 }
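	  /* For example, (a & 0xF0) + (b & 0x0F): the two masks share no
	     bits, so the addition can never carry and is equivalent to
	     (a & 0xF0) | (b & 0x0F).  */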
10404
10405 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10406 (plus (plus (mult) (mult)) (foo)) so that we can
10407 take advantage of the factoring cases below. */
10408 if (TYPE_OVERFLOW_WRAPS (type)
10409 && (((TREE_CODE (arg0) == PLUS_EXPR
10410 || TREE_CODE (arg0) == MINUS_EXPR)
10411 && TREE_CODE (arg1) == MULT_EXPR)
10412 || ((TREE_CODE (arg1) == PLUS_EXPR
10413 || TREE_CODE (arg1) == MINUS_EXPR)
10414 && TREE_CODE (arg0) == MULT_EXPR)))
10415 {
10416 tree parg0, parg1, parg, marg;
10417 enum tree_code pcode;
10418
10419 if (TREE_CODE (arg1) == MULT_EXPR)
10420 parg = arg0, marg = arg1;
10421 else
10422 parg = arg1, marg = arg0;
10423 pcode = TREE_CODE (parg);
10424 parg0 = TREE_OPERAND (parg, 0);
10425 parg1 = TREE_OPERAND (parg, 1);
10426 STRIP_NOPS (parg0);
10427 STRIP_NOPS (parg1);
10428
10429 if (TREE_CODE (parg0) == MULT_EXPR
10430 && TREE_CODE (parg1) != MULT_EXPR)
10431 return fold_build2_loc (loc, pcode, type,
10432 fold_build2_loc (loc, PLUS_EXPR, type,
10433 fold_convert_loc (loc, type,
10434 parg0),
10435 fold_convert_loc (loc, type,
10436 marg)),
10437 fold_convert_loc (loc, type, parg1));
10438 if (TREE_CODE (parg0) != MULT_EXPR
10439 && TREE_CODE (parg1) == MULT_EXPR)
10440 return
10441 fold_build2_loc (loc, PLUS_EXPR, type,
10442 fold_convert_loc (loc, type, parg0),
10443 fold_build2_loc (loc, pcode, type,
10444 fold_convert_loc (loc, type, marg),
10445 fold_convert_loc (loc, type,
10446 parg1)));
10447 }
10448 }
10449 else
10450 {
10451 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10452 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10453 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10454
10455 /* Likewise if the operands are reversed. */
10456 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10457 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10458
10459 /* Convert X + -C into X - C. */
10460 if (TREE_CODE (arg1) == REAL_CST
10461 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10462 {
10463 tem = fold_negate_const (arg1, type);
10464 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10465 return fold_build2_loc (loc, MINUS_EXPR, type,
10466 fold_convert_loc (loc, type, arg0),
10467 fold_convert_loc (loc, type, tem));
10468 }
10469
10470 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10471 to __complex__ ( x, y ). This is not the same for SNaNs or
10472 if signed zeros are involved. */
10473 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10474 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10475 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10476 {
10477 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10478 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10479 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10480 bool arg0rz = false, arg0iz = false;
10481 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10482 || (arg0i && (arg0iz = real_zerop (arg0i))))
10483 {
10484 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10485 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10486 if (arg0rz && arg1i && real_zerop (arg1i))
10487 {
10488 tree rp = arg1r ? arg1r
10489 : build1 (REALPART_EXPR, rtype, arg1);
10490 tree ip = arg0i ? arg0i
10491 : build1 (IMAGPART_EXPR, rtype, arg0);
10492 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10493 }
10494 else if (arg0iz && arg1r && real_zerop (arg1r))
10495 {
10496 tree rp = arg0r ? arg0r
10497 : build1 (REALPART_EXPR, rtype, arg0);
10498 tree ip = arg1i ? arg1i
10499 : build1 (IMAGPART_EXPR, rtype, arg1);
10500 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10501 }
10502 }
10503 }
10504
10505 if (flag_unsafe_math_optimizations
10506 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10507 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10508 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10509 return tem;
10510
10511 /* Convert x+x into x*2.0. */
10512 if (operand_equal_p (arg0, arg1, 0)
10513 && SCALAR_FLOAT_TYPE_P (type))
10514 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10515 build_real (type, dconst2));
10516
10517 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10518 We associate floats only if the user has specified
10519 -fassociative-math. */
10520 if (flag_associative_math
10521 && TREE_CODE (arg1) == PLUS_EXPR
10522 && TREE_CODE (arg0) != MULT_EXPR)
10523 {
10524 tree tree10 = TREE_OPERAND (arg1, 0);
10525 tree tree11 = TREE_OPERAND (arg1, 1);
10526 if (TREE_CODE (tree11) == MULT_EXPR
10527 && TREE_CODE (tree10) == MULT_EXPR)
10528 {
10529 tree tree0;
10530 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10531 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10532 }
10533 }
10534 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10535 We associate floats only if the user has specified
10536 -fassociative-math. */
10537 if (flag_associative_math
10538 && TREE_CODE (arg0) == PLUS_EXPR
10539 && TREE_CODE (arg1) != MULT_EXPR)
10540 {
10541 tree tree00 = TREE_OPERAND (arg0, 0);
10542 tree tree01 = TREE_OPERAND (arg0, 1);
10543 if (TREE_CODE (tree01) == MULT_EXPR
10544 && TREE_CODE (tree00) == MULT_EXPR)
10545 {
10546 tree tree0;
10547 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10548 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10549 }
10550 }
10551 }
10552
10553 bit_rotate:
10554 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10555 is a rotate of A by C1 bits. */
10556 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10557 is a rotate of A by B bits. */
10558 {
10559 enum tree_code code0, code1;
10560 tree rtype;
10561 code0 = TREE_CODE (arg0);
10562 code1 = TREE_CODE (arg1);
10563 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10564 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10565 && operand_equal_p (TREE_OPERAND (arg0, 0),
10566 TREE_OPERAND (arg1, 0), 0)
10567 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10568 TYPE_UNSIGNED (rtype))
10569 /* Only create rotates in complete modes. Other cases are not
10570 expanded properly. */
10571 && (element_precision (rtype)
10572 == element_precision (TYPE_MODE (rtype))))
10573 {
10574 tree tree01, tree11;
10575 enum tree_code code01, code11;
10576
10577 tree01 = TREE_OPERAND (arg0, 1);
10578 tree11 = TREE_OPERAND (arg1, 1);
10579 STRIP_NOPS (tree01);
10580 STRIP_NOPS (tree11);
10581 code01 = TREE_CODE (tree01);
10582 code11 = TREE_CODE (tree11);
10583 if (code01 == INTEGER_CST
10584 && code11 == INTEGER_CST
10585 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10586 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10587 {
10588 tem = build2_loc (loc, LROTATE_EXPR,
10589 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10590 TREE_OPERAND (arg0, 0),
10591 code0 == LSHIFT_EXPR ? tree01 : tree11);
10592 return fold_convert_loc (loc, type, tem);
10593 }
10594 else if (code11 == MINUS_EXPR)
10595 {
10596 tree tree110, tree111;
10597 tree110 = TREE_OPERAND (tree11, 0);
10598 tree111 = TREE_OPERAND (tree11, 1);
10599 STRIP_NOPS (tree110);
10600 STRIP_NOPS (tree111);
10601 if (TREE_CODE (tree110) == INTEGER_CST
10602 && 0 == compare_tree_int (tree110,
10603 element_precision
10604 (TREE_TYPE (TREE_OPERAND
10605 (arg0, 0))))
10606 && operand_equal_p (tree01, tree111, 0))
10607 return
10608 fold_convert_loc (loc, type,
10609 build2 ((code0 == LSHIFT_EXPR
10610 ? LROTATE_EXPR
10611 : RROTATE_EXPR),
10612 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10613 TREE_OPERAND (arg0, 0), tree01));
10614 }
10615 else if (code01 == MINUS_EXPR)
10616 {
10617 tree tree010, tree011;
10618 tree010 = TREE_OPERAND (tree01, 0);
10619 tree011 = TREE_OPERAND (tree01, 1);
10620 STRIP_NOPS (tree010);
10621 STRIP_NOPS (tree011);
10622 if (TREE_CODE (tree010) == INTEGER_CST
10623 && 0 == compare_tree_int (tree010,
10624 element_precision
10625 (TREE_TYPE (TREE_OPERAND
10626 (arg0, 0))))
10627 && operand_equal_p (tree11, tree011, 0))
10628 return fold_convert_loc
10629 (loc, type,
10630 build2 ((code0 != LSHIFT_EXPR
10631 ? LROTATE_EXPR
10632 : RROTATE_EXPR),
10633 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10634 TREE_OPERAND (arg0, 0), tree11));
10635 }
10636 }
10637 }
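    /* For example, with a 32-bit unsigned int a, (a << 3) + (a >> 29)
       satisfies 3 + 29 == 32 and becomes a left-rotate of a by 3, while
       the common idiom (a << n) + (a >> (32 - n)) is caught by the
       MINUS_EXPR forms above.  */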
10638
10639 associate:
10640 /* In most languages, we can't associate operations on floats through
10641 parentheses. Rather than remember where the parentheses were, we
10642 don't associate floats at all, unless the user has specified
10643 -fassociative-math.
10644 We also need to make sure the type is not saturating. */
10645
10646 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10647 && !TYPE_SATURATING (type))
10648 {
10649 tree var0, con0, lit0, minus_lit0;
10650 tree var1, con1, lit1, minus_lit1;
10651 tree atype = type;
10652 bool ok = true;
10653
10654 /* Split both trees into variables, constants, and literals. Then
10655 associate each group together, the constants with literals,
10656 then the result with variables. This increases the chances of
10657 literals being recombined later and of generating relocatable
10658 expressions for the sum of a constant and literal. */
10659 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10660 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10661 code == MINUS_EXPR);
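      /* For example, (x + 3) + (y + 5) splits into variable parts x, y
	 and literal parts 3, 5; the literals associate to 8 and the
	 result is rebuilt below as (x + y) + 8, subject to the overflow
	 checks that follow.  */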
10662
10663 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10664 if (code == MINUS_EXPR)
10665 code = PLUS_EXPR;
10666
10667 /* With undefined overflow prefer doing association in a type
10668 which wraps on overflow, if that is one of the operand types. */
10669 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10670 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10671 {
10672 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10673 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10674 atype = TREE_TYPE (arg0);
10675 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10676 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10677 atype = TREE_TYPE (arg1);
10678 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10679 }
10680
10681 /* With undefined overflow we can only associate constants with one
10682 variable, and constants whose association doesn't overflow. */
10683 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10684 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10685 {
10686 if (var0 && var1)
10687 {
10688 tree tmp0 = var0;
10689 tree tmp1 = var1;
10690
10691 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10692 tmp0 = TREE_OPERAND (tmp0, 0);
10693 if (CONVERT_EXPR_P (tmp0)
10694 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10695 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10696 <= TYPE_PRECISION (atype)))
10697 tmp0 = TREE_OPERAND (tmp0, 0);
10698 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10699 tmp1 = TREE_OPERAND (tmp1, 0);
10700 if (CONVERT_EXPR_P (tmp1)
10701 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10702 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10703 <= TYPE_PRECISION (atype)))
10704 tmp1 = TREE_OPERAND (tmp1, 0);
10705 /* The only case we can still associate with two variables
10706 is if they are the same, modulo negation and bit-pattern
10707 preserving conversions. */
10708 if (!operand_equal_p (tmp0, tmp1, 0))
10709 ok = false;
10710 }
10711 }
10712
10713 /* Only do something if we found more than two objects. Otherwise,
10714 nothing has changed and we risk infinite recursion. */
10715 if (ok
10716 && (2 < ((var0 != 0) + (var1 != 0)
10717 + (con0 != 0) + (con1 != 0)
10718 + (lit0 != 0) + (lit1 != 0)
10719 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10720 {
10721 bool any_overflows = false;
10722 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10723 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10724 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10725 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10726 var0 = associate_trees (loc, var0, var1, code, atype);
10727 con0 = associate_trees (loc, con0, con1, code, atype);
10728 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10729 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10730 code, atype);
10731
10732 /* Preserve the MINUS_EXPR if the negative part of the literal is
10733 greater than the positive part. Otherwise, the multiplicative
10734 folding code (i.e. extract_muldiv) may be fooled when
10735 unsigned constants are subtracted, as in the following
10736 example: ((X*2 + 4) - 8U)/2. */
10737 if (minus_lit0 && lit0)
10738 {
10739 if (TREE_CODE (lit0) == INTEGER_CST
10740 && TREE_CODE (minus_lit0) == INTEGER_CST
10741 && tree_int_cst_lt (lit0, minus_lit0))
10742 {
10743 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10744 MINUS_EXPR, atype);
10745 lit0 = 0;
10746 }
10747 else
10748 {
10749 lit0 = associate_trees (loc, lit0, minus_lit0,
10750 MINUS_EXPR, atype);
10751 minus_lit0 = 0;
10752 }
10753 }
10754
10755 /* Don't introduce overflows through reassociation. */
10756 if (!any_overflows
10757 && ((lit0 && TREE_OVERFLOW (lit0))
10758 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10759 return NULL_TREE;
10760
10761 if (minus_lit0)
10762 {
10763 if (con0 == 0)
10764 return
10765 fold_convert_loc (loc, type,
10766 associate_trees (loc, var0, minus_lit0,
10767 MINUS_EXPR, atype));
10768 else
10769 {
10770 con0 = associate_trees (loc, con0, minus_lit0,
10771 MINUS_EXPR, atype);
10772 return
10773 fold_convert_loc (loc, type,
10774 associate_trees (loc, var0, con0,
10775 PLUS_EXPR, atype));
10776 }
10777 }
10778
10779 con0 = associate_trees (loc, con0, lit0, code, atype);
10780 return
10781 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10782 code, atype));
10783 }
10784 }
10785
10786 return NULL_TREE;
10787
10788 case MINUS_EXPR:
10789 /* Pointer simplifications for subtraction, simple reassociations. */
10790 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10791 {
10792 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10793 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10794 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10795 {
10796 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10797 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10798 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10799 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10800 return fold_build2_loc (loc, PLUS_EXPR, type,
10801 fold_build2_loc (loc, MINUS_EXPR, type,
10802 arg00, arg10),
10803 fold_build2_loc (loc, MINUS_EXPR, type,
10804 arg01, arg11));
10805 }
10806 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10807 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10808 {
10809 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10810 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10811 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10812 fold_convert_loc (loc, type, arg1));
10813 if (tmp)
10814 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10815 }
10816 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10817 simplifies. */
10818 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10819 {
10820 tree arg10 = fold_convert_loc (loc, type,
10821 TREE_OPERAND (arg1, 0));
10822 tree arg11 = fold_convert_loc (loc, type,
10823 TREE_OPERAND (arg1, 1));
10824 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg0,
10825 fold_convert_loc (loc, type, arg10));
10826 if (tmp)
10827 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10828 }
10829 }
10830 /* A - (-B) -> A + B */
10831 if (TREE_CODE (arg1) == NEGATE_EXPR)
10832 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10833 fold_convert_loc (loc, type,
10834 TREE_OPERAND (arg1, 0)));
10835 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10836 if (TREE_CODE (arg0) == NEGATE_EXPR
10837 && negate_expr_p (arg1)
10838 && reorder_operands_p (arg0, arg1))
10839 return fold_build2_loc (loc, MINUS_EXPR, type,
10840 fold_convert_loc (loc, type,
10841 negate_expr (arg1)),
10842 fold_convert_loc (loc, type,
10843 TREE_OPERAND (arg0, 0)));
10844 /* Convert -A - 1 to ~A. */
10845 if (TREE_CODE (type) != COMPLEX_TYPE
10846 && TREE_CODE (arg0) == NEGATE_EXPR
10847 && integer_onep (arg1)
10848 && !TYPE_OVERFLOW_TRAPS (type))
10849 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10850 fold_convert_loc (loc, type,
10851 TREE_OPERAND (arg0, 0)));
10852
10853 /* Convert -1 - A to ~A. */
10854 if (TREE_CODE (type) != COMPLEX_TYPE
10855 && integer_all_onesp (arg0))
10856 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10857
10858
10859 /* X - (X / Y) * Y is X % Y. */
10860 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10861 && TREE_CODE (arg1) == MULT_EXPR
10862 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10863 && operand_equal_p (arg0,
10864 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10865 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10866 TREE_OPERAND (arg1, 1), 0))
10867 return
10868 fold_convert_loc (loc, type,
10869 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10870 arg0, TREE_OPERAND (arg1, 1)));
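      /* This is the defining identity of C's truncating division and
	 remainder: x == (x / y) * y + x % y, hence
	 x - (x / y) * y == x % y for any nonzero y.  */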
10871
10872 if (! FLOAT_TYPE_P (type))
10873 {
10874 if (integer_zerop (arg0))
10875 return negate_expr (fold_convert_loc (loc, type, arg1));
10876 if (integer_zerop (arg1))
10877 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10878
10879 /* Fold A - (A & B) into ~B & A. */
10880 if (!TREE_SIDE_EFFECTS (arg0)
10881 && TREE_CODE (arg1) == BIT_AND_EXPR)
10882 {
10883 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10884 {
10885 tree arg10 = fold_convert_loc (loc, type,
10886 TREE_OPERAND (arg1, 0));
10887 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10888 fold_build1_loc (loc, BIT_NOT_EXPR,
10889 type, arg10),
10890 fold_convert_loc (loc, type, arg0));
10891 }
10892 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10893 {
10894 tree arg11 = fold_convert_loc (loc,
10895 type, TREE_OPERAND (arg1, 1));
10896 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10897 fold_build1_loc (loc, BIT_NOT_EXPR,
10898 type, arg11),
10899 fold_convert_loc (loc, type, arg0));
10900 }
10901 }
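	  /* The bits of A & B are a subset of the bits of A, so the
	     subtraction never borrows; e.g. a - (a & 0xFF) == a & ~0xFF.  */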
10902
10903 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10904 any power of 2 minus 1. */
10905 if (TREE_CODE (arg0) == BIT_AND_EXPR
10906 && TREE_CODE (arg1) == BIT_AND_EXPR
10907 && operand_equal_p (TREE_OPERAND (arg0, 0),
10908 TREE_OPERAND (arg1, 0), 0))
10909 {
10910 tree mask0 = TREE_OPERAND (arg0, 1);
10911 tree mask1 = TREE_OPERAND (arg1, 1);
10912 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10913
10914 if (operand_equal_p (tem, mask1, 0))
10915 {
10916 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10917 TREE_OPERAND (arg0, 0), mask1);
10918 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10919 }
10920 }
10921 }
10922
10923 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10924 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10925 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10926
10927 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10928 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10929 (-ARG1 + ARG0) reduces to -ARG1. */
10930 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10931 return negate_expr (fold_convert_loc (loc, type, arg1));
10932
10933 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10934 __complex__ ( x, -y ). This is not the same for SNaNs or if
10935 signed zeros are involved. */
10936 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10937 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10938 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10939 {
10940 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10941 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10942 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10943 bool arg0rz = false, arg0iz = false;
10944 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10945 || (arg0i && (arg0iz = real_zerop (arg0i))))
10946 {
10947 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10948 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10949 if (arg0rz && arg1i && real_zerop (arg1i))
10950 {
10951 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10952 arg1r ? arg1r
10953 : build1 (REALPART_EXPR, rtype, arg1));
10954 tree ip = arg0i ? arg0i
10955 : build1 (IMAGPART_EXPR, rtype, arg0);
10956 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10957 }
10958 else if (arg0iz && arg1r && real_zerop (arg1r))
10959 {
10960 tree rp = arg0r ? arg0r
10961 : build1 (REALPART_EXPR, rtype, arg0);
10962 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10963 arg1i ? arg1i
10964 : build1 (IMAGPART_EXPR, rtype, arg1));
10965 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10966 }
10967 }
10968 }
10969
10970 /* Fold &x - &x. This can happen from &x.foo - &x.
10971 This is unsafe for certain floats even in non-IEEE formats.
10972 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10973 Also note that operand_equal_p is always false if an operand
10974 is volatile. */
10975
10976 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10977 && operand_equal_p (arg0, arg1, 0))
10978 return build_zero_cst (type);
10979
10980 /* A - B -> A + (-B) if B is easily negatable. */
10981 if (negate_expr_p (arg1)
10982 && ((FLOAT_TYPE_P (type)
10983 /* Avoid this transformation if B is a positive REAL_CST. */
10984 && (TREE_CODE (arg1) != REAL_CST
10985 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10986 || INTEGRAL_TYPE_P (type)))
10987 return fold_build2_loc (loc, PLUS_EXPR, type,
10988 fold_convert_loc (loc, type, arg0),
10989 fold_convert_loc (loc, type,
10990 negate_expr (arg1)));
10991
10992 /* Try folding difference of addresses. */
10993 {
10994 HOST_WIDE_INT diff;
10995
10996 if ((TREE_CODE (arg0) == ADDR_EXPR
10997 || TREE_CODE (arg1) == ADDR_EXPR)
10998 && ptr_difference_const (arg0, arg1, &diff))
10999 return build_int_cst_type (type, diff);
11000 }
11001
11002 /* Fold &a[i] - &a[j] to i-j. */
11003 if (TREE_CODE (arg0) == ADDR_EXPR
11004 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11005 && TREE_CODE (arg1) == ADDR_EXPR
11006 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11007 {
11008 tree tem = fold_addr_of_array_ref_difference (loc, type,
11009 TREE_OPERAND (arg0, 0),
11010 TREE_OPERAND (arg1, 0));
11011 if (tem)
11012 return tem;
11013 }
11014
11015 if (FLOAT_TYPE_P (type)
11016 && flag_unsafe_math_optimizations
11017 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11018 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11019 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11020 return tem;
11021
11022 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11023 one. Make sure the type is not saturating and has the signedness of
11024 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11025 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11026 if ((TREE_CODE (arg0) == MULT_EXPR
11027 || TREE_CODE (arg1) == MULT_EXPR)
11028 && !TYPE_SATURATING (type)
11029 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11030 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11031 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11032 {
11033 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11034 if (tem)
11035 return tem;
11036 }
11037
11038 goto associate;
11039
11040 case MULT_EXPR:
11041 /* (-A) * (-B) -> A * B */
11042 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11043 return fold_build2_loc (loc, MULT_EXPR, type,
11044 fold_convert_loc (loc, type,
11045 TREE_OPERAND (arg0, 0)),
11046 fold_convert_loc (loc, type,
11047 negate_expr (arg1)));
11048 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11049 return fold_build2_loc (loc, MULT_EXPR, type,
11050 fold_convert_loc (loc, type,
11051 negate_expr (arg0)),
11052 fold_convert_loc (loc, type,
11053 TREE_OPERAND (arg1, 0)));
11054
11055 if (! FLOAT_TYPE_P (type))
11056 {
11057 if (integer_zerop (arg1))
11058 return omit_one_operand_loc (loc, type, arg1, arg0);
11059 if (integer_onep (arg1))
11060 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11061 /* Transform x * -1 into -x. Make sure to do the negation
11062 on the original operand with conversions not stripped
11063 because we can only strip non-sign-changing conversions. */
11064 if (integer_minus_onep (arg1))
11065 return fold_convert_loc (loc, type, negate_expr (op0));
11066 /* Transform x * -C into -x * C if x is easily negatable. */
11067 if (TREE_CODE (arg1) == INTEGER_CST
11068 && tree_int_cst_sgn (arg1) == -1
11069 && negate_expr_p (arg0)
11070 && (tem = negate_expr (arg1)) != arg1
11071 && !TREE_OVERFLOW (tem))
11072 return fold_build2_loc (loc, MULT_EXPR, type,
11073 fold_convert_loc (loc, type,
11074 negate_expr (arg0)),
11075 tem);
11076
11077 /* (a * (1 << b)) is (a << b) */
11078 if (TREE_CODE (arg1) == LSHIFT_EXPR
11079 && integer_onep (TREE_OPERAND (arg1, 0)))
11080 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11081 TREE_OPERAND (arg1, 1));
11082 if (TREE_CODE (arg0) == LSHIFT_EXPR
11083 && integer_onep (TREE_OPERAND (arg0, 0)))
11084 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11085 TREE_OPERAND (arg0, 1));
11086
11087 /* (A + A) * C -> A * 2 * C */
11088 if (TREE_CODE (arg0) == PLUS_EXPR
11089 && TREE_CODE (arg1) == INTEGER_CST
11090 && operand_equal_p (TREE_OPERAND (arg0, 0),
11091 TREE_OPERAND (arg0, 1), 0))
11092 return fold_build2_loc (loc, MULT_EXPR, type,
11093 omit_one_operand_loc (loc, type,
11094 TREE_OPERAND (arg0, 0),
11095 TREE_OPERAND (arg0, 1)),
11096 fold_build2_loc (loc, MULT_EXPR, type,
11097 build_int_cst (type, 2), arg1));
11098
11099 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11100 sign-changing only. */
11101 if (TREE_CODE (arg1) == INTEGER_CST
11102 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11103 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11104 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11105
11106 strict_overflow_p = false;
11107 if (TREE_CODE (arg1) == INTEGER_CST
11108 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11109 &strict_overflow_p)))
11110 {
11111 if (strict_overflow_p)
11112 fold_overflow_warning (("assuming signed overflow does not "
11113 "occur when simplifying "
11114 "multiplication"),
11115 WARN_STRICT_OVERFLOW_MISC);
11116 return fold_convert_loc (loc, type, tem);
11117 }
11118
11119 /* Optimize z * conj(z) for integer complex numbers. */
11120 if (TREE_CODE (arg0) == CONJ_EXPR
11121 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11122 return fold_mult_zconjz (loc, type, arg1);
11123 if (TREE_CODE (arg1) == CONJ_EXPR
11124 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11125 return fold_mult_zconjz (loc, type, arg0);
11126 }
11127 else
11128 {
11129 /* Maybe fold x * 0 to 0. The expressions aren't the same
11130 when x is NaN, since x * 0 is also NaN. Nor are they the
11131 same in modes with signed zeros, since multiplying a
11132 negative value by 0 gives -0, not +0. */
11133 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11134 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11135 && real_zerop (arg1))
11136 return omit_one_operand_loc (loc, type, arg1, arg0);
11137 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11138 Likewise for complex arithmetic with signed zeros. */
11139 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11140 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11141 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11142 && real_onep (arg1))
11143 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11144
11145 /* Transform x * -1.0 into -x. */
11146 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11147 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11148 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11149 && real_minus_onep (arg1))
11150 return fold_convert_loc (loc, type, negate_expr (arg0));
11151
11152 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11153 the result for floating point types due to rounding, so it is applied
11154 only if -fassociative-math was specified. */
11155 if (flag_associative_math
11156 && TREE_CODE (arg0) == RDIV_EXPR
11157 && TREE_CODE (arg1) == REAL_CST
11158 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11159 {
11160 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11161 arg1);
11162 if (tem)
11163 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11164 TREE_OPERAND (arg0, 1));
11165 }
11166
11167 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11168 if (operand_equal_p (arg0, arg1, 0))
11169 {
11170 tree tem = fold_strip_sign_ops (arg0);
11171 if (tem != NULL_TREE)
11172 {
11173 tem = fold_convert_loc (loc, type, tem);
11174 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11175 }
11176 }
11177
11178 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11179 This is not the same for NaNs or if signed zeros are
11180 involved. */
11181 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11182 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11183 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11184 && TREE_CODE (arg1) == COMPLEX_CST
11185 && real_zerop (TREE_REALPART (arg1)))
11186 {
11187 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11188 if (real_onep (TREE_IMAGPART (arg1)))
11189 return
11190 fold_build2_loc (loc, COMPLEX_EXPR, type,
11191 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11192 rtype, arg0)),
11193 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11194 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11195 return
11196 fold_build2_loc (loc, COMPLEX_EXPR, type,
11197 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11198 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11199 rtype, arg0)));
11200 }
11201
11202 /* Optimize z * conj(z) for floating point complex numbers.
11203 Guarded by flag_unsafe_math_optimizations as non-finite
11204 imaginary components don't produce scalar results. */
11205 if (flag_unsafe_math_optimizations
11206 && TREE_CODE (arg0) == CONJ_EXPR
11207 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11208 return fold_mult_zconjz (loc, type, arg1);
11209 if (flag_unsafe_math_optimizations
11210 && TREE_CODE (arg1) == CONJ_EXPR
11211 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11212 return fold_mult_zconjz (loc, type, arg0);
11213
11214 if (flag_unsafe_math_optimizations)
11215 {
11216 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11217 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11218
11219 /* Optimizations of root(...)*root(...). */
11220 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11221 {
11222 tree rootfn, arg;
11223 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11224 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11225
11226 /* Optimize sqrt(x)*sqrt(x) as x. */
11227 if (BUILTIN_SQRT_P (fcode0)
11228 && operand_equal_p (arg00, arg10, 0)
11229 && ! HONOR_SNANS (TYPE_MODE (type)))
11230 return arg00;
11231
11232 /* Optimize root(x)*root(y) as root(x*y). */
11233 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11234 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11235 return build_call_expr_loc (loc, rootfn, 1, arg);
11236 }
11237
11238 /* Optimize expN(x)*expN(y) as expN(x+y). */
11239 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11240 {
11241 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11242 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11243 CALL_EXPR_ARG (arg0, 0),
11244 CALL_EXPR_ARG (arg1, 0));
11245 return build_call_expr_loc (loc, expfn, 1, arg);
11246 }
11247
11248 /* Optimizations of pow(...)*pow(...). */
11249 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11250 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11251 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11252 {
11253 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11254 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11255 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11256 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11257
11258 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11259 if (operand_equal_p (arg01, arg11, 0))
11260 {
11261 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11262 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11263 arg00, arg10);
11264 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11265 }
11266
11267 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11268 if (operand_equal_p (arg00, arg10, 0))
11269 {
11270 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11271 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11272 arg01, arg11);
11273 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11274 }
11275 }
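	      /* These use the real-arithmetic identities
		   pow (x, y) * pow (z, y) == pow (x * z, y)
		   pow (x, y) * pow (x, z) == pow (x, y + z)
		 which need not hold exactly under floating-point rounding;
		 hence the flag_unsafe_math_optimizations guard on this
		 whole block.  */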
11276
11277 /* Optimize tan(x)*cos(x) as sin(x). */
11278 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11279 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11280 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11281 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11282 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11283 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11284 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11285 CALL_EXPR_ARG (arg1, 0), 0))
11286 {
11287 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11288
11289 if (sinfn != NULL_TREE)
11290 return build_call_expr_loc (loc, sinfn, 1,
11291 CALL_EXPR_ARG (arg0, 0));
11292 }
11293
11294 /* Optimize x*pow(x,c) as pow(x,c+1). */
11295 if (fcode1 == BUILT_IN_POW
11296 || fcode1 == BUILT_IN_POWF
11297 || fcode1 == BUILT_IN_POWL)
11298 {
11299 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11300 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11301 if (TREE_CODE (arg11) == REAL_CST
11302 && !TREE_OVERFLOW (arg11)
11303 && operand_equal_p (arg0, arg10, 0))
11304 {
11305 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11306 REAL_VALUE_TYPE c;
11307 tree arg;
11308
11309 c = TREE_REAL_CST (arg11);
11310 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11311 arg = build_real (type, c);
11312 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11313 }
11314 }
11315
11316 /* Optimize pow(x,c)*x as pow(x,c+1). */
11317 if (fcode0 == BUILT_IN_POW
11318 || fcode0 == BUILT_IN_POWF
11319 || fcode0 == BUILT_IN_POWL)
11320 {
11321 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11322 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11323 if (TREE_CODE (arg01) == REAL_CST
11324 && !TREE_OVERFLOW (arg01)
11325 && operand_equal_p (arg1, arg00, 0))
11326 {
11327 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11328 REAL_VALUE_TYPE c;
11329 tree arg;
11330
11331 c = TREE_REAL_CST (arg01);
11332 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11333 arg = build_real (type, c);
11334 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11335 }
11336 }
11337
11338 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11339 if (!in_gimple_form
11340 && optimize
11341 && operand_equal_p (arg0, arg1, 0))
11342 {
11343 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11344
11345 if (powfn)
11346 {
11347 tree arg = build_real (type, dconst2);
11348 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11349 }
11350 }
11351 }
11352 }
11353 goto associate;
11354
11355 case BIT_IOR_EXPR:
11356 bit_ior:
11357 if (integer_all_onesp (arg1))
11358 return omit_one_operand_loc (loc, type, arg1, arg0);
11359 if (integer_zerop (arg1))
11360 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11361 if (operand_equal_p (arg0, arg1, 0))
11362 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11363
11364 /* ~X | X is -1. */
11365 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11366 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11367 {
11368 t1 = build_zero_cst (type);
11369 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11370 return omit_one_operand_loc (loc, type, t1, arg1);
11371 }
11372
11373 /* X | ~X is -1. */
11374 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11375 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11376 {
11377 t1 = build_zero_cst (type);
11378 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11379 return omit_one_operand_loc (loc, type, t1, arg0);
11380 }
11381
11382 /* Canonicalize (X & C1) | C2. */
11383 if (TREE_CODE (arg0) == BIT_AND_EXPR
11384 && TREE_CODE (arg1) == INTEGER_CST
11385 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11386 {
11387 int width = TYPE_PRECISION (type), w;
11388 wide_int c1 = TREE_OPERAND (arg0, 1);
11389 wide_int c2 = arg1;
11390
11391 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11392 if ((c1 & c2) == c1)
11393 return omit_one_operand_loc (loc, type, arg1,
11394 TREE_OPERAND (arg0, 0));
11395
11396 wide_int msk = wi::mask (width, false,
11397 TYPE_PRECISION (TREE_TYPE (arg1)));
11398
11399 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11400 if (msk.and_not (c1 | c2) == 0)
11401 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11402 TREE_OPERAND (arg0, 0), arg1);
11403
11404 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11405 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11406 mode which allows further optimizations. */
11407 c1 &= msk;
11408 c2 &= msk;
11409 wide_int c3 = c1.and_not (c2);
11410 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11411 {
11412 wide_int mask = wi::mask (w, false,
11413 TYPE_PRECISION (type));
11414 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11415 {
11416 c3 = mask;
11417 break;
11418 }
11419 }
11420
11421 if (c3 != c1)
11422 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11423 fold_build2_loc (loc, BIT_AND_EXPR, type,
11424 TREE_OPERAND (arg0, 0),
11425 wide_int_to_tree (type,
11426 c3)),
11427 arg1);
11428 }
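      /* For example, (x & 0x03) | 0x0F folds to 0x0F by the first rule,
	 and, for a 32-bit type, (x & 0xFFFFFFF0) | 0x0F becomes x | 0x0F
	 by the second, since C1 | C2 covers every bit of the mode.  */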
11429
11430 /* (X & Y) | Y is (X, Y). */
11431 if (TREE_CODE (arg0) == BIT_AND_EXPR
11432 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11433 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11434 /* (X & Y) | X is (Y, X). */
11435 if (TREE_CODE (arg0) == BIT_AND_EXPR
11436 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11437 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11438 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11439 /* X | (X & Y) is (Y, X). */
11440 if (TREE_CODE (arg1) == BIT_AND_EXPR
11441 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11442 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11443 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11444 /* X | (Y & X) is (Y, X). */
11445 if (TREE_CODE (arg1) == BIT_AND_EXPR
11446 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11447 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11448 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11449
11450 /* (X & ~Y) | (~X & Y) is X ^ Y */
11451 if (TREE_CODE (arg0) == BIT_AND_EXPR
11452 && TREE_CODE (arg1) == BIT_AND_EXPR)
11453 {
11454 tree a0, a1, l0, l1, n0, n1;
11455
11456 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11457 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11458
11459 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11460 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11461
11462 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11463 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11464
11465 if ((operand_equal_p (n0, a0, 0)
11466 && operand_equal_p (n1, a1, 0))
11467 || (operand_equal_p (n0, a1, 0)
11468 && operand_equal_p (n1, a0, 0)))
11469 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11470 }
11471
11472 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11473 if (t1 != NULL_TREE)
11474 return t1;
11475
11476 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11477
11478 This results in more efficient code for machines without a NAND
11479 instruction. Combine will canonicalize to the first form
11480 which will allow use of NAND instructions provided by the
11481 backend if they exist. */
11482 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11483 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11484 {
11485 return
11486 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11487 build2 (BIT_AND_EXPR, type,
11488 fold_convert_loc (loc, type,
11489 TREE_OPERAND (arg0, 0)),
11490 fold_convert_loc (loc, type,
11491 TREE_OPERAND (arg1, 0))));
11492 }
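      /* This is De Morgan's law, ~a | ~b == ~(a & b); a target with a
	 NAND instruction can then do the whole computation in one
	 operation.  */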
11493
11494 /* See if this can be simplified into a rotate first. If that
11495 is unsuccessful continue in the association code. */
11496 goto bit_rotate;
11497
11498 case BIT_XOR_EXPR:
11499 if (integer_zerop (arg1))
11500 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11501 if (integer_all_onesp (arg1))
11502 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11503 if (operand_equal_p (arg0, arg1, 0))
11504 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11505
11506 /* ~X ^ X is -1. */
11507 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11508 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11509 {
11510 t1 = build_zero_cst (type);
11511 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11512 return omit_one_operand_loc (loc, type, t1, arg1);
11513 }
11514
11515 /* X ^ ~X is -1. */
11516 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11517 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11518 {
11519 t1 = build_zero_cst (type);
11520 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11521 return omit_one_operand_loc (loc, type, t1, arg0);
11522 }
11523
11524 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11525 with a constant, and the two constants have no bits in common,
11526 we should treat this as a BIT_IOR_EXPR since this may produce more
11527 simplifications. */
11528 if (TREE_CODE (arg0) == BIT_AND_EXPR
11529 && TREE_CODE (arg1) == BIT_AND_EXPR
11530 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11531 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11532 && wi::bit_and (TREE_OPERAND (arg0, 1),
11533 TREE_OPERAND (arg1, 1)) == 0)
11534 {
11535 code = BIT_IOR_EXPR;
11536 goto bit_ior;
11537 }
11538
11539 /* (X | Y) ^ X -> Y & ~X */
11540 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11541 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11542 {
11543 tree t2 = TREE_OPERAND (arg0, 1);
11544 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11545 arg1);
11546 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11547 fold_convert_loc (loc, type, t2),
11548 fold_convert_loc (loc, type, t1));
11549 return t1;
11550 }
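	  /* Bitwise check: where a bit of X is 1, (X | Y) ^ X clears it
	     and Y & ~X is 0 there; where a bit of X is 0, both sides give
	     the corresponding bit of Y.  Hence (x | y) ^ x == y & ~x.  */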
11551
11552 /* (Y | X) ^ X -> Y & ~X */
11553 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11554 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11555 {
11556 tree t2 = TREE_OPERAND (arg0, 0);
11557 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11558 arg1);
11559 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11560 fold_convert_loc (loc, type, t2),
11561 fold_convert_loc (loc, type, t1));
11562 return t1;
11563 }
11564
11565 /* X ^ (X | Y) -> Y & ~X */
11566 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11567 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11568 {
11569 tree t2 = TREE_OPERAND (arg1, 1);
11570 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11571 arg0);
11572 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11573 fold_convert_loc (loc, type, t2),
11574 fold_convert_loc (loc, type, t1));
11575 return t1;
11576 }
11577
11578 /* X ^ (Y | X) -> Y & ~X */
11579 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11580 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11581 {
11582 tree t2 = TREE_OPERAND (arg1, 0);
11583 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11584 arg0);
11585 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11586 fold_convert_loc (loc, type, t2),
11587 fold_convert_loc (loc, type, t1));
11588 return t1;
11589 }
11590
11591 /* Convert ~X ^ ~Y to X ^ Y. */
11592 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11593 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11594 return fold_build2_loc (loc, code, type,
11595 fold_convert_loc (loc, type,
11596 TREE_OPERAND (arg0, 0)),
11597 fold_convert_loc (loc, type,
11598 TREE_OPERAND (arg1, 0)));
11599
11600 /* Convert ~X ^ C to X ^ ~C. */
11601 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11602 && TREE_CODE (arg1) == INTEGER_CST)
11603 return fold_build2_loc (loc, code, type,
11604 fold_convert_loc (loc, type,
11605 TREE_OPERAND (arg0, 0)),
11606 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11607
11608 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11609 if (TREE_CODE (arg0) == BIT_AND_EXPR
11610 && integer_onep (TREE_OPERAND (arg0, 1))
11611 && integer_onep (arg1))
11612 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11613 build_zero_cst (TREE_TYPE (arg0)));
11614
11615 /* Fold (X & Y) ^ Y as ~X & Y. */
11616 if (TREE_CODE (arg0) == BIT_AND_EXPR
11617 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11618 {
11619 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11620 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11621 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11622 fold_convert_loc (loc, type, arg1));
11623 }
11624 /* Fold (X & Y) ^ X as ~Y & X. */
11625 if (TREE_CODE (arg0) == BIT_AND_EXPR
11626 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11627 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11628 {
11629 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11630 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11631 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11632 fold_convert_loc (loc, type, arg1));
11633 }
11634 /* Fold X ^ (X & Y) as X & ~Y. */
11635 if (TREE_CODE (arg1) == BIT_AND_EXPR
11636 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11637 {
11638 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11639 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11640 fold_convert_loc (loc, type, arg0),
11641 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11642 }
11643 /* Fold X ^ (Y & X) as ~Y & X. */
11644 if (TREE_CODE (arg1) == BIT_AND_EXPR
11645 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11646 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11647 {
11648 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11649 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11650 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11651 fold_convert_loc (loc, type, arg0));
11652 }
11653
11654 /* See if this can be simplified into a rotate first. If that
11655 is unsuccessful continue in the association code. */
11656 goto bit_rotate;
11657
11658 case BIT_AND_EXPR:
11659 if (integer_all_onesp (arg1))
11660 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11661 if (integer_zerop (arg1))
11662 return omit_one_operand_loc (loc, type, arg1, arg0);
11663 if (operand_equal_p (arg0, arg1, 0))
11664 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11665
11666 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11667 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11668 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11669 || (TREE_CODE (arg0) == EQ_EXPR
11670 && integer_zerop (TREE_OPERAND (arg0, 1))))
11671 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11672 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11673
11674 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11675 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11676 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11677 || (TREE_CODE (arg1) == EQ_EXPR
11678 && integer_zerop (TREE_OPERAND (arg1, 1))))
11679 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11680 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11681
11682 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11683 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11684 && TREE_CODE (arg1) == INTEGER_CST
11685 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11686 {
11687 tree tmp1 = fold_convert_loc (loc, type, arg1);
11688 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11689 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11690 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11691 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11692 return
11693 fold_convert_loc (loc, type,
11694 fold_build2_loc (loc, BIT_IOR_EXPR,
11695 type, tmp2, tmp3));
11696 }
11697
11698 /* (X | Y) & Y is (X, Y). */
11699 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11700 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11701 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11702 /* (X | Y) & X is (Y, X). */
11703 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11704 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11705 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11706 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11707 /* X & (X | Y) is (Y, X). */
11708 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11709 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11710 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11711 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11712 /* X & (Y | X) is (Y, X). */
11713 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11714 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11715 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11716 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11717
11718 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11719 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11720 && integer_onep (TREE_OPERAND (arg0, 1))
11721 && integer_onep (arg1))
11722 {
11723 tree tem2;
11724 tem = TREE_OPERAND (arg0, 0);
11725 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11726 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11727 tem, tem2);
11728 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11729 build_zero_cst (TREE_TYPE (tem)));
11730 }
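	  /* (x ^ 1) & 1 flips the low bit and masks it, so it is 1 exactly
	     when the low bit of x is 0, i.e. it is the truth value of
	     (x & 1) == 0.  */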
11731 /* Fold ~X & 1 as (X & 1) == 0. */
11732 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11733 && integer_onep (arg1))
11734 {
11735 tree tem2;
11736 tem = TREE_OPERAND (arg0, 0);
11737 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11738 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11739 tem, tem2);
11740 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11741 build_zero_cst (TREE_TYPE (tem)));
11742 }
11743 /* Fold !X & 1 as X == 0. */
11744 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11745 && integer_onep (arg1))
11746 {
11747 tem = TREE_OPERAND (arg0, 0);
11748 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11749 build_zero_cst (TREE_TYPE (tem)));
11750 }
11751
11752 /* Fold (X ^ Y) & Y as ~X & Y. */
11753 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11754 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11755 {
11756 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11757 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11758 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11759 fold_convert_loc (loc, type, arg1));
11760 }
11761 /* Fold (X ^ Y) & X as ~Y & X. */
11762 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11763 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11764 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11765 {
11766 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11767 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11768 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11769 fold_convert_loc (loc, type, arg1));
11770 }
11771 /* Fold X & (X ^ Y) as X & ~Y. */
11772 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11773 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11774 {
11775 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11776 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11777 fold_convert_loc (loc, type, arg0),
11778 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11779 }
11780 /* Fold X & (Y ^ X) as ~Y & X. */
11781 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11782 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11783 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11784 {
11785 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11786 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11787 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11788 fold_convert_loc (loc, type, arg0));
11789 }
11790
11791 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11792 multiple of 1 << CST. */
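/* For example, with CST == 3 the mask is -(1 << 3) == -8, i.e. all ones
   above the low three bits; (x * 24) & -8 folds to x * 24 because 24 is
   a multiple of 8, so those low bits of the product are already zero. */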
11793 if (TREE_CODE (arg1) == INTEGER_CST)
11794 {
11795 wide_int cst1 = arg1;
11796 wide_int ncst1 = -cst1;
11797 if ((cst1 & ncst1) == ncst1
11798 && multiple_of_p (type, arg0,
11799 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11800 return fold_convert_loc (loc, type, arg0);
11801 }
11802
11803 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11804 bits from CST2. */
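/* For example, (x * 4) & 1 folds to 0, since the product always has two
   trailing zero bits, and (x * 4) & 7 becomes (x * 4) & 4 once the
   known-zero low bits are dropped from the mask. */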
11805 if (TREE_CODE (arg1) == INTEGER_CST
11806 && TREE_CODE (arg0) == MULT_EXPR
11807 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11808 {
11809 wide_int warg1 = arg1;
11810 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11811
11812 if (masked == 0)
11813 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11814 arg0, arg1);
11815 else if (masked != warg1)
11816 {
11817 /* Avoid the transform if arg1 is a mask of some
11818 mode, since that form allows further optimizations. */
11819 int pop = wi::popcount (warg1);
11820 if (!(pop >= BITS_PER_UNIT
11821 && exact_log2 (pop) != -1
11822 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11823 return fold_build2_loc (loc, code, type, op0,
11824 wide_int_to_tree (type, masked));
11825 }
11826 }
11827
11828 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11829 ((A & N) + B) & M -> (A + B) & M
11830 Similarly if (N & M) == 0,
11831 ((A | N) + B) & M -> (A + B) & M
11832 and for - instead of + (or unary - instead of +)
11833 and/or ^ instead of |.
11834 If B is constant and (B & M) == 0, fold into A & M. */
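/* For example, with M == 0xff: ((a & 0x1ff) + b) & 0xff becomes
   (a + b) & 0xff because 0x1ff & 0xff == 0xff, and
   ((a | 0x100) + b) & 0xff becomes the same because 0x100 & 0xff == 0. */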
11835 if (TREE_CODE (arg1) == INTEGER_CST)
11836 {
11837 wide_int cst1 = arg1;
11838 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11839 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11840 && (TREE_CODE (arg0) == PLUS_EXPR
11841 || TREE_CODE (arg0) == MINUS_EXPR
11842 || TREE_CODE (arg0) == NEGATE_EXPR)
11843 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11844 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11845 {
11846 tree pmop[2];
11847 int which = 0;
11848 wide_int cst0;
11849
11850 /* Now we know that arg0 is (C + D) or (C - D) or
11851 -C and arg1 (M) == (1LL << cst) - 1.
11852 Store C into PMOP[0] and D into PMOP[1]. */
11853 pmop[0] = TREE_OPERAND (arg0, 0);
11854 pmop[1] = NULL;
11855 if (TREE_CODE (arg0) != NEGATE_EXPR)
11856 {
11857 pmop[1] = TREE_OPERAND (arg0, 1);
11858 which = 1;
11859 }
11860
11861 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11862 which = -1;
11863
11864 for (; which >= 0; which--)
11865 switch (TREE_CODE (pmop[which]))
11866 {
11867 case BIT_AND_EXPR:
11868 case BIT_IOR_EXPR:
11869 case BIT_XOR_EXPR:
11870 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11871 != INTEGER_CST)
11872 break;
11873 cst0 = TREE_OPERAND (pmop[which], 1);
11874 cst0 &= cst1;
11875 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11876 {
11877 if (cst0 != cst1)
11878 break;
11879 }
11880 else if (cst0 != 0)
11881 break;
11882 /* If C or D is of the form (A & N) where
11883 (N & M) == M, or of the form (A | N) or
11884 (A ^ N) where (N & M) == 0, replace it with A. */
11885 pmop[which] = TREE_OPERAND (pmop[which], 0);
11886 break;
11887 case INTEGER_CST:
11888 /* If C or D is an N where (N & M) == 0, it can be
11889 omitted (assumed 0). */
11890 if ((TREE_CODE (arg0) == PLUS_EXPR
11891 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11892 && (cst1 & pmop[which]) == 0)
11893 pmop[which] = NULL;
11894 break;
11895 default:
11896 break;
11897 }
11898
11899 /* Only build anything new if we optimized one or both arguments
11900 above. */
11901 if (pmop[0] != TREE_OPERAND (arg0, 0)
11902 || (TREE_CODE (arg0) != NEGATE_EXPR
11903 && pmop[1] != TREE_OPERAND (arg0, 1)))
11904 {
11905 tree utype = TREE_TYPE (arg0);
11906 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11907 {
11908 /* Perform the operations in a type that has defined
11909 overflow behavior. */
11910 utype = unsigned_type_for (TREE_TYPE (arg0));
11911 if (pmop[0] != NULL)
11912 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11913 if (pmop[1] != NULL)
11914 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11915 }
11916
11917 if (TREE_CODE (arg0) == NEGATE_EXPR)
11918 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11919 else if (TREE_CODE (arg0) == PLUS_EXPR)
11920 {
11921 if (pmop[0] != NULL && pmop[1] != NULL)
11922 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11923 pmop[0], pmop[1]);
11924 else if (pmop[0] != NULL)
11925 tem = pmop[0];
11926 else if (pmop[1] != NULL)
11927 tem = pmop[1];
11928 else
11929 return build_int_cst (type, 0);
11930 }
11931 else if (pmop[0] == NULL)
11932 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11933 else
11934 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11935 pmop[0], pmop[1]);
11936 /* TEM is now the new binary +, - or unary - replacement. */
11937 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11938 fold_convert_loc (loc, utype, arg1));
11939 return fold_convert_loc (loc, type, tem);
11940 }
11941 }
11942 }
11943
11944 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11945 if (t1 != NULL_TREE)
11946 return t1;
11947 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
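/* 0377 is octal for 255; after the widening conversion from unsigned
   char the value already fits in the low eight bits, so a mask covering
   the full inner precision is redundant. */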
11948 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11949 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11950 {
11951 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11952
11953 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11954 if (mask == -1)
11955 return
11956 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11957 }
11958
11959 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11960
11961 This results in more efficient code for machines without a NOR
11962 instruction. Combine will canonicalize to the first form
11963 which will allow use of NOR instructions provided by the
11964 backend if they exist. */
11965 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11966 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11967 {
11968 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11969 build2 (BIT_IOR_EXPR, type,
11970 fold_convert_loc (loc, type,
11971 TREE_OPERAND (arg0, 0)),
11972 fold_convert_loc (loc, type,
11973 TREE_OPERAND (arg1, 0))));
11974 }
11975
11976 /* If arg0 is derived from the address of an object or function, we may
11977 be able to fold this expression using the object or function's
11978 alignment. */
11979 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11980 {
11981 unsigned HOST_WIDE_INT modulus, residue;
11982 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11983
11984 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11985 integer_onep (arg1));
11986
11987 /* This works because modulus is a power of 2. If this weren't the
11988 case, we'd have to replace it by its greatest power-of-2
11989 divisor: modulus & -modulus. */
11990 if (low < modulus)
11991 return build_int_cst (type, residue & low);
11992 }
11993
11994 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11995 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11996 if the new mask might be further optimized. */
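/* For example, (x << 2) & 0xfc becomes (x << 2) & 0xff: the low two
   bits of the shifted value are zero anyway, and 0xff is an integer
   mode's mask, which later folds can often remove entirely. */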
11997 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11998 || TREE_CODE (arg0) == RSHIFT_EXPR)
11999 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12000 && TREE_CODE (arg1) == INTEGER_CST
12001 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12002 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12003 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12004 < TYPE_PRECISION (TREE_TYPE (arg0))))
12005 {
12006 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12007 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12008 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12009 tree shift_type = TREE_TYPE (arg0);
12010
12011 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12012 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12013 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12014 && TYPE_PRECISION (TREE_TYPE (arg0))
12015 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12016 {
12017 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12018 tree arg00 = TREE_OPERAND (arg0, 0);
12019 /* See if more bits can be proven to be zero because of
12020 zero extension. */
12021 if (TREE_CODE (arg00) == NOP_EXPR
12022 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12023 {
12024 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12025 if (TYPE_PRECISION (inner_type)
12026 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12027 && TYPE_PRECISION (inner_type) < prec)
12028 {
12029 prec = TYPE_PRECISION (inner_type);
12030 /* See if we can shorten the right shift. */
12031 if (shiftc < prec)
12032 shift_type = inner_type;
12033 /* Otherwise X >> C1 is all zeros, so we'll optimize
12034 it into (X, 0) later on by making sure zerobits
12035 is all ones. */
12036 }
12037 }
12038 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12039 if (shiftc < prec)
12040 {
12041 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12042 zerobits <<= prec - shiftc;
12043 }
12044 /* For an arithmetic shift, if the sign bit could be set, zerobits
12045 can actually contain sign bits, so no transformation is
12046 possible unless MASK masks them all away. In that
12047 case the shift needs to be converted into a logical shift. */
12048 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12049 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12050 {
12051 if ((mask & zerobits) == 0)
12052 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12053 else
12054 zerobits = 0;
12055 }
12056 }
12057
12058 /* ((X << 16) & 0xff00) is (X, 0). */
12059 if ((mask & zerobits) == mask)
12060 return omit_one_operand_loc (loc, type,
12061 build_int_cst (type, 0), arg0);
12062
12063 newmask = mask | zerobits;
12064 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12065 {
12066 /* Only do the transformation if NEWMASK is some integer
12067 mode's mask. */
12068 for (prec = BITS_PER_UNIT;
12069 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12070 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12071 break;
12072 if (prec < HOST_BITS_PER_WIDE_INT
12073 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12074 {
12075 tree newmaskt;
12076
12077 if (shift_type != TREE_TYPE (arg0))
12078 {
12079 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12080 fold_convert_loc (loc, shift_type,
12081 TREE_OPERAND (arg0, 0)),
12082 TREE_OPERAND (arg0, 1));
12083 tem = fold_convert_loc (loc, type, tem);
12084 }
12085 else
12086 tem = op0;
12087 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12088 if (!tree_int_cst_equal (newmaskt, arg1))
12089 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12090 }
12091 }
12092 }
12093
12094 goto associate;
12095
12096 case RDIV_EXPR:
12097 /* Don't touch a floating-point divide by zero unless the mode
12098 of the constant can represent infinity. */
12099 if (TREE_CODE (arg1) == REAL_CST
12100 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12101 && real_zerop (arg1))
12102 return NULL_TREE;
12103
12104 /* Optimize A / A to 1.0 if we don't care about
12105 NaNs or Infinities. Skip the transformation
12106 for non-real operands. */
12107 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12108 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12109 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12110 && operand_equal_p (arg0, arg1, 0))
12111 {
12112 tree r = build_real (TREE_TYPE (arg0), dconst1);
12113
12114 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12115 }
12116
12117 /* The complex version of the above A / A optimization. */
12118 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12119 && operand_equal_p (arg0, arg1, 0))
12120 {
12121 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12122 if (! HONOR_NANS (TYPE_MODE (elem_type))
12123 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12124 {
12125 tree r = build_real (elem_type, dconst1);
12126 /* omit_two_operands will call fold_convert for us. */
12127 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12128 }
12129 }
12130
12131 /* (-A) / (-B) -> A / B */
12132 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12133 return fold_build2_loc (loc, RDIV_EXPR, type,
12134 TREE_OPERAND (arg0, 0),
12135 negate_expr (arg1));
12136 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12137 return fold_build2_loc (loc, RDIV_EXPR, type,
12138 negate_expr (arg0),
12139 TREE_OPERAND (arg1, 0));
12140
12141 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12142 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12143 && real_onep (arg1))
12144 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12145
12146 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12147 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12148 && real_minus_onep (arg1))
12149 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12150 negate_expr (arg0)));
12151
12152 /* If ARG1 is a constant, we can convert this to a multiply by the
12153 reciprocal. This does not have the same rounding properties,
12154 so only do this if -freciprocal-math. We can actually
12155 always safely do it if ARG1 is a power of two, but it's hard to
12156 tell whether it is in a portable manner. */
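/* For example, x / 5.0 becomes x * 0.2 only under -freciprocal-math,
   since 0.2 has no exact binary representation, while x / 4.0 becomes
   x * 0.25 whenever we are optimizing, because exact_inverse below
   recognizes the power of two. */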
12157 if (optimize
12158 && (TREE_CODE (arg1) == REAL_CST
12159 || (TREE_CODE (arg1) == COMPLEX_CST
12160 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12161 || (TREE_CODE (arg1) == VECTOR_CST
12162 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12163 {
12164 if (flag_reciprocal_math
12165 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12166 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12167 /* Find the reciprocal if optimizing and the result is exact.
12168 TODO: Complex reciprocal not implemented. */
12169 if (TREE_CODE (arg1) != COMPLEX_CST)
12170 {
12171 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12172
12173 if (inverse)
12174 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12175 }
12176 }
12177 /* Convert A/B/C to A/(B*C). */
12178 if (flag_reciprocal_math
12179 && TREE_CODE (arg0) == RDIV_EXPR)
12180 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12181 fold_build2_loc (loc, MULT_EXPR, type,
12182 TREE_OPERAND (arg0, 1), arg1));
12183
12184 /* Convert A/(B/C) to (A/B)*C. */
12185 if (flag_reciprocal_math
12186 && TREE_CODE (arg1) == RDIV_EXPR)
12187 return fold_build2_loc (loc, MULT_EXPR, type,
12188 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12189 TREE_OPERAND (arg1, 0)),
12190 TREE_OPERAND (arg1, 1));
12191
12192 /* Convert C1/(X*C2) into (C1/C2)/X. */
12193 if (flag_reciprocal_math
12194 && TREE_CODE (arg1) == MULT_EXPR
12195 && TREE_CODE (arg0) == REAL_CST
12196 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12197 {
12198 tree tem = const_binop (RDIV_EXPR, arg0,
12199 TREE_OPERAND (arg1, 1));
12200 if (tem)
12201 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12202 TREE_OPERAND (arg1, 0));
12203 }
12204
12205 if (flag_unsafe_math_optimizations)
12206 {
12207 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12208 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12209
12210 /* Optimize sin(x)/cos(x) as tan(x). */
12211 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12212 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12213 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12214 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12215 CALL_EXPR_ARG (arg1, 0), 0))
12216 {
12217 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12218
12219 if (tanfn != NULL_TREE)
12220 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12221 }
12222
12223 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12224 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12225 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12226 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12227 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12228 CALL_EXPR_ARG (arg1, 0), 0))
12229 {
12230 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12231
12232 if (tanfn != NULL_TREE)
12233 {
12234 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12235 CALL_EXPR_ARG (arg0, 0));
12236 return fold_build2_loc (loc, RDIV_EXPR, type,
12237 build_real (type, dconst1), tmp);
12238 }
12239 }
12240
12241 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12242 NaNs or Infinities. */
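/* At x == 0, sin(x)/tan(x) is 0/0, a NaN, whereas cos(0) == 1, which
   is why the rewrite is guarded by HONOR_NANS and HONOR_INFINITIES. */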
12243 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12244 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12245 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12246 {
12247 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12248 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12249
12250 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12251 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12252 && operand_equal_p (arg00, arg01, 0))
12253 {
12254 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12255
12256 if (cosfn != NULL_TREE)
12257 return build_call_expr_loc (loc, cosfn, 1, arg00);
12258 }
12259 }
12260
12261 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12262 NaNs or Infinities. */
12263 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12264 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12265 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12266 {
12267 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12268 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12269
12270 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12271 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12272 && operand_equal_p (arg00, arg01, 0))
12273 {
12274 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12275
12276 if (cosfn != NULL_TREE)
12277 {
12278 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12279 return fold_build2_loc (loc, RDIV_EXPR, type,
12280 build_real (type, dconst1),
12281 tmp);
12282 }
12283 }
12284 }
12285
12286 /* Optimize pow(x,c)/x as pow(x,c-1). */
12287 if (fcode0 == BUILT_IN_POW
12288 || fcode0 == BUILT_IN_POWF
12289 || fcode0 == BUILT_IN_POWL)
12290 {
12291 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12292 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12293 if (TREE_CODE (arg01) == REAL_CST
12294 && !TREE_OVERFLOW (arg01)
12295 && operand_equal_p (arg1, arg00, 0))
12296 {
12297 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12298 REAL_VALUE_TYPE c;
12299 tree arg;
12300
12301 c = TREE_REAL_CST (arg01);
12302 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12303 arg = build_real (type, c);
12304 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12305 }
12306 }
12307
12308 /* Optimize a/root(b/c) into a*root(c/b). */
12309 if (BUILTIN_ROOT_P (fcode1))
12310 {
12311 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12312
12313 if (TREE_CODE (rootarg) == RDIV_EXPR)
12314 {
12315 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12316 tree b = TREE_OPERAND (rootarg, 0);
12317 tree c = TREE_OPERAND (rootarg, 1);
12318
12319 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12320
12321 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12322 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12323 }
12324 }
12325
12326 /* Optimize x/expN(y) into x*expN(-y). */
12327 if (BUILTIN_EXPONENT_P (fcode1))
12328 {
12329 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12330 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12331 arg1 = build_call_expr_loc (loc,
12332 expfn, 1,
12333 fold_convert_loc (loc, type, arg));
12334 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12335 }
12336
12337 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12338 if (fcode1 == BUILT_IN_POW
12339 || fcode1 == BUILT_IN_POWF
12340 || fcode1 == BUILT_IN_POWL)
12341 {
12342 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12343 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12344 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12345 tree neg11 = fold_convert_loc (loc, type,
12346 negate_expr (arg11));
12347 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12348 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12349 }
12350 }
12351 return NULL_TREE;
12352
12353 case TRUNC_DIV_EXPR:
12354 /* Optimize (X & (-A)) / A where A is a power of 2,
12355 to X >> log2(A). */
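/* For example, (x & -8) / 8 becomes x >> 3: the AND rounds x down to an
   exact multiple of 8, and dividing that multiple yields floor (x/8),
   which is precisely the arithmetic shift, even for negative x. */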
12356 if (TREE_CODE (arg0) == BIT_AND_EXPR
12357 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12358 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12359 {
12360 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12361 arg1, TREE_OPERAND (arg0, 1));
12362 if (sum && integer_zerop (sum))
	{
12363 tree pow2 = build_int_cst (integer_type_node,
12364 wi::exact_log2 (arg1));
12365 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12366 TREE_OPERAND (arg0, 0), pow2);
12367 }
12368 }
12369
12370 /* Fall through */
12371
12372 case FLOOR_DIV_EXPR:
12373 /* Simplify A / (B << N) where A and B are positive and B is
12374 a power of 2, to A >> (N + log2(B)). */
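/* For example, for unsigned a, a / (4 << n) becomes a >> (n + 2),
   replacing the division by a single shift. */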
12375 strict_overflow_p = false;
12376 if (TREE_CODE (arg1) == LSHIFT_EXPR
12377 && (TYPE_UNSIGNED (type)
12378 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12379 {
12380 tree sval = TREE_OPERAND (arg1, 0);
12381 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12382 {
12383 tree sh_cnt = TREE_OPERAND (arg1, 1);
12384 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12385 wi::exact_log2 (sval));
12386
12387 if (strict_overflow_p)
12388 fold_overflow_warning (("assuming signed overflow does not "
12389 "occur when simplifying A / (B << N)"),
12390 WARN_STRICT_OVERFLOW_MISC);
12391
12392 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12393 sh_cnt, pow2);
12394 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12395 fold_convert_loc (loc, type, arg0), sh_cnt);
12396 }
12397 }
12398
12399 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12400 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12401 if (INTEGRAL_TYPE_P (type)
12402 && TYPE_UNSIGNED (type)
12403 && code == FLOOR_DIV_EXPR)
12404 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12405
12406 /* Fall through */
12407
12408 case ROUND_DIV_EXPR:
12409 case CEIL_DIV_EXPR:
12410 case EXACT_DIV_EXPR:
12411 if (integer_onep (arg1))
12412 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12413 if (integer_zerop (arg1))
12414 return NULL_TREE;
12415 /* X / -1 is -X. */
12416 if (!TYPE_UNSIGNED (type)
12417 && TREE_CODE (arg1) == INTEGER_CST
12418 && wi::eq_p (arg1, -1))
12419 return fold_convert_loc (loc, type, negate_expr (arg0));
12420
12421 /* Convert -A / -B to A / B when the type is signed and overflow is
12422 undefined. */
12423 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12424 && TREE_CODE (arg0) == NEGATE_EXPR
12425 && negate_expr_p (arg1))
12426 {
12427 if (INTEGRAL_TYPE_P (type))
12428 fold_overflow_warning (("assuming signed overflow does not occur "
12429 "when distributing negation across "
12430 "division"),
12431 WARN_STRICT_OVERFLOW_MISC);
12432 return fold_build2_loc (loc, code, type,
12433 fold_convert_loc (loc, type,
12434 TREE_OPERAND (arg0, 0)),
12435 fold_convert_loc (loc, type,
12436 negate_expr (arg1)));
12437 }
12438 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12439 && TREE_CODE (arg1) == NEGATE_EXPR
12440 && negate_expr_p (arg0))
12441 {
12442 if (INTEGRAL_TYPE_P (type))
12443 fold_overflow_warning (("assuming signed overflow does not occur "
12444 "when distributing negation across "
12445 "division"),
12446 WARN_STRICT_OVERFLOW_MISC);
12447 return fold_build2_loc (loc, code, type,
12448 fold_convert_loc (loc, type,
12449 negate_expr (arg0)),
12450 fold_convert_loc (loc, type,
12451 TREE_OPERAND (arg1, 0)));
12452 }
12453
12454 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12455 operation, EXACT_DIV_EXPR.
12456
12457 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12458 At one time others generated faster code, but it's not clear whether
12459 they still do after the last round of changes to the DIV code in expmed.c. */
12460 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12461 && multiple_of_p (type, arg0, arg1))
12462 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12463
12464 strict_overflow_p = false;
12465 if (TREE_CODE (arg1) == INTEGER_CST
12466 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12467 &strict_overflow_p)))
12468 {
12469 if (strict_overflow_p)
12470 fold_overflow_warning (("assuming signed overflow does not occur "
12471 "when simplifying division"),
12472 WARN_STRICT_OVERFLOW_MISC);
12473 return fold_convert_loc (loc, type, tem);
12474 }
12475
12476 return NULL_TREE;
12477
12478 case CEIL_MOD_EXPR:
12479 case FLOOR_MOD_EXPR:
12480 case ROUND_MOD_EXPR:
12481 case TRUNC_MOD_EXPR:
12482 /* X % 1 is always zero, but be sure to preserve any side
12483 effects in X. */
12484 if (integer_onep (arg1))
12485 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12486
12487 /* For X % 0, return X % 0 unchanged so that we get the
12488 proper warnings and errors. */
12489 if (integer_zerop (arg1))
12490 return NULL_TREE;
12491
12492 /* 0 % X is always zero, but be sure to preserve any side
12493 effects in X. Place this after checking for X == 0. */
12494 if (integer_zerop (arg0))
12495 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12496
12497 /* X % -1 is zero. */
12498 if (!TYPE_UNSIGNED (type)
12499 && TREE_CODE (arg1) == INTEGER_CST
12500 && wi::eq_p (arg1, -1))
12501 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12502
12503 /* X % -C is the same as X % C. */
12504 if (code == TRUNC_MOD_EXPR
12505 && TYPE_SIGN (type) == SIGNED
12506 && TREE_CODE (arg1) == INTEGER_CST
12507 && !TREE_OVERFLOW (arg1)
12508 && wi::neg_p (arg1)
12509 && !TYPE_OVERFLOW_TRAPS (type)
12510 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12511 && !sign_bit_p (arg1, arg1))
12512 return fold_build2_loc (loc, code, type,
12513 fold_convert_loc (loc, type, arg0),
12514 fold_convert_loc (loc, type,
12515 negate_expr (arg1)));
12516
12517 /* X % -Y is the same as X % Y. */
12518 if (code == TRUNC_MOD_EXPR
12519 && !TYPE_UNSIGNED (type)
12520 && TREE_CODE (arg1) == NEGATE_EXPR
12521 && !TYPE_OVERFLOW_TRAPS (type))
12522 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12523 fold_convert_loc (loc, type,
12524 TREE_OPERAND (arg1, 0)));
12525
12526 strict_overflow_p = false;
12527 if (TREE_CODE (arg1) == INTEGER_CST
12528 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12529 &strict_overflow_p)))
12530 {
12531 if (strict_overflow_p)
12532 fold_overflow_warning (("assuming signed overflow does not occur "
12533 "when simplifying modulus"),
12534 WARN_STRICT_OVERFLOW_MISC);
12535 return fold_convert_loc (loc, type, tem);
12536 }
12537
12538 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12539 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
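/* For example, for unsigned x, x % 8 becomes x & 7, and
   x % (2 << n) becomes x & ((2 << n) - 1). */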
12540 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12541 && (TYPE_UNSIGNED (type)
12542 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12543 {
12544 tree c = arg1;
12545 /* Also optimize A % (C << N) where C is a power of 2,
12546 to A & ((C << N) - 1). */
12547 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12548 c = TREE_OPERAND (arg1, 0);
12549
12550 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12551 {
12552 tree mask
12553 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12554 build_int_cst (TREE_TYPE (arg1), 1));
12555 if (strict_overflow_p)
12556 fold_overflow_warning (("assuming signed overflow does not "
12557 "occur when simplifying "
12558 "X % (power of two)"),
12559 WARN_STRICT_OVERFLOW_MISC);
12560 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12561 fold_convert_loc (loc, type, arg0),
12562 fold_convert_loc (loc, type, mask));
12563 }
12564 }
12565
12566 return NULL_TREE;
12567
12568 case LROTATE_EXPR:
12569 case RROTATE_EXPR:
12570 if (integer_all_onesp (arg0))
12571 return omit_one_operand_loc (loc, type, arg0, arg1);
12572 goto shift;
12573
12574 case RSHIFT_EXPR:
12575 /* Optimize -1 >> x for arithmetic right shifts. */
12576 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12577 && tree_expr_nonnegative_p (arg1))
12578 return omit_one_operand_loc (loc, type, arg0, arg1);
12579 /* ... fall through ... */
12580
12581 case LSHIFT_EXPR:
12582 shift:
12583 if (integer_zerop (arg1))
12584 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12585 if (integer_zerop (arg0))
12586 return omit_one_operand_loc (loc, type, arg0, arg1);
12587
12588 /* Prefer vector1 << scalar to vector1 << vector2
12589 if vector2 is uniform. */
12590 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12591 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12592 return fold_build2_loc (loc, code, type, op0, tem);
12593
12594 /* Since a negative shift count is not well-defined,
12595 don't try to compute it in the compiler. */
12596 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12597 return NULL_TREE;
12598
12599 prec = element_precision (type);
12600
12601 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
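/* For example, (x << 3) << 5 becomes x << 8. When the combined count
   reaches the precision, left shifts and unsigned right shifts fold to
   zero, rotates reduce the count modulo the precision, and signed
   right shifts clamp it to precision - 1. */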
12602 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12603 && tree_to_uhwi (arg1) < prec
12604 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12605 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12606 {
12607 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12608 + tree_to_uhwi (arg1));
12609
12610 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12611 being well defined. */
12612 if (low >= prec)
12613 {
12614 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12615 low = low % prec;
12616 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12617 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12618 TREE_OPERAND (arg0, 0));
12619 else
12620 low = prec - 1;
12621 }
12622
12623 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12624 build_int_cst (TREE_TYPE (arg1), low));
12625 }
12626
12627 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12628 into x & ((unsigned)-1 >> c) for unsigned types. */
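/* For a 32-bit unsigned x, (x >> 4) << 4 becomes x & 0xfffffff0 and
   (x << 4) >> 4 becomes x & 0x0fffffff. */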
12629 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12630 || (TYPE_UNSIGNED (type)
12631 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12632 && tree_fits_uhwi_p (arg1)
12633 && tree_to_uhwi (arg1) < prec
12634 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12635 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12636 {
12637 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12638 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12639 tree lshift;
12640 tree arg00;
12641
12642 if (low0 == low1)
12643 {
12644 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12645
12646 lshift = build_minus_one_cst (type);
12647 lshift = const_binop (code, lshift, arg1);
12648
12649 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12650 }
12651 }
12652
12653 /* Rewrite an LROTATE_EXPR by a constant into an
12654 RROTATE_EXPR by a new constant. */
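/* For example, in a 32-bit type a rotate left by 8 becomes a rotate
   right by 24, so later passes only ever see one rotate direction. */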
12655 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12656 {
12657 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12658 tem = const_binop (MINUS_EXPR, tem, arg1);
12659 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12660 }
12661
12662 /* If we have a rotate of a bit operation with the rotate count and
12663 the second operand of the bit operation both constant,
12664 permute the two operations. */
12665 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12666 && (TREE_CODE (arg0) == BIT_AND_EXPR
12667 || TREE_CODE (arg0) == BIT_IOR_EXPR
12668 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12670 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12671 fold_build2_loc (loc, code, type,
12672 TREE_OPERAND (arg0, 0), arg1),
12673 fold_build2_loc (loc, code, type,
12674 TREE_OPERAND (arg0, 1), arg1));
12675
12676 /* Two consecutive rotates adding up to some integer
12677 multiple of the precision of the type can be ignored. */
12678 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12679 && TREE_CODE (arg0) == RROTATE_EXPR
12680 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12681 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12682 prec) == 0)
12683 return TREE_OPERAND (arg0, 0);
12684
12685 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12686 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12687 if the latter can be further optimized. */
12688 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12689 && TREE_CODE (arg0) == BIT_AND_EXPR
12690 && TREE_CODE (arg1) == INTEGER_CST
12691 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12692 {
12693 tree mask = fold_build2_loc (loc, code, type,
12694 fold_convert_loc (loc, type,
12695 TREE_OPERAND (arg0, 1)),
12696 arg1);
12697 tree shift = fold_build2_loc (loc, code, type,
12698 fold_convert_loc (loc, type,
12699 TREE_OPERAND (arg0, 0)),
12700 arg1);
12701 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12702 if (tem)
12703 return tem;
12704 }
12705
12706 return NULL_TREE;
12707
12708 case MIN_EXPR:
12709 if (operand_equal_p (arg0, arg1, 0))
12710 return omit_one_operand_loc (loc, type, arg0, arg1);
12711 if (INTEGRAL_TYPE_P (type)
12712 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12713 return omit_one_operand_loc (loc, type, arg1, arg0);
12714 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12715 if (tem)
12716 return tem;
12717 goto associate;
12718
12719 case MAX_EXPR:
12720 if (operand_equal_p (arg0, arg1, 0))
12721 return omit_one_operand_loc (loc, type, arg0, arg1);
12722 if (INTEGRAL_TYPE_P (type)
12723 && TYPE_MAX_VALUE (type)
12724 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12725 return omit_one_operand_loc (loc, type, arg1, arg0);
12726 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12727 if (tem)
12728 return tem;
12729 goto associate;
12730
12731 case TRUTH_ANDIF_EXPR:
12732 /* Note that the operands of this must be ints
12733 and their values must be 0 or 1.
12734 ("true" is a fixed value perhaps depending on the language.) */
12735 /* If first arg is constant zero, return it. */
12736 if (integer_zerop (arg0))
12737 return fold_convert_loc (loc, type, arg0);
12738 case TRUTH_AND_EXPR:
12739 /* If either arg is constant true, drop it. */
12740 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12741 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12742 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12743 /* Preserve sequence points. */
12744 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12745 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12746 /* If second arg is constant zero, result is zero, but first arg
12747 must be evaluated. */
12748 if (integer_zerop (arg1))
12749 return omit_one_operand_loc (loc, type, arg1, arg0);
12750 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12751 case will be handled here. */
12752 if (integer_zerop (arg0))
12753 return omit_one_operand_loc (loc, type, arg0, arg1);
12754
12755 /* !X && X is always false. */
12756 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12757 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12758 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12759 /* X && !X is always false. */
12760 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12761 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12762 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12763
12764 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12765 means A >= Y && A != MAX, but in this case we know that
12766 A < X <= MAX. */
12767
12768 if (!TREE_SIDE_EFFECTS (arg0)
12769 && !TREE_SIDE_EFFECTS (arg1))
12770 {
12771 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12772 if (tem && !operand_equal_p (tem, arg0, 0))
12773 return fold_build2_loc (loc, code, type, tem, arg1);
12774
12775 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12776 if (tem && !operand_equal_p (tem, arg1, 0))
12777 return fold_build2_loc (loc, code, type, arg0, tem);
12778 }
12779
12780 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12781 != NULL_TREE)
12782 return tem;
12783
12784 return NULL_TREE;
12785
12786 case TRUTH_ORIF_EXPR:
12787 /* Note that the operands of this must be ints
12788 and their values must be 0 or 1.
12789 ("true" is a fixed value perhaps depending on the language.) */
12790 /* If first arg is constant true, return it. */
12791 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12792 return fold_convert_loc (loc, type, arg0);
12793 case TRUTH_OR_EXPR:
12794 /* If either arg is constant zero, drop it. */
12795 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12796 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12797 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12798 /* Preserve sequence points. */
12799 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12800 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12801 /* If second arg is constant true, result is true, but we must
12802 evaluate first arg. */
12803 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12804 return omit_one_operand_loc (loc, type, arg1, arg0);
12805 /* Likewise for first arg, but note this only occurs here for
12806 TRUTH_OR_EXPR. */
12807 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12808 return omit_one_operand_loc (loc, type, arg0, arg1);
12809
12810 /* !X || X is always true. */
12811 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12812 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12813 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12814 /* X || !X is always true. */
12815 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12816 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12817 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12818
12819 /* (X && !Y) || (!X && Y) is X ^ Y */
12820 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12821 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12822 {
12823 tree a0, a1, l0, l1, n0, n1;
12824
12825 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12826 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12827
12828 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12829 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12830
12831 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12832 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12833
12834 if ((operand_equal_p (n0, a0, 0)
12835 && operand_equal_p (n1, a1, 0))
12836 || (operand_equal_p (n0, a1, 0)
12837 && operand_equal_p (n1, a0, 0)))
12838 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12839 }
12840
12841 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12842 != NULL_TREE)
12843 return tem;
12844
12845 return NULL_TREE;
12846
12847 case TRUTH_XOR_EXPR:
12848 /* If the second arg is constant zero, drop it. */
12849 if (integer_zerop (arg1))
12850 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12851 /* If the second arg is constant true, this is a logical inversion. */
12852 if (integer_onep (arg1))
12853 {
12854 tem = invert_truthvalue_loc (loc, arg0);
12855 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12856 }
12857 /* Identical arguments cancel to zero. */
12858 if (operand_equal_p (arg0, arg1, 0))
12859 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12860
12861 /* !X ^ X is always true. */
12862 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12863 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12864 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12865
12866 /* X ^ !X is always true. */
12867 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12868 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12869 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12870
12871 return NULL_TREE;
12872
12873 case EQ_EXPR:
12874 case NE_EXPR:
12875 STRIP_NOPS (arg0);
12876 STRIP_NOPS (arg1);
12877
12878 tem = fold_comparison (loc, code, type, op0, op1);
12879 if (tem != NULL_TREE)
12880 return tem;
12881
12882 /* bool_var != 0 becomes bool_var. */
12883 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12884 && code == NE_EXPR)
12885 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12886
12887 /* bool_var == 1 becomes bool_var. */
12888 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12889 && code == EQ_EXPR)
12890 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12891
12892 /* bool_var != 1 becomes !bool_var. */
12893 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12894 && code == NE_EXPR)
12895 return fold_convert_loc (loc, type,
12896 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12897 TREE_TYPE (arg0), arg0));
12898
12899 /* bool_var == 0 becomes !bool_var. */
12900 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12901 && code == EQ_EXPR)
12902 return fold_convert_loc (loc, type,
12903 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12904 TREE_TYPE (arg0), arg0));
12905
12906 /* !exp != 0 becomes !exp */
12907 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12908 && code == NE_EXPR)
12909 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12910
12911 /* If this is an equality comparison of the address of two non-weak,
12912 unaliased symbols, neither of which is extern (since we do not
12913 have access to attributes for externs), then we know the result. */
12914 if (TREE_CODE (arg0) == ADDR_EXPR
12915 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12916 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12917 && ! lookup_attribute ("alias",
12918 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12919 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12920 && TREE_CODE (arg1) == ADDR_EXPR
12921 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12922 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12923 && ! lookup_attribute ("alias",
12924 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12925 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12926 {
12927 /* We know that we're looking at the address of two
12928 non-weak, unaliased, static _DECL nodes.
12929
12930 It is both wasteful and incorrect to call operand_equal_p
12931 to compare the two ADDR_EXPR nodes. It is wasteful in that
12932 all we need to do is test pointer equality for the arguments
12933 to the two ADDR_EXPR nodes. It is incorrect to use
12934 operand_equal_p as that function is NOT equivalent to a
12935 C equality test. It can in fact return false for two
12936 objects which would test as equal using the C equality
12937 operator. */
12938 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12939 return constant_boolean_node (equal
12940 ? code == EQ_EXPR : code != EQ_EXPR,
12941 type);
12942 }
12943
12944 /* Convert -X eq/ne C into X eq/ne -C when negating C does not overflow. */
12945 if (TREE_CODE (arg0) == NEGATE_EXPR
12946 && TREE_CODE (arg1) == INTEGER_CST
12947 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12948 arg1)))
12949 && TREE_CODE (tem) == INTEGER_CST
12950 && !TREE_OVERFLOW (tem))
12951 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12952
12953 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12954 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12955 && TREE_CODE (arg1) == INTEGER_CST
12956 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12957 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12958 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12959 fold_convert_loc (loc,
12960 TREE_TYPE (arg0),
12961 arg1),
12962 TREE_OPERAND (arg0, 1)));
12963
12964 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12965 if ((TREE_CODE (arg0) == PLUS_EXPR
12966 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12967 || TREE_CODE (arg0) == MINUS_EXPR)
12968 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12969 0)),
12970 arg1, 0)
12971 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12972 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12973 {
12974 tree val = TREE_OPERAND (arg0, 1);
12975 return omit_two_operands_loc (loc, type,
12976 fold_build2_loc (loc, code, type,
12977 val,
12978 build_int_cst (TREE_TYPE (val),
12979 0)),
12980 TREE_OPERAND (arg0, 0), arg1);
12981 }
12982
12983 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
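/* When C is odd, C - X and X have opposite parity and therefore can
   never be equal, so the comparison folds to a constant while
   preserving any side effects of the operands. */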
12984 if (TREE_CODE (arg0) == MINUS_EXPR
12985 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12986 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12987 1)),
12988 arg1, 0)
12989 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12990 {
12991 return omit_two_operands_loc (loc, type,
12992 code == NE_EXPR
12993 ? boolean_true_node : boolean_false_node,
12994 TREE_OPERAND (arg0, 1), arg1);
12995 }
12996
12997 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12998 if (TREE_CODE (arg0) == ABS_EXPR
12999 && (integer_zerop (arg1) || real_zerop (arg1)))
13000 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13001
13002 /* If this is an EQ or NE comparison with zero and ARG0 is
13003 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13004 two operations, but the latter can be done in one less insn
13005 on machines that have only two-operand insns or on which a
13006 constant cannot be the first operand. */
13007 if (TREE_CODE (arg0) == BIT_AND_EXPR
13008 && integer_zerop (arg1))
13009 {
13010 tree arg00 = TREE_OPERAND (arg0, 0);
13011 tree arg01 = TREE_OPERAND (arg0, 1);
13012 if (TREE_CODE (arg00) == LSHIFT_EXPR
13013 && integer_onep (TREE_OPERAND (arg00, 0)))
13014 {
13015 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13016 arg01, TREE_OPERAND (arg00, 1));
13017 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13018 build_int_cst (TREE_TYPE (arg0), 1));
13019 return fold_build2_loc (loc, code, type,
13020 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13021 arg1);
13022 }
13023 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13024 && integer_onep (TREE_OPERAND (arg01, 0)))
13025 {
13026 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13027 arg00, TREE_OPERAND (arg01, 1));
13028 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13029 build_int_cst (TREE_TYPE (arg0), 1));
13030 return fold_build2_loc (loc, code, type,
13031 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13032 arg1);
13033 }
13034 }
13035
13036 /* If this is an NE or EQ comparison of zero against the result of a
13037 signed MOD operation whose second operand is a power of 2, make
13038 the MOD operation unsigned since it is simpler and equivalent. */
13039 if (integer_zerop (arg1)
13040 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13041 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13042 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13043 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13044 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13045 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13046 {
13047 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13048 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13049 fold_convert_loc (loc, newtype,
13050 TREE_OPERAND (arg0, 0)),
13051 fold_convert_loc (loc, newtype,
13052 TREE_OPERAND (arg0, 1)));
13053
13054 return fold_build2_loc (loc, code, type, newmod,
13055 fold_convert_loc (loc, newtype, arg1));
13056 }
13057
13058 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13059 C1 is a valid shift constant, and C2 is a power of two, i.e.
13060 a single bit. */
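/* For example, ((x >> 3) & 4) != 0 becomes (x & 32) != 0, because
   4 << 3 == 32 still fits in the precision; if the shifted bit would
   overflow a signed type, the test reduces to a sign check below. */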
13061 if (TREE_CODE (arg0) == BIT_AND_EXPR
13062 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13063 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13064 == INTEGER_CST
13065 && integer_pow2p (TREE_OPERAND (arg0, 1))
13066 && integer_zerop (arg1))
13067 {
13068 tree itype = TREE_TYPE (arg0);
13069 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13070 prec = TYPE_PRECISION (itype);
13071
13072 /* Check for a valid shift count. */
13073 if (wi::ltu_p (arg001, prec))
13074 {
13075 tree arg01 = TREE_OPERAND (arg0, 1);
13076 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13077 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13078 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13079 can be rewritten as (X & (C2 << C1)) != 0. */
13080 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13081 {
13082 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13083 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13084 return fold_build2_loc (loc, code, type, tem,
13085 fold_convert_loc (loc, itype, arg1));
13086 }
13087 /* Otherwise, for signed (arithmetic) shifts,
13088 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13089 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13090 else if (!TYPE_UNSIGNED (itype))
13091 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13092 arg000, build_int_cst (itype, 0));
13093 /* Otherwise, for unsigned (logical) shifts,
13094 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13095 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13096 else
13097 return omit_one_operand_loc (loc, type,
13098 code == EQ_EXPR ? integer_one_node
13099 : integer_zero_node,
13100 arg000);
13101 }
13102 }
13103
13104 /* If we have (A & C) == C where C is a power of 2, convert this into
13105 (A & C) != 0. Similarly for NE_EXPR. */
13106 if (TREE_CODE (arg0) == BIT_AND_EXPR
13107 && integer_pow2p (TREE_OPERAND (arg0, 1))
13108 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13109 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13110 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13111 integer_zero_node));
13112
13113 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13114 bit, then fold the expression into A < 0 or A >= 0. */
13115 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13116 if (tem)
13117 return tem;
13118
13119 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13120 Similarly for NE_EXPR. */
13121 if (TREE_CODE (arg0) == BIT_AND_EXPR
13122 && TREE_CODE (arg1) == INTEGER_CST
13123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13124 {
13125 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13126 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13127 TREE_OPERAND (arg0, 1));
13128 tree dandnotc
13129 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13130 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13131 notc);
13132 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13133 if (integer_nonzerop (dandnotc))
13134 return omit_one_operand_loc (loc, type, rslt, arg0);
13135 }
13136
13137 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13138 Similarly for NE_EXPR. */
13139 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13140 && TREE_CODE (arg1) == INTEGER_CST
13141 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13142 {
13143 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13144 tree candnotd
13145 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13146 TREE_OPERAND (arg0, 1),
13147 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13148 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13149 if (integer_nonzerop (candnotd))
13150 return omit_one_operand_loc (loc, type, rslt, arg0);
13151 }
13152
13153 /* If this is a comparison of a field, we may be able to simplify it. */
13154 if ((TREE_CODE (arg0) == COMPONENT_REF
13155 || TREE_CODE (arg0) == BIT_FIELD_REF)
13156 /* Handle the constant case even without -O
13157 to make sure the warnings are given. */
13158 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13159 {
13160 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13161 if (t1)
13162 return t1;
13163 }
13164
13165 /* Optimize comparisons of strlen vs zero to a compare of the
13166 first character of the string vs zero. To wit,
13167 strlen(ptr) == 0 => *ptr == 0
13168 strlen(ptr) != 0 => *ptr != 0
13169 Other cases should reduce to one of these two (or a constant)
13170 due to the return value of strlen being unsigned. */
13171 if (TREE_CODE (arg0) == CALL_EXPR
13172 && integer_zerop (arg1))
13173 {
13174 tree fndecl = get_callee_fndecl (arg0);
13175
13176 if (fndecl
13177 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13178 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13179 && call_expr_nargs (arg0) == 1
13180 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13181 {
13182 tree iref = build_fold_indirect_ref_loc (loc,
13183 CALL_EXPR_ARG (arg0, 0));
13184 return fold_build2_loc (loc, code, type, iref,
13185 build_int_cst (TREE_TYPE (iref), 0));
13186 }
13187 }
13188
13189 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13190 of X. Similarly fold (X >> C) == 0 into X >= 0. */
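/* For a 32-bit int x, (x >> 31) != 0 becomes x < 0 and
   (x >> 31) == 0 becomes x >= 0; unsigned operands are first
   converted to the corresponding signed type. */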
13191 if (TREE_CODE (arg0) == RSHIFT_EXPR
13192 && integer_zerop (arg1)
13193 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13194 {
13195 tree arg00 = TREE_OPERAND (arg0, 0);
13196 tree arg01 = TREE_OPERAND (arg0, 1);
13197 tree itype = TREE_TYPE (arg00);
13198 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
13199 {
13200 if (TYPE_UNSIGNED (itype))
13201 {
13202 itype = signed_type_for (itype);
13203 arg00 = fold_convert_loc (loc, itype, arg00);
13204 }
13205 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13206 type, arg00, build_zero_cst (itype));
13207 }
13208 }
13209
13210 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13211 if (integer_zerop (arg1)
13212 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13213 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13214 TREE_OPERAND (arg0, 1));
13215
13216 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13217 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13218 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13219 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13220 build_zero_cst (TREE_TYPE (arg0)));
13221 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13222 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13223 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13224 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13225 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13226 build_zero_cst (TREE_TYPE (arg0)));
13227
13228 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13229 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13230 && TREE_CODE (arg1) == INTEGER_CST
13231 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13232 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13233 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13234 TREE_OPERAND (arg0, 1), arg1));
13235
13236 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13237 (X & C) == 0 when C is a single bit. */
13238 if (TREE_CODE (arg0) == BIT_AND_EXPR
13239 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13240 && integer_zerop (arg1)
13241 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13242 {
13243 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13244 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13245 TREE_OPERAND (arg0, 1));
13246 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13247 type, tem,
13248 fold_convert_loc (loc, TREE_TYPE (arg0),
13249 arg1));
13250 }
13251
13252 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13253 constant C is a power of two, i.e. a single bit. */
13254 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13255 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13256 && integer_zerop (arg1)
13257 && integer_pow2p (TREE_OPERAND (arg0, 1))
13258 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13259 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13260 {
13261 tree arg00 = TREE_OPERAND (arg0, 0);
13262 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13263 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13264 }
13265
13266 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13267 when C is a power of two, i.e. a single bit. */
13268 if (TREE_CODE (arg0) == BIT_AND_EXPR
13269 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13270 && integer_zerop (arg1)
13271 && integer_pow2p (TREE_OPERAND (arg0, 1))
13272 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13273 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13274 {
13275 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13276 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13277 arg000, TREE_OPERAND (arg0, 1));
13278 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13279 tem, build_int_cst (TREE_TYPE (tem), 0));
13280 }
13281
13282 if (integer_zerop (arg1)
13283 && tree_expr_nonzero_p (arg0))
13284 {
13285 tree res = constant_boolean_node (code == NE_EXPR, type);
13286 return omit_one_operand_loc (loc, type, res, arg0);
13287 }
13288
13289 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13290 if (TREE_CODE (arg0) == NEGATE_EXPR
13291 && TREE_CODE (arg1) == NEGATE_EXPR)
13292 return fold_build2_loc (loc, code, type,
13293 TREE_OPERAND (arg0, 0),
13294 fold_convert_loc (loc, TREE_TYPE (arg0),
13295 TREE_OPERAND (arg1, 0)));
13296
13297 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13298 if (TREE_CODE (arg0) == BIT_AND_EXPR
13299 && TREE_CODE (arg1) == BIT_AND_EXPR)
13300 {
13301 tree arg00 = TREE_OPERAND (arg0, 0);
13302 tree arg01 = TREE_OPERAND (arg0, 1);
13303 tree arg10 = TREE_OPERAND (arg1, 0);
13304 tree arg11 = TREE_OPERAND (arg1, 1);
13305 tree itype = TREE_TYPE (arg0);
13306
13307 if (operand_equal_p (arg01, arg11, 0))
13308 return fold_build2_loc (loc, code, type,
13309 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13310 fold_build2_loc (loc,
13311 BIT_XOR_EXPR, itype,
13312 arg00, arg10),
13313 arg01),
13314 build_zero_cst (itype));
13315
13316 if (operand_equal_p (arg01, arg10, 0))
13317 return fold_build2_loc (loc, code, type,
13318 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13319 fold_build2_loc (loc,
13320 BIT_XOR_EXPR, itype,
13321 arg00, arg11),
13322 arg01),
13323 build_zero_cst (itype));
13324
13325 if (operand_equal_p (arg00, arg11, 0))
13326 return fold_build2_loc (loc, code, type,
13327 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13328 fold_build2_loc (loc,
13329 BIT_XOR_EXPR, itype,
13330 arg01, arg10),
13331 arg00),
13332 build_zero_cst (itype));
13333
13334 if (operand_equal_p (arg00, arg10, 0))
13335 return fold_build2_loc (loc, code, type,
13336 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13337 fold_build2_loc (loc,
13338 BIT_XOR_EXPR, itype,
13339 arg01, arg11),
13340 arg00),
13341 build_zero_cst (itype));
13342 }
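/* For the first symmetry above, e.g. (x & 4) == (y & 4) tests
   whether x and y agree in bit 2, i.e. ((x ^ y) & 4) == 0; the
   remaining three cases merely permute which operands match.  */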
13343
13344 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13345 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13346 {
13347 tree arg00 = TREE_OPERAND (arg0, 0);
13348 tree arg01 = TREE_OPERAND (arg0, 1);
13349 tree arg10 = TREE_OPERAND (arg1, 0);
13350 tree arg11 = TREE_OPERAND (arg1, 1);
13351 tree itype = TREE_TYPE (arg0);
13352
13353 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13354 operand_equal_p guarantees no side-effects so we don't need
13355 to use omit_one_operand on Z. */
13356 if (operand_equal_p (arg01, arg11, 0))
13357 return fold_build2_loc (loc, code, type, arg00,
13358 fold_convert_loc (loc, TREE_TYPE (arg00),
13359 arg10));
13360 if (operand_equal_p (arg01, arg10, 0))
13361 return fold_build2_loc (loc, code, type, arg00,
13362 fold_convert_loc (loc, TREE_TYPE (arg00),
13363 arg11));
13364 if (operand_equal_p (arg00, arg11, 0))
13365 return fold_build2_loc (loc, code, type, arg01,
13366 fold_convert_loc (loc, TREE_TYPE (arg01),
13367 arg10));
13368 if (operand_equal_p (arg00, arg10, 0))
13369 return fold_build2_loc (loc, code, type, arg01,
13370 fold_convert_loc (loc, TREE_TYPE (arg01),
13371 arg11));
13372
13373 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13374 if (TREE_CODE (arg01) == INTEGER_CST
13375 && TREE_CODE (arg11) == INTEGER_CST)
13376 {
13377 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13378 fold_convert_loc (loc, itype, arg11));
13379 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13380 return fold_build2_loc (loc, code, type, tem,
13381 fold_convert_loc (loc, itype, arg10));
13382 }
13383 }
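/* E.g. (x ^ 1) == (y ^ 3) reaches the constant case above and folds
   to (x ^ (1 ^ 3)) == y, i.e. (x ^ 2) == y.  */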
13384
13385 /* Attempt to simplify equality/inequality comparisons of complex
13386 values. Only lower the comparison if the result is known or
13387 can be simplified to a single scalar comparison. */
13388 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13389 || TREE_CODE (arg0) == COMPLEX_CST)
13390 && (TREE_CODE (arg1) == COMPLEX_EXPR
13391 || TREE_CODE (arg1) == COMPLEX_CST))
13392 {
13393 tree real0, imag0, real1, imag1;
13394 tree rcond, icond;
13395
13396 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13397 {
13398 real0 = TREE_OPERAND (arg0, 0);
13399 imag0 = TREE_OPERAND (arg0, 1);
13400 }
13401 else
13402 {
13403 real0 = TREE_REALPART (arg0);
13404 imag0 = TREE_IMAGPART (arg0);
13405 }
13406
13407 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13408 {
13409 real1 = TREE_OPERAND (arg1, 0);
13410 imag1 = TREE_OPERAND (arg1, 1);
13411 }
13412 else
13413 {
13414 real1 = TREE_REALPART (arg1);
13415 imag1 = TREE_IMAGPART (arg1);
13416 }
13417
13418 rcond = fold_binary_loc (loc, code, type, real0, real1);
13419 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13420 {
13421 if (integer_zerop (rcond))
13422 {
13423 if (code == EQ_EXPR)
13424 return omit_two_operands_loc (loc, type, boolean_false_node,
13425 imag0, imag1);
13426 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13427 }
13428 else
13429 {
13430 if (code == NE_EXPR)
13431 return omit_two_operands_loc (loc, type, boolean_true_node,
13432 imag0, imag1);
13433 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13434 }
13435 }
13436
13437 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13438 if (icond && TREE_CODE (icond) == INTEGER_CST)
13439 {
13440 if (integer_zerop (icond))
13441 {
13442 if (code == EQ_EXPR)
13443 return omit_two_operands_loc (loc, type, boolean_false_node,
13444 real0, real1);
13445 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13446 }
13447 else
13448 {
13449 if (code == NE_EXPR)
13450 return omit_two_operands_loc (loc, type, boolean_true_node,
13451 real0, real1);
13452 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13453 }
13454 }
13455 }
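/* E.g. for integral complex values, <a, 0> == <b, 1> folds to false
   outright because the imaginary parts compare unequal as constants,
   while <a, c> == <b, c> reduces to the scalar comparison a == b.  */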
13456
13457 return NULL_TREE;
13458
13459 case LT_EXPR:
13460 case GT_EXPR:
13461 case LE_EXPR:
13462 case GE_EXPR:
13463 tem = fold_comparison (loc, code, type, op0, op1);
13464 if (tem != NULL_TREE)
13465 return tem;
13466
13467 /* Transform comparisons of the form X +- C CMP X. */
13468 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13469 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13470 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13471 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13472 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13473 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13474 {
13475 tree arg01 = TREE_OPERAND (arg0, 1);
13476 enum tree_code code0 = TREE_CODE (arg0);
13477 int is_positive;
13478
13479 if (TREE_CODE (arg01) == REAL_CST)
13480 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13481 else
13482 is_positive = tree_int_cst_sgn (arg01);
13483
13484 /* (X - c) > X becomes false. */
13485 if (code == GT_EXPR
13486 && ((code0 == MINUS_EXPR && is_positive >= 0)
13487 || (code0 == PLUS_EXPR && is_positive <= 0)))
13488 {
13489 if (TREE_CODE (arg01) == INTEGER_CST
13490 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13491 fold_overflow_warning (("assuming signed overflow does not "
13492 "occur when assuming that (X - c) > X "
13493 "is always false"),
13494 WARN_STRICT_OVERFLOW_ALL);
13495 return constant_boolean_node (0, type);
13496 }
13497
13498 /* Likewise (X + c) < X becomes false. */
13499 if (code == LT_EXPR
13500 && ((code0 == PLUS_EXPR && is_positive >= 0)
13501 || (code0 == MINUS_EXPR && is_positive <= 0)))
13502 {
13503 if (TREE_CODE (arg01) == INTEGER_CST
13504 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13505 fold_overflow_warning (("assuming signed overflow does not "
13506 "occur when assuming that "
13507 "(X + c) < X is always false"),
13508 WARN_STRICT_OVERFLOW_ALL);
13509 return constant_boolean_node (0, type);
13510 }
13511
13512 /* Convert (X - c) <= X to true. */
13513 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13514 && code == LE_EXPR
13515 && ((code0 == MINUS_EXPR && is_positive >= 0)
13516 || (code0 == PLUS_EXPR && is_positive <= 0)))
13517 {
13518 if (TREE_CODE (arg01) == INTEGER_CST
13519 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13520 fold_overflow_warning (("assuming signed overflow does not "
13521 "occur when assuming that "
13522 "(X - c) <= X is always true"),
13523 WARN_STRICT_OVERFLOW_ALL);
13524 return constant_boolean_node (1, type);
13525 }
13526
13527 /* Convert (X + c) >= X to true. */
13528 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13529 && code == GE_EXPR
13530 && ((code0 == PLUS_EXPR && is_positive >= 0)
13531 || (code0 == MINUS_EXPR && is_positive <= 0)))
13532 {
13533 if (TREE_CODE (arg01) == INTEGER_CST
13534 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13535 fold_overflow_warning (("assuming signed overflow does not "
13536 "occur when assuming that "
13537 "(X + c) >= X is always true"),
13538 WARN_STRICT_OVERFLOW_ALL);
13539 return constant_boolean_node (1, type);
13540 }
13541
13542 if (TREE_CODE (arg01) == INTEGER_CST)
13543 {
13544 /* Convert X + c > X and X - c < X to true for integers. */
13545 if (code == GT_EXPR
13546 && ((code0 == PLUS_EXPR && is_positive > 0)
13547 || (code0 == MINUS_EXPR && is_positive < 0)))
13548 {
13549 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13550 fold_overflow_warning (("assuming signed overflow does "
13551 "not occur when assuming that "
13552 "(X + c) > X is always true"),
13553 WARN_STRICT_OVERFLOW_ALL);
13554 return constant_boolean_node (1, type);
13555 }
13556
13557 if (code == LT_EXPR
13558 && ((code0 == MINUS_EXPR && is_positive > 0)
13559 || (code0 == PLUS_EXPR && is_positive < 0)))
13560 {
13561 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13562 fold_overflow_warning (("assuming signed overflow does "
13563 "not occur when assuming that "
13564 "(X - c) < X is always true"),
13565 WARN_STRICT_OVERFLOW_ALL);
13566 return constant_boolean_node (1, type);
13567 }
13568
13569 /* Convert X + c <= X and X - c >= X to false for integers. */
13570 if (code == LE_EXPR
13571 && ((code0 == PLUS_EXPR && is_positive > 0)
13572 || (code0 == MINUS_EXPR && is_positive < 0)))
13573 {
13574 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13575 fold_overflow_warning (("assuming signed overflow does "
13576 "not occur when assuming that "
13577 "(X + c) <= X is always false"),
13578 WARN_STRICT_OVERFLOW_ALL);
13579 return constant_boolean_node (0, type);
13580 }
13581
13582 if (code == GE_EXPR
13583 && ((code0 == MINUS_EXPR && is_positive > 0)
13584 || (code0 == PLUS_EXPR && is_positive < 0)))
13585 {
13586 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13587 fold_overflow_warning (("assuming signed overflow does "
13588 "not occur when assuming that "
13589 "(X - c) >= X is always false"),
13590 WARN_STRICT_OVERFLOW_ALL);
13591 return constant_boolean_node (0, type);
13592 }
13593 }
13594 }
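/* All of the folds above lean on signed overflow being undefined:
   for signed x, x + 1 > x is folded to true even though with
   wrapping semantics it is false for x == INT_MAX, hence the
   TYPE_OVERFLOW_UNDEFINED guards and the strict-overflow
   warnings.  */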
13595
13596 /* Comparisons with the highest or lowest possible integer of
13597 the specified precision will have known values. */
13598 {
13599 tree arg1_type = TREE_TYPE (arg1);
13600 unsigned int prec = TYPE_PRECISION (arg1_type);
13601
13602 if (TREE_CODE (arg1) == INTEGER_CST
13603 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13604 {
13605 wide_int max = wi::max_value (arg1_type);
13606 wide_int signed_max = wi::max_value (prec, SIGNED);
13607 wide_int min = wi::min_value (arg1_type);
13608
13609 if (wi::eq_p (arg1, max))
13610 switch (code)
13611 {
13612 case GT_EXPR:
13613 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13614
13615 case GE_EXPR:
13616 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13617
13618 case LE_EXPR:
13619 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13620
13621 case LT_EXPR:
13622 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13623
13624 /* The GE_EXPR and LT_EXPR cases above are not normally
13625 reached because of previous transformations. */
13626
13627 default:
13628 break;
13629 }
13630 else if (wi::eq_p (arg1, max - 1))
13631 switch (code)
13632 {
13633 case GT_EXPR:
13634 arg1 = const_binop (PLUS_EXPR, arg1,
13635 build_int_cst (TREE_TYPE (arg1), 1));
13636 return fold_build2_loc (loc, EQ_EXPR, type,
13637 fold_convert_loc (loc,
13638 TREE_TYPE (arg1), arg0),
13639 arg1);
13640 case LE_EXPR:
13641 arg1 = const_binop (PLUS_EXPR, arg1,
13642 build_int_cst (TREE_TYPE (arg1), 1));
13643 return fold_build2_loc (loc, NE_EXPR, type,
13644 fold_convert_loc (loc, TREE_TYPE (arg1),
13645 arg0),
13646 arg1);
13647 default:
13648 break;
13649 }
13650 else if (wi::eq_p (arg1, min))
13651 switch (code)
13652 {
13653 case LT_EXPR:
13654 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13655
13656 case LE_EXPR:
13657 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13658
13659 case GE_EXPR:
13660 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13661
13662 case GT_EXPR:
13663 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13664
13665 default:
13666 break;
13667 }
13668 else if (wi::eq_p (arg1, min + 1))
13669 switch (code)
13670 {
13671 case GE_EXPR:
13672 arg1 = const_binop (MINUS_EXPR, arg1,
13673 build_int_cst (TREE_TYPE (arg1), 1));
13674 return fold_build2_loc (loc, NE_EXPR, type,
13675 fold_convert_loc (loc,
13676 TREE_TYPE (arg1), arg0),
13677 arg1);
13678 case LT_EXPR:
13679 arg1 = const_binop (MINUS_EXPR, arg1,
13680 build_int_cst (TREE_TYPE (arg1), 1));
13681 return fold_build2_loc (loc, EQ_EXPR, type,
13682 fold_convert_loc (loc, TREE_TYPE (arg1),
13683 arg0),
13684 arg1);
13685 default:
13686 break;
13687 }
13688
13689 else if (wi::eq_p (arg1, signed_max)
13690 && TYPE_UNSIGNED (arg1_type)
13691 /* We will flip the signedness of the comparison operator
13692 associated with the mode of arg1, so the sign bit is
13693 specified by this mode. Check that arg1 is the signed
13694 max associated with this sign bit. */
13695 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13696 /* signed_type does not work on pointer types. */
13697 && INTEGRAL_TYPE_P (arg1_type))
13698 {
13699 /* The following case also applies to X < signed_max+1
13700 and X >= signed_max+1 because of previous transformations. */
13701 if (code == LE_EXPR || code == GT_EXPR)
13702 {
13703 tree st = signed_type_for (arg1_type);
13704 return fold_build2_loc (loc,
13705 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13706 type, fold_convert_loc (loc, st, arg0),
13707 build_int_cst (st, 0));
13708 }
13709 }
13710 }
13711 }
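/* For an unsigned char x these rules give, for example,
   x > 255 -> false, x <= 255 -> true, x > 254 -> x == 255,
   and x < 1 -> x == 0.  */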
13712
13713 /* If we are comparing an ABS_EXPR with a constant, we can
13714 convert all the cases into explicit comparisons, but they may
13715 well not be faster than doing the ABS and one comparison.
13716 But ABS (X) <= C is a range comparison, which becomes a subtraction
13717 and a comparison, and is probably faster. */
13718 if (code == LE_EXPR
13719 && TREE_CODE (arg1) == INTEGER_CST
13720 && TREE_CODE (arg0) == ABS_EXPR
13721 && ! TREE_SIDE_EFFECTS (arg0)
13722 && (0 != (tem = negate_expr (arg1)))
13723 && TREE_CODE (tem) == INTEGER_CST
13724 && !TREE_OVERFLOW (tem))
13725 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13726 build2 (GE_EXPR, type,
13727 TREE_OPERAND (arg0, 0), tem),
13728 build2 (LE_EXPR, type,
13729 TREE_OPERAND (arg0, 0), arg1));
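/* E.g. abs (x) <= 5 becomes x >= -5 && x <= 5, a range test that
   later range folding can typically reduce to a single unsigned
   comparison such as (unsigned) x + 5 <= 10.  */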
13730
13731 /* Convert ABS_EXPR<x> >= 0 to true. */
13732 strict_overflow_p = false;
13733 if (code == GE_EXPR
13734 && (integer_zerop (arg1)
13735 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13736 && real_zerop (arg1)))
13737 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13738 {
13739 if (strict_overflow_p)
13740 fold_overflow_warning (("assuming signed overflow does not occur "
13741 "when simplifying comparison of "
13742 "absolute value and zero"),
13743 WARN_STRICT_OVERFLOW_CONDITIONAL);
13744 return omit_one_operand_loc (loc, type,
13745 constant_boolean_node (true, type),
13746 arg0);
13747 }
13748
13749 /* Convert ABS_EXPR<x> < 0 to false. */
13750 strict_overflow_p = false;
13751 if (code == LT_EXPR
13752 && (integer_zerop (arg1) || real_zerop (arg1))
13753 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13754 {
13755 if (strict_overflow_p)
13756 fold_overflow_warning (("assuming signed overflow does not occur "
13757 "when simplifying comparison of "
13758 "absolute value and zero"),
13759 WARN_STRICT_OVERFLOW_CONDITIONAL);
13760 return omit_one_operand_loc (loc, type,
13761 constant_boolean_node (false, type),
13762 arg0);
13763 }
13764
13765 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13766 and similarly for >= into !=. */
13767 if ((code == LT_EXPR || code == GE_EXPR)
13768 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13769 && TREE_CODE (arg1) == LSHIFT_EXPR
13770 && integer_onep (TREE_OPERAND (arg1, 0)))
13771 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13772 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13773 TREE_OPERAND (arg1, 1)),
13774 build_zero_cst (TREE_TYPE (arg0)));
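/* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0 and
   x >= (1 << y) becomes (x >> y) != 0, trading the shift of the
   constant for a shift of x.  */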
13775
13776 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13777 otherwise Y might be >= # of bits in X's type and thus e.g.
13778 (unsigned char) (1 << Y) for Y == 15 might be 0.
13779 If the cast is widening, then 1 << Y should have unsigned type,
13780 otherwise if Y is number of bits in the signed shift type minus 1,
13781 we can't optimize this. E.g. (unsigned long long) (1 << Y)
13782 for Y == 31 might be 0xffffffff80000000. */
13783 if ((code == LT_EXPR || code == GE_EXPR)
13784 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13785 && CONVERT_EXPR_P (arg1)
13786 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13787 && (TYPE_PRECISION (TREE_TYPE (arg1))
13788 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13789 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13790 || (TYPE_PRECISION (TREE_TYPE (arg1))
13791 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13792 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13793 {
13794 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13795 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13796 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13797 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13798 build_zero_cst (TREE_TYPE (arg0)));
13799 }
13800
13801 return NULL_TREE;
13802
13803 case UNORDERED_EXPR:
13804 case ORDERED_EXPR:
13805 case UNLT_EXPR:
13806 case UNLE_EXPR:
13807 case UNGT_EXPR:
13808 case UNGE_EXPR:
13809 case UNEQ_EXPR:
13810 case LTGT_EXPR:
13811 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13812 {
13813 t1 = fold_relational_const (code, type, arg0, arg1);
13814 if (t1 != NULL_TREE)
13815 return t1;
13816 }
13817
13818 /* If the first operand is NaN, the result is constant. */
13819 if (TREE_CODE (arg0) == REAL_CST
13820 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13821 && (code != LTGT_EXPR || ! flag_trapping_math))
13822 {
13823 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13824 ? integer_zero_node
13825 : integer_one_node;
13826 return omit_one_operand_loc (loc, type, t1, arg1);
13827 }
13828
13829 /* If the second operand is NaN, the result is constant. */
13830 if (TREE_CODE (arg1) == REAL_CST
13831 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13832 && (code != LTGT_EXPR || ! flag_trapping_math))
13833 {
13834 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13835 ? integer_zero_node
13836 : integer_one_node;
13837 return omit_one_operand_loc (loc, type, t1, arg0);
13838 }
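/* E.g. UNORDERED_EXPR (x, NaN) folds to true and LTGT_EXPR (x, NaN)
   folds to false, the latter only when -ftrapping-math is off
   because LTGT is a signaling comparison.  */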
13839
13840 /* Simplify unordered comparison of something with itself. */
13841 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13842 && operand_equal_p (arg0, arg1, 0))
13843 return constant_boolean_node (1, type);
13844
13845 if (code == LTGT_EXPR
13846 && !flag_trapping_math
13847 && operand_equal_p (arg0, arg1, 0))
13848 return constant_boolean_node (0, type);
13849
13850 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13851 {
13852 tree targ0 = strip_float_extensions (arg0);
13853 tree targ1 = strip_float_extensions (arg1);
13854 tree newtype = TREE_TYPE (targ0);
13855
13856 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13857 newtype = TREE_TYPE (targ1);
13858
13859 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13860 return fold_build2_loc (loc, code, type,
13861 fold_convert_loc (loc, newtype, targ0),
13862 fold_convert_loc (loc, newtype, targ1));
13863 }
13864
13865 return NULL_TREE;
13866
13867 case COMPOUND_EXPR:
13868 /* When pedantic, a compound expression can be neither an lvalue
13869 nor an integer constant expression. */
13870 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13871 return NULL_TREE;
13872 /* Don't let (0, 0) be a null pointer constant. */
13873 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13874 : fold_convert_loc (loc, type, arg1);
13875 return pedantic_non_lvalue_loc (loc, tem);
13876
13877 case COMPLEX_EXPR:
13878 if ((TREE_CODE (arg0) == REAL_CST
13879 && TREE_CODE (arg1) == REAL_CST)
13880 || (TREE_CODE (arg0) == INTEGER_CST
13881 && TREE_CODE (arg1) == INTEGER_CST))
13882 return build_complex (type, arg0, arg1);
13883 if (TREE_CODE (arg0) == REALPART_EXPR
13884 && TREE_CODE (arg1) == IMAGPART_EXPR
13885 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13886 && operand_equal_p (TREE_OPERAND (arg0, 0),
13887 TREE_OPERAND (arg1, 0), 0))
13888 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13889 TREE_OPERAND (arg1, 0));
13890 return NULL_TREE;
13891
13892 case ASSERT_EXPR:
13893 /* An ASSERT_EXPR should never be passed to fold_binary. */
13894 gcc_unreachable ();
13895
13896 case VEC_PACK_TRUNC_EXPR:
13897 case VEC_PACK_FIX_TRUNC_EXPR:
13898 {
13899 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13900 tree *elts;
13901
13902 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13903 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13904 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13905 return NULL_TREE;
13906
13907 elts = XALLOCAVEC (tree, nelts);
13908 if (!vec_cst_ctor_to_array (arg0, elts)
13909 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13910 return NULL_TREE;
13911
13912 for (i = 0; i < nelts; i++)
13913 {
13914 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13915 ? NOP_EXPR : FIX_TRUNC_EXPR,
13916 TREE_TYPE (type), elts[i]);
13917 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13918 return NULL_TREE;
13919 }
13920
13921 return build_vector (type, elts);
13922 }
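/* E.g. packing the two V2SI constants { 1, 2 } and { 3, 4 } with
   truncation produces the V4HI constant { 1, 2, 3, 4 }.  */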
13923
13924 case VEC_WIDEN_MULT_LO_EXPR:
13925 case VEC_WIDEN_MULT_HI_EXPR:
13926 case VEC_WIDEN_MULT_EVEN_EXPR:
13927 case VEC_WIDEN_MULT_ODD_EXPR:
13928 {
13929 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13930 unsigned int out, ofs, scale;
13931 tree *elts;
13932
13933 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13934 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13935 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13936 return NULL_TREE;
13937
13938 elts = XALLOCAVEC (tree, nelts * 4);
13939 if (!vec_cst_ctor_to_array (arg0, elts)
13940 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13941 return NULL_TREE;
13942
13943 if (code == VEC_WIDEN_MULT_LO_EXPR)
13944 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13945 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13946 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13947 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13948 scale = 1, ofs = 0;
13949 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13950 scale = 1, ofs = 1;
13951
13952 for (out = 0; out < nelts; out++)
13953 {
13954 unsigned int in1 = (out << scale) + ofs;
13955 unsigned int in2 = in1 + nelts * 2;
13956 tree t1, t2;
13957
13958 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13959 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13960
13961 if (t1 == NULL_TREE || t2 == NULL_TREE)
13962 return NULL_TREE;
13963 elts[out] = const_binop (MULT_EXPR, t1, t2);
13964 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13965 return NULL_TREE;
13966 }
13967
13968 return build_vector (type, elts);
13969 }
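/* E.g. VEC_WIDEN_MULT_EVEN_EXPR on { 1, 2, 3, 4 } and { 5, 6, 7, 8 }
   multiplies the even-numbered lanes and yields { 1 * 5, 3 * 7 };
   the LO/HI variants instead select a contiguous half, chosen
   according to BYTES_BIG_ENDIAN.  */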
13970
13971 default:
13972 return NULL_TREE;
13973 } /* switch (code) */
13974 }
13975
13976 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13977 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13978 of GOTO_EXPR. */
13979
13980 static tree
13981 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13982 {
13983 switch (TREE_CODE (*tp))
13984 {
13985 case LABEL_EXPR:
13986 return *tp;
13987
13988 case GOTO_EXPR:
13989 *walk_subtrees = 0;
13990
13991 /* ... fall through ... */
13992
13993 default:
13994 return NULL_TREE;
13995 }
13996 }
13997
13998 /* Return whether the sub-tree ST contains a label which is accessible from
13999 outside the sub-tree. */
14000
14001 static bool
14002 contains_label_p (tree st)
14003 {
14004 return
14005 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
14006 }
14007
14008 /* Fold a ternary expression of code CODE and type TYPE with operands
14009 OP0, OP1, and OP2. Return the folded expression if folding is
14010 successful. Otherwise, return NULL_TREE. */
14011
14012 tree
14013 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14014 tree op0, tree op1, tree op2)
14015 {
14016 tree tem;
14017 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14018 enum tree_code_class kind = TREE_CODE_CLASS (code);
14019
14020 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14021 && TREE_CODE_LENGTH (code) == 3);
14022
14023 /* Strip any conversions that don't change the mode. This is safe
14024 for every expression, except for a comparison expression because
14025 its signedness is derived from its operands. So, in the latter
14026 case, only strip conversions that don't change the signedness.
14027
14028 Note that this is done as an internal manipulation within the
14029 constant folder, in order to find the simplest representation of
14030 the arguments so that their form can be studied. In any case,
14031 the appropriate type conversions should be put back in the tree
14032 that will get out of the constant folder. */
14033 if (op0)
14034 {
14035 arg0 = op0;
14036 STRIP_NOPS (arg0);
14037 }
14038
14039 if (op1)
14040 {
14041 arg1 = op1;
14042 STRIP_NOPS (arg1);
14043 }
14044
14045 if (op2)
14046 {
14047 arg2 = op2;
14048 STRIP_NOPS (arg2);
14049 }
14050
14051 switch (code)
14052 {
14053 case COMPONENT_REF:
14054 if (TREE_CODE (arg0) == CONSTRUCTOR
14055 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14056 {
14057 unsigned HOST_WIDE_INT idx;
14058 tree field, value;
14059 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14060 if (field == arg1)
14061 return value;
14062 }
14063 return NULL_TREE;
14064
14065 case COND_EXPR:
14066 case VEC_COND_EXPR:
14067 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14068 so all simple results must be passed through pedantic_non_lvalue. */
14069 if (TREE_CODE (arg0) == INTEGER_CST)
14070 {
14071 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14072 tem = integer_zerop (arg0) ? op2 : op1;
14073 /* Only optimize constant conditions when the selected branch
14074 has the same type as the COND_EXPR. This avoids optimizing
14075 away "c ? x : throw", where the throw has a void type.
14076 Avoid throwing away an operand that contains a label. */
14077 if ((!TREE_SIDE_EFFECTS (unused_op)
14078 || !contains_label_p (unused_op))
14079 && (! VOID_TYPE_P (TREE_TYPE (tem))
14080 || VOID_TYPE_P (type)))
14081 return pedantic_non_lvalue_loc (loc, tem);
14082 return NULL_TREE;
14083 }
14084 else if (TREE_CODE (arg0) == VECTOR_CST)
14085 {
14086 if (integer_all_onesp (arg0))
14087 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14088 if (integer_zerop (arg0))
14089 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14090
14091 if ((TREE_CODE (arg1) == VECTOR_CST
14092 || TREE_CODE (arg1) == CONSTRUCTOR)
14093 && (TREE_CODE (arg2) == VECTOR_CST
14094 || TREE_CODE (arg2) == CONSTRUCTOR))
14095 {
14096 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14097 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14098 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14099 for (i = 0; i < nelts; i++)
14100 {
14101 tree val = VECTOR_CST_ELT (arg0, i);
14102 if (integer_all_onesp (val))
14103 sel[i] = i;
14104 else if (integer_zerop (val))
14105 sel[i] = nelts + i;
14106 else /* Currently unreachable. */
14107 return NULL_TREE;
14108 }
14109 tree t = fold_vec_perm (type, arg1, arg2, sel);
14110 if (t != NULL_TREE)
14111 return t;
14112 }
14113 }
14114
14115 if (operand_equal_p (arg1, op2, 0))
14116 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14117
14118 /* If we have A op B ? A : C, we may be able to convert this to a
14119 simpler expression, depending on the operation and the values
14120 of B and C. Signed zeros prevent all of these transformations,
14121 for reasons given above each one.
14122
14123 Also try swapping the arguments and inverting the conditional. */
14124 if (COMPARISON_CLASS_P (arg0)
14125 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14126 arg1, TREE_OPERAND (arg0, 1))
14127 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14128 {
14129 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14130 if (tem)
14131 return tem;
14132 }
14133
14134 if (COMPARISON_CLASS_P (arg0)
14135 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14136 op2,
14137 TREE_OPERAND (arg0, 1))
14138 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14139 {
14140 location_t loc0 = expr_location_or (arg0, loc);
14141 tem = fold_invert_truthvalue (loc0, arg0);
14142 if (tem && COMPARISON_CLASS_P (tem))
14143 {
14144 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14145 if (tem)
14146 return tem;
14147 }
14148 }
14149
14150 /* If the second operand is simpler than the third, swap them
14151 since that produces better jump optimization results. */
14152 if (truth_value_p (TREE_CODE (arg0))
14153 && tree_swap_operands_p (op1, op2, false))
14154 {
14155 location_t loc0 = expr_location_or (arg0, loc);
14156 /* See if this can be inverted. If it can't, possibly because
14157 it was a floating-point inequality comparison, don't do
14158 anything. */
14159 tem = fold_invert_truthvalue (loc0, arg0);
14160 if (tem)
14161 return fold_build3_loc (loc, code, type, tem, op2, op1);
14162 }
14163
14164 /* Convert A ? 1 : 0 to simply A. */
14165 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14166 : (integer_onep (op1)
14167 && !VECTOR_TYPE_P (type)))
14168 && integer_zerop (op2)
14169 /* If we try to convert OP0 to our type, the
14170 call to fold will try to move the conversion inside
14171 a COND, which will recurse. In that case, the COND_EXPR
14172 is probably the best choice, so leave it alone. */
14173 && type == TREE_TYPE (arg0))
14174 return pedantic_non_lvalue_loc (loc, arg0);
14175
14176 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14177 over COND_EXPR in cases such as floating point comparisons. */
14178 if (integer_zerop (op1)
14179 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14180 : (integer_onep (op2)
14181 && !VECTOR_TYPE_P (type)))
14182 && truth_value_p (TREE_CODE (arg0)))
14183 return pedantic_non_lvalue_loc (loc,
14184 fold_convert_loc (loc, type,
14185 invert_truthvalue_loc (loc,
14186 arg0)));
14187
14188 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14189 if (TREE_CODE (arg0) == LT_EXPR
14190 && integer_zerop (TREE_OPERAND (arg0, 1))
14191 && integer_zerop (op2)
14192 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14193 {
14194 /* sign_bit_p looks through both zero and sign extensions,
14195 but for this optimization only sign extensions are
14196 usable. */
14197 tree tem2 = TREE_OPERAND (arg0, 0);
14198 while (tem != tem2)
14199 {
14200 if (TREE_CODE (tem2) != NOP_EXPR
14201 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14202 {
14203 tem = NULL_TREE;
14204 break;
14205 }
14206 tem2 = TREE_OPERAND (tem2, 0);
14207 }
14208 /* sign_bit_p only checks ARG1 bits within A's precision.
14209 If <sign bit of A> has wider type than A, bits outside
14210 of A's precision in <sign bit of A> need to be checked.
14211 If they are all 0, this optimization needs to be done
14212 in unsigned A's type; if they are all 1, in signed A's type;
14213 otherwise this can't be done. */
14214 if (tem
14215 && TYPE_PRECISION (TREE_TYPE (tem))
14216 < TYPE_PRECISION (TREE_TYPE (arg1))
14217 && TYPE_PRECISION (TREE_TYPE (tem))
14218 < TYPE_PRECISION (type))
14219 {
14220 int inner_width, outer_width;
14221 tree tem_type;
14222
14223 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14224 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14225 if (outer_width > TYPE_PRECISION (type))
14226 outer_width = TYPE_PRECISION (type);
14227
14228 wide_int mask = wi::shifted_mask
14229 (inner_width, outer_width - inner_width, false,
14230 TYPE_PRECISION (TREE_TYPE (arg1)));
14231
14232 wide_int common = mask & arg1;
14233 if (common == mask)
14234 {
14235 tem_type = signed_type_for (TREE_TYPE (tem));
14236 tem = fold_convert_loc (loc, tem_type, tem);
14237 }
14238 else if (common == 0)
14239 {
14240 tem_type = unsigned_type_for (TREE_TYPE (tem));
14241 tem = fold_convert_loc (loc, tem_type, tem);
14242 }
14243 else
14244 tem = NULL;
14245 }
14246
14247 if (tem)
14248 return
14249 fold_convert_loc (loc, type,
14250 fold_build2_loc (loc, BIT_AND_EXPR,
14251 TREE_TYPE (tem), tem,
14252 fold_convert_loc (loc,
14253 TREE_TYPE (tem),
14254 arg1)));
14255 }
14256
14257 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14258 already handled above. */
14259 if (TREE_CODE (arg0) == BIT_AND_EXPR
14260 && integer_onep (TREE_OPERAND (arg0, 1))
14261 && integer_zerop (op2)
14262 && integer_pow2p (arg1))
14263 {
14264 tree tem = TREE_OPERAND (arg0, 0);
14265 STRIP_NOPS (tem);
14266 if (TREE_CODE (tem) == RSHIFT_EXPR
14267 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14268 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
14269 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
14270 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14271 TREE_OPERAND (tem, 0), arg1);
14272 }
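/* E.g. (a >> 3) & 1 ? 8 : 0 is simply a & 8, because tree_log2 (8)
   matches the shift count 3.  */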
14273
14274 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14275 is probably obsolete because the first operand should be a
14276 truth value (that's why we have the two cases above), but let's
14277 leave it in until we can confirm this for all front-ends. */
14278 if (integer_zerop (op2)
14279 && TREE_CODE (arg0) == NE_EXPR
14280 && integer_zerop (TREE_OPERAND (arg0, 1))
14281 && integer_pow2p (arg1)
14282 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14283 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14284 arg1, OEP_ONLY_CONST))
14285 return pedantic_non_lvalue_loc (loc,
14286 fold_convert_loc (loc, type,
14287 TREE_OPERAND (arg0, 0)));
14288
14289 /* Disable the transformations below for vectors, since
14290 fold_binary_op_with_conditional_arg may undo them immediately,
14291 yielding an infinite loop. */
14292 if (code == VEC_COND_EXPR)
14293 return NULL_TREE;
14294
14295 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14296 if (integer_zerop (op2)
14297 && truth_value_p (TREE_CODE (arg0))
14298 && truth_value_p (TREE_CODE (arg1))
14299 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14300 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14301 : TRUTH_ANDIF_EXPR,
14302 type, fold_convert_loc (loc, type, arg0), arg1);
14303
14304 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14305 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14306 && truth_value_p (TREE_CODE (arg0))
14307 && truth_value_p (TREE_CODE (arg1))
14308 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14309 {
14310 location_t loc0 = expr_location_or (arg0, loc);
14311 /* Only perform transformation if ARG0 is easily inverted. */
14312 tem = fold_invert_truthvalue (loc0, arg0);
14313 if (tem)
14314 return fold_build2_loc (loc, code == VEC_COND_EXPR
14315 ? BIT_IOR_EXPR
14316 : TRUTH_ORIF_EXPR,
14317 type, fold_convert_loc (loc, type, tem),
14318 arg1);
14319 }
14320
14321 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14322 if (integer_zerop (arg1)
14323 && truth_value_p (TREE_CODE (arg0))
14324 && truth_value_p (TREE_CODE (op2))
14325 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14326 {
14327 location_t loc0 = expr_location_or (arg0, loc);
14328 /* Only perform transformation if ARG0 is easily inverted. */
14329 tem = fold_invert_truthvalue (loc0, arg0);
14330 if (tem)
14331 return fold_build2_loc (loc, code == VEC_COND_EXPR
14332 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14333 type, fold_convert_loc (loc, type, tem),
14334 op2);
14335 }
14336
14337 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14338 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14339 && truth_value_p (TREE_CODE (arg0))
14340 && truth_value_p (TREE_CODE (op2))
14341 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14342 return fold_build2_loc (loc, code == VEC_COND_EXPR
14343 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14344 type, fold_convert_loc (loc, type, arg0), op2);
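/* Taken together, the four folds above rewrite a ? b : 0 as a && b,
   a ? b : 1 as !a || b, a ? 0 : b as !a && b, and a ? 1 : b as
   a || b, whenever both operands are truth values.  */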
14345
14346 return NULL_TREE;
14347
14348 case CALL_EXPR:
14349 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14350 of fold_ternary on them. */
14351 gcc_unreachable ();
14352
14353 case BIT_FIELD_REF:
14354 if ((TREE_CODE (arg0) == VECTOR_CST
14355 || (TREE_CODE (arg0) == CONSTRUCTOR
14356 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14357 && (type == TREE_TYPE (TREE_TYPE (arg0))
14358 || (TREE_CODE (type) == VECTOR_TYPE
14359 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14360 {
14361 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14362 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14363 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14364 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14365
14366 if (n != 0
14367 && (idx % width) == 0
14368 && (n % width) == 0
14369 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14370 {
14371 idx = idx / width;
14372 n = n / width;
14373
14374 if (TREE_CODE (arg0) == VECTOR_CST)
14375 {
14376 if (n == 1)
14377 return VECTOR_CST_ELT (arg0, idx);
14378
14379 tree *vals = XALLOCAVEC (tree, n);
14380 for (unsigned i = 0; i < n; ++i)
14381 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14382 return build_vector (type, vals);
14383 }
14384
14385 /* Constructor elements can be subvectors. */
14386 unsigned HOST_WIDE_INT k = 1;
14387 if (CONSTRUCTOR_NELTS (arg0) != 0)
14388 {
14389 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14390 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14391 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14392 }
14393
14394 /* We keep an exact subset of the constructor elements. */
14395 if ((idx % k) == 0 && (n % k) == 0)
14396 {
14397 if (CONSTRUCTOR_NELTS (arg0) == 0)
14398 return build_constructor (type, NULL);
14399 idx /= k;
14400 n /= k;
14401 if (n == 1)
14402 {
14403 if (idx < CONSTRUCTOR_NELTS (arg0))
14404 return CONSTRUCTOR_ELT (arg0, idx)->value;
14405 return build_zero_cst (type);
14406 }
14407
14408 vec<constructor_elt, va_gc> *vals;
14409 vec_alloc (vals, n);
14410 for (unsigned i = 0;
14411 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14412 ++i)
14413 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14414 CONSTRUCTOR_ELT
14415 (arg0, idx + i)->value);
14416 return build_constructor (type, vals);
14417 }
14418 /* The bitfield references a single constructor element. */
14419 else if (idx + n <= (idx / k + 1) * k)
14420 {
14421 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14422 return build_zero_cst (type);
14423 else if (n == k)
14424 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14425 else
14426 return fold_build3_loc (loc, code, type,
14427 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14428 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14429 }
14430 }
14431 }
14432
14433 /* A bit-field-ref that referenced the full argument can be stripped. */
14434 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14435 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14436 && integer_zerop (op2))
14437 return fold_convert_loc (loc, type, arg0);
14438
14439 /* On constants we can use native encode/interpret to constant
14440 fold (nearly) all BIT_FIELD_REFs. */
14441 if (CONSTANT_CLASS_P (arg0)
14442 && can_native_interpret_type_p (type)
14443 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14444 /* This limitation should not be necessary, we just need to
14445 round this up to mode size. */
14446 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14447 /* Need bit-shifting of the buffer to relax the following. */
14448 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14449 {
14450 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14451 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14452 unsigned HOST_WIDE_INT clen;
14453 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14454 /* ??? We cannot tell native_encode_expr to start at
14455 some random byte only. So limit us to a reasonable amount
14456 of work. */
14457 if (clen <= 4096)
14458 {
14459 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14460 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14461 if (len > 0
14462 && len * BITS_PER_UNIT >= bitpos + bitsize)
14463 {
14464 tree v = native_interpret_expr (type,
14465 b + bitpos / BITS_PER_UNIT,
14466 bitsize / BITS_PER_UNIT);
14467 if (v)
14468 return v;
14469 }
14470 }
14471 }
14472
14473 return NULL_TREE;
14474
14475 case FMA_EXPR:
14476 /* For integers we can decompose the FMA if possible. */
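      /* E.g. FMA (3, 4, 5) folds to the constant 3 * 4 + 5 = 17, and a
	 zero addend reduces FMA (a, b, 0) to the multiplication a * b.  */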
14477 if (TREE_CODE (arg0) == INTEGER_CST
14478 && TREE_CODE (arg1) == INTEGER_CST)
14479 return fold_build2_loc (loc, PLUS_EXPR, type,
14480 const_binop (MULT_EXPR, arg0, arg1), arg2);
14481 if (integer_zerop (arg2))
14482 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14483
14484 return fold_fma (loc, type, arg0, arg1, arg2);
14485
14486 case VEC_PERM_EXPR:
14487 if (TREE_CODE (arg2) == VECTOR_CST)
14488 {
14489 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14490 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14491 bool need_mask_canon = false;
14492 bool all_in_vec0 = true;
14493 bool all_in_vec1 = true;
14494 bool maybe_identity = true;
14495 bool single_arg = (op0 == op1);
14496 bool changed = false;
14497
14498 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14499 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14500 for (i = 0; i < nelts; i++)
14501 {
14502 tree val = VECTOR_CST_ELT (arg2, i);
14503 if (TREE_CODE (val) != INTEGER_CST)
14504 return NULL_TREE;
14505
14506 /* Make sure that the perm value is in an acceptable
14507 range. */
14508 wide_int t = val;
14509 if (wi::gtu_p (t, mask))
14510 {
14511 need_mask_canon = true;
14512 sel[i] = t.to_uhwi () & mask;
14513 }
14514 else
14515 sel[i] = t.to_uhwi ();
14516
14517 if (sel[i] < nelts)
14518 all_in_vec1 = false;
14519 else
14520 all_in_vec0 = false;
14521
14522 if ((sel[i] & (nelts - 1)) != i)
14523 maybe_identity = false;
14524 }
14525
14526 if (maybe_identity)
14527 {
14528 if (all_in_vec0)
14529 return op0;
14530 if (all_in_vec1)
14531 return op1;
14532 }
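/* E.g. a selector of { 0, 1, ..., nelts-1 } drawn entirely from
   the first vector is the identity permutation and yields op0
   unchanged.  */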
14533
14534 if (all_in_vec0)
14535 op1 = op0;
14536 else if (all_in_vec1)
14537 {
14538 op0 = op1;
14539 for (i = 0; i < nelts; i++)
14540 sel[i] -= nelts;
14541 need_mask_canon = true;
14542 }
14543
14544 if ((TREE_CODE (op0) == VECTOR_CST
14545 || TREE_CODE (op0) == CONSTRUCTOR)
14546 && (TREE_CODE (op1) == VECTOR_CST
14547 || TREE_CODE (op1) == CONSTRUCTOR))
14548 {
14549 tree t = fold_vec_perm (type, op0, op1, sel);
14550 if (t != NULL_TREE)
14551 return t;
14552 }
14553
14554 if (op0 == op1 && !single_arg)
14555 changed = true;
14556
14557 if (need_mask_canon && arg2 == op2)
14558 {
14559 tree *tsel = XALLOCAVEC (tree, nelts);
14560 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14561 for (i = 0; i < nelts; i++)
14562 tsel[i] = build_int_cst (eltype, sel[i]);
14563 op2 = build_vector (TREE_TYPE (arg2), tsel);
14564 changed = true;
14565 }
14566
14567 if (changed)
14568 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14569 }
14570 return NULL_TREE;
14571
14572 default:
14573 return NULL_TREE;
14574 } /* switch (code) */
14575 }
14576
14577 /* Perform constant folding and related simplification of EXPR.
14578 The related simplifications include x*1 => x, x*0 => 0, etc.,
14579 and application of the associative law.
14580 NOP_EXPR conversions may be removed freely (as long as we
14581 are careful not to change the type of the overall expression).
14582 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14583 but we can constant-fold them if they have constant operands. */
14584
14585 #ifdef ENABLE_FOLD_CHECKING
14586 # define fold(x) fold_1 (x)
14587 static tree fold_1 (tree);
14588 static
14589 #endif
14590 tree
14591 fold (tree expr)
14592 {
14593 const tree t = expr;
14594 enum tree_code code = TREE_CODE (t);
14595 enum tree_code_class kind = TREE_CODE_CLASS (code);
14596 tree tem;
14597 location_t loc = EXPR_LOCATION (expr);
14598
14599 /* Return right away if a constant. */
14600 if (kind == tcc_constant)
14601 return t;
14602
14603 /* CALL_EXPR-like objects with variable numbers of operands are
14604 treated specially. */
14605 if (kind == tcc_vl_exp)
14606 {
14607 if (code == CALL_EXPR)
14608 {
14609 tem = fold_call_expr (loc, expr, false);
14610 return tem ? tem : expr;
14611 }
14612 return expr;
14613 }
14614
14615 if (IS_EXPR_CODE_CLASS (kind))
14616 {
14617 tree type = TREE_TYPE (t);
14618 tree op0, op1, op2;
14619
14620 switch (TREE_CODE_LENGTH (code))
14621 {
14622 case 1:
14623 op0 = TREE_OPERAND (t, 0);
14624 tem = fold_unary_loc (loc, code, type, op0);
14625 return tem ? tem : expr;
14626 case 2:
14627 op0 = TREE_OPERAND (t, 0);
14628 op1 = TREE_OPERAND (t, 1);
14629 tem = fold_binary_loc (loc, code, type, op0, op1);
14630 return tem ? tem : expr;
14631 case 3:
14632 op0 = TREE_OPERAND (t, 0);
14633 op1 = TREE_OPERAND (t, 1);
14634 op2 = TREE_OPERAND (t, 2);
14635 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14636 return tem ? tem : expr;
14637 default:
14638 break;
14639 }
14640 }
14641
14642 switch (code)
14643 {
14644 case ARRAY_REF:
14645 {
14646 tree op0 = TREE_OPERAND (t, 0);
14647 tree op1 = TREE_OPERAND (t, 1);
14648
14649 if (TREE_CODE (op1) == INTEGER_CST
14650 && TREE_CODE (op0) == CONSTRUCTOR
14651 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14652 {
14653 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14654 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14655 unsigned HOST_WIDE_INT begin = 0;
14656
14657 /* Find a matching index by means of a binary search. */
14658 while (begin != end)
14659 {
14660 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14661 tree index = (*elts)[middle].index;
14662
14663 if (TREE_CODE (index) == INTEGER_CST
14664 && tree_int_cst_lt (index, op1))
14665 begin = middle + 1;
14666 else if (TREE_CODE (index) == INTEGER_CST
14667 && tree_int_cst_lt (op1, index))
14668 end = middle;
14669 else if (TREE_CODE (index) == RANGE_EXPR
14670 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14671 begin = middle + 1;
14672 else if (TREE_CODE (index) == RANGE_EXPR
14673 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14674 end = middle;
14675 else
14676 return (*elts)[middle].value;
14677 }
14678 }
14679
14680 return t;
14681 }
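/* The binary search above relies on the CONSTRUCTOR elements being
   sorted by index; e.g. looking up index 2 in
   { [0] = a, [1 ... 3] = b } lands in the RANGE_EXPR entry and
   returns b.  */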
14682
14683 /* Return a VECTOR_CST if possible. */
14684 case CONSTRUCTOR:
14685 {
14686 tree type = TREE_TYPE (t);
14687 if (TREE_CODE (type) != VECTOR_TYPE)
14688 return t;
14689
14690 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14691 unsigned HOST_WIDE_INT idx, pos = 0;
14692 tree value;
14693
14694 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14695 {
14696 if (!CONSTANT_CLASS_P (value))
14697 return t;
14698 if (TREE_CODE (value) == VECTOR_CST)
14699 {
14700 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14701 vec[pos++] = VECTOR_CST_ELT (value, i);
14702 }
14703 else
14704 vec[pos++] = value;
14705 }
14706 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14707 vec[pos] = build_zero_cst (TREE_TYPE (type));
14708
14709 return build_vector (type, vec);
14710 }
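/* E.g. a vector CONSTRUCTOR { 1, 2 } for a four-element vector
   type becomes the VECTOR_CST { 1, 2, 0, 0 }, with zeros supplied
   for the unmentioned trailing lanes.  */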
14711
14712 case CONST_DECL:
14713 return fold (DECL_INITIAL (t));
14714
14715 default:
14716 return t;
14717 } /* switch (code) */
14718 }
14719
14720 #ifdef ENABLE_FOLD_CHECKING
14721 #undef fold
14722
14723 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14724 hash_table<pointer_hash<const tree_node> > *);
14725 static void fold_check_failed (const_tree, const_tree);
14726 void print_fold_checksum (const_tree);
14727
14728 /* When --enable-checking=fold, compute a digest of expr before
14729 and after the actual fold call to verify that fold did not
14730 accidentally change the original expr. */
14731
14732 tree
14733 fold (tree expr)
14734 {
14735 tree ret;
14736 struct md5_ctx ctx;
14737 unsigned char checksum_before[16], checksum_after[16];
14738 hash_table<pointer_hash<const tree_node> > ht (32);
14739
14740 md5_init_ctx (&ctx);
14741 fold_checksum_tree (expr, &ctx, &ht);
14742 md5_finish_ctx (&ctx, checksum_before);
14743 ht.empty ();
14744
14745 ret = fold_1 (expr);
14746
14747 md5_init_ctx (&ctx);
14748 fold_checksum_tree (expr, &ctx, &ht);
14749 md5_finish_ctx (&ctx, checksum_after);
14750
14751 if (memcmp (checksum_before, checksum_after, 16))
14752 fold_check_failed (expr, ret);
14753
14754 return ret;
14755 }
14756
14757 void
14758 print_fold_checksum (const_tree expr)
14759 {
14760 struct md5_ctx ctx;
14761 unsigned char checksum[16], cnt;
14762 hash_table<pointer_hash<const tree_node> > ht (32);
14763
14764 md5_init_ctx (&ctx);
14765 fold_checksum_tree (expr, &ctx, &ht);
14766 md5_finish_ctx (&ctx, checksum);
14767 for (cnt = 0; cnt < 16; ++cnt)
14768 fprintf (stderr, "%02x", checksum[cnt]);
14769 putc ('\n', stderr);
14770 }
14771
14772 static void
14773 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14774 {
14775 internal_error ("fold check: original tree changed by fold");
14776 }
14777
14778 static void
14779 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14780 hash_table<pointer_hash <const tree_node> > *ht)
14781 {
14782 const tree_node **slot;
14783 enum tree_code code;
14784 union tree_node buf;
14785 int i, len;
14786
14787 recursive_label:
14788 if (expr == NULL)
14789 return;
14790 slot = ht->find_slot (expr, INSERT);
14791 if (*slot != NULL)
14792 return;
14793 *slot = expr;
14794 code = TREE_CODE (expr);
14795 if (TREE_CODE_CLASS (code) == tcc_declaration
14796 && DECL_ASSEMBLER_NAME_SET_P (expr))
14797 {
14798 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14799 memcpy ((char *) &buf, expr, tree_size (expr));
14800 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14801 expr = (tree) &buf;
14802 }
14803 else if (TREE_CODE_CLASS (code) == tcc_type
14804 && (TYPE_POINTER_TO (expr)
14805 || TYPE_REFERENCE_TO (expr)
14806 || TYPE_CACHED_VALUES_P (expr)
14807 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14808 || TYPE_NEXT_VARIANT (expr)))
14809 {
14810 /* Allow these fields to be modified. */
14811 tree tmp;
14812 memcpy ((char *) &buf, expr, tree_size (expr));
14813 expr = tmp = (tree) &buf;
14814 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14815 TYPE_POINTER_TO (tmp) = NULL;
14816 TYPE_REFERENCE_TO (tmp) = NULL;
14817 TYPE_NEXT_VARIANT (tmp) = NULL;
14818 if (TYPE_CACHED_VALUES_P (tmp))
14819 {
14820 TYPE_CACHED_VALUES_P (tmp) = 0;
14821 TYPE_CACHED_VALUES (tmp) = NULL;
14822 }
14823 }
14824 md5_process_bytes (expr, tree_size (expr), ctx);
14825 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14826 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14827 if (TREE_CODE_CLASS (code) != tcc_type
14828 && TREE_CODE_CLASS (code) != tcc_declaration
14829 && code != TREE_LIST
14830 && code != SSA_NAME
14831 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14832 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14833 switch (TREE_CODE_CLASS (code))
14834 {
14835 case tcc_constant:
14836 switch (code)
14837 {
14838 case STRING_CST:
14839 md5_process_bytes (TREE_STRING_POINTER (expr),
14840 TREE_STRING_LENGTH (expr), ctx);
14841 break;
14842 case COMPLEX_CST:
14843 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14844 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14845 break;
14846 case VECTOR_CST:
14847 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14848 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14849 break;
14850 default:
14851 break;
14852 }
14853 break;
14854 case tcc_exceptional:
14855 switch (code)
14856 {
14857 case TREE_LIST:
14858 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14859 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14860 expr = TREE_CHAIN (expr);
14861 goto recursive_label;
14862 break;
14863 case TREE_VEC:
14864 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14865 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14866 break;
14867 default:
14868 break;
14869 }
14870 break;
14871 case tcc_expression:
14872 case tcc_reference:
14873 case tcc_comparison:
14874 case tcc_unary:
14875 case tcc_binary:
14876 case tcc_statement:
14877 case tcc_vl_exp:
14878 len = TREE_OPERAND_LENGTH (expr);
14879 for (i = 0; i < len; ++i)
14880 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14881 break;
14882 case tcc_declaration:
14883 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14884 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14885 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14886 {
14887 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14888 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14889 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14890 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14891 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14892 }
14893
14894 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14895 {
14896 if (TREE_CODE (expr) == FUNCTION_DECL)
14897 {
14898 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14899 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14900 }
14901 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14902 }
14903 break;
14904 case tcc_type:
14905 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14906 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14907 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14908 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14909 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14910 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14911 if (INTEGRAL_TYPE_P (expr)
14912 || SCALAR_FLOAT_TYPE_P (expr))
14913 {
14914 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14915 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14916 }
14917 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14918 if (TREE_CODE (expr) == RECORD_TYPE
14919 || TREE_CODE (expr) == UNION_TYPE
14920 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14921 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14922 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14923 break;
14924 default:
14925 break;
14926 }
14927 }
14928
14929 /* Helper function for outputting the checksum of a tree T. When
14930 debugging with gdb, you can "define mynext" to be "next" followed
14931 by "call debug_fold_checksum (op0)", then just trace down till the
14932 outputs differ. */
14933
14934 DEBUG_FUNCTION void
14935 debug_fold_checksum (const_tree t)
14936 {
14937 int i;
14938 unsigned char checksum[16];
14939 struct md5_ctx ctx;
14940 hash_table<pointer_hash<const tree_node> > ht (32);
14941
14942 md5_init_ctx (&ctx);
14943 fold_checksum_tree (t, &ctx, &ht);
14944 md5_finish_ctx (&ctx, checksum);
14945 ht.empty ();
14946
14947 for (i = 0; i < 16; i++)
14948 fprintf (stderr, "%d ", checksum[i]);
14949
14950 fprintf (stderr, "\n");
14951 }
14952
14953 #endif
14954
14955 /* Fold a unary tree expression with code CODE of type TYPE with an
14956 operand OP0. LOC is the location of the resulting expression.
14957 Return a folded expression if successful. Otherwise, return a tree
14958 expression with code CODE of type TYPE with an operand OP0. */
14959
14960 tree
14961 fold_build1_stat_loc (location_t loc,
14962 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14963 {
14964 tree tem;
14965 #ifdef ENABLE_FOLD_CHECKING
14966 unsigned char checksum_before[16], checksum_after[16];
14967 struct md5_ctx ctx;
14968 hash_table<pointer_hash<const tree_node> > ht (32);
14969
14970 md5_init_ctx (&ctx);
14971 fold_checksum_tree (op0, &ctx, &ht);
14972 md5_finish_ctx (&ctx, checksum_before);
14973 ht.empty ();
14974 #endif
14975
14976 tem = fold_unary_loc (loc, code, type, op0);
14977 if (!tem)
14978 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14979
14980 #ifdef ENABLE_FOLD_CHECKING
14981 md5_init_ctx (&ctx);
14982 fold_checksum_tree (op0, &ctx, &ht);
14983 md5_finish_ctx (&ctx, checksum_after);
14984
14985 if (memcmp (checksum_before, checksum_after, 16))
14986 fold_check_failed (op0, tem);
14987 #endif
14988 return tem;
14989 }
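/* Usage sketch (hypothetical caller): most callers reach this through the
   fold_build1 / fold_build1_loc macros, e.g.

       tree t = fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, x);

   which yields a folded constant when the operand folds, and otherwise a
   fresh NEGATE_EXPR node, exactly as described above.  */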
14990
14991 /* Fold a binary tree expression with code CODE of type TYPE with
14992 operands OP0 and OP1. LOC is the location of the resulting
14993 expression. Return a folded expression if successful. Otherwise,
14994 return a tree expression with code CODE of type TYPE with operands
14995 OP0 and OP1. */
14996
14997 tree
14998 fold_build2_stat_loc (location_t loc,
14999 enum tree_code code, tree type, tree op0, tree op1
15000 MEM_STAT_DECL)
15001 {
15002 tree tem;
15003 #ifdef ENABLE_FOLD_CHECKING
15004 unsigned char checksum_before_op0[16],
15005 checksum_before_op1[16],
15006 checksum_after_op0[16],
15007 checksum_after_op1[16];
15008 struct md5_ctx ctx;
15009 hash_table<pointer_hash<const tree_node> > ht (32);
15010
15011 md5_init_ctx (&ctx);
15012 fold_checksum_tree (op0, &ctx, &ht);
15013 md5_finish_ctx (&ctx, checksum_before_op0);
15014 ht.empty ();
15015
15016 md5_init_ctx (&ctx);
15017 fold_checksum_tree (op1, &ctx, &ht);
15018 md5_finish_ctx (&ctx, checksum_before_op1);
15019 ht.empty ();
15020 #endif
15021
15022 tem = fold_binary_loc (loc, code, type, op0, op1);
15023 if (!tem)
15024 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15025
15026 #ifdef ENABLE_FOLD_CHECKING
15027 md5_init_ctx (&ctx);
15028 fold_checksum_tree (op0, &ctx, &ht);
15029 md5_finish_ctx (&ctx, checksum_after_op0);
15030 ht.empty ();
15031
15032 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15033 fold_check_failed (op0, tem);
15034
15035 md5_init_ctx (&ctx);
15036 fold_checksum_tree (op1, &ctx, &ht);
15037 md5_finish_ctx (&ctx, checksum_after_op1);
15038
15039 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15040 fold_check_failed (op1, tem);
15041 #endif
15042 return tem;
15043 }
15044
15045 /* Fold a ternary tree expression with code CODE of type TYPE with
15046 operands OP0, OP1, and OP2. Return a folded expression if
15047 successful. Otherwise, return a tree expression with code CODE of
15048 type TYPE with operands OP0, OP1, and OP2. */
15049
15050 tree
15051 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15052 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15053 {
15054 tree tem;
15055 #ifdef ENABLE_FOLD_CHECKING
15056 unsigned char checksum_before_op0[16],
15057 checksum_before_op1[16],
15058 checksum_before_op2[16],
15059 checksum_after_op0[16],
15060 checksum_after_op1[16],
15061 checksum_after_op2[16];
15062 struct md5_ctx ctx;
15063 hash_table<pointer_hash<const tree_node> > ht (32);
15064
15065 md5_init_ctx (&ctx);
15066 fold_checksum_tree (op0, &ctx, &ht);
15067 md5_finish_ctx (&ctx, checksum_before_op0);
15068 ht.empty ();
15069
15070 md5_init_ctx (&ctx);
15071 fold_checksum_tree (op1, &ctx, &ht);
15072 md5_finish_ctx (&ctx, checksum_before_op1);
15073 ht.empty ();
15074
15075 md5_init_ctx (&ctx);
15076 fold_checksum_tree (op2, &ctx, &ht);
15077 md5_finish_ctx (&ctx, checksum_before_op2);
15078 ht.empty ();
15079 #endif
15080
15081 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15082 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15083 if (!tem)
15084 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15085
15086 #ifdef ENABLE_FOLD_CHECKING
15087 md5_init_ctx (&ctx);
15088 fold_checksum_tree (op0, &ctx, &ht);
15089 md5_finish_ctx (&ctx, checksum_after_op0);
15090 ht.empty ();
15091
15092 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15093 fold_check_failed (op0, tem);
15094
15095 md5_init_ctx (&ctx);
15096 fold_checksum_tree (op1, &ctx, &ht);
15097 md5_finish_ctx (&ctx, checksum_after_op1);
15098 ht.empty ();
15099
15100 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15101 fold_check_failed (op1, tem);
15102
15103 md5_init_ctx (&ctx);
15104 fold_checksum_tree (op2, &ctx, &ht);
15105 md5_finish_ctx (&ctx, checksum_after_op2);
15106
15107 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15108 fold_check_failed (op2, tem);
15109 #endif
15110 return tem;
15111 }
15112
15113 /* Fold a CALL_EXPR expression of type TYPE that calls FN with the NARGS
15114 arguments in ARGARRAY, and a null static chain.
15115 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15116 of type TYPE from the given operands as constructed by build_call_array. */
15117
15118 tree
15119 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15120 int nargs, tree *argarray)
15121 {
15122 tree tem;
15123 #ifdef ENABLE_FOLD_CHECKING
15124 unsigned char checksum_before_fn[16],
15125 checksum_before_arglist[16],
15126 checksum_after_fn[16],
15127 checksum_after_arglist[16];
15128 struct md5_ctx ctx;
15129 hash_table<pointer_hash<const tree_node> > ht (32);
15130 int i;
15131
15132 md5_init_ctx (&ctx);
15133 fold_checksum_tree (fn, &ctx, &ht);
15134 md5_finish_ctx (&ctx, checksum_before_fn);
15135 ht.empty ();
15136
15137 md5_init_ctx (&ctx);
15138 for (i = 0; i < nargs; i++)
15139 fold_checksum_tree (argarray[i], &ctx, &ht);
15140 md5_finish_ctx (&ctx, checksum_before_arglist);
15141 ht.empty ();
15142 #endif
15143
15144 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15145
15146 #ifdef ENABLE_FOLD_CHECKING
15147 md5_init_ctx (&ctx);
15148 fold_checksum_tree (fn, &ctx, &ht);
15149 md5_finish_ctx (&ctx, checksum_after_fn);
15150 ht.empty ();
15151
15152 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15153 fold_check_failed (fn, tem);
15154
15155 md5_init_ctx (&ctx);
15156 for (i = 0; i < nargs; i++)
15157 fold_checksum_tree (argarray[i], &ctx, &ht);
15158 md5_finish_ctx (&ctx, checksum_after_arglist);
15159
15160 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15161 fold_check_failed (NULL_TREE, tem);
15162 #endif
15163 return tem;
15164 }
15165
15166 /* Perform constant folding and related simplification of initializer
15167 expression EXPR. These behave identically to "fold_buildN" but ignore
15168 potential run-time traps and exceptions that fold must preserve. */
15169
15170 #define START_FOLD_INIT \
15171 int saved_signaling_nans = flag_signaling_nans;\
15172 int saved_trapping_math = flag_trapping_math;\
15173 int saved_rounding_math = flag_rounding_math;\
15174 int saved_trapv = flag_trapv;\
15175 int saved_folding_initializer = folding_initializer;\
15176 flag_signaling_nans = 0;\
15177 flag_trapping_math = 0;\
15178 flag_rounding_math = 0;\
15179 flag_trapv = 0;\
15180 folding_initializer = 1;
15181
15182 #define END_FOLD_INIT \
15183 flag_signaling_nans = saved_signaling_nans;\
15184 flag_trapping_math = saved_trapping_math;\
15185 flag_rounding_math = saved_rounding_math;\
15186 flag_trapv = saved_trapv;\
15187 folding_initializer = saved_folding_initializer;
15188
15189 tree
15190 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15191 tree type, tree op)
15192 {
15193 tree result;
15194 START_FOLD_INIT;
15195
15196 result = fold_build1_loc (loc, code, type, op);
15197
15198 END_FOLD_INIT;
15199 return result;
15200 }
15201
15202 tree
15203 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15204 tree type, tree op0, tree op1)
15205 {
15206 tree result;
15207 START_FOLD_INIT;
15208
15209 result = fold_build2_loc (loc, code, type, op0, op1);
15210
15211 END_FOLD_INIT;
15212 return result;
15213 }
15214
15215 tree
15216 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15217 int nargs, tree *argarray)
15218 {
15219 tree result;
15220 START_FOLD_INIT;
15221
15222 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15223
15224 END_FOLD_INIT;
15225 return result;
15226 }
15227
15228 #undef START_FOLD_INIT
15229 #undef END_FOLD_INIT
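/* Usage sketch (hypothetical front-end call): when folding a static
   initializer such as

       static double d = 1.0 / 3.0;

   a call like fold_build2_initializer_loc (loc, RDIV_EXPR,
   double_type_node, one, three) temporarily clears flag_rounding_math,
   flag_trapping_math, etc. via the macros above, so the inexact division
   is folded to a constant even though fold would otherwise leave it for
   run time.  */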
15230
15231 /* Determine if the first argument is a multiple of the second argument.
15232 Return 0 if it is not, or if we cannot easily determine it to be.
15233
15234 An example of the sort of thing we care about (at this point; this routine
15235 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15236 fold cases do now) is discovering that
15237
15238 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15239
15240 is a multiple of
15241
15242 SAVE_EXPR (J * 8)
15243
15244 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15245
15246 This code also handles discovering that
15247
15248 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15249
15250 is a multiple of 8 so we don't have to worry about dealing with a
15251 possible remainder.
15252
15253 Note that we *look* inside a SAVE_EXPR only to determine how it was
15254 calculated; it is not safe for fold to do much of anything else with the
15255 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15256 at run time. For example, the latter example above *cannot* be implemented
15257 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15258 evaluation time of the original SAVE_EXPR is not necessarily the same at
15259 the time the new expression is evaluated. The only optimization of this
15260 sort that would be valid is changing
15261
15262 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15263
15264 divided by 8 to
15265
15266 SAVE_EXPR (I) * SAVE_EXPR (J)
15267
15268 (where the same SAVE_EXPR (J) is used in the original and the
15269 transformed version). */
15270
15271 int
15272 multiple_of_p (tree type, const_tree top, const_tree bottom)
15273 {
15274 if (operand_equal_p (top, bottom, 0))
15275 return 1;
15276
15277 if (TREE_CODE (type) != INTEGER_TYPE)
15278 return 0;
15279
15280 switch (TREE_CODE (top))
15281 {
15282 case BIT_AND_EXPR:
15283 /* Bitwise and provides a power of two multiple. If the mask is
15284 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15285 if (!integer_pow2p (bottom))
15286 return 0;
15287 /* FALLTHRU */
15288
15289 case MULT_EXPR:
15290 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15291 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15292
15293 case PLUS_EXPR:
15294 case MINUS_EXPR:
15295 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15296 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15297
15298 case LSHIFT_EXPR:
15299 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15300 {
15301 tree op1, t1;
15302
15303 op1 = TREE_OPERAND (top, 1);
15304 /* const_binop may not detect overflow correctly,
15305 so check for it explicitly here. */
15306 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15307 && 0 != (t1 = fold_convert (type,
15308 const_binop (LSHIFT_EXPR,
15309 size_one_node,
15310 op1)))
15311 && !TREE_OVERFLOW (t1))
15312 return multiple_of_p (type, t1, bottom);
15313 }
15314 return 0;
15315
15316 case NOP_EXPR:
15317 /* Can't handle conversions from non-integral or wider integral type. */
15318 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15319 || (TYPE_PRECISION (type)
15320 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15321 return 0;
15322
15323 /* ... fall through ... */
15324
15325 case SAVE_EXPR:
15326 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15327
15328 case COND_EXPR:
15329 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15330 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15331
15332 case INTEGER_CST:
15333 if (TREE_CODE (bottom) != INTEGER_CST
15334 || integer_zerop (bottom)
15335 || (TYPE_UNSIGNED (type)
15336 && (tree_int_cst_sgn (top) < 0
15337 || tree_int_cst_sgn (bottom) < 0)))
15338 return 0;
15339 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15340 SIGNED);
15341
15342 default:
15343 return 0;
15344 }
15345 }
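/* Usage sketch (hypothetical operands): for TOP = J * 8 + 16 in sizetype,

       multiple_of_p (sizetype, top, build_int_cst (sizetype, 8))

   returns 1 through the PLUS_EXPR and MULT_EXPR cases above, while a
   BOTTOM of 32 returns 0 because neither J nor 8 is known to be a
   multiple of 32.  */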
15346
15347 /* Return true if CODE or TYPE is known to be non-negative. */
15348
15349 static bool
15350 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15351 {
15352 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15353 && truth_value_p (code))
15354 /* Truth values evaluate to 0 or 1, which are nonnegative unless we
15355 have a signed:1 type (where the values are -1 and 0). */
15356 return true;
15357 return false;
15358 }
15359
15360 /* Return true if (CODE OP0) is known to be non-negative. If the return
15361 value is based on the assumption that signed overflow is undefined,
15362 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15363 *STRICT_OVERFLOW_P. */
15364
15365 bool
15366 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15367 bool *strict_overflow_p)
15368 {
15369 if (TYPE_UNSIGNED (type))
15370 return true;
15371
15372 switch (code)
15373 {
15374 case ABS_EXPR:
15375 /* We can't return true when wrapping overflow is in effect (e.g.
15376 with flag_wrapv), because then ABS_EXPR<INT_MIN> = INT_MIN. */
15377 if (!INTEGRAL_TYPE_P (type))
15378 return true;
15379 if (TYPE_OVERFLOW_UNDEFINED (type))
15380 {
15381 *strict_overflow_p = true;
15382 return true;
15383 }
15384 break;
15385
15386 case NON_LVALUE_EXPR:
15387 case FLOAT_EXPR:
15388 case FIX_TRUNC_EXPR:
15389 return tree_expr_nonnegative_warnv_p (op0,
15390 strict_overflow_p);
15391
15392 case NOP_EXPR:
15393 {
15394 tree inner_type = TREE_TYPE (op0);
15395 tree outer_type = type;
15396
15397 if (TREE_CODE (outer_type) == REAL_TYPE)
15398 {
15399 if (TREE_CODE (inner_type) == REAL_TYPE)
15400 return tree_expr_nonnegative_warnv_p (op0,
15401 strict_overflow_p);
15402 if (INTEGRAL_TYPE_P (inner_type))
15403 {
15404 if (TYPE_UNSIGNED (inner_type))
15405 return true;
15406 return tree_expr_nonnegative_warnv_p (op0,
15407 strict_overflow_p);
15408 }
15409 }
15410 else if (INTEGRAL_TYPE_P (outer_type))
15411 {
15412 if (TREE_CODE (inner_type) == REAL_TYPE)
15413 return tree_expr_nonnegative_warnv_p (op0,
15414 strict_overflow_p);
15415 if (INTEGRAL_TYPE_P (inner_type))
15416 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15417 && TYPE_UNSIGNED (inner_type);
15418 }
15419 }
15420 break;
15421
15422 default:
15423 return tree_simple_nonnegative_warnv_p (code, type);
15424 }
15425
15426 /* We don't know the sign of the expression, so be conservative and return false. */
15427 return false;
15428 }
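/* Worked example for the NOP_EXPR case above (hypothetical types):
   (int) (unsigned short) x is known non-negative because the inner type
   is unsigned and its precision (16) is smaller than the outer precision
   (32), so the zero-extended value always lands in the non-negative
   range of int.  */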
15429
15430 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15431 value is based on the assumption that signed overflow is undefined,
15432 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15433 *STRICT_OVERFLOW_P. */
15434
15435 bool
15436 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15437 tree op1, bool *strict_overflow_p)
15438 {
15439 if (TYPE_UNSIGNED (type))
15440 return true;
15441
15442 switch (code)
15443 {
15444 case POINTER_PLUS_EXPR:
15445 case PLUS_EXPR:
15446 if (FLOAT_TYPE_P (type))
15447 return (tree_expr_nonnegative_warnv_p (op0,
15448 strict_overflow_p)
15449 && tree_expr_nonnegative_warnv_p (op1,
15450 strict_overflow_p));
15451
15452 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15453 both unsigned and at least 2 bits shorter than the result. */
15454 if (TREE_CODE (type) == INTEGER_TYPE
15455 && TREE_CODE (op0) == NOP_EXPR
15456 && TREE_CODE (op1) == NOP_EXPR)
15457 {
15458 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15459 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15460 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15461 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15462 {
15463 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15464 TYPE_PRECISION (inner2)) + 1;
15465 return prec < TYPE_PRECISION (type);
15466 }
15467 }
15468 break;
15469
15470 case MULT_EXPR:
15471 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15472 {
15473 /* x * x is always non-negative for floating point x,
15474 or in the absence of overflow. */
15475 if (operand_equal_p (op0, op1, 0)
15476 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15477 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15478 {
15479 if (TYPE_OVERFLOW_UNDEFINED (type))
15480 *strict_overflow_p = true;
15481 return true;
15482 }
15483 }
15484
15485 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15486 both unsigned and the total of their bits is less than the result's. */
15487 if (TREE_CODE (type) == INTEGER_TYPE
15488 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15489 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15490 {
15491 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15492 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15493 : TREE_TYPE (op0);
15494 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15495 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15496 : TREE_TYPE (op1);
15497
15498 bool unsigned0 = TYPE_UNSIGNED (inner0);
15499 bool unsigned1 = TYPE_UNSIGNED (inner1);
15500
15501 if (TREE_CODE (op0) == INTEGER_CST)
15502 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15503
15504 if (TREE_CODE (op1) == INTEGER_CST)
15505 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15506
15507 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15508 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15509 {
15510 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15511 ? tree_int_cst_min_precision (op0, UNSIGNED)
15512 : TYPE_PRECISION (inner0);
15513
15514 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15515 ? tree_int_cst_min_precision (op1, UNSIGNED)
15516 : TYPE_PRECISION (inner1);
15517
15518 return precision0 + precision1 < TYPE_PRECISION (type);
15519 }
15520 }
15521 return false;
15522
15523 case BIT_AND_EXPR:
15524 case MAX_EXPR:
15525 return (tree_expr_nonnegative_warnv_p (op0,
15526 strict_overflow_p)
15527 || tree_expr_nonnegative_warnv_p (op1,
15528 strict_overflow_p));
15529
15530 case BIT_IOR_EXPR:
15531 case BIT_XOR_EXPR:
15532 case MIN_EXPR:
15533 case RDIV_EXPR:
15534 case TRUNC_DIV_EXPR:
15535 case CEIL_DIV_EXPR:
15536 case FLOOR_DIV_EXPR:
15537 case ROUND_DIV_EXPR:
15538 return (tree_expr_nonnegative_warnv_p (op0,
15539 strict_overflow_p)
15540 && tree_expr_nonnegative_warnv_p (op1,
15541 strict_overflow_p));
15542
15543 case TRUNC_MOD_EXPR:
15544 case CEIL_MOD_EXPR:
15545 case FLOOR_MOD_EXPR:
15546 case ROUND_MOD_EXPR:
15547 return tree_expr_nonnegative_warnv_p (op0,
15548 strict_overflow_p);
15549 default:
15550 return tree_simple_nonnegative_warnv_p (code, type);
15551 }
15552
15553 /* We don't know the sign of the expression, so be conservative and return false. */
15554 return false;
15555 }
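/* Worked example for the zero_extend cases above: two unsigned chars
   zero-extended and added in a 32-bit int need at most
   MAX (8, 8) + 1 = 9 bits (255 + 255 = 510), and 9 < 32; likewise their
   product needs at most 8 + 8 = 16 bits (255 * 255 = 65025), and
   16 < 32, so both the sum and the product are non-negative.  */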
15556
15557 /* Return true if T is known to be non-negative. If the return
15558 value is based on the assumption that signed overflow is undefined,
15559 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15560 *STRICT_OVERFLOW_P. */
15561
15562 bool
15563 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15564 {
15565 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15566 return true;
15567
15568 switch (TREE_CODE (t))
15569 {
15570 case INTEGER_CST:
15571 return tree_int_cst_sgn (t) >= 0;
15572
15573 case REAL_CST:
15574 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15575
15576 case FIXED_CST:
15577 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15578
15579 case COND_EXPR:
15580 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15581 strict_overflow_p)
15582 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15583 strict_overflow_p));
15584 default:
15585 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15586 TREE_TYPE (t));
15587 }
15588 /* We don't know the sign of `t', so be conservative and return false. */
15589 return false;
15590 }
15591
15592 /* Return true if T is known to be non-negative. If the return
15593 value is based on the assumption that signed overflow is undefined,
15594 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15595 *STRICT_OVERFLOW_P. */
15596
15597 bool
15598 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15599 tree arg0, tree arg1, bool *strict_overflow_p)
15600 {
15601 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15602 switch (DECL_FUNCTION_CODE (fndecl))
15603 {
15604 CASE_FLT_FN (BUILT_IN_ACOS):
15605 CASE_FLT_FN (BUILT_IN_ACOSH):
15606 CASE_FLT_FN (BUILT_IN_CABS):
15607 CASE_FLT_FN (BUILT_IN_COSH):
15608 CASE_FLT_FN (BUILT_IN_ERFC):
15609 CASE_FLT_FN (BUILT_IN_EXP):
15610 CASE_FLT_FN (BUILT_IN_EXP10):
15611 CASE_FLT_FN (BUILT_IN_EXP2):
15612 CASE_FLT_FN (BUILT_IN_FABS):
15613 CASE_FLT_FN (BUILT_IN_FDIM):
15614 CASE_FLT_FN (BUILT_IN_HYPOT):
15615 CASE_FLT_FN (BUILT_IN_POW10):
15616 CASE_INT_FN (BUILT_IN_FFS):
15617 CASE_INT_FN (BUILT_IN_PARITY):
15618 CASE_INT_FN (BUILT_IN_POPCOUNT):
15619 CASE_INT_FN (BUILT_IN_CLZ):
15620 CASE_INT_FN (BUILT_IN_CLRSB):
15621 case BUILT_IN_BSWAP32:
15622 case BUILT_IN_BSWAP64:
15623 /* Always true. */
15624 return true;
15625
15626 CASE_FLT_FN (BUILT_IN_SQRT):
15627 /* sqrt(-0.0) is -0.0. */
15628 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15629 return true;
15630 return tree_expr_nonnegative_warnv_p (arg0,
15631 strict_overflow_p);
15632
15633 CASE_FLT_FN (BUILT_IN_ASINH):
15634 CASE_FLT_FN (BUILT_IN_ATAN):
15635 CASE_FLT_FN (BUILT_IN_ATANH):
15636 CASE_FLT_FN (BUILT_IN_CBRT):
15637 CASE_FLT_FN (BUILT_IN_CEIL):
15638 CASE_FLT_FN (BUILT_IN_ERF):
15639 CASE_FLT_FN (BUILT_IN_EXPM1):
15640 CASE_FLT_FN (BUILT_IN_FLOOR):
15641 CASE_FLT_FN (BUILT_IN_FMOD):
15642 CASE_FLT_FN (BUILT_IN_FREXP):
15643 CASE_FLT_FN (BUILT_IN_ICEIL):
15644 CASE_FLT_FN (BUILT_IN_IFLOOR):
15645 CASE_FLT_FN (BUILT_IN_IRINT):
15646 CASE_FLT_FN (BUILT_IN_IROUND):
15647 CASE_FLT_FN (BUILT_IN_LCEIL):
15648 CASE_FLT_FN (BUILT_IN_LDEXP):
15649 CASE_FLT_FN (BUILT_IN_LFLOOR):
15650 CASE_FLT_FN (BUILT_IN_LLCEIL):
15651 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15652 CASE_FLT_FN (BUILT_IN_LLRINT):
15653 CASE_FLT_FN (BUILT_IN_LLROUND):
15654 CASE_FLT_FN (BUILT_IN_LRINT):
15655 CASE_FLT_FN (BUILT_IN_LROUND):
15656 CASE_FLT_FN (BUILT_IN_MODF):
15657 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15658 CASE_FLT_FN (BUILT_IN_RINT):
15659 CASE_FLT_FN (BUILT_IN_ROUND):
15660 CASE_FLT_FN (BUILT_IN_SCALB):
15661 CASE_FLT_FN (BUILT_IN_SCALBLN):
15662 CASE_FLT_FN (BUILT_IN_SCALBN):
15663 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15664 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15665 CASE_FLT_FN (BUILT_IN_SINH):
15666 CASE_FLT_FN (BUILT_IN_TANH):
15667 CASE_FLT_FN (BUILT_IN_TRUNC):
15668 /* True if the 1st argument is nonnegative. */
15669 return tree_expr_nonnegative_warnv_p (arg0,
15670 strict_overflow_p);
15671
15672 CASE_FLT_FN (BUILT_IN_FMAX):
15673 /* True if the 1st OR 2nd arguments are nonnegative. */
15674 return (tree_expr_nonnegative_warnv_p (arg0,
15675 strict_overflow_p)
15676 || (tree_expr_nonnegative_warnv_p (arg1,
15677 strict_overflow_p)));
15678
15679 CASE_FLT_FN (BUILT_IN_FMIN):
15680 /* True if the 1st AND 2nd arguments are nonnegative. */
15681 return (tree_expr_nonnegative_warnv_p (arg0,
15682 strict_overflow_p)
15683 && (tree_expr_nonnegative_warnv_p (arg1,
15684 strict_overflow_p)));
15685
15686 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15687 /* True if the 2nd argument is nonnegative. */
15688 return tree_expr_nonnegative_warnv_p (arg1,
15689 strict_overflow_p);
15690
15691 CASE_FLT_FN (BUILT_IN_POWI):
15692 /* True if the 1st argument is nonnegative or the second
15693 argument is an even integer. */
15694 if (TREE_CODE (arg1) == INTEGER_CST
15695 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15696 return true;
15697 return tree_expr_nonnegative_warnv_p (arg0,
15698 strict_overflow_p);
15699
15700 CASE_FLT_FN (BUILT_IN_POW):
15701 /* True if the 1st argument is nonnegative or the second
15702 argument is an even integer valued real. */
15703 if (TREE_CODE (arg1) == REAL_CST)
15704 {
15705 REAL_VALUE_TYPE c;
15706 HOST_WIDE_INT n;
15707
15708 c = TREE_REAL_CST (arg1);
15709 n = real_to_integer (&c);
15710 if ((n & 1) == 0)
15711 {
15712 REAL_VALUE_TYPE cint;
15713 real_from_integer (&cint, VOIDmode, n, SIGNED);
15714 if (real_identical (&c, &cint))
15715 return true;
15716 }
15717 }
15718 return tree_expr_nonnegative_warnv_p (arg0,
15719 strict_overflow_p);
15720
15721 default:
15722 break;
15723 }
15724 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15725 type);
15726 }
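/* For instance, pow (x, 2.0) is caught by the BUILT_IN_POW case above:
   2.0 is an even integer valued real, so the call is non-negative no
   matter what sign x has, whereas pow (x, 3.0) falls back to asking
   whether x itself is non-negative.  */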
15727
15728 /* Return true if T is known to be non-negative. If the return
15729 value is based on the assumption that signed overflow is undefined,
15730 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15731 *STRICT_OVERFLOW_P. */
15732
15733 static bool
15734 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15735 {
15736 enum tree_code code = TREE_CODE (t);
15737 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15738 return true;
15739
15740 switch (code)
15741 {
15742 case TARGET_EXPR:
15743 {
15744 tree temp = TARGET_EXPR_SLOT (t);
15745 t = TARGET_EXPR_INITIAL (t);
15746
15747 /* If the initializer is non-void, then it's a normal expression
15748 that will be assigned to the slot. */
15749 if (!VOID_TYPE_P (t))
15750 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15751
15752 /* Otherwise, the initializer sets the slot in some way. One common
15753 way is an assignment statement at the end of the initializer. */
15754 while (1)
15755 {
15756 if (TREE_CODE (t) == BIND_EXPR)
15757 t = expr_last (BIND_EXPR_BODY (t));
15758 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15759 || TREE_CODE (t) == TRY_CATCH_EXPR)
15760 t = expr_last (TREE_OPERAND (t, 0));
15761 else if (TREE_CODE (t) == STATEMENT_LIST)
15762 t = expr_last (t);
15763 else
15764 break;
15765 }
15766 if (TREE_CODE (t) == MODIFY_EXPR
15767 && TREE_OPERAND (t, 0) == temp)
15768 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15769 strict_overflow_p);
15770
15771 return false;
15772 }
15773
15774 case CALL_EXPR:
15775 {
15776 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15777 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15778
15779 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15780 get_callee_fndecl (t),
15781 arg0,
15782 arg1,
15783 strict_overflow_p);
15784 }
15785 case COMPOUND_EXPR:
15786 case MODIFY_EXPR:
15787 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15788 strict_overflow_p);
15789 case BIND_EXPR:
15790 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15791 strict_overflow_p);
15792 case SAVE_EXPR:
15793 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15794 strict_overflow_p);
15795
15796 default:
15797 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15798 TREE_TYPE (t));
15799 }
15800
15801 /* We don't know the sign of `t', so be conservative and return false. */
15802 return false;
15803 }
15804
15805 /* Return true if T is known to be non-negative. If the return
15806 value is based on the assumption that signed overflow is undefined,
15807 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15808 *STRICT_OVERFLOW_P. */
15809
15810 bool
15811 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15812 {
15813 enum tree_code code;
15814 if (t == error_mark_node)
15815 return false;
15816
15817 code = TREE_CODE (t);
15818 switch (TREE_CODE_CLASS (code))
15819 {
15820 case tcc_binary:
15821 case tcc_comparison:
15822 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15823 TREE_TYPE (t),
15824 TREE_OPERAND (t, 0),
15825 TREE_OPERAND (t, 1),
15826 strict_overflow_p);
15827
15828 case tcc_unary:
15829 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15830 TREE_TYPE (t),
15831 TREE_OPERAND (t, 0),
15832 strict_overflow_p);
15833
15834 case tcc_constant:
15835 case tcc_declaration:
15836 case tcc_reference:
15837 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15838
15839 default:
15840 break;
15841 }
15842
15843 switch (code)
15844 {
15845 case TRUTH_AND_EXPR:
15846 case TRUTH_OR_EXPR:
15847 case TRUTH_XOR_EXPR:
15848 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15849 TREE_TYPE (t),
15850 TREE_OPERAND (t, 0),
15851 TREE_OPERAND (t, 1),
15852 strict_overflow_p);
15853 case TRUTH_NOT_EXPR:
15854 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15855 TREE_TYPE (t),
15856 TREE_OPERAND (t, 0),
15857 strict_overflow_p);
15858
15859 case COND_EXPR:
15860 case CONSTRUCTOR:
15861 case OBJ_TYPE_REF:
15862 case ASSERT_EXPR:
15863 case ADDR_EXPR:
15864 case WITH_SIZE_EXPR:
15865 case SSA_NAME:
15866 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15867
15868 default:
15869 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15870 }
15871 }
15872
15873 /* Return true if `t' is known to be non-negative. Handle warnings
15874 about undefined signed overflow. */
15875
15876 bool
15877 tree_expr_nonnegative_p (tree t)
15878 {
15879 bool ret, strict_overflow_p;
15880
15881 strict_overflow_p = false;
15882 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15883 if (strict_overflow_p)
15884 fold_overflow_warning (("assuming signed overflow does not occur when "
15885 "determining that expression is always "
15886 "non-negative"),
15887 WARN_STRICT_OVERFLOW_MISC);
15888 return ret;
15889 }
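/* Example of the strict-overflow protocol (hypothetical expression): for
   a signed int x, x * x is reported non-negative only on the assumption
   that signed overflow is undefined (the MULT_EXPR case above sets
   *STRICT_OVERFLOW_P), so this wrapper may emit a -Wstrict-overflow
   note before returning true.  */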
15890
15891
15892 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15893 For floating point we further ensure that T is not denormal.
15894 Similar logic is present in nonzero_address in rtlanal.c.
15895
15896 If the return value is based on the assumption that signed overflow
15897 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15898 change *STRICT_OVERFLOW_P. */
15899
15900 bool
15901 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15902 bool *strict_overflow_p)
15903 {
15904 switch (code)
15905 {
15906 case ABS_EXPR:
15907 return tree_expr_nonzero_warnv_p (op0,
15908 strict_overflow_p);
15909
15910 case NOP_EXPR:
15911 {
15912 tree inner_type = TREE_TYPE (op0);
15913 tree outer_type = type;
15914
15915 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15916 && tree_expr_nonzero_warnv_p (op0,
15917 strict_overflow_p));
15918 }
15919 break;
15920
15921 case NON_LVALUE_EXPR:
15922 return tree_expr_nonzero_warnv_p (op0,
15923 strict_overflow_p);
15924
15925 default:
15926 break;
15927 }
15928
15929 return false;
15930 }
15931
15932 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15933 For floating point we further ensure that T is not denormal.
15934 Similar logic is present in nonzero_address in rtlanal.c.
15935
15936 If the return value is based on the assumption that signed overflow
15937 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15938 change *STRICT_OVERFLOW_P. */
15939
15940 bool
15941 tree_binary_nonzero_warnv_p (enum tree_code code,
15942 tree type,
15943 tree op0,
15944 tree op1, bool *strict_overflow_p)
15945 {
15946 bool sub_strict_overflow_p;
15947 switch (code)
15948 {
15949 case POINTER_PLUS_EXPR:
15950 case PLUS_EXPR:
15951 if (TYPE_OVERFLOW_UNDEFINED (type))
15952 {
15953 /* With the presence of negative values it is hard
15954 to say something. */
15955 sub_strict_overflow_p = false;
15956 if (!tree_expr_nonnegative_warnv_p (op0,
15957 &sub_strict_overflow_p)
15958 || !tree_expr_nonnegative_warnv_p (op1,
15959 &sub_strict_overflow_p))
15960 return false;
15961 /* One of the operands must be positive and the other non-negative. */
15962 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15963 overflows, on a two's-complement machine the sum of a
15964 nonnegative and a positive number can never wrap to zero. */
15965 return (tree_expr_nonzero_warnv_p (op0,
15966 strict_overflow_p)
15967 || tree_expr_nonzero_warnv_p (op1,
15968 strict_overflow_p));
15969 }
15970 break;
15971
15972 case MULT_EXPR:
15973 if (TYPE_OVERFLOW_UNDEFINED (type))
15974 {
15975 if (tree_expr_nonzero_warnv_p (op0,
15976 strict_overflow_p)
15977 && tree_expr_nonzero_warnv_p (op1,
15978 strict_overflow_p))
15979 {
15980 *strict_overflow_p = true;
15981 return true;
15982 }
15983 }
15984 break;
15985
15986 case MIN_EXPR:
15987 sub_strict_overflow_p = false;
15988 if (tree_expr_nonzero_warnv_p (op0,
15989 &sub_strict_overflow_p)
15990 && tree_expr_nonzero_warnv_p (op1,
15991 &sub_strict_overflow_p))
15992 {
15993 if (sub_strict_overflow_p)
15994 *strict_overflow_p = true;
/* MIN of two nonzero operands is itself one of them, so it
must be nonzero as well. */
return true;
15995 }
15996 break;
15997
15998 case MAX_EXPR:
15999 sub_strict_overflow_p = false;
16000 if (tree_expr_nonzero_warnv_p (op0,
16001 &sub_strict_overflow_p))
16002 {
16003 if (sub_strict_overflow_p)
16004 *strict_overflow_p = true;
16005
16006 /* When both operands are nonzero, MAX must be too. */
16007 if (tree_expr_nonzero_warnv_p (op1,
16008 strict_overflow_p))
16009 return true;
16010
16011 /* MAX where operand 0 is positive is positive. */
16012 return tree_expr_nonnegative_warnv_p (op0,
16013 strict_overflow_p);
16014 }
16015 /* MAX where operand 1 is positive is positive. */
16016 else if (tree_expr_nonzero_warnv_p (op1,
16017 &sub_strict_overflow_p)
16018 && tree_expr_nonnegative_warnv_p (op1,
16019 &sub_strict_overflow_p))
16020 {
16021 if (sub_strict_overflow_p)
16022 *strict_overflow_p = true;
16023 return true;
16024 }
16025 break;
16026
16027 case BIT_IOR_EXPR:
16028 return (tree_expr_nonzero_warnv_p (op1,
16029 strict_overflow_p)
16030 || tree_expr_nonzero_warnv_p (op0,
16031 strict_overflow_p));
16032
16033 default:
16034 break;
16035 }
16036
16037 return false;
16038 }
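/* Worked example for the PLUS_EXPR reasoning above: with 32-bit
   operands, the largest sum of two non-negative values is
   0x7fffffff + 0x7fffffff = 0xfffffffe, which is nonzero modulo 2**32,
   so even a wrapped sum of a positive and a non-negative operand can
   never equal zero.  */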
16039
16040 /* Return true when T is an address and is known to be nonzero.
16041 For floating point we further ensure that T is not denormal.
16042 Similar logic is present in nonzero_address in rtlanal.c.
16043
16044 If the return value is based on the assumption that signed overflow
16045 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16046 change *STRICT_OVERFLOW_P. */
16047
16048 bool
16049 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16050 {
16051 bool sub_strict_overflow_p;
16052 switch (TREE_CODE (t))
16053 {
16054 case INTEGER_CST:
16055 return !integer_zerop (t);
16056
16057 case ADDR_EXPR:
16058 {
16059 tree base = TREE_OPERAND (t, 0);
16060
16061 if (!DECL_P (base))
16062 base = get_base_address (base);
16063
16064 if (!base)
16065 return false;
16066
16067 /* For objects in the symbol table, check if we know they are non-zero.
16068 Don't do anything for variables and functions before symtab is built;
16069 it is quite possible that they will be declared weak later. */
16070 if (DECL_P (base) && decl_in_symtab_p (base))
16071 {
16072 struct symtab_node *symbol;
16073
16074 symbol = symtab_node::get (base);
16075 if (symbol)
16076 return symbol->nonzero_address ();
16077 else
16078 return false;
16079 }
16080
16081 /* Function local objects are never NULL. */
16082 if (DECL_P (base)
16083 && (DECL_CONTEXT (base)
16084 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16085 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
16086 return true;
16087
16088 /* Constants are never weak. */
16089 if (CONSTANT_CLASS_P (base))
16090 return true;
16091
16092 return false;
16093 }
16094
16095 case COND_EXPR:
16096 sub_strict_overflow_p = false;
16097 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16098 &sub_strict_overflow_p)
16099 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16100 &sub_strict_overflow_p))
16101 {
16102 if (sub_strict_overflow_p)
16103 *strict_overflow_p = true;
16104 return true;
16105 }
16106 break;
16107
16108 default:
16109 break;
16110 }
16111 return false;
16112 }
16113
16114 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16115 attempt to fold the expression to a constant without modifying TYPE,
16116 OP0 or OP1.
16117
16118 If the expression could be simplified to a constant, then return
16119 the constant. If the expression would not be simplified to a
16120 constant, then return NULL_TREE. */
16121
16122 tree
16123 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16124 {
16125 tree tem = fold_binary (code, type, op0, op1);
16126 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16127 }
16128
16129 /* Given the components of a unary expression CODE, TYPE and OP0,
16130 attempt to fold the expression to a constant without modifying
16131 TYPE or OP0.
16132
16133 If the expression could be simplified to a constant, then return
16134 the constant. If the expression would not be simplified to a
16135 constant, then return NULL_TREE. */
16136
16137 tree
16138 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16139 {
16140 tree tem = fold_unary (code, type, op0);
16141 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16142 }
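/* Usage sketch (hypothetical constants):

       tree three = build_int_cst (integer_type_node, 3);
       tree four = build_int_cst (integer_type_node, 4);
       tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                           three, four);

   yields the INTEGER_CST 7; when the result does not fold to a constant,
   both helpers return NULL_TREE rather than a partially folded tree.  */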
16143
16144 /* If EXP represents referencing an element in a constant string
16145 (either via pointer arithmetic or array indexing), return the
16146 tree representing the value accessed, otherwise return NULL. */
16147
16148 tree
16149 fold_read_from_constant_string (tree exp)
16150 {
16151 if ((TREE_CODE (exp) == INDIRECT_REF
16152 || TREE_CODE (exp) == ARRAY_REF)
16153 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16154 {
16155 tree exp1 = TREE_OPERAND (exp, 0);
16156 tree index;
16157 tree string;
16158 location_t loc = EXPR_LOCATION (exp);
16159
16160 if (TREE_CODE (exp) == INDIRECT_REF)
16161 string = string_constant (exp1, &index);
16162 else
16163 {
16164 tree low_bound = array_ref_low_bound (exp);
16165 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16166
16167 /* Optimize the special case of a zero lower bound.
16168
16169 We convert the low_bound to sizetype to avoid some problems
16170 with constant folding. (E.g. suppose the lower bound is 1,
16171 and its mode is QI. Without the conversion,l (ARRAY
16172 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16173 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16174 if (! integer_zerop (low_bound))
16175 index = size_diffop_loc (loc, index,
16176 fold_convert_loc (loc, sizetype, low_bound));
16177
16178 string = exp1;
16179 }
16180
16181 if (string
16182 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16183 && TREE_CODE (string) == STRING_CST
16184 && TREE_CODE (index) == INTEGER_CST
16185 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16186 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16187 == MODE_INT)
16188 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16189 return build_int_cst_type (TREE_TYPE (exp),
16190 (TREE_STRING_POINTER (string)
16191 [TREE_INT_CST_LOW (index)]));
16192 }
16193 return NULL;
16194 }
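/* For example (hypothetical access), the ARRAY_REF "abc"[1] passes all
   of the checks above and folds to the character constant 'b', while an
   index at or beyond TREE_STRING_LENGTH leaves the reference alone and
   returns NULL.  */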
16195
16196 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16197 an integer constant, real, or fixed-point constant.
16198
16199 TYPE is the type of the result. */
16200
16201 static tree
16202 fold_negate_const (tree arg0, tree type)
16203 {
16204 tree t = NULL_TREE;
16205
16206 switch (TREE_CODE (arg0))
16207 {
16208 case INTEGER_CST:
16209 {
16210 bool overflow;
16211 wide_int val = wi::neg (arg0, &overflow);
16212 t = force_fit_type (type, val, 1,
16213 (overflow | TREE_OVERFLOW (arg0))
16214 && !TYPE_UNSIGNED (type));
16215 break;
16216 }
16217
16218 case REAL_CST:
16219 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16220 break;
16221
16222 case FIXED_CST:
16223 {
16224 FIXED_VALUE_TYPE f;
16225 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16226 &(TREE_FIXED_CST (arg0)), NULL,
16227 TYPE_SATURATING (type));
16228 t = build_fixed (type, f);
16229 /* Propagate overflow flags. */
16230 if (overflow_p | TREE_OVERFLOW (arg0))
16231 TREE_OVERFLOW (t) = 1;
16232 break;
16233 }
16234
16235 default:
16236 gcc_unreachable ();
16237 }
16238
16239 return t;
16240 }
16241
16242 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16243 an integer constant or real constant.
16244
16245 TYPE is the type of the result. */
16246
16247 tree
16248 fold_abs_const (tree arg0, tree type)
16249 {
16250 tree t = NULL_TREE;
16251
16252 switch (TREE_CODE (arg0))
16253 {
16254 case INTEGER_CST:
16255 {
16256 /* If the value is unsigned or non-negative, then the absolute value
16257 is the same as the ordinary value. */
16258 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
16259 t = arg0;
16260
16261 /* If the value is negative, then the absolute value is
16262 its negation. */
16263 else
16264 {
16265 bool overflow;
16266 wide_int val = wi::neg (arg0, &overflow);
16267 t = force_fit_type (type, val, -1,
16268 overflow | TREE_OVERFLOW (arg0));
16269 }
16270 }
16271 break;
16272
16273 case REAL_CST:
16274 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16275 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16276 else
16277 t = arg0;
16278 break;
16279
16280 default:
16281 gcc_unreachable ();
16282 }
16283
16284 return t;
16285 }
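/* Boundary example for both helpers above: negating the 32-bit INT_MIN
   (0x80000000) wraps back to itself, so wi::neg reports overflow and
   force_fit_type marks the resulting constant with TREE_OVERFLOW.  */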
16286
16287 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16288 constant. TYPE is the type of the result. */
16289
16290 static tree
16291 fold_not_const (const_tree arg0, tree type)
16292 {
16293 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16294
16295 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
16296 }
16297
16298 /* Given CODE, a relational operator, the target type, TYPE and two
16299 constant operands OP0 and OP1, return the result of the
16300 relational operation. If the result is not a compile time
16301 constant, then return NULL_TREE. */
16302
16303 static tree
16304 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16305 {
16306 int result, invert;
16307
16308 /* From here on, the only cases we handle are when the result is
16309 known to be a constant. */
16310
16311 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16312 {
16313 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16314 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16315
16316 /* Handle the cases where either operand is a NaN. */
16317 if (real_isnan (c0) || real_isnan (c1))
16318 {
16319 switch (code)
16320 {
16321 case EQ_EXPR:
16322 case ORDERED_EXPR:
16323 result = 0;
16324 break;
16325
16326 case NE_EXPR:
16327 case UNORDERED_EXPR:
16328 case UNLT_EXPR:
16329 case UNLE_EXPR:
16330 case UNGT_EXPR:
16331 case UNGE_EXPR:
16332 case UNEQ_EXPR:
16333 result = 1;
16334 break;
16335
16336 case LT_EXPR:
16337 case LE_EXPR:
16338 case GT_EXPR:
16339 case GE_EXPR:
16340 case LTGT_EXPR:
16341 if (flag_trapping_math)
16342 return NULL_TREE;
16343 result = 0;
16344 break;
16345
16346 default:
16347 gcc_unreachable ();
16348 }
16349
16350 return constant_boolean_node (result, type);
16351 }
16352
16353 return constant_boolean_node (real_compare (code, c0, c1), type);
16354 }
16355
16356 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16357 {
16358 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16359 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16360 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16361 }
16362
16363 /* Handle equality/inequality of complex constants. */
16364 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16365 {
16366 tree rcond = fold_relational_const (code, type,
16367 TREE_REALPART (op0),
16368 TREE_REALPART (op1));
16369 tree icond = fold_relational_const (code, type,
16370 TREE_IMAGPART (op0),
16371 TREE_IMAGPART (op1));
16372 if (code == EQ_EXPR)
16373 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16374 else if (code == NE_EXPR)
16375 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16376 else
16377 return NULL_TREE;
16378 }
16379
16380 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16381 {
16382 unsigned count = VECTOR_CST_NELTS (op0);
16383 tree *elts = XALLOCAVEC (tree, count);
16384 gcc_assert (VECTOR_CST_NELTS (op1) == count
16385 && TYPE_VECTOR_SUBPARTS (type) == count);
16386
16387 for (unsigned i = 0; i < count; i++)
16388 {
16389 tree elem_type = TREE_TYPE (type);
16390 tree elem0 = VECTOR_CST_ELT (op0, i);
16391 tree elem1 = VECTOR_CST_ELT (op1, i);
16392
16393 tree tem = fold_relational_const (code, elem_type,
16394 elem0, elem1);
16395
16396 if (tem == NULL_TREE)
16397 return NULL_TREE;
16398
16399 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16400 }
16401
16402 return build_vector (type, elts);
16403 }
16404
16405 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16406
16407 To compute GT, swap the arguments and do LT.
16408 To compute GE, do LT and invert the result.
16409 To compute LE, swap the arguments, do LT and invert the result.
16410 To compute NE, do EQ and invert the result.
16411
16412 Therefore, the code below must handle only EQ and LT. */
16413
16414 if (code == LE_EXPR || code == GT_EXPR)
16415 {
16416 tree tem = op0;
16417 op0 = op1;
16418 op1 = tem;
16419 code = swap_tree_comparison (code);
16420 }
16421
16422 /* Note that it is safe to invert for real values here because we
16423 have already handled the one case where it matters. */
16424
16425 invert = 0;
16426 if (code == NE_EXPR || code == GE_EXPR)
16427 {
16428 invert = 1;
16429 code = invert_tree_comparison (code, false);
16430 }
16431
16432 /* Compute a result for LT or EQ if args permit;
16433 otherwise return NULL_TREE. */
16434 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16435 {
16436 if (code == EQ_EXPR)
16437 result = tree_int_cst_equal (op0, op1);
16438 else
16439 result = tree_int_cst_lt (op0, op1);
16440 }
16441 else
16442 return NULL_TREE;
16443
16444 if (invert)
16445 result ^= 1;
16446 return constant_boolean_node (result, type);
16447 }
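/* Worked example of the reduction above (hypothetical constants):
   evaluating 5 >= 3 rewrites GE as the inverse of LT, computes
   tree_int_cst_lt (5, 3) == 0, and then inverts that result, returning
   the boolean constant 1.  */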
16448
16449 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16450 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16451 itself. */
16452
16453 tree
16454 fold_build_cleanup_point_expr (tree type, tree expr)
16455 {
16456 /* If the expression does not have side effects then we don't have to wrap
16457 it with a cleanup point expression. */
16458 if (!TREE_SIDE_EFFECTS (expr))
16459 return expr;
16460
16461 /* If the expression is a return, check whether the expression inside the
16462 return, or the right hand side of the modify expression inside the
16463 return, has no side effects. If either has none, we don't need to
16464 wrap the expression in a cleanup point expression. Note we don't check the
16465 left hand side of the modify because it should always be a return decl. */
16466 if (TREE_CODE (expr) == RETURN_EXPR)
16467 {
16468 tree op = TREE_OPERAND (expr, 0);
16469 if (!op || !TREE_SIDE_EFFECTS (op))
16470 return expr;
16471 op = TREE_OPERAND (op, 1);
16472 if (!TREE_SIDE_EFFECTS (op))
16473 return expr;
16474 }
16475
16476 return build1 (CLEANUP_POINT_EXPR, type, expr);
16477 }
16478
16479 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16480 of an indirection through OP0, or NULL_TREE if no simplification is
16481 possible. */
16482
16483 tree
16484 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16485 {
16486 tree sub = op0;
16487 tree subtype;
16488
16489 STRIP_NOPS (sub);
16490 subtype = TREE_TYPE (sub);
16491 if (!POINTER_TYPE_P (subtype))
16492 return NULL_TREE;
16493
16494 if (TREE_CODE (sub) == ADDR_EXPR)
16495 {
16496 tree op = TREE_OPERAND (sub, 0);
16497 tree optype = TREE_TYPE (op);
16498 /* *&CONST_DECL -> the value of the const decl. */
16499 if (TREE_CODE (op) == CONST_DECL)
16500 return DECL_INITIAL (op);
16501 /* *&p => p; make sure to handle *&"str"[cst] here. */
16502 if (type == optype)
16503 {
16504 tree fop = fold_read_from_constant_string (op);
16505 if (fop)
16506 return fop;
16507 else
16508 return op;
16509 }
16510 /* *(foo *)&fooarray => fooarray[0] */
16511 else if (TREE_CODE (optype) == ARRAY_TYPE
16512 && type == TREE_TYPE (optype)
16513 && (!in_gimple_form
16514 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16515 {
16516 tree type_domain = TYPE_DOMAIN (optype);
16517 tree min_val = size_zero_node;
16518 if (type_domain && TYPE_MIN_VALUE (type_domain))
16519 min_val = TYPE_MIN_VALUE (type_domain);
16520 if (in_gimple_form
16521 && TREE_CODE (min_val) != INTEGER_CST)
16522 return NULL_TREE;
16523 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16524 NULL_TREE, NULL_TREE);
16525 }
16526 /* *(foo *)&complexfoo => __real__ complexfoo */
16527 else if (TREE_CODE (optype) == COMPLEX_TYPE
16528 && type == TREE_TYPE (optype))
16529 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16530 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16531 else if (TREE_CODE (optype) == VECTOR_TYPE
16532 && type == TREE_TYPE (optype))
16533 {
16534 tree part_width = TYPE_SIZE (type);
16535 tree index = bitsize_int (0);
16536 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16537 }
16538 }
16539
16540 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16541 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16542 {
16543 tree op00 = TREE_OPERAND (sub, 0);
16544 tree op01 = TREE_OPERAND (sub, 1);
16545
16546 STRIP_NOPS (op00);
16547 if (TREE_CODE (op00) == ADDR_EXPR)
16548 {
16549 tree op00type;
16550 op00 = TREE_OPERAND (op00, 0);
16551 op00type = TREE_TYPE (op00);
16552
16553 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16554 if (TREE_CODE (op00type) == VECTOR_TYPE
16555 && type == TREE_TYPE (op00type))
16556 {
16557 HOST_WIDE_INT offset = tree_to_shwi (op01);
16558 tree part_width = TYPE_SIZE (type);
16559 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16560 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16561 tree index = bitsize_int (indexi);
16562
16563 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16564 return fold_build3_loc (loc,
16565 BIT_FIELD_REF, type, op00,
16566 part_width, index);
16567
16568 }
16569 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16570 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16571 && type == TREE_TYPE (op00type))
16572 {
16573 tree size = TYPE_SIZE_UNIT (type);
16574 if (tree_int_cst_equal (size, op01))
16575 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16576 }
16577 /* ((foo *)&fooarray)[1] => fooarray[1] */
16578 else if (TREE_CODE (op00type) == ARRAY_TYPE
16579 && type == TREE_TYPE (op00type))
16580 {
16581 tree type_domain = TYPE_DOMAIN (op00type);
16582 tree min_val = size_zero_node;
16583 if (type_domain && TYPE_MIN_VALUE (type_domain))
16584 min_val = TYPE_MIN_VALUE (type_domain);
16585 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16586 TYPE_SIZE_UNIT (type));
16587 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16588 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16589 NULL_TREE, NULL_TREE);
16590 }
16591 }
16592 }
16593
16594 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16595 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16596 && type == TREE_TYPE (TREE_TYPE (subtype))
16597 && (!in_gimple_form
16598 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16599 {
16600 tree type_domain;
16601 tree min_val = size_zero_node;
16602 sub = build_fold_indirect_ref_loc (loc, sub);
16603 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16604 if (type_domain && TYPE_MIN_VALUE (type_domain))
16605 min_val = TYPE_MIN_VALUE (type_domain);
16606 if (in_gimple_form
16607 && TREE_CODE (min_val) != INTEGER_CST)
16608 return NULL_TREE;
16609 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16610 NULL_TREE);
16611 }
16612
16613 return NULL_TREE;
16614 }
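/* For instance (hypothetical trees): given int a[4], the indirection
   *(int *) &a becomes a[0] via the ADDR_EXPR case above, and for a
   _Complex double c, *(double *) ((char *) &c + 8) becomes __imag__ c
   via the POINTER_PLUS_EXPR case, since the offset 8 equals
   TYPE_SIZE_UNIT (double).  */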
16615
16616 /* Builds an expression for an indirection through T, simplifying some
16617 cases. */
16618
16619 tree
16620 build_fold_indirect_ref_loc (location_t loc, tree t)
16621 {
16622 tree type = TREE_TYPE (TREE_TYPE (t));
16623 tree sub = fold_indirect_ref_1 (loc, type, t);
16624
16625 if (sub)
16626 return sub;
16627
16628 return build1_loc (loc, INDIRECT_REF, type, t);
16629 }
16630
16631 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16632
16633 tree
16634 fold_indirect_ref_loc (location_t loc, tree t)
16635 {
16636 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16637
16638 if (sub)
16639 return sub;
16640 else
16641 return t;
16642 }
16643
16644 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16645 whose result is ignored. The type of the returned tree need not be
16646 the same as that of the original expression. */
16647
16648 tree
16649 fold_ignored_result (tree t)
16650 {
16651 if (!TREE_SIDE_EFFECTS (t))
16652 return integer_zero_node;
16653
16654 for (;;)
16655 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16656 {
16657 case tcc_unary:
16658 t = TREE_OPERAND (t, 0);
16659 break;
16660
16661 case tcc_binary:
16662 case tcc_comparison:
16663 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16664 t = TREE_OPERAND (t, 0);
16665 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16666 t = TREE_OPERAND (t, 1);
16667 else
16668 return t;
16669 break;
16670
16671 case tcc_expression:
16672 switch (TREE_CODE (t))
16673 {
16674 case COMPOUND_EXPR:
16675 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16676 return t;
16677 t = TREE_OPERAND (t, 0);
16678 break;
16679
16680 case COND_EXPR:
16681 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16682 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16683 return t;
16684 t = TREE_OPERAND (t, 0);
16685 break;
16686
16687 default:
16688 return t;
16689 }
16690 break;
16691
16692 default:
16693 return t;
16694 }
16695 }
16696
16697 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16698
16699 tree
16700 round_up_loc (location_t loc, tree value, unsigned int divisor)
16701 {
16702 tree div = NULL_TREE;
16703
16704 if (divisor == 1)
16705 return value;
16706
16707 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16708 have to do anything. Only do this when we are not given a const,
16709 because for a constant this check is more expensive than just
16710 doing the rounding. */
16711 if (TREE_CODE (value) != INTEGER_CST)
16712 {
16713 div = build_int_cst (TREE_TYPE (value), divisor);
16714
16715 if (multiple_of_p (TREE_TYPE (value), value, div))
16716 return value;
16717 }
16718
16719 /* If divisor is a power of two, simplify this to bit manipulation. */
16720 if (divisor == (divisor & -divisor))
16721 {
16722 if (TREE_CODE (value) == INTEGER_CST)
16723 {
16724 wide_int val = value;
16725 bool overflow_p;
16726
16727 if ((val & (divisor - 1)) == 0)
16728 return value;
16729
16730 overflow_p = TREE_OVERFLOW (value);
16731 val &= ~(divisor - 1);
16732 val += divisor;
16733 if (val == 0)
16734 overflow_p = true;
16735
16736 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16737 }
16738 else
16739 {
16740 tree t;
16741
16742 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16743 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16744 t = build_int_cst (TREE_TYPE (value), -divisor);
16745 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16746 }
16747 }
16748 else
16749 {
16750 if (!div)
16751 div = build_int_cst (TREE_TYPE (value), divisor);
16752 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16753 value = size_binop_loc (loc, MULT_EXPR, value, div);
16754 }
16755
16756 return value;
16757 }
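/* Worked example of the power-of-two path above: rounding VALUE = 13 up
   to a multiple of 8 computes (13 & ~7) + 8 = 16 for a constant, and
   emits (VALUE + 7) & -8 for a non-constant; both avoid an explicit
   division.  */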
16758
16759 /* Likewise, but round down. */
16760
16761 tree
16762 round_down_loc (location_t loc, tree value, int divisor)
16763 {
16764 tree div = NULL_TREE;
16765
16766 gcc_assert (divisor > 0);
16767 if (divisor == 1)
16768 return value;
16769
16770 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16771 have to do anything. Only do this when we are not given a const,
16772 because for a constant this check is more expensive than just
16773 doing the rounding. */
16774 if (TREE_CODE (value) != INTEGER_CST)
16775 {
16776 div = build_int_cst (TREE_TYPE (value), divisor);
16777
16778 if (multiple_of_p (TREE_TYPE (value), value, div))
16779 return value;
16780 }
16781
16782 /* If divisor is a power of two, simplify this to bit manipulation. */
16783 if (divisor == (divisor & -divisor))
16784 {
16785 tree t;
16786
16787 t = build_int_cst (TREE_TYPE (value), -divisor);
16788 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16789 }
16790 else
16791 {
16792 if (!div)
16793 div = build_int_cst (TREE_TYPE (value), divisor);
16794 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16795 value = size_binop_loc (loc, MULT_EXPR, value, div);
16796 }
16797
16798 return value;
16799 }
16800
16801 /* Returns the pointer to the base of the object addressed by EXP and
16802 extracts the information about the offset of the access, storing it
16803 to PBITPOS and POFFSET. */
16804
16805 static tree
16806 split_address_to_core_and_offset (tree exp,
16807 HOST_WIDE_INT *pbitpos, tree *poffset)
16808 {
16809 tree core;
16810 enum machine_mode mode;
16811 int unsignedp, volatilep;
16812 HOST_WIDE_INT bitsize;
16813 location_t loc = EXPR_LOCATION (exp);
16814
16815 if (TREE_CODE (exp) == ADDR_EXPR)
16816 {
16817 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16818 poffset, &mode, &unsignedp, &volatilep,
16819 false);
16820 core = build_fold_addr_expr_loc (loc, core);
16821 }
16822 else
16823 {
16824 core = exp;
16825 *pbitpos = 0;
16826 *poffset = NULL_TREE;
16827 }
16828
16829 return core;
16830 }
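
/* Hypothetical usage sketch: for EXP == &s.f, where field F lives at a
   constant bit offset of 32 within S, this returns &s with *PBITPOS == 32
   and *POFFSET == NULL_TREE; for &a[i] with variable I it returns &a with
   *PBITPOS == 0 and *POFFSET holding the byte offset i * sizeof (a[0]).  */
#if 0
HOST_WIDE_INT bitpos;
tree offset;
tree core = split_address_to_core_and_offset (exp, &bitpos, &offset);
#endif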
16831
16832 /* Returns true if the addresses of E1 and E2 differ by a constant
16833 number of bytes, false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
16834
16835 bool
16836 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16837 {
16838 tree core1, core2;
16839 HOST_WIDE_INT bitpos1, bitpos2;
16840 tree toffset1, toffset2, tdiff, type;
16841
16842 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16843 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16844
16845 if (bitpos1 % BITS_PER_UNIT != 0
16846 || bitpos2 % BITS_PER_UNIT != 0
16847 || !operand_equal_p (core1, core2, 0))
16848 return false;
16849
16850 if (toffset1 && toffset2)
16851 {
16852 type = TREE_TYPE (toffset1);
16853 if (type != TREE_TYPE (toffset2))
16854 toffset2 = fold_convert (type, toffset2);
16855
16856 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16857 if (!cst_and_fits_in_hwi (tdiff))
16858 return false;
16859
16860 *diff = int_cst_value (tdiff);
16861 }
16862 else if (toffset1 || toffset2)
16863 {
16864 /* If only one of the offsets is non-constant, the difference cannot
16865 be a constant. */
16866 return false;
16867 }
16868 else
16869 *diff = 0;
16870
16871 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16872 return true;
16873 }
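
/* Hypothetical usage sketch: &a[3] and &a[1] share the core &a and have
   constant offsets, so the call below would set DIFF to 2 * sizeof (a[0])
   and return true; &a[i] vs. &a[1] would return false, since only one
   side has a non-constant offset.  */
#if 0
HOST_WIDE_INT diff;
if (ptr_difference_const (e1, e2, &diff))
  /* E1 addresses DIFF bytes beyond E2.  */;
#endif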
16874
16875 /* Simplify the floating-point expression EXP when the sign of the
16876 result is not significant.  Return NULL_TREE if no simplification
16877 is possible.  */
16878
16879 tree
16880 fold_strip_sign_ops (tree exp)
16881 {
16882 tree arg0, arg1;
16883 location_t loc = EXPR_LOCATION (exp);
16884
16885 switch (TREE_CODE (exp))
16886 {
16887 case ABS_EXPR:
16888 case NEGATE_EXPR:
16889 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16890 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16891
16892 case MULT_EXPR:
16893 case RDIV_EXPR:
16894 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16895 return NULL_TREE;
16896 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16897 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16898 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16899 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16900 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16901 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16902 break;
16903
16904 case COMPOUND_EXPR:
16905 arg0 = TREE_OPERAND (exp, 0);
16906 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16907 if (arg1)
16908 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16909 break;
16910
16911 case COND_EXPR:
16912 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16913 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16914 if (arg0 || arg1)
16915 return fold_build3_loc (loc,
16916 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16917 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16918 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16919 break;
16920
16921 case CALL_EXPR:
16922 {
16923 const enum built_in_function fcode = builtin_mathfn_code (exp);
16924 switch (fcode)
16925 {
16926 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16927 /* Strip the copysign call and return its first argument (ARG1's side effects are preserved).  */
16928 arg0 = CALL_EXPR_ARG (exp, 0);
16929 arg1 = CALL_EXPR_ARG (exp, 1);
16930 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16931
16932 default:
16933 /* Strip sign ops from the argument of "odd" math functions, i.e. those with f(-x) == -f(x).  */
16934 if (negate_mathfn_p (fcode))
16935 {
16936 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16937 if (arg0)
16938 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16939 }
16940 break;
16941 }
16942 }
16943 break;
16944
16945 default:
16946 break;
16947 }
16948 return NULL_TREE;
16949 }
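
/* Illustrative examples (assuming the caller has established that the
   sign of the result is insignificant):

     fold_strip_sign_ops (-x * fabs (y))	=> x * y
     fold_strip_sign_ops (copysign (x, y))	=> x
     fold_strip_sign_ops (sin (-x))		=> sin (x)

   The last case relies on sin being "odd" in the sense checked by
   negate_mathfn_p.  */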