decl.c, [...]: Remove redundant enum from machine_mode.
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "predict.h"
62 #include "vec.h"
63 #include "hashtab.h"
64 #include "hash-set.h"
65 #include "machmode.h"
66 #include "hard-reg-set.h"
67 #include "input.h"
68 #include "function.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "tree-eh.h"
73 #include "gimple-expr.h"
74 #include "is-a.h"
75 #include "gimple.h"
76 #include "gimplify.h"
77 #include "tree-dfa.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
79 #include "builtins.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "generic-match.h"
85
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
89
90 /* The following constants represent a bit based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
110 };
111
112 static bool negate_mathfn_p (enum built_in_function);
113 static bool negate_expr_p (tree);
114 static tree negate_expr (tree);
115 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
116 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
117 static tree const_binop (enum tree_code, tree, tree);
118 static enum comparison_code comparison_to_compcode (enum tree_code);
119 static enum tree_code compcode_to_comparison (enum comparison_code);
120 static int operand_equal_for_comparison_p (tree, tree, tree);
121 static int twoval_comparison_p (tree, tree *, tree *, int *);
122 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
123 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
124 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
125 static tree make_bit_field_ref (location_t, tree, tree,
126 HOST_WIDE_INT, HOST_WIDE_INT, int);
127 static tree optimize_bit_field_compare (location_t, enum tree_code,
128 tree, tree, tree);
129 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
130 HOST_WIDE_INT *,
131 machine_mode *, int *, int *,
132 tree *, tree *);
133 static tree sign_bit_p (tree, const_tree);
134 static int simple_operand_p (const_tree);
135 static bool simple_operand_p_2 (tree);
136 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
137 static tree range_predecessor (tree);
138 static tree range_successor (tree);
139 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
140 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
141 static tree unextend (tree, int, int, tree);
142 static tree optimize_minmax_comparison (location_t, enum tree_code,
143 tree, tree, tree);
144 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
145 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
146 static tree fold_binary_op_with_conditional_arg (location_t,
147 enum tree_code, tree,
148 tree, tree,
149 tree, tree, int);
150 static tree fold_mathfn_compare (location_t,
151 enum built_in_function, enum tree_code,
152 tree, tree, tree);
153 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
154 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
155 static bool reorder_operands_p (const_tree, const_tree);
156 static tree fold_negate_const (tree, tree);
157 static tree fold_not_const (const_tree, tree);
158 static tree fold_relational_const (enum tree_code, tree, tree, tree);
159 static tree fold_convert_const (enum tree_code, tree, tree);
160
161 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
162 Otherwise, return LOC. */
163
164 static location_t
165 expr_location_or (tree t, location_t loc)
166 {
167 location_t tloc = EXPR_LOCATION (t);
168 return tloc == UNKNOWN_LOCATION ? loc : tloc;
169 }
170
171 /* Similar to protected_set_expr_location, but never modify x in place,
172 if location can and needs to be set, unshare it. */
173
174 static inline tree
175 protected_set_expr_location_unshare (tree x, location_t loc)
176 {
177 if (CAN_HAVE_LOCATION_P (x)
178 && EXPR_LOCATION (x) != loc
179 && !(TREE_CODE (x) == SAVE_EXPR
180 || TREE_CODE (x) == TARGET_EXPR
181 || TREE_CODE (x) == BIND_EXPR))
182 {
183 x = copy_node (x);
184 SET_EXPR_LOCATION (x, loc);
185 }
186 return x;
187 }
188 \f
189 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
190 division and returns the quotient. Otherwise returns
191 NULL_TREE. */
192
193 tree
194 div_if_zero_remainder (const_tree arg1, const_tree arg2)
195 {
196 widest_int quo;
197
198 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
199 SIGNED, &quo))
200 return wide_int_to_tree (TREE_TYPE (arg1), quo);
201
202 return NULL_TREE;
203 }
204 \f
205 /* This is nonzero if we should defer warnings about undefined
206 overflow. This facility exists because these warnings are a
207 special case. The code to estimate loop iterations does not want
208 to issue any warnings, since it works with expressions which do not
209 occur in user code. Various bits of cleanup code call fold(), but
210 only use the result if it has certain characteristics (e.g., is a
211 constant); that code only wants to issue a warning if the result is
212 used. */
213
214 static int fold_deferring_overflow_warnings;
215
216 /* If a warning about undefined overflow is deferred, this is the
217 warning. Note that this may cause us to turn two warnings into
218 one, but that is fine since it is sufficient to only give one
219 warning per expression. */
220
221 static const char* fold_deferred_overflow_warning;
222
223 /* If a warning about undefined overflow is deferred, this is the
224 level at which the warning should be emitted. */
225
226 static enum warn_strict_overflow_code fold_deferred_overflow_code;
227
228 /* Start deferring overflow warnings. We could use a stack here to
229 permit nested calls, but at present it is not necessary. */
230
231 void
232 fold_defer_overflow_warnings (void)
233 {
234 ++fold_deferring_overflow_warnings;
235 }
236
237 /* Stop deferring overflow warnings. If there is a pending warning,
238 and ISSUE is true, then issue the warning if appropriate. STMT is
239 the statement with which the warning should be associated (used for
240 location information); STMT may be NULL. CODE is the level of the
241 warning--a warn_strict_overflow_code value. This function will use
242 the smaller of CODE and the deferred code when deciding whether to
243 issue the warning. CODE may be zero to mean to always use the
244 deferred code. */
245
246 void
247 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
248 {
249 const char *warnmsg;
250 location_t locus;
251
252 gcc_assert (fold_deferring_overflow_warnings > 0);
253 --fold_deferring_overflow_warnings;
254 if (fold_deferring_overflow_warnings > 0)
255 {
256 if (fold_deferred_overflow_warning != NULL
257 && code != 0
258 && code < (int) fold_deferred_overflow_code)
259 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
260 return;
261 }
262
263 warnmsg = fold_deferred_overflow_warning;
264 fold_deferred_overflow_warning = NULL;
265
266 if (!issue || warnmsg == NULL)
267 return;
268
269 if (gimple_no_warning_p (stmt))
270 return;
271
272 /* Use the smallest code level when deciding to issue the
273 warning. */
274 if (code == 0 || code > (int) fold_deferred_overflow_code)
275 code = fold_deferred_overflow_code;
276
277 if (!issue_strict_overflow_warning (code))
278 return;
279
280 if (stmt == NULL)
281 locus = input_location;
282 else
283 locus = gimple_location (stmt);
284 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
285 }
286
287 /* Stop deferring overflow warnings, ignoring any deferred
288 warnings. */
289
290 void
291 fold_undefer_and_ignore_overflow_warnings (void)
292 {
293 fold_undefer_overflow_warnings (false, NULL, 0);
294 }
295
296 /* Whether we are deferring overflow warnings. */
297
298 bool
299 fold_deferring_overflow_warnings_p (void)
300 {
301 return fold_deferring_overflow_warnings > 0;
302 }
303
304 /* This is called when we fold something based on the fact that signed
305 overflow is undefined. */
306
307 static void
308 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
309 {
310 if (fold_deferring_overflow_warnings > 0)
311 {
312 if (fold_deferred_overflow_warning == NULL
313 || wc < fold_deferred_overflow_code)
314 {
315 fold_deferred_overflow_warning = gmsgid;
316 fold_deferred_overflow_code = wc;
317 }
318 }
319 else if (issue_strict_overflow_warning (wc))
320 warning (OPT_Wstrict_overflow, gmsgid);
321 }
322 \f
323 /* Return true if the built-in mathematical function specified by CODE
324 is odd, i.e. -f(x) == f(-x). */
325
326 static bool
327 negate_mathfn_p (enum built_in_function code)
328 {
329 switch (code)
330 {
331 CASE_FLT_FN (BUILT_IN_ASIN):
332 CASE_FLT_FN (BUILT_IN_ASINH):
333 CASE_FLT_FN (BUILT_IN_ATAN):
334 CASE_FLT_FN (BUILT_IN_ATANH):
335 CASE_FLT_FN (BUILT_IN_CASIN):
336 CASE_FLT_FN (BUILT_IN_CASINH):
337 CASE_FLT_FN (BUILT_IN_CATAN):
338 CASE_FLT_FN (BUILT_IN_CATANH):
339 CASE_FLT_FN (BUILT_IN_CBRT):
340 CASE_FLT_FN (BUILT_IN_CPROJ):
341 CASE_FLT_FN (BUILT_IN_CSIN):
342 CASE_FLT_FN (BUILT_IN_CSINH):
343 CASE_FLT_FN (BUILT_IN_CTAN):
344 CASE_FLT_FN (BUILT_IN_CTANH):
345 CASE_FLT_FN (BUILT_IN_ERF):
346 CASE_FLT_FN (BUILT_IN_LLROUND):
347 CASE_FLT_FN (BUILT_IN_LROUND):
348 CASE_FLT_FN (BUILT_IN_ROUND):
349 CASE_FLT_FN (BUILT_IN_SIN):
350 CASE_FLT_FN (BUILT_IN_SINH):
351 CASE_FLT_FN (BUILT_IN_TAN):
352 CASE_FLT_FN (BUILT_IN_TANH):
353 CASE_FLT_FN (BUILT_IN_TRUNC):
354 return true;
355
356 CASE_FLT_FN (BUILT_IN_LLRINT):
357 CASE_FLT_FN (BUILT_IN_LRINT):
358 CASE_FLT_FN (BUILT_IN_NEARBYINT):
359 CASE_FLT_FN (BUILT_IN_RINT):
360 return !flag_rounding_math;
361
362 default:
363 break;
364 }
365 return false;
366 }
367
368 /* Check whether we may negate an integer constant T without causing
369 overflow. */
370
371 bool
372 may_negate_without_overflow_p (const_tree t)
373 {
374 tree type;
375
376 gcc_assert (TREE_CODE (t) == INTEGER_CST);
377
378 type = TREE_TYPE (t);
379 if (TYPE_UNSIGNED (type))
380 return false;
381
382 return !wi::only_sign_bit_p (t);
383 }
384
385 /* Determine whether an expression T can be cheaply negated using
386 the function negate_expr without introducing undefined overflow. */
387
388 static bool
389 negate_expr_p (tree t)
390 {
391 tree type;
392
393 if (t == 0)
394 return false;
395
396 type = TREE_TYPE (t);
397
398 STRIP_SIGN_NOPS (t);
399 switch (TREE_CODE (t))
400 {
401 case INTEGER_CST:
402 if (TYPE_OVERFLOW_WRAPS (type))
403 return true;
404
405 /* Check that -CST will not overflow type. */
406 return may_negate_without_overflow_p (t);
407 case BIT_NOT_EXPR:
408 return (INTEGRAL_TYPE_P (type)
409 && TYPE_OVERFLOW_WRAPS (type));
410
411 case FIXED_CST:
412 case NEGATE_EXPR:
413 return true;
414
415 case REAL_CST:
416 /* We want to canonicalize to positive real constants. Pretend
417 that only negative ones can be easily negated. */
418 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
419
420 case COMPLEX_CST:
421 return negate_expr_p (TREE_REALPART (t))
422 && negate_expr_p (TREE_IMAGPART (t));
423
424 case VECTOR_CST:
425 {
426 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
427 return true;
428
429 int count = TYPE_VECTOR_SUBPARTS (type), i;
430
431 for (i = 0; i < count; i++)
432 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
433 return false;
434
435 return true;
436 }
437
438 case COMPLEX_EXPR:
439 return negate_expr_p (TREE_OPERAND (t, 0))
440 && negate_expr_p (TREE_OPERAND (t, 1));
441
442 case CONJ_EXPR:
443 return negate_expr_p (TREE_OPERAND (t, 0));
444
445 case PLUS_EXPR:
446 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
447 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
448 return false;
449 /* -(A + B) -> (-B) - A. */
450 if (negate_expr_p (TREE_OPERAND (t, 1))
451 && reorder_operands_p (TREE_OPERAND (t, 0),
452 TREE_OPERAND (t, 1)))
453 return true;
454 /* -(A + B) -> (-A) - B. */
455 return negate_expr_p (TREE_OPERAND (t, 0));
456
457 case MINUS_EXPR:
458 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
459 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
461 && reorder_operands_p (TREE_OPERAND (t, 0),
462 TREE_OPERAND (t, 1));
463
464 case MULT_EXPR:
465 if (TYPE_UNSIGNED (TREE_TYPE (t)))
466 break;
467
468 /* Fall through. */
469
470 case RDIV_EXPR:
471 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
472 return negate_expr_p (TREE_OPERAND (t, 1))
473 || negate_expr_p (TREE_OPERAND (t, 0));
474 break;
475
476 case TRUNC_DIV_EXPR:
477 case ROUND_DIV_EXPR:
478 case EXACT_DIV_EXPR:
479 /* In general we can't negate A / B, because if A is INT_MIN and
480 B is 1, we may turn this into INT_MIN / -1 which is undefined
481 and actually traps on some architectures. But if overflow is
482 undefined, we can negate, because - (INT_MIN / 1) is an
483 overflow. */
484 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
485 {
486 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
487 break;
488 /* If overflow is undefined then we have to be careful because
489 we ask whether it's ok to associate the negate with the
490 division which is not ok for example for
491 -((a - b) / c) where (-(a - b)) / c may invoke undefined
492 overflow because of negating INT_MIN. So do not use
493 negate_expr_p here but open-code the two important cases. */
494 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
495 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
496 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
497 return true;
498 }
499 else if (negate_expr_p (TREE_OPERAND (t, 0)))
500 return true;
501 return negate_expr_p (TREE_OPERAND (t, 1));
502
503 case NOP_EXPR:
504 /* Negate -((double)float) as (double)(-float). */
505 if (TREE_CODE (type) == REAL_TYPE)
506 {
507 tree tem = strip_float_extensions (t);
508 if (tem != t)
509 return negate_expr_p (tem);
510 }
511 break;
512
513 case CALL_EXPR:
514 /* Negate -f(x) as f(-x). */
515 if (negate_mathfn_p (builtin_mathfn_code (t)))
516 return negate_expr_p (CALL_EXPR_ARG (t, 0));
517 break;
518
519 case RSHIFT_EXPR:
520 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
521 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
522 {
523 tree op1 = TREE_OPERAND (t, 1);
524 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
525 return true;
526 }
527 break;
528
529 default:
530 break;
531 }
532 return false;
533 }
534
535 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
536 simplification is possible.
537 If negate_expr_p would return true for T, NULL_TREE will never be
538 returned. */
539
540 static tree
541 fold_negate_expr (location_t loc, tree t)
542 {
543 tree type = TREE_TYPE (t);
544 tree tem;
545
546 switch (TREE_CODE (t))
547 {
548 /* Convert - (~A) to A + 1. */
549 case BIT_NOT_EXPR:
550 if (INTEGRAL_TYPE_P (type))
551 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
552 build_one_cst (type));
553 break;
554
555 case INTEGER_CST:
556 tem = fold_negate_const (t, type);
557 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
558 || !TYPE_OVERFLOW_TRAPS (type))
559 return tem;
560 break;
561
562 case REAL_CST:
563 tem = fold_negate_const (t, type);
564 /* Two's complement FP formats, such as c4x, may overflow. */
565 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
566 return tem;
567 break;
568
569 case FIXED_CST:
570 tem = fold_negate_const (t, type);
571 return tem;
572
573 case COMPLEX_CST:
574 {
575 tree rpart = negate_expr (TREE_REALPART (t));
576 tree ipart = negate_expr (TREE_IMAGPART (t));
577
578 if ((TREE_CODE (rpart) == REAL_CST
579 && TREE_CODE (ipart) == REAL_CST)
580 || (TREE_CODE (rpart) == INTEGER_CST
581 && TREE_CODE (ipart) == INTEGER_CST))
582 return build_complex (type, rpart, ipart);
583 }
584 break;
585
586 case VECTOR_CST:
587 {
588 int count = TYPE_VECTOR_SUBPARTS (type), i;
589 tree *elts = XALLOCAVEC (tree, count);
590
591 for (i = 0; i < count; i++)
592 {
593 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
594 if (elts[i] == NULL_TREE)
595 return NULL_TREE;
596 }
597
598 return build_vector (type, elts);
599 }
600
601 case COMPLEX_EXPR:
602 if (negate_expr_p (t))
603 return fold_build2_loc (loc, COMPLEX_EXPR, type,
604 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
605 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
606 break;
607
608 case CONJ_EXPR:
609 if (negate_expr_p (t))
610 return fold_build1_loc (loc, CONJ_EXPR, type,
611 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
612 break;
613
614 case NEGATE_EXPR:
615 return TREE_OPERAND (t, 0);
616
617 case PLUS_EXPR:
618 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
619 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
620 {
621 /* -(A + B) -> (-B) - A. */
622 if (negate_expr_p (TREE_OPERAND (t, 1))
623 && reorder_operands_p (TREE_OPERAND (t, 0),
624 TREE_OPERAND (t, 1)))
625 {
626 tem = negate_expr (TREE_OPERAND (t, 1));
627 return fold_build2_loc (loc, MINUS_EXPR, type,
628 tem, TREE_OPERAND (t, 0));
629 }
630
631 /* -(A + B) -> (-A) - B. */
632 if (negate_expr_p (TREE_OPERAND (t, 0)))
633 {
634 tem = negate_expr (TREE_OPERAND (t, 0));
635 return fold_build2_loc (loc, MINUS_EXPR, type,
636 tem, TREE_OPERAND (t, 1));
637 }
638 }
639 break;
640
641 case MINUS_EXPR:
642 /* - (A - B) -> B - A */
643 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
644 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
645 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
646 return fold_build2_loc (loc, MINUS_EXPR, type,
647 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
648 break;
649
650 case MULT_EXPR:
651 if (TYPE_UNSIGNED (type))
652 break;
653
654 /* Fall through. */
655
656 case RDIV_EXPR:
657 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
658 {
659 tem = TREE_OPERAND (t, 1);
660 if (negate_expr_p (tem))
661 return fold_build2_loc (loc, TREE_CODE (t), type,
662 TREE_OPERAND (t, 0), negate_expr (tem));
663 tem = TREE_OPERAND (t, 0);
664 if (negate_expr_p (tem))
665 return fold_build2_loc (loc, TREE_CODE (t), type,
666 negate_expr (tem), TREE_OPERAND (t, 1));
667 }
668 break;
669
670 case TRUNC_DIV_EXPR:
671 case ROUND_DIV_EXPR:
672 case EXACT_DIV_EXPR:
673 /* In general we can't negate A / B, because if A is INT_MIN and
674 B is 1, we may turn this into INT_MIN / -1 which is undefined
675 and actually traps on some architectures. But if overflow is
676 undefined, we can negate, because - (INT_MIN / 1) is an
677 overflow. */
678 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
679 {
680 const char * const warnmsg = G_("assuming signed overflow does not "
681 "occur when negating a division");
682 tem = TREE_OPERAND (t, 1);
683 if (negate_expr_p (tem))
684 {
685 if (INTEGRAL_TYPE_P (type)
686 && (TREE_CODE (tem) != INTEGER_CST
687 || integer_onep (tem)))
688 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
689 return fold_build2_loc (loc, TREE_CODE (t), type,
690 TREE_OPERAND (t, 0), negate_expr (tem));
691 }
692 /* If overflow is undefined then we have to be careful because
693 we ask whether it's ok to associate the negate with the
694 division which is not ok for example for
695 -((a - b) / c) where (-(a - b)) / c may invoke undefined
696 overflow because of negating INT_MIN. So do not use
697 negate_expr_p here but open-code the two important cases. */
698 tem = TREE_OPERAND (t, 0);
699 if ((INTEGRAL_TYPE_P (type)
700 && (TREE_CODE (tem) == NEGATE_EXPR
701 || (TREE_CODE (tem) == INTEGER_CST
702 && may_negate_without_overflow_p (tem))))
703 || !INTEGRAL_TYPE_P (type))
704 return fold_build2_loc (loc, TREE_CODE (t), type,
705 negate_expr (tem), TREE_OPERAND (t, 1));
706 }
707 break;
708
709 case NOP_EXPR:
710 /* Convert -((double)float) into (double)(-float). */
711 if (TREE_CODE (type) == REAL_TYPE)
712 {
713 tem = strip_float_extensions (t);
714 if (tem != t && negate_expr_p (tem))
715 return fold_convert_loc (loc, type, negate_expr (tem));
716 }
717 break;
718
719 case CALL_EXPR:
720 /* Negate -f(x) as f(-x). */
721 if (negate_mathfn_p (builtin_mathfn_code (t))
722 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
723 {
724 tree fndecl, arg;
725
726 fndecl = get_callee_fndecl (t);
727 arg = negate_expr (CALL_EXPR_ARG (t, 0));
728 return build_call_expr_loc (loc, fndecl, 1, arg);
729 }
730 break;
731
732 case RSHIFT_EXPR:
733 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
734 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
735 {
736 tree op1 = TREE_OPERAND (t, 1);
737 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
738 {
739 tree ntype = TYPE_UNSIGNED (type)
740 ? signed_type_for (type)
741 : unsigned_type_for (type);
742 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
743 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
744 return fold_convert_loc (loc, type, temp);
745 }
746 }
747 break;
748
749 default:
750 break;
751 }
752
753 return NULL_TREE;
754 }
755
756 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
757 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
758 return NULL_TREE. */
759
760 static tree
761 negate_expr (tree t)
762 {
763 tree type, tem;
764 location_t loc;
765
766 if (t == NULL_TREE)
767 return NULL_TREE;
768
769 loc = EXPR_LOCATION (t);
770 type = TREE_TYPE (t);
771 STRIP_SIGN_NOPS (t);
772
773 tem = fold_negate_expr (loc, t);
774 if (!tem)
775 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
776 return fold_convert_loc (loc, type, tem);
777 }
778 \f
779 /* Split a tree IN into a constant, literal and variable parts that could be
780 combined with CODE to make IN. "constant" means an expression with
781 TREE_CONSTANT but that isn't an actual constant. CODE must be a
782 commutative arithmetic operation. Store the constant part into *CONP,
783 the literal in *LITP and return the variable part. If a part isn't
784 present, set it to null. If the tree does not decompose in this way,
785 return the entire tree as the variable part and the other parts as null.
786
787 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
788 case, we negate an operand that was subtracted. Except if it is a
789 literal for which we use *MINUS_LITP instead.
790
791 If NEGATE_P is true, we are negating all of IN, again except a literal
792 for which we use *MINUS_LITP instead.
793
794 If IN is itself a literal or constant, return it as appropriate.
795
796 Note that we do not guarantee that any of the three values will be the
797 same type as IN, but they will have the same signedness and mode. */
798
799 static tree
800 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
801 tree *minus_litp, int negate_p)
802 {
803 tree var = 0;
804
805 *conp = 0;
806 *litp = 0;
807 *minus_litp = 0;
808
809 /* Strip any conversions that don't change the machine mode or signedness. */
810 STRIP_SIGN_NOPS (in);
811
812 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
813 || TREE_CODE (in) == FIXED_CST)
814 *litp = in;
815 else if (TREE_CODE (in) == code
816 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
817 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
818 /* We can associate addition and subtraction together (even
819 though the C standard doesn't say so) for integers because
820 the value is not affected. For reals, the value might be
821 affected, so we can't. */
822 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
823 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
824 {
825 tree op0 = TREE_OPERAND (in, 0);
826 tree op1 = TREE_OPERAND (in, 1);
827 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
828 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
829
830 /* First see if either of the operands is a literal, then a constant. */
831 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
832 || TREE_CODE (op0) == FIXED_CST)
833 *litp = op0, op0 = 0;
834 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
835 || TREE_CODE (op1) == FIXED_CST)
836 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
837
838 if (op0 != 0 && TREE_CONSTANT (op0))
839 *conp = op0, op0 = 0;
840 else if (op1 != 0 && TREE_CONSTANT (op1))
841 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
842
843 /* If we haven't dealt with either operand, this is not a case we can
844 decompose. Otherwise, VAR is either of the ones remaining, if any. */
845 if (op0 != 0 && op1 != 0)
846 var = in;
847 else if (op0 != 0)
848 var = op0;
849 else
850 var = op1, neg_var_p = neg1_p;
851
852 /* Now do any needed negations. */
853 if (neg_litp_p)
854 *minus_litp = *litp, *litp = 0;
855 if (neg_conp_p)
856 *conp = negate_expr (*conp);
857 if (neg_var_p)
858 var = negate_expr (var);
859 }
860 else if (TREE_CODE (in) == BIT_NOT_EXPR
861 && code == PLUS_EXPR)
862 {
863 /* -X - 1 is folded to ~X, undo that here. */
864 *minus_litp = build_one_cst (TREE_TYPE (in));
865 var = negate_expr (TREE_OPERAND (in, 0));
866 }
867 else if (TREE_CONSTANT (in))
868 *conp = in;
869 else
870 var = in;
871
872 if (negate_p)
873 {
874 if (*litp)
875 *minus_litp = *litp, *litp = 0;
876 else if (*minus_litp)
877 *litp = *minus_litp, *minus_litp = 0;
878 *conp = negate_expr (*conp);
879 var = negate_expr (var);
880 }
881
882 return var;
883 }
884
885 /* Re-associate trees split by the above function. T1 and T2 are
886 either expressions to associate or null. Return the new
887 expression, if any. LOC is the location of the new expression. If
888 we build an operation, do it in TYPE and with CODE. */
889
890 static tree
891 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
892 {
893 if (t1 == 0)
894 return t2;
895 else if (t2 == 0)
896 return t1;
897
898 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
899 try to fold this since we will have infinite recursion. But do
900 deal with any NEGATE_EXPRs. */
901 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
902 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
903 {
904 if (code == PLUS_EXPR)
905 {
906 if (TREE_CODE (t1) == NEGATE_EXPR)
907 return build2_loc (loc, MINUS_EXPR, type,
908 fold_convert_loc (loc, type, t2),
909 fold_convert_loc (loc, type,
910 TREE_OPERAND (t1, 0)));
911 else if (TREE_CODE (t2) == NEGATE_EXPR)
912 return build2_loc (loc, MINUS_EXPR, type,
913 fold_convert_loc (loc, type, t1),
914 fold_convert_loc (loc, type,
915 TREE_OPERAND (t2, 0)));
916 else if (integer_zerop (t2))
917 return fold_convert_loc (loc, type, t1);
918 }
919 else if (code == MINUS_EXPR)
920 {
921 if (integer_zerop (t2))
922 return fold_convert_loc (loc, type, t1);
923 }
924
925 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
926 fold_convert_loc (loc, type, t2));
927 }
928
929 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
930 fold_convert_loc (loc, type, t2));
931 }
932 \f
933 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
934 for use in int_const_binop, size_binop and size_diffop. */
935
936 static bool
937 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
938 {
939 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
940 return false;
941 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
942 return false;
943
944 switch (code)
945 {
946 case LSHIFT_EXPR:
947 case RSHIFT_EXPR:
948 case LROTATE_EXPR:
949 case RROTATE_EXPR:
950 return true;
951
952 default:
953 break;
954 }
955
956 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
957 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
958 && TYPE_MODE (type1) == TYPE_MODE (type2);
959 }
960
961
962 /* Combine two integer constants ARG1 and ARG2 under operation CODE
963 to produce a new constant. Return NULL_TREE if we don't know how
964 to evaluate CODE at compile-time. */
965
966 static tree
967 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
968 int overflowable)
969 {
970 wide_int res;
971 tree t;
972 tree type = TREE_TYPE (arg1);
973 signop sign = TYPE_SIGN (type);
974 bool overflow = false;
975
976 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
977 TYPE_SIGN (TREE_TYPE (parg2)));
978
979 switch (code)
980 {
981 case BIT_IOR_EXPR:
982 res = wi::bit_or (arg1, arg2);
983 break;
984
985 case BIT_XOR_EXPR:
986 res = wi::bit_xor (arg1, arg2);
987 break;
988
989 case BIT_AND_EXPR:
990 res = wi::bit_and (arg1, arg2);
991 break;
992
993 case RSHIFT_EXPR:
994 case LSHIFT_EXPR:
995 if (wi::neg_p (arg2))
996 {
997 arg2 = -arg2;
998 if (code == RSHIFT_EXPR)
999 code = LSHIFT_EXPR;
1000 else
1001 code = RSHIFT_EXPR;
1002 }
1003
1004 if (code == RSHIFT_EXPR)
1005 /* It's unclear from the C standard whether shifts can overflow.
1006 The following code ignores overflow; perhaps a C standard
1007 interpretation ruling is needed. */
1008 res = wi::rshift (arg1, arg2, sign);
1009 else
1010 res = wi::lshift (arg1, arg2);
1011 break;
1012
1013 case RROTATE_EXPR:
1014 case LROTATE_EXPR:
1015 if (wi::neg_p (arg2))
1016 {
1017 arg2 = -arg2;
1018 if (code == RROTATE_EXPR)
1019 code = LROTATE_EXPR;
1020 else
1021 code = RROTATE_EXPR;
1022 }
1023
1024 if (code == RROTATE_EXPR)
1025 res = wi::rrotate (arg1, arg2);
1026 else
1027 res = wi::lrotate (arg1, arg2);
1028 break;
1029
1030 case PLUS_EXPR:
1031 res = wi::add (arg1, arg2, sign, &overflow);
1032 break;
1033
1034 case MINUS_EXPR:
1035 res = wi::sub (arg1, arg2, sign, &overflow);
1036 break;
1037
1038 case MULT_EXPR:
1039 res = wi::mul (arg1, arg2, sign, &overflow);
1040 break;
1041
1042 case MULT_HIGHPART_EXPR:
1043 res = wi::mul_high (arg1, arg2, sign);
1044 break;
1045
1046 case TRUNC_DIV_EXPR:
1047 case EXACT_DIV_EXPR:
1048 if (arg2 == 0)
1049 return NULL_TREE;
1050 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1051 break;
1052
1053 case FLOOR_DIV_EXPR:
1054 if (arg2 == 0)
1055 return NULL_TREE;
1056 res = wi::div_floor (arg1, arg2, sign, &overflow);
1057 break;
1058
1059 case CEIL_DIV_EXPR:
1060 if (arg2 == 0)
1061 return NULL_TREE;
1062 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1063 break;
1064
1065 case ROUND_DIV_EXPR:
1066 if (arg2 == 0)
1067 return NULL_TREE;
1068 res = wi::div_round (arg1, arg2, sign, &overflow);
1069 break;
1070
1071 case TRUNC_MOD_EXPR:
1072 if (arg2 == 0)
1073 return NULL_TREE;
1074 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1075 break;
1076
1077 case FLOOR_MOD_EXPR:
1078 if (arg2 == 0)
1079 return NULL_TREE;
1080 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1081 break;
1082
1083 case CEIL_MOD_EXPR:
1084 if (arg2 == 0)
1085 return NULL_TREE;
1086 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1087 break;
1088
1089 case ROUND_MOD_EXPR:
1090 if (arg2 == 0)
1091 return NULL_TREE;
1092 res = wi::mod_round (arg1, arg2, sign, &overflow);
1093 break;
1094
1095 case MIN_EXPR:
1096 res = wi::min (arg1, arg2, sign);
1097 break;
1098
1099 case MAX_EXPR:
1100 res = wi::max (arg1, arg2, sign);
1101 break;
1102
1103 default:
1104 return NULL_TREE;
1105 }
1106
1107 t = force_fit_type (type, res, overflowable,
1108 (((sign == SIGNED || overflowable == -1)
1109 && overflow)
1110 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1111
1112 return t;
1113 }
1114
1115 tree
1116 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1117 {
1118 return int_const_binop_1 (code, arg1, arg2, 1);
1119 }
1120
1121 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1122 constant. We assume ARG1 and ARG2 have the same data type, or at least
1123 are the same kind of constant and the same machine mode. Return zero if
1124 combining the constants is not allowed in the current operating mode. */
1125
1126 static tree
1127 const_binop (enum tree_code code, tree arg1, tree arg2)
1128 {
1129 /* Sanity check for the recursive cases. */
1130 if (!arg1 || !arg2)
1131 return NULL_TREE;
1132
1133 STRIP_NOPS (arg1);
1134 STRIP_NOPS (arg2);
1135
1136 if (TREE_CODE (arg1) == INTEGER_CST)
1137 return int_const_binop (code, arg1, arg2);
1138
1139 if (TREE_CODE (arg1) == REAL_CST)
1140 {
1141 machine_mode mode;
1142 REAL_VALUE_TYPE d1;
1143 REAL_VALUE_TYPE d2;
1144 REAL_VALUE_TYPE value;
1145 REAL_VALUE_TYPE result;
1146 bool inexact;
1147 tree t, type;
1148
1149 /* The following codes are handled by real_arithmetic. */
1150 switch (code)
1151 {
1152 case PLUS_EXPR:
1153 case MINUS_EXPR:
1154 case MULT_EXPR:
1155 case RDIV_EXPR:
1156 case MIN_EXPR:
1157 case MAX_EXPR:
1158 break;
1159
1160 default:
1161 return NULL_TREE;
1162 }
1163
1164 d1 = TREE_REAL_CST (arg1);
1165 d2 = TREE_REAL_CST (arg2);
1166
1167 type = TREE_TYPE (arg1);
1168 mode = TYPE_MODE (type);
1169
1170 /* Don't perform operation if we honor signaling NaNs and
1171 either operand is a NaN. */
1172 if (HONOR_SNANS (mode)
1173 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1174 return NULL_TREE;
1175
1176 /* Don't perform operation if it would raise a division
1177 by zero exception. */
1178 if (code == RDIV_EXPR
1179 && REAL_VALUES_EQUAL (d2, dconst0)
1180 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1181 return NULL_TREE;
1182
1183 /* If either operand is a NaN, just return it. Otherwise, set up
1184 for floating-point trap; we return an overflow. */
1185 if (REAL_VALUE_ISNAN (d1))
1186 return arg1;
1187 else if (REAL_VALUE_ISNAN (d2))
1188 return arg2;
1189
1190 inexact = real_arithmetic (&value, code, &d1, &d2);
1191 real_convert (&result, mode, &value);
1192
1193 /* Don't constant fold this floating point operation if
1194 the result has overflowed and flag_trapping_math. */
1195 if (flag_trapping_math
1196 && MODE_HAS_INFINITIES (mode)
1197 && REAL_VALUE_ISINF (result)
1198 && !REAL_VALUE_ISINF (d1)
1199 && !REAL_VALUE_ISINF (d2))
1200 return NULL_TREE;
1201
1202 /* Don't constant fold this floating point operation if the
1203 result may dependent upon the run-time rounding mode and
1204 flag_rounding_math is set, or if GCC's software emulation
1205 is unable to accurately represent the result. */
1206 if ((flag_rounding_math
1207 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1208 && (inexact || !real_identical (&result, &value)))
1209 return NULL_TREE;
1210
1211 t = build_real (type, result);
1212
1213 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1214 return t;
1215 }
1216
1217 if (TREE_CODE (arg1) == FIXED_CST)
1218 {
1219 FIXED_VALUE_TYPE f1;
1220 FIXED_VALUE_TYPE f2;
1221 FIXED_VALUE_TYPE result;
1222 tree t, type;
1223 int sat_p;
1224 bool overflow_p;
1225
1226 /* The following codes are handled by fixed_arithmetic. */
1227 switch (code)
1228 {
1229 case PLUS_EXPR:
1230 case MINUS_EXPR:
1231 case MULT_EXPR:
1232 case TRUNC_DIV_EXPR:
1233 f2 = TREE_FIXED_CST (arg2);
1234 break;
1235
1236 case LSHIFT_EXPR:
1237 case RSHIFT_EXPR:
1238 {
1239 wide_int w2 = arg2;
1240 f2.data.high = w2.elt (1);
1241 f2.data.low = w2.elt (0);
1242 f2.mode = SImode;
1243 }
1244 break;
1245
1246 default:
1247 return NULL_TREE;
1248 }
1249
1250 f1 = TREE_FIXED_CST (arg1);
1251 type = TREE_TYPE (arg1);
1252 sat_p = TYPE_SATURATING (type);
1253 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1254 t = build_fixed (type, result);
1255 /* Propagate overflow flags. */
1256 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1257 TREE_OVERFLOW (t) = 1;
1258 return t;
1259 }
1260
1261 if (TREE_CODE (arg1) == COMPLEX_CST)
1262 {
1263 tree type = TREE_TYPE (arg1);
1264 tree r1 = TREE_REALPART (arg1);
1265 tree i1 = TREE_IMAGPART (arg1);
1266 tree r2 = TREE_REALPART (arg2);
1267 tree i2 = TREE_IMAGPART (arg2);
1268 tree real, imag;
1269
1270 switch (code)
1271 {
1272 case PLUS_EXPR:
1273 case MINUS_EXPR:
1274 real = const_binop (code, r1, r2);
1275 imag = const_binop (code, i1, i2);
1276 break;
1277
1278 case MULT_EXPR:
1279 if (COMPLEX_FLOAT_TYPE_P (type))
1280 return do_mpc_arg2 (arg1, arg2, type,
1281 /* do_nonfinite= */ folding_initializer,
1282 mpc_mul);
1283
1284 real = const_binop (MINUS_EXPR,
1285 const_binop (MULT_EXPR, r1, r2),
1286 const_binop (MULT_EXPR, i1, i2));
1287 imag = const_binop (PLUS_EXPR,
1288 const_binop (MULT_EXPR, r1, i2),
1289 const_binop (MULT_EXPR, i1, r2));
1290 break;
1291
1292 case RDIV_EXPR:
1293 if (COMPLEX_FLOAT_TYPE_P (type))
1294 return do_mpc_arg2 (arg1, arg2, type,
1295 /* do_nonfinite= */ folding_initializer,
1296 mpc_div);
1297 /* Fallthru ... */
1298 case TRUNC_DIV_EXPR:
1299 case CEIL_DIV_EXPR:
1300 case FLOOR_DIV_EXPR:
1301 case ROUND_DIV_EXPR:
1302 if (flag_complex_method == 0)
1303 {
1304 /* Keep this algorithm in sync with
1305 tree-complex.c:expand_complex_div_straight().
1306
1307 Expand complex division to scalars, straightforward algorithm.
1308 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1309 t = br*br + bi*bi
1310 */
1311 tree magsquared
1312 = const_binop (PLUS_EXPR,
1313 const_binop (MULT_EXPR, r2, r2),
1314 const_binop (MULT_EXPR, i2, i2));
1315 tree t1
1316 = const_binop (PLUS_EXPR,
1317 const_binop (MULT_EXPR, r1, r2),
1318 const_binop (MULT_EXPR, i1, i2));
1319 tree t2
1320 = const_binop (MINUS_EXPR,
1321 const_binop (MULT_EXPR, i1, r2),
1322 const_binop (MULT_EXPR, r1, i2));
1323
1324 real = const_binop (code, t1, magsquared);
1325 imag = const_binop (code, t2, magsquared);
1326 }
1327 else
1328 {
1329 /* Keep this algorithm in sync with
1330 tree-complex.c:expand_complex_div_wide().
1331
1332 Expand complex division to scalars, modified algorithm to minimize
1333 overflow with wide input ranges. */
1334 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1335 fold_abs_const (r2, TREE_TYPE (type)),
1336 fold_abs_const (i2, TREE_TYPE (type)));
1337
1338 if (integer_nonzerop (compare))
1339 {
1340 /* In the TRUE branch, we compute
1341 ratio = br/bi;
1342 div = (br * ratio) + bi;
1343 tr = (ar * ratio) + ai;
1344 ti = (ai * ratio) - ar;
1345 tr = tr / div;
1346 ti = ti / div; */
1347 tree ratio = const_binop (code, r2, i2);
1348 tree div = const_binop (PLUS_EXPR, i2,
1349 const_binop (MULT_EXPR, r2, ratio));
1350 real = const_binop (MULT_EXPR, r1, ratio);
1351 real = const_binop (PLUS_EXPR, real, i1);
1352 real = const_binop (code, real, div);
1353
1354 imag = const_binop (MULT_EXPR, i1, ratio);
1355 imag = const_binop (MINUS_EXPR, imag, r1);
1356 imag = const_binop (code, imag, div);
1357 }
1358 else
1359 {
1360 /* In the FALSE branch, we compute
1361 ratio = d/c;
1362 divisor = (d * ratio) + c;
1363 tr = (b * ratio) + a;
1364 ti = b - (a * ratio);
1365 tr = tr / div;
1366 ti = ti / div; */
1367 tree ratio = const_binop (code, i2, r2);
1368 tree div = const_binop (PLUS_EXPR, r2,
1369 const_binop (MULT_EXPR, i2, ratio));
1370
1371 real = const_binop (MULT_EXPR, i1, ratio);
1372 real = const_binop (PLUS_EXPR, real, r1);
1373 real = const_binop (code, real, div);
1374
1375 imag = const_binop (MULT_EXPR, r1, ratio);
1376 imag = const_binop (MINUS_EXPR, i1, imag);
1377 imag = const_binop (code, imag, div);
1378 }
1379 }
1380 break;
1381
1382 default:
1383 return NULL_TREE;
1384 }
1385
1386 if (real && imag)
1387 return build_complex (type, real, imag);
1388 }
1389
1390 if (TREE_CODE (arg1) == VECTOR_CST
1391 && TREE_CODE (arg2) == VECTOR_CST)
1392 {
1393 tree type = TREE_TYPE (arg1);
1394 int count = TYPE_VECTOR_SUBPARTS (type), i;
1395 tree *elts = XALLOCAVEC (tree, count);
1396
1397 for (i = 0; i < count; i++)
1398 {
1399 tree elem1 = VECTOR_CST_ELT (arg1, i);
1400 tree elem2 = VECTOR_CST_ELT (arg2, i);
1401
1402 elts[i] = const_binop (code, elem1, elem2);
1403
1404 /* It is possible that const_binop cannot handle the given
1405 code and return NULL_TREE */
1406 if (elts[i] == NULL_TREE)
1407 return NULL_TREE;
1408 }
1409
1410 return build_vector (type, elts);
1411 }
1412
1413 /* Shifts allow a scalar offset for a vector. */
1414 if (TREE_CODE (arg1) == VECTOR_CST
1415 && TREE_CODE (arg2) == INTEGER_CST)
1416 {
1417 tree type = TREE_TYPE (arg1);
1418 int count = TYPE_VECTOR_SUBPARTS (type), i;
1419 tree *elts = XALLOCAVEC (tree, count);
1420
1421 if (code == VEC_RSHIFT_EXPR)
1422 {
1423 if (!tree_fits_uhwi_p (arg2))
1424 return NULL_TREE;
1425
1426 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1427 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1428 unsigned HOST_WIDE_INT innerc
1429 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1430 if (shiftc >= outerc || (shiftc % innerc) != 0)
1431 return NULL_TREE;
1432 int offset = shiftc / innerc;
1433 /* The direction of VEC_RSHIFT_EXPR is endian dependent.
1434 For reductions, if !BYTES_BIG_ENDIAN then compiler picks first
1435 vector element, but last element if BYTES_BIG_ENDIAN. */
1436 if (BYTES_BIG_ENDIAN)
1437 offset = -offset;
1438 tree zero = build_zero_cst (TREE_TYPE (type));
1439 for (i = 0; i < count; i++)
1440 {
1441 if (i + offset < 0 || i + offset >= count)
1442 elts[i] = zero;
1443 else
1444 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1445 }
1446 }
1447 else
1448 for (i = 0; i < count; i++)
1449 {
1450 tree elem1 = VECTOR_CST_ELT (arg1, i);
1451
1452 elts[i] = const_binop (code, elem1, arg2);
1453
1454 /* It is possible that const_binop cannot handle the given
1455 code and return NULL_TREE */
1456 if (elts[i] == NULL_TREE)
1457 return NULL_TREE;
1458 }
1459
1460 return build_vector (type, elts);
1461 }
1462 return NULL_TREE;
1463 }
1464
1465 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1466 indicates which particular sizetype to create. */
1467
1468 tree
1469 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1470 {
1471 return build_int_cst (sizetype_tab[(int) kind], number);
1472 }
1473 \f
1474 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1475 is a tree code. The type of the result is taken from the operands.
1476 Both must be equivalent integer types, ala int_binop_types_match_p.
1477 If the operands are constant, so is the result. */
1478
1479 tree
1480 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1481 {
1482 tree type = TREE_TYPE (arg0);
1483
1484 if (arg0 == error_mark_node || arg1 == error_mark_node)
1485 return error_mark_node;
1486
1487 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1488 TREE_TYPE (arg1)));
1489
1490 /* Handle the special case of two integer constants faster. */
1491 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1492 {
1493 /* And some specific cases even faster than that. */
1494 if (code == PLUS_EXPR)
1495 {
1496 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1497 return arg1;
1498 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1499 return arg0;
1500 }
1501 else if (code == MINUS_EXPR)
1502 {
1503 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1504 return arg0;
1505 }
1506 else if (code == MULT_EXPR)
1507 {
1508 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1509 return arg1;
1510 }
1511
1512 /* Handle general case of two integer constants. For sizetype
1513 constant calculations we always want to know about overflow,
1514 even in the unsigned case. */
1515 return int_const_binop_1 (code, arg0, arg1, -1);
1516 }
1517
1518 return fold_build2_loc (loc, code, type, arg0, arg1);
1519 }
1520
1521 /* Given two values, either both of sizetype or both of bitsizetype,
1522 compute the difference between the two values. Return the value
1523 in signed type corresponding to the type of the operands. */
1524
1525 tree
1526 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1527 {
1528 tree type = TREE_TYPE (arg0);
1529 tree ctype;
1530
1531 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1532 TREE_TYPE (arg1)));
1533
1534 /* If the type is already signed, just do the simple thing. */
1535 if (!TYPE_UNSIGNED (type))
1536 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1537
1538 if (type == sizetype)
1539 ctype = ssizetype;
1540 else if (type == bitsizetype)
1541 ctype = sbitsizetype;
1542 else
1543 ctype = signed_type_for (type);
1544
1545 /* If either operand is not a constant, do the conversions to the signed
1546 type and subtract. The hardware will do the right thing with any
1547 overflow in the subtraction. */
1548 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1549 return size_binop_loc (loc, MINUS_EXPR,
1550 fold_convert_loc (loc, ctype, arg0),
1551 fold_convert_loc (loc, ctype, arg1));
1552
1553 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1554 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1555 overflow) and negate (which can't either). Special-case a result
1556 of zero while we're here. */
1557 if (tree_int_cst_equal (arg0, arg1))
1558 return build_int_cst (ctype, 0);
1559 else if (tree_int_cst_lt (arg1, arg0))
1560 return fold_convert_loc (loc, ctype,
1561 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1562 else
1563 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1564 fold_convert_loc (loc, ctype,
1565 size_binop_loc (loc,
1566 MINUS_EXPR,
1567 arg1, arg0)));
1568 }
1569 \f
1570 /* A subroutine of fold_convert_const handling conversions of an
1571 INTEGER_CST to another integer type. */
1572
1573 static tree
1574 fold_convert_const_int_from_int (tree type, const_tree arg1)
1575 {
1576 /* Given an integer constant, make new constant with new type,
1577 appropriately sign-extended or truncated. Use widest_int
1578 so that any extension is done according ARG1's type. */
1579 return force_fit_type (type, wi::to_widest (arg1),
1580 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1581 TREE_OVERFLOW (arg1));
1582 }
1583
1584 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1585 to an integer type. */
1586
1587 static tree
1588 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1589 {
1590 bool overflow = false;
1591 tree t;
1592
1593 /* The following code implements the floating point to integer
1594 conversion rules required by the Java Language Specification,
1595 that IEEE NaNs are mapped to zero and values that overflow
1596 the target precision saturate, i.e. values greater than
1597 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1598 are mapped to INT_MIN. These semantics are allowed by the
1599 C and C++ standards that simply state that the behavior of
1600 FP-to-integer conversion is unspecified upon overflow. */
1601
1602 wide_int val;
1603 REAL_VALUE_TYPE r;
1604 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1605
1606 switch (code)
1607 {
1608 case FIX_TRUNC_EXPR:
1609 real_trunc (&r, VOIDmode, &x);
1610 break;
1611
1612 default:
1613 gcc_unreachable ();
1614 }
1615
1616 /* If R is NaN, return zero and show we have an overflow. */
1617 if (REAL_VALUE_ISNAN (r))
1618 {
1619 overflow = true;
1620 val = wi::zero (TYPE_PRECISION (type));
1621 }
1622
1623 /* See if R is less than the lower bound or greater than the
1624 upper bound. */
1625
1626 if (! overflow)
1627 {
1628 tree lt = TYPE_MIN_VALUE (type);
1629 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1630 if (REAL_VALUES_LESS (r, l))
1631 {
1632 overflow = true;
1633 val = lt;
1634 }
1635 }
1636
1637 if (! overflow)
1638 {
1639 tree ut = TYPE_MAX_VALUE (type);
1640 if (ut)
1641 {
1642 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1643 if (REAL_VALUES_LESS (u, r))
1644 {
1645 overflow = true;
1646 val = ut;
1647 }
1648 }
1649 }
1650
1651 if (! overflow)
1652 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1653
1654 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1655 return t;
1656 }
1657
1658 /* A subroutine of fold_convert_const handling conversions of a
1659 FIXED_CST to an integer type. */
1660
1661 static tree
1662 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1663 {
1664 tree t;
1665 double_int temp, temp_trunc;
1666 unsigned int mode;
1667
1668 /* Right shift FIXED_CST to temp by fbit. */
1669 temp = TREE_FIXED_CST (arg1).data;
1670 mode = TREE_FIXED_CST (arg1).mode;
1671 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1672 {
1673 temp = temp.rshift (GET_MODE_FBIT (mode),
1674 HOST_BITS_PER_DOUBLE_INT,
1675 SIGNED_FIXED_POINT_MODE_P (mode));
1676
1677 /* Left shift temp to temp_trunc by fbit. */
1678 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1679 HOST_BITS_PER_DOUBLE_INT,
1680 SIGNED_FIXED_POINT_MODE_P (mode));
1681 }
1682 else
1683 {
1684 temp = double_int_zero;
1685 temp_trunc = double_int_zero;
1686 }
1687
1688 /* If FIXED_CST is negative, we need to round the value toward 0.
1689 By checking if the fractional bits are not zero to add 1 to temp. */
1690 if (SIGNED_FIXED_POINT_MODE_P (mode)
1691 && temp_trunc.is_negative ()
1692 && TREE_FIXED_CST (arg1).data != temp_trunc)
1693 temp += double_int_one;
1694
1695 /* Given a fixed-point constant, make new constant with new type,
1696 appropriately sign-extended or truncated. */
1697 t = force_fit_type (type, temp, -1,
1698 (temp.is_negative ()
1699 && (TYPE_UNSIGNED (type)
1700 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1701 | TREE_OVERFLOW (arg1));
1702
1703 return t;
1704 }
1705
1706 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1707 to another floating point type. */
1708
1709 static tree
1710 fold_convert_const_real_from_real (tree type, const_tree arg1)
1711 {
1712 REAL_VALUE_TYPE value;
1713 tree t;
1714
1715 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1716 t = build_real (type, value);
1717
1718 /* If converting an infinity or NAN to a representation that doesn't
1719 have one, set the overflow bit so that we can produce some kind of
1720 error message at the appropriate point if necessary. It's not the
1721 most user-friendly message, but it's better than nothing. */
1722 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1723 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1724 TREE_OVERFLOW (t) = 1;
1725 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1726 && !MODE_HAS_NANS (TYPE_MODE (type)))
1727 TREE_OVERFLOW (t) = 1;
1728 /* Regular overflow, conversion produced an infinity in a mode that
1729 can't represent them. */
1730 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1731 && REAL_VALUE_ISINF (value)
1732 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1733 TREE_OVERFLOW (t) = 1;
1734 else
1735 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1736 return t;
1737 }
1738
1739 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1740 to a floating point type. */
1741
1742 static tree
1743 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1744 {
1745 REAL_VALUE_TYPE value;
1746 tree t;
1747
1748 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1749 t = build_real (type, value);
1750
1751 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1752 return t;
1753 }
1754
1755 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1756 to another fixed-point type. */
1757
1758 static tree
1759 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1760 {
1761 FIXED_VALUE_TYPE value;
1762 tree t;
1763 bool overflow_p;
1764
1765 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1766 TYPE_SATURATING (type));
1767 t = build_fixed (type, value);
1768
1769 /* Propagate overflow flags. */
1770 if (overflow_p | TREE_OVERFLOW (arg1))
1771 TREE_OVERFLOW (t) = 1;
1772 return t;
1773 }
1774
1775 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1776 to a fixed-point type. */
1777
1778 static tree
1779 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1780 {
1781 FIXED_VALUE_TYPE value;
1782 tree t;
1783 bool overflow_p;
1784 double_int di;
1785
1786 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1787
1788 di.low = TREE_INT_CST_ELT (arg1, 0);
1789 if (TREE_INT_CST_NUNITS (arg1) == 1)
1790 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1791 else
1792 di.high = TREE_INT_CST_ELT (arg1, 1);
1793
1794 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1795 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1796 TYPE_SATURATING (type));
1797 t = build_fixed (type, value);
1798
1799 /* Propagate overflow flags. */
1800 if (overflow_p | TREE_OVERFLOW (arg1))
1801 TREE_OVERFLOW (t) = 1;
1802 return t;
1803 }
1804
1805 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1806 to a fixed-point type. */
1807
1808 static tree
1809 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1810 {
1811 FIXED_VALUE_TYPE value;
1812 tree t;
1813 bool overflow_p;
1814
1815 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1816 &TREE_REAL_CST (arg1),
1817 TYPE_SATURATING (type));
1818 t = build_fixed (type, value);
1819
1820 /* Propagate overflow flags. */
1821 if (overflow_p | TREE_OVERFLOW (arg1))
1822 TREE_OVERFLOW (t) = 1;
1823 return t;
1824 }
1825
1826 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1827 type TYPE. If no simplification can be done return NULL_TREE. */
1828
1829 static tree
1830 fold_convert_const (enum tree_code code, tree type, tree arg1)
1831 {
1832 if (TREE_TYPE (arg1) == type)
1833 return arg1;
1834
1835 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1836 || TREE_CODE (type) == OFFSET_TYPE)
1837 {
1838 if (TREE_CODE (arg1) == INTEGER_CST)
1839 return fold_convert_const_int_from_int (type, arg1);
1840 else if (TREE_CODE (arg1) == REAL_CST)
1841 return fold_convert_const_int_from_real (code, type, arg1);
1842 else if (TREE_CODE (arg1) == FIXED_CST)
1843 return fold_convert_const_int_from_fixed (type, arg1);
1844 }
1845 else if (TREE_CODE (type) == REAL_TYPE)
1846 {
1847 if (TREE_CODE (arg1) == INTEGER_CST)
1848 return build_real_from_int_cst (type, arg1);
1849 else if (TREE_CODE (arg1) == REAL_CST)
1850 return fold_convert_const_real_from_real (type, arg1);
1851 else if (TREE_CODE (arg1) == FIXED_CST)
1852 return fold_convert_const_real_from_fixed (type, arg1);
1853 }
1854 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1855 {
1856 if (TREE_CODE (arg1) == FIXED_CST)
1857 return fold_convert_const_fixed_from_fixed (type, arg1);
1858 else if (TREE_CODE (arg1) == INTEGER_CST)
1859 return fold_convert_const_fixed_from_int (type, arg1);
1860 else if (TREE_CODE (arg1) == REAL_CST)
1861 return fold_convert_const_fixed_from_real (type, arg1);
1862 }
1863 return NULL_TREE;
1864 }
1865
1866 /* Construct a vector of zero elements of vector type TYPE. */
1867
1868 static tree
1869 build_zero_vector (tree type)
1870 {
1871 tree t;
1872
1873 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1874 return build_vector_from_val (type, t);
1875 }
1876
1877 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1878
1879 bool
1880 fold_convertible_p (const_tree type, const_tree arg)
1881 {
1882 tree orig = TREE_TYPE (arg);
1883
1884 if (type == orig)
1885 return true;
1886
1887 if (TREE_CODE (arg) == ERROR_MARK
1888 || TREE_CODE (type) == ERROR_MARK
1889 || TREE_CODE (orig) == ERROR_MARK)
1890 return false;
1891
1892 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1893 return true;
1894
1895 switch (TREE_CODE (type))
1896 {
1897 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1898 case POINTER_TYPE: case REFERENCE_TYPE:
1899 case OFFSET_TYPE:
1900 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1901 || TREE_CODE (orig) == OFFSET_TYPE)
1902 return true;
1903 return (TREE_CODE (orig) == VECTOR_TYPE
1904 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1905
1906 case REAL_TYPE:
1907 case FIXED_POINT_TYPE:
1908 case COMPLEX_TYPE:
1909 case VECTOR_TYPE:
1910 case VOID_TYPE:
1911 return TREE_CODE (type) == TREE_CODE (orig);
1912
1913 default:
1914 return false;
1915 }
1916 }
1917
1918 /* Convert expression ARG to type TYPE. Used by the middle-end for
1919 simple conversions in preference to calling the front-end's convert. */
1920
1921 tree
1922 fold_convert_loc (location_t loc, tree type, tree arg)
1923 {
1924 tree orig = TREE_TYPE (arg);
1925 tree tem;
1926
1927 if (type == orig)
1928 return arg;
1929
1930 if (TREE_CODE (arg) == ERROR_MARK
1931 || TREE_CODE (type) == ERROR_MARK
1932 || TREE_CODE (orig) == ERROR_MARK)
1933 return error_mark_node;
1934
1935 switch (TREE_CODE (type))
1936 {
1937 case POINTER_TYPE:
1938 case REFERENCE_TYPE:
1939 /* Handle conversions between pointers to different address spaces. */
1940 if (POINTER_TYPE_P (orig)
1941 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1942 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1943 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1944 /* fall through */
1945
1946 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1947 case OFFSET_TYPE:
1948 if (TREE_CODE (arg) == INTEGER_CST)
1949 {
1950 tem = fold_convert_const (NOP_EXPR, type, arg);
1951 if (tem != NULL_TREE)
1952 return tem;
1953 }
1954 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1955 || TREE_CODE (orig) == OFFSET_TYPE)
1956 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1957 if (TREE_CODE (orig) == COMPLEX_TYPE)
1958 return fold_convert_loc (loc, type,
1959 fold_build1_loc (loc, REALPART_EXPR,
1960 TREE_TYPE (orig), arg));
1961 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1962 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1963 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1964
1965 case REAL_TYPE:
1966 if (TREE_CODE (arg) == INTEGER_CST)
1967 {
1968 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1969 if (tem != NULL_TREE)
1970 return tem;
1971 }
1972 else if (TREE_CODE (arg) == REAL_CST)
1973 {
1974 tem = fold_convert_const (NOP_EXPR, type, arg);
1975 if (tem != NULL_TREE)
1976 return tem;
1977 }
1978 else if (TREE_CODE (arg) == FIXED_CST)
1979 {
1980 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1981 if (tem != NULL_TREE)
1982 return tem;
1983 }
1984
1985 switch (TREE_CODE (orig))
1986 {
1987 case INTEGER_TYPE:
1988 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1989 case POINTER_TYPE: case REFERENCE_TYPE:
1990 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1991
1992 case REAL_TYPE:
1993 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1994
1995 case FIXED_POINT_TYPE:
1996 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1997
1998 case COMPLEX_TYPE:
1999 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2000 return fold_convert_loc (loc, type, tem);
2001
2002 default:
2003 gcc_unreachable ();
2004 }
2005
2006 case FIXED_POINT_TYPE:
2007 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2008 || TREE_CODE (arg) == REAL_CST)
2009 {
2010 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2011 if (tem != NULL_TREE)
2012 goto fold_convert_exit;
2013 }
2014
2015 switch (TREE_CODE (orig))
2016 {
2017 case FIXED_POINT_TYPE:
2018 case INTEGER_TYPE:
2019 case ENUMERAL_TYPE:
2020 case BOOLEAN_TYPE:
2021 case REAL_TYPE:
2022 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2023
2024 case COMPLEX_TYPE:
2025 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2026 return fold_convert_loc (loc, type, tem);
2027
2028 default:
2029 gcc_unreachable ();
2030 }
2031
2032 case COMPLEX_TYPE:
2033 switch (TREE_CODE (orig))
2034 {
2035 case INTEGER_TYPE:
2036 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2037 case POINTER_TYPE: case REFERENCE_TYPE:
2038 case REAL_TYPE:
2039 case FIXED_POINT_TYPE:
2040 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2041 fold_convert_loc (loc, TREE_TYPE (type), arg),
2042 fold_convert_loc (loc, TREE_TYPE (type),
2043 integer_zero_node));
2044 case COMPLEX_TYPE:
2045 {
2046 tree rpart, ipart;
2047
2048 if (TREE_CODE (arg) == COMPLEX_EXPR)
2049 {
2050 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2051 TREE_OPERAND (arg, 0));
2052 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2053 TREE_OPERAND (arg, 1));
2054 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2055 }
2056
2057 arg = save_expr (arg);
2058 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2059 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2060 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2061 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2062 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2063 }
2064
2065 default:
2066 gcc_unreachable ();
2067 }
2068
2069 case VECTOR_TYPE:
2070 if (integer_zerop (arg))
2071 return build_zero_vector (type);
2072 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2073 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2074 || TREE_CODE (orig) == VECTOR_TYPE);
2075 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2076
2077 case VOID_TYPE:
2078 tem = fold_ignored_result (arg);
2079 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2080
2081 default:
2082 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2083 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2084 gcc_unreachable ();
2085 }
2086 fold_convert_exit:
2087 protected_set_expr_location_unshare (tem, loc);
2088 return tem;
2089 }
2090 \f
2091 /* Return false if expr can be assumed not to be an lvalue, true
2092 otherwise. */
2093
2094 static bool
2095 maybe_lvalue_p (const_tree x)
2096 {
2097 /* We only need to wrap lvalue tree codes. */
2098 switch (TREE_CODE (x))
2099 {
2100 case VAR_DECL:
2101 case PARM_DECL:
2102 case RESULT_DECL:
2103 case LABEL_DECL:
2104 case FUNCTION_DECL:
2105 case SSA_NAME:
2106
2107 case COMPONENT_REF:
2108 case MEM_REF:
2109 case INDIRECT_REF:
2110 case ARRAY_REF:
2111 case ARRAY_RANGE_REF:
2112 case BIT_FIELD_REF:
2113 case OBJ_TYPE_REF:
2114
2115 case REALPART_EXPR:
2116 case IMAGPART_EXPR:
2117 case PREINCREMENT_EXPR:
2118 case PREDECREMENT_EXPR:
2119 case SAVE_EXPR:
2120 case TRY_CATCH_EXPR:
2121 case WITH_CLEANUP_EXPR:
2122 case COMPOUND_EXPR:
2123 case MODIFY_EXPR:
2124 case TARGET_EXPR:
2125 case COND_EXPR:
2126 case BIND_EXPR:
2127 break;
2128
2129 default:
2130 /* Assume the worst for front-end tree codes. */
2131 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2132 break;
2133 return false;
2134 }
2135
2136 return true;
2137 }
2138
2139 /* Return an expr equal to X but certainly not valid as an lvalue. */
2140
2141 tree
2142 non_lvalue_loc (location_t loc, tree x)
2143 {
2144 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2145 us. */
2146 if (in_gimple_form)
2147 return x;
2148
2149 if (! maybe_lvalue_p (x))
2150 return x;
2151 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2152 }
2153
2154 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2155 Zero means allow extended lvalues. */
2156
2157 int pedantic_lvalues;
2158
2159 /* When pedantic, return an expr equal to X but certainly not valid as a
2160 pedantic lvalue. Otherwise, return X. */
2161
2162 static tree
2163 pedantic_non_lvalue_loc (location_t loc, tree x)
2164 {
2165 if (pedantic_lvalues)
2166 return non_lvalue_loc (loc, x);
2167
2168 return protected_set_expr_location_unshare (x, loc);
2169 }
2170 \f
2171 /* Given a tree comparison code, return the code that is the logical inverse.
2172 It is generally not safe to do this for floating-point comparisons, except
2173 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2174 ERROR_MARK in this case. */
2175
2176 enum tree_code
2177 invert_tree_comparison (enum tree_code code, bool honor_nans)
2178 {
2179 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2180 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2181 return ERROR_MARK;
2182
2183 switch (code)
2184 {
2185 case EQ_EXPR:
2186 return NE_EXPR;
2187 case NE_EXPR:
2188 return EQ_EXPR;
2189 case GT_EXPR:
2190 return honor_nans ? UNLE_EXPR : LE_EXPR;
2191 case GE_EXPR:
2192 return honor_nans ? UNLT_EXPR : LT_EXPR;
2193 case LT_EXPR:
2194 return honor_nans ? UNGE_EXPR : GE_EXPR;
2195 case LE_EXPR:
2196 return honor_nans ? UNGT_EXPR : GT_EXPR;
2197 case LTGT_EXPR:
2198 return UNEQ_EXPR;
2199 case UNEQ_EXPR:
2200 return LTGT_EXPR;
2201 case UNGT_EXPR:
2202 return LE_EXPR;
2203 case UNGE_EXPR:
2204 return LT_EXPR;
2205 case UNLT_EXPR:
2206 return GE_EXPR;
2207 case UNLE_EXPR:
2208 return GT_EXPR;
2209 case ORDERED_EXPR:
2210 return UNORDERED_EXPR;
2211 case UNORDERED_EXPR:
2212 return ORDERED_EXPR;
2213 default:
2214 gcc_unreachable ();
2215 }
2216 }
2217
2218 /* Similar, but return the comparison that results if the operands are
2219 swapped. This is safe for floating-point. */
2220
2221 enum tree_code
2222 swap_tree_comparison (enum tree_code code)
2223 {
2224 switch (code)
2225 {
2226 case EQ_EXPR:
2227 case NE_EXPR:
2228 case ORDERED_EXPR:
2229 case UNORDERED_EXPR:
2230 case LTGT_EXPR:
2231 case UNEQ_EXPR:
2232 return code;
2233 case GT_EXPR:
2234 return LT_EXPR;
2235 case GE_EXPR:
2236 return LE_EXPR;
2237 case LT_EXPR:
2238 return GT_EXPR;
2239 case LE_EXPR:
2240 return GE_EXPR;
2241 case UNGT_EXPR:
2242 return UNLT_EXPR;
2243 case UNGE_EXPR:
2244 return UNLE_EXPR;
2245 case UNLT_EXPR:
2246 return UNGT_EXPR;
2247 case UNLE_EXPR:
2248 return UNGE_EXPR;
2249 default:
2250 gcc_unreachable ();
2251 }
2252 }
2253
2254
2255 /* Convert a comparison tree code from an enum tree_code representation
2256 into a compcode bit-based encoding. This function is the inverse of
2257 compcode_to_comparison. */
2258
2259 static enum comparison_code
2260 comparison_to_compcode (enum tree_code code)
2261 {
2262 switch (code)
2263 {
2264 case LT_EXPR:
2265 return COMPCODE_LT;
2266 case EQ_EXPR:
2267 return COMPCODE_EQ;
2268 case LE_EXPR:
2269 return COMPCODE_LE;
2270 case GT_EXPR:
2271 return COMPCODE_GT;
2272 case NE_EXPR:
2273 return COMPCODE_NE;
2274 case GE_EXPR:
2275 return COMPCODE_GE;
2276 case ORDERED_EXPR:
2277 return COMPCODE_ORD;
2278 case UNORDERED_EXPR:
2279 return COMPCODE_UNORD;
2280 case UNLT_EXPR:
2281 return COMPCODE_UNLT;
2282 case UNEQ_EXPR:
2283 return COMPCODE_UNEQ;
2284 case UNLE_EXPR:
2285 return COMPCODE_UNLE;
2286 case UNGT_EXPR:
2287 return COMPCODE_UNGT;
2288 case LTGT_EXPR:
2289 return COMPCODE_LTGT;
2290 case UNGE_EXPR:
2291 return COMPCODE_UNGE;
2292 default:
2293 gcc_unreachable ();
2294 }
2295 }
2296
2297 /* Convert a compcode bit-based encoding of a comparison operator back
2298 to GCC's enum tree_code representation. This function is the
2299 inverse of comparison_to_compcode. */
2300
2301 static enum tree_code
2302 compcode_to_comparison (enum comparison_code code)
2303 {
2304 switch (code)
2305 {
2306 case COMPCODE_LT:
2307 return LT_EXPR;
2308 case COMPCODE_EQ:
2309 return EQ_EXPR;
2310 case COMPCODE_LE:
2311 return LE_EXPR;
2312 case COMPCODE_GT:
2313 return GT_EXPR;
2314 case COMPCODE_NE:
2315 return NE_EXPR;
2316 case COMPCODE_GE:
2317 return GE_EXPR;
2318 case COMPCODE_ORD:
2319 return ORDERED_EXPR;
2320 case COMPCODE_UNORD:
2321 return UNORDERED_EXPR;
2322 case COMPCODE_UNLT:
2323 return UNLT_EXPR;
2324 case COMPCODE_UNEQ:
2325 return UNEQ_EXPR;
2326 case COMPCODE_UNLE:
2327 return UNLE_EXPR;
2328 case COMPCODE_UNGT:
2329 return UNGT_EXPR;
2330 case COMPCODE_LTGT:
2331 return LTGT_EXPR;
2332 case COMPCODE_UNGE:
2333 return UNGE_EXPR;
2334 default:
2335 gcc_unreachable ();
2336 }
2337 }
2338
2339 /* Return a tree for the comparison which is the combination of
2340 doing the AND or OR (depending on CODE) of the two operations LCODE
2341 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2342 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2343 if this makes the transformation invalid. */
2344
2345 tree
2346 combine_comparisons (location_t loc,
2347 enum tree_code code, enum tree_code lcode,
2348 enum tree_code rcode, tree truth_type,
2349 tree ll_arg, tree lr_arg)
2350 {
2351 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2352 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2353 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2354 int compcode;
2355
2356 switch (code)
2357 {
2358 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2359 compcode = lcompcode & rcompcode;
2360 break;
2361
2362 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2363 compcode = lcompcode | rcompcode;
2364 break;
2365
2366 default:
2367 return NULL_TREE;
2368 }
2369
2370 if (!honor_nans)
2371 {
2372 /* Eliminate unordered comparisons, as well as LTGT and ORD
2373 which are not used unless the mode has NaNs. */
2374 compcode &= ~COMPCODE_UNORD;
2375 if (compcode == COMPCODE_LTGT)
2376 compcode = COMPCODE_NE;
2377 else if (compcode == COMPCODE_ORD)
2378 compcode = COMPCODE_TRUE;
2379 }
2380 else if (flag_trapping_math)
2381 {
2382 /* Check that the original operation and the optimized ones will trap
2383 under the same condition. */
2384 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2385 && (lcompcode != COMPCODE_EQ)
2386 && (lcompcode != COMPCODE_ORD);
2387 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2388 && (rcompcode != COMPCODE_EQ)
2389 && (rcompcode != COMPCODE_ORD);
2390 bool trap = (compcode & COMPCODE_UNORD) == 0
2391 && (compcode != COMPCODE_EQ)
2392 && (compcode != COMPCODE_ORD);
2393
2394 /* In a short-circuited boolean expression the LHS might be
2395 such that the RHS, if evaluated, will never trap. For
2396 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2397 if neither x nor y is NaN. (This is a mixed blessing: for
2398 example, the expression above will never trap, hence
2399 optimizing it to x < y would be invalid). */
2400 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2401 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2402 rtrap = false;
2403
2404 /* If the comparison was short-circuited, and only the RHS
2405 trapped, we may now generate a spurious trap. */
2406 if (rtrap && !ltrap
2407 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2408 return NULL_TREE;
2409
2410 /* If we changed the conditions that cause a trap, we lose. */
2411 if ((ltrap || rtrap) != trap)
2412 return NULL_TREE;
2413 }
2414
2415 if (compcode == COMPCODE_TRUE)
2416 return constant_boolean_node (true, truth_type);
2417 else if (compcode == COMPCODE_FALSE)
2418 return constant_boolean_node (false, truth_type);
2419 else
2420 {
2421 enum tree_code tcode;
2422
2423 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2424 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2425 }
2426 }
2427 \f
2428 /* Return nonzero if two operands (typically of the same tree node)
2429 are necessarily equal. If either argument has side-effects this
2430 function returns zero. FLAGS modifies behavior as follows:
2431
2432 If OEP_ONLY_CONST is set, only return nonzero for constants.
2433 This function tests whether the operands are indistinguishable;
2434 it does not test whether they are equal using C's == operation.
2435 The distinction is important for IEEE floating point, because
2436 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2437 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2438
2439 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2440 even though it may hold multiple values during a function.
2441 This is because a GCC tree node guarantees that nothing else is
2442 executed between the evaluation of its "operands" (which may often
2443 be evaluated in arbitrary order). Hence if the operands themselves
2444 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2445 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2446 unset means assuming isochronic (or instantaneous) tree equivalence.
2447 Unless comparing arbitrary expression trees, such as from different
2448 statements, this flag can usually be left unset.
2449
2450 If OEP_PURE_SAME is set, then pure functions with identical arguments
2451 are considered the same. It is used when the caller has other ways
2452 to ensure that global memory is unchanged in between. */
2453
2454 int
2455 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2456 {
2457 /* If either is ERROR_MARK, they aren't equal. */
2458 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2459 || TREE_TYPE (arg0) == error_mark_node
2460 || TREE_TYPE (arg1) == error_mark_node)
2461 return 0;
2462
2463 /* Similar, if either does not have a type (like a released SSA name),
2464 they aren't equal. */
2465 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2466 return 0;
2467
2468 /* Check equality of integer constants before bailing out due to
2469 precision differences. */
2470 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2471 return tree_int_cst_equal (arg0, arg1);
2472
2473 /* If both types don't have the same signedness, then we can't consider
2474 them equal. We must check this before the STRIP_NOPS calls
2475 because they may change the signedness of the arguments. As pointers
2476 strictly don't have a signedness, require either two pointers or
2477 two non-pointers as well. */
2478 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2479 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2480 return 0;
2481
2482 /* We cannot consider pointers to different address space equal. */
2483 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2484 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2485 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2486 return 0;
2487
2488 /* If both types don't have the same precision, then it is not safe
2489 to strip NOPs. */
2490 if (element_precision (TREE_TYPE (arg0))
2491 != element_precision (TREE_TYPE (arg1)))
2492 return 0;
2493
2494 STRIP_NOPS (arg0);
2495 STRIP_NOPS (arg1);
2496
2497 /* In case both args are comparisons but with different comparison
2498 code, try to swap the comparison operands of one arg to produce
2499 a match and compare that variant. */
2500 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2501 && COMPARISON_CLASS_P (arg0)
2502 && COMPARISON_CLASS_P (arg1))
2503 {
2504 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2505
2506 if (TREE_CODE (arg0) == swap_code)
2507 return operand_equal_p (TREE_OPERAND (arg0, 0),
2508 TREE_OPERAND (arg1, 1), flags)
2509 && operand_equal_p (TREE_OPERAND (arg0, 1),
2510 TREE_OPERAND (arg1, 0), flags);
2511 }
2512
2513 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2514 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2515 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2516 return 0;
2517
2518 /* This is needed for conversions and for COMPONENT_REF.
2519 Might as well play it safe and always test this. */
2520 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2521 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2522 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2523 return 0;
2524
2525 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2526 We don't care about side effects in that case because the SAVE_EXPR
2527 takes care of that for us. In all other cases, two expressions are
2528 equal if they have no side effects. If we have two identical
2529 expressions with side effects that should be treated the same due
2530 to the only side effects being identical SAVE_EXPR's, that will
2531 be detected in the recursive calls below.
2532 If we are taking an invariant address of two identical objects
2533 they are necessarily equal as well. */
2534 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2535 && (TREE_CODE (arg0) == SAVE_EXPR
2536 || (flags & OEP_CONSTANT_ADDRESS_OF)
2537 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2538 return 1;
2539
2540 /* Next handle constant cases, those for which we can return 1 even
2541 if ONLY_CONST is set. */
2542 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2543 switch (TREE_CODE (arg0))
2544 {
2545 case INTEGER_CST:
2546 return tree_int_cst_equal (arg0, arg1);
2547
2548 case FIXED_CST:
2549 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2550 TREE_FIXED_CST (arg1));
2551
2552 case REAL_CST:
2553 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2554 TREE_REAL_CST (arg1)))
2555 return 1;
2556
2557
2558 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2559 {
2560 /* If we do not distinguish between signed and unsigned zero,
2561 consider them equal. */
2562 if (real_zerop (arg0) && real_zerop (arg1))
2563 return 1;
2564 }
2565 return 0;
2566
2567 case VECTOR_CST:
2568 {
2569 unsigned i;
2570
2571 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2572 return 0;
2573
2574 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2575 {
2576 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2577 VECTOR_CST_ELT (arg1, i), flags))
2578 return 0;
2579 }
2580 return 1;
2581 }
2582
2583 case COMPLEX_CST:
2584 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2585 flags)
2586 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2587 flags));
2588
2589 case STRING_CST:
2590 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2591 && ! memcmp (TREE_STRING_POINTER (arg0),
2592 TREE_STRING_POINTER (arg1),
2593 TREE_STRING_LENGTH (arg0)));
2594
2595 case ADDR_EXPR:
2596 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2597 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2598 ? OEP_CONSTANT_ADDRESS_OF : 0);
2599 default:
2600 break;
2601 }
2602
2603 if (flags & OEP_ONLY_CONST)
2604 return 0;
2605
2606 /* Define macros to test an operand from arg0 and arg1 for equality and a
2607 variant that allows null and views null as being different from any
2608 non-null value. In the latter case, if either is null, the both
2609 must be; otherwise, do the normal comparison. */
2610 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2611 TREE_OPERAND (arg1, N), flags)
2612
2613 #define OP_SAME_WITH_NULL(N) \
2614 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2615 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2616
2617 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2618 {
2619 case tcc_unary:
2620 /* Two conversions are equal only if signedness and modes match. */
2621 switch (TREE_CODE (arg0))
2622 {
2623 CASE_CONVERT:
2624 case FIX_TRUNC_EXPR:
2625 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2626 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2627 return 0;
2628 break;
2629 default:
2630 break;
2631 }
2632
2633 return OP_SAME (0);
2634
2635
2636 case tcc_comparison:
2637 case tcc_binary:
2638 if (OP_SAME (0) && OP_SAME (1))
2639 return 1;
2640
2641 /* For commutative ops, allow the other order. */
2642 return (commutative_tree_code (TREE_CODE (arg0))
2643 && operand_equal_p (TREE_OPERAND (arg0, 0),
2644 TREE_OPERAND (arg1, 1), flags)
2645 && operand_equal_p (TREE_OPERAND (arg0, 1),
2646 TREE_OPERAND (arg1, 0), flags));
2647
2648 case tcc_reference:
2649 /* If either of the pointer (or reference) expressions we are
2650 dereferencing contain a side effect, these cannot be equal,
2651 but their addresses can be. */
2652 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2653 && (TREE_SIDE_EFFECTS (arg0)
2654 || TREE_SIDE_EFFECTS (arg1)))
2655 return 0;
2656
2657 switch (TREE_CODE (arg0))
2658 {
2659 case INDIRECT_REF:
2660 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2661 return OP_SAME (0);
2662
2663 case REALPART_EXPR:
2664 case IMAGPART_EXPR:
2665 return OP_SAME (0);
2666
2667 case TARGET_MEM_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 /* Require equal extra operands and then fall through to MEM_REF
2670 handling of the two common operands. */
2671 if (!OP_SAME_WITH_NULL (2)
2672 || !OP_SAME_WITH_NULL (3)
2673 || !OP_SAME_WITH_NULL (4))
2674 return 0;
2675 /* Fallthru. */
2676 case MEM_REF:
2677 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2678 /* Require equal access sizes, and similar pointer types.
2679 We can have incomplete types for array references of
2680 variable-sized arrays from the Fortran frontend
2681 though. Also verify the types are compatible. */
2682 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2683 || (TYPE_SIZE (TREE_TYPE (arg0))
2684 && TYPE_SIZE (TREE_TYPE (arg1))
2685 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2686 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2687 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2688 && alias_ptr_types_compatible_p
2689 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2690 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2691 && OP_SAME (0) && OP_SAME (1));
2692
2693 case ARRAY_REF:
2694 case ARRAY_RANGE_REF:
2695 /* Operands 2 and 3 may be null.
2696 Compare the array index by value if it is constant first as we
2697 may have different types but same value here. */
2698 if (!OP_SAME (0))
2699 return 0;
2700 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2701 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2702 TREE_OPERAND (arg1, 1))
2703 || OP_SAME (1))
2704 && OP_SAME_WITH_NULL (2)
2705 && OP_SAME_WITH_NULL (3));
2706
2707 case COMPONENT_REF:
2708 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2709 may be NULL when we're called to compare MEM_EXPRs. */
2710 if (!OP_SAME_WITH_NULL (0)
2711 || !OP_SAME (1))
2712 return 0;
2713 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2714 return OP_SAME_WITH_NULL (2);
2715
2716 case BIT_FIELD_REF:
2717 if (!OP_SAME (0))
2718 return 0;
2719 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2720 return OP_SAME (1) && OP_SAME (2);
2721
2722 default:
2723 return 0;
2724 }
2725
2726 case tcc_expression:
2727 switch (TREE_CODE (arg0))
2728 {
2729 case ADDR_EXPR:
2730 case TRUTH_NOT_EXPR:
2731 return OP_SAME (0);
2732
2733 case TRUTH_ANDIF_EXPR:
2734 case TRUTH_ORIF_EXPR:
2735 return OP_SAME (0) && OP_SAME (1);
2736
2737 case FMA_EXPR:
2738 case WIDEN_MULT_PLUS_EXPR:
2739 case WIDEN_MULT_MINUS_EXPR:
2740 if (!OP_SAME (2))
2741 return 0;
2742 /* The multiplcation operands are commutative. */
2743 /* FALLTHRU */
2744
2745 case TRUTH_AND_EXPR:
2746 case TRUTH_OR_EXPR:
2747 case TRUTH_XOR_EXPR:
2748 if (OP_SAME (0) && OP_SAME (1))
2749 return 1;
2750
2751 /* Otherwise take into account this is a commutative operation. */
2752 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2753 TREE_OPERAND (arg1, 1), flags)
2754 && operand_equal_p (TREE_OPERAND (arg0, 1),
2755 TREE_OPERAND (arg1, 0), flags));
2756
2757 case COND_EXPR:
2758 case VEC_COND_EXPR:
2759 case DOT_PROD_EXPR:
2760 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2761
2762 default:
2763 return 0;
2764 }
2765
2766 case tcc_vl_exp:
2767 switch (TREE_CODE (arg0))
2768 {
2769 case CALL_EXPR:
2770 /* If the CALL_EXPRs call different functions, then they
2771 clearly can not be equal. */
2772 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2773 flags))
2774 return 0;
2775
2776 {
2777 unsigned int cef = call_expr_flags (arg0);
2778 if (flags & OEP_PURE_SAME)
2779 cef &= ECF_CONST | ECF_PURE;
2780 else
2781 cef &= ECF_CONST;
2782 if (!cef)
2783 return 0;
2784 }
2785
2786 /* Now see if all the arguments are the same. */
2787 {
2788 const_call_expr_arg_iterator iter0, iter1;
2789 const_tree a0, a1;
2790 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2791 a1 = first_const_call_expr_arg (arg1, &iter1);
2792 a0 && a1;
2793 a0 = next_const_call_expr_arg (&iter0),
2794 a1 = next_const_call_expr_arg (&iter1))
2795 if (! operand_equal_p (a0, a1, flags))
2796 return 0;
2797
2798 /* If we get here and both argument lists are exhausted
2799 then the CALL_EXPRs are equal. */
2800 return ! (a0 || a1);
2801 }
2802 default:
2803 return 0;
2804 }
2805
2806 case tcc_declaration:
2807 /* Consider __builtin_sqrt equal to sqrt. */
2808 return (TREE_CODE (arg0) == FUNCTION_DECL
2809 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2810 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2811 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2812
2813 default:
2814 return 0;
2815 }
2816
2817 #undef OP_SAME
2818 #undef OP_SAME_WITH_NULL
2819 }
2820 \f
2821 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2822 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2823
2824 When in doubt, return 0. */
2825
2826 static int
2827 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2828 {
2829 int unsignedp1, unsignedpo;
2830 tree primarg0, primarg1, primother;
2831 unsigned int correct_width;
2832
2833 if (operand_equal_p (arg0, arg1, 0))
2834 return 1;
2835
2836 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2837 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2838 return 0;
2839
2840 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2841 and see if the inner values are the same. This removes any
2842 signedness comparison, which doesn't matter here. */
2843 primarg0 = arg0, primarg1 = arg1;
2844 STRIP_NOPS (primarg0);
2845 STRIP_NOPS (primarg1);
2846 if (operand_equal_p (primarg0, primarg1, 0))
2847 return 1;
2848
2849 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2850 actual comparison operand, ARG0.
2851
2852 First throw away any conversions to wider types
2853 already present in the operands. */
2854
2855 primarg1 = get_narrower (arg1, &unsignedp1);
2856 primother = get_narrower (other, &unsignedpo);
2857
2858 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2859 if (unsignedp1 == unsignedpo
2860 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2861 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2862 {
2863 tree type = TREE_TYPE (arg0);
2864
2865 /* Make sure shorter operand is extended the right way
2866 to match the longer operand. */
2867 primarg1 = fold_convert (signed_or_unsigned_type_for
2868 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2869
2870 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2871 return 1;
2872 }
2873
2874 return 0;
2875 }
2876 \f
2877 /* See if ARG is an expression that is either a comparison or is performing
2878 arithmetic on comparisons. The comparisons must only be comparing
2879 two different values, which will be stored in *CVAL1 and *CVAL2; if
2880 they are nonzero it means that some operands have already been found.
2881 No variables may be used anywhere else in the expression except in the
2882 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2883 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2884
2885 If this is true, return 1. Otherwise, return zero. */
2886
2887 static int
2888 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2889 {
2890 enum tree_code code = TREE_CODE (arg);
2891 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2892
2893 /* We can handle some of the tcc_expression cases here. */
2894 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2895 tclass = tcc_unary;
2896 else if (tclass == tcc_expression
2897 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2898 || code == COMPOUND_EXPR))
2899 tclass = tcc_binary;
2900
2901 else if (tclass == tcc_expression && code == SAVE_EXPR
2902 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2903 {
2904 /* If we've already found a CVAL1 or CVAL2, this expression is
2905 two complex to handle. */
2906 if (*cval1 || *cval2)
2907 return 0;
2908
2909 tclass = tcc_unary;
2910 *save_p = 1;
2911 }
2912
2913 switch (tclass)
2914 {
2915 case tcc_unary:
2916 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2917
2918 case tcc_binary:
2919 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2920 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2921 cval1, cval2, save_p));
2922
2923 case tcc_constant:
2924 return 1;
2925
2926 case tcc_expression:
2927 if (code == COND_EXPR)
2928 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2929 cval1, cval2, save_p)
2930 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2931 cval1, cval2, save_p)
2932 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2933 cval1, cval2, save_p));
2934 return 0;
2935
2936 case tcc_comparison:
2937 /* First see if we can handle the first operand, then the second. For
2938 the second operand, we know *CVAL1 can't be zero. It must be that
2939 one side of the comparison is each of the values; test for the
2940 case where this isn't true by failing if the two operands
2941 are the same. */
2942
2943 if (operand_equal_p (TREE_OPERAND (arg, 0),
2944 TREE_OPERAND (arg, 1), 0))
2945 return 0;
2946
2947 if (*cval1 == 0)
2948 *cval1 = TREE_OPERAND (arg, 0);
2949 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2950 ;
2951 else if (*cval2 == 0)
2952 *cval2 = TREE_OPERAND (arg, 0);
2953 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2954 ;
2955 else
2956 return 0;
2957
2958 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2959 ;
2960 else if (*cval2 == 0)
2961 *cval2 = TREE_OPERAND (arg, 1);
2962 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2963 ;
2964 else
2965 return 0;
2966
2967 return 1;
2968
2969 default:
2970 return 0;
2971 }
2972 }
2973 \f
2974 /* ARG is a tree that is known to contain just arithmetic operations and
2975 comparisons. Evaluate the operations in the tree substituting NEW0 for
2976 any occurrence of OLD0 as an operand of a comparison and likewise for
2977 NEW1 and OLD1. */
2978
2979 static tree
2980 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2981 tree old1, tree new1)
2982 {
2983 tree type = TREE_TYPE (arg);
2984 enum tree_code code = TREE_CODE (arg);
2985 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2986
2987 /* We can handle some of the tcc_expression cases here. */
2988 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2989 tclass = tcc_unary;
2990 else if (tclass == tcc_expression
2991 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2992 tclass = tcc_binary;
2993
2994 switch (tclass)
2995 {
2996 case tcc_unary:
2997 return fold_build1_loc (loc, code, type,
2998 eval_subst (loc, TREE_OPERAND (arg, 0),
2999 old0, new0, old1, new1));
3000
3001 case tcc_binary:
3002 return fold_build2_loc (loc, code, type,
3003 eval_subst (loc, TREE_OPERAND (arg, 0),
3004 old0, new0, old1, new1),
3005 eval_subst (loc, TREE_OPERAND (arg, 1),
3006 old0, new0, old1, new1));
3007
3008 case tcc_expression:
3009 switch (code)
3010 {
3011 case SAVE_EXPR:
3012 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3013 old1, new1);
3014
3015 case COMPOUND_EXPR:
3016 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3017 old1, new1);
3018
3019 case COND_EXPR:
3020 return fold_build3_loc (loc, code, type,
3021 eval_subst (loc, TREE_OPERAND (arg, 0),
3022 old0, new0, old1, new1),
3023 eval_subst (loc, TREE_OPERAND (arg, 1),
3024 old0, new0, old1, new1),
3025 eval_subst (loc, TREE_OPERAND (arg, 2),
3026 old0, new0, old1, new1));
3027 default:
3028 break;
3029 }
3030 /* Fall through - ??? */
3031
3032 case tcc_comparison:
3033 {
3034 tree arg0 = TREE_OPERAND (arg, 0);
3035 tree arg1 = TREE_OPERAND (arg, 1);
3036
3037 /* We need to check both for exact equality and tree equality. The
3038 former will be true if the operand has a side-effect. In that
3039 case, we know the operand occurred exactly once. */
3040
3041 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3042 arg0 = new0;
3043 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3044 arg0 = new1;
3045
3046 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3047 arg1 = new0;
3048 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3049 arg1 = new1;
3050
3051 return fold_build2_loc (loc, code, type, arg0, arg1);
3052 }
3053
3054 default:
3055 return arg;
3056 }
3057 }
3058 \f
3059 /* Return a tree for the case when the result of an expression is RESULT
3060 converted to TYPE and OMITTED was previously an operand of the expression
3061 but is now not needed (e.g., we folded OMITTED * 0).
3062
3063 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3064 the conversion of RESULT to TYPE. */
3065
3066 tree
3067 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3068 {
3069 tree t = fold_convert_loc (loc, type, result);
3070
3071 /* If the resulting operand is an empty statement, just return the omitted
3072 statement casted to void. */
3073 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3074 return build1_loc (loc, NOP_EXPR, void_type_node,
3075 fold_ignored_result (omitted));
3076
3077 if (TREE_SIDE_EFFECTS (omitted))
3078 return build2_loc (loc, COMPOUND_EXPR, type,
3079 fold_ignored_result (omitted), t);
3080
3081 return non_lvalue_loc (loc, t);
3082 }
3083
3084 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3085
3086 static tree
3087 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3088 tree omitted)
3089 {
3090 tree t = fold_convert_loc (loc, type, result);
3091
3092 /* If the resulting operand is an empty statement, just return the omitted
3093 statement casted to void. */
3094 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3095 return build1_loc (loc, NOP_EXPR, void_type_node,
3096 fold_ignored_result (omitted));
3097
3098 if (TREE_SIDE_EFFECTS (omitted))
3099 return build2_loc (loc, COMPOUND_EXPR, type,
3100 fold_ignored_result (omitted), t);
3101
3102 return pedantic_non_lvalue_loc (loc, t);
3103 }
3104
3105 /* Return a tree for the case when the result of an expression is RESULT
3106 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3107 of the expression but are now not needed.
3108
3109 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3110 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3111 evaluated before OMITTED2. Otherwise, if neither has side effects,
3112 just do the conversion of RESULT to TYPE. */
3113
3114 tree
3115 omit_two_operands_loc (location_t loc, tree type, tree result,
3116 tree omitted1, tree omitted2)
3117 {
3118 tree t = fold_convert_loc (loc, type, result);
3119
3120 if (TREE_SIDE_EFFECTS (omitted2))
3121 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3122 if (TREE_SIDE_EFFECTS (omitted1))
3123 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3124
3125 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3126 }
3127
3128 \f
3129 /* Return a simplified tree node for the truth-negation of ARG. This
3130 never alters ARG itself. We assume that ARG is an operation that
3131 returns a truth value (0 or 1).
3132
3133 FIXME: one would think we would fold the result, but it causes
3134 problems with the dominator optimizer. */
3135
3136 static tree
3137 fold_truth_not_expr (location_t loc, tree arg)
3138 {
3139 tree type = TREE_TYPE (arg);
3140 enum tree_code code = TREE_CODE (arg);
3141 location_t loc1, loc2;
3142
3143 /* If this is a comparison, we can simply invert it, except for
3144 floating-point non-equality comparisons, in which case we just
3145 enclose a TRUTH_NOT_EXPR around what we have. */
3146
3147 if (TREE_CODE_CLASS (code) == tcc_comparison)
3148 {
3149 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3150 if (FLOAT_TYPE_P (op_type)
3151 && flag_trapping_math
3152 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3153 && code != NE_EXPR && code != EQ_EXPR)
3154 return NULL_TREE;
3155
3156 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3157 if (code == ERROR_MARK)
3158 return NULL_TREE;
3159
3160 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3161 TREE_OPERAND (arg, 1));
3162 }
3163
3164 switch (code)
3165 {
3166 case INTEGER_CST:
3167 return constant_boolean_node (integer_zerop (arg), type);
3168
3169 case TRUTH_AND_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3172 return build2_loc (loc, TRUTH_OR_EXPR, type,
3173 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3174 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3175
3176 case TRUTH_OR_EXPR:
3177 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3178 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3179 return build2_loc (loc, TRUTH_AND_EXPR, type,
3180 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3181 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3182
3183 case TRUTH_XOR_EXPR:
3184 /* Here we can invert either operand. We invert the first operand
3185 unless the second operand is a TRUTH_NOT_EXPR in which case our
3186 result is the XOR of the first operand with the inside of the
3187 negation of the second operand. */
3188
3189 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3190 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3191 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3192 else
3193 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3194 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3195 TREE_OPERAND (arg, 1));
3196
3197 case TRUTH_ANDIF_EXPR:
3198 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3199 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3200 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3201 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3202 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3203
3204 case TRUTH_ORIF_EXPR:
3205 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3206 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3207 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3208 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3209 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3210
3211 case TRUTH_NOT_EXPR:
3212 return TREE_OPERAND (arg, 0);
3213
3214 case COND_EXPR:
3215 {
3216 tree arg1 = TREE_OPERAND (arg, 1);
3217 tree arg2 = TREE_OPERAND (arg, 2);
3218
3219 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3220 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3221
3222 /* A COND_EXPR may have a throw as one operand, which
3223 then has void type. Just leave void operands
3224 as they are. */
3225 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3226 VOID_TYPE_P (TREE_TYPE (arg1))
3227 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3228 VOID_TYPE_P (TREE_TYPE (arg2))
3229 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3230 }
3231
3232 case COMPOUND_EXPR:
3233 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3234 return build2_loc (loc, COMPOUND_EXPR, type,
3235 TREE_OPERAND (arg, 0),
3236 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3237
3238 case NON_LVALUE_EXPR:
3239 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3240 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3241
3242 CASE_CONVERT:
3243 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3244 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3245
3246 /* ... fall through ... */
3247
3248 case FLOAT_EXPR:
3249 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3250 return build1_loc (loc, TREE_CODE (arg), type,
3251 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3252
3253 case BIT_AND_EXPR:
3254 if (!integer_onep (TREE_OPERAND (arg, 1)))
3255 return NULL_TREE;
3256 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3257
3258 case SAVE_EXPR:
3259 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3260
3261 case CLEANUP_POINT_EXPR:
3262 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3263 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3264 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3265
3266 default:
3267 return NULL_TREE;
3268 }
3269 }
3270
3271 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3272 assume that ARG is an operation that returns a truth value (0 or 1
3273 for scalars, 0 or -1 for vectors). Return the folded expression if
3274 folding is successful. Otherwise, return NULL_TREE. */
3275
3276 static tree
3277 fold_invert_truthvalue (location_t loc, tree arg)
3278 {
3279 tree type = TREE_TYPE (arg);
3280 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3281 ? BIT_NOT_EXPR
3282 : TRUTH_NOT_EXPR,
3283 type, arg);
3284 }
3285
3286 /* Return a simplified tree node for the truth-negation of ARG. This
3287 never alters ARG itself. We assume that ARG is an operation that
3288 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3289
3290 tree
3291 invert_truthvalue_loc (location_t loc, tree arg)
3292 {
3293 if (TREE_CODE (arg) == ERROR_MARK)
3294 return arg;
3295
3296 tree type = TREE_TYPE (arg);
3297 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3298 ? BIT_NOT_EXPR
3299 : TRUTH_NOT_EXPR,
3300 type, arg);
3301 }
3302
3303 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3304 operands are another bit-wise operation with a common input. If so,
3305 distribute the bit operations to save an operation and possibly two if
3306 constants are involved. For example, convert
3307 (A | B) & (A | C) into A | (B & C)
3308 Further simplification will occur if B and C are constants.
3309
3310 If this optimization cannot be done, 0 will be returned. */
3311
3312 static tree
3313 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3314 tree arg0, tree arg1)
3315 {
3316 tree common;
3317 tree left, right;
3318
3319 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3320 || TREE_CODE (arg0) == code
3321 || (TREE_CODE (arg0) != BIT_AND_EXPR
3322 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3323 return 0;
3324
3325 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3326 {
3327 common = TREE_OPERAND (arg0, 0);
3328 left = TREE_OPERAND (arg0, 1);
3329 right = TREE_OPERAND (arg1, 1);
3330 }
3331 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3332 {
3333 common = TREE_OPERAND (arg0, 0);
3334 left = TREE_OPERAND (arg0, 1);
3335 right = TREE_OPERAND (arg1, 0);
3336 }
3337 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3338 {
3339 common = TREE_OPERAND (arg0, 1);
3340 left = TREE_OPERAND (arg0, 0);
3341 right = TREE_OPERAND (arg1, 1);
3342 }
3343 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3344 {
3345 common = TREE_OPERAND (arg0, 1);
3346 left = TREE_OPERAND (arg0, 0);
3347 right = TREE_OPERAND (arg1, 0);
3348 }
3349 else
3350 return 0;
3351
3352 common = fold_convert_loc (loc, type, common);
3353 left = fold_convert_loc (loc, type, left);
3354 right = fold_convert_loc (loc, type, right);
3355 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3356 fold_build2_loc (loc, code, type, left, right));
3357 }
3358
3359 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3360 with code CODE. This optimization is unsafe. */
3361 static tree
3362 distribute_real_division (location_t loc, enum tree_code code, tree type,
3363 tree arg0, tree arg1)
3364 {
3365 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3366 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3367
3368 /* (A / C) +- (B / C) -> (A +- B) / C. */
3369 if (mul0 == mul1
3370 && operand_equal_p (TREE_OPERAND (arg0, 1),
3371 TREE_OPERAND (arg1, 1), 0))
3372 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3373 fold_build2_loc (loc, code, type,
3374 TREE_OPERAND (arg0, 0),
3375 TREE_OPERAND (arg1, 0)),
3376 TREE_OPERAND (arg0, 1));
3377
3378 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3379 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3380 TREE_OPERAND (arg1, 0), 0)
3381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3382 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3383 {
3384 REAL_VALUE_TYPE r0, r1;
3385 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3386 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3387 if (!mul0)
3388 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3389 if (!mul1)
3390 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3391 real_arithmetic (&r0, code, &r0, &r1);
3392 return fold_build2_loc (loc, MULT_EXPR, type,
3393 TREE_OPERAND (arg0, 0),
3394 build_real (type, r0));
3395 }
3396
3397 return NULL_TREE;
3398 }
3399 \f
3400 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3401 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3402
3403 static tree
3404 make_bit_field_ref (location_t loc, tree inner, tree type,
3405 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3406 {
3407 tree result, bftype;
3408
3409 if (bitpos == 0)
3410 {
3411 tree size = TYPE_SIZE (TREE_TYPE (inner));
3412 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3413 || POINTER_TYPE_P (TREE_TYPE (inner)))
3414 && tree_fits_shwi_p (size)
3415 && tree_to_shwi (size) == bitsize)
3416 return fold_convert_loc (loc, type, inner);
3417 }
3418
3419 bftype = type;
3420 if (TYPE_PRECISION (bftype) != bitsize
3421 || TYPE_UNSIGNED (bftype) == !unsignedp)
3422 bftype = build_nonstandard_integer_type (bitsize, 0);
3423
3424 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3425 size_int (bitsize), bitsize_int (bitpos));
3426
3427 if (bftype != type)
3428 result = fold_convert_loc (loc, type, result);
3429
3430 return result;
3431 }
3432
3433 /* Optimize a bit-field compare.
3434
3435 There are two cases: First is a compare against a constant and the
3436 second is a comparison of two items where the fields are at the same
3437 bit position relative to the start of a chunk (byte, halfword, word)
3438 large enough to contain it. In these cases we can avoid the shift
3439 implicit in bitfield extractions.
3440
3441 For constants, we emit a compare of the shifted constant with the
3442 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3443 compared. For two fields at the same position, we do the ANDs with the
3444 similar mask and compare the result of the ANDs.
3445
3446 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3447 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3448 are the left and right operands of the comparison, respectively.
3449
3450 If the optimization described above can be done, we return the resulting
3451 tree. Otherwise we return zero. */
3452
3453 static tree
3454 optimize_bit_field_compare (location_t loc, enum tree_code code,
3455 tree compare_type, tree lhs, tree rhs)
3456 {
3457 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3458 tree type = TREE_TYPE (lhs);
3459 tree unsigned_type;
3460 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3461 machine_mode lmode, rmode, nmode;
3462 int lunsignedp, runsignedp;
3463 int lvolatilep = 0, rvolatilep = 0;
3464 tree linner, rinner = NULL_TREE;
3465 tree mask;
3466 tree offset;
3467
3468 /* Get all the information about the extractions being done. If the bit size
3469 if the same as the size of the underlying object, we aren't doing an
3470 extraction at all and so can do nothing. We also don't want to
3471 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3472 then will no longer be able to replace it. */
3473 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3474 &lunsignedp, &lvolatilep, false);
3475 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3476 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3477 return 0;
3478
3479 if (!const_p)
3480 {
3481 /* If this is not a constant, we can only do something if bit positions,
3482 sizes, and signedness are the same. */
3483 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3484 &runsignedp, &rvolatilep, false);
3485
3486 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3487 || lunsignedp != runsignedp || offset != 0
3488 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3489 return 0;
3490 }
3491
3492 /* See if we can find a mode to refer to this field. We should be able to,
3493 but fail if we can't. */
3494 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3495 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3496 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3497 TYPE_ALIGN (TREE_TYPE (rinner))),
3498 word_mode, false);
3499 if (nmode == VOIDmode)
3500 return 0;
3501
3502 /* Set signed and unsigned types of the precision of this mode for the
3503 shifts below. */
3504 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3505
3506 /* Compute the bit position and size for the new reference and our offset
3507 within it. If the new reference is the same size as the original, we
3508 won't optimize anything, so return zero. */
3509 nbitsize = GET_MODE_BITSIZE (nmode);
3510 nbitpos = lbitpos & ~ (nbitsize - 1);
3511 lbitpos -= nbitpos;
3512 if (nbitsize == lbitsize)
3513 return 0;
3514
3515 if (BYTES_BIG_ENDIAN)
3516 lbitpos = nbitsize - lbitsize - lbitpos;
3517
3518 /* Make the mask to be used against the extracted field. */
3519 mask = build_int_cst_type (unsigned_type, -1);
3520 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3521 mask = const_binop (RSHIFT_EXPR, mask,
3522 size_int (nbitsize - lbitsize - lbitpos));
3523
3524 if (! const_p)
3525 /* If not comparing with constant, just rework the comparison
3526 and return. */
3527 return fold_build2_loc (loc, code, compare_type,
3528 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3529 make_bit_field_ref (loc, linner,
3530 unsigned_type,
3531 nbitsize, nbitpos,
3532 1),
3533 mask),
3534 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3535 make_bit_field_ref (loc, rinner,
3536 unsigned_type,
3537 nbitsize, nbitpos,
3538 1),
3539 mask));
3540
3541 /* Otherwise, we are handling the constant case. See if the constant is too
3542 big for the field. Warn and return a tree of for 0 (false) if so. We do
3543 this not only for its own sake, but to avoid having to test for this
3544 error case below. If we didn't, we might generate wrong code.
3545
3546 For unsigned fields, the constant shifted right by the field length should
3547 be all zero. For signed fields, the high-order bits should agree with
3548 the sign bit. */
3549
3550 if (lunsignedp)
3551 {
3552 if (wi::lrshift (rhs, lbitsize) != 0)
3553 {
3554 warning (0, "comparison is always %d due to width of bit-field",
3555 code == NE_EXPR);
3556 return constant_boolean_node (code == NE_EXPR, compare_type);
3557 }
3558 }
3559 else
3560 {
3561 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3562 if (tem != 0 && tem != -1)
3563 {
3564 warning (0, "comparison is always %d due to width of bit-field",
3565 code == NE_EXPR);
3566 return constant_boolean_node (code == NE_EXPR, compare_type);
3567 }
3568 }
3569
3570 /* Single-bit compares should always be against zero. */
3571 if (lbitsize == 1 && ! integer_zerop (rhs))
3572 {
3573 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3574 rhs = build_int_cst (type, 0);
3575 }
3576
3577 /* Make a new bitfield reference, shift the constant over the
3578 appropriate number of bits and mask it with the computed mask
3579 (in case this was a signed field). If we changed it, make a new one. */
3580 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3581
3582 rhs = const_binop (BIT_AND_EXPR,
3583 const_binop (LSHIFT_EXPR,
3584 fold_convert_loc (loc, unsigned_type, rhs),
3585 size_int (lbitpos)),
3586 mask);
3587
3588 lhs = build2_loc (loc, code, compare_type,
3589 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3590 return lhs;
3591 }
3592 \f
3593 /* Subroutine for fold_truth_andor_1: decode a field reference.
3594
3595 If EXP is a comparison reference, we return the innermost reference.
3596
3597 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3598 set to the starting bit number.
3599
3600 If the innermost field can be completely contained in a mode-sized
3601 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3602
3603 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3604 otherwise it is not changed.
3605
3606 *PUNSIGNEDP is set to the signedness of the field.
3607
3608 *PMASK is set to the mask used. This is either contained in a
3609 BIT_AND_EXPR or derived from the width of the field.
3610
3611 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3612
3613 Return 0 if this is not a component reference or is one that we can't
3614 do anything with. */
3615
3616 static tree
3617 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3618 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3619 int *punsignedp, int *pvolatilep,
3620 tree *pmask, tree *pand_mask)
3621 {
3622 tree outer_type = 0;
3623 tree and_mask = 0;
3624 tree mask, inner, offset;
3625 tree unsigned_type;
3626 unsigned int precision;
3627
3628 /* All the optimizations using this function assume integer fields.
3629 There are problems with FP fields since the type_for_size call
3630 below can fail for, e.g., XFmode. */
3631 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3632 return 0;
3633
3634 /* We are interested in the bare arrangement of bits, so strip everything
3635 that doesn't affect the machine mode. However, record the type of the
3636 outermost expression if it may matter below. */
3637 if (CONVERT_EXPR_P (exp)
3638 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3639 outer_type = TREE_TYPE (exp);
3640 STRIP_NOPS (exp);
3641
3642 if (TREE_CODE (exp) == BIT_AND_EXPR)
3643 {
3644 and_mask = TREE_OPERAND (exp, 1);
3645 exp = TREE_OPERAND (exp, 0);
3646 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3647 if (TREE_CODE (and_mask) != INTEGER_CST)
3648 return 0;
3649 }
3650
3651 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3652 punsignedp, pvolatilep, false);
3653 if ((inner == exp && and_mask == 0)
3654 || *pbitsize < 0 || offset != 0
3655 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3656 return 0;
3657
3658 /* If the number of bits in the reference is the same as the bitsize of
3659 the outer type, then the outer type gives the signedness. Otherwise
3660 (in case of a small bitfield) the signedness is unchanged. */
3661 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3662 *punsignedp = TYPE_UNSIGNED (outer_type);
3663
3664 /* Compute the mask to access the bitfield. */
3665 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3666 precision = TYPE_PRECISION (unsigned_type);
3667
3668 mask = build_int_cst_type (unsigned_type, -1);
3669
3670 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3671 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3672
3673 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3674 if (and_mask != 0)
3675 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3676 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3677
3678 *pmask = mask;
3679 *pand_mask = and_mask;
3680 return inner;
3681 }
3682
3683 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3684 bit positions and MASK is SIGNED. */
3685
3686 static int
3687 all_ones_mask_p (const_tree mask, unsigned int size)
3688 {
3689 tree type = TREE_TYPE (mask);
3690 unsigned int precision = TYPE_PRECISION (type);
3691
3692 /* If this function returns true when the type of the mask is
3693 UNSIGNED, then there will be errors. In particular see
3694 gcc.c-torture/execute/990326-1.c. There does not appear to be
3695 any documentation paper trail as to why this is so. But the pre
3696 wide-int worked with that restriction and it has been preserved
3697 here. */
3698 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3699 return false;
3700
3701 return wi::mask (size, false, precision) == mask;
3702 }
3703
3704 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3705 represents the sign bit of EXP's type. If EXP represents a sign
3706 or zero extension, also test VAL against the unextended type.
3707 The return value is the (sub)expression whose sign bit is VAL,
3708 or NULL_TREE otherwise. */
3709
3710 static tree
3711 sign_bit_p (tree exp, const_tree val)
3712 {
3713 int width;
3714 tree t;
3715
3716 /* Tree EXP must have an integral type. */
3717 t = TREE_TYPE (exp);
3718 if (! INTEGRAL_TYPE_P (t))
3719 return NULL_TREE;
3720
3721 /* Tree VAL must be an integer constant. */
3722 if (TREE_CODE (val) != INTEGER_CST
3723 || TREE_OVERFLOW (val))
3724 return NULL_TREE;
3725
3726 width = TYPE_PRECISION (t);
3727 if (wi::only_sign_bit_p (val, width))
3728 return exp;
3729
3730 /* Handle extension from a narrower type. */
3731 if (TREE_CODE (exp) == NOP_EXPR
3732 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3733 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3734
3735 return NULL_TREE;
3736 }
3737
3738 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3739 to be evaluated unconditionally. */
3740
3741 static int
3742 simple_operand_p (const_tree exp)
3743 {
3744 /* Strip any conversions that don't change the machine mode. */
3745 STRIP_NOPS (exp);
3746
3747 return (CONSTANT_CLASS_P (exp)
3748 || TREE_CODE (exp) == SSA_NAME
3749 || (DECL_P (exp)
3750 && ! TREE_ADDRESSABLE (exp)
3751 && ! TREE_THIS_VOLATILE (exp)
3752 && ! DECL_NONLOCAL (exp)
3753 /* Don't regard global variables as simple. They may be
3754 allocated in ways unknown to the compiler (shared memory,
3755 #pragma weak, etc). */
3756 && ! TREE_PUBLIC (exp)
3757 && ! DECL_EXTERNAL (exp)
3758 /* Weakrefs are not safe to be read, since they can be NULL.
3759 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3760 have DECL_WEAK flag set. */
3761 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3762 /* Loading a static variable is unduly expensive, but global
3763 registers aren't expensive. */
3764 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3765 }
3766
3767 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3768 to be evaluated unconditionally.
3769 I addition to simple_operand_p, we assume that comparisons, conversions,
3770 and logic-not operations are simple, if their operands are simple, too. */
3771
3772 static bool
3773 simple_operand_p_2 (tree exp)
3774 {
3775 enum tree_code code;
3776
3777 if (TREE_SIDE_EFFECTS (exp)
3778 || tree_could_trap_p (exp))
3779 return false;
3780
3781 while (CONVERT_EXPR_P (exp))
3782 exp = TREE_OPERAND (exp, 0);
3783
3784 code = TREE_CODE (exp);
3785
3786 if (TREE_CODE_CLASS (code) == tcc_comparison)
3787 return (simple_operand_p (TREE_OPERAND (exp, 0))
3788 && simple_operand_p (TREE_OPERAND (exp, 1)));
3789
3790 if (code == TRUTH_NOT_EXPR)
3791 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3792
3793 return simple_operand_p (exp);
3794 }
3795
3796 \f
3797 /* The following functions are subroutines to fold_range_test and allow it to
3798 try to change a logical combination of comparisons into a range test.
3799
3800 For example, both
3801 X == 2 || X == 3 || X == 4 || X == 5
3802 and
3803 X >= 2 && X <= 5
3804 are converted to
3805 (unsigned) (X - 2) <= 3
3806
3807 We describe each set of comparisons as being either inside or outside
3808 a range, using a variable named like IN_P, and then describe the
3809 range with a lower and upper bound. If one of the bounds is omitted,
3810 it represents either the highest or lowest value of the type.
3811
3812 In the comments below, we represent a range by two numbers in brackets
3813 preceded by a "+" to designate being inside that range, or a "-" to
3814 designate being outside that range, so the condition can be inverted by
3815 flipping the prefix. An omitted bound is represented by a "-". For
3816 example, "- [-, 10]" means being outside the range starting at the lowest
3817 possible value and ending at 10, in other words, being greater than 10.
3818 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3819 always false.
3820
3821 We set up things so that the missing bounds are handled in a consistent
3822 manner so neither a missing bound nor "true" and "false" need to be
3823 handled using a special case. */
3824
3825 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3826 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3827 and UPPER1_P are nonzero if the respective argument is an upper bound
3828 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3829 must be specified for a comparison. ARG1 will be converted to ARG0's
3830 type if both are specified. */
3831
3832 static tree
3833 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3834 tree arg1, int upper1_p)
3835 {
3836 tree tem;
3837 int result;
3838 int sgn0, sgn1;
3839
3840 /* If neither arg represents infinity, do the normal operation.
3841 Else, if not a comparison, return infinity. Else handle the special
3842 comparison rules. Note that most of the cases below won't occur, but
3843 are handled for consistency. */
3844
3845 if (arg0 != 0 && arg1 != 0)
3846 {
3847 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3848 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3849 STRIP_NOPS (tem);
3850 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3851 }
3852
3853 if (TREE_CODE_CLASS (code) != tcc_comparison)
3854 return 0;
3855
3856 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3857 for neither. In real maths, we cannot assume open ended ranges are
3858 the same. But, this is computer arithmetic, where numbers are finite.
3859 We can therefore make the transformation of any unbounded range with
3860 the value Z, Z being greater than any representable number. This permits
3861 us to treat unbounded ranges as equal. */
3862 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3863 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3864 switch (code)
3865 {
3866 case EQ_EXPR:
3867 result = sgn0 == sgn1;
3868 break;
3869 case NE_EXPR:
3870 result = sgn0 != sgn1;
3871 break;
3872 case LT_EXPR:
3873 result = sgn0 < sgn1;
3874 break;
3875 case LE_EXPR:
3876 result = sgn0 <= sgn1;
3877 break;
3878 case GT_EXPR:
3879 result = sgn0 > sgn1;
3880 break;
3881 case GE_EXPR:
3882 result = sgn0 >= sgn1;
3883 break;
3884 default:
3885 gcc_unreachable ();
3886 }
3887
3888 return constant_boolean_node (result, type);
3889 }
3890 \f
3891 /* Helper routine for make_range. Perform one step for it, return
3892 new expression if the loop should continue or NULL_TREE if it should
3893 stop. */
3894
3895 tree
3896 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3897 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3898 bool *strict_overflow_p)
3899 {
3900 tree arg0_type = TREE_TYPE (arg0);
3901 tree n_low, n_high, low = *p_low, high = *p_high;
3902 int in_p = *p_in_p, n_in_p;
3903
3904 switch (code)
3905 {
3906 case TRUTH_NOT_EXPR:
3907 /* We can only do something if the range is testing for zero. */
3908 if (low == NULL_TREE || high == NULL_TREE
3909 || ! integer_zerop (low) || ! integer_zerop (high))
3910 return NULL_TREE;
3911 *p_in_p = ! in_p;
3912 return arg0;
3913
3914 case EQ_EXPR: case NE_EXPR:
3915 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3916 /* We can only do something if the range is testing for zero
3917 and if the second operand is an integer constant. Note that
3918 saying something is "in" the range we make is done by
3919 complementing IN_P since it will set in the initial case of
3920 being not equal to zero; "out" is leaving it alone. */
3921 if (low == NULL_TREE || high == NULL_TREE
3922 || ! integer_zerop (low) || ! integer_zerop (high)
3923 || TREE_CODE (arg1) != INTEGER_CST)
3924 return NULL_TREE;
3925
3926 switch (code)
3927 {
3928 case NE_EXPR: /* - [c, c] */
3929 low = high = arg1;
3930 break;
3931 case EQ_EXPR: /* + [c, c] */
3932 in_p = ! in_p, low = high = arg1;
3933 break;
3934 case GT_EXPR: /* - [-, c] */
3935 low = 0, high = arg1;
3936 break;
3937 case GE_EXPR: /* + [c, -] */
3938 in_p = ! in_p, low = arg1, high = 0;
3939 break;
3940 case LT_EXPR: /* - [c, -] */
3941 low = arg1, high = 0;
3942 break;
3943 case LE_EXPR: /* + [-, c] */
3944 in_p = ! in_p, low = 0, high = arg1;
3945 break;
3946 default:
3947 gcc_unreachable ();
3948 }
3949
3950 /* If this is an unsigned comparison, we also know that EXP is
3951 greater than or equal to zero. We base the range tests we make
3952 on that fact, so we record it here so we can parse existing
3953 range tests. We test arg0_type since often the return type
3954 of, e.g. EQ_EXPR, is boolean. */
3955 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3956 {
3957 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3958 in_p, low, high, 1,
3959 build_int_cst (arg0_type, 0),
3960 NULL_TREE))
3961 return NULL_TREE;
3962
3963 in_p = n_in_p, low = n_low, high = n_high;
3964
3965 /* If the high bound is missing, but we have a nonzero low
3966 bound, reverse the range so it goes from zero to the low bound
3967 minus 1. */
3968 if (high == 0 && low && ! integer_zerop (low))
3969 {
3970 in_p = ! in_p;
3971 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3972 build_int_cst (TREE_TYPE (low), 1), 0);
3973 low = build_int_cst (arg0_type, 0);
3974 }
3975 }
3976
3977 *p_low = low;
3978 *p_high = high;
3979 *p_in_p = in_p;
3980 return arg0;
3981
3982 case NEGATE_EXPR:
3983 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3984 low and high are non-NULL, then normalize will DTRT. */
3985 if (!TYPE_UNSIGNED (arg0_type)
3986 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3987 {
3988 if (low == NULL_TREE)
3989 low = TYPE_MIN_VALUE (arg0_type);
3990 if (high == NULL_TREE)
3991 high = TYPE_MAX_VALUE (arg0_type);
3992 }
3993
3994 /* (-x) IN [a,b] -> x in [-b, -a] */
3995 n_low = range_binop (MINUS_EXPR, exp_type,
3996 build_int_cst (exp_type, 0),
3997 0, high, 1);
3998 n_high = range_binop (MINUS_EXPR, exp_type,
3999 build_int_cst (exp_type, 0),
4000 0, low, 0);
4001 if (n_high != 0 && TREE_OVERFLOW (n_high))
4002 return NULL_TREE;
4003 goto normalize;
4004
4005 case BIT_NOT_EXPR:
4006 /* ~ X -> -X - 1 */
4007 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4008 build_int_cst (exp_type, 1));
4009
4010 case PLUS_EXPR:
4011 case MINUS_EXPR:
4012 if (TREE_CODE (arg1) != INTEGER_CST)
4013 return NULL_TREE;
4014
4015 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4016 move a constant to the other side. */
4017 if (!TYPE_UNSIGNED (arg0_type)
4018 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4019 return NULL_TREE;
4020
4021 /* If EXP is signed, any overflow in the computation is undefined,
4022 so we don't worry about it so long as our computations on
4023 the bounds don't overflow. For unsigned, overflow is defined
4024 and this is exactly the right thing. */
4025 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4026 arg0_type, low, 0, arg1, 0);
4027 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4028 arg0_type, high, 1, arg1, 0);
4029 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4030 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4031 return NULL_TREE;
4032
4033 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4034 *strict_overflow_p = true;
4035
4036 normalize:
4037 /* Check for an unsigned range which has wrapped around the maximum
4038 value thus making n_high < n_low, and normalize it. */
4039 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4040 {
4041 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4042 build_int_cst (TREE_TYPE (n_high), 1), 0);
4043 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4044 build_int_cst (TREE_TYPE (n_low), 1), 0);
4045
4046 /* If the range is of the form +/- [ x+1, x ], we won't
4047 be able to normalize it. But then, it represents the
4048 whole range or the empty set, so make it
4049 +/- [ -, - ]. */
4050 if (tree_int_cst_equal (n_low, low)
4051 && tree_int_cst_equal (n_high, high))
4052 low = high = 0;
4053 else
4054 in_p = ! in_p;
4055 }
4056 else
4057 low = n_low, high = n_high;
4058
4059 *p_low = low;
4060 *p_high = high;
4061 *p_in_p = in_p;
4062 return arg0;
4063
4064 CASE_CONVERT:
4065 case NON_LVALUE_EXPR:
4066 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4067 return NULL_TREE;
4068
4069 if (! INTEGRAL_TYPE_P (arg0_type)
4070 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4071 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4072 return NULL_TREE;
4073
4074 n_low = low, n_high = high;
4075
4076 if (n_low != 0)
4077 n_low = fold_convert_loc (loc, arg0_type, n_low);
4078
4079 if (n_high != 0)
4080 n_high = fold_convert_loc (loc, arg0_type, n_high);
4081
4082 /* If we're converting arg0 from an unsigned type, to exp,
4083 a signed type, we will be doing the comparison as unsigned.
4084 The tests above have already verified that LOW and HIGH
4085 are both positive.
4086
4087 So we have to ensure that we will handle large unsigned
4088 values the same way that the current signed bounds treat
4089 negative values. */
4090
4091 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4092 {
4093 tree high_positive;
4094 tree equiv_type;
4095 /* For fixed-point modes, we need to pass the saturating flag
4096 as the 2nd parameter. */
4097 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4098 equiv_type
4099 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4100 TYPE_SATURATING (arg0_type));
4101 else
4102 equiv_type
4103 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4104
4105 /* A range without an upper bound is, naturally, unbounded.
4106 Since convert would have cropped a very large value, use
4107 the max value for the destination type. */
4108 high_positive
4109 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4110 : TYPE_MAX_VALUE (arg0_type);
4111
4112 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4113 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4114 fold_convert_loc (loc, arg0_type,
4115 high_positive),
4116 build_int_cst (arg0_type, 1));
4117
4118 /* If the low bound is specified, "and" the range with the
4119 range for which the original unsigned value will be
4120 positive. */
4121 if (low != 0)
4122 {
4123 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4124 1, fold_convert_loc (loc, arg0_type,
4125 integer_zero_node),
4126 high_positive))
4127 return NULL_TREE;
4128
4129 in_p = (n_in_p == in_p);
4130 }
4131 else
4132 {
4133 /* Otherwise, "or" the range with the range of the input
4134 that will be interpreted as negative. */
4135 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4136 1, fold_convert_loc (loc, arg0_type,
4137 integer_zero_node),
4138 high_positive))
4139 return NULL_TREE;
4140
4141 in_p = (in_p != n_in_p);
4142 }
4143 }
4144
4145 *p_low = n_low;
4146 *p_high = n_high;
4147 *p_in_p = in_p;
4148 return arg0;
4149
4150 default:
4151 return NULL_TREE;
4152 }
4153 }
4154
4155 /* Given EXP, a logical expression, set the range it is testing into
4156 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4157 actually being tested. *PLOW and *PHIGH will be made of the same
4158 type as the returned expression. If EXP is not a comparison, we
4159 will most likely not be returning a useful value and range. Set
4160 *STRICT_OVERFLOW_P to true if the return value is only valid
4161 because signed overflow is undefined; otherwise, do not change
4162 *STRICT_OVERFLOW_P. */
4163
4164 tree
4165 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4166 bool *strict_overflow_p)
4167 {
4168 enum tree_code code;
4169 tree arg0, arg1 = NULL_TREE;
4170 tree exp_type, nexp;
4171 int in_p;
4172 tree low, high;
4173 location_t loc = EXPR_LOCATION (exp);
4174
4175 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4176 and see if we can refine the range. Some of the cases below may not
4177 happen, but it doesn't seem worth worrying about this. We "continue"
4178 the outer loop when we've changed something; otherwise we "break"
4179 the switch, which will "break" the while. */
4180
4181 in_p = 0;
4182 low = high = build_int_cst (TREE_TYPE (exp), 0);
4183
4184 while (1)
4185 {
4186 code = TREE_CODE (exp);
4187 exp_type = TREE_TYPE (exp);
4188 arg0 = NULL_TREE;
4189
4190 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4191 {
4192 if (TREE_OPERAND_LENGTH (exp) > 0)
4193 arg0 = TREE_OPERAND (exp, 0);
4194 if (TREE_CODE_CLASS (code) == tcc_binary
4195 || TREE_CODE_CLASS (code) == tcc_comparison
4196 || (TREE_CODE_CLASS (code) == tcc_expression
4197 && TREE_OPERAND_LENGTH (exp) > 1))
4198 arg1 = TREE_OPERAND (exp, 1);
4199 }
4200 if (arg0 == NULL_TREE)
4201 break;
4202
4203 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4204 &high, &in_p, strict_overflow_p);
4205 if (nexp == NULL_TREE)
4206 break;
4207 exp = nexp;
4208 }
4209
4210 /* If EXP is a constant, we can evaluate whether this is true or false. */
4211 if (TREE_CODE (exp) == INTEGER_CST)
4212 {
4213 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4214 exp, 0, low, 0))
4215 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4216 exp, 1, high, 1)));
4217 low = high = 0;
4218 exp = 0;
4219 }
4220
4221 *pin_p = in_p, *plow = low, *phigh = high;
4222 return exp;
4223 }
4224 \f
4225 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4226 type, TYPE, return an expression to test if EXP is in (or out of, depending
4227 on IN_P) the range. Return 0 if the test couldn't be created. */
4228
4229 tree
4230 build_range_check (location_t loc, tree type, tree exp, int in_p,
4231 tree low, tree high)
4232 {
4233 tree etype = TREE_TYPE (exp), value;
4234
4235 #ifdef HAVE_canonicalize_funcptr_for_compare
4236 /* Disable this optimization for function pointer expressions
4237 on targets that require function pointer canonicalization. */
4238 if (HAVE_canonicalize_funcptr_for_compare
4239 && TREE_CODE (etype) == POINTER_TYPE
4240 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4241 return NULL_TREE;
4242 #endif
4243
4244 if (! in_p)
4245 {
4246 value = build_range_check (loc, type, exp, 1, low, high);
4247 if (value != 0)
4248 return invert_truthvalue_loc (loc, value);
4249
4250 return 0;
4251 }
4252
4253 if (low == 0 && high == 0)
4254 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4255
4256 if (low == 0)
4257 return fold_build2_loc (loc, LE_EXPR, type, exp,
4258 fold_convert_loc (loc, etype, high));
4259
4260 if (high == 0)
4261 return fold_build2_loc (loc, GE_EXPR, type, exp,
4262 fold_convert_loc (loc, etype, low));
4263
4264 if (operand_equal_p (low, high, 0))
4265 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4266 fold_convert_loc (loc, etype, low));
4267
4268 if (integer_zerop (low))
4269 {
4270 if (! TYPE_UNSIGNED (etype))
4271 {
4272 etype = unsigned_type_for (etype);
4273 high = fold_convert_loc (loc, etype, high);
4274 exp = fold_convert_loc (loc, etype, exp);
4275 }
4276 return build_range_check (loc, type, exp, 1, 0, high);
4277 }
4278
4279 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4280 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4281 {
4282 int prec = TYPE_PRECISION (etype);
4283
4284 if (wi::mask (prec - 1, false, prec) == high)
4285 {
4286 if (TYPE_UNSIGNED (etype))
4287 {
4288 tree signed_etype = signed_type_for (etype);
4289 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4290 etype
4291 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4292 else
4293 etype = signed_etype;
4294 exp = fold_convert_loc (loc, etype, exp);
4295 }
4296 return fold_build2_loc (loc, GT_EXPR, type, exp,
4297 build_int_cst (etype, 0));
4298 }
4299 }
4300
4301 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4302 This requires wrap-around arithmetics for the type of the expression.
4303 First make sure that arithmetics in this type is valid, then make sure
4304 that it wraps around. */
4305 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4306 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4307 TYPE_UNSIGNED (etype));
4308
4309 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4310 {
4311 tree utype, minv, maxv;
4312
4313 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4314 for the type in question, as we rely on this here. */
4315 utype = unsigned_type_for (etype);
4316 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4317 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4318 build_int_cst (TREE_TYPE (maxv), 1), 1);
4319 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4320
4321 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4322 minv, 1, maxv, 1)))
4323 etype = utype;
4324 else
4325 return 0;
4326 }
4327
4328 high = fold_convert_loc (loc, etype, high);
4329 low = fold_convert_loc (loc, etype, low);
4330 exp = fold_convert_loc (loc, etype, exp);
4331
4332 value = const_binop (MINUS_EXPR, high, low);
4333
4334
4335 if (POINTER_TYPE_P (etype))
4336 {
4337 if (value != 0 && !TREE_OVERFLOW (value))
4338 {
4339 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4340 return build_range_check (loc, type,
4341 fold_build_pointer_plus_loc (loc, exp, low),
4342 1, build_int_cst (etype, 0), value);
4343 }
4344 return 0;
4345 }
4346
4347 if (value != 0 && !TREE_OVERFLOW (value))
4348 return build_range_check (loc, type,
4349 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4350 1, build_int_cst (etype, 0), value);
4351
4352 return 0;
4353 }
4354 \f
4355 /* Return the predecessor of VAL in its type, handling the infinite case. */
4356
4357 static tree
4358 range_predecessor (tree val)
4359 {
4360 tree type = TREE_TYPE (val);
4361
4362 if (INTEGRAL_TYPE_P (type)
4363 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4364 return 0;
4365 else
4366 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4367 build_int_cst (TREE_TYPE (val), 1), 0);
4368 }
4369
4370 /* Return the successor of VAL in its type, handling the infinite case. */
4371
4372 static tree
4373 range_successor (tree val)
4374 {
4375 tree type = TREE_TYPE (val);
4376
4377 if (INTEGRAL_TYPE_P (type)
4378 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4379 return 0;
4380 else
4381 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4382 build_int_cst (TREE_TYPE (val), 1), 0);
4383 }
4384
4385 /* Given two ranges, see if we can merge them into one. Return 1 if we
4386 can, 0 if we can't. Set the output range into the specified parameters. */
4387
4388 bool
4389 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4390 tree high0, int in1_p, tree low1, tree high1)
4391 {
4392 int no_overlap;
4393 int subset;
4394 int temp;
4395 tree tem;
4396 int in_p;
4397 tree low, high;
4398 int lowequal = ((low0 == 0 && low1 == 0)
4399 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4400 low0, 0, low1, 0)));
4401 int highequal = ((high0 == 0 && high1 == 0)
4402 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4403 high0, 1, high1, 1)));
4404
4405 /* Make range 0 be the range that starts first, or ends last if they
4406 start at the same value. Swap them if it isn't. */
4407 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4408 low0, 0, low1, 0))
4409 || (lowequal
4410 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4411 high1, 1, high0, 1))))
4412 {
4413 temp = in0_p, in0_p = in1_p, in1_p = temp;
4414 tem = low0, low0 = low1, low1 = tem;
4415 tem = high0, high0 = high1, high1 = tem;
4416 }
4417
4418 /* Now flag two cases, whether the ranges are disjoint or whether the
4419 second range is totally subsumed in the first. Note that the tests
4420 below are simplified by the ones above. */
4421 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4422 high0, 1, low1, 0));
4423 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4424 high1, 1, high0, 1));
4425
4426 /* We now have four cases, depending on whether we are including or
4427 excluding the two ranges. */
4428 if (in0_p && in1_p)
4429 {
4430 /* If they don't overlap, the result is false. If the second range
4431 is a subset it is the result. Otherwise, the range is from the start
4432 of the second to the end of the first. */
4433 if (no_overlap)
4434 in_p = 0, low = high = 0;
4435 else if (subset)
4436 in_p = 1, low = low1, high = high1;
4437 else
4438 in_p = 1, low = low1, high = high0;
4439 }
4440
4441 else if (in0_p && ! in1_p)
4442 {
4443 /* If they don't overlap, the result is the first range. If they are
4444 equal, the result is false. If the second range is a subset of the
4445 first, and the ranges begin at the same place, we go from just after
4446 the end of the second range to the end of the first. If the second
4447 range is not a subset of the first, or if it is a subset and both
4448 ranges end at the same place, the range starts at the start of the
4449 first range and ends just before the second range.
4450 Otherwise, we can't describe this as a single range. */
4451 if (no_overlap)
4452 in_p = 1, low = low0, high = high0;
4453 else if (lowequal && highequal)
4454 in_p = 0, low = high = 0;
4455 else if (subset && lowequal)
4456 {
4457 low = range_successor (high1);
4458 high = high0;
4459 in_p = 1;
4460 if (low == 0)
4461 {
4462 /* We are in the weird situation where high0 > high1 but
4463 high1 has no successor. Punt. */
4464 return 0;
4465 }
4466 }
4467 else if (! subset || highequal)
4468 {
4469 low = low0;
4470 high = range_predecessor (low1);
4471 in_p = 1;
4472 if (high == 0)
4473 {
4474 /* low0 < low1 but low1 has no predecessor. Punt. */
4475 return 0;
4476 }
4477 }
4478 else
4479 return 0;
4480 }
4481
4482 else if (! in0_p && in1_p)
4483 {
4484 /* If they don't overlap, the result is the second range. If the second
4485 is a subset of the first, the result is false. Otherwise,
4486 the range starts just after the first range and ends at the
4487 end of the second. */
4488 if (no_overlap)
4489 in_p = 1, low = low1, high = high1;
4490 else if (subset || highequal)
4491 in_p = 0, low = high = 0;
4492 else
4493 {
4494 low = range_successor (high0);
4495 high = high1;
4496 in_p = 1;
4497 if (low == 0)
4498 {
4499 /* high1 > high0 but high0 has no successor. Punt. */
4500 return 0;
4501 }
4502 }
4503 }
4504
4505 else
4506 {
4507 /* The case where we are excluding both ranges. Here the complex case
4508 is if they don't overlap. In that case, the only time we have a
4509 range is if they are adjacent. If the second is a subset of the
4510 first, the result is the first. Otherwise, the range to exclude
4511 starts at the beginning of the first range and ends at the end of the
4512 second. */
4513 if (no_overlap)
4514 {
4515 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4516 range_successor (high0),
4517 1, low1, 0)))
4518 in_p = 0, low = low0, high = high1;
4519 else
4520 {
4521 /* Canonicalize - [min, x] into - [-, x]. */
4522 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4523 switch (TREE_CODE (TREE_TYPE (low0)))
4524 {
4525 case ENUMERAL_TYPE:
4526 if (TYPE_PRECISION (TREE_TYPE (low0))
4527 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4528 break;
4529 /* FALLTHROUGH */
4530 case INTEGER_TYPE:
4531 if (tree_int_cst_equal (low0,
4532 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4533 low0 = 0;
4534 break;
4535 case POINTER_TYPE:
4536 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4537 && integer_zerop (low0))
4538 low0 = 0;
4539 break;
4540 default:
4541 break;
4542 }
4543
4544 /* Canonicalize - [x, max] into - [x, -]. */
4545 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4546 switch (TREE_CODE (TREE_TYPE (high1)))
4547 {
4548 case ENUMERAL_TYPE:
4549 if (TYPE_PRECISION (TREE_TYPE (high1))
4550 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4551 break;
4552 /* FALLTHROUGH */
4553 case INTEGER_TYPE:
4554 if (tree_int_cst_equal (high1,
4555 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4556 high1 = 0;
4557 break;
4558 case POINTER_TYPE:
4559 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4560 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4561 high1, 1,
4562 build_int_cst (TREE_TYPE (high1), 1),
4563 1)))
4564 high1 = 0;
4565 break;
4566 default:
4567 break;
4568 }
4569
4570 /* The ranges might be also adjacent between the maximum and
4571 minimum values of the given type. For
4572 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4573 return + [x + 1, y - 1]. */
4574 if (low0 == 0 && high1 == 0)
4575 {
4576 low = range_successor (high0);
4577 high = range_predecessor (low1);
4578 if (low == 0 || high == 0)
4579 return 0;
4580
4581 in_p = 1;
4582 }
4583 else
4584 return 0;
4585 }
4586 }
4587 else if (subset)
4588 in_p = 0, low = low0, high = high0;
4589 else
4590 in_p = 0, low = low0, high = high1;
4591 }
4592
4593 *pin_p = in_p, *plow = low, *phigh = high;
4594 return 1;
4595 }
4596 \f
4597
4598 /* Subroutine of fold, looking inside expressions of the form
4599 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4600 of the COND_EXPR. This function is being used also to optimize
4601 A op B ? C : A, by reversing the comparison first.
4602
4603 Return a folded expression whose code is not a COND_EXPR
4604 anymore, or NULL_TREE if no folding opportunity is found. */
4605
4606 static tree
4607 fold_cond_expr_with_comparison (location_t loc, tree type,
4608 tree arg0, tree arg1, tree arg2)
4609 {
4610 enum tree_code comp_code = TREE_CODE (arg0);
4611 tree arg00 = TREE_OPERAND (arg0, 0);
4612 tree arg01 = TREE_OPERAND (arg0, 1);
4613 tree arg1_type = TREE_TYPE (arg1);
4614 tree tem;
4615
4616 STRIP_NOPS (arg1);
4617 STRIP_NOPS (arg2);
4618
4619 /* If we have A op 0 ? A : -A, consider applying the following
4620 transformations:
4621
4622 A == 0? A : -A same as -A
4623 A != 0? A : -A same as A
4624 A >= 0? A : -A same as abs (A)
4625 A > 0? A : -A same as abs (A)
4626 A <= 0? A : -A same as -abs (A)
4627 A < 0? A : -A same as -abs (A)
4628
4629 None of these transformations work for modes with signed
4630 zeros. If A is +/-0, the first two transformations will
4631 change the sign of the result (from +0 to -0, or vice
4632 versa). The last four will fix the sign of the result,
4633 even though the original expressions could be positive or
4634 negative, depending on the sign of A.
4635
4636 Note that all these transformations are correct if A is
4637 NaN, since the two alternatives (A and -A) are also NaNs. */
4638 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4639 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4640 ? real_zerop (arg01)
4641 : integer_zerop (arg01))
4642 && ((TREE_CODE (arg2) == NEGATE_EXPR
4643 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4644 /* In the case that A is of the form X-Y, '-A' (arg2) may
4645 have already been folded to Y-X, check for that. */
4646 || (TREE_CODE (arg1) == MINUS_EXPR
4647 && TREE_CODE (arg2) == MINUS_EXPR
4648 && operand_equal_p (TREE_OPERAND (arg1, 0),
4649 TREE_OPERAND (arg2, 1), 0)
4650 && operand_equal_p (TREE_OPERAND (arg1, 1),
4651 TREE_OPERAND (arg2, 0), 0))))
4652 switch (comp_code)
4653 {
4654 case EQ_EXPR:
4655 case UNEQ_EXPR:
4656 tem = fold_convert_loc (loc, arg1_type, arg1);
4657 return pedantic_non_lvalue_loc (loc,
4658 fold_convert_loc (loc, type,
4659 negate_expr (tem)));
4660 case NE_EXPR:
4661 case LTGT_EXPR:
4662 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4663 case UNGE_EXPR:
4664 case UNGT_EXPR:
4665 if (flag_trapping_math)
4666 break;
4667 /* Fall through. */
4668 case GE_EXPR:
4669 case GT_EXPR:
4670 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4671 arg1 = fold_convert_loc (loc, signed_type_for
4672 (TREE_TYPE (arg1)), arg1);
4673 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4674 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4675 case UNLE_EXPR:
4676 case UNLT_EXPR:
4677 if (flag_trapping_math)
4678 break;
4679 case LE_EXPR:
4680 case LT_EXPR:
4681 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4682 arg1 = fold_convert_loc (loc, signed_type_for
4683 (TREE_TYPE (arg1)), arg1);
4684 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4685 return negate_expr (fold_convert_loc (loc, type, tem));
4686 default:
4687 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4688 break;
4689 }
4690
4691 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4692 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4693 both transformations are correct when A is NaN: A != 0
4694 is then true, and A == 0 is false. */
4695
4696 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4697 && integer_zerop (arg01) && integer_zerop (arg2))
4698 {
4699 if (comp_code == NE_EXPR)
4700 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4701 else if (comp_code == EQ_EXPR)
4702 return build_zero_cst (type);
4703 }
4704
4705 /* Try some transformations of A op B ? A : B.
4706
4707 A == B? A : B same as B
4708 A != B? A : B same as A
4709 A >= B? A : B same as max (A, B)
4710 A > B? A : B same as max (B, A)
4711 A <= B? A : B same as min (A, B)
4712 A < B? A : B same as min (B, A)
4713
4714 As above, these transformations don't work in the presence
4715 of signed zeros. For example, if A and B are zeros of
4716 opposite sign, the first two transformations will change
4717 the sign of the result. In the last four, the original
4718 expressions give different results for (A=+0, B=-0) and
4719 (A=-0, B=+0), but the transformed expressions do not.
4720
4721 The first two transformations are correct if either A or B
4722 is a NaN. In the first transformation, the condition will
4723 be false, and B will indeed be chosen. In the case of the
4724 second transformation, the condition A != B will be true,
4725 and A will be chosen.
4726
4727 The conversions to max() and min() are not correct if B is
4728 a number and A is not. The conditions in the original
4729 expressions will be false, so all four give B. The min()
4730 and max() versions would give a NaN instead. */
4731 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4732 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4733 /* Avoid these transformations if the COND_EXPR may be used
4734 as an lvalue in the C++ front-end. PR c++/19199. */
4735 && (in_gimple_form
4736 || VECTOR_TYPE_P (type)
4737 || (strcmp (lang_hooks.name, "GNU C++") != 0
4738 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4739 || ! maybe_lvalue_p (arg1)
4740 || ! maybe_lvalue_p (arg2)))
4741 {
4742 tree comp_op0 = arg00;
4743 tree comp_op1 = arg01;
4744 tree comp_type = TREE_TYPE (comp_op0);
4745
4746 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4747 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4748 {
4749 comp_type = type;
4750 comp_op0 = arg1;
4751 comp_op1 = arg2;
4752 }
4753
4754 switch (comp_code)
4755 {
4756 case EQ_EXPR:
4757 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4758 case NE_EXPR:
4759 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4760 case LE_EXPR:
4761 case LT_EXPR:
4762 case UNLE_EXPR:
4763 case UNLT_EXPR:
4764 /* In C++ a ?: expression can be an lvalue, so put the
4765 operand which will be used if they are equal first
4766 so that we can convert this back to the
4767 corresponding COND_EXPR. */
4768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4769 {
4770 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4771 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4772 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4773 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4774 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4775 comp_op1, comp_op0);
4776 return pedantic_non_lvalue_loc (loc,
4777 fold_convert_loc (loc, type, tem));
4778 }
4779 break;
4780 case GE_EXPR:
4781 case GT_EXPR:
4782 case UNGE_EXPR:
4783 case UNGT_EXPR:
4784 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4785 {
4786 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4787 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4788 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4789 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4790 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4791 comp_op1, comp_op0);
4792 return pedantic_non_lvalue_loc (loc,
4793 fold_convert_loc (loc, type, tem));
4794 }
4795 break;
4796 case UNEQ_EXPR:
4797 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4798 return pedantic_non_lvalue_loc (loc,
4799 fold_convert_loc (loc, type, arg2));
4800 break;
4801 case LTGT_EXPR:
4802 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4803 return pedantic_non_lvalue_loc (loc,
4804 fold_convert_loc (loc, type, arg1));
4805 break;
4806 default:
4807 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4808 break;
4809 }
4810 }
4811
4812 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4813 we might still be able to simplify this. For example,
4814 if C1 is one less or one more than C2, this might have started
4815 out as a MIN or MAX and been transformed by this function.
4816 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4817
4818 if (INTEGRAL_TYPE_P (type)
4819 && TREE_CODE (arg01) == INTEGER_CST
4820 && TREE_CODE (arg2) == INTEGER_CST)
4821 switch (comp_code)
4822 {
4823 case EQ_EXPR:
4824 if (TREE_CODE (arg1) == INTEGER_CST)
4825 break;
4826 /* We can replace A with C1 in this case. */
4827 arg1 = fold_convert_loc (loc, type, arg01);
4828 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4829
4830 case LT_EXPR:
4831 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4832 MIN_EXPR, to preserve the signedness of the comparison. */
4833 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4834 OEP_ONLY_CONST)
4835 && operand_equal_p (arg01,
4836 const_binop (PLUS_EXPR, arg2,
4837 build_int_cst (type, 1)),
4838 OEP_ONLY_CONST))
4839 {
4840 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4841 fold_convert_loc (loc, TREE_TYPE (arg00),
4842 arg2));
4843 return pedantic_non_lvalue_loc (loc,
4844 fold_convert_loc (loc, type, tem));
4845 }
4846 break;
4847
4848 case LE_EXPR:
4849 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4850 as above. */
4851 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4852 OEP_ONLY_CONST)
4853 && operand_equal_p (arg01,
4854 const_binop (MINUS_EXPR, arg2,
4855 build_int_cst (type, 1)),
4856 OEP_ONLY_CONST))
4857 {
4858 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4859 fold_convert_loc (loc, TREE_TYPE (arg00),
4860 arg2));
4861 return pedantic_non_lvalue_loc (loc,
4862 fold_convert_loc (loc, type, tem));
4863 }
4864 break;
4865
4866 case GT_EXPR:
4867 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4868 MAX_EXPR, to preserve the signedness of the comparison. */
4869 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4870 OEP_ONLY_CONST)
4871 && operand_equal_p (arg01,
4872 const_binop (MINUS_EXPR, arg2,
4873 build_int_cst (type, 1)),
4874 OEP_ONLY_CONST))
4875 {
4876 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4877 fold_convert_loc (loc, TREE_TYPE (arg00),
4878 arg2));
4879 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4880 }
4881 break;
4882
4883 case GE_EXPR:
4884 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4885 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4886 OEP_ONLY_CONST)
4887 && operand_equal_p (arg01,
4888 const_binop (PLUS_EXPR, arg2,
4889 build_int_cst (type, 1)),
4890 OEP_ONLY_CONST))
4891 {
4892 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4893 fold_convert_loc (loc, TREE_TYPE (arg00),
4894 arg2));
4895 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4896 }
4897 break;
4898 case NE_EXPR:
4899 break;
4900 default:
4901 gcc_unreachable ();
4902 }
4903
4904 return NULL_TREE;
4905 }
4906
4907
4908 \f
4909 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4910 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4911 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4912 false) >= 2)
4913 #endif
4914
4915 /* EXP is some logical combination of boolean tests. See if we can
4916 merge it into some range test. Return the new tree if so. */
4917
4918 static tree
4919 fold_range_test (location_t loc, enum tree_code code, tree type,
4920 tree op0, tree op1)
4921 {
4922 int or_op = (code == TRUTH_ORIF_EXPR
4923 || code == TRUTH_OR_EXPR);
4924 int in0_p, in1_p, in_p;
4925 tree low0, low1, low, high0, high1, high;
4926 bool strict_overflow_p = false;
4927 tree tem, lhs, rhs;
4928 const char * const warnmsg = G_("assuming signed overflow does not occur "
4929 "when simplifying range test");
4930
4931 if (!INTEGRAL_TYPE_P (type))
4932 return 0;
4933
4934 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4935 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4936
4937 /* If this is an OR operation, invert both sides; we will invert
4938 again at the end. */
4939 if (or_op)
4940 in0_p = ! in0_p, in1_p = ! in1_p;
4941
4942 /* If both expressions are the same, if we can merge the ranges, and we
4943 can build the range test, return it or it inverted. If one of the
4944 ranges is always true or always false, consider it to be the same
4945 expression as the other. */
4946 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4947 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4948 in1_p, low1, high1)
4949 && 0 != (tem = (build_range_check (loc, type,
4950 lhs != 0 ? lhs
4951 : rhs != 0 ? rhs : integer_zero_node,
4952 in_p, low, high))))
4953 {
4954 if (strict_overflow_p)
4955 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4956 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4957 }
4958
4959 /* On machines where the branch cost is expensive, if this is a
4960 short-circuited branch and the underlying object on both sides
4961 is the same, make a non-short-circuit operation. */
4962 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4963 && lhs != 0 && rhs != 0
4964 && (code == TRUTH_ANDIF_EXPR
4965 || code == TRUTH_ORIF_EXPR)
4966 && operand_equal_p (lhs, rhs, 0))
4967 {
4968 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4969 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4970 which cases we can't do this. */
4971 if (simple_operand_p (lhs))
4972 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4973 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4974 type, op0, op1);
4975
4976 else if (!lang_hooks.decls.global_bindings_p ()
4977 && !CONTAINS_PLACEHOLDER_P (lhs))
4978 {
4979 tree common = save_expr (lhs);
4980
4981 if (0 != (lhs = build_range_check (loc, type, common,
4982 or_op ? ! in0_p : in0_p,
4983 low0, high0))
4984 && (0 != (rhs = build_range_check (loc, type, common,
4985 or_op ? ! in1_p : in1_p,
4986 low1, high1))))
4987 {
4988 if (strict_overflow_p)
4989 fold_overflow_warning (warnmsg,
4990 WARN_STRICT_OVERFLOW_COMPARISON);
4991 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4992 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4993 type, lhs, rhs);
4994 }
4995 }
4996 }
4997
4998 return 0;
4999 }
5000 \f
5001 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5002 bit value. Arrange things so the extra bits will be set to zero if and
5003 only if C is signed-extended to its full width. If MASK is nonzero,
5004 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5005
5006 static tree
5007 unextend (tree c, int p, int unsignedp, tree mask)
5008 {
5009 tree type = TREE_TYPE (c);
5010 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5011 tree temp;
5012
5013 if (p == modesize || unsignedp)
5014 return c;
5015
5016 /* We work by getting just the sign bit into the low-order bit, then
5017 into the high-order bit, then sign-extend. We then XOR that value
5018 with C. */
5019 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5020
5021 /* We must use a signed type in order to get an arithmetic right shift.
5022 However, we must also avoid introducing accidental overflows, so that
5023 a subsequent call to integer_zerop will work. Hence we must
5024 do the type conversion here. At this point, the constant is either
5025 zero or one, and the conversion to a signed type can never overflow.
5026 We could get an overflow if this conversion is done anywhere else. */
5027 if (TYPE_UNSIGNED (type))
5028 temp = fold_convert (signed_type_for (type), temp);
5029
5030 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5031 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5032 if (mask != 0)
5033 temp = const_binop (BIT_AND_EXPR, temp,
5034 fold_convert (TREE_TYPE (c), mask));
5035 /* If necessary, convert the type back to match the type of C. */
5036 if (TYPE_UNSIGNED (type))
5037 temp = fold_convert (type, temp);
5038
5039 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5040 }
5041 \f
5042 /* For an expression that has the form
5043 (A && B) || ~B
5044 or
5045 (A || B) && ~B,
5046 we can drop one of the inner expressions and simplify to
5047 A || ~B
5048 or
5049 A && ~B
5050 LOC is the location of the resulting expression. OP is the inner
5051 logical operation; the left-hand side in the examples above, while CMPOP
5052 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5053 removing a condition that guards another, as in
5054 (A != NULL && A->...) || A == NULL
5055 which we must not transform. If RHS_ONLY is true, only eliminate the
5056 right-most operand of the inner logical operation. */
5057
5058 static tree
5059 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5060 bool rhs_only)
5061 {
5062 tree type = TREE_TYPE (cmpop);
5063 enum tree_code code = TREE_CODE (cmpop);
5064 enum tree_code truthop_code = TREE_CODE (op);
5065 tree lhs = TREE_OPERAND (op, 0);
5066 tree rhs = TREE_OPERAND (op, 1);
5067 tree orig_lhs = lhs, orig_rhs = rhs;
5068 enum tree_code rhs_code = TREE_CODE (rhs);
5069 enum tree_code lhs_code = TREE_CODE (lhs);
5070 enum tree_code inv_code;
5071
5072 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5073 return NULL_TREE;
5074
5075 if (TREE_CODE_CLASS (code) != tcc_comparison)
5076 return NULL_TREE;
5077
5078 if (rhs_code == truthop_code)
5079 {
5080 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5081 if (newrhs != NULL_TREE)
5082 {
5083 rhs = newrhs;
5084 rhs_code = TREE_CODE (rhs);
5085 }
5086 }
5087 if (lhs_code == truthop_code && !rhs_only)
5088 {
5089 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5090 if (newlhs != NULL_TREE)
5091 {
5092 lhs = newlhs;
5093 lhs_code = TREE_CODE (lhs);
5094 }
5095 }
5096
5097 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5098 if (inv_code == rhs_code
5099 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5100 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5101 return lhs;
5102 if (!rhs_only && inv_code == lhs_code
5103 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5104 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5105 return rhs;
5106 if (rhs != orig_rhs || lhs != orig_lhs)
5107 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5108 lhs, rhs);
5109 return NULL_TREE;
5110 }
5111
5112 /* Find ways of folding logical expressions of LHS and RHS:
5113 Try to merge two comparisons to the same innermost item.
5114 Look for range tests like "ch >= '0' && ch <= '9'".
5115 Look for combinations of simple terms on machines with expensive branches
5116 and evaluate the RHS unconditionally.
5117
5118 For example, if we have p->a == 2 && p->b == 4 and we can make an
5119 object large enough to span both A and B, we can do this with a comparison
5120 against the object ANDed with the a mask.
5121
5122 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5123 operations to do this with one comparison.
5124
5125 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5126 function and the one above.
5127
5128 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5129 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5130
5131 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5132 two operands.
5133
5134 We return the simplified tree or 0 if no optimization is possible. */
5135
5136 static tree
5137 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5138 tree lhs, tree rhs)
5139 {
5140 /* If this is the "or" of two comparisons, we can do something if
5141 the comparisons are NE_EXPR. If this is the "and", we can do something
5142 if the comparisons are EQ_EXPR. I.e.,
5143 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5144
5145 WANTED_CODE is this operation code. For single bit fields, we can
5146 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5147 comparison for one-bit fields. */
5148
5149 enum tree_code wanted_code;
5150 enum tree_code lcode, rcode;
5151 tree ll_arg, lr_arg, rl_arg, rr_arg;
5152 tree ll_inner, lr_inner, rl_inner, rr_inner;
5153 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5154 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5155 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5156 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5157 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5158 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5159 machine_mode lnmode, rnmode;
5160 tree ll_mask, lr_mask, rl_mask, rr_mask;
5161 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5162 tree l_const, r_const;
5163 tree lntype, rntype, result;
5164 HOST_WIDE_INT first_bit, end_bit;
5165 int volatilep;
5166
5167 /* Start by getting the comparison codes. Fail if anything is volatile.
5168 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5169 it were surrounded with a NE_EXPR. */
5170
5171 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5172 return 0;
5173
5174 lcode = TREE_CODE (lhs);
5175 rcode = TREE_CODE (rhs);
5176
5177 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5178 {
5179 lhs = build2 (NE_EXPR, truth_type, lhs,
5180 build_int_cst (TREE_TYPE (lhs), 0));
5181 lcode = NE_EXPR;
5182 }
5183
5184 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5185 {
5186 rhs = build2 (NE_EXPR, truth_type, rhs,
5187 build_int_cst (TREE_TYPE (rhs), 0));
5188 rcode = NE_EXPR;
5189 }
5190
5191 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5192 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5193 return 0;
5194
5195 ll_arg = TREE_OPERAND (lhs, 0);
5196 lr_arg = TREE_OPERAND (lhs, 1);
5197 rl_arg = TREE_OPERAND (rhs, 0);
5198 rr_arg = TREE_OPERAND (rhs, 1);
5199
5200 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5201 if (simple_operand_p (ll_arg)
5202 && simple_operand_p (lr_arg))
5203 {
5204 if (operand_equal_p (ll_arg, rl_arg, 0)
5205 && operand_equal_p (lr_arg, rr_arg, 0))
5206 {
5207 result = combine_comparisons (loc, code, lcode, rcode,
5208 truth_type, ll_arg, lr_arg);
5209 if (result)
5210 return result;
5211 }
5212 else if (operand_equal_p (ll_arg, rr_arg, 0)
5213 && operand_equal_p (lr_arg, rl_arg, 0))
5214 {
5215 result = combine_comparisons (loc, code, lcode,
5216 swap_tree_comparison (rcode),
5217 truth_type, ll_arg, lr_arg);
5218 if (result)
5219 return result;
5220 }
5221 }
5222
5223 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5224 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5225
5226 /* If the RHS can be evaluated unconditionally and its operands are
5227 simple, it wins to evaluate the RHS unconditionally on machines
5228 with expensive branches. In this case, this isn't a comparison
5229 that can be merged. */
5230
5231 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5232 false) >= 2
5233 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5234 && simple_operand_p (rl_arg)
5235 && simple_operand_p (rr_arg))
5236 {
5237 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5238 if (code == TRUTH_OR_EXPR
5239 && lcode == NE_EXPR && integer_zerop (lr_arg)
5240 && rcode == NE_EXPR && integer_zerop (rr_arg)
5241 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5242 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5243 return build2_loc (loc, NE_EXPR, truth_type,
5244 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5245 ll_arg, rl_arg),
5246 build_int_cst (TREE_TYPE (ll_arg), 0));
5247
5248 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5249 if (code == TRUTH_AND_EXPR
5250 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5251 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5252 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5253 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5254 return build2_loc (loc, EQ_EXPR, truth_type,
5255 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5256 ll_arg, rl_arg),
5257 build_int_cst (TREE_TYPE (ll_arg), 0));
5258 }
5259
5260 /* See if the comparisons can be merged. Then get all the parameters for
5261 each side. */
5262
5263 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5264 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5265 return 0;
5266
5267 volatilep = 0;
5268 ll_inner = decode_field_reference (loc, ll_arg,
5269 &ll_bitsize, &ll_bitpos, &ll_mode,
5270 &ll_unsignedp, &volatilep, &ll_mask,
5271 &ll_and_mask);
5272 lr_inner = decode_field_reference (loc, lr_arg,
5273 &lr_bitsize, &lr_bitpos, &lr_mode,
5274 &lr_unsignedp, &volatilep, &lr_mask,
5275 &lr_and_mask);
5276 rl_inner = decode_field_reference (loc, rl_arg,
5277 &rl_bitsize, &rl_bitpos, &rl_mode,
5278 &rl_unsignedp, &volatilep, &rl_mask,
5279 &rl_and_mask);
5280 rr_inner = decode_field_reference (loc, rr_arg,
5281 &rr_bitsize, &rr_bitpos, &rr_mode,
5282 &rr_unsignedp, &volatilep, &rr_mask,
5283 &rr_and_mask);
5284
5285 /* It must be true that the inner operation on the lhs of each
5286 comparison must be the same if we are to be able to do anything.
5287 Then see if we have constants. If not, the same must be true for
5288 the rhs's. */
5289 if (volatilep || ll_inner == 0 || rl_inner == 0
5290 || ! operand_equal_p (ll_inner, rl_inner, 0))
5291 return 0;
5292
5293 if (TREE_CODE (lr_arg) == INTEGER_CST
5294 && TREE_CODE (rr_arg) == INTEGER_CST)
5295 l_const = lr_arg, r_const = rr_arg;
5296 else if (lr_inner == 0 || rr_inner == 0
5297 || ! operand_equal_p (lr_inner, rr_inner, 0))
5298 return 0;
5299 else
5300 l_const = r_const = 0;
5301
5302 /* If either comparison code is not correct for our logical operation,
5303 fail. However, we can convert a one-bit comparison against zero into
5304 the opposite comparison against that bit being set in the field. */
5305
5306 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5307 if (lcode != wanted_code)
5308 {
5309 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5310 {
5311 /* Make the left operand unsigned, since we are only interested
5312 in the value of one bit. Otherwise we are doing the wrong
5313 thing below. */
5314 ll_unsignedp = 1;
5315 l_const = ll_mask;
5316 }
5317 else
5318 return 0;
5319 }
5320
5321 /* This is analogous to the code for l_const above. */
5322 if (rcode != wanted_code)
5323 {
5324 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5325 {
5326 rl_unsignedp = 1;
5327 r_const = rl_mask;
5328 }
5329 else
5330 return 0;
5331 }
5332
5333 /* See if we can find a mode that contains both fields being compared on
5334 the left. If we can't, fail. Otherwise, update all constants and masks
5335 to be relative to a field of that size. */
5336 first_bit = MIN (ll_bitpos, rl_bitpos);
5337 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5338 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5339 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5340 volatilep);
5341 if (lnmode == VOIDmode)
5342 return 0;
5343
5344 lnbitsize = GET_MODE_BITSIZE (lnmode);
5345 lnbitpos = first_bit & ~ (lnbitsize - 1);
5346 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5347 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5348
5349 if (BYTES_BIG_ENDIAN)
5350 {
5351 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5352 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5353 }
5354
5355 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5356 size_int (xll_bitpos));
5357 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5358 size_int (xrl_bitpos));
5359
5360 if (l_const)
5361 {
5362 l_const = fold_convert_loc (loc, lntype, l_const);
5363 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5364 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5365 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5366 fold_build1_loc (loc, BIT_NOT_EXPR,
5367 lntype, ll_mask))))
5368 {
5369 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5370
5371 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5372 }
5373 }
5374 if (r_const)
5375 {
5376 r_const = fold_convert_loc (loc, lntype, r_const);
5377 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5378 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5379 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5380 fold_build1_loc (loc, BIT_NOT_EXPR,
5381 lntype, rl_mask))))
5382 {
5383 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5384
5385 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5386 }
5387 }
5388
5389 /* If the right sides are not constant, do the same for it. Also,
5390 disallow this optimization if a size or signedness mismatch occurs
5391 between the left and right sides. */
5392 if (l_const == 0)
5393 {
5394 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5395 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5396 /* Make sure the two fields on the right
5397 correspond to the left without being swapped. */
5398 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5399 return 0;
5400
5401 first_bit = MIN (lr_bitpos, rr_bitpos);
5402 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5403 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5404 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5405 volatilep);
5406 if (rnmode == VOIDmode)
5407 return 0;
5408
5409 rnbitsize = GET_MODE_BITSIZE (rnmode);
5410 rnbitpos = first_bit & ~ (rnbitsize - 1);
5411 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5412 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5413
5414 if (BYTES_BIG_ENDIAN)
5415 {
5416 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5417 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5418 }
5419
5420 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5421 rntype, lr_mask),
5422 size_int (xlr_bitpos));
5423 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5424 rntype, rr_mask),
5425 size_int (xrr_bitpos));
5426
5427 /* Make a mask that corresponds to both fields being compared.
5428 Do this for both items being compared. If the operands are the
5429 same size and the bits being compared are in the same position
5430 then we can do this by masking both and comparing the masked
5431 results. */
5432 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5433 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5434 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5435 {
5436 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5437 ll_unsignedp || rl_unsignedp);
5438 if (! all_ones_mask_p (ll_mask, lnbitsize))
5439 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5440
5441 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5442 lr_unsignedp || rr_unsignedp);
5443 if (! all_ones_mask_p (lr_mask, rnbitsize))
5444 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5445
5446 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5447 }
5448
5449 /* There is still another way we can do something: If both pairs of
5450 fields being compared are adjacent, we may be able to make a wider
5451 field containing them both.
5452
5453 Note that we still must mask the lhs/rhs expressions. Furthermore,
5454 the mask must be shifted to account for the shift done by
5455 make_bit_field_ref. */
5456 if ((ll_bitsize + ll_bitpos == rl_bitpos
5457 && lr_bitsize + lr_bitpos == rr_bitpos)
5458 || (ll_bitpos == rl_bitpos + rl_bitsize
5459 && lr_bitpos == rr_bitpos + rr_bitsize))
5460 {
5461 tree type;
5462
5463 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5464 ll_bitsize + rl_bitsize,
5465 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5466 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5467 lr_bitsize + rr_bitsize,
5468 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5469
5470 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5471 size_int (MIN (xll_bitpos, xrl_bitpos)));
5472 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5473 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5474
5475 /* Convert to the smaller type before masking out unwanted bits. */
5476 type = lntype;
5477 if (lntype != rntype)
5478 {
5479 if (lnbitsize > rnbitsize)
5480 {
5481 lhs = fold_convert_loc (loc, rntype, lhs);
5482 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5483 type = rntype;
5484 }
5485 else if (lnbitsize < rnbitsize)
5486 {
5487 rhs = fold_convert_loc (loc, lntype, rhs);
5488 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5489 type = lntype;
5490 }
5491 }
5492
5493 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5494 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5495
5496 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5497 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5498
5499 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5500 }
5501
5502 return 0;
5503 }
5504
5505 /* Handle the case of comparisons with constants. If there is something in
5506 common between the masks, those bits of the constants must be the same.
5507 If not, the condition is always false. Test for this to avoid generating
5508 incorrect code below. */
5509 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5510 if (! integer_zerop (result)
5511 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5512 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5513 {
5514 if (wanted_code == NE_EXPR)
5515 {
5516 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5517 return constant_boolean_node (true, truth_type);
5518 }
5519 else
5520 {
5521 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5522 return constant_boolean_node (false, truth_type);
5523 }
5524 }
5525
5526 /* Construct the expression we will return. First get the component
5527 reference we will make. Unless the mask is all ones the width of
5528 that field, perform the mask operation. Then compare with the
5529 merged constant. */
5530 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5531 ll_unsignedp || rl_unsignedp);
5532
5533 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5534 if (! all_ones_mask_p (ll_mask, lnbitsize))
5535 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5536
5537 return build2_loc (loc, wanted_code, truth_type, result,
5538 const_binop (BIT_IOR_EXPR, l_const, r_const));
5539 }
5540 \f
5541 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5542 constant. */
5543
5544 static tree
5545 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5546 tree op0, tree op1)
5547 {
5548 tree arg0 = op0;
5549 enum tree_code op_code;
5550 tree comp_const;
5551 tree minmax_const;
5552 int consts_equal, consts_lt;
5553 tree inner;
5554
5555 STRIP_SIGN_NOPS (arg0);
5556
5557 op_code = TREE_CODE (arg0);
5558 minmax_const = TREE_OPERAND (arg0, 1);
5559 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5560 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5561 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5562 inner = TREE_OPERAND (arg0, 0);
5563
5564 /* If something does not permit us to optimize, return the original tree. */
5565 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5566 || TREE_CODE (comp_const) != INTEGER_CST
5567 || TREE_OVERFLOW (comp_const)
5568 || TREE_CODE (minmax_const) != INTEGER_CST
5569 || TREE_OVERFLOW (minmax_const))
5570 return NULL_TREE;
5571
5572 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5573 and GT_EXPR, doing the rest with recursive calls using logical
5574 simplifications. */
5575 switch (code)
5576 {
5577 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5578 {
5579 tree tem
5580 = optimize_minmax_comparison (loc,
5581 invert_tree_comparison (code, false),
5582 type, op0, op1);
5583 if (tem)
5584 return invert_truthvalue_loc (loc, tem);
5585 return NULL_TREE;
5586 }
5587
5588 case GE_EXPR:
5589 return
5590 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5591 optimize_minmax_comparison
5592 (loc, EQ_EXPR, type, arg0, comp_const),
5593 optimize_minmax_comparison
5594 (loc, GT_EXPR, type, arg0, comp_const));
5595
5596 case EQ_EXPR:
5597 if (op_code == MAX_EXPR && consts_equal)
5598 /* MAX (X, 0) == 0 -> X <= 0 */
5599 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5600
5601 else if (op_code == MAX_EXPR && consts_lt)
5602 /* MAX (X, 0) == 5 -> X == 5 */
5603 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5604
5605 else if (op_code == MAX_EXPR)
5606 /* MAX (X, 0) == -1 -> false */
5607 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5608
5609 else if (consts_equal)
5610 /* MIN (X, 0) == 0 -> X >= 0 */
5611 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5612
5613 else if (consts_lt)
5614 /* MIN (X, 0) == 5 -> false */
5615 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5616
5617 else
5618 /* MIN (X, 0) == -1 -> X == -1 */
5619 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5620
5621 case GT_EXPR:
5622 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5623 /* MAX (X, 0) > 0 -> X > 0
5624 MAX (X, 0) > 5 -> X > 5 */
5625 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5626
5627 else if (op_code == MAX_EXPR)
5628 /* MAX (X, 0) > -1 -> true */
5629 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5630
5631 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5632 /* MIN (X, 0) > 0 -> false
5633 MIN (X, 0) > 5 -> false */
5634 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5635
5636 else
5637 /* MIN (X, 0) > -1 -> X > -1 */
5638 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5639
5640 default:
5641 return NULL_TREE;
5642 }
5643 }
5644 \f
5645 /* T is an integer expression that is being multiplied, divided, or taken a
5646 modulus (CODE says which and what kind of divide or modulus) by a
5647 constant C. See if we can eliminate that operation by folding it with
5648 other operations already in T. WIDE_TYPE, if non-null, is a type that
5649 should be used for the computation if wider than our type.
5650
5651 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5652 (X * 2) + (Y * 4). We must, however, be assured that either the original
5653 expression would not overflow or that overflow is undefined for the type
5654 in the language in question.
5655
5656 If we return a non-null expression, it is an equivalent form of the
5657 original computation, but need not be in the original type.
5658
5659 We set *STRICT_OVERFLOW_P to true if the return values depends on
5660 signed overflow being undefined. Otherwise we do not change
5661 *STRICT_OVERFLOW_P. */
5662
5663 static tree
5664 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5665 bool *strict_overflow_p)
5666 {
5667 /* To avoid exponential search depth, refuse to allow recursion past
5668 three levels. Beyond that (1) it's highly unlikely that we'll find
5669 something interesting and (2) we've probably processed it before
5670 when we built the inner expression. */
5671
5672 static int depth;
5673 tree ret;
5674
5675 if (depth > 3)
5676 return NULL;
5677
5678 depth++;
5679 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5680 depth--;
5681
5682 return ret;
5683 }
5684
5685 static tree
5686 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5687 bool *strict_overflow_p)
5688 {
5689 tree type = TREE_TYPE (t);
5690 enum tree_code tcode = TREE_CODE (t);
5691 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5692 > GET_MODE_SIZE (TYPE_MODE (type)))
5693 ? wide_type : type);
5694 tree t1, t2;
5695 int same_p = tcode == code;
5696 tree op0 = NULL_TREE, op1 = NULL_TREE;
5697 bool sub_strict_overflow_p;
5698
5699 /* Don't deal with constants of zero here; they confuse the code below. */
5700 if (integer_zerop (c))
5701 return NULL_TREE;
5702
5703 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5704 op0 = TREE_OPERAND (t, 0);
5705
5706 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5707 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5708
5709 /* Note that we need not handle conditional operations here since fold
5710 already handles those cases. So just do arithmetic here. */
5711 switch (tcode)
5712 {
5713 case INTEGER_CST:
5714 /* For a constant, we can always simplify if we are a multiply
5715 or (for divide and modulus) if it is a multiple of our constant. */
5716 if (code == MULT_EXPR
5717 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5718 return const_binop (code, fold_convert (ctype, t),
5719 fold_convert (ctype, c));
5720 break;
5721
5722 CASE_CONVERT: case NON_LVALUE_EXPR:
5723 /* If op0 is an expression ... */
5724 if ((COMPARISON_CLASS_P (op0)
5725 || UNARY_CLASS_P (op0)
5726 || BINARY_CLASS_P (op0)
5727 || VL_EXP_CLASS_P (op0)
5728 || EXPRESSION_CLASS_P (op0))
5729 /* ... and has wrapping overflow, and its type is smaller
5730 than ctype, then we cannot pass through as widening. */
5731 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5732 && (TYPE_PRECISION (ctype)
5733 > TYPE_PRECISION (TREE_TYPE (op0))))
5734 /* ... or this is a truncation (t is narrower than op0),
5735 then we cannot pass through this narrowing. */
5736 || (TYPE_PRECISION (type)
5737 < TYPE_PRECISION (TREE_TYPE (op0)))
5738 /* ... or signedness changes for division or modulus,
5739 then we cannot pass through this conversion. */
5740 || (code != MULT_EXPR
5741 && (TYPE_UNSIGNED (ctype)
5742 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5743 /* ... or has undefined overflow while the converted to
5744 type has not, we cannot do the operation in the inner type
5745 as that would introduce undefined overflow. */
5746 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5747 && !TYPE_OVERFLOW_UNDEFINED (type))))
5748 break;
5749
5750 /* Pass the constant down and see if we can make a simplification. If
5751 we can, replace this expression with the inner simplification for
5752 possible later conversion to our or some other type. */
5753 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5754 && TREE_CODE (t2) == INTEGER_CST
5755 && !TREE_OVERFLOW (t2)
5756 && (0 != (t1 = extract_muldiv (op0, t2, code,
5757 code == MULT_EXPR
5758 ? ctype : NULL_TREE,
5759 strict_overflow_p))))
5760 return t1;
5761 break;
5762
5763 case ABS_EXPR:
5764 /* If widening the type changes it from signed to unsigned, then we
5765 must avoid building ABS_EXPR itself as unsigned. */
5766 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5767 {
5768 tree cstype = (*signed_type_for) (ctype);
5769 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5770 != 0)
5771 {
5772 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5773 return fold_convert (ctype, t1);
5774 }
5775 break;
5776 }
5777 /* If the constant is negative, we cannot simplify this. */
5778 if (tree_int_cst_sgn (c) == -1)
5779 break;
5780 /* FALLTHROUGH */
5781 case NEGATE_EXPR:
5782 /* For division and modulus, type can't be unsigned, as e.g.
5783 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5784 For signed types, even with wrapping overflow, this is fine. */
5785 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5786 break;
5787 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5788 != 0)
5789 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5790 break;
5791
5792 case MIN_EXPR: case MAX_EXPR:
5793 /* If widening the type changes the signedness, then we can't perform
5794 this optimization as that changes the result. */
5795 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5796 break;
5797
5798 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5799 sub_strict_overflow_p = false;
5800 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5801 &sub_strict_overflow_p)) != 0
5802 && (t2 = extract_muldiv (op1, c, code, wide_type,
5803 &sub_strict_overflow_p)) != 0)
5804 {
5805 if (tree_int_cst_sgn (c) < 0)
5806 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5807 if (sub_strict_overflow_p)
5808 *strict_overflow_p = true;
5809 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5810 fold_convert (ctype, t2));
5811 }
5812 break;
5813
5814 case LSHIFT_EXPR: case RSHIFT_EXPR:
5815 /* If the second operand is constant, this is a multiplication
5816 or floor division, by a power of two, so we can treat it that
5817 way unless the multiplier or divisor overflows. Signed
5818 left-shift overflow is implementation-defined rather than
5819 undefined in C90, so do not convert signed left shift into
5820 multiplication. */
5821 if (TREE_CODE (op1) == INTEGER_CST
5822 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5823 /* const_binop may not detect overflow correctly,
5824 so check for it explicitly here. */
5825 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5826 && 0 != (t1 = fold_convert (ctype,
5827 const_binop (LSHIFT_EXPR,
5828 size_one_node,
5829 op1)))
5830 && !TREE_OVERFLOW (t1))
5831 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5832 ? MULT_EXPR : FLOOR_DIV_EXPR,
5833 ctype,
5834 fold_convert (ctype, op0),
5835 t1),
5836 c, code, wide_type, strict_overflow_p);
5837 break;
5838
5839 case PLUS_EXPR: case MINUS_EXPR:
5840 /* See if we can eliminate the operation on both sides. If we can, we
5841 can return a new PLUS or MINUS. If we can't, the only remaining
5842 cases where we can do anything are if the second operand is a
5843 constant. */
5844 sub_strict_overflow_p = false;
5845 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5846 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5847 if (t1 != 0 && t2 != 0
5848 && (code == MULT_EXPR
5849 /* If not multiplication, we can only do this if both operands
5850 are divisible by c. */
5851 || (multiple_of_p (ctype, op0, c)
5852 && multiple_of_p (ctype, op1, c))))
5853 {
5854 if (sub_strict_overflow_p)
5855 *strict_overflow_p = true;
5856 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5857 fold_convert (ctype, t2));
5858 }
5859
5860 /* If this was a subtraction, negate OP1 and set it to be an addition.
5861 This simplifies the logic below. */
5862 if (tcode == MINUS_EXPR)
5863 {
5864 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5865 /* If OP1 was not easily negatable, the constant may be OP0. */
5866 if (TREE_CODE (op0) == INTEGER_CST)
5867 {
5868 tree tem = op0;
5869 op0 = op1;
5870 op1 = tem;
5871 tem = t1;
5872 t1 = t2;
5873 t2 = tem;
5874 }
5875 }
5876
5877 if (TREE_CODE (op1) != INTEGER_CST)
5878 break;
5879
5880 /* If either OP1 or C are negative, this optimization is not safe for
5881 some of the division and remainder types while for others we need
5882 to change the code. */
5883 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5884 {
5885 if (code == CEIL_DIV_EXPR)
5886 code = FLOOR_DIV_EXPR;
5887 else if (code == FLOOR_DIV_EXPR)
5888 code = CEIL_DIV_EXPR;
5889 else if (code != MULT_EXPR
5890 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5891 break;
5892 }
5893
5894 /* If it's a multiply or a division/modulus operation of a multiple
5895 of our constant, do the operation and verify it doesn't overflow. */
5896 if (code == MULT_EXPR
5897 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5898 {
5899 op1 = const_binop (code, fold_convert (ctype, op1),
5900 fold_convert (ctype, c));
5901 /* We allow the constant to overflow with wrapping semantics. */
5902 if (op1 == 0
5903 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5904 break;
5905 }
5906 else
5907 break;
5908
5909 /* If we have an unsigned type, we cannot widen the operation since it
5910 will change the result if the original computation overflowed. */
5911 if (TYPE_UNSIGNED (ctype) && ctype != type)
5912 break;
5913
5914 /* If we were able to eliminate our operation from the first side,
5915 apply our operation to the second side and reform the PLUS. */
5916 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5917 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5918
5919 /* The last case is if we are a multiply. In that case, we can
5920 apply the distributive law to commute the multiply and addition
5921 if the multiplication of the constants doesn't overflow
5922 and overflow is defined. With undefined overflow
5923 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5924 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5925 return fold_build2 (tcode, ctype,
5926 fold_build2 (code, ctype,
5927 fold_convert (ctype, op0),
5928 fold_convert (ctype, c)),
5929 op1);
5930
5931 break;
5932
5933 case MULT_EXPR:
5934 /* We have a special case here if we are doing something like
5935 (C * 8) % 4 since we know that's zero. */
5936 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5937 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5938 /* If the multiplication can overflow we cannot optimize this. */
5939 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5940 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5941 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5942 {
5943 *strict_overflow_p = true;
5944 return omit_one_operand (type, integer_zero_node, op0);
5945 }
5946
5947 /* ... fall through ... */
5948
5949 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5950 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5951 /* If we can extract our operation from the LHS, do so and return a
5952 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5953 do something only if the second operand is a constant. */
5954 if (same_p
5955 && (t1 = extract_muldiv (op0, c, code, wide_type,
5956 strict_overflow_p)) != 0)
5957 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5958 fold_convert (ctype, op1));
5959 else if (tcode == MULT_EXPR && code == MULT_EXPR
5960 && (t1 = extract_muldiv (op1, c, code, wide_type,
5961 strict_overflow_p)) != 0)
5962 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5963 fold_convert (ctype, t1));
5964 else if (TREE_CODE (op1) != INTEGER_CST)
5965 return 0;
5966
5967 /* If these are the same operation types, we can associate them
5968 assuming no overflow. */
5969 if (tcode == code)
5970 {
5971 bool overflow_p = false;
5972 bool overflow_mul_p;
5973 signop sign = TYPE_SIGN (ctype);
5974 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5975 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5976 if (overflow_mul_p
5977 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5978 overflow_p = true;
5979 if (!overflow_p)
5980 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5981 wide_int_to_tree (ctype, mul));
5982 }
5983
5984 /* If these operations "cancel" each other, we have the main
5985 optimizations of this pass, which occur when either constant is a
5986 multiple of the other, in which case we replace this with either an
5987 operation or CODE or TCODE.
5988
5989 If we have an unsigned type, we cannot do this since it will change
5990 the result if the original computation overflowed. */
5991 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5992 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5993 || (tcode == MULT_EXPR
5994 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5995 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5996 && code != MULT_EXPR)))
5997 {
5998 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5999 {
6000 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6001 *strict_overflow_p = true;
6002 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6003 fold_convert (ctype,
6004 const_binop (TRUNC_DIV_EXPR,
6005 op1, c)));
6006 }
6007 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6008 {
6009 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6010 *strict_overflow_p = true;
6011 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6012 fold_convert (ctype,
6013 const_binop (TRUNC_DIV_EXPR,
6014 c, op1)));
6015 }
6016 }
6017 break;
6018
6019 default:
6020 break;
6021 }
6022
6023 return 0;
6024 }
6025 \f
6026 /* Return a node which has the indicated constant VALUE (either 0 or
6027 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6028 and is of the indicated TYPE. */
6029
6030 tree
6031 constant_boolean_node (bool value, tree type)
6032 {
6033 if (type == integer_type_node)
6034 return value ? integer_one_node : integer_zero_node;
6035 else if (type == boolean_type_node)
6036 return value ? boolean_true_node : boolean_false_node;
6037 else if (TREE_CODE (type) == VECTOR_TYPE)
6038 return build_vector_from_val (type,
6039 build_int_cst (TREE_TYPE (type),
6040 value ? -1 : 0));
6041 else
6042 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6043 }
6044
6045
6046 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6047 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6048 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6049 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6050 COND is the first argument to CODE; otherwise (as in the example
6051 given here), it is the second argument. TYPE is the type of the
6052 original expression. Return NULL_TREE if no simplification is
6053 possible. */
6054
6055 static tree
6056 fold_binary_op_with_conditional_arg (location_t loc,
6057 enum tree_code code,
6058 tree type, tree op0, tree op1,
6059 tree cond, tree arg, int cond_first_p)
6060 {
6061 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6062 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6063 tree test, true_value, false_value;
6064 tree lhs = NULL_TREE;
6065 tree rhs = NULL_TREE;
6066 enum tree_code cond_code = COND_EXPR;
6067
6068 if (TREE_CODE (cond) == COND_EXPR
6069 || TREE_CODE (cond) == VEC_COND_EXPR)
6070 {
6071 test = TREE_OPERAND (cond, 0);
6072 true_value = TREE_OPERAND (cond, 1);
6073 false_value = TREE_OPERAND (cond, 2);
6074 /* If this operand throws an expression, then it does not make
6075 sense to try to perform a logical or arithmetic operation
6076 involving it. */
6077 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6078 lhs = true_value;
6079 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6080 rhs = false_value;
6081 }
6082 else
6083 {
6084 tree testtype = TREE_TYPE (cond);
6085 test = cond;
6086 true_value = constant_boolean_node (true, testtype);
6087 false_value = constant_boolean_node (false, testtype);
6088 }
6089
6090 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6091 cond_code = VEC_COND_EXPR;
6092
6093 /* This transformation is only worthwhile if we don't have to wrap ARG
6094 in a SAVE_EXPR and the operation can be simplified without recursing
6095 on at least one of the branches once its pushed inside the COND_EXPR. */
6096 if (!TREE_CONSTANT (arg)
6097 && (TREE_SIDE_EFFECTS (arg)
6098 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6099 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6100 return NULL_TREE;
6101
6102 arg = fold_convert_loc (loc, arg_type, arg);
6103 if (lhs == 0)
6104 {
6105 true_value = fold_convert_loc (loc, cond_type, true_value);
6106 if (cond_first_p)
6107 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6108 else
6109 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6110 }
6111 if (rhs == 0)
6112 {
6113 false_value = fold_convert_loc (loc, cond_type, false_value);
6114 if (cond_first_p)
6115 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6116 else
6117 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6118 }
6119
6120 /* Check that we have simplified at least one of the branches. */
6121 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6122 return NULL_TREE;
6123
6124 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6125 }
6126
6127 \f
6128 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6129
6130 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6131 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6132 ADDEND is the same as X.
6133
6134 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6135 and finite. The problematic cases are when X is zero, and its mode
6136 has signed zeros. In the case of rounding towards -infinity,
6137 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6138 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6139
6140 bool
6141 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6142 {
6143 if (!real_zerop (addend))
6144 return false;
6145
6146 /* Don't allow the fold with -fsignaling-nans. */
6147 if (HONOR_SNANS (TYPE_MODE (type)))
6148 return false;
6149
6150 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6151 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6152 return true;
6153
6154 /* In a vector or complex, we would need to check the sign of all zeros. */
6155 if (TREE_CODE (addend) != REAL_CST)
6156 return false;
6157
6158 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6159 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6160 negate = !negate;
6161
6162 /* The mode has signed zeros, and we have to honor their sign.
6163 In this situation, there is only one case we can return true for.
6164 X - 0 is the same as X unless rounding towards -infinity is
6165 supported. */
6166 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6167 }
6168
6169 /* Subroutine of fold() that checks comparisons of built-in math
6170 functions against real constants.
6171
6172 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6173 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6174 is the type of the result and ARG0 and ARG1 are the operands of the
6175 comparison. ARG1 must be a TREE_REAL_CST.
6176
6177 The function returns the constant folded tree if a simplification
6178 can be made, and NULL_TREE otherwise. */
6179
6180 static tree
6181 fold_mathfn_compare (location_t loc,
6182 enum built_in_function fcode, enum tree_code code,
6183 tree type, tree arg0, tree arg1)
6184 {
6185 REAL_VALUE_TYPE c;
6186
6187 if (BUILTIN_SQRT_P (fcode))
6188 {
6189 tree arg = CALL_EXPR_ARG (arg0, 0);
6190 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6191
6192 c = TREE_REAL_CST (arg1);
6193 if (REAL_VALUE_NEGATIVE (c))
6194 {
6195 /* sqrt(x) < y is always false, if y is negative. */
6196 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6197 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6198
6199 /* sqrt(x) > y is always true, if y is negative and we
6200 don't care about NaNs, i.e. negative values of x. */
6201 if (code == NE_EXPR || !HONOR_NANS (mode))
6202 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6203
6204 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6205 return fold_build2_loc (loc, GE_EXPR, type, arg,
6206 build_real (TREE_TYPE (arg), dconst0));
6207 }
6208 else if (code == GT_EXPR || code == GE_EXPR)
6209 {
6210 REAL_VALUE_TYPE c2;
6211
6212 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6213 real_convert (&c2, mode, &c2);
6214
6215 if (REAL_VALUE_ISINF (c2))
6216 {
6217 /* sqrt(x) > y is x == +Inf, when y is very large. */
6218 if (HONOR_INFINITIES (mode))
6219 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6220 build_real (TREE_TYPE (arg), c2));
6221
6222 /* sqrt(x) > y is always false, when y is very large
6223 and we don't care about infinities. */
6224 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6225 }
6226
6227 /* sqrt(x) > c is the same as x > c*c. */
6228 return fold_build2_loc (loc, code, type, arg,
6229 build_real (TREE_TYPE (arg), c2));
6230 }
6231 else if (code == LT_EXPR || code == LE_EXPR)
6232 {
6233 REAL_VALUE_TYPE c2;
6234
6235 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6236 real_convert (&c2, mode, &c2);
6237
6238 if (REAL_VALUE_ISINF (c2))
6239 {
6240 /* sqrt(x) < y is always true, when y is a very large
6241 value and we don't care about NaNs or Infinities. */
6242 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6243 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6244
6245 /* sqrt(x) < y is x != +Inf when y is very large and we
6246 don't care about NaNs. */
6247 if (! HONOR_NANS (mode))
6248 return fold_build2_loc (loc, NE_EXPR, type, arg,
6249 build_real (TREE_TYPE (arg), c2));
6250
6251 /* sqrt(x) < y is x >= 0 when y is very large and we
6252 don't care about Infinities. */
6253 if (! HONOR_INFINITIES (mode))
6254 return fold_build2_loc (loc, GE_EXPR, type, arg,
6255 build_real (TREE_TYPE (arg), dconst0));
6256
6257 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6258 arg = save_expr (arg);
6259 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6260 fold_build2_loc (loc, GE_EXPR, type, arg,
6261 build_real (TREE_TYPE (arg),
6262 dconst0)),
6263 fold_build2_loc (loc, NE_EXPR, type, arg,
6264 build_real (TREE_TYPE (arg),
6265 c2)));
6266 }
6267
6268 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6269 if (! HONOR_NANS (mode))
6270 return fold_build2_loc (loc, code, type, arg,
6271 build_real (TREE_TYPE (arg), c2));
6272
6273 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6274 arg = save_expr (arg);
6275 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6276 fold_build2_loc (loc, GE_EXPR, type, arg,
6277 build_real (TREE_TYPE (arg),
6278 dconst0)),
6279 fold_build2_loc (loc, code, type, arg,
6280 build_real (TREE_TYPE (arg),
6281 c2)));
6282 }
6283 }
6284
6285 return NULL_TREE;
6286 }
6287
6288 /* Subroutine of fold() that optimizes comparisons against Infinities,
6289 either +Inf or -Inf.
6290
6291 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6292 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6293 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6294
6295 The function returns the constant folded tree if a simplification
6296 can be made, and NULL_TREE otherwise. */
6297
6298 static tree
6299 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6300 tree arg0, tree arg1)
6301 {
6302 machine_mode mode;
6303 REAL_VALUE_TYPE max;
6304 tree temp;
6305 bool neg;
6306
6307 mode = TYPE_MODE (TREE_TYPE (arg0));
6308
6309 /* For negative infinity swap the sense of the comparison. */
6310 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6311 if (neg)
6312 code = swap_tree_comparison (code);
6313
6314 switch (code)
6315 {
6316 case GT_EXPR:
6317 /* x > +Inf is always false, if with ignore sNANs. */
6318 if (HONOR_SNANS (mode))
6319 return NULL_TREE;
6320 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6321
6322 case LE_EXPR:
6323 /* x <= +Inf is always true, if we don't case about NaNs. */
6324 if (! HONOR_NANS (mode))
6325 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6326
6327 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6328 arg0 = save_expr (arg0);
6329 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6330
6331 case EQ_EXPR:
6332 case GE_EXPR:
6333 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6334 real_maxval (&max, neg, mode);
6335 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6336 arg0, build_real (TREE_TYPE (arg0), max));
6337
6338 case LT_EXPR:
6339 /* x < +Inf is always equal to x <= DBL_MAX. */
6340 real_maxval (&max, neg, mode);
6341 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6343
6344 case NE_EXPR:
6345 /* x != +Inf is always equal to !(x > DBL_MAX). */
6346 real_maxval (&max, neg, mode);
6347 if (! HONOR_NANS (mode))
6348 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6349 arg0, build_real (TREE_TYPE (arg0), max));
6350
6351 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6352 arg0, build_real (TREE_TYPE (arg0), max));
6353 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6354
6355 default:
6356 break;
6357 }
6358
6359 return NULL_TREE;
6360 }
6361
6362 /* Subroutine of fold() that optimizes comparisons of a division by
6363 a nonzero integer constant against an integer constant, i.e.
6364 X/C1 op C2.
6365
6366 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6367 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6368 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6369
6370 The function returns the constant folded tree if a simplification
6371 can be made, and NULL_TREE otherwise. */
6372
6373 static tree
6374 fold_div_compare (location_t loc,
6375 enum tree_code code, tree type, tree arg0, tree arg1)
6376 {
6377 tree prod, tmp, hi, lo;
6378 tree arg00 = TREE_OPERAND (arg0, 0);
6379 tree arg01 = TREE_OPERAND (arg0, 1);
6380 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6381 bool neg_overflow = false;
6382 bool overflow;
6383
6384 /* We have to do this the hard way to detect unsigned overflow.
6385 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6386 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6387 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6388 neg_overflow = false;
6389
6390 if (sign == UNSIGNED)
6391 {
6392 tmp = int_const_binop (MINUS_EXPR, arg01,
6393 build_int_cst (TREE_TYPE (arg01), 1));
6394 lo = prod;
6395
6396 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6397 val = wi::add (prod, tmp, sign, &overflow);
6398 hi = force_fit_type (TREE_TYPE (arg00), val,
6399 -1, overflow | TREE_OVERFLOW (prod));
6400 }
6401 else if (tree_int_cst_sgn (arg01) >= 0)
6402 {
6403 tmp = int_const_binop (MINUS_EXPR, arg01,
6404 build_int_cst (TREE_TYPE (arg01), 1));
6405 switch (tree_int_cst_sgn (arg1))
6406 {
6407 case -1:
6408 neg_overflow = true;
6409 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6410 hi = prod;
6411 break;
6412
6413 case 0:
6414 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6415 hi = tmp;
6416 break;
6417
6418 case 1:
6419 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6420 lo = prod;
6421 break;
6422
6423 default:
6424 gcc_unreachable ();
6425 }
6426 }
6427 else
6428 {
6429 /* A negative divisor reverses the relational operators. */
6430 code = swap_tree_comparison (code);
6431
6432 tmp = int_const_binop (PLUS_EXPR, arg01,
6433 build_int_cst (TREE_TYPE (arg01), 1));
6434 switch (tree_int_cst_sgn (arg1))
6435 {
6436 case -1:
6437 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6438 lo = prod;
6439 break;
6440
6441 case 0:
6442 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6443 lo = tmp;
6444 break;
6445
6446 case 1:
6447 neg_overflow = true;
6448 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6449 hi = prod;
6450 break;
6451
6452 default:
6453 gcc_unreachable ();
6454 }
6455 }
6456
6457 switch (code)
6458 {
6459 case EQ_EXPR:
6460 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6461 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6462 if (TREE_OVERFLOW (hi))
6463 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6464 if (TREE_OVERFLOW (lo))
6465 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6466 return build_range_check (loc, type, arg00, 1, lo, hi);
6467
6468 case NE_EXPR:
6469 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6470 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6471 if (TREE_OVERFLOW (hi))
6472 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6473 if (TREE_OVERFLOW (lo))
6474 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6475 return build_range_check (loc, type, arg00, 0, lo, hi);
6476
6477 case LT_EXPR:
6478 if (TREE_OVERFLOW (lo))
6479 {
6480 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6481 return omit_one_operand_loc (loc, type, tmp, arg00);
6482 }
6483 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6484
6485 case LE_EXPR:
6486 if (TREE_OVERFLOW (hi))
6487 {
6488 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6489 return omit_one_operand_loc (loc, type, tmp, arg00);
6490 }
6491 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6492
6493 case GT_EXPR:
6494 if (TREE_OVERFLOW (hi))
6495 {
6496 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6497 return omit_one_operand_loc (loc, type, tmp, arg00);
6498 }
6499 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6500
6501 case GE_EXPR:
6502 if (TREE_OVERFLOW (lo))
6503 {
6504 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6505 return omit_one_operand_loc (loc, type, tmp, arg00);
6506 }
6507 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6508
6509 default:
6510 break;
6511 }
6512
6513 return NULL_TREE;
6514 }
6515
6516
6517 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6518 equality/inequality test, then return a simplified form of the test
6519 using a sign testing. Otherwise return NULL. TYPE is the desired
6520 result type. */
6521
6522 static tree
6523 fold_single_bit_test_into_sign_test (location_t loc,
6524 enum tree_code code, tree arg0, tree arg1,
6525 tree result_type)
6526 {
6527 /* If this is testing a single bit, we can optimize the test. */
6528 if ((code == NE_EXPR || code == EQ_EXPR)
6529 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6530 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6531 {
6532 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6533 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6534 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6535
6536 if (arg00 != NULL_TREE
6537 /* This is only a win if casting to a signed type is cheap,
6538 i.e. when arg00's type is not a partial mode. */
6539 && TYPE_PRECISION (TREE_TYPE (arg00))
6540 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6541 {
6542 tree stype = signed_type_for (TREE_TYPE (arg00));
6543 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6544 result_type,
6545 fold_convert_loc (loc, stype, arg00),
6546 build_int_cst (stype, 0));
6547 }
6548 }
6549
6550 return NULL_TREE;
6551 }
6552
6553 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6554 equality/inequality test, then return a simplified form of
6555 the test using shifts and logical operations. Otherwise return
6556 NULL. TYPE is the desired result type. */
6557
6558 tree
6559 fold_single_bit_test (location_t loc, enum tree_code code,
6560 tree arg0, tree arg1, tree result_type)
6561 {
6562 /* If this is testing a single bit, we can optimize the test. */
6563 if ((code == NE_EXPR || code == EQ_EXPR)
6564 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6565 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6566 {
6567 tree inner = TREE_OPERAND (arg0, 0);
6568 tree type = TREE_TYPE (arg0);
6569 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6570 machine_mode operand_mode = TYPE_MODE (type);
6571 int ops_unsigned;
6572 tree signed_type, unsigned_type, intermediate_type;
6573 tree tem, one;
6574
6575 /* First, see if we can fold the single bit test into a sign-bit
6576 test. */
6577 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6578 result_type);
6579 if (tem)
6580 return tem;
6581
6582 /* Otherwise we have (A & C) != 0 where C is a single bit,
6583 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6584 Similarly for (A & C) == 0. */
6585
6586 /* If INNER is a right shift of a constant and it plus BITNUM does
6587 not overflow, adjust BITNUM and INNER. */
6588 if (TREE_CODE (inner) == RSHIFT_EXPR
6589 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6590 && bitnum < TYPE_PRECISION (type)
6591 && wi::ltu_p (TREE_OPERAND (inner, 1),
6592 TYPE_PRECISION (type) - bitnum))
6593 {
6594 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6595 inner = TREE_OPERAND (inner, 0);
6596 }
6597
6598 /* If we are going to be able to omit the AND below, we must do our
6599 operations as unsigned. If we must use the AND, we have a choice.
6600 Normally unsigned is faster, but for some machines signed is. */
6601 #ifdef LOAD_EXTEND_OP
6602 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6603 && !flag_syntax_only) ? 0 : 1;
6604 #else
6605 ops_unsigned = 1;
6606 #endif
6607
6608 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6609 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6610 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6611 inner = fold_convert_loc (loc, intermediate_type, inner);
6612
6613 if (bitnum != 0)
6614 inner = build2 (RSHIFT_EXPR, intermediate_type,
6615 inner, size_int (bitnum));
6616
6617 one = build_int_cst (intermediate_type, 1);
6618
6619 if (code == EQ_EXPR)
6620 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6621
6622 /* Put the AND last so it can combine with more things. */
6623 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6624
6625 /* Make sure to return the proper type. */
6626 inner = fold_convert_loc (loc, result_type, inner);
6627
6628 return inner;
6629 }
6630 return NULL_TREE;
6631 }
6632
6633 /* Check whether we are allowed to reorder operands arg0 and arg1,
6634 such that the evaluation of arg1 occurs before arg0. */
6635
6636 static bool
6637 reorder_operands_p (const_tree arg0, const_tree arg1)
6638 {
6639 if (! flag_evaluation_order)
6640 return true;
6641 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6642 return true;
6643 return ! TREE_SIDE_EFFECTS (arg0)
6644 && ! TREE_SIDE_EFFECTS (arg1);
6645 }
6646
6647 /* Test whether it is preferable two swap two operands, ARG0 and
6648 ARG1, for example because ARG0 is an integer constant and ARG1
6649 isn't. If REORDER is true, only recommend swapping if we can
6650 evaluate the operands in reverse order. */
6651
6652 bool
6653 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6654 {
6655 if (CONSTANT_CLASS_P (arg1))
6656 return 0;
6657 if (CONSTANT_CLASS_P (arg0))
6658 return 1;
6659
6660 STRIP_SIGN_NOPS (arg0);
6661 STRIP_SIGN_NOPS (arg1);
6662
6663 if (TREE_CONSTANT (arg1))
6664 return 0;
6665 if (TREE_CONSTANT (arg0))
6666 return 1;
6667
6668 if (reorder && flag_evaluation_order
6669 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6670 return 0;
6671
6672 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6673 for commutative and comparison operators. Ensuring a canonical
6674 form allows the optimizers to find additional redundancies without
6675 having to explicitly check for both orderings. */
6676 if (TREE_CODE (arg0) == SSA_NAME
6677 && TREE_CODE (arg1) == SSA_NAME
6678 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6679 return 1;
6680
6681 /* Put SSA_NAMEs last. */
6682 if (TREE_CODE (arg1) == SSA_NAME)
6683 return 0;
6684 if (TREE_CODE (arg0) == SSA_NAME)
6685 return 1;
6686
6687 /* Put variables last. */
6688 if (DECL_P (arg1))
6689 return 0;
6690 if (DECL_P (arg0))
6691 return 1;
6692
6693 return 0;
6694 }
6695
6696 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6697 ARG0 is extended to a wider type. */
6698
6699 static tree
6700 fold_widened_comparison (location_t loc, enum tree_code code,
6701 tree type, tree arg0, tree arg1)
6702 {
6703 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6704 tree arg1_unw;
6705 tree shorter_type, outer_type;
6706 tree min, max;
6707 bool above, below;
6708
6709 if (arg0_unw == arg0)
6710 return NULL_TREE;
6711 shorter_type = TREE_TYPE (arg0_unw);
6712
6713 #ifdef HAVE_canonicalize_funcptr_for_compare
6714 /* Disable this optimization if we're casting a function pointer
6715 type on targets that require function pointer canonicalization. */
6716 if (HAVE_canonicalize_funcptr_for_compare
6717 && TREE_CODE (shorter_type) == POINTER_TYPE
6718 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6719 return NULL_TREE;
6720 #endif
6721
6722 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6723 return NULL_TREE;
6724
6725 arg1_unw = get_unwidened (arg1, NULL_TREE);
6726
6727 /* If possible, express the comparison in the shorter mode. */
6728 if ((code == EQ_EXPR || code == NE_EXPR
6729 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6730 && (TREE_TYPE (arg1_unw) == shorter_type
6731 || ((TYPE_PRECISION (shorter_type)
6732 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6733 && (TYPE_UNSIGNED (shorter_type)
6734 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6735 || (TREE_CODE (arg1_unw) == INTEGER_CST
6736 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6737 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6738 && int_fits_type_p (arg1_unw, shorter_type))))
6739 return fold_build2_loc (loc, code, type, arg0_unw,
6740 fold_convert_loc (loc, shorter_type, arg1_unw));
6741
6742 if (TREE_CODE (arg1_unw) != INTEGER_CST
6743 || TREE_CODE (shorter_type) != INTEGER_TYPE
6744 || !int_fits_type_p (arg1_unw, shorter_type))
6745 return NULL_TREE;
6746
6747 /* If we are comparing with the integer that does not fit into the range
6748 of the shorter type, the result is known. */
6749 outer_type = TREE_TYPE (arg1_unw);
6750 min = lower_bound_in_type (outer_type, shorter_type);
6751 max = upper_bound_in_type (outer_type, shorter_type);
6752
6753 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6754 max, arg1_unw));
6755 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6756 arg1_unw, min));
6757
6758 switch (code)
6759 {
6760 case EQ_EXPR:
6761 if (above || below)
6762 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6763 break;
6764
6765 case NE_EXPR:
6766 if (above || below)
6767 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6768 break;
6769
6770 case LT_EXPR:
6771 case LE_EXPR:
6772 if (above)
6773 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6774 else if (below)
6775 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6776
6777 case GT_EXPR:
6778 case GE_EXPR:
6779 if (above)
6780 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6781 else if (below)
6782 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6783
6784 default:
6785 break;
6786 }
6787
6788 return NULL_TREE;
6789 }
6790
6791 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6792 ARG0 just the signedness is changed. */
6793
6794 static tree
6795 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6796 tree arg0, tree arg1)
6797 {
6798 tree arg0_inner;
6799 tree inner_type, outer_type;
6800
6801 if (!CONVERT_EXPR_P (arg0))
6802 return NULL_TREE;
6803
6804 outer_type = TREE_TYPE (arg0);
6805 arg0_inner = TREE_OPERAND (arg0, 0);
6806 inner_type = TREE_TYPE (arg0_inner);
6807
6808 #ifdef HAVE_canonicalize_funcptr_for_compare
6809 /* Disable this optimization if we're casting a function pointer
6810 type on targets that require function pointer canonicalization. */
6811 if (HAVE_canonicalize_funcptr_for_compare
6812 && TREE_CODE (inner_type) == POINTER_TYPE
6813 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6814 return NULL_TREE;
6815 #endif
6816
6817 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6818 return NULL_TREE;
6819
6820 if (TREE_CODE (arg1) != INTEGER_CST
6821 && !(CONVERT_EXPR_P (arg1)
6822 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6823 return NULL_TREE;
6824
6825 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6826 && code != NE_EXPR
6827 && code != EQ_EXPR)
6828 return NULL_TREE;
6829
6830 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6831 return NULL_TREE;
6832
6833 if (TREE_CODE (arg1) == INTEGER_CST)
6834 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6835 TREE_OVERFLOW (arg1));
6836 else
6837 arg1 = fold_convert_loc (loc, inner_type, arg1);
6838
6839 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6840 }
6841
6842
6843 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6844 means A >= Y && A != MAX, but in this case we know that
6845 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6846
6847 static tree
6848 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6849 {
6850 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6851
6852 if (TREE_CODE (bound) == LT_EXPR)
6853 a = TREE_OPERAND (bound, 0);
6854 else if (TREE_CODE (bound) == GT_EXPR)
6855 a = TREE_OPERAND (bound, 1);
6856 else
6857 return NULL_TREE;
6858
6859 typea = TREE_TYPE (a);
6860 if (!INTEGRAL_TYPE_P (typea)
6861 && !POINTER_TYPE_P (typea))
6862 return NULL_TREE;
6863
6864 if (TREE_CODE (ineq) == LT_EXPR)
6865 {
6866 a1 = TREE_OPERAND (ineq, 1);
6867 y = TREE_OPERAND (ineq, 0);
6868 }
6869 else if (TREE_CODE (ineq) == GT_EXPR)
6870 {
6871 a1 = TREE_OPERAND (ineq, 0);
6872 y = TREE_OPERAND (ineq, 1);
6873 }
6874 else
6875 return NULL_TREE;
6876
6877 if (TREE_TYPE (a1) != typea)
6878 return NULL_TREE;
6879
6880 if (POINTER_TYPE_P (typea))
6881 {
6882 /* Convert the pointer types into integer before taking the difference. */
6883 tree ta = fold_convert_loc (loc, ssizetype, a);
6884 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6885 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6886 }
6887 else
6888 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6889
6890 if (!diff || !integer_onep (diff))
6891 return NULL_TREE;
6892
6893 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6894 }
6895
6896 /* Fold a sum or difference of at least one multiplication.
6897 Returns the folded tree or NULL if no simplification could be made. */
6898
6899 static tree
6900 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6901 tree arg0, tree arg1)
6902 {
6903 tree arg00, arg01, arg10, arg11;
6904 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6905
6906 /* (A * C) +- (B * C) -> (A+-B) * C.
6907 (A * C) +- A -> A * (C+-1).
6908 We are most concerned about the case where C is a constant,
6909 but other combinations show up during loop reduction. Since
6910 it is not difficult, try all four possibilities. */
6911
6912 if (TREE_CODE (arg0) == MULT_EXPR)
6913 {
6914 arg00 = TREE_OPERAND (arg0, 0);
6915 arg01 = TREE_OPERAND (arg0, 1);
6916 }
6917 else if (TREE_CODE (arg0) == INTEGER_CST)
6918 {
6919 arg00 = build_one_cst (type);
6920 arg01 = arg0;
6921 }
6922 else
6923 {
6924 /* We cannot generate constant 1 for fract. */
6925 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6926 return NULL_TREE;
6927 arg00 = arg0;
6928 arg01 = build_one_cst (type);
6929 }
6930 if (TREE_CODE (arg1) == MULT_EXPR)
6931 {
6932 arg10 = TREE_OPERAND (arg1, 0);
6933 arg11 = TREE_OPERAND (arg1, 1);
6934 }
6935 else if (TREE_CODE (arg1) == INTEGER_CST)
6936 {
6937 arg10 = build_one_cst (type);
6938 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6939 the purpose of this canonicalization. */
6940 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6941 && negate_expr_p (arg1)
6942 && code == PLUS_EXPR)
6943 {
6944 arg11 = negate_expr (arg1);
6945 code = MINUS_EXPR;
6946 }
6947 else
6948 arg11 = arg1;
6949 }
6950 else
6951 {
6952 /* We cannot generate constant 1 for fract. */
6953 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6954 return NULL_TREE;
6955 arg10 = arg1;
6956 arg11 = build_one_cst (type);
6957 }
6958 same = NULL_TREE;
6959
6960 if (operand_equal_p (arg01, arg11, 0))
6961 same = arg01, alt0 = arg00, alt1 = arg10;
6962 else if (operand_equal_p (arg00, arg10, 0))
6963 same = arg00, alt0 = arg01, alt1 = arg11;
6964 else if (operand_equal_p (arg00, arg11, 0))
6965 same = arg00, alt0 = arg01, alt1 = arg10;
6966 else if (operand_equal_p (arg01, arg10, 0))
6967 same = arg01, alt0 = arg00, alt1 = arg11;
6968
6969 /* No identical multiplicands; see if we can find a common
6970 power-of-two factor in non-power-of-two multiplies. This
6971 can help in multi-dimensional array access. */
6972 else if (tree_fits_shwi_p (arg01)
6973 && tree_fits_shwi_p (arg11))
6974 {
6975 HOST_WIDE_INT int01, int11, tmp;
6976 bool swap = false;
6977 tree maybe_same;
6978 int01 = tree_to_shwi (arg01);
6979 int11 = tree_to_shwi (arg11);
6980
6981 /* Move min of absolute values to int11. */
6982 if (absu_hwi (int01) < absu_hwi (int11))
6983 {
6984 tmp = int01, int01 = int11, int11 = tmp;
6985 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6986 maybe_same = arg01;
6987 swap = true;
6988 }
6989 else
6990 maybe_same = arg11;
6991
6992 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6993 /* The remainder should not be a constant, otherwise we
6994 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6995 increased the number of multiplications necessary. */
6996 && TREE_CODE (arg10) != INTEGER_CST)
6997 {
6998 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6999 build_int_cst (TREE_TYPE (arg00),
7000 int01 / int11));
7001 alt1 = arg10;
7002 same = maybe_same;
7003 if (swap)
7004 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7005 }
7006 }
7007
7008 if (same)
7009 return fold_build2_loc (loc, MULT_EXPR, type,
7010 fold_build2_loc (loc, code, type,
7011 fold_convert_loc (loc, type, alt0),
7012 fold_convert_loc (loc, type, alt1)),
7013 fold_convert_loc (loc, type, same));
7014
7015 return NULL_TREE;
7016 }
7017
7018 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7019 specified by EXPR into the buffer PTR of length LEN bytes.
7020 Return the number of bytes placed in the buffer, or zero
7021 upon failure. */
7022
7023 static int
7024 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7025 {
7026 tree type = TREE_TYPE (expr);
7027 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7028 int byte, offset, word, words;
7029 unsigned char value;
7030
7031 if ((off == -1 && total_bytes > len)
7032 || off >= total_bytes)
7033 return 0;
7034 if (off == -1)
7035 off = 0;
7036 words = total_bytes / UNITS_PER_WORD;
7037
7038 for (byte = 0; byte < total_bytes; byte++)
7039 {
7040 int bitpos = byte * BITS_PER_UNIT;
7041 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7042 number of bytes. */
7043 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7044
7045 if (total_bytes > UNITS_PER_WORD)
7046 {
7047 word = byte / UNITS_PER_WORD;
7048 if (WORDS_BIG_ENDIAN)
7049 word = (words - 1) - word;
7050 offset = word * UNITS_PER_WORD;
7051 if (BYTES_BIG_ENDIAN)
7052 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7053 else
7054 offset += byte % UNITS_PER_WORD;
7055 }
7056 else
7057 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7058 if (offset >= off
7059 && offset - off < len)
7060 ptr[offset - off] = value;
7061 }
7062 return MIN (len, total_bytes - off);
7063 }
7064
7065
7066 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7067 specified by EXPR into the buffer PTR of length LEN bytes.
7068 Return the number of bytes placed in the buffer, or zero
7069 upon failure. */
7070
7071 static int
7072 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7073 {
7074 tree type = TREE_TYPE (expr);
7075 machine_mode mode = TYPE_MODE (type);
7076 int total_bytes = GET_MODE_SIZE (mode);
7077 FIXED_VALUE_TYPE value;
7078 tree i_value, i_type;
7079
7080 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7081 return 0;
7082
7083 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7084
7085 if (NULL_TREE == i_type
7086 || TYPE_PRECISION (i_type) != total_bytes)
7087 return 0;
7088
7089 value = TREE_FIXED_CST (expr);
7090 i_value = double_int_to_tree (i_type, value.data);
7091
7092 return native_encode_int (i_value, ptr, len, off);
7093 }
7094
7095
7096 /* Subroutine of native_encode_expr. Encode the REAL_CST
7097 specified by EXPR into the buffer PTR of length LEN bytes.
7098 Return the number of bytes placed in the buffer, or zero
7099 upon failure. */
7100
7101 static int
7102 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7103 {
7104 tree type = TREE_TYPE (expr);
7105 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7106 int byte, offset, word, words, bitpos;
7107 unsigned char value;
7108
7109 /* There are always 32 bits in each long, no matter the size of
7110 the hosts long. We handle floating point representations with
7111 up to 192 bits. */
7112 long tmp[6];
7113
7114 if ((off == -1 && total_bytes > len)
7115 || off >= total_bytes)
7116 return 0;
7117 if (off == -1)
7118 off = 0;
7119 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7120
7121 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7122
7123 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7124 bitpos += BITS_PER_UNIT)
7125 {
7126 byte = (bitpos / BITS_PER_UNIT) & 3;
7127 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7128
7129 if (UNITS_PER_WORD < 4)
7130 {
7131 word = byte / UNITS_PER_WORD;
7132 if (WORDS_BIG_ENDIAN)
7133 word = (words - 1) - word;
7134 offset = word * UNITS_PER_WORD;
7135 if (BYTES_BIG_ENDIAN)
7136 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7137 else
7138 offset += byte % UNITS_PER_WORD;
7139 }
7140 else
7141 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7142 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7143 if (offset >= off
7144 && offset - off < len)
7145 ptr[offset - off] = value;
7146 }
7147 return MIN (len, total_bytes - off);
7148 }
7149
7150 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7151 specified by EXPR into the buffer PTR of length LEN bytes.
7152 Return the number of bytes placed in the buffer, or zero
7153 upon failure. */
7154
7155 static int
7156 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7157 {
7158 int rsize, isize;
7159 tree part;
7160
7161 part = TREE_REALPART (expr);
7162 rsize = native_encode_expr (part, ptr, len, off);
7163 if (off == -1
7164 && rsize == 0)
7165 return 0;
7166 part = TREE_IMAGPART (expr);
7167 if (off != -1)
7168 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7169 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7170 if (off == -1
7171 && isize != rsize)
7172 return 0;
7173 return rsize + isize;
7174 }
7175
7176
7177 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7178 specified by EXPR into the buffer PTR of length LEN bytes.
7179 Return the number of bytes placed in the buffer, or zero
7180 upon failure. */
7181
7182 static int
7183 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7184 {
7185 unsigned i, count;
7186 int size, offset;
7187 tree itype, elem;
7188
7189 offset = 0;
7190 count = VECTOR_CST_NELTS (expr);
7191 itype = TREE_TYPE (TREE_TYPE (expr));
7192 size = GET_MODE_SIZE (TYPE_MODE (itype));
7193 for (i = 0; i < count; i++)
7194 {
7195 if (off >= size)
7196 {
7197 off -= size;
7198 continue;
7199 }
7200 elem = VECTOR_CST_ELT (expr, i);
7201 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7202 if ((off == -1 && res != size)
7203 || res == 0)
7204 return 0;
7205 offset += res;
7206 if (offset >= len)
7207 return offset;
7208 if (off != -1)
7209 off = 0;
7210 }
7211 return offset;
7212 }
7213
7214
7215 /* Subroutine of native_encode_expr. Encode the STRING_CST
7216 specified by EXPR into the buffer PTR of length LEN bytes.
7217 Return the number of bytes placed in the buffer, or zero
7218 upon failure. */
7219
7220 static int
7221 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7222 {
7223 tree type = TREE_TYPE (expr);
7224 HOST_WIDE_INT total_bytes;
7225
7226 if (TREE_CODE (type) != ARRAY_TYPE
7227 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7228 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7229 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7230 return 0;
7231 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7232 if ((off == -1 && total_bytes > len)
7233 || off >= total_bytes)
7234 return 0;
7235 if (off == -1)
7236 off = 0;
7237 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7238 {
7239 int written = 0;
7240 if (off < TREE_STRING_LENGTH (expr))
7241 {
7242 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7243 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7244 }
7245 memset (ptr + written, 0,
7246 MIN (total_bytes - written, len - written));
7247 }
7248 else
7249 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7250 return MIN (total_bytes - off, len);
7251 }
7252
7253
7254 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7255 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7256 buffer PTR of length LEN bytes. If OFF is not -1 then start
7257 the encoding at byte offset OFF and encode at most LEN bytes.
7258 Return the number of bytes placed in the buffer, or zero upon failure. */
7259
7260 int
7261 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7262 {
7263 switch (TREE_CODE (expr))
7264 {
7265 case INTEGER_CST:
7266 return native_encode_int (expr, ptr, len, off);
7267
7268 case REAL_CST:
7269 return native_encode_real (expr, ptr, len, off);
7270
7271 case FIXED_CST:
7272 return native_encode_fixed (expr, ptr, len, off);
7273
7274 case COMPLEX_CST:
7275 return native_encode_complex (expr, ptr, len, off);
7276
7277 case VECTOR_CST:
7278 return native_encode_vector (expr, ptr, len, off);
7279
7280 case STRING_CST:
7281 return native_encode_string (expr, ptr, len, off);
7282
7283 default:
7284 return 0;
7285 }
7286 }
7287
7288
7289 /* Subroutine of native_interpret_expr. Interpret the contents of
7290 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7291 If the buffer cannot be interpreted, return NULL_TREE. */
7292
7293 static tree
7294 native_interpret_int (tree type, const unsigned char *ptr, int len)
7295 {
7296 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7297
7298 if (total_bytes > len
7299 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7300 return NULL_TREE;
7301
7302 wide_int result = wi::from_buffer (ptr, total_bytes);
7303
7304 return wide_int_to_tree (type, result);
7305 }
7306
7307
7308 /* Subroutine of native_interpret_expr. Interpret the contents of
7309 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7310 If the buffer cannot be interpreted, return NULL_TREE. */
7311
7312 static tree
7313 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7314 {
7315 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7316 double_int result;
7317 FIXED_VALUE_TYPE fixed_value;
7318
7319 if (total_bytes > len
7320 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7321 return NULL_TREE;
7322
7323 result = double_int::from_buffer (ptr, total_bytes);
7324 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7325
7326 return build_fixed (type, fixed_value);
7327 }
7328
7329
7330 /* Subroutine of native_interpret_expr. Interpret the contents of
7331 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7332 If the buffer cannot be interpreted, return NULL_TREE. */
7333
7334 static tree
7335 native_interpret_real (tree type, const unsigned char *ptr, int len)
7336 {
7337 machine_mode mode = TYPE_MODE (type);
7338 int total_bytes = GET_MODE_SIZE (mode);
7339 int byte, offset, word, words, bitpos;
7340 unsigned char value;
7341 /* There are always 32 bits in each long, no matter the size of
7342 the hosts long. We handle floating point representations with
7343 up to 192 bits. */
7344 REAL_VALUE_TYPE r;
7345 long tmp[6];
7346
7347 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7348 if (total_bytes > len || total_bytes > 24)
7349 return NULL_TREE;
7350 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7351
7352 memset (tmp, 0, sizeof (tmp));
7353 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7354 bitpos += BITS_PER_UNIT)
7355 {
7356 byte = (bitpos / BITS_PER_UNIT) & 3;
7357 if (UNITS_PER_WORD < 4)
7358 {
7359 word = byte / UNITS_PER_WORD;
7360 if (WORDS_BIG_ENDIAN)
7361 word = (words - 1) - word;
7362 offset = word * UNITS_PER_WORD;
7363 if (BYTES_BIG_ENDIAN)
7364 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7365 else
7366 offset += byte % UNITS_PER_WORD;
7367 }
7368 else
7369 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7370 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7371
7372 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7373 }
7374
7375 real_from_target (&r, tmp, mode);
7376 return build_real (type, r);
7377 }
7378
7379
7380 /* Subroutine of native_interpret_expr. Interpret the contents of
7381 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7382 If the buffer cannot be interpreted, return NULL_TREE. */
7383
7384 static tree
7385 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7386 {
7387 tree etype, rpart, ipart;
7388 int size;
7389
7390 etype = TREE_TYPE (type);
7391 size = GET_MODE_SIZE (TYPE_MODE (etype));
7392 if (size * 2 > len)
7393 return NULL_TREE;
7394 rpart = native_interpret_expr (etype, ptr, size);
7395 if (!rpart)
7396 return NULL_TREE;
7397 ipart = native_interpret_expr (etype, ptr+size, size);
7398 if (!ipart)
7399 return NULL_TREE;
7400 return build_complex (type, rpart, ipart);
7401 }
7402
7403
7404 /* Subroutine of native_interpret_expr. Interpret the contents of
7405 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7406 If the buffer cannot be interpreted, return NULL_TREE. */
7407
7408 static tree
7409 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7410 {
7411 tree etype, elem;
7412 int i, size, count;
7413 tree *elements;
7414
7415 etype = TREE_TYPE (type);
7416 size = GET_MODE_SIZE (TYPE_MODE (etype));
7417 count = TYPE_VECTOR_SUBPARTS (type);
7418 if (size * count > len)
7419 return NULL_TREE;
7420
7421 elements = XALLOCAVEC (tree, count);
7422 for (i = count - 1; i >= 0; i--)
7423 {
7424 elem = native_interpret_expr (etype, ptr+(i*size), size);
7425 if (!elem)
7426 return NULL_TREE;
7427 elements[i] = elem;
7428 }
7429 return build_vector (type, elements);
7430 }
7431
7432
7433 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a constant of type TYPE. For
7435 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7436 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7437 return NULL_TREE. */
7438
7439 tree
7440 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7441 {
7442 switch (TREE_CODE (type))
7443 {
7444 case INTEGER_TYPE:
7445 case ENUMERAL_TYPE:
7446 case BOOLEAN_TYPE:
7447 case POINTER_TYPE:
7448 case REFERENCE_TYPE:
7449 return native_interpret_int (type, ptr, len);
7450
7451 case REAL_TYPE:
7452 return native_interpret_real (type, ptr, len);
7453
7454 case FIXED_POINT_TYPE:
7455 return native_interpret_fixed (type, ptr, len);
7456
7457 case COMPLEX_TYPE:
7458 return native_interpret_complex (type, ptr, len);
7459
7460 case VECTOR_TYPE:
7461 return native_interpret_vector (type, ptr, len);
7462
7463 default:
7464 return NULL_TREE;
7465 }
7466 }
7467
7468 /* Returns true if we can interpret the contents of a native encoding
7469 as TYPE. */
7470
7471 static bool
7472 can_native_interpret_type_p (tree type)
7473 {
7474 switch (TREE_CODE (type))
7475 {
7476 case INTEGER_TYPE:
7477 case ENUMERAL_TYPE:
7478 case BOOLEAN_TYPE:
7479 case POINTER_TYPE:
7480 case REFERENCE_TYPE:
7481 case FIXED_POINT_TYPE:
7482 case REAL_TYPE:
7483 case COMPLEX_TYPE:
7484 case VECTOR_TYPE:
7485 return true;
7486 default:
7487 return false;
7488 }
7489 }
7490
7491 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7492 TYPE at compile-time. If we're unable to perform the conversion
7493 return NULL_TREE. */
7494
7495 static tree
7496 fold_view_convert_expr (tree type, tree expr)
7497 {
7498 /* We support up to 512-bit values (for V8DFmode). */
7499 unsigned char buffer[64];
7500 int len;
7501
7502 /* Check that the host and target are sane. */
7503 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7504 return NULL_TREE;
7505
7506 len = native_encode_expr (expr, buffer, sizeof (buffer));
7507 if (len == 0)
7508 return NULL_TREE;
7509
7510 return native_interpret_expr (type, buffer, len);
7511 }
7512
7513 /* Build an expression for the address of T. Folds away INDIRECT_REF
7514 to avoid confusing the gimplify process. */
7515
7516 tree
7517 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7518 {
7519 /* The size of the object is not relevant when talking about its address. */
7520 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7521 t = TREE_OPERAND (t, 0);
7522
7523 if (TREE_CODE (t) == INDIRECT_REF)
7524 {
7525 t = TREE_OPERAND (t, 0);
7526
7527 if (TREE_TYPE (t) != ptrtype)
7528 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7529 }
7530 else if (TREE_CODE (t) == MEM_REF
7531 && integer_zerop (TREE_OPERAND (t, 1)))
7532 return TREE_OPERAND (t, 0);
7533 else if (TREE_CODE (t) == MEM_REF
7534 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7535 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7536 TREE_OPERAND (t, 0),
7537 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7538 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7539 {
7540 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7541
7542 if (TREE_TYPE (t) != ptrtype)
7543 t = fold_convert_loc (loc, ptrtype, t);
7544 }
7545 else
7546 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7547
7548 return t;
7549 }
7550
7551 /* Build an expression for the address of T. */
7552
7553 tree
7554 build_fold_addr_expr_loc (location_t loc, tree t)
7555 {
7556 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7557
7558 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7559 }
7560
7561 static bool vec_cst_ctor_to_array (tree, tree *);
7562
7563 /* Fold a unary expression of code CODE and type TYPE with operand
7564 OP0. Return the folded expression if folding is successful.
7565 Otherwise, return NULL_TREE. */
7566
7567 tree
7568 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7569 {
7570 tree tem;
7571 tree arg0;
7572 enum tree_code_class kind = TREE_CODE_CLASS (code);
7573
7574 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7575 && TREE_CODE_LENGTH (code) == 1);
7576
7577 tem = generic_simplify (loc, code, type, op0);
7578 if (tem)
7579 return tem;
7580
7581 arg0 = op0;
7582 if (arg0)
7583 {
7584 if (CONVERT_EXPR_CODE_P (code)
7585 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7586 {
7587 /* Don't use STRIP_NOPS, because signedness of argument type
7588 matters. */
7589 STRIP_SIGN_NOPS (arg0);
7590 }
7591 else
7592 {
7593 /* Strip any conversions that don't change the mode. This
7594 is safe for every expression, except for a comparison
7595 expression because its signedness is derived from its
7596 operands.
7597
7598 Note that this is done as an internal manipulation within
7599 the constant folder, in order to find the simplest
7600 representation of the arguments so that their form can be
7601 studied. In any cases, the appropriate type conversions
7602 should be put back in the tree that will get out of the
7603 constant folder. */
7604 STRIP_NOPS (arg0);
7605 }
7606 }
7607
7608 if (TREE_CODE_CLASS (code) == tcc_unary)
7609 {
7610 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7611 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7612 fold_build1_loc (loc, code, type,
7613 fold_convert_loc (loc, TREE_TYPE (op0),
7614 TREE_OPERAND (arg0, 1))));
7615 else if (TREE_CODE (arg0) == COND_EXPR)
7616 {
7617 tree arg01 = TREE_OPERAND (arg0, 1);
7618 tree arg02 = TREE_OPERAND (arg0, 2);
7619 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7620 arg01 = fold_build1_loc (loc, code, type,
7621 fold_convert_loc (loc,
7622 TREE_TYPE (op0), arg01));
7623 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7624 arg02 = fold_build1_loc (loc, code, type,
7625 fold_convert_loc (loc,
7626 TREE_TYPE (op0), arg02));
7627 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7628 arg01, arg02);
7629
7630 /* If this was a conversion, and all we did was to move into
7631 inside the COND_EXPR, bring it back out. But leave it if
7632 it is a conversion from integer to integer and the
7633 result precision is no wider than a word since such a
7634 conversion is cheap and may be optimized away by combine,
7635 while it couldn't if it were outside the COND_EXPR. Then return
7636 so we don't get into an infinite recursion loop taking the
7637 conversion out and then back in. */
7638
7639 if ((CONVERT_EXPR_CODE_P (code)
7640 || code == NON_LVALUE_EXPR)
7641 && TREE_CODE (tem) == COND_EXPR
7642 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7643 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7644 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7645 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7646 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7647 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7648 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7649 && (INTEGRAL_TYPE_P
7650 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7651 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7652 || flag_syntax_only))
7653 tem = build1_loc (loc, code, type,
7654 build3 (COND_EXPR,
7655 TREE_TYPE (TREE_OPERAND
7656 (TREE_OPERAND (tem, 1), 0)),
7657 TREE_OPERAND (tem, 0),
7658 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7659 TREE_OPERAND (TREE_OPERAND (tem, 2),
7660 0)));
7661 return tem;
7662 }
7663 }
7664
7665 switch (code)
7666 {
7667 case PAREN_EXPR:
7668 /* Re-association barriers around constants and other re-association
7669 barriers can be removed. */
7670 if (CONSTANT_CLASS_P (op0)
7671 || TREE_CODE (op0) == PAREN_EXPR)
7672 return fold_convert_loc (loc, type, op0);
7673 return NULL_TREE;
7674
7675 case NON_LVALUE_EXPR:
7676 if (!maybe_lvalue_p (op0))
7677 return fold_convert_loc (loc, type, op0);
7678 return NULL_TREE;
7679
7680 CASE_CONVERT:
7681 case FLOAT_EXPR:
7682 case FIX_TRUNC_EXPR:
7683 if (TREE_TYPE (op0) == type)
7684 return op0;
7685
7686 if (COMPARISON_CLASS_P (op0))
7687 {
7688 /* If we have (type) (a CMP b) and type is an integral type, return
7689 new expression involving the new type. Canonicalize
7690 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7691 non-integral type.
7692 Do not fold the result as that would not simplify further, also
7693 folding again results in recursions. */
7694 if (TREE_CODE (type) == BOOLEAN_TYPE)
7695 return build2_loc (loc, TREE_CODE (op0), type,
7696 TREE_OPERAND (op0, 0),
7697 TREE_OPERAND (op0, 1));
7698 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7699 && TREE_CODE (type) != VECTOR_TYPE)
7700 return build3_loc (loc, COND_EXPR, type, op0,
7701 constant_boolean_node (true, type),
7702 constant_boolean_node (false, type));
7703 }
7704
7705 /* Handle cases of two conversions in a row. */
7706 if (CONVERT_EXPR_P (op0))
7707 {
7708 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7709 tree inter_type = TREE_TYPE (op0);
7710 int inside_int = INTEGRAL_TYPE_P (inside_type);
7711 int inside_ptr = POINTER_TYPE_P (inside_type);
7712 int inside_float = FLOAT_TYPE_P (inside_type);
7713 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7714 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7715 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7716 int inter_int = INTEGRAL_TYPE_P (inter_type);
7717 int inter_ptr = POINTER_TYPE_P (inter_type);
7718 int inter_float = FLOAT_TYPE_P (inter_type);
7719 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7720 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7721 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7722 int final_int = INTEGRAL_TYPE_P (type);
7723 int final_ptr = POINTER_TYPE_P (type);
7724 int final_float = FLOAT_TYPE_P (type);
7725 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7726 unsigned int final_prec = TYPE_PRECISION (type);
7727 int final_unsignedp = TYPE_UNSIGNED (type);
7728
7729 /* In addition to the cases of two conversions in a row
7730 handled below, if we are converting something to its own
7731 type via an object of identical or wider precision, neither
7732 conversion is needed. */
7733 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7734 && (((inter_int || inter_ptr) && final_int)
7735 || (inter_float && final_float))
7736 && inter_prec >= final_prec)
7737 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7738
7739 /* Likewise, if the intermediate and initial types are either both
7740 float or both integer, we don't need the middle conversion if the
7741 former is wider than the latter and doesn't change the signedness
7742 (for integers). Avoid this if the final type is a pointer since
7743 then we sometimes need the middle conversion. Likewise if the
7744 final type has a precision not equal to the size of its mode. */
7745 if (((inter_int && inside_int)
7746 || (inter_float && inside_float)
7747 || (inter_vec && inside_vec))
7748 && inter_prec >= inside_prec
7749 && (inter_float || inter_vec
7750 || inter_unsignedp == inside_unsignedp)
7751 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7752 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7753 && ! final_ptr
7754 && (! final_vec || inter_prec == inside_prec))
7755 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7756
7757 /* If we have a sign-extension of a zero-extended value, we can
7758 replace that by a single zero-extension. Likewise if the
7759 final conversion does not change precision we can drop the
7760 intermediate conversion. */
7761 if (inside_int && inter_int && final_int
7762 && ((inside_prec < inter_prec && inter_prec < final_prec
7763 && inside_unsignedp && !inter_unsignedp)
7764 || final_prec == inter_prec))
7765 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7766
7767 /* Two conversions in a row are not needed unless:
7768 - some conversion is floating-point (overstrict for now), or
7769 - some conversion is a vector (overstrict for now), or
7770 - the intermediate type is narrower than both initial and
7771 final, or
7772 - the intermediate type and innermost type differ in signedness,
7773 and the outermost type is wider than the intermediate, or
7774 - the initial type is a pointer type and the precisions of the
7775 intermediate and final types differ, or
7776 - the final type is a pointer type and the precisions of the
7777 initial and intermediate types differ. */
7778 if (! inside_float && ! inter_float && ! final_float
7779 && ! inside_vec && ! inter_vec && ! final_vec
7780 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7781 && ! (inside_int && inter_int
7782 && inter_unsignedp != inside_unsignedp
7783 && inter_prec < final_prec)
7784 && ((inter_unsignedp && inter_prec > inside_prec)
7785 == (final_unsignedp && final_prec > inter_prec))
7786 && ! (inside_ptr && inter_prec != final_prec)
7787 && ! (final_ptr && inside_prec != inter_prec)
7788 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7789 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7790 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7791 }
7792
7793 /* Handle (T *)&A.B.C for A being of type T and B and C
7794 living at offset zero. This occurs frequently in
7795 C++ upcasting and then accessing the base. */
7796 if (TREE_CODE (op0) == ADDR_EXPR
7797 && POINTER_TYPE_P (type)
7798 && handled_component_p (TREE_OPERAND (op0, 0)))
7799 {
7800 HOST_WIDE_INT bitsize, bitpos;
7801 tree offset;
7802 machine_mode mode;
7803 int unsignedp, volatilep;
7804 tree base = TREE_OPERAND (op0, 0);
7805 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7806 &mode, &unsignedp, &volatilep, false);
7807 /* If the reference was to a (constant) zero offset, we can use
7808 the address of the base if it has the same base type
7809 as the result type and the pointer type is unqualified. */
7810 if (! offset && bitpos == 0
7811 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7812 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7813 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7814 return fold_convert_loc (loc, type,
7815 build_fold_addr_expr_loc (loc, base));
7816 }
7817
7818 if (TREE_CODE (op0) == MODIFY_EXPR
7819 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7820 /* Detect assigning a bitfield. */
7821 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7822 && DECL_BIT_FIELD
7823 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7824 {
7825 /* Don't leave an assignment inside a conversion
7826 unless assigning a bitfield. */
7827 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7828 /* First do the assignment, then return converted constant. */
7829 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7830 TREE_NO_WARNING (tem) = 1;
7831 TREE_USED (tem) = 1;
7832 return tem;
7833 }
7834
7835 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7836 constants (if x has signed type, the sign bit cannot be set
7837 in c). This folds extension into the BIT_AND_EXPR.
7838 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7839 very likely don't have maximal range for their precision and this
7840 transformation effectively doesn't preserve non-maximal ranges. */
7841 if (TREE_CODE (type) == INTEGER_TYPE
7842 && TREE_CODE (op0) == BIT_AND_EXPR
7843 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7844 {
7845 tree and_expr = op0;
7846 tree and0 = TREE_OPERAND (and_expr, 0);
7847 tree and1 = TREE_OPERAND (and_expr, 1);
7848 int change = 0;
7849
7850 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7851 || (TYPE_PRECISION (type)
7852 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7853 change = 1;
7854 else if (TYPE_PRECISION (TREE_TYPE (and1))
7855 <= HOST_BITS_PER_WIDE_INT
7856 && tree_fits_uhwi_p (and1))
7857 {
7858 unsigned HOST_WIDE_INT cst;
7859
7860 cst = tree_to_uhwi (and1);
7861 cst &= HOST_WIDE_INT_M1U
7862 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7863 change = (cst == 0);
7864 #ifdef LOAD_EXTEND_OP
7865 if (change
7866 && !flag_syntax_only
7867 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7868 == ZERO_EXTEND))
7869 {
7870 tree uns = unsigned_type_for (TREE_TYPE (and0));
7871 and0 = fold_convert_loc (loc, uns, and0);
7872 and1 = fold_convert_loc (loc, uns, and1);
7873 }
7874 #endif
7875 }
7876 if (change)
7877 {
7878 tem = force_fit_type (type, wi::to_widest (and1), 0,
7879 TREE_OVERFLOW (and1));
7880 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7881 fold_convert_loc (loc, type, and0), tem);
7882 }
7883 }
7884
7885 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7886 when one of the new casts will fold away. Conservatively we assume
7887 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7888 if (POINTER_TYPE_P (type)
7889 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7890 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7891 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7892 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7893 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7894 {
7895 tree arg00 = TREE_OPERAND (arg0, 0);
7896 tree arg01 = TREE_OPERAND (arg0, 1);
7897
7898 return fold_build_pointer_plus_loc
7899 (loc, fold_convert_loc (loc, type, arg00), arg01);
7900 }
7901
7902 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7903 of the same precision, and X is an integer type not narrower than
7904 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7905 if (INTEGRAL_TYPE_P (type)
7906 && TREE_CODE (op0) == BIT_NOT_EXPR
7907 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7908 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7909 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7910 {
7911 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7912 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7913 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7914 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7915 fold_convert_loc (loc, type, tem));
7916 }
7917
7918 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7919 type of X and Y (integer types only). */
7920 if (INTEGRAL_TYPE_P (type)
7921 && TREE_CODE (op0) == MULT_EXPR
7922 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7923 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7924 {
7925 /* Be careful not to introduce new overflows. */
7926 tree mult_type;
7927 if (TYPE_OVERFLOW_WRAPS (type))
7928 mult_type = type;
7929 else
7930 mult_type = unsigned_type_for (type);
7931
7932 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7933 {
7934 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7935 fold_convert_loc (loc, mult_type,
7936 TREE_OPERAND (op0, 0)),
7937 fold_convert_loc (loc, mult_type,
7938 TREE_OPERAND (op0, 1)));
7939 return fold_convert_loc (loc, type, tem);
7940 }
7941 }
7942
7943 tem = fold_convert_const (code, type, arg0);
7944 return tem ? tem : NULL_TREE;
7945
7946 case ADDR_SPACE_CONVERT_EXPR:
7947 if (integer_zerop (arg0))
7948 return fold_convert_const (code, type, arg0);
7949 return NULL_TREE;
7950
7951 case FIXED_CONVERT_EXPR:
7952 tem = fold_convert_const (code, type, arg0);
7953 return tem ? tem : NULL_TREE;
7954
7955 case VIEW_CONVERT_EXPR:
7956 if (TREE_TYPE (op0) == type)
7957 return op0;
7958 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7959 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7960 type, TREE_OPERAND (op0, 0));
7961 if (TREE_CODE (op0) == MEM_REF)
7962 return fold_build2_loc (loc, MEM_REF, type,
7963 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7964
7965 /* For integral conversions with the same precision or pointer
7966 conversions use a NOP_EXPR instead. */
7967 if ((INTEGRAL_TYPE_P (type)
7968 || POINTER_TYPE_P (type))
7969 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7970 || POINTER_TYPE_P (TREE_TYPE (op0)))
7971 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7972 return fold_convert_loc (loc, type, op0);
7973
7974 /* Strip inner integral conversions that do not change the precision. */
7975 if (CONVERT_EXPR_P (op0)
7976 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7977 || POINTER_TYPE_P (TREE_TYPE (op0)))
7978 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7979 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7980 && (TYPE_PRECISION (TREE_TYPE (op0))
7981 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7982 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7983 type, TREE_OPERAND (op0, 0));
7984
7985 return fold_view_convert_expr (type, op0);
7986
7987 case NEGATE_EXPR:
7988 tem = fold_negate_expr (loc, arg0);
7989 if (tem)
7990 return fold_convert_loc (loc, type, tem);
7991 return NULL_TREE;
7992
7993 case ABS_EXPR:
7994 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7995 return fold_abs_const (arg0, type);
7996 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7997 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7998 /* Convert fabs((double)float) into (double)fabsf(float). */
7999 else if (TREE_CODE (arg0) == NOP_EXPR
8000 && TREE_CODE (type) == REAL_TYPE)
8001 {
8002 tree targ0 = strip_float_extensions (arg0);
8003 if (targ0 != arg0)
8004 return fold_convert_loc (loc, type,
8005 fold_build1_loc (loc, ABS_EXPR,
8006 TREE_TYPE (targ0),
8007 targ0));
8008 }
8009 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8010 else if (TREE_CODE (arg0) == ABS_EXPR)
8011 return arg0;
8012 else if (tree_expr_nonnegative_p (arg0))
8013 return arg0;
8014
8015 /* Strip sign ops from argument. */
8016 if (TREE_CODE (type) == REAL_TYPE)
8017 {
8018 tem = fold_strip_sign_ops (arg0);
8019 if (tem)
8020 return fold_build1_loc (loc, ABS_EXPR, type,
8021 fold_convert_loc (loc, type, tem));
8022 }
8023 return NULL_TREE;
8024
8025 case CONJ_EXPR:
8026 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8027 return fold_convert_loc (loc, type, arg0);
8028 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8029 {
8030 tree itype = TREE_TYPE (type);
8031 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8032 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8033 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8034 negate_expr (ipart));
8035 }
8036 if (TREE_CODE (arg0) == COMPLEX_CST)
8037 {
8038 tree itype = TREE_TYPE (type);
8039 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8040 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8041 return build_complex (type, rpart, negate_expr (ipart));
8042 }
8043 if (TREE_CODE (arg0) == CONJ_EXPR)
8044 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8045 return NULL_TREE;
8046
8047 case BIT_NOT_EXPR:
8048 if (TREE_CODE (arg0) == INTEGER_CST)
8049 return fold_not_const (arg0, type);
8050 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8051 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8052 /* Convert ~ (-A) to A - 1. */
8053 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8054 return fold_build2_loc (loc, MINUS_EXPR, type,
8055 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8056 build_int_cst (type, 1));
8057 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8058 else if (INTEGRAL_TYPE_P (type)
8059 && ((TREE_CODE (arg0) == MINUS_EXPR
8060 && integer_onep (TREE_OPERAND (arg0, 1)))
8061 || (TREE_CODE (arg0) == PLUS_EXPR
8062 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8063 return fold_build1_loc (loc, NEGATE_EXPR, type,
8064 fold_convert_loc (loc, type,
8065 TREE_OPERAND (arg0, 0)));
8066 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8067 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8068 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8069 fold_convert_loc (loc, type,
8070 TREE_OPERAND (arg0, 0)))))
8071 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8072 fold_convert_loc (loc, type,
8073 TREE_OPERAND (arg0, 1)));
8074 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8075 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8076 fold_convert_loc (loc, type,
8077 TREE_OPERAND (arg0, 1)))))
8078 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8079 fold_convert_loc (loc, type,
8080 TREE_OPERAND (arg0, 0)), tem);
8081 /* Perform BIT_NOT_EXPR on each element individually. */
8082 else if (TREE_CODE (arg0) == VECTOR_CST)
8083 {
8084 tree *elements;
8085 tree elem;
8086 unsigned count = VECTOR_CST_NELTS (arg0), i;
8087
8088 elements = XALLOCAVEC (tree, count);
8089 for (i = 0; i < count; i++)
8090 {
8091 elem = VECTOR_CST_ELT (arg0, i);
8092 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8093 if (elem == NULL_TREE)
8094 break;
8095 elements[i] = elem;
8096 }
8097 if (i == count)
8098 return build_vector (type, elements);
8099 }
8100 else if (COMPARISON_CLASS_P (arg0)
8101 && (VECTOR_TYPE_P (type)
8102 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8103 {
8104 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8105 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8106 HONOR_NANS (TYPE_MODE (op_type)));
8107 if (subcode != ERROR_MARK)
8108 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8109 TREE_OPERAND (arg0, 1));
8110 }
8111
8112
8113 return NULL_TREE;
8114
8115 case TRUTH_NOT_EXPR:
8116 /* Note that the operand of this must be an int
8117 and its values must be 0 or 1.
8118 ("true" is a fixed value perhaps depending on the language,
8119 but we don't handle values other than 1 correctly yet.) */
8120 tem = fold_truth_not_expr (loc, arg0);
8121 if (!tem)
8122 return NULL_TREE;
8123 return fold_convert_loc (loc, type, tem);
8124
8125 case REALPART_EXPR:
8126 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8127 return fold_convert_loc (loc, type, arg0);
8128 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8129 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8130 TREE_OPERAND (arg0, 1));
8131 if (TREE_CODE (arg0) == COMPLEX_CST)
8132 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8133 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8134 {
8135 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8136 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8137 fold_build1_loc (loc, REALPART_EXPR, itype,
8138 TREE_OPERAND (arg0, 0)),
8139 fold_build1_loc (loc, REALPART_EXPR, itype,
8140 TREE_OPERAND (arg0, 1)));
8141 return fold_convert_loc (loc, type, tem);
8142 }
8143 if (TREE_CODE (arg0) == CONJ_EXPR)
8144 {
8145 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8146 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8147 TREE_OPERAND (arg0, 0));
8148 return fold_convert_loc (loc, type, tem);
8149 }
8150 if (TREE_CODE (arg0) == CALL_EXPR)
8151 {
8152 tree fn = get_callee_fndecl (arg0);
8153 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8154 switch (DECL_FUNCTION_CODE (fn))
8155 {
8156 CASE_FLT_FN (BUILT_IN_CEXPI):
8157 fn = mathfn_built_in (type, BUILT_IN_COS);
8158 if (fn)
8159 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8160 break;
8161
8162 default:
8163 break;
8164 }
8165 }
8166 return NULL_TREE;
8167
8168 case IMAGPART_EXPR:
8169 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8170 return build_zero_cst (type);
8171 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8172 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8173 TREE_OPERAND (arg0, 0));
8174 if (TREE_CODE (arg0) == COMPLEX_CST)
8175 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8176 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8177 {
8178 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8179 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8180 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8181 TREE_OPERAND (arg0, 0)),
8182 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8183 TREE_OPERAND (arg0, 1)));
8184 return fold_convert_loc (loc, type, tem);
8185 }
8186 if (TREE_CODE (arg0) == CONJ_EXPR)
8187 {
8188 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8189 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8190 return fold_convert_loc (loc, type, negate_expr (tem));
8191 }
8192 if (TREE_CODE (arg0) == CALL_EXPR)
8193 {
8194 tree fn = get_callee_fndecl (arg0);
8195 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8196 switch (DECL_FUNCTION_CODE (fn))
8197 {
8198 CASE_FLT_FN (BUILT_IN_CEXPI):
8199 fn = mathfn_built_in (type, BUILT_IN_SIN);
8200 if (fn)
8201 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8202 break;
8203
8204 default:
8205 break;
8206 }
8207 }
8208 return NULL_TREE;
8209
8210 case INDIRECT_REF:
8211 /* Fold *&X to X if X is an lvalue. */
8212 if (TREE_CODE (op0) == ADDR_EXPR)
8213 {
8214 tree op00 = TREE_OPERAND (op0, 0);
8215 if ((TREE_CODE (op00) == VAR_DECL
8216 || TREE_CODE (op00) == PARM_DECL
8217 || TREE_CODE (op00) == RESULT_DECL)
8218 && !TREE_READONLY (op00))
8219 return op00;
8220 }
8221 return NULL_TREE;
8222
8223 case VEC_UNPACK_LO_EXPR:
8224 case VEC_UNPACK_HI_EXPR:
8225 case VEC_UNPACK_FLOAT_LO_EXPR:
8226 case VEC_UNPACK_FLOAT_HI_EXPR:
8227 {
8228 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8229 tree *elts;
8230 enum tree_code subcode;
8231
8232 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8233 if (TREE_CODE (arg0) != VECTOR_CST)
8234 return NULL_TREE;
8235
8236 elts = XALLOCAVEC (tree, nelts * 2);
8237 if (!vec_cst_ctor_to_array (arg0, elts))
8238 return NULL_TREE;
8239
8240 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8241 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8242 elts += nelts;
8243
8244 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8245 subcode = NOP_EXPR;
8246 else
8247 subcode = FLOAT_EXPR;
8248
8249 for (i = 0; i < nelts; i++)
8250 {
8251 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8252 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8253 return NULL_TREE;
8254 }
8255
8256 return build_vector (type, elts);
8257 }
8258
8259 case REDUC_MIN_EXPR:
8260 case REDUC_MAX_EXPR:
8261 case REDUC_PLUS_EXPR:
8262 {
8263 unsigned int nelts, i;
8264 tree *elts;
8265 enum tree_code subcode;
8266
8267 if (TREE_CODE (op0) != VECTOR_CST)
8268 return NULL_TREE;
8269 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8270
8271 elts = XALLOCAVEC (tree, nelts);
8272 if (!vec_cst_ctor_to_array (op0, elts))
8273 return NULL_TREE;
8274
8275 switch (code)
8276 {
8277 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8278 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8279 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8280 default: gcc_unreachable ();
8281 }
8282
8283 for (i = 1; i < nelts; i++)
8284 {
8285 elts[0] = const_binop (subcode, elts[0], elts[i]);
8286 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8287 return NULL_TREE;
8288 }
8289
8290 return elts[0];
8291 }
8292
8293 default:
8294 return NULL_TREE;
8295 } /* switch (code) */
8296 }
8297
8298
8299 /* If the operation was a conversion do _not_ mark a resulting constant
8300 with TREE_OVERFLOW if the original constant was not. These conversions
8301 have implementation defined behavior and retaining the TREE_OVERFLOW
8302 flag here would confuse later passes such as VRP. */
8303 tree
8304 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8305 tree type, tree op0)
8306 {
8307 tree res = fold_unary_loc (loc, code, type, op0);
8308 if (res
8309 && TREE_CODE (res) == INTEGER_CST
8310 && TREE_CODE (op0) == INTEGER_CST
8311 && CONVERT_EXPR_CODE_P (code))
8312 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8313
8314 return res;
8315 }
8316
8317 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8318 operands OP0 and OP1. LOC is the location of the resulting expression.
8319 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8320 Return the folded expression if folding is successful. Otherwise,
8321 return NULL_TREE. */
8322 static tree
8323 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8324 tree arg0, tree arg1, tree op0, tree op1)
8325 {
8326 tree tem;
8327
8328 /* We only do these simplifications if we are optimizing. */
8329 if (!optimize)
8330 return NULL_TREE;
8331
8332 /* Check for things like (A || B) && (A || C). We can convert this
8333 to A || (B && C). Note that either operator can be any of the four
8334 truth and/or operations and the transformation will still be
8335 valid. Also note that we only care about order for the
8336 ANDIF and ORIF operators. If B contains side effects, this
8337 might change the truth-value of A. */
8338 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8339 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8340 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8341 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8342 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8343 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8344 {
8345 tree a00 = TREE_OPERAND (arg0, 0);
8346 tree a01 = TREE_OPERAND (arg0, 1);
8347 tree a10 = TREE_OPERAND (arg1, 0);
8348 tree a11 = TREE_OPERAND (arg1, 1);
8349 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8350 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8351 && (code == TRUTH_AND_EXPR
8352 || code == TRUTH_OR_EXPR));
8353
8354 if (operand_equal_p (a00, a10, 0))
8355 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8356 fold_build2_loc (loc, code, type, a01, a11));
8357 else if (commutative && operand_equal_p (a00, a11, 0))
8358 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8359 fold_build2_loc (loc, code, type, a01, a10));
8360 else if (commutative && operand_equal_p (a01, a10, 0))
8361 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8362 fold_build2_loc (loc, code, type, a00, a11));
8363
8364 /* This case if tricky because we must either have commutative
8365 operators or else A10 must not have side-effects. */
8366
8367 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8368 && operand_equal_p (a01, a11, 0))
8369 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8370 fold_build2_loc (loc, code, type, a00, a10),
8371 a01);
8372 }
8373
8374 /* See if we can build a range comparison. */
8375 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8376 return tem;
8377
8378 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8379 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8380 {
8381 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8382 if (tem)
8383 return fold_build2_loc (loc, code, type, tem, arg1);
8384 }
8385
8386 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8387 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8388 {
8389 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8390 if (tem)
8391 return fold_build2_loc (loc, code, type, arg0, tem);
8392 }
8393
8394 /* Check for the possibility of merging component references. If our
8395 lhs is another similar operation, try to merge its rhs with our
8396 rhs. Then try to merge our lhs and rhs. */
8397 if (TREE_CODE (arg0) == code
8398 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8399 TREE_OPERAND (arg0, 1), arg1)))
8400 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8401
8402 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8403 return tem;
8404
8405 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8406 && (code == TRUTH_AND_EXPR
8407 || code == TRUTH_ANDIF_EXPR
8408 || code == TRUTH_OR_EXPR
8409 || code == TRUTH_ORIF_EXPR))
8410 {
8411 enum tree_code ncode, icode;
8412
8413 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8414 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8415 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8416
8417 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8418 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8419 We don't want to pack more than two leafs to a non-IF AND/OR
8420 expression.
8421 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8422 equal to IF-CODE, then we don't want to add right-hand operand.
8423 If the inner right-hand side of left-hand operand has
8424 side-effects, or isn't simple, then we can't add to it,
8425 as otherwise we might destroy if-sequence. */
8426 if (TREE_CODE (arg0) == icode
8427 && simple_operand_p_2 (arg1)
8428 /* Needed for sequence points to handle trappings, and
8429 side-effects. */
8430 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8431 {
8432 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8433 arg1);
8434 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8435 tem);
8436 }
8437 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8438 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8439 else if (TREE_CODE (arg1) == icode
8440 && simple_operand_p_2 (arg0)
8441 /* Needed for sequence points to handle trappings, and
8442 side-effects. */
8443 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8444 {
8445 tem = fold_build2_loc (loc, ncode, type,
8446 arg0, TREE_OPERAND (arg1, 0));
8447 return fold_build2_loc (loc, icode, type, tem,
8448 TREE_OPERAND (arg1, 1));
8449 }
8450 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8451 into (A OR B).
8452 For sequence point consistancy, we need to check for trapping,
8453 and side-effects. */
8454 else if (code == icode && simple_operand_p_2 (arg0)
8455 && simple_operand_p_2 (arg1))
8456 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8457 }
8458
8459 return NULL_TREE;
8460 }
8461
8462 /* Fold a binary expression of code CODE and type TYPE with operands
8463 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8464 Return the folded expression if folding is successful. Otherwise,
8465 return NULL_TREE. */
8466
8467 static tree
8468 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8469 {
8470 enum tree_code compl_code;
8471
8472 if (code == MIN_EXPR)
8473 compl_code = MAX_EXPR;
8474 else if (code == MAX_EXPR)
8475 compl_code = MIN_EXPR;
8476 else
8477 gcc_unreachable ();
8478
8479 /* MIN (MAX (a, b), b) == b. */
8480 if (TREE_CODE (op0) == compl_code
8481 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8482 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8483
8484 /* MIN (MAX (b, a), b) == b. */
8485 if (TREE_CODE (op0) == compl_code
8486 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8487 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8488 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8489
8490 /* MIN (a, MAX (a, b)) == a. */
8491 if (TREE_CODE (op1) == compl_code
8492 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8493 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8494 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8495
8496 /* MIN (a, MAX (b, a)) == a. */
8497 if (TREE_CODE (op1) == compl_code
8498 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8499 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8500 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8501
8502 return NULL_TREE;
8503 }
8504
8505 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8506 by changing CODE to reduce the magnitude of constants involved in
8507 ARG0 of the comparison.
8508 Returns a canonicalized comparison tree if a simplification was
8509 possible, otherwise returns NULL_TREE.
8510 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8511 valid if signed overflow is undefined. */
8512
8513 static tree
8514 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8515 tree arg0, tree arg1,
8516 bool *strict_overflow_p)
8517 {
8518 enum tree_code code0 = TREE_CODE (arg0);
8519 tree t, cst0 = NULL_TREE;
8520 int sgn0;
8521 bool swap = false;
8522
8523 /* Match A +- CST code arg1 and CST code arg1. We can change the
8524 first form only if overflow is undefined. */
8525 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8526 /* In principle pointers also have undefined overflow behavior,
8527 but that causes problems elsewhere. */
8528 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8529 && (code0 == MINUS_EXPR
8530 || code0 == PLUS_EXPR)
8531 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8532 || code0 == INTEGER_CST))
8533 return NULL_TREE;
8534
8535 /* Identify the constant in arg0 and its sign. */
8536 if (code0 == INTEGER_CST)
8537 cst0 = arg0;
8538 else
8539 cst0 = TREE_OPERAND (arg0, 1);
8540 sgn0 = tree_int_cst_sgn (cst0);
8541
8542 /* Overflowed constants and zero will cause problems. */
8543 if (integer_zerop (cst0)
8544 || TREE_OVERFLOW (cst0))
8545 return NULL_TREE;
8546
8547 /* See if we can reduce the magnitude of the constant in
8548 arg0 by changing the comparison code. */
8549 if (code0 == INTEGER_CST)
8550 {
8551 /* CST <= arg1 -> CST-1 < arg1. */
8552 if (code == LE_EXPR && sgn0 == 1)
8553 code = LT_EXPR;
8554 /* -CST < arg1 -> -CST-1 <= arg1. */
8555 else if (code == LT_EXPR && sgn0 == -1)
8556 code = LE_EXPR;
8557 /* CST > arg1 -> CST-1 >= arg1. */
8558 else if (code == GT_EXPR && sgn0 == 1)
8559 code = GE_EXPR;
8560 /* -CST >= arg1 -> -CST-1 > arg1. */
8561 else if (code == GE_EXPR && sgn0 == -1)
8562 code = GT_EXPR;
8563 else
8564 return NULL_TREE;
8565 /* arg1 code' CST' might be more canonical. */
8566 swap = true;
8567 }
8568 else
8569 {
8570 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8571 if (code == LT_EXPR
8572 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8573 code = LE_EXPR;
8574 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8575 else if (code == GT_EXPR
8576 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8577 code = GE_EXPR;
8578 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8579 else if (code == LE_EXPR
8580 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8581 code = LT_EXPR;
8582 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8583 else if (code == GE_EXPR
8584 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8585 code = GT_EXPR;
8586 else
8587 return NULL_TREE;
8588 *strict_overflow_p = true;
8589 }
8590
8591 /* Now build the constant reduced in magnitude. But not if that
8592 would produce one outside of its types range. */
8593 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8594 && ((sgn0 == 1
8595 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8596 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8597 || (sgn0 == -1
8598 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8599 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8600 /* We cannot swap the comparison here as that would cause us to
8601 endlessly recurse. */
8602 return NULL_TREE;
8603
8604 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8605 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8606 if (code0 != INTEGER_CST)
8607 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8608 t = fold_convert (TREE_TYPE (arg1), t);
8609
8610 /* If swapping might yield to a more canonical form, do so. */
8611 if (swap)
8612 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8613 else
8614 return fold_build2_loc (loc, code, type, t, arg1);
8615 }
8616
8617 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8618 overflow further. Try to decrease the magnitude of constants involved
8619 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8620 and put sole constants at the second argument position.
8621 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8622
8623 static tree
8624 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8625 tree arg0, tree arg1)
8626 {
8627 tree t;
8628 bool strict_overflow_p;
8629 const char * const warnmsg = G_("assuming signed overflow does not occur "
8630 "when reducing constant in comparison");
8631
8632 /* Try canonicalization by simplifying arg0. */
8633 strict_overflow_p = false;
8634 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8635 &strict_overflow_p);
8636 if (t)
8637 {
8638 if (strict_overflow_p)
8639 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8640 return t;
8641 }
8642
8643 /* Try canonicalization by simplifying arg1 using the swapped
8644 comparison. */
8645 code = swap_tree_comparison (code);
8646 strict_overflow_p = false;
8647 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8648 &strict_overflow_p);
8649 if (t && strict_overflow_p)
8650 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8651 return t;
8652 }
8653
8654 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8655 space. This is used to avoid issuing overflow warnings for
8656 expressions like &p->x which can not wrap. */
8657
8658 static bool
8659 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8660 {
8661 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8662 return true;
8663
8664 if (bitpos < 0)
8665 return true;
8666
8667 wide_int wi_offset;
8668 int precision = TYPE_PRECISION (TREE_TYPE (base));
8669 if (offset == NULL_TREE)
8670 wi_offset = wi::zero (precision);
8671 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8672 return true;
8673 else
8674 wi_offset = offset;
8675
8676 bool overflow;
8677 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8678 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8679 if (overflow)
8680 return true;
8681
8682 if (!wi::fits_uhwi_p (total))
8683 return true;
8684
8685 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8686 if (size <= 0)
8687 return true;
8688
8689 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8690 array. */
8691 if (TREE_CODE (base) == ADDR_EXPR)
8692 {
8693 HOST_WIDE_INT base_size;
8694
8695 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8696 if (base_size > 0 && size < base_size)
8697 size = base_size;
8698 }
8699
8700 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8701 }
8702
8703 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8704 kind INTEGER_CST. This makes sure to properly sign-extend the
8705 constant. */
8706
8707 static HOST_WIDE_INT
8708 size_low_cst (const_tree t)
8709 {
8710 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8711 int prec = TYPE_PRECISION (TREE_TYPE (t));
8712 if (prec < HOST_BITS_PER_WIDE_INT)
8713 return sext_hwi (w, prec);
8714 return w;
8715 }
8716
8717 /* Subroutine of fold_binary. This routine performs all of the
8718 transformations that are common to the equality/inequality
8719 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8720 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8721 fold_binary should call fold_binary. Fold a comparison with
8722 tree code CODE and type TYPE with operands OP0 and OP1. Return
8723 the folded comparison or NULL_TREE. */
8724
8725 static tree
8726 fold_comparison (location_t loc, enum tree_code code, tree type,
8727 tree op0, tree op1)
8728 {
8729 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8730 tree arg0, arg1, tem;
8731
8732 arg0 = op0;
8733 arg1 = op1;
8734
8735 STRIP_SIGN_NOPS (arg0);
8736 STRIP_SIGN_NOPS (arg1);
8737
8738 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8739 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8740 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8741 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8742 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8743 && TREE_CODE (arg1) == INTEGER_CST
8744 && !TREE_OVERFLOW (arg1))
8745 {
8746 const enum tree_code
8747 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8748 tree const1 = TREE_OPERAND (arg0, 1);
8749 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8750 tree variable = TREE_OPERAND (arg0, 0);
8751 tree new_const = int_const_binop (reverse_op, const2, const1);
8752
8753 /* If the constant operation overflowed this can be
8754 simplified as a comparison against INT_MAX/INT_MIN. */
8755 if (TREE_OVERFLOW (new_const)
8756 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8757 {
8758 int const1_sgn = tree_int_cst_sgn (const1);
8759 enum tree_code code2 = code;
8760
8761 /* Get the sign of the constant on the lhs if the
8762 operation were VARIABLE + CONST1. */
8763 if (TREE_CODE (arg0) == MINUS_EXPR)
8764 const1_sgn = -const1_sgn;
8765
8766 /* The sign of the constant determines if we overflowed
8767 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8768 Canonicalize to the INT_MIN overflow by swapping the comparison
8769 if necessary. */
8770 if (const1_sgn == -1)
8771 code2 = swap_tree_comparison (code);
8772
8773 /* We now can look at the canonicalized case
8774 VARIABLE + 1 CODE2 INT_MIN
8775 and decide on the result. */
8776 switch (code2)
8777 {
8778 case EQ_EXPR:
8779 case LT_EXPR:
8780 case LE_EXPR:
8781 return
8782 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8783
8784 case NE_EXPR:
8785 case GE_EXPR:
8786 case GT_EXPR:
8787 return
8788 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8789
8790 default:
8791 gcc_unreachable ();
8792 }
8793 }
8794 else
8795 {
8796 if (!equality_code)
8797 fold_overflow_warning ("assuming signed overflow does not occur "
8798 "when changing X +- C1 cmp C2 to "
8799 "X cmp C2 -+ C1",
8800 WARN_STRICT_OVERFLOW_COMPARISON);
8801 return fold_build2_loc (loc, code, type, variable, new_const);
8802 }
8803 }
8804
8805 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8806 if (TREE_CODE (arg0) == MINUS_EXPR
8807 && equality_code
8808 && integer_zerop (arg1))
8809 {
8810 /* ??? The transformation is valid for the other operators if overflow
8811 is undefined for the type, but performing it here badly interacts
8812 with the transformation in fold_cond_expr_with_comparison which
8813 attempts to synthetize ABS_EXPR. */
8814 if (!equality_code)
8815 fold_overflow_warning ("assuming signed overflow does not occur "
8816 "when changing X - Y cmp 0 to X cmp Y",
8817 WARN_STRICT_OVERFLOW_COMPARISON);
8818 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8819 TREE_OPERAND (arg0, 1));
8820 }
8821
8822 /* For comparisons of pointers we can decompose it to a compile time
8823 comparison of the base objects and the offsets into the object.
8824 This requires at least one operand being an ADDR_EXPR or a
8825 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8826 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8827 && (TREE_CODE (arg0) == ADDR_EXPR
8828 || TREE_CODE (arg1) == ADDR_EXPR
8829 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8830 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8831 {
8832 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8833 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8834 machine_mode mode;
8835 int volatilep, unsignedp;
8836 bool indirect_base0 = false, indirect_base1 = false;
8837
8838 /* Get base and offset for the access. Strip ADDR_EXPR for
8839 get_inner_reference, but put it back by stripping INDIRECT_REF
8840 off the base object if possible. indirect_baseN will be true
8841 if baseN is not an address but refers to the object itself. */
8842 base0 = arg0;
8843 if (TREE_CODE (arg0) == ADDR_EXPR)
8844 {
8845 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8846 &bitsize, &bitpos0, &offset0, &mode,
8847 &unsignedp, &volatilep, false);
8848 if (TREE_CODE (base0) == INDIRECT_REF)
8849 base0 = TREE_OPERAND (base0, 0);
8850 else
8851 indirect_base0 = true;
8852 }
8853 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8854 {
8855 base0 = TREE_OPERAND (arg0, 0);
8856 STRIP_SIGN_NOPS (base0);
8857 if (TREE_CODE (base0) == ADDR_EXPR)
8858 {
8859 base0 = TREE_OPERAND (base0, 0);
8860 indirect_base0 = true;
8861 }
8862 offset0 = TREE_OPERAND (arg0, 1);
8863 if (tree_fits_shwi_p (offset0))
8864 {
8865 HOST_WIDE_INT off = size_low_cst (offset0);
8866 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8867 * BITS_PER_UNIT)
8868 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8869 {
8870 bitpos0 = off * BITS_PER_UNIT;
8871 offset0 = NULL_TREE;
8872 }
8873 }
8874 }
8875
8876 base1 = arg1;
8877 if (TREE_CODE (arg1) == ADDR_EXPR)
8878 {
8879 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8880 &bitsize, &bitpos1, &offset1, &mode,
8881 &unsignedp, &volatilep, false);
8882 if (TREE_CODE (base1) == INDIRECT_REF)
8883 base1 = TREE_OPERAND (base1, 0);
8884 else
8885 indirect_base1 = true;
8886 }
8887 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8888 {
8889 base1 = TREE_OPERAND (arg1, 0);
8890 STRIP_SIGN_NOPS (base1);
8891 if (TREE_CODE (base1) == ADDR_EXPR)
8892 {
8893 base1 = TREE_OPERAND (base1, 0);
8894 indirect_base1 = true;
8895 }
8896 offset1 = TREE_OPERAND (arg1, 1);
8897 if (tree_fits_shwi_p (offset1))
8898 {
8899 HOST_WIDE_INT off = size_low_cst (offset1);
8900 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8901 * BITS_PER_UNIT)
8902 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8903 {
8904 bitpos1 = off * BITS_PER_UNIT;
8905 offset1 = NULL_TREE;
8906 }
8907 }
8908 }
8909
8910 /* A local variable can never be pointed to by
8911 the default SSA name of an incoming parameter. */
8912 if ((TREE_CODE (arg0) == ADDR_EXPR
8913 && indirect_base0
8914 && TREE_CODE (base0) == VAR_DECL
8915 && auto_var_in_fn_p (base0, current_function_decl)
8916 && !indirect_base1
8917 && TREE_CODE (base1) == SSA_NAME
8918 && SSA_NAME_IS_DEFAULT_DEF (base1)
8919 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8920 || (TREE_CODE (arg1) == ADDR_EXPR
8921 && indirect_base1
8922 && TREE_CODE (base1) == VAR_DECL
8923 && auto_var_in_fn_p (base1, current_function_decl)
8924 && !indirect_base0
8925 && TREE_CODE (base0) == SSA_NAME
8926 && SSA_NAME_IS_DEFAULT_DEF (base0)
8927 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8928 {
8929 if (code == NE_EXPR)
8930 return constant_boolean_node (1, type);
8931 else if (code == EQ_EXPR)
8932 return constant_boolean_node (0, type);
8933 }
8934 /* If we have equivalent bases we might be able to simplify. */
8935 else if (indirect_base0 == indirect_base1
8936 && operand_equal_p (base0, base1, 0))
8937 {
8938 /* We can fold this expression to a constant if the non-constant
8939 offset parts are equal. */
8940 if ((offset0 == offset1
8941 || (offset0 && offset1
8942 && operand_equal_p (offset0, offset1, 0)))
8943 && (code == EQ_EXPR
8944 || code == NE_EXPR
8945 || (indirect_base0 && DECL_P (base0))
8946 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8947
8948 {
8949 if (!equality_code
8950 && bitpos0 != bitpos1
8951 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8952 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8953 fold_overflow_warning (("assuming pointer wraparound does not "
8954 "occur when comparing P +- C1 with "
8955 "P +- C2"),
8956 WARN_STRICT_OVERFLOW_CONDITIONAL);
8957
8958 switch (code)
8959 {
8960 case EQ_EXPR:
8961 return constant_boolean_node (bitpos0 == bitpos1, type);
8962 case NE_EXPR:
8963 return constant_boolean_node (bitpos0 != bitpos1, type);
8964 case LT_EXPR:
8965 return constant_boolean_node (bitpos0 < bitpos1, type);
8966 case LE_EXPR:
8967 return constant_boolean_node (bitpos0 <= bitpos1, type);
8968 case GE_EXPR:
8969 return constant_boolean_node (bitpos0 >= bitpos1, type);
8970 case GT_EXPR:
8971 return constant_boolean_node (bitpos0 > bitpos1, type);
8972 default:;
8973 }
8974 }
8975 /* We can simplify the comparison to a comparison of the variable
8976 offset parts if the constant offset parts are equal.
8977 Be careful to use signed sizetype here because otherwise we
8978 mess with array offsets in the wrong way. This is possible
8979 because pointer arithmetic is restricted to retain within an
8980 object and overflow on pointer differences is undefined as of
8981 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8982 else if (bitpos0 == bitpos1
8983 && (equality_code
8984 || (indirect_base0 && DECL_P (base0))
8985 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8986 {
8987 /* By converting to signed sizetype we cover middle-end pointer
8988 arithmetic which operates on unsigned pointer types of size
8989 type size and ARRAY_REF offsets which are properly sign or
8990 zero extended from their type in case it is narrower than
8991 sizetype. */
8992 if (offset0 == NULL_TREE)
8993 offset0 = build_int_cst (ssizetype, 0);
8994 else
8995 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8996 if (offset1 == NULL_TREE)
8997 offset1 = build_int_cst (ssizetype, 0);
8998 else
8999 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9000
9001 if (!equality_code
9002 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9003 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9004 fold_overflow_warning (("assuming pointer wraparound does not "
9005 "occur when comparing P +- C1 with "
9006 "P +- C2"),
9007 WARN_STRICT_OVERFLOW_COMPARISON);
9008
9009 return fold_build2_loc (loc, code, type, offset0, offset1);
9010 }
9011 }
9012 /* For non-equal bases we can simplify if they are addresses
9013 of local binding decls or constants. */
9014 else if (indirect_base0 && indirect_base1
9015 /* We know that !operand_equal_p (base0, base1, 0)
9016 because the if condition was false. But make
9017 sure two decls are not the same. */
9018 && base0 != base1
9019 && TREE_CODE (arg0) == ADDR_EXPR
9020 && TREE_CODE (arg1) == ADDR_EXPR
9021 && (((TREE_CODE (base0) == VAR_DECL
9022 || TREE_CODE (base0) == PARM_DECL)
9023 && (targetm.binds_local_p (base0)
9024 || CONSTANT_CLASS_P (base1)))
9025 || CONSTANT_CLASS_P (base0))
9026 && (((TREE_CODE (base1) == VAR_DECL
9027 || TREE_CODE (base1) == PARM_DECL)
9028 && (targetm.binds_local_p (base1)
9029 || CONSTANT_CLASS_P (base0)))
9030 || CONSTANT_CLASS_P (base1)))
9031 {
9032 if (code == EQ_EXPR)
9033 return omit_two_operands_loc (loc, type, boolean_false_node,
9034 arg0, arg1);
9035 else if (code == NE_EXPR)
9036 return omit_two_operands_loc (loc, type, boolean_true_node,
9037 arg0, arg1);
9038 }
9039 /* For equal offsets we can simplify to a comparison of the
9040 base addresses. */
9041 else if (bitpos0 == bitpos1
9042 && (indirect_base0
9043 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9044 && (indirect_base1
9045 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9046 && ((offset0 == offset1)
9047 || (offset0 && offset1
9048 && operand_equal_p (offset0, offset1, 0))))
9049 {
9050 if (indirect_base0)
9051 base0 = build_fold_addr_expr_loc (loc, base0);
9052 if (indirect_base1)
9053 base1 = build_fold_addr_expr_loc (loc, base1);
9054 return fold_build2_loc (loc, code, type, base0, base1);
9055 }
9056 }
9057
9058 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9059 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9060 the resulting offset is smaller in absolute value than the
9061 original one and has the same sign. */
9062 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9063 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9064 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9065 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9066 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9067 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9068 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9069 {
9070 tree const1 = TREE_OPERAND (arg0, 1);
9071 tree const2 = TREE_OPERAND (arg1, 1);
9072 tree variable1 = TREE_OPERAND (arg0, 0);
9073 tree variable2 = TREE_OPERAND (arg1, 0);
9074 tree cst;
9075 const char * const warnmsg = G_("assuming signed overflow does not "
9076 "occur when combining constants around "
9077 "a comparison");
9078
9079 /* Put the constant on the side where it doesn't overflow and is
9080 of lower absolute value and of same sign than before. */
9081 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9082 ? MINUS_EXPR : PLUS_EXPR,
9083 const2, const1);
9084 if (!TREE_OVERFLOW (cst)
9085 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9086 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9087 {
9088 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9089 return fold_build2_loc (loc, code, type,
9090 variable1,
9091 fold_build2_loc (loc, TREE_CODE (arg1),
9092 TREE_TYPE (arg1),
9093 variable2, cst));
9094 }
9095
9096 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9097 ? MINUS_EXPR : PLUS_EXPR,
9098 const1, const2);
9099 if (!TREE_OVERFLOW (cst)
9100 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9101 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9102 {
9103 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9104 return fold_build2_loc (loc, code, type,
9105 fold_build2_loc (loc, TREE_CODE (arg0),
9106 TREE_TYPE (arg0),
9107 variable1, cst),
9108 variable2);
9109 }
9110 }
9111
9112 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9113 signed arithmetic case. That form is created by the compiler
9114 often enough for folding it to be of value. One example is in
9115 computing loop trip counts after Operator Strength Reduction. */
9116 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9117 && TREE_CODE (arg0) == MULT_EXPR
9118 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9119 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9120 && integer_zerop (arg1))
9121 {
9122 tree const1 = TREE_OPERAND (arg0, 1);
9123 tree const2 = arg1; /* zero */
9124 tree variable1 = TREE_OPERAND (arg0, 0);
9125 enum tree_code cmp_code = code;
9126
9127 /* Handle unfolded multiplication by zero. */
9128 if (integer_zerop (const1))
9129 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9130
9131 fold_overflow_warning (("assuming signed overflow does not occur when "
9132 "eliminating multiplication in comparison "
9133 "with zero"),
9134 WARN_STRICT_OVERFLOW_COMPARISON);
9135
9136 /* If const1 is negative we swap the sense of the comparison. */
9137 if (tree_int_cst_sgn (const1) < 0)
9138 cmp_code = swap_tree_comparison (cmp_code);
9139
9140 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9141 }
9142
9143 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9144 if (tem)
9145 return tem;
9146
9147 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9148 {
9149 tree targ0 = strip_float_extensions (arg0);
9150 tree targ1 = strip_float_extensions (arg1);
9151 tree newtype = TREE_TYPE (targ0);
9152
9153 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9154 newtype = TREE_TYPE (targ1);
9155
9156 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9157 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9158 return fold_build2_loc (loc, code, type,
9159 fold_convert_loc (loc, newtype, targ0),
9160 fold_convert_loc (loc, newtype, targ1));
9161
9162 /* (-a) CMP (-b) -> b CMP a */
9163 if (TREE_CODE (arg0) == NEGATE_EXPR
9164 && TREE_CODE (arg1) == NEGATE_EXPR)
9165 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9166 TREE_OPERAND (arg0, 0));
9167
9168 if (TREE_CODE (arg1) == REAL_CST)
9169 {
9170 REAL_VALUE_TYPE cst;
9171 cst = TREE_REAL_CST (arg1);
9172
9173 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9174 if (TREE_CODE (arg0) == NEGATE_EXPR)
9175 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9176 TREE_OPERAND (arg0, 0),
9177 build_real (TREE_TYPE (arg1),
9178 real_value_negate (&cst)));
9179
9180 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9181 /* a CMP (-0) -> a CMP 0 */
9182 if (REAL_VALUE_MINUS_ZERO (cst))
9183 return fold_build2_loc (loc, code, type, arg0,
9184 build_real (TREE_TYPE (arg1), dconst0));
9185
9186 /* x != NaN is always true, other ops are always false. */
9187 if (REAL_VALUE_ISNAN (cst)
9188 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9189 {
9190 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9191 return omit_one_operand_loc (loc, type, tem, arg0);
9192 }
9193
9194 /* Fold comparisons against infinity. */
9195 if (REAL_VALUE_ISINF (cst)
9196 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9197 {
9198 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9199 if (tem != NULL_TREE)
9200 return tem;
9201 }
9202 }
9203
9204 /* If this is a comparison of a real constant with a PLUS_EXPR
9205 or a MINUS_EXPR of a real constant, we can convert it into a
9206 comparison with a revised real constant as long as no overflow
9207 occurs when unsafe_math_optimizations are enabled. */
9208 if (flag_unsafe_math_optimizations
9209 && TREE_CODE (arg1) == REAL_CST
9210 && (TREE_CODE (arg0) == PLUS_EXPR
9211 || TREE_CODE (arg0) == MINUS_EXPR)
9212 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9213 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9214 ? MINUS_EXPR : PLUS_EXPR,
9215 arg1, TREE_OPERAND (arg0, 1)))
9216 && !TREE_OVERFLOW (tem))
9217 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9218
9219 /* Likewise, we can simplify a comparison of a real constant with
9220 a MINUS_EXPR whose first operand is also a real constant, i.e.
9221 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9222 floating-point types only if -fassociative-math is set. */
9223 if (flag_associative_math
9224 && TREE_CODE (arg1) == REAL_CST
9225 && TREE_CODE (arg0) == MINUS_EXPR
9226 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9227 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9228 arg1))
9229 && !TREE_OVERFLOW (tem))
9230 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9231 TREE_OPERAND (arg0, 1), tem);
9232
9233 /* Fold comparisons against built-in math functions. */
9234 if (TREE_CODE (arg1) == REAL_CST
9235 && flag_unsafe_math_optimizations
9236 && ! flag_errno_math)
9237 {
9238 enum built_in_function fcode = builtin_mathfn_code (arg0);
9239
9240 if (fcode != END_BUILTINS)
9241 {
9242 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9243 if (tem != NULL_TREE)
9244 return tem;
9245 }
9246 }
9247 }
9248
9249 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9250 && CONVERT_EXPR_P (arg0))
9251 {
9252 /* If we are widening one operand of an integer comparison,
9253 see if the other operand is similarly being widened. Perhaps we
9254 can do the comparison in the narrower type. */
9255 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9256 if (tem)
9257 return tem;
9258
9259 /* Or if we are changing signedness. */
9260 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9261 if (tem)
9262 return tem;
9263 }
9264
9265 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9266 constant, we can simplify it. */
9267 if (TREE_CODE (arg1) == INTEGER_CST
9268 && (TREE_CODE (arg0) == MIN_EXPR
9269 || TREE_CODE (arg0) == MAX_EXPR)
9270 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9271 {
9272 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9273 if (tem)
9274 return tem;
9275 }
9276
9277 /* Simplify comparison of something with itself. (For IEEE
9278 floating-point, we can only do some of these simplifications.) */
9279 if (operand_equal_p (arg0, arg1, 0))
9280 {
9281 switch (code)
9282 {
9283 case EQ_EXPR:
9284 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9285 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9286 return constant_boolean_node (1, type);
9287 break;
9288
9289 case GE_EXPR:
9290 case LE_EXPR:
9291 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9292 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9293 return constant_boolean_node (1, type);
9294 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9295
9296 case NE_EXPR:
9297 /* For NE, we can only do this simplification if integer
9298 or we don't honor IEEE floating point NaNs. */
9299 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9300 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9301 break;
9302 /* ... fall through ... */
9303 case GT_EXPR:
9304 case LT_EXPR:
9305 return constant_boolean_node (0, type);
9306 default:
9307 gcc_unreachable ();
9308 }
9309 }
9310
9311 /* If we are comparing an expression that just has comparisons
9312 of two integer values, arithmetic expressions of those comparisons,
9313 and constants, we can simplify it. There are only three cases
9314 to check: the two values can either be equal, the first can be
9315 greater, or the second can be greater. Fold the expression for
9316 those three values. Since each value must be 0 or 1, we have
9317 eight possibilities, each of which corresponds to the constant 0
9318 or 1 or one of the six possible comparisons.
9319
9320 This handles common cases like (a > b) == 0 but also handles
9321 expressions like ((x > y) - (y > x)) > 0, which supposedly
9322 occur in macroized code. */
9323
9324 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9325 {
9326 tree cval1 = 0, cval2 = 0;
9327 int save_p = 0;
9328
9329 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9330 /* Don't handle degenerate cases here; they should already
9331 have been handled anyway. */
9332 && cval1 != 0 && cval2 != 0
9333 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9334 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9335 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9336 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9337 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9338 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9339 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9340 {
9341 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9342 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9343
9344 /* We can't just pass T to eval_subst in case cval1 or cval2
9345 was the same as ARG1. */
9346
9347 tree high_result
9348 = fold_build2_loc (loc, code, type,
9349 eval_subst (loc, arg0, cval1, maxval,
9350 cval2, minval),
9351 arg1);
9352 tree equal_result
9353 = fold_build2_loc (loc, code, type,
9354 eval_subst (loc, arg0, cval1, maxval,
9355 cval2, maxval),
9356 arg1);
9357 tree low_result
9358 = fold_build2_loc (loc, code, type,
9359 eval_subst (loc, arg0, cval1, minval,
9360 cval2, maxval),
9361 arg1);
9362
9363 /* All three of these results should be 0 or 1. Confirm they are.
9364 Then use those values to select the proper code to use. */
9365
9366 if (TREE_CODE (high_result) == INTEGER_CST
9367 && TREE_CODE (equal_result) == INTEGER_CST
9368 && TREE_CODE (low_result) == INTEGER_CST)
9369 {
9370 /* Make a 3-bit mask with the high-order bit being the
9371 value for `>', the next for '=', and the low for '<'. */
9372 switch ((integer_onep (high_result) * 4)
9373 + (integer_onep (equal_result) * 2)
9374 + integer_onep (low_result))
9375 {
9376 case 0:
9377 /* Always false. */
9378 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9379 case 1:
9380 code = LT_EXPR;
9381 break;
9382 case 2:
9383 code = EQ_EXPR;
9384 break;
9385 case 3:
9386 code = LE_EXPR;
9387 break;
9388 case 4:
9389 code = GT_EXPR;
9390 break;
9391 case 5:
9392 code = NE_EXPR;
9393 break;
9394 case 6:
9395 code = GE_EXPR;
9396 break;
9397 case 7:
9398 /* Always true. */
9399 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9400 }
9401
9402 if (save_p)
9403 {
9404 tem = save_expr (build2 (code, type, cval1, cval2));
9405 SET_EXPR_LOCATION (tem, loc);
9406 return tem;
9407 }
9408 return fold_build2_loc (loc, code, type, cval1, cval2);
9409 }
9410 }
9411 }
9412
9413 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9414 into a single range test. */
9415 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9416 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9417 && TREE_CODE (arg1) == INTEGER_CST
9418 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9419 && !integer_zerop (TREE_OPERAND (arg0, 1))
9420 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9421 && !TREE_OVERFLOW (arg1))
9422 {
9423 tem = fold_div_compare (loc, code, type, arg0, arg1);
9424 if (tem != NULL_TREE)
9425 return tem;
9426 }
9427
9428 /* Fold ~X op ~Y as Y op X. */
9429 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9430 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9431 {
9432 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9433 return fold_build2_loc (loc, code, type,
9434 fold_convert_loc (loc, cmp_type,
9435 TREE_OPERAND (arg1, 0)),
9436 TREE_OPERAND (arg0, 0));
9437 }
9438
9439 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9440 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9441 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9442 {
9443 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9444 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9445 TREE_OPERAND (arg0, 0),
9446 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9447 fold_convert_loc (loc, cmp_type, arg1)));
9448 }
9449
9450 return NULL_TREE;
9451 }
9452
9453
9454 /* Subroutine of fold_binary. Optimize complex multiplications of the
9455 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9456 argument EXPR represents the expression "z" of type TYPE. */
9457
9458 static tree
9459 fold_mult_zconjz (location_t loc, tree type, tree expr)
9460 {
9461 tree itype = TREE_TYPE (type);
9462 tree rpart, ipart, tem;
9463
9464 if (TREE_CODE (expr) == COMPLEX_EXPR)
9465 {
9466 rpart = TREE_OPERAND (expr, 0);
9467 ipart = TREE_OPERAND (expr, 1);
9468 }
9469 else if (TREE_CODE (expr) == COMPLEX_CST)
9470 {
9471 rpart = TREE_REALPART (expr);
9472 ipart = TREE_IMAGPART (expr);
9473 }
9474 else
9475 {
9476 expr = save_expr (expr);
9477 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9478 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9479 }
9480
9481 rpart = save_expr (rpart);
9482 ipart = save_expr (ipart);
9483 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9484 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9485 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9486 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9487 build_zero_cst (itype));
9488 }
9489
9490
9491 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9492 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9493 guarantees that P and N have the same least significant log2(M) bits.
9494 N is not otherwise constrained. In particular, N is not normalized to
9495 0 <= N < M as is common. In general, the precise value of P is unknown.
9496 M is chosen as large as possible such that constant N can be determined.
9497
9498 Returns M and sets *RESIDUE to N.
9499
9500 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9501 account. This is not always possible due to PR 35705.
9502 */
9503
9504 static unsigned HOST_WIDE_INT
9505 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9506 bool allow_func_align)
9507 {
9508 enum tree_code code;
9509
9510 *residue = 0;
9511
9512 code = TREE_CODE (expr);
9513 if (code == ADDR_EXPR)
9514 {
9515 unsigned int bitalign;
9516 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9517 *residue /= BITS_PER_UNIT;
9518 return bitalign / BITS_PER_UNIT;
9519 }
9520 else if (code == POINTER_PLUS_EXPR)
9521 {
9522 tree op0, op1;
9523 unsigned HOST_WIDE_INT modulus;
9524 enum tree_code inner_code;
9525
9526 op0 = TREE_OPERAND (expr, 0);
9527 STRIP_NOPS (op0);
9528 modulus = get_pointer_modulus_and_residue (op0, residue,
9529 allow_func_align);
9530
9531 op1 = TREE_OPERAND (expr, 1);
9532 STRIP_NOPS (op1);
9533 inner_code = TREE_CODE (op1);
9534 if (inner_code == INTEGER_CST)
9535 {
9536 *residue += TREE_INT_CST_LOW (op1);
9537 return modulus;
9538 }
9539 else if (inner_code == MULT_EXPR)
9540 {
9541 op1 = TREE_OPERAND (op1, 1);
9542 if (TREE_CODE (op1) == INTEGER_CST)
9543 {
9544 unsigned HOST_WIDE_INT align;
9545
9546 /* Compute the greatest power-of-2 divisor of op1. */
9547 align = TREE_INT_CST_LOW (op1);
9548 align &= -align;
9549
9550 /* If align is non-zero and less than *modulus, replace
9551 *modulus with align., If align is 0, then either op1 is 0
9552 or the greatest power-of-2 divisor of op1 doesn't fit in an
9553 unsigned HOST_WIDE_INT. In either case, no additional
9554 constraint is imposed. */
9555 if (align)
9556 modulus = MIN (modulus, align);
9557
9558 return modulus;
9559 }
9560 }
9561 }
9562
9563 /* If we get here, we were unable to determine anything useful about the
9564 expression. */
9565 return 1;
9566 }
9567
9568 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9569 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9570
9571 static bool
9572 vec_cst_ctor_to_array (tree arg, tree *elts)
9573 {
9574 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9575
9576 if (TREE_CODE (arg) == VECTOR_CST)
9577 {
9578 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9579 elts[i] = VECTOR_CST_ELT (arg, i);
9580 }
9581 else if (TREE_CODE (arg) == CONSTRUCTOR)
9582 {
9583 constructor_elt *elt;
9584
9585 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9586 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9587 return false;
9588 else
9589 elts[i] = elt->value;
9590 }
9591 else
9592 return false;
9593 for (; i < nelts; i++)
9594 elts[i]
9595 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9596 return true;
9597 }
9598
9599 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9600 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9601 NULL_TREE otherwise. */
9602
9603 static tree
9604 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9605 {
9606 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9607 tree *elts;
9608 bool need_ctor = false;
9609
9610 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9611 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9612 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9613 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9614 return NULL_TREE;
9615
9616 elts = XALLOCAVEC (tree, nelts * 3);
9617 if (!vec_cst_ctor_to_array (arg0, elts)
9618 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9619 return NULL_TREE;
9620
9621 for (i = 0; i < nelts; i++)
9622 {
9623 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9624 need_ctor = true;
9625 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9626 }
9627
9628 if (need_ctor)
9629 {
9630 vec<constructor_elt, va_gc> *v;
9631 vec_alloc (v, nelts);
9632 for (i = 0; i < nelts; i++)
9633 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9634 return build_constructor (type, v);
9635 }
9636 else
9637 return build_vector (type, &elts[2 * nelts]);
9638 }
9639
9640 /* Try to fold a pointer difference of type TYPE two address expressions of
9641 array references AREF0 and AREF1 using location LOC. Return a
9642 simplified expression for the difference or NULL_TREE. */
9643
9644 static tree
9645 fold_addr_of_array_ref_difference (location_t loc, tree type,
9646 tree aref0, tree aref1)
9647 {
9648 tree base0 = TREE_OPERAND (aref0, 0);
9649 tree base1 = TREE_OPERAND (aref1, 0);
9650 tree base_offset = build_int_cst (type, 0);
9651
9652 /* If the bases are array references as well, recurse. If the bases
9653 are pointer indirections compute the difference of the pointers.
9654 If the bases are equal, we are set. */
9655 if ((TREE_CODE (base0) == ARRAY_REF
9656 && TREE_CODE (base1) == ARRAY_REF
9657 && (base_offset
9658 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9659 || (INDIRECT_REF_P (base0)
9660 && INDIRECT_REF_P (base1)
9661 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9662 TREE_OPERAND (base0, 0),
9663 TREE_OPERAND (base1, 0))))
9664 || operand_equal_p (base0, base1, 0))
9665 {
9666 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9667 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9668 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9669 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9670 return fold_build2_loc (loc, PLUS_EXPR, type,
9671 base_offset,
9672 fold_build2_loc (loc, MULT_EXPR, type,
9673 diff, esz));
9674 }
9675 return NULL_TREE;
9676 }
9677
9678 /* If the real or vector real constant CST of type TYPE has an exact
9679 inverse, return it, else return NULL. */
9680
9681 static tree
9682 exact_inverse (tree type, tree cst)
9683 {
9684 REAL_VALUE_TYPE r;
9685 tree unit_type, *elts;
9686 machine_mode mode;
9687 unsigned vec_nelts, i;
9688
9689 switch (TREE_CODE (cst))
9690 {
9691 case REAL_CST:
9692 r = TREE_REAL_CST (cst);
9693
9694 if (exact_real_inverse (TYPE_MODE (type), &r))
9695 return build_real (type, r);
9696
9697 return NULL_TREE;
9698
9699 case VECTOR_CST:
9700 vec_nelts = VECTOR_CST_NELTS (cst);
9701 elts = XALLOCAVEC (tree, vec_nelts);
9702 unit_type = TREE_TYPE (type);
9703 mode = TYPE_MODE (unit_type);
9704
9705 for (i = 0; i < vec_nelts; i++)
9706 {
9707 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9708 if (!exact_real_inverse (mode, &r))
9709 return NULL_TREE;
9710 elts[i] = build_real (unit_type, r);
9711 }
9712
9713 return build_vector (type, elts);
9714
9715 default:
9716 return NULL_TREE;
9717 }
9718 }
9719
9720 /* Mask out the tz least significant bits of X of type TYPE where
9721 tz is the number of trailing zeroes in Y. */
9722 static wide_int
9723 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9724 {
9725 int tz = wi::ctz (y);
9726 if (tz > 0)
9727 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9728 return x;
9729 }
9730
9731 /* Return true when T is an address and is known to be nonzero.
9732 For floating point we further ensure that T is not denormal.
9733 Similar logic is present in nonzero_address in rtlanal.h.
9734
9735 If the return value is based on the assumption that signed overflow
9736 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9737 change *STRICT_OVERFLOW_P. */
9738
9739 static bool
9740 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9741 {
9742 tree type = TREE_TYPE (t);
9743 enum tree_code code;
9744
9745 /* Doing something useful for floating point would need more work. */
9746 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9747 return false;
9748
9749 code = TREE_CODE (t);
9750 switch (TREE_CODE_CLASS (code))
9751 {
9752 case tcc_unary:
9753 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9754 strict_overflow_p);
9755 case tcc_binary:
9756 case tcc_comparison:
9757 return tree_binary_nonzero_warnv_p (code, type,
9758 TREE_OPERAND (t, 0),
9759 TREE_OPERAND (t, 1),
9760 strict_overflow_p);
9761 case tcc_constant:
9762 case tcc_declaration:
9763 case tcc_reference:
9764 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9765
9766 default:
9767 break;
9768 }
9769
9770 switch (code)
9771 {
9772 case TRUTH_NOT_EXPR:
9773 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9774 strict_overflow_p);
9775
9776 case TRUTH_AND_EXPR:
9777 case TRUTH_OR_EXPR:
9778 case TRUTH_XOR_EXPR:
9779 return tree_binary_nonzero_warnv_p (code, type,
9780 TREE_OPERAND (t, 0),
9781 TREE_OPERAND (t, 1),
9782 strict_overflow_p);
9783
9784 case COND_EXPR:
9785 case CONSTRUCTOR:
9786 case OBJ_TYPE_REF:
9787 case ASSERT_EXPR:
9788 case ADDR_EXPR:
9789 case WITH_SIZE_EXPR:
9790 case SSA_NAME:
9791 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9792
9793 case COMPOUND_EXPR:
9794 case MODIFY_EXPR:
9795 case BIND_EXPR:
9796 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9797 strict_overflow_p);
9798
9799 case SAVE_EXPR:
9800 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9801 strict_overflow_p);
9802
9803 case CALL_EXPR:
9804 {
9805 tree fndecl = get_callee_fndecl (t);
9806 if (!fndecl) return false;
9807 if (flag_delete_null_pointer_checks && !flag_check_new
9808 && DECL_IS_OPERATOR_NEW (fndecl)
9809 && !TREE_NOTHROW (fndecl))
9810 return true;
9811 if (flag_delete_null_pointer_checks
9812 && lookup_attribute ("returns_nonnull",
9813 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9814 return true;
9815 return alloca_call_p (t);
9816 }
9817
9818 default:
9819 break;
9820 }
9821 return false;
9822 }
9823
9824 /* Return true when T is an address and is known to be nonzero.
9825 Handle warnings about undefined signed overflow. */
9826
9827 static bool
9828 tree_expr_nonzero_p (tree t)
9829 {
9830 bool ret, strict_overflow_p;
9831
9832 strict_overflow_p = false;
9833 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9834 if (strict_overflow_p)
9835 fold_overflow_warning (("assuming signed overflow does not occur when "
9836 "determining that expression is always "
9837 "non-zero"),
9838 WARN_STRICT_OVERFLOW_MISC);
9839 return ret;
9840 }
9841
9842 /* Fold a binary expression of code CODE and type TYPE with operands
9843 OP0 and OP1. LOC is the location of the resulting expression.
9844 Return the folded expression if folding is successful. Otherwise,
9845 return NULL_TREE. */
9846
9847 tree
9848 fold_binary_loc (location_t loc,
9849 enum tree_code code, tree type, tree op0, tree op1)
9850 {
9851 enum tree_code_class kind = TREE_CODE_CLASS (code);
9852 tree arg0, arg1, tem;
9853 tree t1 = NULL_TREE;
9854 bool strict_overflow_p;
9855 unsigned int prec;
9856
9857 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9858 && TREE_CODE_LENGTH (code) == 2
9859 && op0 != NULL_TREE
9860 && op1 != NULL_TREE);
9861
9862 arg0 = op0;
9863 arg1 = op1;
9864
9865 /* Strip any conversions that don't change the mode. This is
9866 safe for every expression, except for a comparison expression
9867 because its signedness is derived from its operands. So, in
9868 the latter case, only strip conversions that don't change the
9869 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9870 preserved.
9871
9872 Note that this is done as an internal manipulation within the
9873 constant folder, in order to find the simplest representation
9874 of the arguments so that their form can be studied. In any
9875 cases, the appropriate type conversions should be put back in
9876 the tree that will get out of the constant folder. */
9877
9878 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9879 {
9880 STRIP_SIGN_NOPS (arg0);
9881 STRIP_SIGN_NOPS (arg1);
9882 }
9883 else
9884 {
9885 STRIP_NOPS (arg0);
9886 STRIP_NOPS (arg1);
9887 }
9888
9889 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9890 constant but we can't do arithmetic on them. */
9891 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9892 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9893 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9894 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9895 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9896 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9897 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9898 {
9899 if (kind == tcc_binary)
9900 {
9901 /* Make sure type and arg0 have the same saturating flag. */
9902 gcc_assert (TYPE_SATURATING (type)
9903 == TYPE_SATURATING (TREE_TYPE (arg0)));
9904 tem = const_binop (code, arg0, arg1);
9905 }
9906 else if (kind == tcc_comparison)
9907 tem = fold_relational_const (code, type, arg0, arg1);
9908 else
9909 tem = NULL_TREE;
9910
9911 if (tem != NULL_TREE)
9912 {
9913 if (TREE_TYPE (tem) != type)
9914 tem = fold_convert_loc (loc, type, tem);
9915 return tem;
9916 }
9917 }
9918
9919 /* If this is a commutative operation, and ARG0 is a constant, move it
9920 to ARG1 to reduce the number of tests below. */
9921 if (commutative_tree_code (code)
9922 && tree_swap_operands_p (arg0, arg1, true))
9923 return fold_build2_loc (loc, code, type, op1, op0);
9924
9925 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9926 to ARG1 to reduce the number of tests below. */
9927 if (kind == tcc_comparison
9928 && tree_swap_operands_p (arg0, arg1, true))
9929 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9930
9931 tem = generic_simplify (loc, code, type, op0, op1);
9932 if (tem)
9933 return tem;
9934
9935 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9936
9937 First check for cases where an arithmetic operation is applied to a
9938 compound, conditional, or comparison operation. Push the arithmetic
9939 operation inside the compound or conditional to see if any folding
9940 can then be done. Convert comparison to conditional for this purpose.
9941 The also optimizes non-constant cases that used to be done in
9942 expand_expr.
9943
9944 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9945 one of the operands is a comparison and the other is a comparison, a
9946 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9947 code below would make the expression more complex. Change it to a
9948 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9949 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9950
9951 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9952 || code == EQ_EXPR || code == NE_EXPR)
9953 && TREE_CODE (type) != VECTOR_TYPE
9954 && ((truth_value_p (TREE_CODE (arg0))
9955 && (truth_value_p (TREE_CODE (arg1))
9956 || (TREE_CODE (arg1) == BIT_AND_EXPR
9957 && integer_onep (TREE_OPERAND (arg1, 1)))))
9958 || (truth_value_p (TREE_CODE (arg1))
9959 && (truth_value_p (TREE_CODE (arg0))
9960 || (TREE_CODE (arg0) == BIT_AND_EXPR
9961 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9962 {
9963 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9964 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9965 : TRUTH_XOR_EXPR,
9966 boolean_type_node,
9967 fold_convert_loc (loc, boolean_type_node, arg0),
9968 fold_convert_loc (loc, boolean_type_node, arg1));
9969
9970 if (code == EQ_EXPR)
9971 tem = invert_truthvalue_loc (loc, tem);
9972
9973 return fold_convert_loc (loc, type, tem);
9974 }
9975
9976 if (TREE_CODE_CLASS (code) == tcc_binary
9977 || TREE_CODE_CLASS (code) == tcc_comparison)
9978 {
9979 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9980 {
9981 tem = fold_build2_loc (loc, code, type,
9982 fold_convert_loc (loc, TREE_TYPE (op0),
9983 TREE_OPERAND (arg0, 1)), op1);
9984 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9985 tem);
9986 }
9987 if (TREE_CODE (arg1) == COMPOUND_EXPR
9988 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9989 {
9990 tem = fold_build2_loc (loc, code, type, op0,
9991 fold_convert_loc (loc, TREE_TYPE (op1),
9992 TREE_OPERAND (arg1, 1)));
9993 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9994 tem);
9995 }
9996
9997 if (TREE_CODE (arg0) == COND_EXPR
9998 || TREE_CODE (arg0) == VEC_COND_EXPR
9999 || COMPARISON_CLASS_P (arg0))
10000 {
10001 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10002 arg0, arg1,
10003 /*cond_first_p=*/1);
10004 if (tem != NULL_TREE)
10005 return tem;
10006 }
10007
10008 if (TREE_CODE (arg1) == COND_EXPR
10009 || TREE_CODE (arg1) == VEC_COND_EXPR
10010 || COMPARISON_CLASS_P (arg1))
10011 {
10012 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10013 arg1, arg0,
10014 /*cond_first_p=*/0);
10015 if (tem != NULL_TREE)
10016 return tem;
10017 }
10018 }
10019
10020 switch (code)
10021 {
10022 case MEM_REF:
10023 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10024 if (TREE_CODE (arg0) == ADDR_EXPR
10025 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10026 {
10027 tree iref = TREE_OPERAND (arg0, 0);
10028 return fold_build2 (MEM_REF, type,
10029 TREE_OPERAND (iref, 0),
10030 int_const_binop (PLUS_EXPR, arg1,
10031 TREE_OPERAND (iref, 1)));
10032 }
10033
10034 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10035 if (TREE_CODE (arg0) == ADDR_EXPR
10036 && handled_component_p (TREE_OPERAND (arg0, 0)))
10037 {
10038 tree base;
10039 HOST_WIDE_INT coffset;
10040 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10041 &coffset);
10042 if (!base)
10043 return NULL_TREE;
10044 return fold_build2 (MEM_REF, type,
10045 build_fold_addr_expr (base),
10046 int_const_binop (PLUS_EXPR, arg1,
10047 size_int (coffset)));
10048 }
10049
10050 return NULL_TREE;
10051
10052 case POINTER_PLUS_EXPR:
10053 /* 0 +p index -> (type)index */
10054 if (integer_zerop (arg0))
10055 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10056
10057 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10058 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10059 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10060 return fold_convert_loc (loc, type,
10061 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10062 fold_convert_loc (loc, sizetype,
10063 arg1),
10064 fold_convert_loc (loc, sizetype,
10065 arg0)));
10066
10067 /* (PTR +p B) +p A -> PTR +p (B + A) */
10068 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10069 {
10070 tree inner;
10071 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10072 tree arg00 = TREE_OPERAND (arg0, 0);
10073 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10074 arg01, fold_convert_loc (loc, sizetype, arg1));
10075 return fold_convert_loc (loc, type,
10076 fold_build_pointer_plus_loc (loc,
10077 arg00, inner));
10078 }
10079
10080 /* PTR_CST +p CST -> CST1 */
10081 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10082 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10083 fold_convert_loc (loc, type, arg1));
10084
10085 return NULL_TREE;
10086
10087 case PLUS_EXPR:
10088 /* A + (-B) -> A - B */
10089 if (TREE_CODE (arg1) == NEGATE_EXPR
10090 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10091 return fold_build2_loc (loc, MINUS_EXPR, type,
10092 fold_convert_loc (loc, type, arg0),
10093 fold_convert_loc (loc, type,
10094 TREE_OPERAND (arg1, 0)));
10095 /* (-A) + B -> B - A */
10096 if (TREE_CODE (arg0) == NEGATE_EXPR
10097 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10098 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10099 return fold_build2_loc (loc, MINUS_EXPR, type,
10100 fold_convert_loc (loc, type, arg1),
10101 fold_convert_loc (loc, type,
10102 TREE_OPERAND (arg0, 0)));
10103
10104 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10105 {
10106 /* Convert ~A + 1 to -A. */
10107 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10108 && integer_each_onep (arg1))
10109 return fold_build1_loc (loc, NEGATE_EXPR, type,
10110 fold_convert_loc (loc, type,
10111 TREE_OPERAND (arg0, 0)));
10112
10113 /* ~X + X is -1. */
10114 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10115 && !TYPE_OVERFLOW_TRAPS (type))
10116 {
10117 tree tem = TREE_OPERAND (arg0, 0);
10118
10119 STRIP_NOPS (tem);
10120 if (operand_equal_p (tem, arg1, 0))
10121 {
10122 t1 = build_all_ones_cst (type);
10123 return omit_one_operand_loc (loc, type, t1, arg1);
10124 }
10125 }
10126
10127 /* X + ~X is -1. */
10128 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10129 && !TYPE_OVERFLOW_TRAPS (type))
10130 {
10131 tree tem = TREE_OPERAND (arg1, 0);
10132
10133 STRIP_NOPS (tem);
10134 if (operand_equal_p (arg0, tem, 0))
10135 {
10136 t1 = build_all_ones_cst (type);
10137 return omit_one_operand_loc (loc, type, t1, arg0);
10138 }
10139 }
10140
10141 /* X + (X / CST) * -CST is X % CST. */
10142 if (TREE_CODE (arg1) == MULT_EXPR
10143 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10144 && operand_equal_p (arg0,
10145 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10146 {
10147 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10148 tree cst1 = TREE_OPERAND (arg1, 1);
10149 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10150 cst1, cst0);
10151 if (sum && integer_zerop (sum))
10152 return fold_convert_loc (loc, type,
10153 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10154 TREE_TYPE (arg0), arg0,
10155 cst0));
10156 }
10157 }
10158
10159 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10160 one. Make sure the type is not saturating and has the signedness of
10161 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10162 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10163 if ((TREE_CODE (arg0) == MULT_EXPR
10164 || TREE_CODE (arg1) == MULT_EXPR)
10165 && !TYPE_SATURATING (type)
10166 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10167 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10168 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10169 {
10170 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10171 if (tem)
10172 return tem;
10173 }
10174
10175 if (! FLOAT_TYPE_P (type))
10176 {
10177 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10178 with a constant, and the two constants have no bits in common,
10179 we should treat this as a BIT_IOR_EXPR since this may produce more
10180 simplifications. */
10181 if (TREE_CODE (arg0) == BIT_AND_EXPR
10182 && TREE_CODE (arg1) == BIT_AND_EXPR
10183 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10184 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10185 && wi::bit_and (TREE_OPERAND (arg0, 1),
10186 TREE_OPERAND (arg1, 1)) == 0)
10187 {
10188 code = BIT_IOR_EXPR;
10189 goto bit_ior;
10190 }
10191
10192 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10193 (plus (plus (mult) (mult)) (foo)) so that we can
10194 take advantage of the factoring cases below. */
10195 if (TYPE_OVERFLOW_WRAPS (type)
10196 && (((TREE_CODE (arg0) == PLUS_EXPR
10197 || TREE_CODE (arg0) == MINUS_EXPR)
10198 && TREE_CODE (arg1) == MULT_EXPR)
10199 || ((TREE_CODE (arg1) == PLUS_EXPR
10200 || TREE_CODE (arg1) == MINUS_EXPR)
10201 && TREE_CODE (arg0) == MULT_EXPR)))
10202 {
10203 tree parg0, parg1, parg, marg;
10204 enum tree_code pcode;
10205
10206 if (TREE_CODE (arg1) == MULT_EXPR)
10207 parg = arg0, marg = arg1;
10208 else
10209 parg = arg1, marg = arg0;
10210 pcode = TREE_CODE (parg);
10211 parg0 = TREE_OPERAND (parg, 0);
10212 parg1 = TREE_OPERAND (parg, 1);
10213 STRIP_NOPS (parg0);
10214 STRIP_NOPS (parg1);
10215
10216 if (TREE_CODE (parg0) == MULT_EXPR
10217 && TREE_CODE (parg1) != MULT_EXPR)
10218 return fold_build2_loc (loc, pcode, type,
10219 fold_build2_loc (loc, PLUS_EXPR, type,
10220 fold_convert_loc (loc, type,
10221 parg0),
10222 fold_convert_loc (loc, type,
10223 marg)),
10224 fold_convert_loc (loc, type, parg1));
10225 if (TREE_CODE (parg0) != MULT_EXPR
10226 && TREE_CODE (parg1) == MULT_EXPR)
10227 return
10228 fold_build2_loc (loc, PLUS_EXPR, type,
10229 fold_convert_loc (loc, type, parg0),
10230 fold_build2_loc (loc, pcode, type,
10231 fold_convert_loc (loc, type, marg),
10232 fold_convert_loc (loc, type,
10233 parg1)));
10234 }
10235 }
10236 else
10237 {
10238 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10239 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10240 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10241
10242 /* Likewise if the operands are reversed. */
10243 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10244 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10245
10246 /* Convert X + -C into X - C. */
10247 if (TREE_CODE (arg1) == REAL_CST
10248 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10249 {
10250 tem = fold_negate_const (arg1, type);
10251 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10252 return fold_build2_loc (loc, MINUS_EXPR, type,
10253 fold_convert_loc (loc, type, arg0),
10254 fold_convert_loc (loc, type, tem));
10255 }
10256
10257 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10258 to __complex__ ( x, y ). This is not the same for SNaNs or
10259 if signed zeros are involved. */
10260 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10261 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10262 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10263 {
10264 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10265 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10266 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10267 bool arg0rz = false, arg0iz = false;
10268 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10269 || (arg0i && (arg0iz = real_zerop (arg0i))))
10270 {
10271 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10272 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10273 if (arg0rz && arg1i && real_zerop (arg1i))
10274 {
10275 tree rp = arg1r ? arg1r
10276 : build1 (REALPART_EXPR, rtype, arg1);
10277 tree ip = arg0i ? arg0i
10278 : build1 (IMAGPART_EXPR, rtype, arg0);
10279 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10280 }
10281 else if (arg0iz && arg1r && real_zerop (arg1r))
10282 {
10283 tree rp = arg0r ? arg0r
10284 : build1 (REALPART_EXPR, rtype, arg0);
10285 tree ip = arg1i ? arg1i
10286 : build1 (IMAGPART_EXPR, rtype, arg1);
10287 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10288 }
10289 }
10290 }
10291
10292 if (flag_unsafe_math_optimizations
10293 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10294 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10295 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10296 return tem;
10297
10298 /* Convert x+x into x*2.0. */
10299 if (operand_equal_p (arg0, arg1, 0)
10300 && SCALAR_FLOAT_TYPE_P (type))
10301 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10302 build_real (type, dconst2));
10303
10304 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10305 We associate floats only if the user has specified
10306 -fassociative-math. */
10307 if (flag_associative_math
10308 && TREE_CODE (arg1) == PLUS_EXPR
10309 && TREE_CODE (arg0) != MULT_EXPR)
10310 {
10311 tree tree10 = TREE_OPERAND (arg1, 0);
10312 tree tree11 = TREE_OPERAND (arg1, 1);
10313 if (TREE_CODE (tree11) == MULT_EXPR
10314 && TREE_CODE (tree10) == MULT_EXPR)
10315 {
10316 tree tree0;
10317 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10318 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10319 }
10320 }
10321 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10322 We associate floats only if the user has specified
10323 -fassociative-math. */
10324 if (flag_associative_math
10325 && TREE_CODE (arg0) == PLUS_EXPR
10326 && TREE_CODE (arg1) != MULT_EXPR)
10327 {
10328 tree tree00 = TREE_OPERAND (arg0, 0);
10329 tree tree01 = TREE_OPERAND (arg0, 1);
10330 if (TREE_CODE (tree01) == MULT_EXPR
10331 && TREE_CODE (tree00) == MULT_EXPR)
10332 {
10333 tree tree0;
10334 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10335 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10336 }
10337 }
10338 }
10339
10340 bit_rotate:
10341 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10342 is a rotate of A by C1 bits. */
10343 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10344 is a rotate of A by B bits. */
10345 {
10346 enum tree_code code0, code1;
10347 tree rtype;
10348 code0 = TREE_CODE (arg0);
10349 code1 = TREE_CODE (arg1);
10350 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10351 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10352 && operand_equal_p (TREE_OPERAND (arg0, 0),
10353 TREE_OPERAND (arg1, 0), 0)
10354 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10355 TYPE_UNSIGNED (rtype))
10356 /* Only create rotates in complete modes. Other cases are not
10357 expanded properly. */
10358 && (element_precision (rtype)
10359 == element_precision (TYPE_MODE (rtype))))
10360 {
10361 tree tree01, tree11;
10362 enum tree_code code01, code11;
10363
10364 tree01 = TREE_OPERAND (arg0, 1);
10365 tree11 = TREE_OPERAND (arg1, 1);
10366 STRIP_NOPS (tree01);
10367 STRIP_NOPS (tree11);
10368 code01 = TREE_CODE (tree01);
10369 code11 = TREE_CODE (tree11);
10370 if (code01 == INTEGER_CST
10371 && code11 == INTEGER_CST
10372 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10373 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10374 {
10375 tem = build2_loc (loc, LROTATE_EXPR,
10376 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10377 TREE_OPERAND (arg0, 0),
10378 code0 == LSHIFT_EXPR ? tree01 : tree11);
10379 return fold_convert_loc (loc, type, tem);
10380 }
10381 else if (code11 == MINUS_EXPR)
10382 {
10383 tree tree110, tree111;
10384 tree110 = TREE_OPERAND (tree11, 0);
10385 tree111 = TREE_OPERAND (tree11, 1);
10386 STRIP_NOPS (tree110);
10387 STRIP_NOPS (tree111);
10388 if (TREE_CODE (tree110) == INTEGER_CST
10389 && 0 == compare_tree_int (tree110,
10390 element_precision
10391 (TREE_TYPE (TREE_OPERAND
10392 (arg0, 0))))
10393 && operand_equal_p (tree01, tree111, 0))
10394 return
10395 fold_convert_loc (loc, type,
10396 build2 ((code0 == LSHIFT_EXPR
10397 ? LROTATE_EXPR
10398 : RROTATE_EXPR),
10399 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10400 TREE_OPERAND (arg0, 0), tree01));
10401 }
10402 else if (code01 == MINUS_EXPR)
10403 {
10404 tree tree010, tree011;
10405 tree010 = TREE_OPERAND (tree01, 0);
10406 tree011 = TREE_OPERAND (tree01, 1);
10407 STRIP_NOPS (tree010);
10408 STRIP_NOPS (tree011);
10409 if (TREE_CODE (tree010) == INTEGER_CST
10410 && 0 == compare_tree_int (tree010,
10411 element_precision
10412 (TREE_TYPE (TREE_OPERAND
10413 (arg0, 0))))
10414 && operand_equal_p (tree11, tree011, 0))
10415 return fold_convert_loc
10416 (loc, type,
10417 build2 ((code0 != LSHIFT_EXPR
10418 ? LROTATE_EXPR
10419 : RROTATE_EXPR),
10420 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10421 TREE_OPERAND (arg0, 0), tree11));
10422 }
10423 }
10424 }
10425
10426 associate:
10427 /* In most languages, can't associate operations on floats through
10428 parentheses. Rather than remember where the parentheses were, we
10429 don't associate floats at all, unless the user has specified
10430 -fassociative-math.
10431 And, we need to make sure type is not saturating. */
10432
10433 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10434 && !TYPE_SATURATING (type))
10435 {
10436 tree var0, con0, lit0, minus_lit0;
10437 tree var1, con1, lit1, minus_lit1;
10438 tree atype = type;
10439 bool ok = true;
10440
10441 /* Split both trees into variables, constants, and literals. Then
10442 associate each group together, the constants with literals,
10443 then the result with variables. This increases the chances of
10444 literals being recombined later and of generating relocatable
10445 expressions for the sum of a constant and literal. */
10446 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10447 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10448 code == MINUS_EXPR);
10449
10450 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10451 if (code == MINUS_EXPR)
10452 code = PLUS_EXPR;
10453
10454 /* With undefined overflow prefer doing association in a type
10455 which wraps on overflow, if that is one of the operand types. */
10456 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10457 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10458 {
10459 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10460 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10461 atype = TREE_TYPE (arg0);
10462 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10463 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10464 atype = TREE_TYPE (arg1);
10465 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10466 }
10467
10468 /* With undefined overflow we can only associate constants with one
10469 variable, and constants whose association doesn't overflow. */
10470 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10471 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10472 {
10473 if (var0 && var1)
10474 {
10475 tree tmp0 = var0;
10476 tree tmp1 = var1;
10477
10478 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10479 tmp0 = TREE_OPERAND (tmp0, 0);
10480 if (CONVERT_EXPR_P (tmp0)
10481 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10482 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10483 <= TYPE_PRECISION (atype)))
10484 tmp0 = TREE_OPERAND (tmp0, 0);
10485 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10486 tmp1 = TREE_OPERAND (tmp1, 0);
10487 if (CONVERT_EXPR_P (tmp1)
10488 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10489 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10490 <= TYPE_PRECISION (atype)))
10491 tmp1 = TREE_OPERAND (tmp1, 0);
10492 /* The only case we can still associate with two variables
10493 is if they are the same, modulo negation and bit-pattern
10494 preserving conversions. */
10495 if (!operand_equal_p (tmp0, tmp1, 0))
10496 ok = false;
10497 }
10498 }
10499
10500 /* Only do something if we found more than two objects. Otherwise,
10501 nothing has changed and we risk infinite recursion. */
10502 if (ok
10503 && (2 < ((var0 != 0) + (var1 != 0)
10504 + (con0 != 0) + (con1 != 0)
10505 + (lit0 != 0) + (lit1 != 0)
10506 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10507 {
10508 bool any_overflows = false;
10509 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10510 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10511 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10512 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10513 var0 = associate_trees (loc, var0, var1, code, atype);
10514 con0 = associate_trees (loc, con0, con1, code, atype);
10515 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10516 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10517 code, atype);
10518
10519 /* Preserve the MINUS_EXPR if the negative part of the literal is
10520 greater than the positive part. Otherwise, the multiplicative
10521 folding code (i.e extract_muldiv) may be fooled in case
10522 unsigned constants are subtracted, like in the following
10523 example: ((X*2 + 4) - 8U)/2. */
10524 if (minus_lit0 && lit0)
10525 {
10526 if (TREE_CODE (lit0) == INTEGER_CST
10527 && TREE_CODE (minus_lit0) == INTEGER_CST
10528 && tree_int_cst_lt (lit0, minus_lit0))
10529 {
10530 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10531 MINUS_EXPR, atype);
10532 lit0 = 0;
10533 }
10534 else
10535 {
10536 lit0 = associate_trees (loc, lit0, minus_lit0,
10537 MINUS_EXPR, atype);
10538 minus_lit0 = 0;
10539 }
10540 }
10541
10542 /* Don't introduce overflows through reassociation. */
10543 if (!any_overflows
10544 && ((lit0 && TREE_OVERFLOW (lit0))
10545 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10546 return NULL_TREE;
10547
10548 if (minus_lit0)
10549 {
10550 if (con0 == 0)
10551 return
10552 fold_convert_loc (loc, type,
10553 associate_trees (loc, var0, minus_lit0,
10554 MINUS_EXPR, atype));
10555 else
10556 {
10557 con0 = associate_trees (loc, con0, minus_lit0,
10558 MINUS_EXPR, atype);
10559 return
10560 fold_convert_loc (loc, type,
10561 associate_trees (loc, var0, con0,
10562 PLUS_EXPR, atype));
10563 }
10564 }
10565
10566 con0 = associate_trees (loc, con0, lit0, code, atype);
10567 return
10568 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10569 code, atype));
10570 }
10571 }
10572
10573 return NULL_TREE;
10574
10575 case MINUS_EXPR:
10576 /* Pointer simplifications for subtraction, simple reassociations. */
10577 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10578 {
10579 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10580 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10581 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10582 {
10583 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10584 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10585 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10586 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10587 return fold_build2_loc (loc, PLUS_EXPR, type,
10588 fold_build2_loc (loc, MINUS_EXPR, type,
10589 arg00, arg10),
10590 fold_build2_loc (loc, MINUS_EXPR, type,
10591 arg01, arg11));
10592 }
10593 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10594 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10595 {
10596 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10597 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10598 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10599 fold_convert_loc (loc, type, arg1));
10600 if (tmp)
10601 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10602 }
10603 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10604 simplifies. */
10605 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10606 {
10607 tree arg10 = fold_convert_loc (loc, type,
10608 TREE_OPERAND (arg1, 0));
10609 tree arg11 = fold_convert_loc (loc, type,
10610 TREE_OPERAND (arg1, 1));
10611 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10612 fold_convert_loc (loc, type, arg0),
10613 arg10);
10614 if (tmp)
10615 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10616 }
10617 }
10618 /* A - (-B) -> A + B */
10619 if (TREE_CODE (arg1) == NEGATE_EXPR)
10620 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10621 fold_convert_loc (loc, type,
10622 TREE_OPERAND (arg1, 0)));
10623 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10624 if (TREE_CODE (arg0) == NEGATE_EXPR
10625 && negate_expr_p (arg1)
10626 && reorder_operands_p (arg0, arg1))
10627 return fold_build2_loc (loc, MINUS_EXPR, type,
10628 fold_convert_loc (loc, type,
10629 negate_expr (arg1)),
10630 fold_convert_loc (loc, type,
10631 TREE_OPERAND (arg0, 0)));
10632 /* Convert -A - 1 to ~A. */
10633 if (TREE_CODE (arg0) == NEGATE_EXPR
10634 && integer_each_onep (arg1)
10635 && !TYPE_OVERFLOW_TRAPS (type))
10636 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10637 fold_convert_loc (loc, type,
10638 TREE_OPERAND (arg0, 0)));
10639
10640 /* Convert -1 - A to ~A. */
10641 if (TREE_CODE (type) != COMPLEX_TYPE
10642 && integer_all_onesp (arg0))
10643 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10644
10645
10646 /* X - (X / Y) * Y is X % Y. */
10647 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10648 && TREE_CODE (arg1) == MULT_EXPR
10649 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10650 && operand_equal_p (arg0,
10651 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10652 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10653 TREE_OPERAND (arg1, 1), 0))
10654 return
10655 fold_convert_loc (loc, type,
10656 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10657 arg0, TREE_OPERAND (arg1, 1)));
10658
10659 if (! FLOAT_TYPE_P (type))
10660 {
10661 if (integer_zerop (arg0))
10662 return negate_expr (fold_convert_loc (loc, type, arg1));
10663
10664 /* Fold A - (A & B) into ~B & A. */
10665 if (!TREE_SIDE_EFFECTS (arg0)
10666 && TREE_CODE (arg1) == BIT_AND_EXPR)
10667 {
10668 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10669 {
10670 tree arg10 = fold_convert_loc (loc, type,
10671 TREE_OPERAND (arg1, 0));
10672 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10673 fold_build1_loc (loc, BIT_NOT_EXPR,
10674 type, arg10),
10675 fold_convert_loc (loc, type, arg0));
10676 }
10677 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10678 {
10679 tree arg11 = fold_convert_loc (loc,
10680 type, TREE_OPERAND (arg1, 1));
10681 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10682 fold_build1_loc (loc, BIT_NOT_EXPR,
10683 type, arg11),
10684 fold_convert_loc (loc, type, arg0));
10685 }
10686 }
10687
10688 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10689 any power of 2 minus 1. */
10690 if (TREE_CODE (arg0) == BIT_AND_EXPR
10691 && TREE_CODE (arg1) == BIT_AND_EXPR
10692 && operand_equal_p (TREE_OPERAND (arg0, 0),
10693 TREE_OPERAND (arg1, 0), 0))
10694 {
10695 tree mask0 = TREE_OPERAND (arg0, 1);
10696 tree mask1 = TREE_OPERAND (arg1, 1);
10697 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10698
10699 if (operand_equal_p (tem, mask1, 0))
10700 {
10701 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10702 TREE_OPERAND (arg0, 0), mask1);
10703 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10704 }
10705 }
10706 }
10707
10708 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10709 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10710 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10711
10712 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10713 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10714 (-ARG1 + ARG0) reduces to -ARG1. */
10715 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10716 return negate_expr (fold_convert_loc (loc, type, arg1));
10717
10718 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10719 __complex__ ( x, -y ). This is not the same for SNaNs or if
10720 signed zeros are involved. */
10721 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10722 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10723 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10724 {
10725 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10726 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10727 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10728 bool arg0rz = false, arg0iz = false;
10729 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10730 || (arg0i && (arg0iz = real_zerop (arg0i))))
10731 {
10732 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10733 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10734 if (arg0rz && arg1i && real_zerop (arg1i))
10735 {
10736 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10737 arg1r ? arg1r
10738 : build1 (REALPART_EXPR, rtype, arg1));
10739 tree ip = arg0i ? arg0i
10740 : build1 (IMAGPART_EXPR, rtype, arg0);
10741 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10742 }
10743 else if (arg0iz && arg1r && real_zerop (arg1r))
10744 {
10745 tree rp = arg0r ? arg0r
10746 : build1 (REALPART_EXPR, rtype, arg0);
10747 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10748 arg1i ? arg1i
10749 : build1 (IMAGPART_EXPR, rtype, arg1));
10750 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10751 }
10752 }
10753 }
10754
10755 /* A - B -> A + (-B) if B is easily negatable. */
10756 if (negate_expr_p (arg1)
10757 && ((FLOAT_TYPE_P (type)
10758 /* Avoid this transformation if B is a positive REAL_CST. */
10759 && (TREE_CODE (arg1) != REAL_CST
10760 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10761 || INTEGRAL_TYPE_P (type)))
10762 return fold_build2_loc (loc, PLUS_EXPR, type,
10763 fold_convert_loc (loc, type, arg0),
10764 fold_convert_loc (loc, type,
10765 negate_expr (arg1)));
10766
10767 /* Try folding difference of addresses. */
10768 {
10769 HOST_WIDE_INT diff;
10770
10771 if ((TREE_CODE (arg0) == ADDR_EXPR
10772 || TREE_CODE (arg1) == ADDR_EXPR)
10773 && ptr_difference_const (arg0, arg1, &diff))
10774 return build_int_cst_type (type, diff);
10775 }
10776
10777 /* Fold &a[i] - &a[j] to i-j. */
10778 if (TREE_CODE (arg0) == ADDR_EXPR
10779 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10780 && TREE_CODE (arg1) == ADDR_EXPR
10781 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10782 {
10783 tree tem = fold_addr_of_array_ref_difference (loc, type,
10784 TREE_OPERAND (arg0, 0),
10785 TREE_OPERAND (arg1, 0));
10786 if (tem)
10787 return tem;
10788 }
10789
10790 if (FLOAT_TYPE_P (type)
10791 && flag_unsafe_math_optimizations
10792 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10793 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10794 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10795 return tem;
10796
10797 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10798 one. Make sure the type is not saturating and has the signedness of
10799 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10800 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10801 if ((TREE_CODE (arg0) == MULT_EXPR
10802 || TREE_CODE (arg1) == MULT_EXPR)
10803 && !TYPE_SATURATING (type)
10804 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10805 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10806 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10807 {
10808 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10809 if (tem)
10810 return tem;
10811 }
10812
10813 goto associate;
10814
10815 case MULT_EXPR:
10816 /* (-A) * (-B) -> A * B */
10817 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10818 return fold_build2_loc (loc, MULT_EXPR, type,
10819 fold_convert_loc (loc, type,
10820 TREE_OPERAND (arg0, 0)),
10821 fold_convert_loc (loc, type,
10822 negate_expr (arg1)));
10823 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10824 return fold_build2_loc (loc, MULT_EXPR, type,
10825 fold_convert_loc (loc, type,
10826 negate_expr (arg0)),
10827 fold_convert_loc (loc, type,
10828 TREE_OPERAND (arg1, 0)));
10829
10830 if (! FLOAT_TYPE_P (type))
10831 {
10832 /* Transform x * -1 into -x. Make sure to do the negation
10833 on the original operand with conversions not stripped
10834 because we can only strip non-sign-changing conversions. */
10835 if (integer_minus_onep (arg1))
10836 return fold_convert_loc (loc, type, negate_expr (op0));
10837 /* Transform x * -C into -x * C if x is easily negatable. */
10838 if (TREE_CODE (arg1) == INTEGER_CST
10839 && tree_int_cst_sgn (arg1) == -1
10840 && negate_expr_p (arg0)
10841 && (tem = negate_expr (arg1)) != arg1
10842 && !TREE_OVERFLOW (tem))
10843 return fold_build2_loc (loc, MULT_EXPR, type,
10844 fold_convert_loc (loc, type,
10845 negate_expr (arg0)),
10846 tem);
10847
10848 /* (a * (1 << b)) is (a << b) */
10849 if (TREE_CODE (arg1) == LSHIFT_EXPR
10850 && integer_onep (TREE_OPERAND (arg1, 0)))
10851 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10852 TREE_OPERAND (arg1, 1));
10853 if (TREE_CODE (arg0) == LSHIFT_EXPR
10854 && integer_onep (TREE_OPERAND (arg0, 0)))
10855 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10856 TREE_OPERAND (arg0, 1));
10857
10858 /* (A + A) * C -> A * 2 * C */
10859 if (TREE_CODE (arg0) == PLUS_EXPR
10860 && TREE_CODE (arg1) == INTEGER_CST
10861 && operand_equal_p (TREE_OPERAND (arg0, 0),
10862 TREE_OPERAND (arg0, 1), 0))
10863 return fold_build2_loc (loc, MULT_EXPR, type,
10864 omit_one_operand_loc (loc, type,
10865 TREE_OPERAND (arg0, 0),
10866 TREE_OPERAND (arg0, 1)),
10867 fold_build2_loc (loc, MULT_EXPR, type,
10868 build_int_cst (type, 2) , arg1));
10869
10870 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10871 sign-changing only. */
10872 if (TREE_CODE (arg1) == INTEGER_CST
10873 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10874 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10875 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10876
10877 strict_overflow_p = false;
10878 if (TREE_CODE (arg1) == INTEGER_CST
10879 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10880 &strict_overflow_p)))
10881 {
10882 if (strict_overflow_p)
10883 fold_overflow_warning (("assuming signed overflow does not "
10884 "occur when simplifying "
10885 "multiplication"),
10886 WARN_STRICT_OVERFLOW_MISC);
10887 return fold_convert_loc (loc, type, tem);
10888 }
10889
10890 /* Optimize z * conj(z) for integer complex numbers. */
10891 if (TREE_CODE (arg0) == CONJ_EXPR
10892 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10893 return fold_mult_zconjz (loc, type, arg1);
10894 if (TREE_CODE (arg1) == CONJ_EXPR
10895 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10896 return fold_mult_zconjz (loc, type, arg0);
10897 }
10898 else
10899 {
10900 /* Maybe fold x * 0 to 0. The expressions aren't the same
10901 when x is NaN, since x * 0 is also NaN. Nor are they the
10902 same in modes with signed zeros, since multiplying a
10903 negative value by 0 gives -0, not +0. */
10904 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10905 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10906 && real_zerop (arg1))
10907 return omit_one_operand_loc (loc, type, arg1, arg0);
10908 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10909 Likewise for complex arithmetic with signed zeros. */
10910 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10911 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10912 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10913 && real_onep (arg1))
10914 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10915
10916 /* Transform x * -1.0 into -x. */
10917 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10918 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10919 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10920 && real_minus_onep (arg1))
10921 return fold_convert_loc (loc, type, negate_expr (arg0));
10922
10923 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10924 the result for floating point types due to rounding so it is applied
10925 only if -fassociative-math was specify. */
10926 if (flag_associative_math
10927 && TREE_CODE (arg0) == RDIV_EXPR
10928 && TREE_CODE (arg1) == REAL_CST
10929 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10930 {
10931 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10932 arg1);
10933 if (tem)
10934 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10935 TREE_OPERAND (arg0, 1));
10936 }
10937
10938 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10939 if (operand_equal_p (arg0, arg1, 0))
10940 {
10941 tree tem = fold_strip_sign_ops (arg0);
10942 if (tem != NULL_TREE)
10943 {
10944 tem = fold_convert_loc (loc, type, tem);
10945 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10946 }
10947 }
10948
10949 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10950 This is not the same for NaNs or if signed zeros are
10951 involved. */
10952 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10953 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10954 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10955 && TREE_CODE (arg1) == COMPLEX_CST
10956 && real_zerop (TREE_REALPART (arg1)))
10957 {
10958 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10959 if (real_onep (TREE_IMAGPART (arg1)))
10960 return
10961 fold_build2_loc (loc, COMPLEX_EXPR, type,
10962 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10963 rtype, arg0)),
10964 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10965 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10966 return
10967 fold_build2_loc (loc, COMPLEX_EXPR, type,
10968 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10969 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10970 rtype, arg0)));
10971 }
10972
10973 /* Optimize z * conj(z) for floating point complex numbers.
10974 Guarded by flag_unsafe_math_optimizations as non-finite
10975 imaginary components don't produce scalar results. */
10976 if (flag_unsafe_math_optimizations
10977 && TREE_CODE (arg0) == CONJ_EXPR
10978 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10979 return fold_mult_zconjz (loc, type, arg1);
10980 if (flag_unsafe_math_optimizations
10981 && TREE_CODE (arg1) == CONJ_EXPR
10982 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10983 return fold_mult_zconjz (loc, type, arg0);
10984
10985 if (flag_unsafe_math_optimizations)
10986 {
10987 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10988 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10989
10990 /* Optimizations of root(...)*root(...). */
10991 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10992 {
10993 tree rootfn, arg;
10994 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10995 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10996
10997 /* Optimize sqrt(x)*sqrt(x) as x. */
10998 if (BUILTIN_SQRT_P (fcode0)
10999 && operand_equal_p (arg00, arg10, 0)
11000 && ! HONOR_SNANS (TYPE_MODE (type)))
11001 return arg00;
11002
11003 /* Optimize root(x)*root(y) as root(x*y). */
11004 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11005 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11006 return build_call_expr_loc (loc, rootfn, 1, arg);
11007 }
11008
11009 /* Optimize expN(x)*expN(y) as expN(x+y). */
11010 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11011 {
11012 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11013 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11014 CALL_EXPR_ARG (arg0, 0),
11015 CALL_EXPR_ARG (arg1, 0));
11016 return build_call_expr_loc (loc, expfn, 1, arg);
11017 }
11018
11019 /* Optimizations of pow(...)*pow(...). */
11020 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11021 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11022 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11023 {
11024 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11025 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11026 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11027 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11028
11029 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11030 if (operand_equal_p (arg01, arg11, 0))
11031 {
11032 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11033 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11034 arg00, arg10);
11035 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11036 }
11037
11038 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11039 if (operand_equal_p (arg00, arg10, 0))
11040 {
11041 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11042 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11043 arg01, arg11);
11044 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11045 }
11046 }
11047
11048 /* Optimize tan(x)*cos(x) as sin(x). */
11049 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11050 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11051 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11052 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11053 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11054 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11055 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11056 CALL_EXPR_ARG (arg1, 0), 0))
11057 {
11058 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11059
11060 if (sinfn != NULL_TREE)
11061 return build_call_expr_loc (loc, sinfn, 1,
11062 CALL_EXPR_ARG (arg0, 0));
11063 }
11064
11065 /* Optimize x*pow(x,c) as pow(x,c+1). */
11066 if (fcode1 == BUILT_IN_POW
11067 || fcode1 == BUILT_IN_POWF
11068 || fcode1 == BUILT_IN_POWL)
11069 {
11070 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11071 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11072 if (TREE_CODE (arg11) == REAL_CST
11073 && !TREE_OVERFLOW (arg11)
11074 && operand_equal_p (arg0, arg10, 0))
11075 {
11076 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11077 REAL_VALUE_TYPE c;
11078 tree arg;
11079
11080 c = TREE_REAL_CST (arg11);
11081 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11082 arg = build_real (type, c);
11083 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11084 }
11085 }
11086
11087 /* Optimize pow(x,c)*x as pow(x,c+1). */
11088 if (fcode0 == BUILT_IN_POW
11089 || fcode0 == BUILT_IN_POWF
11090 || fcode0 == BUILT_IN_POWL)
11091 {
11092 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11093 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11094 if (TREE_CODE (arg01) == REAL_CST
11095 && !TREE_OVERFLOW (arg01)
11096 && operand_equal_p (arg1, arg00, 0))
11097 {
11098 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11099 REAL_VALUE_TYPE c;
11100 tree arg;
11101
11102 c = TREE_REAL_CST (arg01);
11103 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11104 arg = build_real (type, c);
11105 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11106 }
11107 }
11108
11109 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11110 if (!in_gimple_form
11111 && optimize
11112 && operand_equal_p (arg0, arg1, 0))
11113 {
11114 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11115
11116 if (powfn)
11117 {
11118 tree arg = build_real (type, dconst2);
11119 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11120 }
11121 }
11122 }
11123 }
11124 goto associate;
11125
11126 case BIT_IOR_EXPR:
11127 bit_ior:
11128 if (operand_equal_p (arg0, arg1, 0))
11129 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11130
11131 /* ~X | X is -1. */
11132 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11134 {
11135 t1 = build_zero_cst (type);
11136 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11137 return omit_one_operand_loc (loc, type, t1, arg1);
11138 }
11139
11140 /* X | ~X is -1. */
11141 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11142 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11143 {
11144 t1 = build_zero_cst (type);
11145 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11146 return omit_one_operand_loc (loc, type, t1, arg0);
11147 }
11148
11149 /* Canonicalize (X & C1) | C2. */
11150 if (TREE_CODE (arg0) == BIT_AND_EXPR
11151 && TREE_CODE (arg1) == INTEGER_CST
11152 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11153 {
11154 int width = TYPE_PRECISION (type), w;
11155 wide_int c1 = TREE_OPERAND (arg0, 1);
11156 wide_int c2 = arg1;
11157
11158 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11159 if ((c1 & c2) == c1)
11160 return omit_one_operand_loc (loc, type, arg1,
11161 TREE_OPERAND (arg0, 0));
11162
11163 wide_int msk = wi::mask (width, false,
11164 TYPE_PRECISION (TREE_TYPE (arg1)));
11165
11166 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11167 if (msk.and_not (c1 | c2) == 0)
11168 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11169 TREE_OPERAND (arg0, 0), arg1);
11170
11171 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11172 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11173 mode which allows further optimizations. */
11174 c1 &= msk;
11175 c2 &= msk;
11176 wide_int c3 = c1.and_not (c2);
11177 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11178 {
11179 wide_int mask = wi::mask (w, false,
11180 TYPE_PRECISION (type));
11181 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11182 {
11183 c3 = mask;
11184 break;
11185 }
11186 }
11187
11188 if (c3 != c1)
11189 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11190 fold_build2_loc (loc, BIT_AND_EXPR, type,
11191 TREE_OPERAND (arg0, 0),
11192 wide_int_to_tree (type,
11193 c3)),
11194 arg1);
11195 }
11196
11197 /* (X & Y) | Y is (X, Y). */
11198 if (TREE_CODE (arg0) == BIT_AND_EXPR
11199 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11200 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11201 /* (X & Y) | X is (Y, X). */
11202 if (TREE_CODE (arg0) == BIT_AND_EXPR
11203 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11204 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11205 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11206 /* X | (X & Y) is (Y, X). */
11207 if (TREE_CODE (arg1) == BIT_AND_EXPR
11208 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11209 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11210 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11211 /* X | (Y & X) is (Y, X). */
11212 if (TREE_CODE (arg1) == BIT_AND_EXPR
11213 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11214 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11215 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11216
11217 /* (X & ~Y) | (~X & Y) is X ^ Y */
11218 if (TREE_CODE (arg0) == BIT_AND_EXPR
11219 && TREE_CODE (arg1) == BIT_AND_EXPR)
11220 {
11221 tree a0, a1, l0, l1, n0, n1;
11222
11223 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11224 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11225
11226 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11227 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11228
11229 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11230 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11231
11232 if ((operand_equal_p (n0, a0, 0)
11233 && operand_equal_p (n1, a1, 0))
11234 || (operand_equal_p (n0, a1, 0)
11235 && operand_equal_p (n1, a0, 0)))
11236 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11237 }
11238
11239 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11240 if (t1 != NULL_TREE)
11241 return t1;
11242
11243 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11244
11245 This results in more efficient code for machines without a NAND
11246 instruction. Combine will canonicalize to the first form
11247 which will allow use of NAND instructions provided by the
11248 backend if they exist. */
11249 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11250 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11251 {
11252 return
11253 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11254 build2 (BIT_AND_EXPR, type,
11255 fold_convert_loc (loc, type,
11256 TREE_OPERAND (arg0, 0)),
11257 fold_convert_loc (loc, type,
11258 TREE_OPERAND (arg1, 0))));
11259 }
11260
11261 /* See if this can be simplified into a rotate first. If that
11262 is unsuccessful continue in the association code. */
11263 goto bit_rotate;
11264
11265 case BIT_XOR_EXPR:
11266 if (integer_all_onesp (arg1))
11267 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11268
11269 /* ~X ^ X is -1. */
11270 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11271 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11272 {
11273 t1 = build_zero_cst (type);
11274 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11275 return omit_one_operand_loc (loc, type, t1, arg1);
11276 }
11277
11278 /* X ^ ~X is -1. */
11279 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11280 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11281 {
11282 t1 = build_zero_cst (type);
11283 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11284 return omit_one_operand_loc (loc, type, t1, arg0);
11285 }
11286
11287 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11288 with a constant, and the two constants have no bits in common,
11289 we should treat this as a BIT_IOR_EXPR since this may produce more
11290 simplifications. */
11291 if (TREE_CODE (arg0) == BIT_AND_EXPR
11292 && TREE_CODE (arg1) == BIT_AND_EXPR
11293 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11294 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11295 && wi::bit_and (TREE_OPERAND (arg0, 1),
11296 TREE_OPERAND (arg1, 1)) == 0)
11297 {
11298 code = BIT_IOR_EXPR;
11299 goto bit_ior;
11300 }
11301
11302 /* (X | Y) ^ X -> Y & ~ X*/
11303 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11304 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11305 {
11306 tree t2 = TREE_OPERAND (arg0, 1);
11307 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11308 arg1);
11309 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11310 fold_convert_loc (loc, type, t2),
11311 fold_convert_loc (loc, type, t1));
11312 return t1;
11313 }
11314
11315 /* (Y | X) ^ X -> Y & ~ X*/
11316 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11317 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11318 {
11319 tree t2 = TREE_OPERAND (arg0, 0);
11320 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11321 arg1);
11322 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11323 fold_convert_loc (loc, type, t2),
11324 fold_convert_loc (loc, type, t1));
11325 return t1;
11326 }
11327
11328 /* X ^ (X | Y) -> Y & ~ X*/
11329 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11330 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11331 {
11332 tree t2 = TREE_OPERAND (arg1, 1);
11333 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11334 arg0);
11335 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11336 fold_convert_loc (loc, type, t2),
11337 fold_convert_loc (loc, type, t1));
11338 return t1;
11339 }
11340
11341 /* X ^ (Y | X) -> Y & ~ X*/
11342 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11343 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11344 {
11345 tree t2 = TREE_OPERAND (arg1, 0);
11346 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11347 arg0);
11348 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11349 fold_convert_loc (loc, type, t2),
11350 fold_convert_loc (loc, type, t1));
11351 return t1;
11352 }
11353
11354 /* Convert ~X ^ ~Y to X ^ Y. */
11355 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11356 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11357 return fold_build2_loc (loc, code, type,
11358 fold_convert_loc (loc, type,
11359 TREE_OPERAND (arg0, 0)),
11360 fold_convert_loc (loc, type,
11361 TREE_OPERAND (arg1, 0)));
11362
11363 /* Convert ~X ^ C to X ^ ~C. */
11364 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11365 && TREE_CODE (arg1) == INTEGER_CST)
11366 return fold_build2_loc (loc, code, type,
11367 fold_convert_loc (loc, type,
11368 TREE_OPERAND (arg0, 0)),
11369 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11370
11371 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11372 if (TREE_CODE (arg0) == BIT_AND_EXPR
11373 && INTEGRAL_TYPE_P (type)
11374 && integer_onep (TREE_OPERAND (arg0, 1))
11375 && integer_onep (arg1))
11376 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11377 build_zero_cst (TREE_TYPE (arg0)));
11378
11379 /* Fold (X & Y) ^ Y as ~X & Y. */
11380 if (TREE_CODE (arg0) == BIT_AND_EXPR
11381 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11382 {
11383 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11384 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11385 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11386 fold_convert_loc (loc, type, arg1));
11387 }
11388 /* Fold (X & Y) ^ X as ~Y & X. */
11389 if (TREE_CODE (arg0) == BIT_AND_EXPR
11390 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11391 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11392 {
11393 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11394 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11395 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11396 fold_convert_loc (loc, type, arg1));
11397 }
11398 /* Fold X ^ (X & Y) as X & ~Y. */
11399 if (TREE_CODE (arg1) == BIT_AND_EXPR
11400 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11401 {
11402 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11403 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11404 fold_convert_loc (loc, type, arg0),
11405 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11406 }
11407 /* Fold X ^ (Y & X) as ~Y & X. */
11408 if (TREE_CODE (arg1) == BIT_AND_EXPR
11409 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11410 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11411 {
11412 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11413 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11414 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11415 fold_convert_loc (loc, type, arg0));
11416 }
11417
11418 /* See if this can be simplified into a rotate first. If that
11419 is unsuccessful continue in the association code. */
11420 goto bit_rotate;
11421
11422 case BIT_AND_EXPR:
11423 if (integer_all_onesp (arg1))
11424 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11425 if (operand_equal_p (arg0, arg1, 0))
11426 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11427
11428 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11429 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11430 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11431 || (TREE_CODE (arg0) == EQ_EXPR
11432 && integer_zerop (TREE_OPERAND (arg0, 1))))
11433 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11434 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11435
11436 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11437 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11438 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11439 || (TREE_CODE (arg1) == EQ_EXPR
11440 && integer_zerop (TREE_OPERAND (arg1, 1))))
11441 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11442 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11443
11444 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11445 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11446 && TREE_CODE (arg1) == INTEGER_CST
11447 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11448 {
11449 tree tmp1 = fold_convert_loc (loc, type, arg1);
11450 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11451 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11452 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11453 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11454 return
11455 fold_convert_loc (loc, type,
11456 fold_build2_loc (loc, BIT_IOR_EXPR,
11457 type, tmp2, tmp3));
11458 }
11459
11460 /* (X | Y) & Y is (X, Y). */
11461 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11462 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11463 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11464 /* (X | Y) & X is (Y, X). */
11465 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11466 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11467 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11468 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11469 /* X & (X | Y) is (Y, X). */
11470 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11471 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11472 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11473 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11474 /* X & (Y | X) is (Y, X). */
11475 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11476 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11477 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11478 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11479
11480 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11481 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11482 && INTEGRAL_TYPE_P (type)
11483 && integer_onep (TREE_OPERAND (arg0, 1))
11484 && integer_onep (arg1))
11485 {
11486 tree tem2;
11487 tem = TREE_OPERAND (arg0, 0);
11488 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11489 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11490 tem, tem2);
11491 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11492 build_zero_cst (TREE_TYPE (tem)));
11493 }
11494 /* Fold ~X & 1 as (X & 1) == 0. */
11495 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11496 && INTEGRAL_TYPE_P (type)
11497 && integer_onep (arg1))
11498 {
11499 tree tem2;
11500 tem = TREE_OPERAND (arg0, 0);
11501 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11502 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11503 tem, tem2);
11504 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11505 build_zero_cst (TREE_TYPE (tem)));
11506 }
11507 /* Fold !X & 1 as X == 0. */
11508 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11509 && integer_onep (arg1))
11510 {
11511 tem = TREE_OPERAND (arg0, 0);
11512 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11513 build_zero_cst (TREE_TYPE (tem)));
11514 }
11515
11516 /* Fold (X ^ Y) & Y as ~X & Y. */
11517 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11518 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11519 {
11520 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11521 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11522 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11523 fold_convert_loc (loc, type, arg1));
11524 }
11525 /* Fold (X ^ Y) & X as ~Y & X. */
11526 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11527 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11528 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11529 {
11530 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11531 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11532 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11533 fold_convert_loc (loc, type, arg1));
11534 }
11535 /* Fold X & (X ^ Y) as X & ~Y. */
11536 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11537 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11538 {
11539 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11540 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11541 fold_convert_loc (loc, type, arg0),
11542 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11543 }
11544 /* Fold X & (Y ^ X) as ~Y & X. */
11545 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11546 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11547 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11548 {
11549 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11550 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11551 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11552 fold_convert_loc (loc, type, arg0));
11553 }
11554
11555 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11556 multiple of 1 << CST. */
11557 if (TREE_CODE (arg1) == INTEGER_CST)
11558 {
11559 wide_int cst1 = arg1;
11560 wide_int ncst1 = -cst1;
11561 if ((cst1 & ncst1) == ncst1
11562 && multiple_of_p (type, arg0,
11563 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11564 return fold_convert_loc (loc, type, arg0);
11565 }
11566
11567 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11568 bits from CST2. */
11569 if (TREE_CODE (arg1) == INTEGER_CST
11570 && TREE_CODE (arg0) == MULT_EXPR
11571 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11572 {
11573 wide_int warg1 = arg1;
11574 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11575
11576 if (masked == 0)
11577 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11578 arg0, arg1);
11579 else if (masked != warg1)
11580 {
11581 /* Avoid the transform if arg1 is a mask of some
11582 mode which allows further optimizations. */
11583 int pop = wi::popcount (warg1);
11584 if (!(pop >= BITS_PER_UNIT
11585 && exact_log2 (pop) != -1
11586 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11587 return fold_build2_loc (loc, code, type, op0,
11588 wide_int_to_tree (type, masked));
11589 }
11590 }
11591
11592 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11593 ((A & N) + B) & M -> (A + B) & M
11594 Similarly if (N & M) == 0,
11595 ((A | N) + B) & M -> (A + B) & M
11596 and for - instead of + (or unary - instead of +)
11597 and/or ^ instead of |.
11598 If B is constant and (B & M) == 0, fold into A & M. */
11599 if (TREE_CODE (arg1) == INTEGER_CST)
11600 {
11601 wide_int cst1 = arg1;
11602 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11603 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11604 && (TREE_CODE (arg0) == PLUS_EXPR
11605 || TREE_CODE (arg0) == MINUS_EXPR
11606 || TREE_CODE (arg0) == NEGATE_EXPR)
11607 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11608 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11609 {
11610 tree pmop[2];
11611 int which = 0;
11612 wide_int cst0;
11613
11614 /* Now we know that arg0 is (C + D) or (C - D) or
11615 -C and arg1 (M) is == (1LL << cst) - 1.
11616 Store C into PMOP[0] and D into PMOP[1]. */
11617 pmop[0] = TREE_OPERAND (arg0, 0);
11618 pmop[1] = NULL;
11619 if (TREE_CODE (arg0) != NEGATE_EXPR)
11620 {
11621 pmop[1] = TREE_OPERAND (arg0, 1);
11622 which = 1;
11623 }
11624
11625 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11626 which = -1;
11627
11628 for (; which >= 0; which--)
11629 switch (TREE_CODE (pmop[which]))
11630 {
11631 case BIT_AND_EXPR:
11632 case BIT_IOR_EXPR:
11633 case BIT_XOR_EXPR:
11634 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11635 != INTEGER_CST)
11636 break;
11637 cst0 = TREE_OPERAND (pmop[which], 1);
11638 cst0 &= cst1;
11639 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11640 {
11641 if (cst0 != cst1)
11642 break;
11643 }
11644 else if (cst0 != 0)
11645 break;
11646 /* If C or D is of the form (A & N) where
11647 (N & M) == M, or of the form (A | N) or
11648 (A ^ N) where (N & M) == 0, replace it with A. */
11649 pmop[which] = TREE_OPERAND (pmop[which], 0);
11650 break;
11651 case INTEGER_CST:
11652 /* If C or D is a N where (N & M) == 0, it can be
11653 omitted (assumed 0). */
11654 if ((TREE_CODE (arg0) == PLUS_EXPR
11655 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11656 && (cst1 & pmop[which]) == 0)
11657 pmop[which] = NULL;
11658 break;
11659 default:
11660 break;
11661 }
11662
11663 /* Only build anything new if we optimized one or both arguments
11664 above. */
11665 if (pmop[0] != TREE_OPERAND (arg0, 0)
11666 || (TREE_CODE (arg0) != NEGATE_EXPR
11667 && pmop[1] != TREE_OPERAND (arg0, 1)))
11668 {
11669 tree utype = TREE_TYPE (arg0);
11670 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11671 {
11672 /* Perform the operations in a type that has defined
11673 overflow behavior. */
11674 utype = unsigned_type_for (TREE_TYPE (arg0));
11675 if (pmop[0] != NULL)
11676 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11677 if (pmop[1] != NULL)
11678 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11679 }
11680
11681 if (TREE_CODE (arg0) == NEGATE_EXPR)
11682 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11683 else if (TREE_CODE (arg0) == PLUS_EXPR)
11684 {
11685 if (pmop[0] != NULL && pmop[1] != NULL)
11686 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11687 pmop[0], pmop[1]);
11688 else if (pmop[0] != NULL)
11689 tem = pmop[0];
11690 else if (pmop[1] != NULL)
11691 tem = pmop[1];
11692 else
11693 return build_int_cst (type, 0);
11694 }
11695 else if (pmop[0] == NULL)
11696 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11697 else
11698 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11699 pmop[0], pmop[1]);
11700 /* TEM is now the new binary +, - or unary - replacement. */
11701 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11702 fold_convert_loc (loc, utype, arg1));
11703 return fold_convert_loc (loc, type, tem);
11704 }
11705 }
11706 }
11707
11708 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11709 if (t1 != NULL_TREE)
11710 return t1;
11711 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11712 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11713 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11714 {
11715 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11716
11717 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11718 if (mask == -1)
11719 return
11720 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11721 }
11722
11723 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11724
11725 This results in more efficient code for machines without a NOR
11726 instruction. Combine will canonicalize to the first form
11727 which will allow use of NOR instructions provided by the
11728 backend if they exist. */
11729 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11730 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11731 {
11732 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11733 build2 (BIT_IOR_EXPR, type,
11734 fold_convert_loc (loc, type,
11735 TREE_OPERAND (arg0, 0)),
11736 fold_convert_loc (loc, type,
11737 TREE_OPERAND (arg1, 0))));
11738 }
11739
11740 /* If arg0 is derived from the address of an object or function, we may
11741 be able to fold this expression using the object or function's
11742 alignment. */
11743 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11744 {
11745 unsigned HOST_WIDE_INT modulus, residue;
11746 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11747
11748 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11749 integer_onep (arg1));
11750
11751 /* This works because modulus is a power of 2. If this weren't the
11752 case, we'd have to replace it by its greatest power-of-2
11753 divisor: modulus & -modulus. */
11754 if (low < modulus)
11755 return build_int_cst (type, residue & low);
11756 }
11757
11758 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11759 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11760 if the new mask might be further optimized. */
11761 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11762 || TREE_CODE (arg0) == RSHIFT_EXPR)
11763 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11764 && TREE_CODE (arg1) == INTEGER_CST
11765 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11766 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11767 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11768 < TYPE_PRECISION (TREE_TYPE (arg0))))
11769 {
11770 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11771 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11772 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11773 tree shift_type = TREE_TYPE (arg0);
11774
11775 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11776 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11777 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11778 && TYPE_PRECISION (TREE_TYPE (arg0))
11779 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11780 {
11781 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11782 tree arg00 = TREE_OPERAND (arg0, 0);
11783 /* See if more bits can be proven as zero because of
11784 zero extension. */
11785 if (TREE_CODE (arg00) == NOP_EXPR
11786 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11787 {
11788 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11789 if (TYPE_PRECISION (inner_type)
11790 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11791 && TYPE_PRECISION (inner_type) < prec)
11792 {
11793 prec = TYPE_PRECISION (inner_type);
11794 /* See if we can shorten the right shift. */
11795 if (shiftc < prec)
11796 shift_type = inner_type;
11797 /* Otherwise X >> C1 is all zeros, so we'll optimize
11798 it into (X, 0) later on by making sure zerobits
11799 is all ones. */
11800 }
11801 }
11802 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11803 if (shiftc < prec)
11804 {
11805 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11806 zerobits <<= prec - shiftc;
11807 }
11808 /* For arithmetic shift if sign bit could be set, zerobits
11809 can contain actually sign bits, so no transformation is
11810 possible, unless MASK masks them all away. In that
11811 case the shift needs to be converted into logical shift. */
11812 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11813 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11814 {
11815 if ((mask & zerobits) == 0)
11816 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11817 else
11818 zerobits = 0;
11819 }
11820 }
11821
11822 /* ((X << 16) & 0xff00) is (X, 0). */
11823 if ((mask & zerobits) == mask)
11824 return omit_one_operand_loc (loc, type,
11825 build_int_cst (type, 0), arg0);
11826
11827 newmask = mask | zerobits;
11828 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11829 {
11830 /* Only do the transformation if NEWMASK is some integer
11831 mode's mask. */
11832 for (prec = BITS_PER_UNIT;
11833 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11834 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11835 break;
11836 if (prec < HOST_BITS_PER_WIDE_INT
11837 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11838 {
11839 tree newmaskt;
11840
11841 if (shift_type != TREE_TYPE (arg0))
11842 {
11843 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11844 fold_convert_loc (loc, shift_type,
11845 TREE_OPERAND (arg0, 0)),
11846 TREE_OPERAND (arg0, 1));
11847 tem = fold_convert_loc (loc, type, tem);
11848 }
11849 else
11850 tem = op0;
11851 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11852 if (!tree_int_cst_equal (newmaskt, arg1))
11853 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11854 }
11855 }
11856 }
11857
11858 goto associate;
11859
11860 case RDIV_EXPR:
11861 /* Don't touch a floating-point divide by zero unless the mode
11862 of the constant can represent infinity. */
11863 if (TREE_CODE (arg1) == REAL_CST
11864 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11865 && real_zerop (arg1))
11866 return NULL_TREE;
11867
11868 /* Optimize A / A to 1.0 if we don't care about
11869 NaNs or Infinities. Skip the transformation
11870 for non-real operands. */
11871 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11872 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11873 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11874 && operand_equal_p (arg0, arg1, 0))
11875 {
11876 tree r = build_real (TREE_TYPE (arg0), dconst1);
11877
11878 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11879 }
11880
11881 /* The complex version of the above A / A optimization. */
11882 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11883 && operand_equal_p (arg0, arg1, 0))
11884 {
11885 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11886 if (! HONOR_NANS (TYPE_MODE (elem_type))
11887 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11888 {
11889 tree r = build_real (elem_type, dconst1);
11890 /* omit_two_operands will call fold_convert for us. */
11891 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11892 }
11893 }
11894
11895 /* (-A) / (-B) -> A / B */
11896 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11897 return fold_build2_loc (loc, RDIV_EXPR, type,
11898 TREE_OPERAND (arg0, 0),
11899 negate_expr (arg1));
11900 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11901 return fold_build2_loc (loc, RDIV_EXPR, type,
11902 negate_expr (arg0),
11903 TREE_OPERAND (arg1, 0));
11904
11905 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11906 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11907 && real_onep (arg1))
11908 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11909
11910 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11911 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11912 && real_minus_onep (arg1))
11913 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11914 negate_expr (arg0)));
11915
11916 /* If ARG1 is a constant, we can convert this to a multiply by the
11917 reciprocal. This does not have the same rounding properties,
11918 so only do this if -freciprocal-math. We can actually
11919 always safely do it if ARG1 is a power of two, but it's hard to
11920 tell if it is or not in a portable manner. */
11921 if (optimize
11922 && (TREE_CODE (arg1) == REAL_CST
11923 || (TREE_CODE (arg1) == COMPLEX_CST
11924 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11925 || (TREE_CODE (arg1) == VECTOR_CST
11926 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11927 {
11928 if (flag_reciprocal_math
11929 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11930 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11931 /* Find the reciprocal if optimizing and the result is exact.
11932 TODO: Complex reciprocal not implemented. */
11933 if (TREE_CODE (arg1) != COMPLEX_CST)
11934 {
11935 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11936
11937 if (inverse)
11938 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11939 }
11940 }
11941 /* Convert A/B/C to A/(B*C). */
11942 if (flag_reciprocal_math
11943 && TREE_CODE (arg0) == RDIV_EXPR)
11944 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11945 fold_build2_loc (loc, MULT_EXPR, type,
11946 TREE_OPERAND (arg0, 1), arg1));
11947
11948 /* Convert A/(B/C) to (A/B)*C. */
11949 if (flag_reciprocal_math
11950 && TREE_CODE (arg1) == RDIV_EXPR)
11951 return fold_build2_loc (loc, MULT_EXPR, type,
11952 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11953 TREE_OPERAND (arg1, 0)),
11954 TREE_OPERAND (arg1, 1));
11955
11956 /* Convert C1/(X*C2) into (C1/C2)/X. */
11957 if (flag_reciprocal_math
11958 && TREE_CODE (arg1) == MULT_EXPR
11959 && TREE_CODE (arg0) == REAL_CST
11960 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11961 {
11962 tree tem = const_binop (RDIV_EXPR, arg0,
11963 TREE_OPERAND (arg1, 1));
11964 if (tem)
11965 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11966 TREE_OPERAND (arg1, 0));
11967 }
11968
11969 if (flag_unsafe_math_optimizations)
11970 {
11971 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11972 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11973
11974 /* Optimize sin(x)/cos(x) as tan(x). */
11975 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11976 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11977 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11978 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11979 CALL_EXPR_ARG (arg1, 0), 0))
11980 {
11981 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11982
11983 if (tanfn != NULL_TREE)
11984 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11985 }
11986
11987 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11988 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11989 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11990 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11991 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11992 CALL_EXPR_ARG (arg1, 0), 0))
11993 {
11994 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11995
11996 if (tanfn != NULL_TREE)
11997 {
11998 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11999 CALL_EXPR_ARG (arg0, 0));
12000 return fold_build2_loc (loc, RDIV_EXPR, type,
12001 build_real (type, dconst1), tmp);
12002 }
12003 }
12004
12005 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12006 NaNs or Infinities. */
12007 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12008 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12009 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12010 {
12011 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12012 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12013
12014 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12015 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12016 && operand_equal_p (arg00, arg01, 0))
12017 {
12018 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12019
12020 if (cosfn != NULL_TREE)
12021 return build_call_expr_loc (loc, cosfn, 1, arg00);
12022 }
12023 }
12024
12025 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12026 NaNs or Infinities. */
12027 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12028 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12029 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12030 {
12031 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12032 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12033
12034 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12035 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12036 && operand_equal_p (arg00, arg01, 0))
12037 {
12038 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12039
12040 if (cosfn != NULL_TREE)
12041 {
12042 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12043 return fold_build2_loc (loc, RDIV_EXPR, type,
12044 build_real (type, dconst1),
12045 tmp);
12046 }
12047 }
12048 }
12049
12050 /* Optimize pow(x,c)/x as pow(x,c-1). */
12051 if (fcode0 == BUILT_IN_POW
12052 || fcode0 == BUILT_IN_POWF
12053 || fcode0 == BUILT_IN_POWL)
12054 {
12055 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12056 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12057 if (TREE_CODE (arg01) == REAL_CST
12058 && !TREE_OVERFLOW (arg01)
12059 && operand_equal_p (arg1, arg00, 0))
12060 {
12061 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12062 REAL_VALUE_TYPE c;
12063 tree arg;
12064
12065 c = TREE_REAL_CST (arg01);
12066 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12067 arg = build_real (type, c);
12068 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12069 }
12070 }
12071
12072 /* Optimize a/root(b/c) into a*root(c/b). */
12073 if (BUILTIN_ROOT_P (fcode1))
12074 {
12075 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12076
12077 if (TREE_CODE (rootarg) == RDIV_EXPR)
12078 {
12079 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12080 tree b = TREE_OPERAND (rootarg, 0);
12081 tree c = TREE_OPERAND (rootarg, 1);
12082
12083 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12084
12085 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12086 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12087 }
12088 }
12089
12090 /* Optimize x/expN(y) into x*expN(-y). */
12091 if (BUILTIN_EXPONENT_P (fcode1))
12092 {
12093 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12094 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12095 arg1 = build_call_expr_loc (loc,
12096 expfn, 1,
12097 fold_convert_loc (loc, type, arg));
12098 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12099 }
12100
12101 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12102 if (fcode1 == BUILT_IN_POW
12103 || fcode1 == BUILT_IN_POWF
12104 || fcode1 == BUILT_IN_POWL)
12105 {
12106 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12107 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12108 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12109 tree neg11 = fold_convert_loc (loc, type,
12110 negate_expr (arg11));
12111 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12112 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12113 }
12114 }
12115 return NULL_TREE;
12116
12117 case TRUNC_DIV_EXPR:
12118 /* Optimize (X & (-A)) / A where A is a power of 2,
12119 to X >> log2(A) */
12120 if (TREE_CODE (arg0) == BIT_AND_EXPR
12121 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12122 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12123 {
12124 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12125 arg1, TREE_OPERAND (arg0, 1));
12126 if (sum && integer_zerop (sum)) {
12127 tree pow2 = build_int_cst (integer_type_node,
12128 wi::exact_log2 (arg1));
12129 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12130 TREE_OPERAND (arg0, 0), pow2);
12131 }
12132 }
12133
12134 /* Fall through */
12135
12136 case FLOOR_DIV_EXPR:
12137 /* Simplify A / (B << N) where A and B are positive and B is
12138 a power of 2, to A >> (N + log2(B)). */
12139 strict_overflow_p = false;
12140 if (TREE_CODE (arg1) == LSHIFT_EXPR
12141 && (TYPE_UNSIGNED (type)
12142 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12143 {
12144 tree sval = TREE_OPERAND (arg1, 0);
12145 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12146 {
12147 tree sh_cnt = TREE_OPERAND (arg1, 1);
12148 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12149 wi::exact_log2 (sval));
12150
12151 if (strict_overflow_p)
12152 fold_overflow_warning (("assuming signed overflow does not "
12153 "occur when simplifying A / (B << N)"),
12154 WARN_STRICT_OVERFLOW_MISC);
12155
12156 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12157 sh_cnt, pow2);
12158 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12159 fold_convert_loc (loc, type, arg0), sh_cnt);
12160 }
12161 }
12162
12163 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12164 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12165 if (INTEGRAL_TYPE_P (type)
12166 && TYPE_UNSIGNED (type)
12167 && code == FLOOR_DIV_EXPR)
12168 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12169
12170 /* Fall through */
12171
12172 case ROUND_DIV_EXPR:
12173 case CEIL_DIV_EXPR:
12174 case EXACT_DIV_EXPR:
12175 if (integer_zerop (arg1))
12176 return NULL_TREE;
12177 /* X / -1 is -X. */
12178 if (!TYPE_UNSIGNED (type)
12179 && TREE_CODE (arg1) == INTEGER_CST
12180 && wi::eq_p (arg1, -1))
12181 return fold_convert_loc (loc, type, negate_expr (arg0));
12182
12183 /* Convert -A / -B to A / B when the type is signed and overflow is
12184 undefined. */
12185 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12186 && TREE_CODE (arg0) == NEGATE_EXPR
12187 && negate_expr_p (arg1))
12188 {
12189 if (INTEGRAL_TYPE_P (type))
12190 fold_overflow_warning (("assuming signed overflow does not occur "
12191 "when distributing negation across "
12192 "division"),
12193 WARN_STRICT_OVERFLOW_MISC);
12194 return fold_build2_loc (loc, code, type,
12195 fold_convert_loc (loc, type,
12196 TREE_OPERAND (arg0, 0)),
12197 fold_convert_loc (loc, type,
12198 negate_expr (arg1)));
12199 }
12200 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12201 && TREE_CODE (arg1) == NEGATE_EXPR
12202 && negate_expr_p (arg0))
12203 {
12204 if (INTEGRAL_TYPE_P (type))
12205 fold_overflow_warning (("assuming signed overflow does not occur "
12206 "when distributing negation across "
12207 "division"),
12208 WARN_STRICT_OVERFLOW_MISC);
12209 return fold_build2_loc (loc, code, type,
12210 fold_convert_loc (loc, type,
12211 negate_expr (arg0)),
12212 fold_convert_loc (loc, type,
12213 TREE_OPERAND (arg1, 0)));
12214 }
12215
12216 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12217 operation, EXACT_DIV_EXPR.
12218
12219 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12220 At one time others generated faster code, it's not clear if they do
12221 after the last round to changes to the DIV code in expmed.c. */
12222 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12223 && multiple_of_p (type, arg0, arg1))
12224 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12225
12226 strict_overflow_p = false;
12227 if (TREE_CODE (arg1) == INTEGER_CST
12228 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12229 &strict_overflow_p)))
12230 {
12231 if (strict_overflow_p)
12232 fold_overflow_warning (("assuming signed overflow does not occur "
12233 "when simplifying division"),
12234 WARN_STRICT_OVERFLOW_MISC);
12235 return fold_convert_loc (loc, type, tem);
12236 }
12237
12238 return NULL_TREE;
12239
12240 case CEIL_MOD_EXPR:
12241 case FLOOR_MOD_EXPR:
12242 case ROUND_MOD_EXPR:
12243 case TRUNC_MOD_EXPR:
12244 /* X % -1 is zero. */
12245 if (!TYPE_UNSIGNED (type)
12246 && TREE_CODE (arg1) == INTEGER_CST
12247 && wi::eq_p (arg1, -1))
12248 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12249
12250 /* X % -C is the same as X % C. */
12251 if (code == TRUNC_MOD_EXPR
12252 && TYPE_SIGN (type) == SIGNED
12253 && TREE_CODE (arg1) == INTEGER_CST
12254 && !TREE_OVERFLOW (arg1)
12255 && wi::neg_p (arg1)
12256 && !TYPE_OVERFLOW_TRAPS (type)
12257 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12258 && !sign_bit_p (arg1, arg1))
12259 return fold_build2_loc (loc, code, type,
12260 fold_convert_loc (loc, type, arg0),
12261 fold_convert_loc (loc, type,
12262 negate_expr (arg1)));
12263
12264 /* X % -Y is the same as X % Y. */
12265 if (code == TRUNC_MOD_EXPR
12266 && !TYPE_UNSIGNED (type)
12267 && TREE_CODE (arg1) == NEGATE_EXPR
12268 && !TYPE_OVERFLOW_TRAPS (type))
12269 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12270 fold_convert_loc (loc, type,
12271 TREE_OPERAND (arg1, 0)));
12272
12273 strict_overflow_p = false;
12274 if (TREE_CODE (arg1) == INTEGER_CST
12275 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12276 &strict_overflow_p)))
12277 {
12278 if (strict_overflow_p)
12279 fold_overflow_warning (("assuming signed overflow does not occur "
12280 "when simplifying modulus"),
12281 WARN_STRICT_OVERFLOW_MISC);
12282 return fold_convert_loc (loc, type, tem);
12283 }
12284
12285 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12286 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12287 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12288 && (TYPE_UNSIGNED (type)
12289 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12290 {
12291 tree c = arg1;
12292 /* Also optimize A % (C << N) where C is a power of 2,
12293 to A & ((C << N) - 1). */
12294 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12295 c = TREE_OPERAND (arg1, 0);
12296
12297 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12298 {
12299 tree mask
12300 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12301 build_int_cst (TREE_TYPE (arg1), 1));
12302 if (strict_overflow_p)
12303 fold_overflow_warning (("assuming signed overflow does not "
12304 "occur when simplifying "
12305 "X % (power of two)"),
12306 WARN_STRICT_OVERFLOW_MISC);
12307 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12308 fold_convert_loc (loc, type, arg0),
12309 fold_convert_loc (loc, type, mask));
12310 }
12311 }
12312
12313 return NULL_TREE;
12314
12315 case LROTATE_EXPR:
12316 case RROTATE_EXPR:
12317 if (integer_all_onesp (arg0))
12318 return omit_one_operand_loc (loc, type, arg0, arg1);
12319 goto shift;
12320
12321 case RSHIFT_EXPR:
12322 /* Optimize -1 >> x for arithmetic right shifts. */
12323 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12324 && tree_expr_nonnegative_p (arg1))
12325 return omit_one_operand_loc (loc, type, arg0, arg1);
12326 /* ... fall through ... */
12327
12328 case LSHIFT_EXPR:
12329 shift:
12330 if (integer_zerop (arg1))
12331 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12332 if (integer_zerop (arg0))
12333 return omit_one_operand_loc (loc, type, arg0, arg1);
12334
12335 /* Prefer vector1 << scalar to vector1 << vector2
12336 if vector2 is uniform. */
12337 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12338 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12339 return fold_build2_loc (loc, code, type, op0, tem);
12340
12341 /* Since negative shift count is not well-defined,
12342 don't try to compute it in the compiler. */
12343 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12344 return NULL_TREE;
12345
12346 prec = element_precision (type);
12347
12348 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12349 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12350 && tree_to_uhwi (arg1) < prec
12351 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12352 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12353 {
12354 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12355 + tree_to_uhwi (arg1));
12356
12357 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12358 being well defined. */
12359 if (low >= prec)
12360 {
12361 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12362 low = low % prec;
12363 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12364 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12365 TREE_OPERAND (arg0, 0));
12366 else
12367 low = prec - 1;
12368 }
12369
12370 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12371 build_int_cst (TREE_TYPE (arg1), low));
12372 }
12373
12374 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12375 into x & ((unsigned)-1 >> c) for unsigned types. */
12376 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12377 || (TYPE_UNSIGNED (type)
12378 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12379 && tree_fits_uhwi_p (arg1)
12380 && tree_to_uhwi (arg1) < prec
12381 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12382 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12383 {
12384 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12385 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12386 tree lshift;
12387 tree arg00;
12388
12389 if (low0 == low1)
12390 {
12391 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12392
12393 lshift = build_minus_one_cst (type);
12394 lshift = const_binop (code, lshift, arg1);
12395
12396 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12397 }
12398 }
12399
12400 /* Rewrite an LROTATE_EXPR by a constant into an
12401 RROTATE_EXPR by a new constant. */
12402 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12403 {
12404 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12405 tem = const_binop (MINUS_EXPR, tem, arg1);
12406 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12407 }
12408
12409 /* If we have a rotate of a bit operation with the rotate count and
12410 the second operand of the bit operation both constant,
12411 permute the two operations. */
12412 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12413 && (TREE_CODE (arg0) == BIT_AND_EXPR
12414 || TREE_CODE (arg0) == BIT_IOR_EXPR
12415 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12416 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12417 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12418 fold_build2_loc (loc, code, type,
12419 TREE_OPERAND (arg0, 0), arg1),
12420 fold_build2_loc (loc, code, type,
12421 TREE_OPERAND (arg0, 1), arg1));
12422
12423 /* Two consecutive rotates adding up to the some integer
12424 multiple of the precision of the type can be ignored. */
12425 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12426 && TREE_CODE (arg0) == RROTATE_EXPR
12427 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12428 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12429 prec) == 0)
12430 return TREE_OPERAND (arg0, 0);
12431
12432 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12433 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12434 if the latter can be further optimized. */
12435 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12436 && TREE_CODE (arg0) == BIT_AND_EXPR
12437 && TREE_CODE (arg1) == INTEGER_CST
12438 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12439 {
12440 tree mask = fold_build2_loc (loc, code, type,
12441 fold_convert_loc (loc, type,
12442 TREE_OPERAND (arg0, 1)),
12443 arg1);
12444 tree shift = fold_build2_loc (loc, code, type,
12445 fold_convert_loc (loc, type,
12446 TREE_OPERAND (arg0, 0)),
12447 arg1);
12448 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12449 if (tem)
12450 return tem;
12451 }
12452
12453 return NULL_TREE;
12454
12455 case MIN_EXPR:
12456 if (operand_equal_p (arg0, arg1, 0))
12457 return omit_one_operand_loc (loc, type, arg0, arg1);
12458 if (INTEGRAL_TYPE_P (type)
12459 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12460 return omit_one_operand_loc (loc, type, arg1, arg0);
12461 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12462 if (tem)
12463 return tem;
12464 goto associate;
12465
12466 case MAX_EXPR:
12467 if (operand_equal_p (arg0, arg1, 0))
12468 return omit_one_operand_loc (loc, type, arg0, arg1);
12469 if (INTEGRAL_TYPE_P (type)
12470 && TYPE_MAX_VALUE (type)
12471 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12472 return omit_one_operand_loc (loc, type, arg1, arg0);
12473 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12474 if (tem)
12475 return tem;
12476 goto associate;
12477
12478 case TRUTH_ANDIF_EXPR:
12479 /* Note that the operands of this must be ints
12480 and their values must be 0 or 1.
12481 ("true" is a fixed value perhaps depending on the language.) */
12482 /* If first arg is constant zero, return it. */
12483 if (integer_zerop (arg0))
12484 return fold_convert_loc (loc, type, arg0);
12485 case TRUTH_AND_EXPR:
12486 /* If either arg is constant true, drop it. */
12487 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12488 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12489 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12490 /* Preserve sequence points. */
12491 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12492 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12493 /* If second arg is constant zero, result is zero, but first arg
12494 must be evaluated. */
12495 if (integer_zerop (arg1))
12496 return omit_one_operand_loc (loc, type, arg1, arg0);
12497 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12498 case will be handled here. */
12499 if (integer_zerop (arg0))
12500 return omit_one_operand_loc (loc, type, arg0, arg1);
12501
12502 /* !X && X is always false. */
12503 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12504 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12505 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12506 /* X && !X is always false. */
12507 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12508 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12509 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12510
12511 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12512 means A >= Y && A != MAX, but in this case we know that
12513 A < X <= MAX. */
12514
12515 if (!TREE_SIDE_EFFECTS (arg0)
12516 && !TREE_SIDE_EFFECTS (arg1))
12517 {
12518 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12519 if (tem && !operand_equal_p (tem, arg0, 0))
12520 return fold_build2_loc (loc, code, type, tem, arg1);
12521
12522 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12523 if (tem && !operand_equal_p (tem, arg1, 0))
12524 return fold_build2_loc (loc, code, type, arg0, tem);
12525 }
12526
12527 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12528 != NULL_TREE)
12529 return tem;
12530
12531 return NULL_TREE;
12532
12533 case TRUTH_ORIF_EXPR:
12534 /* Note that the operands of this must be ints
12535 and their values must be 0 or true.
12536 ("true" is a fixed value perhaps depending on the language.) */
12537 /* If first arg is constant true, return it. */
12538 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12539 return fold_convert_loc (loc, type, arg0);
12540 case TRUTH_OR_EXPR:
12541 /* If either arg is constant zero, drop it. */
12542 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12543 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12544 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12545 /* Preserve sequence points. */
12546 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12547 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12548 /* If second arg is constant true, result is true, but we must
12549 evaluate first arg. */
12550 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12551 return omit_one_operand_loc (loc, type, arg1, arg0);
12552 /* Likewise for first arg, but note this only occurs here for
12553 TRUTH_OR_EXPR. */
12554 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12555 return omit_one_operand_loc (loc, type, arg0, arg1);
12556
12557 /* !X || X is always true. */
12558 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12559 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12560 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12561 /* X || !X is always true. */
12562 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12563 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12564 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12565
12566 /* (X && !Y) || (!X && Y) is X ^ Y */
12567 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12568 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12569 {
12570 tree a0, a1, l0, l1, n0, n1;
12571
12572 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12573 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12574
12575 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12576 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12577
12578 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12579 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12580
12581 if ((operand_equal_p (n0, a0, 0)
12582 && operand_equal_p (n1, a1, 0))
12583 || (operand_equal_p (n0, a1, 0)
12584 && operand_equal_p (n1, a0, 0)))
12585 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12586 }
12587
12588 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12589 != NULL_TREE)
12590 return tem;
12591
12592 return NULL_TREE;
12593
12594 case TRUTH_XOR_EXPR:
12595 /* If the second arg is constant zero, drop it. */
12596 if (integer_zerop (arg1))
12597 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12598 /* If the second arg is constant true, this is a logical inversion. */
12599 if (integer_onep (arg1))
12600 {
12601 tem = invert_truthvalue_loc (loc, arg0);
12602 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12603 }
12604 /* Identical arguments cancel to zero. */
12605 if (operand_equal_p (arg0, arg1, 0))
12606 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12607
12608 /* !X ^ X is always true. */
12609 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12610 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12611 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12612
12613 /* X ^ !X is always true. */
12614 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12615 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12616 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12617
12618 return NULL_TREE;
12619
12620 case EQ_EXPR:
12621 case NE_EXPR:
12622 STRIP_NOPS (arg0);
12623 STRIP_NOPS (arg1);
12624
12625 tem = fold_comparison (loc, code, type, op0, op1);
12626 if (tem != NULL_TREE)
12627 return tem;
12628
12629 /* bool_var != 0 becomes bool_var. */
12630 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12631 && code == NE_EXPR)
12632 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12633
12634 /* bool_var == 1 becomes bool_var. */
12635 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12636 && code == EQ_EXPR)
12637 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12638
12639 /* bool_var != 1 becomes !bool_var. */
12640 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12641 && code == NE_EXPR)
12642 return fold_convert_loc (loc, type,
12643 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12644 TREE_TYPE (arg0), arg0));
12645
12646 /* bool_var == 0 becomes !bool_var. */
12647 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12648 && code == EQ_EXPR)
12649 return fold_convert_loc (loc, type,
12650 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12651 TREE_TYPE (arg0), arg0));
12652
12653 /* !exp != 0 becomes !exp */
12654 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12655 && code == NE_EXPR)
12656 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12657
12658 /* If this is an equality comparison of the address of two non-weak,
12659 unaliased symbols neither of which are extern (since we do not
12660 have access to attributes for externs), then we know the result. */
12661 if (TREE_CODE (arg0) == ADDR_EXPR
12662 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12663 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12664 && ! lookup_attribute ("alias",
12665 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12666 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12667 && TREE_CODE (arg1) == ADDR_EXPR
12668 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12669 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12670 && ! lookup_attribute ("alias",
12671 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12672 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12673 {
12674 /* We know that we're looking at the address of two
12675 non-weak, unaliased, static _DECL nodes.
12676
12677 It is both wasteful and incorrect to call operand_equal_p
12678 to compare the two ADDR_EXPR nodes. It is wasteful in that
12679 all we need to do is test pointer equality for the arguments
12680 to the two ADDR_EXPR nodes. It is incorrect to use
12681 operand_equal_p as that function is NOT equivalent to a
12682 C equality test. It can in fact return false for two
12683 objects which would test as equal using the C equality
12684 operator. */
12685 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12686 return constant_boolean_node (equal
12687 ? code == EQ_EXPR : code != EQ_EXPR,
12688 type);
12689 }
12690
12691 /* Similarly for a NEGATE_EXPR. */
12692 if (TREE_CODE (arg0) == NEGATE_EXPR
12693 && TREE_CODE (arg1) == INTEGER_CST
12694 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12695 arg1)))
12696 && TREE_CODE (tem) == INTEGER_CST
12697 && !TREE_OVERFLOW (tem))
12698 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12699
12700 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12701 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12702 && TREE_CODE (arg1) == INTEGER_CST
12703 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12704 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12705 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12706 fold_convert_loc (loc,
12707 TREE_TYPE (arg0),
12708 arg1),
12709 TREE_OPERAND (arg0, 1)));
12710
12711 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12712 if ((TREE_CODE (arg0) == PLUS_EXPR
12713 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12714 || TREE_CODE (arg0) == MINUS_EXPR)
12715 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12716 0)),
12717 arg1, 0)
12718 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12719 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12720 {
12721 tree val = TREE_OPERAND (arg0, 1);
12722 return omit_two_operands_loc (loc, type,
12723 fold_build2_loc (loc, code, type,
12724 val,
12725 build_int_cst (TREE_TYPE (val),
12726 0)),
12727 TREE_OPERAND (arg0, 0), arg1);
12728 }
12729
12730 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12731 if (TREE_CODE (arg0) == MINUS_EXPR
12732 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12733 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12734 1)),
12735 arg1, 0)
12736 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12737 {
12738 return omit_two_operands_loc (loc, type,
12739 code == NE_EXPR
12740 ? boolean_true_node : boolean_false_node,
12741 TREE_OPERAND (arg0, 1), arg1);
12742 }
12743
12744 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12745 if (TREE_CODE (arg0) == ABS_EXPR
12746 && (integer_zerop (arg1) || real_zerop (arg1)))
12747 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12748
12749 /* If this is an EQ or NE comparison with zero and ARG0 is
12750 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12751 two operations, but the latter can be done in one less insn
12752 on machines that have only two-operand insns or on which a
12753 constant cannot be the first operand. */
12754 if (TREE_CODE (arg0) == BIT_AND_EXPR
12755 && integer_zerop (arg1))
12756 {
12757 tree arg00 = TREE_OPERAND (arg0, 0);
12758 tree arg01 = TREE_OPERAND (arg0, 1);
12759 if (TREE_CODE (arg00) == LSHIFT_EXPR
12760 && integer_onep (TREE_OPERAND (arg00, 0)))
12761 {
12762 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12763 arg01, TREE_OPERAND (arg00, 1));
12764 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12765 build_int_cst (TREE_TYPE (arg0), 1));
12766 return fold_build2_loc (loc, code, type,
12767 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12768 arg1);
12769 }
12770 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12771 && integer_onep (TREE_OPERAND (arg01, 0)))
12772 {
12773 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12774 arg00, TREE_OPERAND (arg01, 1));
12775 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12776 build_int_cst (TREE_TYPE (arg0), 1));
12777 return fold_build2_loc (loc, code, type,
12778 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12779 arg1);
12780 }
12781 }
12782
12783 /* If this is an NE or EQ comparison of zero against the result of a
12784 signed MOD operation whose second operand is a power of 2, make
12785 the MOD operation unsigned since it is simpler and equivalent. */
12786 if (integer_zerop (arg1)
12787 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12788 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12789 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12790 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12791 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12792 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12793 {
12794 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12795 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12796 fold_convert_loc (loc, newtype,
12797 TREE_OPERAND (arg0, 0)),
12798 fold_convert_loc (loc, newtype,
12799 TREE_OPERAND (arg0, 1)));
12800
12801 return fold_build2_loc (loc, code, type, newmod,
12802 fold_convert_loc (loc, newtype, arg1));
12803 }
12804
12805 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12806 C1 is a valid shift constant, and C2 is a power of two, i.e.
12807 a single bit. */
12808 if (TREE_CODE (arg0) == BIT_AND_EXPR
12809 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12810 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12811 == INTEGER_CST
12812 && integer_pow2p (TREE_OPERAND (arg0, 1))
12813 && integer_zerop (arg1))
12814 {
12815 tree itype = TREE_TYPE (arg0);
12816 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12817 prec = TYPE_PRECISION (itype);
12818
12819 /* Check for a valid shift count. */
12820 if (wi::ltu_p (arg001, prec))
12821 {
12822 tree arg01 = TREE_OPERAND (arg0, 1);
12823 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12824 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12825 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12826 can be rewritten as (X & (C2 << C1)) != 0. */
12827 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12828 {
12829 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12830 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12831 return fold_build2_loc (loc, code, type, tem,
12832 fold_convert_loc (loc, itype, arg1));
12833 }
12834 /* Otherwise, for signed (arithmetic) shifts,
12835 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12836 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12837 else if (!TYPE_UNSIGNED (itype))
12838 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12839 arg000, build_int_cst (itype, 0));
12840 /* Otherwise, of unsigned (logical) shifts,
12841 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12842 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12843 else
12844 return omit_one_operand_loc (loc, type,
12845 code == EQ_EXPR ? integer_one_node
12846 : integer_zero_node,
12847 arg000);
12848 }
12849 }
12850
12851 /* If we have (A & C) == C where C is a power of 2, convert this into
12852 (A & C) != 0. Similarly for NE_EXPR. */
12853 if (TREE_CODE (arg0) == BIT_AND_EXPR
12854 && integer_pow2p (TREE_OPERAND (arg0, 1))
12855 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12856 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12857 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12858 integer_zero_node));
12859
12860 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12861 bit, then fold the expression into A < 0 or A >= 0. */
12862 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12863 if (tem)
12864 return tem;
12865
12866 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12867 Similarly for NE_EXPR. */
12868 if (TREE_CODE (arg0) == BIT_AND_EXPR
12869 && TREE_CODE (arg1) == INTEGER_CST
12870 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12871 {
12872 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12873 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12874 TREE_OPERAND (arg0, 1));
12875 tree dandnotc
12876 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12877 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12878 notc);
12879 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12880 if (integer_nonzerop (dandnotc))
12881 return omit_one_operand_loc (loc, type, rslt, arg0);
12882 }
12883
12884 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12885 Similarly for NE_EXPR. */
12886 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12887 && TREE_CODE (arg1) == INTEGER_CST
12888 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12889 {
12890 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12891 tree candnotd
12892 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12893 TREE_OPERAND (arg0, 1),
12894 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12895 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12896 if (integer_nonzerop (candnotd))
12897 return omit_one_operand_loc (loc, type, rslt, arg0);
12898 }
12899
12900 /* If this is a comparison of a field, we may be able to simplify it. */
12901 if ((TREE_CODE (arg0) == COMPONENT_REF
12902 || TREE_CODE (arg0) == BIT_FIELD_REF)
12903 /* Handle the constant case even without -O
12904 to make sure the warnings are given. */
12905 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12906 {
12907 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12908 if (t1)
12909 return t1;
12910 }
12911
12912 /* Optimize comparisons of strlen vs zero to a compare of the
12913 first character of the string vs zero. To wit,
12914 strlen(ptr) == 0 => *ptr == 0
12915 strlen(ptr) != 0 => *ptr != 0
12916 Other cases should reduce to one of these two (or a constant)
12917 due to the return value of strlen being unsigned. */
12918 if (TREE_CODE (arg0) == CALL_EXPR
12919 && integer_zerop (arg1))
12920 {
12921 tree fndecl = get_callee_fndecl (arg0);
12922
12923 if (fndecl
12924 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12925 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12926 && call_expr_nargs (arg0) == 1
12927 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12928 {
12929 tree iref = build_fold_indirect_ref_loc (loc,
12930 CALL_EXPR_ARG (arg0, 0));
12931 return fold_build2_loc (loc, code, type, iref,
12932 build_int_cst (TREE_TYPE (iref), 0));
12933 }
12934 }
12935
12936 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12937 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12938 if (TREE_CODE (arg0) == RSHIFT_EXPR
12939 && integer_zerop (arg1)
12940 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12941 {
12942 tree arg00 = TREE_OPERAND (arg0, 0);
12943 tree arg01 = TREE_OPERAND (arg0, 1);
12944 tree itype = TREE_TYPE (arg00);
12945 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
12946 {
12947 if (TYPE_UNSIGNED (itype))
12948 {
12949 itype = signed_type_for (itype);
12950 arg00 = fold_convert_loc (loc, itype, arg00);
12951 }
12952 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12953 type, arg00, build_zero_cst (itype));
12954 }
12955 }
12956
12957 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12958 if (integer_zerop (arg1)
12959 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12960 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12961 TREE_OPERAND (arg0, 1));
12962
12963 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12964 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12965 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12966 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12967 build_zero_cst (TREE_TYPE (arg0)));
12968 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12969 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12970 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12971 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12972 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12973 build_zero_cst (TREE_TYPE (arg0)));
12974
12975 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12976 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12977 && TREE_CODE (arg1) == INTEGER_CST
12978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12979 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12980 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12981 TREE_OPERAND (arg0, 1), arg1));
12982
12983 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12984 (X & C) == 0 when C is a single bit. */
12985 if (TREE_CODE (arg0) == BIT_AND_EXPR
12986 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12987 && integer_zerop (arg1)
12988 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12989 {
12990 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12991 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12992 TREE_OPERAND (arg0, 1));
12993 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12994 type, tem,
12995 fold_convert_loc (loc, TREE_TYPE (arg0),
12996 arg1));
12997 }
12998
12999 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13000 constant C is a power of two, i.e. a single bit. */
13001 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13002 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13003 && integer_zerop (arg1)
13004 && integer_pow2p (TREE_OPERAND (arg0, 1))
13005 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13006 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13007 {
13008 tree arg00 = TREE_OPERAND (arg0, 0);
13009 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13010 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13011 }
13012
13013 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13014 when is C is a power of two, i.e. a single bit. */
13015 if (TREE_CODE (arg0) == BIT_AND_EXPR
13016 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13017 && integer_zerop (arg1)
13018 && integer_pow2p (TREE_OPERAND (arg0, 1))
13019 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13020 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13021 {
13022 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13023 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13024 arg000, TREE_OPERAND (arg0, 1));
13025 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13026 tem, build_int_cst (TREE_TYPE (tem), 0));
13027 }
13028
13029 if (integer_zerop (arg1)
13030 && tree_expr_nonzero_p (arg0))
13031 {
13032 tree res = constant_boolean_node (code==NE_EXPR, type);
13033 return omit_one_operand_loc (loc, type, res, arg0);
13034 }
13035
13036 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13037 if (TREE_CODE (arg0) == NEGATE_EXPR
13038 && TREE_CODE (arg1) == NEGATE_EXPR)
13039 return fold_build2_loc (loc, code, type,
13040 TREE_OPERAND (arg0, 0),
13041 fold_convert_loc (loc, TREE_TYPE (arg0),
13042 TREE_OPERAND (arg1, 0)));
13043
13044 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
13045 if (TREE_CODE (arg0) == BIT_AND_EXPR
13046 && TREE_CODE (arg1) == BIT_AND_EXPR)
13047 {
13048 tree arg00 = TREE_OPERAND (arg0, 0);
13049 tree arg01 = TREE_OPERAND (arg0, 1);
13050 tree arg10 = TREE_OPERAND (arg1, 0);
13051 tree arg11 = TREE_OPERAND (arg1, 1);
13052 tree itype = TREE_TYPE (arg0);
13053
13054 if (operand_equal_p (arg01, arg11, 0))
13055 return fold_build2_loc (loc, code, type,
13056 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13057 fold_build2_loc (loc,
13058 BIT_XOR_EXPR, itype,
13059 arg00, arg10),
13060 arg01),
13061 build_zero_cst (itype));
13062
13063 if (operand_equal_p (arg01, arg10, 0))
13064 return fold_build2_loc (loc, code, type,
13065 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13066 fold_build2_loc (loc,
13067 BIT_XOR_EXPR, itype,
13068 arg00, arg11),
13069 arg01),
13070 build_zero_cst (itype));
13071
13072 if (operand_equal_p (arg00, arg11, 0))
13073 return fold_build2_loc (loc, code, type,
13074 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13075 fold_build2_loc (loc,
13076 BIT_XOR_EXPR, itype,
13077 arg01, arg10),
13078 arg00),
13079 build_zero_cst (itype));
13080
13081 if (operand_equal_p (arg00, arg10, 0))
13082 return fold_build2_loc (loc, code, type,
13083 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13084 fold_build2_loc (loc,
13085 BIT_XOR_EXPR, itype,
13086 arg01, arg11),
13087 arg00),
13088 build_zero_cst (itype));
13089 }
13090
13091 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13092 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13093 {
13094 tree arg00 = TREE_OPERAND (arg0, 0);
13095 tree arg01 = TREE_OPERAND (arg0, 1);
13096 tree arg10 = TREE_OPERAND (arg1, 0);
13097 tree arg11 = TREE_OPERAND (arg1, 1);
13098 tree itype = TREE_TYPE (arg0);
13099
13100 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13101 operand_equal_p guarantees no side-effects so we don't need
13102 to use omit_one_operand on Z. */
13103 if (operand_equal_p (arg01, arg11, 0))
13104 return fold_build2_loc (loc, code, type, arg00,
13105 fold_convert_loc (loc, TREE_TYPE (arg00),
13106 arg10));
13107 if (operand_equal_p (arg01, arg10, 0))
13108 return fold_build2_loc (loc, code, type, arg00,
13109 fold_convert_loc (loc, TREE_TYPE (arg00),
13110 arg11));
13111 if (operand_equal_p (arg00, arg11, 0))
13112 return fold_build2_loc (loc, code, type, arg01,
13113 fold_convert_loc (loc, TREE_TYPE (arg01),
13114 arg10));
13115 if (operand_equal_p (arg00, arg10, 0))
13116 return fold_build2_loc (loc, code, type, arg01,
13117 fold_convert_loc (loc, TREE_TYPE (arg01),
13118 arg11));
13119
13120 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13121 if (TREE_CODE (arg01) == INTEGER_CST
13122 && TREE_CODE (arg11) == INTEGER_CST)
13123 {
13124 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13125 fold_convert_loc (loc, itype, arg11));
13126 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13127 return fold_build2_loc (loc, code, type, tem,
13128 fold_convert_loc (loc, itype, arg10));
13129 }
13130 }
13131
13132 /* Attempt to simplify equality/inequality comparisons of complex
13133 values. Only lower the comparison if the result is known or
13134 can be simplified to a single scalar comparison. */
13135 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13136 || TREE_CODE (arg0) == COMPLEX_CST)
13137 && (TREE_CODE (arg1) == COMPLEX_EXPR
13138 || TREE_CODE (arg1) == COMPLEX_CST))
13139 {
13140 tree real0, imag0, real1, imag1;
13141 tree rcond, icond;
13142
13143 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13144 {
13145 real0 = TREE_OPERAND (arg0, 0);
13146 imag0 = TREE_OPERAND (arg0, 1);
13147 }
13148 else
13149 {
13150 real0 = TREE_REALPART (arg0);
13151 imag0 = TREE_IMAGPART (arg0);
13152 }
13153
13154 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13155 {
13156 real1 = TREE_OPERAND (arg1, 0);
13157 imag1 = TREE_OPERAND (arg1, 1);
13158 }
13159 else
13160 {
13161 real1 = TREE_REALPART (arg1);
13162 imag1 = TREE_IMAGPART (arg1);
13163 }
13164
13165 rcond = fold_binary_loc (loc, code, type, real0, real1);
13166 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13167 {
13168 if (integer_zerop (rcond))
13169 {
13170 if (code == EQ_EXPR)
13171 return omit_two_operands_loc (loc, type, boolean_false_node,
13172 imag0, imag1);
13173 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13174 }
13175 else
13176 {
13177 if (code == NE_EXPR)
13178 return omit_two_operands_loc (loc, type, boolean_true_node,
13179 imag0, imag1);
13180 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13181 }
13182 }
13183
13184 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13185 if (icond && TREE_CODE (icond) == INTEGER_CST)
13186 {
13187 if (integer_zerop (icond))
13188 {
13189 if (code == EQ_EXPR)
13190 return omit_two_operands_loc (loc, type, boolean_false_node,
13191 real0, real1);
13192 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13193 }
13194 else
13195 {
13196 if (code == NE_EXPR)
13197 return omit_two_operands_loc (loc, type, boolean_true_node,
13198 real0, real1);
13199 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13200 }
13201 }
13202 }
13203
13204 return NULL_TREE;
13205
13206 case LT_EXPR:
13207 case GT_EXPR:
13208 case LE_EXPR:
13209 case GE_EXPR:
13210 tem = fold_comparison (loc, code, type, op0, op1);
13211 if (tem != NULL_TREE)
13212 return tem;
13213
13214 /* Transform comparisons of the form X +- C CMP X. */
13215 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13216 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13217 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13218 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13219 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13220 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13221 {
13222 tree arg01 = TREE_OPERAND (arg0, 1);
13223 enum tree_code code0 = TREE_CODE (arg0);
13224 int is_positive;
13225
13226 if (TREE_CODE (arg01) == REAL_CST)
13227 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13228 else
13229 is_positive = tree_int_cst_sgn (arg01);
13230
13231 /* (X - c) > X becomes false. */
13232 if (code == GT_EXPR
13233 && ((code0 == MINUS_EXPR && is_positive >= 0)
13234 || (code0 == PLUS_EXPR && is_positive <= 0)))
13235 {
13236 if (TREE_CODE (arg01) == INTEGER_CST
13237 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13238 fold_overflow_warning (("assuming signed overflow does not "
13239 "occur when assuming that (X - c) > X "
13240 "is always false"),
13241 WARN_STRICT_OVERFLOW_ALL);
13242 return constant_boolean_node (0, type);
13243 }
13244
13245 /* Likewise (X + c) < X becomes false. */
13246 if (code == LT_EXPR
13247 && ((code0 == PLUS_EXPR && is_positive >= 0)
13248 || (code0 == MINUS_EXPR && is_positive <= 0)))
13249 {
13250 if (TREE_CODE (arg01) == INTEGER_CST
13251 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13252 fold_overflow_warning (("assuming signed overflow does not "
13253 "occur when assuming that "
13254 "(X + c) < X is always false"),
13255 WARN_STRICT_OVERFLOW_ALL);
13256 return constant_boolean_node (0, type);
13257 }
13258
13259 /* Convert (X - c) <= X to true. */
13260 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13261 && code == LE_EXPR
13262 && ((code0 == MINUS_EXPR && is_positive >= 0)
13263 || (code0 == PLUS_EXPR && is_positive <= 0)))
13264 {
13265 if (TREE_CODE (arg01) == INTEGER_CST
13266 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13267 fold_overflow_warning (("assuming signed overflow does not "
13268 "occur when assuming that "
13269 "(X - c) <= X is always true"),
13270 WARN_STRICT_OVERFLOW_ALL);
13271 return constant_boolean_node (1, type);
13272 }
13273
13274 /* Convert (X + c) >= X to true. */
13275 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13276 && code == GE_EXPR
13277 && ((code0 == PLUS_EXPR && is_positive >= 0)
13278 || (code0 == MINUS_EXPR && is_positive <= 0)))
13279 {
13280 if (TREE_CODE (arg01) == INTEGER_CST
13281 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13282 fold_overflow_warning (("assuming signed overflow does not "
13283 "occur when assuming that "
13284 "(X + c) >= X is always true"),
13285 WARN_STRICT_OVERFLOW_ALL);
13286 return constant_boolean_node (1, type);
13287 }
13288
13289 if (TREE_CODE (arg01) == INTEGER_CST)
13290 {
13291 /* Convert X + c > X and X - c < X to true for integers. */
13292 if (code == GT_EXPR
13293 && ((code0 == PLUS_EXPR && is_positive > 0)
13294 || (code0 == MINUS_EXPR && is_positive < 0)))
13295 {
13296 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13297 fold_overflow_warning (("assuming signed overflow does "
13298 "not occur when assuming that "
13299 "(X + c) > X is always true"),
13300 WARN_STRICT_OVERFLOW_ALL);
13301 return constant_boolean_node (1, type);
13302 }
13303
13304 if (code == LT_EXPR
13305 && ((code0 == MINUS_EXPR && is_positive > 0)
13306 || (code0 == PLUS_EXPR && is_positive < 0)))
13307 {
13308 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13309 fold_overflow_warning (("assuming signed overflow does "
13310 "not occur when assuming that "
13311 "(X - c) < X is always true"),
13312 WARN_STRICT_OVERFLOW_ALL);
13313 return constant_boolean_node (1, type);
13314 }
13315
13316 /* Convert X + c <= X and X - c >= X to false for integers. */
13317 if (code == LE_EXPR
13318 && ((code0 == PLUS_EXPR && is_positive > 0)
13319 || (code0 == MINUS_EXPR && is_positive < 0)))
13320 {
13321 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13322 fold_overflow_warning (("assuming signed overflow does "
13323 "not occur when assuming that "
13324 "(X + c) <= X is always false"),
13325 WARN_STRICT_OVERFLOW_ALL);
13326 return constant_boolean_node (0, type);
13327 }
13328
13329 if (code == GE_EXPR
13330 && ((code0 == MINUS_EXPR && is_positive > 0)
13331 || (code0 == PLUS_EXPR && is_positive < 0)))
13332 {
13333 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13334 fold_overflow_warning (("assuming signed overflow does "
13335 "not occur when assuming that "
13336 "(X - c) >= X is always false"),
13337 WARN_STRICT_OVERFLOW_ALL);
13338 return constant_boolean_node (0, type);
13339 }
13340 }
13341 }
13342
13343 /* Comparisons with the highest or lowest possible integer of
13344 the specified precision will have known values. */
13345 {
13346 tree arg1_type = TREE_TYPE (arg1);
13347 unsigned int prec = TYPE_PRECISION (arg1_type);
13348
13349 if (TREE_CODE (arg1) == INTEGER_CST
13350 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13351 {
13352 wide_int max = wi::max_value (arg1_type);
13353 wide_int signed_max = wi::max_value (prec, SIGNED);
13354 wide_int min = wi::min_value (arg1_type);
13355
13356 if (wi::eq_p (arg1, max))
13357 switch (code)
13358 {
13359 case GT_EXPR:
13360 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13361
13362 case GE_EXPR:
13363 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13364
13365 case LE_EXPR:
13366 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13367
13368 case LT_EXPR:
13369 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13370
13371 /* The GE_EXPR and LT_EXPR cases above are not normally
13372 reached because of previous transformations. */
13373
13374 default:
13375 break;
13376 }
13377 else if (wi::eq_p (arg1, max - 1))
13378 switch (code)
13379 {
13380 case GT_EXPR:
13381 arg1 = const_binop (PLUS_EXPR, arg1,
13382 build_int_cst (TREE_TYPE (arg1), 1));
13383 return fold_build2_loc (loc, EQ_EXPR, type,
13384 fold_convert_loc (loc,
13385 TREE_TYPE (arg1), arg0),
13386 arg1);
13387 case LE_EXPR:
13388 arg1 = const_binop (PLUS_EXPR, arg1,
13389 build_int_cst (TREE_TYPE (arg1), 1));
13390 return fold_build2_loc (loc, NE_EXPR, type,
13391 fold_convert_loc (loc, TREE_TYPE (arg1),
13392 arg0),
13393 arg1);
13394 default:
13395 break;
13396 }
13397 else if (wi::eq_p (arg1, min))
13398 switch (code)
13399 {
13400 case LT_EXPR:
13401 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13402
13403 case LE_EXPR:
13404 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13405
13406 case GE_EXPR:
13407 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13408
13409 case GT_EXPR:
13410 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13411
13412 default:
13413 break;
13414 }
13415 else if (wi::eq_p (arg1, min + 1))
13416 switch (code)
13417 {
13418 case GE_EXPR:
13419 arg1 = const_binop (MINUS_EXPR, arg1,
13420 build_int_cst (TREE_TYPE (arg1), 1));
13421 return fold_build2_loc (loc, NE_EXPR, type,
13422 fold_convert_loc (loc,
13423 TREE_TYPE (arg1), arg0),
13424 arg1);
13425 case LT_EXPR:
13426 arg1 = const_binop (MINUS_EXPR, arg1,
13427 build_int_cst (TREE_TYPE (arg1), 1));
13428 return fold_build2_loc (loc, EQ_EXPR, type,
13429 fold_convert_loc (loc, TREE_TYPE (arg1),
13430 arg0),
13431 arg1);
13432 default:
13433 break;
13434 }
13435
13436 else if (wi::eq_p (arg1, signed_max)
13437 && TYPE_UNSIGNED (arg1_type)
13438 /* We will flip the signedness of the comparison operator
13439 associated with the mode of arg1, so the sign bit is
13440 specified by this mode. Check that arg1 is the signed
13441 max associated with this sign bit. */
13442 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13443 /* signed_type does not work on pointer types. */
13444 && INTEGRAL_TYPE_P (arg1_type))
13445 {
13446 /* The following case also applies to X < signed_max+1
13447 and X >= signed_max+1 because previous transformations. */
13448 if (code == LE_EXPR || code == GT_EXPR)
13449 {
13450 tree st = signed_type_for (arg1_type);
13451 return fold_build2_loc (loc,
13452 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13453 type, fold_convert_loc (loc, st, arg0),
13454 build_int_cst (st, 0));
13455 }
13456 }
13457 }
13458 }
13459
13460 /* If we are comparing an ABS_EXPR with a constant, we can
13461 convert all the cases into explicit comparisons, but they may
13462 well not be faster than doing the ABS and one comparison.
13463 But ABS (X) <= C is a range comparison, which becomes a subtraction
13464 and a comparison, and is probably faster. */
13465 if (code == LE_EXPR
13466 && TREE_CODE (arg1) == INTEGER_CST
13467 && TREE_CODE (arg0) == ABS_EXPR
13468 && ! TREE_SIDE_EFFECTS (arg0)
13469 && (0 != (tem = negate_expr (arg1)))
13470 && TREE_CODE (tem) == INTEGER_CST
13471 && !TREE_OVERFLOW (tem))
13472 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13473 build2 (GE_EXPR, type,
13474 TREE_OPERAND (arg0, 0), tem),
13475 build2 (LE_EXPR, type,
13476 TREE_OPERAND (arg0, 0), arg1));
13477
13478 /* Convert ABS_EXPR<x> >= 0 to true. */
13479 strict_overflow_p = false;
13480 if (code == GE_EXPR
13481 && (integer_zerop (arg1)
13482 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13483 && real_zerop (arg1)))
13484 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13485 {
13486 if (strict_overflow_p)
13487 fold_overflow_warning (("assuming signed overflow does not occur "
13488 "when simplifying comparison of "
13489 "absolute value and zero"),
13490 WARN_STRICT_OVERFLOW_CONDITIONAL);
13491 return omit_one_operand_loc (loc, type,
13492 constant_boolean_node (true, type),
13493 arg0);
13494 }
13495
13496 /* Convert ABS_EXPR<x> < 0 to false. */
13497 strict_overflow_p = false;
13498 if (code == LT_EXPR
13499 && (integer_zerop (arg1) || real_zerop (arg1))
13500 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13501 {
13502 if (strict_overflow_p)
13503 fold_overflow_warning (("assuming signed overflow does not occur "
13504 "when simplifying comparison of "
13505 "absolute value and zero"),
13506 WARN_STRICT_OVERFLOW_CONDITIONAL);
13507 return omit_one_operand_loc (loc, type,
13508 constant_boolean_node (false, type),
13509 arg0);
13510 }
13511
13512 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13513 and similarly for >= into !=. */
13514 if ((code == LT_EXPR || code == GE_EXPR)
13515 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13516 && TREE_CODE (arg1) == LSHIFT_EXPR
13517 && integer_onep (TREE_OPERAND (arg1, 0)))
13518 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13519 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13520 TREE_OPERAND (arg1, 1)),
13521 build_zero_cst (TREE_TYPE (arg0)));
13522
13523 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13524 otherwise Y might be >= # of bits in X's type and thus e.g.
13525 (unsigned char) (1 << Y) for Y 15 might be 0.
13526 If the cast is widening, then 1 << Y should have unsigned type,
13527 otherwise if Y is number of bits in the signed shift type minus 1,
13528 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13529 31 might be 0xffffffff80000000. */
13530 if ((code == LT_EXPR || code == GE_EXPR)
13531 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13532 && CONVERT_EXPR_P (arg1)
13533 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13534 && (TYPE_PRECISION (TREE_TYPE (arg1))
13535 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13536 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13537 || (TYPE_PRECISION (TREE_TYPE (arg1))
13538 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13539 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13540 {
13541 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13542 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13543 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13544 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13545 build_zero_cst (TREE_TYPE (arg0)));
13546 }
13547
13548 return NULL_TREE;
13549
13550 case UNORDERED_EXPR:
13551 case ORDERED_EXPR:
13552 case UNLT_EXPR:
13553 case UNLE_EXPR:
13554 case UNGT_EXPR:
13555 case UNGE_EXPR:
13556 case UNEQ_EXPR:
13557 case LTGT_EXPR:
13558 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13559 {
13560 t1 = fold_relational_const (code, type, arg0, arg1);
13561 if (t1 != NULL_TREE)
13562 return t1;
13563 }
13564
13565 /* If the first operand is NaN, the result is constant. */
13566 if (TREE_CODE (arg0) == REAL_CST
13567 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13568 && (code != LTGT_EXPR || ! flag_trapping_math))
13569 {
13570 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13571 ? integer_zero_node
13572 : integer_one_node;
13573 return omit_one_operand_loc (loc, type, t1, arg1);
13574 }
13575
13576 /* If the second operand is NaN, the result is constant. */
13577 if (TREE_CODE (arg1) == REAL_CST
13578 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13579 && (code != LTGT_EXPR || ! flag_trapping_math))
13580 {
13581 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13582 ? integer_zero_node
13583 : integer_one_node;
13584 return omit_one_operand_loc (loc, type, t1, arg0);
13585 }
13586
13587 /* Simplify unordered comparison of something with itself. */
13588 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13589 && operand_equal_p (arg0, arg1, 0))
13590 return constant_boolean_node (1, type);
13591
13592 if (code == LTGT_EXPR
13593 && !flag_trapping_math
13594 && operand_equal_p (arg0, arg1, 0))
13595 return constant_boolean_node (0, type);
13596
13597 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13598 {
13599 tree targ0 = strip_float_extensions (arg0);
13600 tree targ1 = strip_float_extensions (arg1);
13601 tree newtype = TREE_TYPE (targ0);
13602
13603 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13604 newtype = TREE_TYPE (targ1);
13605
13606 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13607 return fold_build2_loc (loc, code, type,
13608 fold_convert_loc (loc, newtype, targ0),
13609 fold_convert_loc (loc, newtype, targ1));
13610 }
13611
13612 return NULL_TREE;
13613
13614 case COMPOUND_EXPR:
13615 /* When pedantic, a compound expression can be neither an lvalue
13616 nor an integer constant expression. */
13617 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13618 return NULL_TREE;
13619 /* Don't let (0, 0) be null pointer constant. */
13620 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13621 : fold_convert_loc (loc, type, arg1);
13622 return pedantic_non_lvalue_loc (loc, tem);
13623
13624 case COMPLEX_EXPR:
13625 if ((TREE_CODE (arg0) == REAL_CST
13626 && TREE_CODE (arg1) == REAL_CST)
13627 || (TREE_CODE (arg0) == INTEGER_CST
13628 && TREE_CODE (arg1) == INTEGER_CST))
13629 return build_complex (type, arg0, arg1);
13630 if (TREE_CODE (arg0) == REALPART_EXPR
13631 && TREE_CODE (arg1) == IMAGPART_EXPR
13632 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13633 && operand_equal_p (TREE_OPERAND (arg0, 0),
13634 TREE_OPERAND (arg1, 0), 0))
13635 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13636 TREE_OPERAND (arg1, 0));
13637 return NULL_TREE;
13638
13639 case ASSERT_EXPR:
13640 /* An ASSERT_EXPR should never be passed to fold_binary. */
13641 gcc_unreachable ();
13642
13643 case VEC_PACK_TRUNC_EXPR:
13644 case VEC_PACK_FIX_TRUNC_EXPR:
13645 {
13646 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13647 tree *elts;
13648
13649 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13650 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13651 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13652 return NULL_TREE;
13653
13654 elts = XALLOCAVEC (tree, nelts);
13655 if (!vec_cst_ctor_to_array (arg0, elts)
13656 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13657 return NULL_TREE;
13658
13659 for (i = 0; i < nelts; i++)
13660 {
13661 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13662 ? NOP_EXPR : FIX_TRUNC_EXPR,
13663 TREE_TYPE (type), elts[i]);
13664 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13665 return NULL_TREE;
13666 }
13667
13668 return build_vector (type, elts);
13669 }
13670
13671 case VEC_WIDEN_MULT_LO_EXPR:
13672 case VEC_WIDEN_MULT_HI_EXPR:
13673 case VEC_WIDEN_MULT_EVEN_EXPR:
13674 case VEC_WIDEN_MULT_ODD_EXPR:
13675 {
13676 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13677 unsigned int out, ofs, scale;
13678 tree *elts;
13679
13680 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13681 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13682 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13683 return NULL_TREE;
13684
13685 elts = XALLOCAVEC (tree, nelts * 4);
13686 if (!vec_cst_ctor_to_array (arg0, elts)
13687 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13688 return NULL_TREE;
13689
13690 if (code == VEC_WIDEN_MULT_LO_EXPR)
13691 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13692 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13693 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13694 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13695 scale = 1, ofs = 0;
13696 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13697 scale = 1, ofs = 1;
13698
13699 for (out = 0; out < nelts; out++)
13700 {
13701 unsigned int in1 = (out << scale) + ofs;
13702 unsigned int in2 = in1 + nelts * 2;
13703 tree t1, t2;
13704
13705 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13706 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13707
13708 if (t1 == NULL_TREE || t2 == NULL_TREE)
13709 return NULL_TREE;
13710 elts[out] = const_binop (MULT_EXPR, t1, t2);
13711 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13712 return NULL_TREE;
13713 }
13714
13715 return build_vector (type, elts);
13716 }
13717
13718 default:
13719 return NULL_TREE;
13720 } /* switch (code) */
13721 }
13722
13723 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13724 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13725 of GOTO_EXPR. */
13726
13727 static tree
13728 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13729 {
13730 switch (TREE_CODE (*tp))
13731 {
13732 case LABEL_EXPR:
13733 return *tp;
13734
13735 case GOTO_EXPR:
13736 *walk_subtrees = 0;
13737
13738 /* ... fall through ... */
13739
13740 default:
13741 return NULL_TREE;
13742 }
13743 }
13744
13745 /* Return whether the sub-tree ST contains a label which is accessible from
13746 outside the sub-tree. */
13747
13748 static bool
13749 contains_label_p (tree st)
13750 {
13751 return
13752 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13753 }
13754
13755 /* Fold a ternary expression of code CODE and type TYPE with operands
13756 OP0, OP1, and OP2. Return the folded expression if folding is
13757 successful. Otherwise, return NULL_TREE. */
13758
13759 tree
13760 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13761 tree op0, tree op1, tree op2)
13762 {
13763 tree tem;
13764 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13765 enum tree_code_class kind = TREE_CODE_CLASS (code);
13766
13767 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13768 && TREE_CODE_LENGTH (code) == 3);
13769
13770 /* If this is a commutative operation, and OP0 is a constant, move it
13771 to OP1 to reduce the number of tests below. */
13772 if (commutative_ternary_tree_code (code)
13773 && tree_swap_operands_p (op0, op1, true))
13774 return fold_build3_loc (loc, code, type, op1, op0, op2);
13775
13776 tem = generic_simplify (loc, code, type, op0, op1, op2);
13777 if (tem)
13778 return tem;
13779
13780 /* Strip any conversions that don't change the mode. This is safe
13781 for every expression, except for a comparison expression because
13782 its signedness is derived from its operands. So, in the latter
13783 case, only strip conversions that don't change the signedness.
13784
13785 Note that this is done as an internal manipulation within the
13786 constant folder, in order to find the simplest representation of
13787 the arguments so that their form can be studied. In any cases,
13788 the appropriate type conversions should be put back in the tree
13789 that will get out of the constant folder. */
13790 if (op0)
13791 {
13792 arg0 = op0;
13793 STRIP_NOPS (arg0);
13794 }
13795
13796 if (op1)
13797 {
13798 arg1 = op1;
13799 STRIP_NOPS (arg1);
13800 }
13801
13802 if (op2)
13803 {
13804 arg2 = op2;
13805 STRIP_NOPS (arg2);
13806 }
13807
13808 switch (code)
13809 {
13810 case COMPONENT_REF:
13811 if (TREE_CODE (arg0) == CONSTRUCTOR
13812 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13813 {
13814 unsigned HOST_WIDE_INT idx;
13815 tree field, value;
13816 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13817 if (field == arg1)
13818 return value;
13819 }
13820 return NULL_TREE;
13821
13822 case COND_EXPR:
13823 case VEC_COND_EXPR:
13824 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13825 so all simple results must be passed through pedantic_non_lvalue. */
13826 if (TREE_CODE (arg0) == INTEGER_CST)
13827 {
13828 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13829 tem = integer_zerop (arg0) ? op2 : op1;
13830 /* Only optimize constant conditions when the selected branch
13831 has the same type as the COND_EXPR. This avoids optimizing
13832 away "c ? x : throw", where the throw has a void type.
13833 Avoid throwing away that operand which contains label. */
13834 if ((!TREE_SIDE_EFFECTS (unused_op)
13835 || !contains_label_p (unused_op))
13836 && (! VOID_TYPE_P (TREE_TYPE (tem))
13837 || VOID_TYPE_P (type)))
13838 return pedantic_non_lvalue_loc (loc, tem);
13839 return NULL_TREE;
13840 }
13841 else if (TREE_CODE (arg0) == VECTOR_CST)
13842 {
13843 if (integer_all_onesp (arg0))
13844 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13845 if (integer_zerop (arg0))
13846 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13847
13848 if ((TREE_CODE (arg1) == VECTOR_CST
13849 || TREE_CODE (arg1) == CONSTRUCTOR)
13850 && (TREE_CODE (arg2) == VECTOR_CST
13851 || TREE_CODE (arg2) == CONSTRUCTOR))
13852 {
13853 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13854 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13855 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13856 for (i = 0; i < nelts; i++)
13857 {
13858 tree val = VECTOR_CST_ELT (arg0, i);
13859 if (integer_all_onesp (val))
13860 sel[i] = i;
13861 else if (integer_zerop (val))
13862 sel[i] = nelts + i;
13863 else /* Currently unreachable. */
13864 return NULL_TREE;
13865 }
13866 tree t = fold_vec_perm (type, arg1, arg2, sel);
13867 if (t != NULL_TREE)
13868 return t;
13869 }
13870 }
13871
13872 if (operand_equal_p (arg1, op2, 0))
13873 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13874
13875 /* If we have A op B ? A : C, we may be able to convert this to a
13876 simpler expression, depending on the operation and the values
13877 of B and C. Signed zeros prevent all of these transformations,
13878 for reasons given above each one.
13879
13880 Also try swapping the arguments and inverting the conditional. */
13881 if (COMPARISON_CLASS_P (arg0)
13882 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13883 arg1, TREE_OPERAND (arg0, 1))
13884 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13885 {
13886 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13887 if (tem)
13888 return tem;
13889 }
13890
13891 if (COMPARISON_CLASS_P (arg0)
13892 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13893 op2,
13894 TREE_OPERAND (arg0, 1))
13895 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13896 {
13897 location_t loc0 = expr_location_or (arg0, loc);
13898 tem = fold_invert_truthvalue (loc0, arg0);
13899 if (tem && COMPARISON_CLASS_P (tem))
13900 {
13901 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13902 if (tem)
13903 return tem;
13904 }
13905 }
13906
13907 /* If the second operand is simpler than the third, swap them
13908 since that produces better jump optimization results. */
13909 if (truth_value_p (TREE_CODE (arg0))
13910 && tree_swap_operands_p (op1, op2, false))
13911 {
13912 location_t loc0 = expr_location_or (arg0, loc);
13913 /* See if this can be inverted. If it can't, possibly because
13914 it was a floating-point inequality comparison, don't do
13915 anything. */
13916 tem = fold_invert_truthvalue (loc0, arg0);
13917 if (tem)
13918 return fold_build3_loc (loc, code, type, tem, op2, op1);
13919 }
13920
13921 /* Convert A ? 1 : 0 to simply A. */
13922 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13923 : (integer_onep (op1)
13924 && !VECTOR_TYPE_P (type)))
13925 && integer_zerop (op2)
13926 /* If we try to convert OP0 to our type, the
13927 call to fold will try to move the conversion inside
13928 a COND, which will recurse. In that case, the COND_EXPR
13929 is probably the best choice, so leave it alone. */
13930 && type == TREE_TYPE (arg0))
13931 return pedantic_non_lvalue_loc (loc, arg0);
13932
13933 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13934 over COND_EXPR in cases such as floating point comparisons. */
13935 if (integer_zerop (op1)
13936 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13937 : (integer_onep (op2)
13938 && !VECTOR_TYPE_P (type)))
13939 && truth_value_p (TREE_CODE (arg0)))
13940 return pedantic_non_lvalue_loc (loc,
13941 fold_convert_loc (loc, type,
13942 invert_truthvalue_loc (loc,
13943 arg0)));
13944
13945 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13946 if (TREE_CODE (arg0) == LT_EXPR
13947 && integer_zerop (TREE_OPERAND (arg0, 1))
13948 && integer_zerop (op2)
13949 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13950 {
13951 /* sign_bit_p looks through both zero and sign extensions,
13952 but for this optimization only sign extensions are
13953 usable. */
13954 tree tem2 = TREE_OPERAND (arg0, 0);
13955 while (tem != tem2)
13956 {
13957 if (TREE_CODE (tem2) != NOP_EXPR
13958 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13959 {
13960 tem = NULL_TREE;
13961 break;
13962 }
13963 tem2 = TREE_OPERAND (tem2, 0);
13964 }
13965 /* sign_bit_p only checks ARG1 bits within A's precision.
13966 If <sign bit of A> has wider type than A, bits outside
13967 of A's precision in <sign bit of A> need to be checked.
13968 If they are all 0, this optimization needs to be done
13969 in unsigned A's type, if they are all 1 in signed A's type,
13970 otherwise this can't be done. */
13971 if (tem
13972 && TYPE_PRECISION (TREE_TYPE (tem))
13973 < TYPE_PRECISION (TREE_TYPE (arg1))
13974 && TYPE_PRECISION (TREE_TYPE (tem))
13975 < TYPE_PRECISION (type))
13976 {
13977 int inner_width, outer_width;
13978 tree tem_type;
13979
13980 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13981 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13982 if (outer_width > TYPE_PRECISION (type))
13983 outer_width = TYPE_PRECISION (type);
13984
13985 wide_int mask = wi::shifted_mask
13986 (inner_width, outer_width - inner_width, false,
13987 TYPE_PRECISION (TREE_TYPE (arg1)));
13988
13989 wide_int common = mask & arg1;
13990 if (common == mask)
13991 {
13992 tem_type = signed_type_for (TREE_TYPE (tem));
13993 tem = fold_convert_loc (loc, tem_type, tem);
13994 }
13995 else if (common == 0)
13996 {
13997 tem_type = unsigned_type_for (TREE_TYPE (tem));
13998 tem = fold_convert_loc (loc, tem_type, tem);
13999 }
14000 else
14001 tem = NULL;
14002 }
14003
14004 if (tem)
14005 return
14006 fold_convert_loc (loc, type,
14007 fold_build2_loc (loc, BIT_AND_EXPR,
14008 TREE_TYPE (tem), tem,
14009 fold_convert_loc (loc,
14010 TREE_TYPE (tem),
14011 arg1)));
14012 }
14013
14014 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14015 already handled above. */
14016 if (TREE_CODE (arg0) == BIT_AND_EXPR
14017 && integer_onep (TREE_OPERAND (arg0, 1))
14018 && integer_zerop (op2)
14019 && integer_pow2p (arg1))
14020 {
14021 tree tem = TREE_OPERAND (arg0, 0);
14022 STRIP_NOPS (tem);
14023 if (TREE_CODE (tem) == RSHIFT_EXPR
14024 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14025 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14026 tree_to_uhwi (TREE_OPERAND (tem, 1)))
14027 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14028 TREE_OPERAND (tem, 0), arg1);
14029 }
14030
14031 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14032 is probably obsolete because the first operand should be a
14033 truth value (that's why we have the two cases above), but let's
14034 leave it in until we can confirm this for all front-ends. */
14035 if (integer_zerop (op2)
14036 && TREE_CODE (arg0) == NE_EXPR
14037 && integer_zerop (TREE_OPERAND (arg0, 1))
14038 && integer_pow2p (arg1)
14039 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14040 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14041 arg1, OEP_ONLY_CONST))
14042 return pedantic_non_lvalue_loc (loc,
14043 fold_convert_loc (loc, type,
14044 TREE_OPERAND (arg0, 0)));
14045
14046 /* Disable the transformations below for vectors, since
14047 fold_binary_op_with_conditional_arg may undo them immediately,
14048 yielding an infinite loop. */
14049 if (code == VEC_COND_EXPR)
14050 return NULL_TREE;
14051
14052 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14053 if (integer_zerop (op2)
14054 && truth_value_p (TREE_CODE (arg0))
14055 && truth_value_p (TREE_CODE (arg1))
14056 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14057 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14058 : TRUTH_ANDIF_EXPR,
14059 type, fold_convert_loc (loc, type, arg0), arg1);
14060
14061 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14062 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14063 && truth_value_p (TREE_CODE (arg0))
14064 && truth_value_p (TREE_CODE (arg1))
14065 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14066 {
14067 location_t loc0 = expr_location_or (arg0, loc);
14068 /* Only perform transformation if ARG0 is easily inverted. */
14069 tem = fold_invert_truthvalue (loc0, arg0);
14070 if (tem)
14071 return fold_build2_loc (loc, code == VEC_COND_EXPR
14072 ? BIT_IOR_EXPR
14073 : TRUTH_ORIF_EXPR,
14074 type, fold_convert_loc (loc, type, tem),
14075 arg1);
14076 }
14077
14078 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14079 if (integer_zerop (arg1)
14080 && truth_value_p (TREE_CODE (arg0))
14081 && truth_value_p (TREE_CODE (op2))
14082 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14083 {
14084 location_t loc0 = expr_location_or (arg0, loc);
14085 /* Only perform transformation if ARG0 is easily inverted. */
14086 tem = fold_invert_truthvalue (loc0, arg0);
14087 if (tem)
14088 return fold_build2_loc (loc, code == VEC_COND_EXPR
14089 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14090 type, fold_convert_loc (loc, type, tem),
14091 op2);
14092 }
14093
14094 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14095 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14096 && truth_value_p (TREE_CODE (arg0))
14097 && truth_value_p (TREE_CODE (op2))
14098 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14099 return fold_build2_loc (loc, code == VEC_COND_EXPR
14100 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14101 type, fold_convert_loc (loc, type, arg0), op2);
14102
14103 return NULL_TREE;
14104
14105 case CALL_EXPR:
14106 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14107 of fold_ternary on them. */
14108 gcc_unreachable ();
14109
14110 case BIT_FIELD_REF:
14111 if ((TREE_CODE (arg0) == VECTOR_CST
14112 || (TREE_CODE (arg0) == CONSTRUCTOR
14113 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14114 && (type == TREE_TYPE (TREE_TYPE (arg0))
14115 || (TREE_CODE (type) == VECTOR_TYPE
14116 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14117 {
14118 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14119 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14120 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14121 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14122
14123 if (n != 0
14124 && (idx % width) == 0
14125 && (n % width) == 0
14126 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14127 {
14128 idx = idx / width;
14129 n = n / width;
14130
14131 if (TREE_CODE (arg0) == VECTOR_CST)
14132 {
14133 if (n == 1)
14134 return VECTOR_CST_ELT (arg0, idx);
14135
14136 tree *vals = XALLOCAVEC (tree, n);
14137 for (unsigned i = 0; i < n; ++i)
14138 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14139 return build_vector (type, vals);
14140 }
14141
14142 /* Constructor elements can be subvectors. */
14143 unsigned HOST_WIDE_INT k = 1;
14144 if (CONSTRUCTOR_NELTS (arg0) != 0)
14145 {
14146 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14147 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14148 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14149 }
14150
14151 /* We keep an exact subset of the constructor elements. */
14152 if ((idx % k) == 0 && (n % k) == 0)
14153 {
14154 if (CONSTRUCTOR_NELTS (arg0) == 0)
14155 return build_constructor (type, NULL);
14156 idx /= k;
14157 n /= k;
14158 if (n == 1)
14159 {
14160 if (idx < CONSTRUCTOR_NELTS (arg0))
14161 return CONSTRUCTOR_ELT (arg0, idx)->value;
14162 return build_zero_cst (type);
14163 }
14164
14165 vec<constructor_elt, va_gc> *vals;
14166 vec_alloc (vals, n);
14167 for (unsigned i = 0;
14168 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14169 ++i)
14170 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14171 CONSTRUCTOR_ELT
14172 (arg0, idx + i)->value);
14173 return build_constructor (type, vals);
14174 }
14175 /* The bitfield references a single constructor element. */
14176 else if (idx + n <= (idx / k + 1) * k)
14177 {
14178 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14179 return build_zero_cst (type);
14180 else if (n == k)
14181 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14182 else
14183 return fold_build3_loc (loc, code, type,
14184 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14185 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14186 }
14187 }
14188 }
14189
14190 /* A bit-field-ref that referenced the full argument can be stripped. */
14191 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14192 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14193 && integer_zerop (op2))
14194 return fold_convert_loc (loc, type, arg0);
14195
14196 /* On constants we can use native encode/interpret to constant
14197 fold (nearly) all BIT_FIELD_REFs. */
14198 if (CONSTANT_CLASS_P (arg0)
14199 && can_native_interpret_type_p (type)
14200 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14201 /* This limitation should not be necessary, we just need to
14202 round this up to mode size. */
14203 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14204 /* Need bit-shifting of the buffer to relax the following. */
14205 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14206 {
14207 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14208 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14209 unsigned HOST_WIDE_INT clen;
14210 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14211 /* ??? We cannot tell native_encode_expr to start at
14212 some random byte only. So limit us to a reasonable amount
14213 of work. */
14214 if (clen <= 4096)
14215 {
14216 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14217 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14218 if (len > 0
14219 && len * BITS_PER_UNIT >= bitpos + bitsize)
14220 {
14221 tree v = native_interpret_expr (type,
14222 b + bitpos / BITS_PER_UNIT,
14223 bitsize / BITS_PER_UNIT);
14224 if (v)
14225 return v;
14226 }
14227 }
14228 }
14229
14230 return NULL_TREE;
14231
14232 case FMA_EXPR:
14233 /* For integers we can decompose the FMA if possible. */
14234 if (TREE_CODE (arg0) == INTEGER_CST
14235 && TREE_CODE (arg1) == INTEGER_CST)
14236 return fold_build2_loc (loc, PLUS_EXPR, type,
14237 const_binop (MULT_EXPR, arg0, arg1), arg2);
14238 if (integer_zerop (arg2))
14239 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14240
14241 return fold_fma (loc, type, arg0, arg1, arg2);
14242
14243 case VEC_PERM_EXPR:
14244 if (TREE_CODE (arg2) == VECTOR_CST)
14245 {
14246 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14247 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14248 bool need_mask_canon = false;
14249 bool all_in_vec0 = true;
14250 bool all_in_vec1 = true;
14251 bool maybe_identity = true;
14252 bool single_arg = (op0 == op1);
14253 bool changed = false;
14254
14255 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14256 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14257 for (i = 0; i < nelts; i++)
14258 {
14259 tree val = VECTOR_CST_ELT (arg2, i);
14260 if (TREE_CODE (val) != INTEGER_CST)
14261 return NULL_TREE;
14262
14263 /* Make sure that the perm value is in an acceptable
14264 range. */
14265 wide_int t = val;
14266 if (wi::gtu_p (t, mask))
14267 {
14268 need_mask_canon = true;
14269 sel[i] = t.to_uhwi () & mask;
14270 }
14271 else
14272 sel[i] = t.to_uhwi ();
14273
14274 if (sel[i] < nelts)
14275 all_in_vec1 = false;
14276 else
14277 all_in_vec0 = false;
14278
14279 if ((sel[i] & (nelts-1)) != i)
14280 maybe_identity = false;
14281 }
14282
14283 if (maybe_identity)
14284 {
14285 if (all_in_vec0)
14286 return op0;
14287 if (all_in_vec1)
14288 return op1;
14289 }
14290
14291 if (all_in_vec0)
14292 op1 = op0;
14293 else if (all_in_vec1)
14294 {
14295 op0 = op1;
14296 for (i = 0; i < nelts; i++)
14297 sel[i] -= nelts;
14298 need_mask_canon = true;
14299 }
14300
14301 if ((TREE_CODE (op0) == VECTOR_CST
14302 || TREE_CODE (op0) == CONSTRUCTOR)
14303 && (TREE_CODE (op1) == VECTOR_CST
14304 || TREE_CODE (op1) == CONSTRUCTOR))
14305 {
14306 tree t = fold_vec_perm (type, op0, op1, sel);
14307 if (t != NULL_TREE)
14308 return t;
14309 }
14310
14311 if (op0 == op1 && !single_arg)
14312 changed = true;
14313
14314 if (need_mask_canon && arg2 == op2)
14315 {
14316 tree *tsel = XALLOCAVEC (tree, nelts);
14317 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14318 for (i = 0; i < nelts; i++)
14319 tsel[i] = build_int_cst (eltype, sel[i]);
14320 op2 = build_vector (TREE_TYPE (arg2), tsel);
14321 changed = true;
14322 }
14323
14324 if (changed)
14325 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14326 }
14327 return NULL_TREE;
14328
14329 default:
14330 return NULL_TREE;
14331 } /* switch (code) */
14332 }
14333
14334 /* Perform constant folding and related simplification of EXPR.
14335 The related simplifications include x*1 => x, x*0 => 0, etc.,
14336 and application of the associative law.
14337 NOP_EXPR conversions may be removed freely (as long as we
14338 are careful not to change the type of the overall expression).
14339 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14340 but we can constant-fold them if they have constant operands. */
14341
14342 #ifdef ENABLE_FOLD_CHECKING
14343 # define fold(x) fold_1 (x)
14344 static tree fold_1 (tree);
14345 static
14346 #endif
14347 tree
14348 fold (tree expr)
14349 {
14350 const tree t = expr;
14351 enum tree_code code = TREE_CODE (t);
14352 enum tree_code_class kind = TREE_CODE_CLASS (code);
14353 tree tem;
14354 location_t loc = EXPR_LOCATION (expr);
14355
14356 /* Return right away if a constant. */
14357 if (kind == tcc_constant)
14358 return t;
14359
14360 /* CALL_EXPR-like objects with variable numbers of operands are
14361 treated specially. */
14362 if (kind == tcc_vl_exp)
14363 {
14364 if (code == CALL_EXPR)
14365 {
14366 tem = fold_call_expr (loc, expr, false);
14367 return tem ? tem : expr;
14368 }
14369 return expr;
14370 }
14371
14372 if (IS_EXPR_CODE_CLASS (kind))
14373 {
14374 tree type = TREE_TYPE (t);
14375 tree op0, op1, op2;
14376
14377 switch (TREE_CODE_LENGTH (code))
14378 {
14379 case 1:
14380 op0 = TREE_OPERAND (t, 0);
14381 tem = fold_unary_loc (loc, code, type, op0);
14382 return tem ? tem : expr;
14383 case 2:
14384 op0 = TREE_OPERAND (t, 0);
14385 op1 = TREE_OPERAND (t, 1);
14386 tem = fold_binary_loc (loc, code, type, op0, op1);
14387 return tem ? tem : expr;
14388 case 3:
14389 op0 = TREE_OPERAND (t, 0);
14390 op1 = TREE_OPERAND (t, 1);
14391 op2 = TREE_OPERAND (t, 2);
14392 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14393 return tem ? tem : expr;
14394 default:
14395 break;
14396 }
14397 }
14398
14399 switch (code)
14400 {
14401 case ARRAY_REF:
14402 {
14403 tree op0 = TREE_OPERAND (t, 0);
14404 tree op1 = TREE_OPERAND (t, 1);
14405
14406 if (TREE_CODE (op1) == INTEGER_CST
14407 && TREE_CODE (op0) == CONSTRUCTOR
14408 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14409 {
14410 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14411 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14412 unsigned HOST_WIDE_INT begin = 0;
14413
14414 /* Find a matching index by means of a binary search. */
14415 while (begin != end)
14416 {
14417 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14418 tree index = (*elts)[middle].index;
14419
14420 if (TREE_CODE (index) == INTEGER_CST
14421 && tree_int_cst_lt (index, op1))
14422 begin = middle + 1;
14423 else if (TREE_CODE (index) == INTEGER_CST
14424 && tree_int_cst_lt (op1, index))
14425 end = middle;
14426 else if (TREE_CODE (index) == RANGE_EXPR
14427 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14428 begin = middle + 1;
14429 else if (TREE_CODE (index) == RANGE_EXPR
14430 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14431 end = middle;
14432 else
14433 return (*elts)[middle].value;
14434 }
14435 }
14436
14437 return t;
14438 }
14439
14440 /* Return a VECTOR_CST if possible. */
14441 case CONSTRUCTOR:
14442 {
14443 tree type = TREE_TYPE (t);
14444 if (TREE_CODE (type) != VECTOR_TYPE)
14445 return t;
14446
14447 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14448 unsigned HOST_WIDE_INT idx, pos = 0;
14449 tree value;
14450
14451 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14452 {
14453 if (!CONSTANT_CLASS_P (value))
14454 return t;
14455 if (TREE_CODE (value) == VECTOR_CST)
14456 {
14457 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14458 vec[pos++] = VECTOR_CST_ELT (value, i);
14459 }
14460 else
14461 vec[pos++] = value;
14462 }
14463 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14464 vec[pos] = build_zero_cst (TREE_TYPE (type));
14465
14466 return build_vector (type, vec);
14467 }
14468
14469 case CONST_DECL:
14470 return fold (DECL_INITIAL (t));
14471
14472 default:
14473 return t;
14474 } /* switch (code) */
14475 }
14476
14477 #ifdef ENABLE_FOLD_CHECKING
14478 #undef fold
14479
14480 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14481 hash_table<pointer_hash<const tree_node> > *);
14482 static void fold_check_failed (const_tree, const_tree);
14483 void print_fold_checksum (const_tree);
14484
14485 /* When --enable-checking=fold, compute a digest of expr before
14486 and after actual fold call to see if fold did not accidentally
14487 change original expr. */
14488
14489 tree
14490 fold (tree expr)
14491 {
14492 tree ret;
14493 struct md5_ctx ctx;
14494 unsigned char checksum_before[16], checksum_after[16];
14495 hash_table<pointer_hash<const tree_node> > ht (32);
14496
14497 md5_init_ctx (&ctx);
14498 fold_checksum_tree (expr, &ctx, &ht);
14499 md5_finish_ctx (&ctx, checksum_before);
14500 ht.empty ();
14501
14502 ret = fold_1 (expr);
14503
14504 md5_init_ctx (&ctx);
14505 fold_checksum_tree (expr, &ctx, &ht);
14506 md5_finish_ctx (&ctx, checksum_after);
14507
14508 if (memcmp (checksum_before, checksum_after, 16))
14509 fold_check_failed (expr, ret);
14510
14511 return ret;
14512 }
14513
14514 void
14515 print_fold_checksum (const_tree expr)
14516 {
14517 struct md5_ctx ctx;
14518 unsigned char checksum[16], cnt;
14519 hash_table<pointer_hash<const tree_node> > ht (32);
14520
14521 md5_init_ctx (&ctx);
14522 fold_checksum_tree (expr, &ctx, &ht);
14523 md5_finish_ctx (&ctx, checksum);
14524 for (cnt = 0; cnt < 16; ++cnt)
14525 fprintf (stderr, "%02x", checksum[cnt]);
14526 putc ('\n', stderr);
14527 }
14528
14529 static void
14530 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14531 {
14532 internal_error ("fold check: original tree changed by fold");
14533 }
14534
14535 static void
14536 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14537 hash_table<pointer_hash <const tree_node> > *ht)
14538 {
14539 const tree_node **slot;
14540 enum tree_code code;
14541 union tree_node buf;
14542 int i, len;
14543
14544 recursive_label:
14545 if (expr == NULL)
14546 return;
14547 slot = ht->find_slot (expr, INSERT);
14548 if (*slot != NULL)
14549 return;
14550 *slot = expr;
14551 code = TREE_CODE (expr);
14552 if (TREE_CODE_CLASS (code) == tcc_declaration
14553 && DECL_ASSEMBLER_NAME_SET_P (expr))
14554 {
14555 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14556 memcpy ((char *) &buf, expr, tree_size (expr));
14557 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14558 expr = (tree) &buf;
14559 }
14560 else if (TREE_CODE_CLASS (code) == tcc_type
14561 && (TYPE_POINTER_TO (expr)
14562 || TYPE_REFERENCE_TO (expr)
14563 || TYPE_CACHED_VALUES_P (expr)
14564 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14565 || TYPE_NEXT_VARIANT (expr)))
14566 {
14567 /* Allow these fields to be modified. */
14568 tree tmp;
14569 memcpy ((char *) &buf, expr, tree_size (expr));
14570 expr = tmp = (tree) &buf;
14571 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14572 TYPE_POINTER_TO (tmp) = NULL;
14573 TYPE_REFERENCE_TO (tmp) = NULL;
14574 TYPE_NEXT_VARIANT (tmp) = NULL;
14575 if (TYPE_CACHED_VALUES_P (tmp))
14576 {
14577 TYPE_CACHED_VALUES_P (tmp) = 0;
14578 TYPE_CACHED_VALUES (tmp) = NULL;
14579 }
14580 }
14581 md5_process_bytes (expr, tree_size (expr), ctx);
14582 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14583 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14584 if (TREE_CODE_CLASS (code) != tcc_type
14585 && TREE_CODE_CLASS (code) != tcc_declaration
14586 && code != TREE_LIST
14587 && code != SSA_NAME
14588 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14589 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14590 switch (TREE_CODE_CLASS (code))
14591 {
14592 case tcc_constant:
14593 switch (code)
14594 {
14595 case STRING_CST:
14596 md5_process_bytes (TREE_STRING_POINTER (expr),
14597 TREE_STRING_LENGTH (expr), ctx);
14598 break;
14599 case COMPLEX_CST:
14600 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14601 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14602 break;
14603 case VECTOR_CST:
14604 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14605 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14606 break;
14607 default:
14608 break;
14609 }
14610 break;
14611 case tcc_exceptional:
14612 switch (code)
14613 {
14614 case TREE_LIST:
14615 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14616 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14617 expr = TREE_CHAIN (expr);
14618 goto recursive_label;
14619 break;
14620 case TREE_VEC:
14621 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14622 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14623 break;
14624 default:
14625 break;
14626 }
14627 break;
14628 case tcc_expression:
14629 case tcc_reference:
14630 case tcc_comparison:
14631 case tcc_unary:
14632 case tcc_binary:
14633 case tcc_statement:
14634 case tcc_vl_exp:
14635 len = TREE_OPERAND_LENGTH (expr);
14636 for (i = 0; i < len; ++i)
14637 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14638 break;
14639 case tcc_declaration:
14640 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14641 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14642 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14643 {
14644 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14645 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14646 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14647 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14648 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14649 }
14650
14651 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14652 {
14653 if (TREE_CODE (expr) == FUNCTION_DECL)
14654 {
14655 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14656 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14657 }
14658 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14659 }
14660 break;
14661 case tcc_type:
14662 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14663 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14664 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14665 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14666 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14667 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14668 if (INTEGRAL_TYPE_P (expr)
14669 || SCALAR_FLOAT_TYPE_P (expr))
14670 {
14671 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14672 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14673 }
14674 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14675 if (TREE_CODE (expr) == RECORD_TYPE
14676 || TREE_CODE (expr) == UNION_TYPE
14677 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14678 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14679 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14680 break;
14681 default:
14682 break;
14683 }
14684 }
14685
14686 /* Helper function for outputting the checksum of a tree T. When
14687 debugging with gdb, you can "define mynext" to be "next" followed
14688 by "call debug_fold_checksum (op0)", then just trace down till the
14689 outputs differ. */
14690
14691 DEBUG_FUNCTION void
14692 debug_fold_checksum (const_tree t)
14693 {
14694 int i;
14695 unsigned char checksum[16];
14696 struct md5_ctx ctx;
14697 hash_table<pointer_hash<const tree_node> > ht (32);
14698
14699 md5_init_ctx (&ctx);
14700 fold_checksum_tree (t, &ctx, &ht);
14701 md5_finish_ctx (&ctx, checksum);
14702 ht.empty ();
14703
14704 for (i = 0; i < 16; i++)
14705 fprintf (stderr, "%d ", checksum[i]);
14706
14707 fprintf (stderr, "\n");
14708 }
14709
14710 #endif
14711
14712 /* Fold a unary tree expression with code CODE of type TYPE with an
14713 operand OP0. LOC is the location of the resulting expression.
14714 Return a folded expression if successful. Otherwise, return a tree
14715 expression with code CODE of type TYPE with an operand OP0. */
14716
14717 tree
14718 fold_build1_stat_loc (location_t loc,
14719 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14720 {
14721 tree tem;
14722 #ifdef ENABLE_FOLD_CHECKING
14723 unsigned char checksum_before[16], checksum_after[16];
14724 struct md5_ctx ctx;
14725 hash_table<pointer_hash<const tree_node> > ht (32);
14726
14727 md5_init_ctx (&ctx);
14728 fold_checksum_tree (op0, &ctx, &ht);
14729 md5_finish_ctx (&ctx, checksum_before);
14730 ht.empty ();
14731 #endif
14732
14733 tem = fold_unary_loc (loc, code, type, op0);
14734 if (!tem)
14735 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14736
14737 #ifdef ENABLE_FOLD_CHECKING
14738 md5_init_ctx (&ctx);
14739 fold_checksum_tree (op0, &ctx, &ht);
14740 md5_finish_ctx (&ctx, checksum_after);
14741
14742 if (memcmp (checksum_before, checksum_after, 16))
14743 fold_check_failed (op0, tem);
14744 #endif
14745 return tem;
14746 }
14747
14748 /* Fold a binary tree expression with code CODE of type TYPE with
14749 operands OP0 and OP1. LOC is the location of the resulting
14750 expression. Return a folded expression if successful. Otherwise,
14751 return a tree expression with code CODE of type TYPE with operands
14752 OP0 and OP1. */
14753
14754 tree
14755 fold_build2_stat_loc (location_t loc,
14756 enum tree_code code, tree type, tree op0, tree op1
14757 MEM_STAT_DECL)
14758 {
14759 tree tem;
14760 #ifdef ENABLE_FOLD_CHECKING
14761 unsigned char checksum_before_op0[16],
14762 checksum_before_op1[16],
14763 checksum_after_op0[16],
14764 checksum_after_op1[16];
14765 struct md5_ctx ctx;
14766 hash_table<pointer_hash<const tree_node> > ht (32);
14767
14768 md5_init_ctx (&ctx);
14769 fold_checksum_tree (op0, &ctx, &ht);
14770 md5_finish_ctx (&ctx, checksum_before_op0);
14771 ht.empty ();
14772
14773 md5_init_ctx (&ctx);
14774 fold_checksum_tree (op1, &ctx, &ht);
14775 md5_finish_ctx (&ctx, checksum_before_op1);
14776 ht.empty ();
14777 #endif
14778
14779 tem = fold_binary_loc (loc, code, type, op0, op1);
14780 if (!tem)
14781 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14782
14783 #ifdef ENABLE_FOLD_CHECKING
14784 md5_init_ctx (&ctx);
14785 fold_checksum_tree (op0, &ctx, &ht);
14786 md5_finish_ctx (&ctx, checksum_after_op0);
14787 ht.empty ();
14788
14789 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14790 fold_check_failed (op0, tem);
14791
14792 md5_init_ctx (&ctx);
14793 fold_checksum_tree (op1, &ctx, &ht);
14794 md5_finish_ctx (&ctx, checksum_after_op1);
14795
14796 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14797 fold_check_failed (op1, tem);
14798 #endif
14799 return tem;
14800 }
14801
14802 /* Fold a ternary tree expression with code CODE of type TYPE with
14803 operands OP0, OP1, and OP2. Return a folded expression if
14804 successful. Otherwise, return a tree expression with code CODE of
14805 type TYPE with operands OP0, OP1, and OP2. */
14806
14807 tree
14808 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14809 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14810 {
14811 tree tem;
14812 #ifdef ENABLE_FOLD_CHECKING
14813 unsigned char checksum_before_op0[16],
14814 checksum_before_op1[16],
14815 checksum_before_op2[16],
14816 checksum_after_op0[16],
14817 checksum_after_op1[16],
14818 checksum_after_op2[16];
14819 struct md5_ctx ctx;
14820 hash_table<pointer_hash<const tree_node> > ht (32);
14821
14822 md5_init_ctx (&ctx);
14823 fold_checksum_tree (op0, &ctx, &ht);
14824 md5_finish_ctx (&ctx, checksum_before_op0);
14825 ht.empty ();
14826
14827 md5_init_ctx (&ctx);
14828 fold_checksum_tree (op1, &ctx, &ht);
14829 md5_finish_ctx (&ctx, checksum_before_op1);
14830 ht.empty ();
14831
14832 md5_init_ctx (&ctx);
14833 fold_checksum_tree (op2, &ctx, &ht);
14834 md5_finish_ctx (&ctx, checksum_before_op2);
14835 ht.empty ();
14836 #endif
14837
14838 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14839 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14840 if (!tem)
14841 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14842
14843 #ifdef ENABLE_FOLD_CHECKING
14844 md5_init_ctx (&ctx);
14845 fold_checksum_tree (op0, &ctx, &ht);
14846 md5_finish_ctx (&ctx, checksum_after_op0);
14847 ht.empty ();
14848
14849 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14850 fold_check_failed (op0, tem);
14851
14852 md5_init_ctx (&ctx);
14853 fold_checksum_tree (op1, &ctx, &ht);
14854 md5_finish_ctx (&ctx, checksum_after_op1);
14855 ht.empty ();
14856
14857 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14858 fold_check_failed (op1, tem);
14859
14860 md5_init_ctx (&ctx);
14861 fold_checksum_tree (op2, &ctx, &ht);
14862 md5_finish_ctx (&ctx, checksum_after_op2);
14863
14864 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14865 fold_check_failed (op2, tem);
14866 #endif
14867 return tem;
14868 }
14869
14870 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14871 arguments in ARGARRAY, and a null static chain.
14872 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14873 of type TYPE from the given operands as constructed by build_call_array. */
14874
14875 tree
14876 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14877 int nargs, tree *argarray)
14878 {
14879 tree tem;
14880 #ifdef ENABLE_FOLD_CHECKING
14881 unsigned char checksum_before_fn[16],
14882 checksum_before_arglist[16],
14883 checksum_after_fn[16],
14884 checksum_after_arglist[16];
14885 struct md5_ctx ctx;
14886 hash_table<pointer_hash<const tree_node> > ht (32);
14887 int i;
14888
14889 md5_init_ctx (&ctx);
14890 fold_checksum_tree (fn, &ctx, &ht);
14891 md5_finish_ctx (&ctx, checksum_before_fn);
14892 ht.empty ();
14893
14894 md5_init_ctx (&ctx);
14895 for (i = 0; i < nargs; i++)
14896 fold_checksum_tree (argarray[i], &ctx, &ht);
14897 md5_finish_ctx (&ctx, checksum_before_arglist);
14898 ht.empty ();
14899 #endif
14900
14901 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14902
14903 #ifdef ENABLE_FOLD_CHECKING
14904 md5_init_ctx (&ctx);
14905 fold_checksum_tree (fn, &ctx, &ht);
14906 md5_finish_ctx (&ctx, checksum_after_fn);
14907 ht.empty ();
14908
14909 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14910 fold_check_failed (fn, tem);
14911
14912 md5_init_ctx (&ctx);
14913 for (i = 0; i < nargs; i++)
14914 fold_checksum_tree (argarray[i], &ctx, &ht);
14915 md5_finish_ctx (&ctx, checksum_after_arglist);
14916
14917 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14918 fold_check_failed (NULL_TREE, tem);
14919 #endif
14920 return tem;
14921 }
14922
14923 /* Perform constant folding and related simplification of initializer
14924 expression EXPR. These behave identically to "fold_buildN" but ignore
14925 potential run-time traps and exceptions that fold must preserve. */
14926
14927 #define START_FOLD_INIT \
14928 int saved_signaling_nans = flag_signaling_nans;\
14929 int saved_trapping_math = flag_trapping_math;\
14930 int saved_rounding_math = flag_rounding_math;\
14931 int saved_trapv = flag_trapv;\
14932 int saved_folding_initializer = folding_initializer;\
14933 flag_signaling_nans = 0;\
14934 flag_trapping_math = 0;\
14935 flag_rounding_math = 0;\
14936 flag_trapv = 0;\
14937 folding_initializer = 1;
14938
14939 #define END_FOLD_INIT \
14940 flag_signaling_nans = saved_signaling_nans;\
14941 flag_trapping_math = saved_trapping_math;\
14942 flag_rounding_math = saved_rounding_math;\
14943 flag_trapv = saved_trapv;\
14944 folding_initializer = saved_folding_initializer;
14945
14946 tree
14947 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14948 tree type, tree op)
14949 {
14950 tree result;
14951 START_FOLD_INIT;
14952
14953 result = fold_build1_loc (loc, code, type, op);
14954
14955 END_FOLD_INIT;
14956 return result;
14957 }
14958
14959 tree
14960 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14961 tree type, tree op0, tree op1)
14962 {
14963 tree result;
14964 START_FOLD_INIT;
14965
14966 result = fold_build2_loc (loc, code, type, op0, op1);
14967
14968 END_FOLD_INIT;
14969 return result;
14970 }
14971
14972 tree
14973 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14974 int nargs, tree *argarray)
14975 {
14976 tree result;
14977 START_FOLD_INIT;
14978
14979 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14980
14981 END_FOLD_INIT;
14982 return result;
14983 }
14984
14985 #undef START_FOLD_INIT
14986 #undef END_FOLD_INIT
14987
14988 /* Determine if first argument is a multiple of second argument. Return 0 if
14989 it is not, or we cannot easily determined it to be.
14990
14991 An example of the sort of thing we care about (at this point; this routine
14992 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14993 fold cases do now) is discovering that
14994
14995 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14996
14997 is a multiple of
14998
14999 SAVE_EXPR (J * 8)
15000
15001 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15002
15003 This code also handles discovering that
15004
15005 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15006
15007 is a multiple of 8 so we don't have to worry about dealing with a
15008 possible remainder.
15009
15010 Note that we *look* inside a SAVE_EXPR only to determine how it was
15011 calculated; it is not safe for fold to do much of anything else with the
15012 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15013 at run time. For example, the latter example above *cannot* be implemented
15014 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15015 evaluation time of the original SAVE_EXPR is not necessarily the same at
15016 the time the new expression is evaluated. The only optimization of this
15017 sort that would be valid is changing
15018
15019 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15020
15021 divided by 8 to
15022
15023 SAVE_EXPR (I) * SAVE_EXPR (J)
15024
15025 (where the same SAVE_EXPR (J) is used in the original and the
15026 transformed version). */
15027
15028 int
15029 multiple_of_p (tree type, const_tree top, const_tree bottom)
15030 {
15031 if (operand_equal_p (top, bottom, 0))
15032 return 1;
15033
15034 if (TREE_CODE (type) != INTEGER_TYPE)
15035 return 0;
15036
15037 switch (TREE_CODE (top))
15038 {
15039 case BIT_AND_EXPR:
15040 /* Bitwise and provides a power of two multiple. If the mask is
15041 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15042 if (!integer_pow2p (bottom))
15043 return 0;
15044 /* FALLTHRU */
15045
15046 case MULT_EXPR:
15047 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15048 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15049
15050 case PLUS_EXPR:
15051 case MINUS_EXPR:
15052 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15053 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15054
15055 case LSHIFT_EXPR:
15056 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15057 {
15058 tree op1, t1;
15059
15060 op1 = TREE_OPERAND (top, 1);
15061 /* const_binop may not detect overflow correctly,
15062 so check for it explicitly here. */
15063 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15064 && 0 != (t1 = fold_convert (type,
15065 const_binop (LSHIFT_EXPR,
15066 size_one_node,
15067 op1)))
15068 && !TREE_OVERFLOW (t1))
15069 return multiple_of_p (type, t1, bottom);
15070 }
15071 return 0;
15072
15073 case NOP_EXPR:
15074 /* Can't handle conversions from non-integral or wider integral type. */
15075 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15076 || (TYPE_PRECISION (type)
15077 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15078 return 0;
15079
15080 /* .. fall through ... */
15081
15082 case SAVE_EXPR:
15083 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15084
15085 case COND_EXPR:
15086 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15087 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15088
15089 case INTEGER_CST:
15090 if (TREE_CODE (bottom) != INTEGER_CST
15091 || integer_zerop (bottom)
15092 || (TYPE_UNSIGNED (type)
15093 && (tree_int_cst_sgn (top) < 0
15094 || tree_int_cst_sgn (bottom) < 0)))
15095 return 0;
15096 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15097 SIGNED);
15098
15099 default:
15100 return 0;
15101 }
15102 }
15103
15104 /* Return true if CODE or TYPE is known to be non-negative. */
15105
15106 static bool
15107 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15108 {
15109 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15110 && truth_value_p (code))
15111 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15112 have a signed:1 type (where the value is -1 and 0). */
15113 return true;
15114 return false;
15115 }
15116
15117 /* Return true if (CODE OP0) is known to be non-negative. If the return
15118 value is based on the assumption that signed overflow is undefined,
15119 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15120 *STRICT_OVERFLOW_P. */
15121
15122 bool
15123 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15124 bool *strict_overflow_p)
15125 {
15126 if (TYPE_UNSIGNED (type))
15127 return true;
15128
15129 switch (code)
15130 {
15131 case ABS_EXPR:
15132 /* We can't return 1 if flag_wrapv is set because
15133 ABS_EXPR<INT_MIN> = INT_MIN. */
15134 if (!INTEGRAL_TYPE_P (type))
15135 return true;
15136 if (TYPE_OVERFLOW_UNDEFINED (type))
15137 {
15138 *strict_overflow_p = true;
15139 return true;
15140 }
15141 break;
15142
15143 case NON_LVALUE_EXPR:
15144 case FLOAT_EXPR:
15145 case FIX_TRUNC_EXPR:
15146 return tree_expr_nonnegative_warnv_p (op0,
15147 strict_overflow_p);
15148
15149 case NOP_EXPR:
15150 {
15151 tree inner_type = TREE_TYPE (op0);
15152 tree outer_type = type;
15153
15154 if (TREE_CODE (outer_type) == REAL_TYPE)
15155 {
15156 if (TREE_CODE (inner_type) == REAL_TYPE)
15157 return tree_expr_nonnegative_warnv_p (op0,
15158 strict_overflow_p);
15159 if (INTEGRAL_TYPE_P (inner_type))
15160 {
15161 if (TYPE_UNSIGNED (inner_type))
15162 return true;
15163 return tree_expr_nonnegative_warnv_p (op0,
15164 strict_overflow_p);
15165 }
15166 }
15167 else if (INTEGRAL_TYPE_P (outer_type))
15168 {
15169 if (TREE_CODE (inner_type) == REAL_TYPE)
15170 return tree_expr_nonnegative_warnv_p (op0,
15171 strict_overflow_p);
15172 if (INTEGRAL_TYPE_P (inner_type))
15173 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15174 && TYPE_UNSIGNED (inner_type);
15175 }
15176 }
15177 break;
15178
15179 default:
15180 return tree_simple_nonnegative_warnv_p (code, type);
15181 }
15182
15183 /* We don't know sign of `t', so be conservative and return false. */
15184 return false;
15185 }
15186
15187 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15188 value is based on the assumption that signed overflow is undefined,
15189 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15190 *STRICT_OVERFLOW_P. */
15191
15192 bool
15193 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15194 tree op1, bool *strict_overflow_p)
15195 {
15196 if (TYPE_UNSIGNED (type))
15197 return true;
15198
15199 switch (code)
15200 {
15201 case POINTER_PLUS_EXPR:
15202 case PLUS_EXPR:
15203 if (FLOAT_TYPE_P (type))
15204 return (tree_expr_nonnegative_warnv_p (op0,
15205 strict_overflow_p)
15206 && tree_expr_nonnegative_warnv_p (op1,
15207 strict_overflow_p));
15208
15209 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15210 both unsigned and at least 2 bits shorter than the result. */
15211 if (TREE_CODE (type) == INTEGER_TYPE
15212 && TREE_CODE (op0) == NOP_EXPR
15213 && TREE_CODE (op1) == NOP_EXPR)
15214 {
15215 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15216 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15217 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15218 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15219 {
15220 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15221 TYPE_PRECISION (inner2)) + 1;
15222 return prec < TYPE_PRECISION (type);
15223 }
15224 }
15225 break;
15226
15227 case MULT_EXPR:
15228 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15229 {
15230 /* x * x is always non-negative for floating point x
15231 or without overflow. */
15232 if (operand_equal_p (op0, op1, 0)
15233 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15234 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15235 {
15236 if (TYPE_OVERFLOW_UNDEFINED (type))
15237 *strict_overflow_p = true;
15238 return true;
15239 }
15240 }
15241
15242 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15243 both unsigned and their total bits is shorter than the result. */
15244 if (TREE_CODE (type) == INTEGER_TYPE
15245 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15246 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15247 {
15248 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15249 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15250 : TREE_TYPE (op0);
15251 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15252 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15253 : TREE_TYPE (op1);
15254
15255 bool unsigned0 = TYPE_UNSIGNED (inner0);
15256 bool unsigned1 = TYPE_UNSIGNED (inner1);
15257
15258 if (TREE_CODE (op0) == INTEGER_CST)
15259 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15260
15261 if (TREE_CODE (op1) == INTEGER_CST)
15262 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15263
15264 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15265 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15266 {
15267 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15268 ? tree_int_cst_min_precision (op0, UNSIGNED)
15269 : TYPE_PRECISION (inner0);
15270
15271 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15272 ? tree_int_cst_min_precision (op1, UNSIGNED)
15273 : TYPE_PRECISION (inner1);
15274
15275 return precision0 + precision1 < TYPE_PRECISION (type);
15276 }
15277 }
15278 return false;
15279
15280 case BIT_AND_EXPR:
15281 case MAX_EXPR:
15282 return (tree_expr_nonnegative_warnv_p (op0,
15283 strict_overflow_p)
15284 || tree_expr_nonnegative_warnv_p (op1,
15285 strict_overflow_p));
15286
15287 case BIT_IOR_EXPR:
15288 case BIT_XOR_EXPR:
15289 case MIN_EXPR:
15290 case RDIV_EXPR:
15291 case TRUNC_DIV_EXPR:
15292 case CEIL_DIV_EXPR:
15293 case FLOOR_DIV_EXPR:
15294 case ROUND_DIV_EXPR:
15295 return (tree_expr_nonnegative_warnv_p (op0,
15296 strict_overflow_p)
15297 && tree_expr_nonnegative_warnv_p (op1,
15298 strict_overflow_p));
15299
15300 case TRUNC_MOD_EXPR:
15301 case CEIL_MOD_EXPR:
15302 case FLOOR_MOD_EXPR:
15303 case ROUND_MOD_EXPR:
15304 return tree_expr_nonnegative_warnv_p (op0,
15305 strict_overflow_p);
15306 default:
15307 return tree_simple_nonnegative_warnv_p (code, type);
15308 }
15309
15310 /* We don't know sign of `t', so be conservative and return false. */
15311 return false;
15312 }
15313
15314 /* Return true if T is known to be non-negative. If the return
15315 value is based on the assumption that signed overflow is undefined,
15316 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15317 *STRICT_OVERFLOW_P. */
15318
15319 bool
15320 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15321 {
15322 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15323 return true;
15324
15325 switch (TREE_CODE (t))
15326 {
15327 case INTEGER_CST:
15328 return tree_int_cst_sgn (t) >= 0;
15329
15330 case REAL_CST:
15331 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15332
15333 case FIXED_CST:
15334 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15335
15336 case COND_EXPR:
15337 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15338 strict_overflow_p)
15339 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15340 strict_overflow_p));
15341 default:
15342 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15343 TREE_TYPE (t));
15344 }
15345 /* We don't know sign of `t', so be conservative and return false. */
15346 return false;
15347 }
15348
15349 /* Return true if T is known to be non-negative. If the return
15350 value is based on the assumption that signed overflow is undefined,
15351 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15352 *STRICT_OVERFLOW_P. */
15353
15354 bool
15355 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15356 tree arg0, tree arg1, bool *strict_overflow_p)
15357 {
15358 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15359 switch (DECL_FUNCTION_CODE (fndecl))
15360 {
15361 CASE_FLT_FN (BUILT_IN_ACOS):
15362 CASE_FLT_FN (BUILT_IN_ACOSH):
15363 CASE_FLT_FN (BUILT_IN_CABS):
15364 CASE_FLT_FN (BUILT_IN_COSH):
15365 CASE_FLT_FN (BUILT_IN_ERFC):
15366 CASE_FLT_FN (BUILT_IN_EXP):
15367 CASE_FLT_FN (BUILT_IN_EXP10):
15368 CASE_FLT_FN (BUILT_IN_EXP2):
15369 CASE_FLT_FN (BUILT_IN_FABS):
15370 CASE_FLT_FN (BUILT_IN_FDIM):
15371 CASE_FLT_FN (BUILT_IN_HYPOT):
15372 CASE_FLT_FN (BUILT_IN_POW10):
15373 CASE_INT_FN (BUILT_IN_FFS):
15374 CASE_INT_FN (BUILT_IN_PARITY):
15375 CASE_INT_FN (BUILT_IN_POPCOUNT):
15376 CASE_INT_FN (BUILT_IN_CLZ):
15377 CASE_INT_FN (BUILT_IN_CLRSB):
15378 case BUILT_IN_BSWAP32:
15379 case BUILT_IN_BSWAP64:
15380 /* Always true. */
15381 return true;
15382
15383 CASE_FLT_FN (BUILT_IN_SQRT):
15384 /* sqrt(-0.0) is -0.0. */
15385 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15386 return true;
15387 return tree_expr_nonnegative_warnv_p (arg0,
15388 strict_overflow_p);
15389
15390 CASE_FLT_FN (BUILT_IN_ASINH):
15391 CASE_FLT_FN (BUILT_IN_ATAN):
15392 CASE_FLT_FN (BUILT_IN_ATANH):
15393 CASE_FLT_FN (BUILT_IN_CBRT):
15394 CASE_FLT_FN (BUILT_IN_CEIL):
15395 CASE_FLT_FN (BUILT_IN_ERF):
15396 CASE_FLT_FN (BUILT_IN_EXPM1):
15397 CASE_FLT_FN (BUILT_IN_FLOOR):
15398 CASE_FLT_FN (BUILT_IN_FMOD):
15399 CASE_FLT_FN (BUILT_IN_FREXP):
15400 CASE_FLT_FN (BUILT_IN_ICEIL):
15401 CASE_FLT_FN (BUILT_IN_IFLOOR):
15402 CASE_FLT_FN (BUILT_IN_IRINT):
15403 CASE_FLT_FN (BUILT_IN_IROUND):
15404 CASE_FLT_FN (BUILT_IN_LCEIL):
15405 CASE_FLT_FN (BUILT_IN_LDEXP):
15406 CASE_FLT_FN (BUILT_IN_LFLOOR):
15407 CASE_FLT_FN (BUILT_IN_LLCEIL):
15408 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15409 CASE_FLT_FN (BUILT_IN_LLRINT):
15410 CASE_FLT_FN (BUILT_IN_LLROUND):
15411 CASE_FLT_FN (BUILT_IN_LRINT):
15412 CASE_FLT_FN (BUILT_IN_LROUND):
15413 CASE_FLT_FN (BUILT_IN_MODF):
15414 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15415 CASE_FLT_FN (BUILT_IN_RINT):
15416 CASE_FLT_FN (BUILT_IN_ROUND):
15417 CASE_FLT_FN (BUILT_IN_SCALB):
15418 CASE_FLT_FN (BUILT_IN_SCALBLN):
15419 CASE_FLT_FN (BUILT_IN_SCALBN):
15420 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15421 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15422 CASE_FLT_FN (BUILT_IN_SINH):
15423 CASE_FLT_FN (BUILT_IN_TANH):
15424 CASE_FLT_FN (BUILT_IN_TRUNC):
15425 /* True if the 1st argument is nonnegative. */
15426 return tree_expr_nonnegative_warnv_p (arg0,
15427 strict_overflow_p);
15428
15429 CASE_FLT_FN (BUILT_IN_FMAX):
15430 /* True if the 1st OR 2nd arguments are nonnegative. */
15431 return (tree_expr_nonnegative_warnv_p (arg0,
15432 strict_overflow_p)
15433 || (tree_expr_nonnegative_warnv_p (arg1,
15434 strict_overflow_p)));
15435
15436 CASE_FLT_FN (BUILT_IN_FMIN):
15437 /* True if the 1st AND 2nd arguments are nonnegative. */
15438 return (tree_expr_nonnegative_warnv_p (arg0,
15439 strict_overflow_p)
15440 && (tree_expr_nonnegative_warnv_p (arg1,
15441 strict_overflow_p)));
15442
15443 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15444 /* True if the 2nd argument is nonnegative. */
15445 return tree_expr_nonnegative_warnv_p (arg1,
15446 strict_overflow_p);
15447
15448 CASE_FLT_FN (BUILT_IN_POWI):
15449 /* True if the 1st argument is nonnegative or the second
15450 argument is an even integer. */
15451 if (TREE_CODE (arg1) == INTEGER_CST
15452 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15453 return true;
15454 return tree_expr_nonnegative_warnv_p (arg0,
15455 strict_overflow_p);
15456
15457 CASE_FLT_FN (BUILT_IN_POW):
15458 /* True if the 1st argument is nonnegative or the second
15459 argument is an even integer valued real. */
15460 if (TREE_CODE (arg1) == REAL_CST)
15461 {
15462 REAL_VALUE_TYPE c;
15463 HOST_WIDE_INT n;
15464
15465 c = TREE_REAL_CST (arg1);
15466 n = real_to_integer (&c);
15467 if ((n & 1) == 0)
15468 {
15469 REAL_VALUE_TYPE cint;
15470 real_from_integer (&cint, VOIDmode, n, SIGNED);
15471 if (real_identical (&c, &cint))
15472 return true;
15473 }
15474 }
15475 return tree_expr_nonnegative_warnv_p (arg0,
15476 strict_overflow_p);
15477
15478 default:
15479 break;
15480 }
15481 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15482 type);
15483 }
15484
15485 /* Return true if T is known to be non-negative. If the return
15486 value is based on the assumption that signed overflow is undefined,
15487 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15488 *STRICT_OVERFLOW_P. */
15489
15490 static bool
15491 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15492 {
15493 enum tree_code code = TREE_CODE (t);
15494 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15495 return true;
15496
15497 switch (code)
15498 {
15499 case TARGET_EXPR:
15500 {
15501 tree temp = TARGET_EXPR_SLOT (t);
15502 t = TARGET_EXPR_INITIAL (t);
15503
15504 /* If the initializer is non-void, then it's a normal expression
15505 that will be assigned to the slot. */
15506 if (!VOID_TYPE_P (t))
15507 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15508
15509 /* Otherwise, the initializer sets the slot in some way. One common
15510 way is an assignment statement at the end of the initializer. */
15511 while (1)
15512 {
15513 if (TREE_CODE (t) == BIND_EXPR)
15514 t = expr_last (BIND_EXPR_BODY (t));
15515 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15516 || TREE_CODE (t) == TRY_CATCH_EXPR)
15517 t = expr_last (TREE_OPERAND (t, 0));
15518 else if (TREE_CODE (t) == STATEMENT_LIST)
15519 t = expr_last (t);
15520 else
15521 break;
15522 }
15523 if (TREE_CODE (t) == MODIFY_EXPR
15524 && TREE_OPERAND (t, 0) == temp)
15525 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15526 strict_overflow_p);
15527
15528 return false;
15529 }
15530
15531 case CALL_EXPR:
15532 {
15533 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15534 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15535
15536 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15537 get_callee_fndecl (t),
15538 arg0,
15539 arg1,
15540 strict_overflow_p);
15541 }
15542 case COMPOUND_EXPR:
15543 case MODIFY_EXPR:
15544 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15545 strict_overflow_p);
15546 case BIND_EXPR:
15547 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15548 strict_overflow_p);
15549 case SAVE_EXPR:
15550 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15551 strict_overflow_p);
15552
15553 default:
15554 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15555 TREE_TYPE (t));
15556 }
15557
15558 /* We don't know sign of `t', so be conservative and return false. */
15559 return false;
15560 }
15561
15562 /* Return true if T is known to be non-negative. If the return
15563 value is based on the assumption that signed overflow is undefined,
15564 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15565 *STRICT_OVERFLOW_P. */
15566
15567 bool
15568 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15569 {
15570 enum tree_code code;
15571 if (t == error_mark_node)
15572 return false;
15573
15574 code = TREE_CODE (t);
15575 switch (TREE_CODE_CLASS (code))
15576 {
15577 case tcc_binary:
15578 case tcc_comparison:
15579 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15580 TREE_TYPE (t),
15581 TREE_OPERAND (t, 0),
15582 TREE_OPERAND (t, 1),
15583 strict_overflow_p);
15584
15585 case tcc_unary:
15586 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15587 TREE_TYPE (t),
15588 TREE_OPERAND (t, 0),
15589 strict_overflow_p);
15590
15591 case tcc_constant:
15592 case tcc_declaration:
15593 case tcc_reference:
15594 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15595
15596 default:
15597 break;
15598 }
15599
15600 switch (code)
15601 {
15602 case TRUTH_AND_EXPR:
15603 case TRUTH_OR_EXPR:
15604 case TRUTH_XOR_EXPR:
15605 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15606 TREE_TYPE (t),
15607 TREE_OPERAND (t, 0),
15608 TREE_OPERAND (t, 1),
15609 strict_overflow_p);
15610 case TRUTH_NOT_EXPR:
15611 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15612 TREE_TYPE (t),
15613 TREE_OPERAND (t, 0),
15614 strict_overflow_p);
15615
15616 case COND_EXPR:
15617 case CONSTRUCTOR:
15618 case OBJ_TYPE_REF:
15619 case ASSERT_EXPR:
15620 case ADDR_EXPR:
15621 case WITH_SIZE_EXPR:
15622 case SSA_NAME:
15623 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15624
15625 default:
15626 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15627 }
15628 }
15629
15630 /* Return true if `t' is known to be non-negative. Handle warnings
15631 about undefined signed overflow. */
15632
15633 bool
15634 tree_expr_nonnegative_p (tree t)
15635 {
15636 bool ret, strict_overflow_p;
15637
15638 strict_overflow_p = false;
15639 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15640 if (strict_overflow_p)
15641 fold_overflow_warning (("assuming signed overflow does not occur when "
15642 "determining that expression is always "
15643 "non-negative"),
15644 WARN_STRICT_OVERFLOW_MISC);
15645 return ret;
15646 }
15647
15648
15649 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15650 For floating point we further ensure that T is not denormal.
15651 Similar logic is present in nonzero_address in rtlanal.h.
15652
15653 If the return value is based on the assumption that signed overflow
15654 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15655 change *STRICT_OVERFLOW_P. */
15656
15657 bool
15658 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15659 bool *strict_overflow_p)
15660 {
15661 switch (code)
15662 {
15663 case ABS_EXPR:
15664 return tree_expr_nonzero_warnv_p (op0,
15665 strict_overflow_p);
15666
15667 case NOP_EXPR:
15668 {
15669 tree inner_type = TREE_TYPE (op0);
15670 tree outer_type = type;
15671
15672 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15673 && tree_expr_nonzero_warnv_p (op0,
15674 strict_overflow_p));
15675 }
15676 break;
15677
15678 case NON_LVALUE_EXPR:
15679 return tree_expr_nonzero_warnv_p (op0,
15680 strict_overflow_p);
15681
15682 default:
15683 break;
15684 }
15685
15686 return false;
15687 }
15688
15689 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15690 For floating point we further ensure that T is not denormal.
15691 Similar logic is present in nonzero_address in rtlanal.h.
15692
15693 If the return value is based on the assumption that signed overflow
15694 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15695 change *STRICT_OVERFLOW_P. */
15696
15697 bool
15698 tree_binary_nonzero_warnv_p (enum tree_code code,
15699 tree type,
15700 tree op0,
15701 tree op1, bool *strict_overflow_p)
15702 {
15703 bool sub_strict_overflow_p;
15704 switch (code)
15705 {
15706 case POINTER_PLUS_EXPR:
15707 case PLUS_EXPR:
15708 if (TYPE_OVERFLOW_UNDEFINED (type))
15709 {
15710 /* With the presence of negative values it is hard
15711 to say something. */
15712 sub_strict_overflow_p = false;
15713 if (!tree_expr_nonnegative_warnv_p (op0,
15714 &sub_strict_overflow_p)
15715 || !tree_expr_nonnegative_warnv_p (op1,
15716 &sub_strict_overflow_p))
15717 return false;
15718 /* One of operands must be positive and the other non-negative. */
15719 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15720 overflows, on a twos-complement machine the sum of two
15721 nonnegative numbers can never be zero. */
15722 return (tree_expr_nonzero_warnv_p (op0,
15723 strict_overflow_p)
15724 || tree_expr_nonzero_warnv_p (op1,
15725 strict_overflow_p));
15726 }
15727 break;
15728
15729 case MULT_EXPR:
15730 if (TYPE_OVERFLOW_UNDEFINED (type))
15731 {
15732 if (tree_expr_nonzero_warnv_p (op0,
15733 strict_overflow_p)
15734 && tree_expr_nonzero_warnv_p (op1,
15735 strict_overflow_p))
15736 {
15737 *strict_overflow_p = true;
15738 return true;
15739 }
15740 }
15741 break;
15742
15743 case MIN_EXPR:
15744 sub_strict_overflow_p = false;
15745 if (tree_expr_nonzero_warnv_p (op0,
15746 &sub_strict_overflow_p)
15747 && tree_expr_nonzero_warnv_p (op1,
15748 &sub_strict_overflow_p))
15749 {
15750 if (sub_strict_overflow_p)
15751 *strict_overflow_p = true;
15752 }
15753 break;
15754
15755 case MAX_EXPR:
15756 sub_strict_overflow_p = false;
15757 if (tree_expr_nonzero_warnv_p (op0,
15758 &sub_strict_overflow_p))
15759 {
15760 if (sub_strict_overflow_p)
15761 *strict_overflow_p = true;
15762
15763 /* When both operands are nonzero, then MAX must be too. */
15764 if (tree_expr_nonzero_warnv_p (op1,
15765 strict_overflow_p))
15766 return true;
15767
15768 /* MAX where operand 0 is positive is positive. */
15769 return tree_expr_nonnegative_warnv_p (op0,
15770 strict_overflow_p);
15771 }
15772 /* MAX where operand 1 is positive is positive. */
15773 else if (tree_expr_nonzero_warnv_p (op1,
15774 &sub_strict_overflow_p)
15775 && tree_expr_nonnegative_warnv_p (op1,
15776 &sub_strict_overflow_p))
15777 {
15778 if (sub_strict_overflow_p)
15779 *strict_overflow_p = true;
15780 return true;
15781 }
15782 break;
15783
15784 case BIT_IOR_EXPR:
15785 return (tree_expr_nonzero_warnv_p (op1,
15786 strict_overflow_p)
15787 || tree_expr_nonzero_warnv_p (op0,
15788 strict_overflow_p));
15789
15790 default:
15791 break;
15792 }
15793
15794 return false;
15795 }
15796
15797 /* Return true when T is an address and is known to be nonzero.
15798 For floating point we further ensure that T is not denormal.
15799 Similar logic is present in nonzero_address in rtlanal.h.
15800
15801 If the return value is based on the assumption that signed overflow
15802 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15803 change *STRICT_OVERFLOW_P. */
15804
15805 bool
15806 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15807 {
15808 bool sub_strict_overflow_p;
15809 switch (TREE_CODE (t))
15810 {
15811 case INTEGER_CST:
15812 return !integer_zerop (t);
15813
15814 case ADDR_EXPR:
15815 {
15816 tree base = TREE_OPERAND (t, 0);
15817
15818 if (!DECL_P (base))
15819 base = get_base_address (base);
15820
15821 if (!base)
15822 return false;
15823
15824 /* For objects in symbol table check if we know they are non-zero.
15825 Don't do anything for variables and functions before symtab is built;
15826 it is quite possible that they will be declared weak later. */
15827 if (DECL_P (base) && decl_in_symtab_p (base))
15828 {
15829 struct symtab_node *symbol;
15830
15831 symbol = symtab_node::get_create (base);
15832 if (symbol)
15833 return symbol->nonzero_address ();
15834 else
15835 return false;
15836 }
15837
15838 /* Function local objects are never NULL. */
15839 if (DECL_P (base)
15840 && (DECL_CONTEXT (base)
15841 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15842 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15843 return true;
15844
15845 /* Constants are never weak. */
15846 if (CONSTANT_CLASS_P (base))
15847 return true;
15848
15849 return false;
15850 }
15851
15852 case COND_EXPR:
15853 sub_strict_overflow_p = false;
15854 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15855 &sub_strict_overflow_p)
15856 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15857 &sub_strict_overflow_p))
15858 {
15859 if (sub_strict_overflow_p)
15860 *strict_overflow_p = true;
15861 return true;
15862 }
15863 break;
15864
15865 default:
15866 break;
15867 }
15868 return false;
15869 }
15870
15871 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15872 attempt to fold the expression to a constant without modifying TYPE,
15873 OP0 or OP1.
15874
15875 If the expression could be simplified to a constant, then return
15876 the constant. If the expression would not be simplified to a
15877 constant, then return NULL_TREE. */
15878
15879 tree
15880 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15881 {
15882 tree tem = fold_binary (code, type, op0, op1);
15883 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15884 }
15885
15886 /* Given the components of a unary expression CODE, TYPE and OP0,
15887 attempt to fold the expression to a constant without modifying
15888 TYPE or OP0.
15889
15890 If the expression could be simplified to a constant, then return
15891 the constant. If the expression would not be simplified to a
15892 constant, then return NULL_TREE. */
15893
15894 tree
15895 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15896 {
15897 tree tem = fold_unary (code, type, op0);
15898 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15899 }
15900
15901 /* If EXP represents referencing an element in a constant string
15902 (either via pointer arithmetic or array indexing), return the
15903 tree representing the value accessed, otherwise return NULL. */
15904
15905 tree
15906 fold_read_from_constant_string (tree exp)
15907 {
15908 if ((TREE_CODE (exp) == INDIRECT_REF
15909 || TREE_CODE (exp) == ARRAY_REF)
15910 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15911 {
15912 tree exp1 = TREE_OPERAND (exp, 0);
15913 tree index;
15914 tree string;
15915 location_t loc = EXPR_LOCATION (exp);
15916
15917 if (TREE_CODE (exp) == INDIRECT_REF)
15918 string = string_constant (exp1, &index);
15919 else
15920 {
15921 tree low_bound = array_ref_low_bound (exp);
15922 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15923
15924 /* Optimize the special-case of a zero lower bound.
15925
15926 We convert the low_bound to sizetype to avoid some problems
15927 with constant folding. (E.g. suppose the lower bound is 1,
15928 and its mode is QI. Without the conversion,l (ARRAY
15929 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15930 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15931 if (! integer_zerop (low_bound))
15932 index = size_diffop_loc (loc, index,
15933 fold_convert_loc (loc, sizetype, low_bound));
15934
15935 string = exp1;
15936 }
15937
15938 if (string
15939 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15940 && TREE_CODE (string) == STRING_CST
15941 && TREE_CODE (index) == INTEGER_CST
15942 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15943 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15944 == MODE_INT)
15945 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15946 return build_int_cst_type (TREE_TYPE (exp),
15947 (TREE_STRING_POINTER (string)
15948 [TREE_INT_CST_LOW (index)]));
15949 }
15950 return NULL;
15951 }
15952
15953 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15954 an integer constant, real, or fixed-point constant.
15955
15956 TYPE is the type of the result. */
15957
15958 static tree
15959 fold_negate_const (tree arg0, tree type)
15960 {
15961 tree t = NULL_TREE;
15962
15963 switch (TREE_CODE (arg0))
15964 {
15965 case INTEGER_CST:
15966 {
15967 bool overflow;
15968 wide_int val = wi::neg (arg0, &overflow);
15969 t = force_fit_type (type, val, 1,
15970 (overflow | TREE_OVERFLOW (arg0))
15971 && !TYPE_UNSIGNED (type));
15972 break;
15973 }
15974
15975 case REAL_CST:
15976 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15977 break;
15978
15979 case FIXED_CST:
15980 {
15981 FIXED_VALUE_TYPE f;
15982 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15983 &(TREE_FIXED_CST (arg0)), NULL,
15984 TYPE_SATURATING (type));
15985 t = build_fixed (type, f);
15986 /* Propagate overflow flags. */
15987 if (overflow_p | TREE_OVERFLOW (arg0))
15988 TREE_OVERFLOW (t) = 1;
15989 break;
15990 }
15991
15992 default:
15993 gcc_unreachable ();
15994 }
15995
15996 return t;
15997 }
15998
15999 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16000 an integer constant or real constant.
16001
16002 TYPE is the type of the result. */
16003
16004 tree
16005 fold_abs_const (tree arg0, tree type)
16006 {
16007 tree t = NULL_TREE;
16008
16009 switch (TREE_CODE (arg0))
16010 {
16011 case INTEGER_CST:
16012 {
16013 /* If the value is unsigned or non-negative, then the absolute value
16014 is the same as the ordinary value. */
16015 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
16016 t = arg0;
16017
16018 /* If the value is negative, then the absolute value is
16019 its negation. */
16020 else
16021 {
16022 bool overflow;
16023 wide_int val = wi::neg (arg0, &overflow);
16024 t = force_fit_type (type, val, -1,
16025 overflow | TREE_OVERFLOW (arg0));
16026 }
16027 }
16028 break;
16029
16030 case REAL_CST:
16031 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16032 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16033 else
16034 t = arg0;
16035 break;
16036
16037 default:
16038 gcc_unreachable ();
16039 }
16040
16041 return t;
16042 }
16043
16044 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16045 constant. TYPE is the type of the result. */
16046
16047 static tree
16048 fold_not_const (const_tree arg0, tree type)
16049 {
16050 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16051
16052 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
16053 }
16054
16055 /* Given CODE, a relational operator, the target type, TYPE and two
16056 constant operands OP0 and OP1, return the result of the
16057 relational operation. If the result is not a compile time
16058 constant, then return NULL_TREE. */
16059
16060 static tree
16061 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16062 {
16063 int result, invert;
16064
16065 /* From here on, the only cases we handle are when the result is
16066 known to be a constant. */
16067
16068 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16069 {
16070 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16071 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16072
16073 /* Handle the cases where either operand is a NaN. */
16074 if (real_isnan (c0) || real_isnan (c1))
16075 {
16076 switch (code)
16077 {
16078 case EQ_EXPR:
16079 case ORDERED_EXPR:
16080 result = 0;
16081 break;
16082
16083 case NE_EXPR:
16084 case UNORDERED_EXPR:
16085 case UNLT_EXPR:
16086 case UNLE_EXPR:
16087 case UNGT_EXPR:
16088 case UNGE_EXPR:
16089 case UNEQ_EXPR:
16090 result = 1;
16091 break;
16092
16093 case LT_EXPR:
16094 case LE_EXPR:
16095 case GT_EXPR:
16096 case GE_EXPR:
16097 case LTGT_EXPR:
16098 if (flag_trapping_math)
16099 return NULL_TREE;
16100 result = 0;
16101 break;
16102
16103 default:
16104 gcc_unreachable ();
16105 }
16106
16107 return constant_boolean_node (result, type);
16108 }
16109
16110 return constant_boolean_node (real_compare (code, c0, c1), type);
16111 }
16112
16113 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16114 {
16115 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16116 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16117 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16118 }
16119
16120 /* Handle equality/inequality of complex constants. */
16121 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16122 {
16123 tree rcond = fold_relational_const (code, type,
16124 TREE_REALPART (op0),
16125 TREE_REALPART (op1));
16126 tree icond = fold_relational_const (code, type,
16127 TREE_IMAGPART (op0),
16128 TREE_IMAGPART (op1));
16129 if (code == EQ_EXPR)
16130 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16131 else if (code == NE_EXPR)
16132 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16133 else
16134 return NULL_TREE;
16135 }
16136
16137 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16138 {
16139 unsigned count = VECTOR_CST_NELTS (op0);
16140 tree *elts = XALLOCAVEC (tree, count);
16141 gcc_assert (VECTOR_CST_NELTS (op1) == count
16142 && TYPE_VECTOR_SUBPARTS (type) == count);
16143
16144 for (unsigned i = 0; i < count; i++)
16145 {
16146 tree elem_type = TREE_TYPE (type);
16147 tree elem0 = VECTOR_CST_ELT (op0, i);
16148 tree elem1 = VECTOR_CST_ELT (op1, i);
16149
16150 tree tem = fold_relational_const (code, elem_type,
16151 elem0, elem1);
16152
16153 if (tem == NULL_TREE)
16154 return NULL_TREE;
16155
16156 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16157 }
16158
16159 return build_vector (type, elts);
16160 }
16161
16162 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16163
16164 To compute GT, swap the arguments and do LT.
16165 To compute GE, do LT and invert the result.
16166 To compute LE, swap the arguments, do LT and invert the result.
16167 To compute NE, do EQ and invert the result.
16168
16169 Therefore, the code below must handle only EQ and LT. */
16170
16171 if (code == LE_EXPR || code == GT_EXPR)
16172 {
16173 tree tem = op0;
16174 op0 = op1;
16175 op1 = tem;
16176 code = swap_tree_comparison (code);
16177 }
16178
16179 /* Note that it is safe to invert for real values here because we
16180 have already handled the one case that it matters. */
16181
16182 invert = 0;
16183 if (code == NE_EXPR || code == GE_EXPR)
16184 {
16185 invert = 1;
16186 code = invert_tree_comparison (code, false);
16187 }
16188
16189 /* Compute a result for LT or EQ if args permit;
16190 Otherwise return T. */
16191 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16192 {
16193 if (code == EQ_EXPR)
16194 result = tree_int_cst_equal (op0, op1);
16195 else
16196 result = tree_int_cst_lt (op0, op1);
16197 }
16198 else
16199 return NULL_TREE;
16200
16201 if (invert)
16202 result ^= 1;
16203 return constant_boolean_node (result, type);
16204 }
16205
16206 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16207 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16208 itself. */
16209
16210 tree
16211 fold_build_cleanup_point_expr (tree type, tree expr)
16212 {
16213 /* If the expression does not have side effects then we don't have to wrap
16214 it with a cleanup point expression. */
16215 if (!TREE_SIDE_EFFECTS (expr))
16216 return expr;
16217
16218 /* If the expression is a return, check to see if the expression inside the
16219 return has no side effects or the right hand side of the modify expression
16220 inside the return. If either don't have side effects set we don't need to
16221 wrap the expression in a cleanup point expression. Note we don't check the
16222 left hand side of the modify because it should always be a return decl. */
16223 if (TREE_CODE (expr) == RETURN_EXPR)
16224 {
16225 tree op = TREE_OPERAND (expr, 0);
16226 if (!op || !TREE_SIDE_EFFECTS (op))
16227 return expr;
16228 op = TREE_OPERAND (op, 1);
16229 if (!TREE_SIDE_EFFECTS (op))
16230 return expr;
16231 }
16232
16233 return build1 (CLEANUP_POINT_EXPR, type, expr);
16234 }
16235
16236 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16237 of an indirection through OP0, or NULL_TREE if no simplification is
16238 possible. */
16239
16240 tree
16241 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16242 {
16243 tree sub = op0;
16244 tree subtype;
16245
16246 STRIP_NOPS (sub);
16247 subtype = TREE_TYPE (sub);
16248 if (!POINTER_TYPE_P (subtype))
16249 return NULL_TREE;
16250
16251 if (TREE_CODE (sub) == ADDR_EXPR)
16252 {
16253 tree op = TREE_OPERAND (sub, 0);
16254 tree optype = TREE_TYPE (op);
16255 /* *&CONST_DECL -> to the value of the const decl. */
16256 if (TREE_CODE (op) == CONST_DECL)
16257 return DECL_INITIAL (op);
16258 /* *&p => p; make sure to handle *&"str"[cst] here. */
16259 if (type == optype)
16260 {
16261 tree fop = fold_read_from_constant_string (op);
16262 if (fop)
16263 return fop;
16264 else
16265 return op;
16266 }
16267 /* *(foo *)&fooarray => fooarray[0] */
16268 else if (TREE_CODE (optype) == ARRAY_TYPE
16269 && type == TREE_TYPE (optype)
16270 && (!in_gimple_form
16271 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16272 {
16273 tree type_domain = TYPE_DOMAIN (optype);
16274 tree min_val = size_zero_node;
16275 if (type_domain && TYPE_MIN_VALUE (type_domain))
16276 min_val = TYPE_MIN_VALUE (type_domain);
16277 if (in_gimple_form
16278 && TREE_CODE (min_val) != INTEGER_CST)
16279 return NULL_TREE;
16280 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16281 NULL_TREE, NULL_TREE);
16282 }
16283 /* *(foo *)&complexfoo => __real__ complexfoo */
16284 else if (TREE_CODE (optype) == COMPLEX_TYPE
16285 && type == TREE_TYPE (optype))
16286 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16287 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16288 else if (TREE_CODE (optype) == VECTOR_TYPE
16289 && type == TREE_TYPE (optype))
16290 {
16291 tree part_width = TYPE_SIZE (type);
16292 tree index = bitsize_int (0);
16293 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16294 }
16295 }
16296
16297 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16298 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16299 {
16300 tree op00 = TREE_OPERAND (sub, 0);
16301 tree op01 = TREE_OPERAND (sub, 1);
16302
16303 STRIP_NOPS (op00);
16304 if (TREE_CODE (op00) == ADDR_EXPR)
16305 {
16306 tree op00type;
16307 op00 = TREE_OPERAND (op00, 0);
16308 op00type = TREE_TYPE (op00);
16309
16310 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16311 if (TREE_CODE (op00type) == VECTOR_TYPE
16312 && type == TREE_TYPE (op00type))
16313 {
16314 HOST_WIDE_INT offset = tree_to_shwi (op01);
16315 tree part_width = TYPE_SIZE (type);
16316 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16317 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16318 tree index = bitsize_int (indexi);
16319
16320 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16321 return fold_build3_loc (loc,
16322 BIT_FIELD_REF, type, op00,
16323 part_width, index);
16324
16325 }
16326 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16327 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16328 && type == TREE_TYPE (op00type))
16329 {
16330 tree size = TYPE_SIZE_UNIT (type);
16331 if (tree_int_cst_equal (size, op01))
16332 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16333 }
16334 /* ((foo *)&fooarray)[1] => fooarray[1] */
16335 else if (TREE_CODE (op00type) == ARRAY_TYPE
16336 && type == TREE_TYPE (op00type))
16337 {
16338 tree type_domain = TYPE_DOMAIN (op00type);
16339 tree min_val = size_zero_node;
16340 if (type_domain && TYPE_MIN_VALUE (type_domain))
16341 min_val = TYPE_MIN_VALUE (type_domain);
16342 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16343 TYPE_SIZE_UNIT (type));
16344 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16345 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16346 NULL_TREE, NULL_TREE);
16347 }
16348 }
16349 }
16350
16351 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16352 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16353 && type == TREE_TYPE (TREE_TYPE (subtype))
16354 && (!in_gimple_form
16355 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16356 {
16357 tree type_domain;
16358 tree min_val = size_zero_node;
16359 sub = build_fold_indirect_ref_loc (loc, sub);
16360 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16361 if (type_domain && TYPE_MIN_VALUE (type_domain))
16362 min_val = TYPE_MIN_VALUE (type_domain);
16363 if (in_gimple_form
16364 && TREE_CODE (min_val) != INTEGER_CST)
16365 return NULL_TREE;
16366 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16367 NULL_TREE);
16368 }
16369
16370 return NULL_TREE;
16371 }
16372
16373 /* Builds an expression for an indirection through T, simplifying some
16374 cases. */
16375
16376 tree
16377 build_fold_indirect_ref_loc (location_t loc, tree t)
16378 {
16379 tree type = TREE_TYPE (TREE_TYPE (t));
16380 tree sub = fold_indirect_ref_1 (loc, type, t);
16381
16382 if (sub)
16383 return sub;
16384
16385 return build1_loc (loc, INDIRECT_REF, type, t);
16386 }
16387
16388 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16389
16390 tree
16391 fold_indirect_ref_loc (location_t loc, tree t)
16392 {
16393 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16394
16395 if (sub)
16396 return sub;
16397 else
16398 return t;
16399 }
16400
16401 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16402 whose result is ignored. The type of the returned tree need not be
16403 the same as the original expression. */
16404
16405 tree
16406 fold_ignored_result (tree t)
16407 {
16408 if (!TREE_SIDE_EFFECTS (t))
16409 return integer_zero_node;
16410
16411 for (;;)
16412 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16413 {
16414 case tcc_unary:
16415 t = TREE_OPERAND (t, 0);
16416 break;
16417
16418 case tcc_binary:
16419 case tcc_comparison:
16420 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16421 t = TREE_OPERAND (t, 0);
16422 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16423 t = TREE_OPERAND (t, 1);
16424 else
16425 return t;
16426 break;
16427
16428 case tcc_expression:
16429 switch (TREE_CODE (t))
16430 {
16431 case COMPOUND_EXPR:
16432 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16433 return t;
16434 t = TREE_OPERAND (t, 0);
16435 break;
16436
16437 case COND_EXPR:
16438 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16439 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16440 return t;
16441 t = TREE_OPERAND (t, 0);
16442 break;
16443
16444 default:
16445 return t;
16446 }
16447 break;
16448
16449 default:
16450 return t;
16451 }
16452 }
16453
16454 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16455
16456 tree
16457 round_up_loc (location_t loc, tree value, unsigned int divisor)
16458 {
16459 tree div = NULL_TREE;
16460
16461 if (divisor == 1)
16462 return value;
16463
16464 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16465 have to do anything. Only do this when we are not given a const,
16466 because in that case, this check is more expensive than just
16467 doing it. */
16468 if (TREE_CODE (value) != INTEGER_CST)
16469 {
16470 div = build_int_cst (TREE_TYPE (value), divisor);
16471
16472 if (multiple_of_p (TREE_TYPE (value), value, div))
16473 return value;
16474 }
16475
16476 /* If divisor is a power of two, simplify this to bit manipulation. */
16477 if (divisor == (divisor & -divisor))
16478 {
16479 if (TREE_CODE (value) == INTEGER_CST)
16480 {
16481 wide_int val = value;
16482 bool overflow_p;
16483
16484 if ((val & (divisor - 1)) == 0)
16485 return value;
16486
16487 overflow_p = TREE_OVERFLOW (value);
16488 val &= ~(divisor - 1);
16489 val += divisor;
16490 if (val == 0)
16491 overflow_p = true;
16492
16493 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16494 }
16495 else
16496 {
16497 tree t;
16498
16499 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16500 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16501 t = build_int_cst (TREE_TYPE (value), -divisor);
16502 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16503 }
16504 }
16505 else
16506 {
16507 if (!div)
16508 div = build_int_cst (TREE_TYPE (value), divisor);
16509 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16510 value = size_binop_loc (loc, MULT_EXPR, value, div);
16511 }
16512
16513 return value;
16514 }
16515
16516 /* Likewise, but round down. */
16517
16518 tree
16519 round_down_loc (location_t loc, tree value, int divisor)
16520 {
16521 tree div = NULL_TREE;
16522
16523 gcc_assert (divisor > 0);
16524 if (divisor == 1)
16525 return value;
16526
16527 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16528 have to do anything. Only do this when we are not given a const,
16529 because in that case, this check is more expensive than just
16530 doing it. */
16531 if (TREE_CODE (value) != INTEGER_CST)
16532 {
16533 div = build_int_cst (TREE_TYPE (value), divisor);
16534
16535 if (multiple_of_p (TREE_TYPE (value), value, div))
16536 return value;
16537 }
16538
16539 /* If divisor is a power of two, simplify this to bit manipulation. */
16540 if (divisor == (divisor & -divisor))
16541 {
16542 tree t;
16543
16544 t = build_int_cst (TREE_TYPE (value), -divisor);
16545 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16546 }
16547 else
16548 {
16549 if (!div)
16550 div = build_int_cst (TREE_TYPE (value), divisor);
16551 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16552 value = size_binop_loc (loc, MULT_EXPR, value, div);
16553 }
16554
16555 return value;
16556 }
16557
16558 /* Returns the pointer to the base of the object addressed by EXP and
16559 extracts the information about the offset of the access, storing it
16560 to PBITPOS and POFFSET. */
16561
16562 static tree
16563 split_address_to_core_and_offset (tree exp,
16564 HOST_WIDE_INT *pbitpos, tree *poffset)
16565 {
16566 tree core;
16567 machine_mode mode;
16568 int unsignedp, volatilep;
16569 HOST_WIDE_INT bitsize;
16570 location_t loc = EXPR_LOCATION (exp);
16571
16572 if (TREE_CODE (exp) == ADDR_EXPR)
16573 {
16574 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16575 poffset, &mode, &unsignedp, &volatilep,
16576 false);
16577 core = build_fold_addr_expr_loc (loc, core);
16578 }
16579 else
16580 {
16581 core = exp;
16582 *pbitpos = 0;
16583 *poffset = NULL_TREE;
16584 }
16585
16586 return core;
16587 }
16588
16589 /* Returns true if addresses of E1 and E2 differ by a constant, false
16590 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16591
16592 bool
16593 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16594 {
16595 tree core1, core2;
16596 HOST_WIDE_INT bitpos1, bitpos2;
16597 tree toffset1, toffset2, tdiff, type;
16598
16599 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16600 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16601
16602 if (bitpos1 % BITS_PER_UNIT != 0
16603 || bitpos2 % BITS_PER_UNIT != 0
16604 || !operand_equal_p (core1, core2, 0))
16605 return false;
16606
16607 if (toffset1 && toffset2)
16608 {
16609 type = TREE_TYPE (toffset1);
16610 if (type != TREE_TYPE (toffset2))
16611 toffset2 = fold_convert (type, toffset2);
16612
16613 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16614 if (!cst_and_fits_in_hwi (tdiff))
16615 return false;
16616
16617 *diff = int_cst_value (tdiff);
16618 }
16619 else if (toffset1 || toffset2)
16620 {
16621 /* If only one of the offsets is non-constant, the difference cannot
16622 be a constant. */
16623 return false;
16624 }
16625 else
16626 *diff = 0;
16627
16628 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16629 return true;
16630 }
16631
16632 /* Simplify the floating point expression EXP when the sign of the
16633 result is not significant. Return NULL_TREE if no simplification
16634 is possible. */
16635
16636 tree
16637 fold_strip_sign_ops (tree exp)
16638 {
16639 tree arg0, arg1;
16640 location_t loc = EXPR_LOCATION (exp);
16641
16642 switch (TREE_CODE (exp))
16643 {
16644 case ABS_EXPR:
16645 case NEGATE_EXPR:
16646 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16647 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16648
16649 case MULT_EXPR:
16650 case RDIV_EXPR:
16651 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16652 return NULL_TREE;
16653 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16654 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16655 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16656 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16657 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16658 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16659 break;
16660
16661 case COMPOUND_EXPR:
16662 arg0 = TREE_OPERAND (exp, 0);
16663 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16664 if (arg1)
16665 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16666 break;
16667
16668 case COND_EXPR:
16669 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16670 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16671 if (arg0 || arg1)
16672 return fold_build3_loc (loc,
16673 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16674 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16675 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16676 break;
16677
16678 case CALL_EXPR:
16679 {
16680 const enum built_in_function fcode = builtin_mathfn_code (exp);
16681 switch (fcode)
16682 {
16683 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16684 /* Strip copysign function call, return the 1st argument. */
16685 arg0 = CALL_EXPR_ARG (exp, 0);
16686 arg1 = CALL_EXPR_ARG (exp, 1);
16687 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16688
16689 default:
16690 /* Strip sign ops from the argument of "odd" math functions. */
16691 if (negate_mathfn_p (fcode))
16692 {
16693 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16694 if (arg0)
16695 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16696 }
16697 break;
16698 }
16699 }
16700 break;
16701
16702 default:
16703 break;
16704 }
16705 return NULL_TREE;
16706 }