/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "cgraph.h"
#include "generic-match.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

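/* Illustrative note (derived from the values above, not a comment from
   the original sources): the encoding is chosen so that combining two
   comparisons with AND or OR reduces to bitwise AND/OR of their codes,
   e.g. COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD).  */
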
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
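
/* For example (illustrative): ARG1 == 12, ARG2 == 4 yields the
   INTEGER_CST 3, while ARG1 == 13, ARG2 == 4 yields NULL_TREE because
   13 % 4 != 0.  */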
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

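/* A typical use, sketched only for illustration (the identifiers in
   this sketch are hypothetical):

     fold_defer_overflow_warnings ();
     tree val = fold (expr);
     fold_undefer_overflow_warnings (val_was_used, stmt, 0);

   so the deferred warning is only emitted if the folded result is
   actually kept.  */
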
static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

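/* For example, sin qualifies because sin(-x) == -sin(x), whereas cos
   does not (cos(-x) == cos(x)), which is why no BUILT_IN_COS case
   appears above.  */
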
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

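/* For example, for a 32-bit signed int this returns false exactly for
   INT_MIN (only the sign bit set), since -INT_MIN is not representable;
   it returns true for every other value.  */
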
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

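/* For example (illustrative): splitting IN == x + 3 with CODE ==
   PLUS_EXPR stores the INTEGER_CST 3 in *LITP, leaves *CONP null, and
   returns x as the variable part.  */
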
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

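/* For example (illustrative): combining the INTEGER_CSTs 2 and 3 under
   PLUS_EXPR yields the INTEGER_CST 5, while a CODE this function does
   not handle yields NULL_TREE.  */
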
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;
      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and will return NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_RSHIFT_EXPR is endian dependent.
             For reductions, the compiler picks the first vector element
             if !BYTES_BIG_ENDIAN, but the last element if
             BYTES_BIG_ENDIAN.  */
          if (BYTES_BIG_ENDIAN)
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and will return NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

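/* For instance (illustrative): const_binop on the REAL_CSTs 1.0 and 2.0
   under PLUS_EXPR produces the REAL_CST 3.0, and NULL_TREE comes back
   for any combination the function does not know how to evaluate at
   compile time.  */
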
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
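
/* For example (illustrative): for the sizetype constants ARG0 == 4 and
   ARG1 == 12 this returns the ssizetype constant -8; the result type is
   always the signed counterpart of the operands' type.  */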
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

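/* For example, under these Java-style semantics converting the REAL_CST
   1e30 to a 32-bit signed integer type saturates to INT_MAX (with the
   overflow flag set), and converting a NaN yields 0.  */
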
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not zero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

1823 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1824 type TYPE. If no simplification can be done return NULL_TREE. */
1825
1826 static tree
1827 fold_convert_const (enum tree_code code, tree type, tree arg1)
1828 {
1829 if (TREE_TYPE (arg1) == type)
1830 return arg1;
1831
1832 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1833 || TREE_CODE (type) == OFFSET_TYPE)
1834 {
1835 if (TREE_CODE (arg1) == INTEGER_CST)
1836 return fold_convert_const_int_from_int (type, arg1);
1837 else if (TREE_CODE (arg1) == REAL_CST)
1838 return fold_convert_const_int_from_real (code, type, arg1);
1839 else if (TREE_CODE (arg1) == FIXED_CST)
1840 return fold_convert_const_int_from_fixed (type, arg1);
1841 }
1842 else if (TREE_CODE (type) == REAL_TYPE)
1843 {
1844 if (TREE_CODE (arg1) == INTEGER_CST)
1845 return build_real_from_int_cst (type, arg1);
1846 else if (TREE_CODE (arg1) == REAL_CST)
1847 return fold_convert_const_real_from_real (type, arg1);
1848 else if (TREE_CODE (arg1) == FIXED_CST)
1849 return fold_convert_const_real_from_fixed (type, arg1);
1850 }
1851 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1852 {
1853 if (TREE_CODE (arg1) == FIXED_CST)
1854 return fold_convert_const_fixed_from_fixed (type, arg1);
1855 else if (TREE_CODE (arg1) == INTEGER_CST)
1856 return fold_convert_const_fixed_from_int (type, arg1);
1857 else if (TREE_CODE (arg1) == REAL_CST)
1858 return fold_convert_const_fixed_from_real (type, arg1);
1859 }
1860 return NULL_TREE;
1861 }
1862
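/* A minimal usage sketch (the variable names are hypothetical):

       tree three = build_int_cst (integer_type_node, 3);
       tree f = fold_convert_const (FLOAT_EXPR, double_type_node, three);

   f is now a REAL_CST with value 3.0; a non-constant ARG1 would have
   produced NULL_TREE instead.  */
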
1863 /* Construct a vector of zero elements of vector type TYPE. */
1864
1865 static tree
1866 build_zero_vector (tree type)
1867 {
1868 tree t;
1869
1870 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1871 return build_vector_from_val (type, t);
1872 }
1873
1874 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1875
1876 bool
1877 fold_convertible_p (const_tree type, const_tree arg)
1878 {
1879 tree orig = TREE_TYPE (arg);
1880
1881 if (type == orig)
1882 return true;
1883
1884 if (TREE_CODE (arg) == ERROR_MARK
1885 || TREE_CODE (type) == ERROR_MARK
1886 || TREE_CODE (orig) == ERROR_MARK)
1887 return false;
1888
1889 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1890 return true;
1891
1892 switch (TREE_CODE (type))
1893 {
1894 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1895 case POINTER_TYPE: case REFERENCE_TYPE:
1896 case OFFSET_TYPE:
1897 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1898 || TREE_CODE (orig) == OFFSET_TYPE)
1899 return true;
1900 return (TREE_CODE (orig) == VECTOR_TYPE
1901 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1902
1903 case REAL_TYPE:
1904 case FIXED_POINT_TYPE:
1905 case COMPLEX_TYPE:
1906 case VECTOR_TYPE:
1907 case VOID_TYPE:
1908 return TREE_CODE (type) == TREE_CODE (orig);
1909
1910 default:
1911 return false;
1912 }
1913 }
1914
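/* E.g. an int expression is convertible to an enum or pointer type via
   a bare NOP_EXPR, but an int -> double conversion is not: the
   REAL_TYPE case above only matches another REAL_TYPE, since crossing
   into floating point needs a genuine FLOAT_EXPR.  */
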
1915 /* Convert expression ARG to type TYPE. Used by the middle-end for
1916 simple conversions in preference to calling the front-end's convert. */
1917
1918 tree
1919 fold_convert_loc (location_t loc, tree type, tree arg)
1920 {
1921 tree orig = TREE_TYPE (arg);
1922 tree tem;
1923
1924 if (type == orig)
1925 return arg;
1926
1927 if (TREE_CODE (arg) == ERROR_MARK
1928 || TREE_CODE (type) == ERROR_MARK
1929 || TREE_CODE (orig) == ERROR_MARK)
1930 return error_mark_node;
1931
1932 switch (TREE_CODE (type))
1933 {
1934 case POINTER_TYPE:
1935 case REFERENCE_TYPE:
1936 /* Handle conversions between pointers to different address spaces. */
1937 if (POINTER_TYPE_P (orig)
1938 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1939 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1940 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1941 /* fall through */
1942
1943 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1944 case OFFSET_TYPE:
1945 if (TREE_CODE (arg) == INTEGER_CST)
1946 {
1947 tem = fold_convert_const (NOP_EXPR, type, arg);
1948 if (tem != NULL_TREE)
1949 return tem;
1950 }
1951 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1952 || TREE_CODE (orig) == OFFSET_TYPE)
1953 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1954 if (TREE_CODE (orig) == COMPLEX_TYPE)
1955 return fold_convert_loc (loc, type,
1956 fold_build1_loc (loc, REALPART_EXPR,
1957 TREE_TYPE (orig), arg));
1958 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1959 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1960 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1961
1962 case REAL_TYPE:
1963 if (TREE_CODE (arg) == INTEGER_CST)
1964 {
1965 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1966 if (tem != NULL_TREE)
1967 return tem;
1968 }
1969 else if (TREE_CODE (arg) == REAL_CST)
1970 {
1971 tem = fold_convert_const (NOP_EXPR, type, arg);
1972 if (tem != NULL_TREE)
1973 return tem;
1974 }
1975 else if (TREE_CODE (arg) == FIXED_CST)
1976 {
1977 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1978 if (tem != NULL_TREE)
1979 return tem;
1980 }
1981
1982 switch (TREE_CODE (orig))
1983 {
1984 case INTEGER_TYPE:
1985 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1986 case POINTER_TYPE: case REFERENCE_TYPE:
1987 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1988
1989 case REAL_TYPE:
1990 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1991
1992 case FIXED_POINT_TYPE:
1993 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1994
1995 case COMPLEX_TYPE:
1996 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1997 return fold_convert_loc (loc, type, tem);
1998
1999 default:
2000 gcc_unreachable ();
2001 }
2002
2003 case FIXED_POINT_TYPE:
2004 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2005 || TREE_CODE (arg) == REAL_CST)
2006 {
2007 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2008 if (tem != NULL_TREE)
2009 goto fold_convert_exit;
2010 }
2011
2012 switch (TREE_CODE (orig))
2013 {
2014 case FIXED_POINT_TYPE:
2015 case INTEGER_TYPE:
2016 case ENUMERAL_TYPE:
2017 case BOOLEAN_TYPE:
2018 case REAL_TYPE:
2019 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2020
2021 case COMPLEX_TYPE:
2022 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2023 return fold_convert_loc (loc, type, tem);
2024
2025 default:
2026 gcc_unreachable ();
2027 }
2028
2029 case COMPLEX_TYPE:
2030 switch (TREE_CODE (orig))
2031 {
2032 case INTEGER_TYPE:
2033 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2034 case POINTER_TYPE: case REFERENCE_TYPE:
2035 case REAL_TYPE:
2036 case FIXED_POINT_TYPE:
2037 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2038 fold_convert_loc (loc, TREE_TYPE (type), arg),
2039 fold_convert_loc (loc, TREE_TYPE (type),
2040 integer_zero_node));
2041 case COMPLEX_TYPE:
2042 {
2043 tree rpart, ipart;
2044
2045 if (TREE_CODE (arg) == COMPLEX_EXPR)
2046 {
2047 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2048 TREE_OPERAND (arg, 0));
2049 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2050 TREE_OPERAND (arg, 1));
2051 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2052 }
2053
2054 arg = save_expr (arg);
2055 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2056 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2057 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2058 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2059 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2060 }
2061
2062 default:
2063 gcc_unreachable ();
2064 }
2065
2066 case VECTOR_TYPE:
2067 if (integer_zerop (arg))
2068 return build_zero_vector (type);
2069 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2070 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2071 || TREE_CODE (orig) == VECTOR_TYPE);
2072 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2073
2074 case VOID_TYPE:
2075 tem = fold_ignored_result (arg);
2076 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2077
2078 default:
2079 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2080 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2081 gcc_unreachable ();
2082 }
2083 fold_convert_exit:
2084 protected_set_expr_location_unshare (tem, loc);
2085 return tem;
2086 }
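
/* A minimal usage sketch: constant operands fold immediately, anything
   else gets an explicit conversion node (some_int_var stands for a
   hypothetical int-typed tree):

       fold_convert_loc (loc, double_type_node, integer_one_node)
         -- yields the REAL_CST 1.0 via fold_convert_const
       fold_convert_loc (loc, long_integer_type_node, some_int_var)
         -- yields a NOP_EXPR wrapping some_int_var  */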
2087 \f
2088 /* Return false if expr can be assumed not to be an lvalue, true
2089 otherwise. */
2090
2091 static bool
2092 maybe_lvalue_p (const_tree x)
2093 {
2094 /* We only need to wrap lvalue tree codes. */
2095 switch (TREE_CODE (x))
2096 {
2097 case VAR_DECL:
2098 case PARM_DECL:
2099 case RESULT_DECL:
2100 case LABEL_DECL:
2101 case FUNCTION_DECL:
2102 case SSA_NAME:
2103
2104 case COMPONENT_REF:
2105 case MEM_REF:
2106 case INDIRECT_REF:
2107 case ARRAY_REF:
2108 case ARRAY_RANGE_REF:
2109 case BIT_FIELD_REF:
2110 case OBJ_TYPE_REF:
2111
2112 case REALPART_EXPR:
2113 case IMAGPART_EXPR:
2114 case PREINCREMENT_EXPR:
2115 case PREDECREMENT_EXPR:
2116 case SAVE_EXPR:
2117 case TRY_CATCH_EXPR:
2118 case WITH_CLEANUP_EXPR:
2119 case COMPOUND_EXPR:
2120 case MODIFY_EXPR:
2121 case TARGET_EXPR:
2122 case COND_EXPR:
2123 case BIND_EXPR:
2124 break;
2125
2126 default:
2127 /* Assume the worst for front-end tree codes. */
2128 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2129 break;
2130 return false;
2131 }
2132
2133 return true;
2134 }
2135
2136 /* Return an expr equal to X but certainly not valid as an lvalue. */
2137
2138 tree
2139 non_lvalue_loc (location_t loc, tree x)
2140 {
2141 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2142 us. */
2143 if (in_gimple_form)
2144 return x;
2145
2146 if (! maybe_lvalue_p (x))
2147 return x;
2148 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2149 }
2150
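/* E.g. a PLUS_EXPR is returned unchanged (maybe_lvalue_p is false for
   it), while a VAR_DECL gets wrapped in NON_LVALUE_EXPR so that the
   front end will reject it on the left-hand side of an assignment.  */
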
2151 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2152 Zero means allow extended lvalues. */
2153
2154 int pedantic_lvalues;
2155
2156 /* When pedantic, return an expr equal to X but certainly not valid as a
2157 pedantic lvalue. Otherwise, return X. */
2158
2159 static tree
2160 pedantic_non_lvalue_loc (location_t loc, tree x)
2161 {
2162 if (pedantic_lvalues)
2163 return non_lvalue_loc (loc, x);
2164
2165 return protected_set_expr_location_unshare (x, loc);
2166 }
2167 \f
2168 /* Given a tree comparison code, return the code that is the logical inverse.
2169 It is generally not safe to do this for floating-point comparisons, except
2170 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2171 ERROR_MARK in this case. */
2172
2173 enum tree_code
2174 invert_tree_comparison (enum tree_code code, bool honor_nans)
2175 {
2176 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2177 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2178 return ERROR_MARK;
2179
2180 switch (code)
2181 {
2182 case EQ_EXPR:
2183 return NE_EXPR;
2184 case NE_EXPR:
2185 return EQ_EXPR;
2186 case GT_EXPR:
2187 return honor_nans ? UNLE_EXPR : LE_EXPR;
2188 case GE_EXPR:
2189 return honor_nans ? UNLT_EXPR : LT_EXPR;
2190 case LT_EXPR:
2191 return honor_nans ? UNGE_EXPR : GE_EXPR;
2192 case LE_EXPR:
2193 return honor_nans ? UNGT_EXPR : GT_EXPR;
2194 case LTGT_EXPR:
2195 return UNEQ_EXPR;
2196 case UNEQ_EXPR:
2197 return LTGT_EXPR;
2198 case UNGT_EXPR:
2199 return LE_EXPR;
2200 case UNGE_EXPR:
2201 return LT_EXPR;
2202 case UNLT_EXPR:
2203 return GE_EXPR;
2204 case UNLE_EXPR:
2205 return GT_EXPR;
2206 case ORDERED_EXPR:
2207 return UNORDERED_EXPR;
2208 case UNORDERED_EXPR:
2209 return ORDERED_EXPR;
2210 default:
2211 gcc_unreachable ();
2212 }
2213 }
2214
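/* For example, with NaNs honored the logical inverse of a < b is
   a unge b, not a >= b:

       invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR
       invert_tree_comparison (LT_EXPR, false) == GE_EXPR

   and when trapping math is also in effect the first call returns
   ERROR_MARK instead, because the unordered form would lose the trap
   on unordered operands.  */
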
2215 /* Similar, but return the comparison that results if the operands are
2216 swapped. This is safe for floating-point. */
2217
2218 enum tree_code
2219 swap_tree_comparison (enum tree_code code)
2220 {
2221 switch (code)
2222 {
2223 case EQ_EXPR:
2224 case NE_EXPR:
2225 case ORDERED_EXPR:
2226 case UNORDERED_EXPR:
2227 case LTGT_EXPR:
2228 case UNEQ_EXPR:
2229 return code;
2230 case GT_EXPR:
2231 return LT_EXPR;
2232 case GE_EXPR:
2233 return LE_EXPR;
2234 case LT_EXPR:
2235 return GT_EXPR;
2236 case LE_EXPR:
2237 return GE_EXPR;
2238 case UNGT_EXPR:
2239 return UNLT_EXPR;
2240 case UNGE_EXPR:
2241 return UNLE_EXPR;
2242 case UNLT_EXPR:
2243 return UNGT_EXPR;
2244 case UNLE_EXPR:
2245 return UNGE_EXPR;
2246 default:
2247 gcc_unreachable ();
2248 }
2249 }
2250
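/* E.g. swap_tree_comparison (GT_EXPR) == LT_EXPR: a > b and b < a are
   the same test even for NaN operands, which is why no honor_nans
   argument is needed here.  */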
2251
2252 /* Convert a comparison tree code from an enum tree_code representation
2253 into a compcode bit-based encoding. This function is the inverse of
2254 compcode_to_comparison. */
2255
2256 static enum comparison_code
2257 comparison_to_compcode (enum tree_code code)
2258 {
2259 switch (code)
2260 {
2261 case LT_EXPR:
2262 return COMPCODE_LT;
2263 case EQ_EXPR:
2264 return COMPCODE_EQ;
2265 case LE_EXPR:
2266 return COMPCODE_LE;
2267 case GT_EXPR:
2268 return COMPCODE_GT;
2269 case NE_EXPR:
2270 return COMPCODE_NE;
2271 case GE_EXPR:
2272 return COMPCODE_GE;
2273 case ORDERED_EXPR:
2274 return COMPCODE_ORD;
2275 case UNORDERED_EXPR:
2276 return COMPCODE_UNORD;
2277 case UNLT_EXPR:
2278 return COMPCODE_UNLT;
2279 case UNEQ_EXPR:
2280 return COMPCODE_UNEQ;
2281 case UNLE_EXPR:
2282 return COMPCODE_UNLE;
2283 case UNGT_EXPR:
2284 return COMPCODE_UNGT;
2285 case LTGT_EXPR:
2286 return COMPCODE_LTGT;
2287 case UNGE_EXPR:
2288 return COMPCODE_UNGE;
2289 default:
2290 gcc_unreachable ();
2291 }
2292 }
2293
2294 /* Convert a compcode bit-based encoding of a comparison operator back
2295 to GCC's enum tree_code representation. This function is the
2296 inverse of comparison_to_compcode. */
2297
2298 static enum tree_code
2299 compcode_to_comparison (enum comparison_code code)
2300 {
2301 switch (code)
2302 {
2303 case COMPCODE_LT:
2304 return LT_EXPR;
2305 case COMPCODE_EQ:
2306 return EQ_EXPR;
2307 case COMPCODE_LE:
2308 return LE_EXPR;
2309 case COMPCODE_GT:
2310 return GT_EXPR;
2311 case COMPCODE_NE:
2312 return NE_EXPR;
2313 case COMPCODE_GE:
2314 return GE_EXPR;
2315 case COMPCODE_ORD:
2316 return ORDERED_EXPR;
2317 case COMPCODE_UNORD:
2318 return UNORDERED_EXPR;
2319 case COMPCODE_UNLT:
2320 return UNLT_EXPR;
2321 case COMPCODE_UNEQ:
2322 return UNEQ_EXPR;
2323 case COMPCODE_UNLE:
2324 return UNLE_EXPR;
2325 case COMPCODE_UNGT:
2326 return UNGT_EXPR;
2327 case COMPCODE_LTGT:
2328 return LTGT_EXPR;
2329 case COMPCODE_UNGE:
2330 return UNGE_EXPR;
2331 default:
2332 gcc_unreachable ();
2333 }
2334 }
2335
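/* The encoding makes logical combination a bit operation: e.g.
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ, which is what
   combine_comparisons below relies on.  */
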
2336 /* Return a tree for the comparison which is the combination of
2337 doing the AND or OR (depending on CODE) of the two operations LCODE
2338 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2339 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2340 if this makes the transformation invalid. */
2341
2342 tree
2343 combine_comparisons (location_t loc,
2344 enum tree_code code, enum tree_code lcode,
2345 enum tree_code rcode, tree truth_type,
2346 tree ll_arg, tree lr_arg)
2347 {
2348 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2349 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2350 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2351 int compcode;
2352
2353 switch (code)
2354 {
2355 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2356 compcode = lcompcode & rcompcode;
2357 break;
2358
2359 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2360 compcode = lcompcode | rcompcode;
2361 break;
2362
2363 default:
2364 return NULL_TREE;
2365 }
2366
2367 if (!honor_nans)
2368 {
2369 /* Eliminate unordered comparisons, as well as LTGT and ORD
2370 which are not used unless the mode has NaNs. */
2371 compcode &= ~COMPCODE_UNORD;
2372 if (compcode == COMPCODE_LTGT)
2373 compcode = COMPCODE_NE;
2374 else if (compcode == COMPCODE_ORD)
2375 compcode = COMPCODE_TRUE;
2376 }
2377 else if (flag_trapping_math)
2378 {
2379 /* Check that the original operation and the optimized ones will trap
2380 under the same condition. */
2381 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2382 && (lcompcode != COMPCODE_EQ)
2383 && (lcompcode != COMPCODE_ORD);
2384 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2385 && (rcompcode != COMPCODE_EQ)
2386 && (rcompcode != COMPCODE_ORD);
2387 bool trap = (compcode & COMPCODE_UNORD) == 0
2388 && (compcode != COMPCODE_EQ)
2389 && (compcode != COMPCODE_ORD);
2390
2391 /* In a short-circuited boolean expression the LHS might be
2392 such that the RHS, if evaluated, will never trap. For
2393 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2394 if neither x nor y is NaN. (This is a mixed blessing: for
2395 example, the expression above will never trap, hence
2396 optimizing it to x < y would be invalid). */
2397 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2398 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2399 rtrap = false;
2400
2401 /* If the comparison was short-circuited, and only the RHS
2402 trapped, we may now generate a spurious trap. */
2403 if (rtrap && !ltrap
2404 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2405 return NULL_TREE;
2406
2407 /* If we changed the conditions that cause a trap, we lose. */
2408 if ((ltrap || rtrap) != trap)
2409 return NULL_TREE;
2410 }
2411
2412 if (compcode == COMPCODE_TRUE)
2413 return constant_boolean_node (true, truth_type);
2414 else if (compcode == COMPCODE_FALSE)
2415 return constant_boolean_node (false, truth_type);
2416 else
2417 {
2418 enum tree_code tcode;
2419
2420 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2421 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2422 }
2423 }
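
/* A worked example (x and y are hypothetical operands of the same
   scalar floating type, trapping math disabled):

       combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                            boolean_type_node, x, y)

   ORs COMPCODE_LT with COMPCODE_EQ to get COMPCODE_LE and returns the
   tree for x <= y.  */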
2424 \f
2425 /* Return nonzero if two operands (typically of the same tree node)
2426 are necessarily equal. If either argument has side-effects this
2427 function returns zero. FLAGS modifies behavior as follows:
2428
2429 If OEP_ONLY_CONST is set, only return nonzero for constants.
2430 This function tests whether the operands are indistinguishable;
2431 it does not test whether they are equal using C's == operation.
2432 The distinction is important for IEEE floating point, because
2433 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2434 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2435
2436 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2437 even though it may hold multiple values during a function.
2438 This is because a GCC tree node guarantees that nothing else is
2439 executed between the evaluation of its "operands" (which may often
2440 be evaluated in arbitrary order). Hence if the operands themselves
2441 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2442 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2443 unset means assuming isochronic (or instantaneous) tree equivalence.
2444 Unless comparing arbitrary expression trees, such as from different
2445 statements, this flag can usually be left unset.
2446
2447 If OEP_PURE_SAME is set, then pure functions with identical arguments
2448 are considered the same. It is used when the caller has other ways
2449 to ensure that global memory is unchanged in between. */
2450
2451 int
2452 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2453 {
2454 /* If either is ERROR_MARK, they aren't equal. */
2455 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2456 || TREE_TYPE (arg0) == error_mark_node
2457 || TREE_TYPE (arg1) == error_mark_node)
2458 return 0;
2459
2460 /* Similarly, if either does not have a type (like a released SSA name),
2461 they aren't equal. */
2462 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2463 return 0;
2464
2465 /* Check equality of integer constants before bailing out due to
2466 precision differences. */
2467 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2468 return tree_int_cst_equal (arg0, arg1);
2469
2470 /* If the two types don't have the same signedness, then we can't consider
2471 them equal. We must check this before the STRIP_NOPS calls
2472 because they may change the signedness of the arguments. As pointers
2473 strictly don't have a signedness, require either two pointers or
2474 two non-pointers as well. */
2475 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2476 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2477 return 0;
2478
2479 /* We cannot consider pointers to different address spaces equal. */
2480 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2481 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2482 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2483 return 0;
2484
2485 /* If the two types don't have the same precision, then it is not safe
2486 to strip NOPs. */
2487 if (element_precision (TREE_TYPE (arg0))
2488 != element_precision (TREE_TYPE (arg1)))
2489 return 0;
2490
2491 STRIP_NOPS (arg0);
2492 STRIP_NOPS (arg1);
2493
2494 /* In case both args are comparisons but with different comparison
2495 code, try to swap the comparison operands of one arg to produce
2496 a match and compare that variant. */
2497 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2498 && COMPARISON_CLASS_P (arg0)
2499 && COMPARISON_CLASS_P (arg1))
2500 {
2501 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2502
2503 if (TREE_CODE (arg0) == swap_code)
2504 return operand_equal_p (TREE_OPERAND (arg0, 0),
2505 TREE_OPERAND (arg1, 1), flags)
2506 && operand_equal_p (TREE_OPERAND (arg0, 1),
2507 TREE_OPERAND (arg1, 0), flags);
2508 }
2509
2510 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2511 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2512 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2513 return 0;
2514
2515 /* This is needed for conversions and for COMPONENT_REF.
2516 Might as well play it safe and always test this. */
2517 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2518 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2519 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2520 return 0;
2521
2522 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2523 We don't care about side effects in that case because the SAVE_EXPR
2524 takes care of that for us. In all other cases, two expressions are
2525 equal if they have no side effects. If we have two identical
2526 expressions with side effects that should be treated the same due
2527 to the only side effects being identical SAVE_EXPR's, that will
2528 be detected in the recursive calls below.
2529 If we are taking an invariant address of two identical objects
2530 they are necessarily equal as well. */
2531 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2532 && (TREE_CODE (arg0) == SAVE_EXPR
2533 || (flags & OEP_CONSTANT_ADDRESS_OF)
2534 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2535 return 1;
2536
2537 /* Next handle constant cases, those for which we can return 1 even
2538 if ONLY_CONST is set. */
2539 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2540 switch (TREE_CODE (arg0))
2541 {
2542 case INTEGER_CST:
2543 return tree_int_cst_equal (arg0, arg1);
2544
2545 case FIXED_CST:
2546 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2547 TREE_FIXED_CST (arg1));
2548
2549 case REAL_CST:
2550 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2551 TREE_REAL_CST (arg1)))
2552 return 1;
2553
2554
2555 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2556 {
2557 /* If we do not distinguish between signed and unsigned zero,
2558 consider them equal. */
2559 if (real_zerop (arg0) && real_zerop (arg1))
2560 return 1;
2561 }
2562 return 0;
2563
2564 case VECTOR_CST:
2565 {
2566 unsigned i;
2567
2568 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2569 return 0;
2570
2571 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2572 {
2573 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2574 VECTOR_CST_ELT (arg1, i), flags))
2575 return 0;
2576 }
2577 return 1;
2578 }
2579
2580 case COMPLEX_CST:
2581 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2582 flags)
2583 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2584 flags));
2585
2586 case STRING_CST:
2587 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2588 && ! memcmp (TREE_STRING_POINTER (arg0),
2589 TREE_STRING_POINTER (arg1),
2590 TREE_STRING_LENGTH (arg0)));
2591
2592 case ADDR_EXPR:
2593 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2594 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2595 ? OEP_CONSTANT_ADDRESS_OF : 0);
2596 default:
2597 break;
2598 }
2599
2600 if (flags & OEP_ONLY_CONST)
2601 return 0;
2602
2603 /* Define macros to test an operand from arg0 and arg1 for equality and a
2604 variant that allows null and views null as being different from any
2605 non-null value. In the latter case, if either is null, they both
2606 must be; otherwise, do the normal comparison. */
2607 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2608 TREE_OPERAND (arg1, N), flags)
2609
2610 #define OP_SAME_WITH_NULL(N) \
2611 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2612 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2613
2614 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2615 {
2616 case tcc_unary:
2617 /* Two conversions are equal only if signedness and modes match. */
2618 switch (TREE_CODE (arg0))
2619 {
2620 CASE_CONVERT:
2621 case FIX_TRUNC_EXPR:
2622 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2623 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2624 return 0;
2625 break;
2626 default:
2627 break;
2628 }
2629
2630 return OP_SAME (0);
2631
2632
2633 case tcc_comparison:
2634 case tcc_binary:
2635 if (OP_SAME (0) && OP_SAME (1))
2636 return 1;
2637
2638 /* For commutative ops, allow the other order. */
2639 return (commutative_tree_code (TREE_CODE (arg0))
2640 && operand_equal_p (TREE_OPERAND (arg0, 0),
2641 TREE_OPERAND (arg1, 1), flags)
2642 && operand_equal_p (TREE_OPERAND (arg0, 1),
2643 TREE_OPERAND (arg1, 0), flags));
2644
2645 case tcc_reference:
2646 /* If either of the pointer (or reference) expressions we are
2647 dereferencing contain a side effect, these cannot be equal,
2648 but their addresses can be. */
2649 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2650 && (TREE_SIDE_EFFECTS (arg0)
2651 || TREE_SIDE_EFFECTS (arg1)))
2652 return 0;
2653
2654 switch (TREE_CODE (arg0))
2655 {
2656 case INDIRECT_REF:
2657 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2658 return OP_SAME (0);
2659
2660 case REALPART_EXPR:
2661 case IMAGPART_EXPR:
2662 return OP_SAME (0);
2663
2664 case TARGET_MEM_REF:
2665 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2666 /* Require equal extra operands and then fall through to MEM_REF
2667 handling of the two common operands. */
2668 if (!OP_SAME_WITH_NULL (2)
2669 || !OP_SAME_WITH_NULL (3)
2670 || !OP_SAME_WITH_NULL (4))
2671 return 0;
2672 /* Fallthru. */
2673 case MEM_REF:
2674 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2675 /* Require equal access sizes, and similar pointer types.
2676 We can have incomplete types for array references of
2677 variable-sized arrays from the Fortran frontend
2678 though. Also verify the types are compatible. */
2679 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2680 || (TYPE_SIZE (TREE_TYPE (arg0))
2681 && TYPE_SIZE (TREE_TYPE (arg1))
2682 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2683 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2684 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2685 && alias_ptr_types_compatible_p
2686 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2687 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2688 && OP_SAME (0) && OP_SAME (1));
2689
2690 case ARRAY_REF:
2691 case ARRAY_RANGE_REF:
2692 /* Operands 2 and 3 may be null.
2693 Compare the array index by value first if it is constant, as we
2694 may have different types but the same value here. */
2695 if (!OP_SAME (0))
2696 return 0;
2697 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2698 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2699 TREE_OPERAND (arg1, 1))
2700 || OP_SAME (1))
2701 && OP_SAME_WITH_NULL (2)
2702 && OP_SAME_WITH_NULL (3));
2703
2704 case COMPONENT_REF:
2705 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2706 may be NULL when we're called to compare MEM_EXPRs. */
2707 if (!OP_SAME_WITH_NULL (0)
2708 || !OP_SAME (1))
2709 return 0;
2710 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2711 return OP_SAME_WITH_NULL (2);
2712
2713 case BIT_FIELD_REF:
2714 if (!OP_SAME (0))
2715 return 0;
2716 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2717 return OP_SAME (1) && OP_SAME (2);
2718
2719 default:
2720 return 0;
2721 }
2722
2723 case tcc_expression:
2724 switch (TREE_CODE (arg0))
2725 {
2726 case ADDR_EXPR:
2727 case TRUTH_NOT_EXPR:
2728 return OP_SAME (0);
2729
2730 case TRUTH_ANDIF_EXPR:
2731 case TRUTH_ORIF_EXPR:
2732 return OP_SAME (0) && OP_SAME (1);
2733
2734 case FMA_EXPR:
2735 case WIDEN_MULT_PLUS_EXPR:
2736 case WIDEN_MULT_MINUS_EXPR:
2737 if (!OP_SAME (2))
2738 return 0;
2739 /* The multiplication operands are commutative. */
2740 /* FALLTHRU */
2741
2742 case TRUTH_AND_EXPR:
2743 case TRUTH_OR_EXPR:
2744 case TRUTH_XOR_EXPR:
2745 if (OP_SAME (0) && OP_SAME (1))
2746 return 1;
2747
2748 /* Otherwise take into account this is a commutative operation. */
2749 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2750 TREE_OPERAND (arg1, 1), flags)
2751 && operand_equal_p (TREE_OPERAND (arg0, 1),
2752 TREE_OPERAND (arg1, 0), flags));
2753
2754 case COND_EXPR:
2755 case VEC_COND_EXPR:
2756 case DOT_PROD_EXPR:
2757 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2758
2759 default:
2760 return 0;
2761 }
2762
2763 case tcc_vl_exp:
2764 switch (TREE_CODE (arg0))
2765 {
2766 case CALL_EXPR:
2767 /* If the CALL_EXPRs call different functions, then they
2768 clearly cannot be equal. */
2769 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2770 flags))
2771 return 0;
2772
2773 {
2774 unsigned int cef = call_expr_flags (arg0);
2775 if (flags & OEP_PURE_SAME)
2776 cef &= ECF_CONST | ECF_PURE;
2777 else
2778 cef &= ECF_CONST;
2779 if (!cef)
2780 return 0;
2781 }
2782
2783 /* Now see if all the arguments are the same. */
2784 {
2785 const_call_expr_arg_iterator iter0, iter1;
2786 const_tree a0, a1;
2787 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2788 a1 = first_const_call_expr_arg (arg1, &iter1);
2789 a0 && a1;
2790 a0 = next_const_call_expr_arg (&iter0),
2791 a1 = next_const_call_expr_arg (&iter1))
2792 if (! operand_equal_p (a0, a1, flags))
2793 return 0;
2794
2795 /* If we get here and both argument lists are exhausted
2796 then the CALL_EXPRs are equal. */
2797 return ! (a0 || a1);
2798 }
2799 default:
2800 return 0;
2801 }
2802
2803 case tcc_declaration:
2804 /* Consider __builtin_sqrt equal to sqrt. */
2805 return (TREE_CODE (arg0) == FUNCTION_DECL
2806 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2807 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2808 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2809
2810 default:
2811 return 0;
2812 }
2813
2814 #undef OP_SAME
2815 #undef OP_SAME_WITH_NULL
2816 }
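
/* E.g. two separately built INTEGER_CSTs of value 7 compare equal
   here, while REAL_CSTs +0.0 and -0.0 compare equal only when the
   mode does not honor signed zeros (see the REAL_CST case above).  */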
2817 \f
2818 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2819 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2820
2821 When in doubt, return 0. */
2822
2823 static int
2824 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2825 {
2826 int unsignedp1, unsignedpo;
2827 tree primarg0, primarg1, primother;
2828 unsigned int correct_width;
2829
2830 if (operand_equal_p (arg0, arg1, 0))
2831 return 1;
2832
2833 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2834 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2835 return 0;
2836
2837 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2838 and see if the inner values are the same. This removes any
2839 signedness comparison, which doesn't matter here. */
2840 primarg0 = arg0, primarg1 = arg1;
2841 STRIP_NOPS (primarg0);
2842 STRIP_NOPS (primarg1);
2843 if (operand_equal_p (primarg0, primarg1, 0))
2844 return 1;
2845
2846 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2847 actual comparison operand, ARG0.
2848
2849 First throw away any conversions to wider types
2850 already present in the operands. */
2851
2852 primarg1 = get_narrower (arg1, &unsignedp1);
2853 primother = get_narrower (other, &unsignedpo);
2854
2855 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2856 if (unsignedp1 == unsignedpo
2857 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2858 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2859 {
2860 tree type = TREE_TYPE (arg0);
2861
2862 /* Make sure the shorter operand is extended the right way
2863 to match the longer operand. */
2864 primarg1 = fold_convert (signed_or_unsigned_type_for
2865 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2866
2867 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2868 return 1;
2869 }
2870
2871 return 0;
2872 }
2873 \f
2874 /* See if ARG is an expression that is either a comparison or is performing
2875 arithmetic on comparisons. The comparisons must only be comparing
2876 two different values, which will be stored in *CVAL1 and *CVAL2; if
2877 they are nonzero it means that some operands have already been found.
2878 No variables may be used anywhere else in the expression except in the
2879 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2880 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2881
2882 If this is true, return 1. Otherwise, return zero. */
2883
2884 static int
2885 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2886 {
2887 enum tree_code code = TREE_CODE (arg);
2888 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2889
2890 /* We can handle some of the tcc_expression cases here. */
2891 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2892 tclass = tcc_unary;
2893 else if (tclass == tcc_expression
2894 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2895 || code == COMPOUND_EXPR))
2896 tclass = tcc_binary;
2897
2898 else if (tclass == tcc_expression && code == SAVE_EXPR
2899 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2900 {
2901 /* If we've already found a CVAL1 or CVAL2, this expression is
2902 too complex to handle. */
2903 if (*cval1 || *cval2)
2904 return 0;
2905
2906 tclass = tcc_unary;
2907 *save_p = 1;
2908 }
2909
2910 switch (tclass)
2911 {
2912 case tcc_unary:
2913 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2914
2915 case tcc_binary:
2916 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2917 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2918 cval1, cval2, save_p));
2919
2920 case tcc_constant:
2921 return 1;
2922
2923 case tcc_expression:
2924 if (code == COND_EXPR)
2925 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2926 cval1, cval2, save_p)
2927 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2928 cval1, cval2, save_p)
2929 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2930 cval1, cval2, save_p));
2931 return 0;
2932
2933 case tcc_comparison:
2934 /* First see if we can handle the first operand, then the second. For
2935 the second operand, we know *CVAL1 can't be zero. It must be that
2936 one side of the comparison is each of the values; test for the
2937 case where this isn't true by failing if the two operands
2938 are the same. */
2939
2940 if (operand_equal_p (TREE_OPERAND (arg, 0),
2941 TREE_OPERAND (arg, 1), 0))
2942 return 0;
2943
2944 if (*cval1 == 0)
2945 *cval1 = TREE_OPERAND (arg, 0);
2946 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2947 ;
2948 else if (*cval2 == 0)
2949 *cval2 = TREE_OPERAND (arg, 0);
2950 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2951 ;
2952 else
2953 return 0;
2954
2955 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2956 ;
2957 else if (*cval2 == 0)
2958 *cval2 = TREE_OPERAND (arg, 1);
2959 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2960 ;
2961 else
2962 return 0;
2963
2964 return 1;
2965
2966 default:
2967 return 0;
2968 }
2969 }
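
/* E.g. for ARG == (a < b) && (b == a) the walk records *CVAL1 = a and
   *CVAL2 = b from the first comparison and accepts the second because
   it mentions the same two values, whereas (a < b) && (b < c) fails as
   soon as the third value c appears.  */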
2970 \f
2971 /* ARG is a tree that is known to contain just arithmetic operations and
2972 comparisons. Evaluate the operations in the tree substituting NEW0 for
2973 any occurrence of OLD0 as an operand of a comparison and likewise for
2974 NEW1 and OLD1. */
2975
2976 static tree
2977 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2978 tree old1, tree new1)
2979 {
2980 tree type = TREE_TYPE (arg);
2981 enum tree_code code = TREE_CODE (arg);
2982 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2983
2984 /* We can handle some of the tcc_expression cases here. */
2985 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2986 tclass = tcc_unary;
2987 else if (tclass == tcc_expression
2988 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2989 tclass = tcc_binary;
2990
2991 switch (tclass)
2992 {
2993 case tcc_unary:
2994 return fold_build1_loc (loc, code, type,
2995 eval_subst (loc, TREE_OPERAND (arg, 0),
2996 old0, new0, old1, new1));
2997
2998 case tcc_binary:
2999 return fold_build2_loc (loc, code, type,
3000 eval_subst (loc, TREE_OPERAND (arg, 0),
3001 old0, new0, old1, new1),
3002 eval_subst (loc, TREE_OPERAND (arg, 1),
3003 old0, new0, old1, new1));
3004
3005 case tcc_expression:
3006 switch (code)
3007 {
3008 case SAVE_EXPR:
3009 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3010 old1, new1);
3011
3012 case COMPOUND_EXPR:
3013 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3014 old1, new1);
3015
3016 case COND_EXPR:
3017 return fold_build3_loc (loc, code, type,
3018 eval_subst (loc, TREE_OPERAND (arg, 0),
3019 old0, new0, old1, new1),
3020 eval_subst (loc, TREE_OPERAND (arg, 1),
3021 old0, new0, old1, new1),
3022 eval_subst (loc, TREE_OPERAND (arg, 2),
3023 old0, new0, old1, new1));
3024 default:
3025 break;
3026 }
3027 /* Fall through - ??? */
3028
3029 case tcc_comparison:
3030 {
3031 tree arg0 = TREE_OPERAND (arg, 0);
3032 tree arg1 = TREE_OPERAND (arg, 1);
3033
3034 /* We need to check both for exact equality and tree equality. The
3035 former will be true if the operand has a side-effect. In that
3036 case, we know the operand occurred exactly once. */
3037
3038 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3039 arg0 = new0;
3040 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3041 arg0 = new1;
3042
3043 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3044 arg1 = new0;
3045 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3046 arg1 = new1;
3047
3048 return fold_build2_loc (loc, code, type, arg0, arg1);
3049 }
3050
3051 default:
3052 return arg;
3053 }
3054 }
3055 \f
3056 /* Return a tree for the case when the result of an expression is RESULT
3057 converted to TYPE and OMITTED was previously an operand of the expression
3058 but is now not needed (e.g., we folded OMITTED * 0).
3059
3060 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3061 the conversion of RESULT to TYPE. */
3062
3063 tree
3064 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3065 {
3066 tree t = fold_convert_loc (loc, type, result);
3067
3068 /* If the resulting operand is an empty statement, just return the omitted
3069 statement cast to void. */
3070 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3071 return build1_loc (loc, NOP_EXPR, void_type_node,
3072 fold_ignored_result (omitted));
3073
3074 if (TREE_SIDE_EFFECTS (omitted))
3075 return build2_loc (loc, COMPOUND_EXPR, type,
3076 fold_ignored_result (omitted), t);
3077
3078 return non_lvalue_loc (loc, t);
3079 }
3080
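/* E.g. when folding f () * 0 the call cannot simply be dropped, so

       omit_one_operand_loc (loc, type, integer_zero_node, call)

   builds the COMPOUND_EXPR (f (), 0); had OMITTED been free of side
   effects, the converted RESULT would be returned as a non-lvalue.  */
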
3081 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3082
3083 static tree
3084 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3085 tree omitted)
3086 {
3087 tree t = fold_convert_loc (loc, type, result);
3088
3089 /* If the resulting operand is an empty statement, just return the omitted
3090 statement casted to void. */
3091 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3092 return build1_loc (loc, NOP_EXPR, void_type_node,
3093 fold_ignored_result (omitted));
3094
3095 if (TREE_SIDE_EFFECTS (omitted))
3096 return build2_loc (loc, COMPOUND_EXPR, type,
3097 fold_ignored_result (omitted), t);
3098
3099 return pedantic_non_lvalue_loc (loc, t);
3100 }
3101
3102 /* Return a tree for the case when the result of an expression is RESULT
3103 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3104 of the expression but are now not needed.
3105
3106 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3107 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3108 evaluated before OMITTED2. Otherwise, if neither has side effects,
3109 just do the conversion of RESULT to TYPE. */
3110
3111 tree
3112 omit_two_operands_loc (location_t loc, tree type, tree result,
3113 tree omitted1, tree omitted2)
3114 {
3115 tree t = fold_convert_loc (loc, type, result);
3116
3117 if (TREE_SIDE_EFFECTS (omitted2))
3118 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3119 if (TREE_SIDE_EFFECTS (omitted1))
3120 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3121
3122 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3123 }
3124
3125 \f
3126 /* Return a simplified tree node for the truth-negation of ARG. This
3127 never alters ARG itself. We assume that ARG is an operation that
3128 returns a truth value (0 or 1).
3129
3130 FIXME: one would think we would fold the result, but it causes
3131 problems with the dominator optimizer. */
3132
3133 static tree
3134 fold_truth_not_expr (location_t loc, tree arg)
3135 {
3136 tree type = TREE_TYPE (arg);
3137 enum tree_code code = TREE_CODE (arg);
3138 location_t loc1, loc2;
3139
3140 /* If this is a comparison, we can simply invert it, except for
3141 floating-point non-equality comparisons, in which case we just
3142 enclose a TRUTH_NOT_EXPR around what we have. */
3143
3144 if (TREE_CODE_CLASS (code) == tcc_comparison)
3145 {
3146 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3147 if (FLOAT_TYPE_P (op_type)
3148 && flag_trapping_math
3149 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3150 && code != NE_EXPR && code != EQ_EXPR)
3151 return NULL_TREE;
3152
3153 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3154 if (code == ERROR_MARK)
3155 return NULL_TREE;
3156
3157 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3158 TREE_OPERAND (arg, 1));
3159 }
3160
3161 switch (code)
3162 {
3163 case INTEGER_CST:
3164 return constant_boolean_node (integer_zerop (arg), type);
3165
3166 case TRUTH_AND_EXPR:
3167 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3168 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3169 return build2_loc (loc, TRUTH_OR_EXPR, type,
3170 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3171 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3172
3173 case TRUTH_OR_EXPR:
3174 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3175 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3176 return build2_loc (loc, TRUTH_AND_EXPR, type,
3177 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3178 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3179
3180 case TRUTH_XOR_EXPR:
3181 /* Here we can invert either operand. We invert the first operand
3182 unless the second operand is a TRUTH_NOT_EXPR in which case our
3183 result is the XOR of the first operand with the inside of the
3184 negation of the second operand. */
3185
3186 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3187 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3188 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3189 else
3190 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3191 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3192 TREE_OPERAND (arg, 1));
3193
3194 case TRUTH_ANDIF_EXPR:
3195 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3196 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3197 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3198 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3199 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3200
3201 case TRUTH_ORIF_EXPR:
3202 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3203 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3204 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3205 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3206 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3207
3208 case TRUTH_NOT_EXPR:
3209 return TREE_OPERAND (arg, 0);
3210
3211 case COND_EXPR:
3212 {
3213 tree arg1 = TREE_OPERAND (arg, 1);
3214 tree arg2 = TREE_OPERAND (arg, 2);
3215
3216 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3217 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3218
3219 /* A COND_EXPR may have a throw as one operand, which
3220 then has void type. Just leave void operands
3221 as they are. */
3222 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3223 VOID_TYPE_P (TREE_TYPE (arg1))
3224 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3225 VOID_TYPE_P (TREE_TYPE (arg2))
3226 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3227 }
3228
3229 case COMPOUND_EXPR:
3230 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3231 return build2_loc (loc, COMPOUND_EXPR, type,
3232 TREE_OPERAND (arg, 0),
3233 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3234
3235 case NON_LVALUE_EXPR:
3236 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3237 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3238
3239 CASE_CONVERT:
3240 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3241 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3242
3243 /* ... fall through ... */
3244
3245 case FLOAT_EXPR:
3246 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3247 return build1_loc (loc, TREE_CODE (arg), type,
3248 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3249
3250 case BIT_AND_EXPR:
3251 if (!integer_onep (TREE_OPERAND (arg, 1)))
3252 return NULL_TREE;
3253 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3254
3255 case SAVE_EXPR:
3256 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3257
3258 case CLEANUP_POINT_EXPR:
3259 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3260 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3261 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3262
3263 default:
3264 return NULL_TREE;
3265 }
3266 }
3267
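/* E.g. negating a && b distributes by De Morgan into !a || !b, and
   negating a comparison just inverts its code, so ! (x <= y) becomes
   x > y for integers, or x ungt y when NaNs must be honored.  */
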
3268 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3269 assume that ARG is an operation that returns a truth value (0 or 1
3270 for scalars, 0 or -1 for vectors). Return the folded expression if
3271 folding is successful. Otherwise, return NULL_TREE. */
3272
3273 static tree
3274 fold_invert_truthvalue (location_t loc, tree arg)
3275 {
3276 tree type = TREE_TYPE (arg);
3277 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3278 ? BIT_NOT_EXPR
3279 : TRUTH_NOT_EXPR,
3280 type, arg);
3281 }
3282
3283 /* Return a simplified tree node for the truth-negation of ARG. This
3284 never alters ARG itself. We assume that ARG is an operation that
3285 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3286
3287 tree
3288 invert_truthvalue_loc (location_t loc, tree arg)
3289 {
3290 if (TREE_CODE (arg) == ERROR_MARK)
3291 return arg;
3292
3293 tree type = TREE_TYPE (arg);
3294 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3295 ? BIT_NOT_EXPR
3296 : TRUTH_NOT_EXPR,
3297 type, arg);
3298 }
3299
3300 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3301 operands are another bit-wise operation with a common input. If so,
3302 distribute the bit operations to save an operation and possibly two if
3303 constants are involved. For example, convert
3304 (A | B) & (A | C) into A | (B & C)
3305 Further simplification will occur if B and C are constants.
3306
3307 If this optimization cannot be done, 0 will be returned. */
3308
3309 static tree
3310 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3311 tree arg0, tree arg1)
3312 {
3313 tree common;
3314 tree left, right;
3315
3316 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3317 || TREE_CODE (arg0) == code
3318 || (TREE_CODE (arg0) != BIT_AND_EXPR
3319 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3320 return 0;
3321
3322 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3323 {
3324 common = TREE_OPERAND (arg0, 0);
3325 left = TREE_OPERAND (arg0, 1);
3326 right = TREE_OPERAND (arg1, 1);
3327 }
3328 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3329 {
3330 common = TREE_OPERAND (arg0, 0);
3331 left = TREE_OPERAND (arg0, 1);
3332 right = TREE_OPERAND (arg1, 0);
3333 }
3334 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3335 {
3336 common = TREE_OPERAND (arg0, 1);
3337 left = TREE_OPERAND (arg0, 0);
3338 right = TREE_OPERAND (arg1, 1);
3339 }
3340 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3341 {
3342 common = TREE_OPERAND (arg0, 1);
3343 left = TREE_OPERAND (arg0, 0);
3344 right = TREE_OPERAND (arg1, 0);
3345 }
3346 else
3347 return 0;
3348
3349 common = fold_convert_loc (loc, type, common);
3350 left = fold_convert_loc (loc, type, left);
3351 right = fold_convert_loc (loc, type, right);
3352 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3353 fold_build2_loc (loc, code, type, left, right));
3354 }
3355
3356 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3357 with code CODE. This optimization is unsafe. */
3358 static tree
3359 distribute_real_division (location_t loc, enum tree_code code, tree type,
3360 tree arg0, tree arg1)
3361 {
3362 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3363 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3364
3365 /* (A / C) +- (B / C) -> (A +- B) / C. */
3366 if (mul0 == mul1
3367 && operand_equal_p (TREE_OPERAND (arg0, 1),
3368 TREE_OPERAND (arg1, 1), 0))
3369 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3370 fold_build2_loc (loc, code, type,
3371 TREE_OPERAND (arg0, 0),
3372 TREE_OPERAND (arg1, 0)),
3373 TREE_OPERAND (arg0, 1));
3374
3375 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3376 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3377 TREE_OPERAND (arg1, 0), 0)
3378 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3379 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3380 {
3381 REAL_VALUE_TYPE r0, r1;
3382 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3383 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3384 if (!mul0)
3385 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3386 if (!mul1)
3387 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3388 real_arithmetic (&r0, code, &r0, &r1);
3389 return fold_build2_loc (loc, MULT_EXPR, type,
3390 TREE_OPERAND (arg0, 0),
3391 build_real (type, r0));
3392 }
3393
3394 return NULL_TREE;
3395 }
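
/* E.g. (a / 2.0) + (a / 4.0) becomes a * 0.75 by the second pattern
   above; in general 1/C need not be exact, so this can change
   rounding, which is why the transformation is only valid under
   unsafe-math assumptions.  */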
3396 \f
3397 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3398 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3399
3400 static tree
3401 make_bit_field_ref (location_t loc, tree inner, tree type,
3402 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3403 {
3404 tree result, bftype;
3405
3406 if (bitpos == 0)
3407 {
3408 tree size = TYPE_SIZE (TREE_TYPE (inner));
3409 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3410 || POINTER_TYPE_P (TREE_TYPE (inner)))
3411 && tree_fits_shwi_p (size)
3412 && tree_to_shwi (size) == bitsize)
3413 return fold_convert_loc (loc, type, inner);
3414 }
3415
3416 bftype = type;
3417 if (TYPE_PRECISION (bftype) != bitsize
3418 || TYPE_UNSIGNED (bftype) == !unsignedp)
3419 bftype = build_nonstandard_integer_type (bitsize, 0);
3420
3421 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3422 size_int (bitsize), bitsize_int (bitpos));
3423
3424 if (bftype != type)
3425 result = fold_convert_loc (loc, type, result);
3426
3427 return result;
3428 }
3429
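/* E.g. asking for all 32 bits of a 32-bit int INNER at bit position 0
   folds to a plain conversion of INNER to TYPE, while a narrower or
   shifted field yields BIT_FIELD_REF <INNER, BITSIZE, BITPOS>,
   converted to TYPE if an intermediate bitfield type was needed.  */
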
3430 /* Optimize a bit-field compare.
3431
3432 There are two cases: First is a compare against a constant and the
3433 second is a comparison of two items where the fields are at the same
3434 bit position relative to the start of a chunk (byte, halfword, word)
3435 large enough to contain it. In these cases we can avoid the shift
3436 implicit in bitfield extractions.
3437
3438 For constants, we emit a compare of the shifted constant with the
3439 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3440 compared. For two fields at the same position, we do the ANDs with the
3441 similar mask and compare the result of the ANDs.
3442
3443 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3444 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3445 are the left and right operands of the comparison, respectively.
3446
3447 If the optimization described above can be done, we return the resulting
3448 tree. Otherwise we return zero. */
3449
3450 static tree
3451 optimize_bit_field_compare (location_t loc, enum tree_code code,
3452 tree compare_type, tree lhs, tree rhs)
3453 {
3454 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3455 tree type = TREE_TYPE (lhs);
3456 tree unsigned_type;
3457 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3458 enum machine_mode lmode, rmode, nmode;
3459 int lunsignedp, runsignedp;
3460 int lvolatilep = 0, rvolatilep = 0;
3461 tree linner, rinner = NULL_TREE;
3462 tree mask;
3463 tree offset;
3464
3465 /* Get all the information about the extractions being done. If the bit size
3466 is the same as the size of the underlying object, we aren't doing an
3467 extraction at all and so can do nothing. We also don't want to
3468 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3469 then will no longer be able to replace it. */
3470 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3471 &lunsignedp, &lvolatilep, false);
3472 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3473 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3474 return 0;
3475
3476 if (!const_p)
3477 {
3478 /* If this is not a constant, we can only do something if bit positions,
3479 sizes, and signedness are the same. */
3480 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3481 &runsignedp, &rvolatilep, false);
3482
3483 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3484 || lunsignedp != runsignedp || offset != 0
3485 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3486 return 0;
3487 }
3488
3489 /* See if we can find a mode to refer to this field. We should be able to,
3490 but fail if we can't. */
3491 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3492 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3493 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3494 TYPE_ALIGN (TREE_TYPE (rinner))),
3495 word_mode, false);
3496 if (nmode == VOIDmode)
3497 return 0;
3498
3499 /* Set signed and unsigned types of the precision of this mode for the
3500 shifts below. */
3501 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3502
3503 /* Compute the bit position and size for the new reference and our offset
3504 within it. If the new reference is the same size as the original, we
3505 won't optimize anything, so return zero. */
3506 nbitsize = GET_MODE_BITSIZE (nmode);
3507 nbitpos = lbitpos & ~ (nbitsize - 1);
3508 lbitpos -= nbitpos;
3509 if (nbitsize == lbitsize)
3510 return 0;
3511
3512 if (BYTES_BIG_ENDIAN)
3513 lbitpos = nbitsize - lbitsize - lbitpos;
3514
3515 /* Make the mask to be used against the extracted field. */
3516 mask = build_int_cst_type (unsigned_type, -1);
3517 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3518 mask = const_binop (RSHIFT_EXPR, mask,
3519 size_int (nbitsize - lbitsize - lbitpos));
3520
3521 if (! const_p)
3522 /* If not comparing with constant, just rework the comparison
3523 and return. */
3524 return fold_build2_loc (loc, code, compare_type,
3525 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3526 make_bit_field_ref (loc, linner,
3527 unsigned_type,
3528 nbitsize, nbitpos,
3529 1),
3530 mask),
3531 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3532 make_bit_field_ref (loc, rinner,
3533 unsigned_type,
3534 nbitsize, nbitpos,
3535 1),
3536 mask));
3537
3538 /* Otherwise, we are handling the constant case. See if the constant is too
3539 big for the field. Warn and return a tree for 0 (false) if so. We do
3540 this not only for its own sake, but to avoid having to test for this
3541 error case below. If we didn't, we might generate wrong code.
3542
3543 For unsigned fields, the constant shifted right by the field length should
3544 be all zero. For signed fields, the high-order bits should agree with
3545 the sign bit. */
3546
3547 if (lunsignedp)
3548 {
3549 if (wi::lrshift (rhs, lbitsize) != 0)
3550 {
3551 warning (0, "comparison is always %d due to width of bit-field",
3552 code == NE_EXPR);
3553 return constant_boolean_node (code == NE_EXPR, compare_type);
3554 }
3555 }
3556 else
3557 {
3558 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3559 if (tem != 0 && tem != -1)
3560 {
3561 warning (0, "comparison is always %d due to width of bit-field",
3562 code == NE_EXPR);
3563 return constant_boolean_node (code == NE_EXPR, compare_type);
3564 }
3565 }
3566
3567 /* Single-bit compares should always be against zero. */
3568 if (lbitsize == 1 && ! integer_zerop (rhs))
3569 {
3570 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3571 rhs = build_int_cst (type, 0);
3572 }
3573
3574 /* Make a new bitfield reference, shift the constant over the
3575 appropriate number of bits and mask it with the computed mask
3576 (in case this was a signed field). If we changed it, make a new one. */
3577 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3578
3579 rhs = const_binop (BIT_AND_EXPR,
3580 const_binop (LSHIFT_EXPR,
3581 fold_convert_loc (loc, unsigned_type, rhs),
3582 size_int (lbitpos)),
3583 mask);
3584
3585 lhs = build2_loc (loc, code, compare_type,
3586 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3587 return lhs;
3588 }
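
/* A sketch of the constant case (hypothetical layout): given

       struct s { unsigned f : 3; } x;

   the test x.f == 5 becomes a single masked word comparison,

       (w & (7 << pos)) == (5 << pos)

   where w is a word-sized load containing the field, avoiding the
   extract-and-shift a plain bitfield read would need.  */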
3589 \f
3590 /* Subroutine for fold_truth_andor_1: decode a field reference.
3591
3592 If EXP is a comparison reference, we return the innermost reference.
3593
3594 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3595 set to the starting bit number.
3596
3597 If the innermost field can be completely contained in a mode-sized
3598 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3599
3600 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3601 otherwise it is not changed.
3602
3603 *PUNSIGNEDP is set to the signedness of the field.
3604
3605 *PMASK is set to the mask used. This is either contained in a
3606 BIT_AND_EXPR or derived from the width of the field.
3607
3608 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3609
3610 Return 0 if this is not a component reference or is one that we can't
3611 do anything with. */
3612
3613 static tree
3614 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3615 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3616 int *punsignedp, int *pvolatilep,
3617 tree *pmask, tree *pand_mask)
3618 {
3619 tree outer_type = 0;
3620 tree and_mask = 0;
3621 tree mask, inner, offset;
3622 tree unsigned_type;
3623 unsigned int precision;
3624
3625 /* All the optimizations using this function assume integer fields.
3626 There are problems with FP fields since the type_for_size call
3627 below can fail for, e.g., XFmode. */
3628 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3629 return 0;
3630
3631 /* We are interested in the bare arrangement of bits, so strip everything
3632 that doesn't affect the machine mode. However, record the type of the
3633 outermost expression if it may matter below. */
3634 if (CONVERT_EXPR_P (exp)
3635 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3636 outer_type = TREE_TYPE (exp);
3637 STRIP_NOPS (exp);
3638
3639 if (TREE_CODE (exp) == BIT_AND_EXPR)
3640 {
3641 and_mask = TREE_OPERAND (exp, 1);
3642 exp = TREE_OPERAND (exp, 0);
3643 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3644 if (TREE_CODE (and_mask) != INTEGER_CST)
3645 return 0;
3646 }
3647
3648 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3649 punsignedp, pvolatilep, false);
3650 if ((inner == exp && and_mask == 0)
3651 || *pbitsize < 0 || offset != 0
3652 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3653 return 0;
3654
3655 /* If the number of bits in the reference is the same as the bitsize of
3656 the outer type, then the outer type gives the signedness. Otherwise
3657 (in case of a small bitfield) the signedness is unchanged. */
3658 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3659 *punsignedp = TYPE_UNSIGNED (outer_type);
3660
3661 /* Compute the mask to access the bitfield. */
3662 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3663 precision = TYPE_PRECISION (unsigned_type);
3664
3665 mask = build_int_cst_type (unsigned_type, -1);
3666
3667 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3668 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3669
3670 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3671 if (and_mask != 0)
3672 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3673 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3674
3675 *pmask = mask;
3676 *pand_mask = and_mask;
3677 return inner;
3678 }
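
/* As an informal illustration of decode_field_reference (the names
   below are hypothetical, not from any testcase): for EXP of the form
   (unsigned char) (s.f & 0x0f), where F is an 8-bit unsigned bitfield
   of S, the routine returns the underlying object S with *PBITSIZE == 8,
   *PBITPOS the offset of F within S, *PAND_MASK == 0x0f, and
   *PMASK == 0x0f (the BIT_AND_EXPR mask merged with the all-ones
   field-width mask).  */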
3679
3680 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3681 bit positions and the type of MASK is signed. */
3682
3683 static int
3684 all_ones_mask_p (const_tree mask, unsigned int size)
3685 {
3686 tree type = TREE_TYPE (mask);
3687 unsigned int precision = TYPE_PRECISION (type);
3688
3689 /* If this function returns true when the type of the mask is
3690 UNSIGNED, then there will be errors. In particular see
3691 gcc.c-torture/execute/990326-1.c. There does not appear to be
3692 any documentation paper trail as to why this is so. But the pre
3693 wide-int worked with that restriction and it has been preserved
3694 here. */
3695 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3696 return false;
3697
3698 return wi::mask (size, false, precision) == mask;
3699 }
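
/* For instance (a small sketch of all_ones_mask_p): in a signed 8-bit
   type, the constant 0x0f is an all-ones mask for SIZE == 4, whereas
   the same value in an unsigned type is rejected, per the restriction
   documented above.  */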
3700
3701 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3702 represents the sign bit of EXP's type. If EXP represents a sign
3703 or zero extension, also test VAL against the unextended type.
3704 The return value is the (sub)expression whose sign bit is VAL,
3705 or NULL_TREE otherwise. */
3706
3707 static tree
3708 sign_bit_p (tree exp, const_tree val)
3709 {
3710 int width;
3711 tree t;
3712
3713 /* Tree EXP must have an integral type. */
3714 t = TREE_TYPE (exp);
3715 if (! INTEGRAL_TYPE_P (t))
3716 return NULL_TREE;
3717
3718 /* Tree VAL must be an integer constant. */
3719 if (TREE_CODE (val) != INTEGER_CST
3720 || TREE_OVERFLOW (val))
3721 return NULL_TREE;
3722
3723 width = TYPE_PRECISION (t);
3724 if (wi::only_sign_bit_p (val, width))
3725 return exp;
3726
3727 /* Handle extension from a narrower type. */
3728 if (TREE_CODE (exp) == NOP_EXPR
3729 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3730 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3731
3732 return NULL_TREE;
3733 }
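
/* Illustration of sign_bit_p (informal): for a 32-bit signed EXP, VAL
   must be 0x80000000 (only the sign bit set) for EXP itself to be
   returned; if EXP is (int) C with C of an 8-bit type, VAL == 0x80
   also matches via the recursive call, and the narrower operand C is
   returned.  */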
3734
3735 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3736 to be evaluated unconditionally. */
3737
3738 static int
3739 simple_operand_p (const_tree exp)
3740 {
3741 /* Strip any conversions that don't change the machine mode. */
3742 STRIP_NOPS (exp);
3743
3744 return (CONSTANT_CLASS_P (exp)
3745 || TREE_CODE (exp) == SSA_NAME
3746 || (DECL_P (exp)
3747 && ! TREE_ADDRESSABLE (exp)
3748 && ! TREE_THIS_VOLATILE (exp)
3749 && ! DECL_NONLOCAL (exp)
3750 /* Don't regard global variables as simple. They may be
3751 allocated in ways unknown to the compiler (shared memory,
3752 #pragma weak, etc). */
3753 && ! TREE_PUBLIC (exp)
3754 && ! DECL_EXTERNAL (exp)
3755 /* Weakrefs are not safe to be read, since they can be NULL.
3756 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3757 have DECL_WEAK flag set. */
3758 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3759 /* Loading a static variable is unduly expensive, but global
3760 registers aren't expensive. */
3761 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3762 }
3763
3764 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3765 to be evaluated unconditionally.
3766 In addition to simple_operand_p, we assume that comparisons, conversions,
3767 and logic-not operations are simple if their operands are simple, too. */
3768
3769 static bool
3770 simple_operand_p_2 (tree exp)
3771 {
3772 enum tree_code code;
3773
3774 if (TREE_SIDE_EFFECTS (exp)
3775 || tree_could_trap_p (exp))
3776 return false;
3777
3778 while (CONVERT_EXPR_P (exp))
3779 exp = TREE_OPERAND (exp, 0);
3780
3781 code = TREE_CODE (exp);
3782
3783 if (TREE_CODE_CLASS (code) == tcc_comparison)
3784 return (simple_operand_p (TREE_OPERAND (exp, 0))
3785 && simple_operand_p (TREE_OPERAND (exp, 1)));
3786
3787 if (code == TRUTH_NOT_EXPR)
3788 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3789
3790 return simple_operand_p (exp);
3791 }
3792
3793 \f
3794 /* The following functions are subroutines to fold_range_test and allow it to
3795 try to change a logical combination of comparisons into a range test.
3796
3797 For example, both
3798 X == 2 || X == 3 || X == 4 || X == 5
3799 and
3800 X >= 2 && X <= 5
3801 are converted to
3802 (unsigned) (X - 2) <= 3
3803
3804 We describe each set of comparisons as being either inside or outside
3805 a range, using a variable named like IN_P, and then describe the
3806 range with a lower and upper bound. If one of the bounds is omitted,
3807 it represents either the highest or lowest value of the type.
3808
3809 In the comments below, we represent a range by two numbers in brackets
3810 preceded by a "+" to designate being inside that range, or a "-" to
3811 designate being outside that range, so the condition can be inverted by
3812 flipping the prefix. An omitted bound is represented by a "-". For
3813 example, "- [-, 10]" means being outside the range starting at the lowest
3814 possible value and ending at 10, in other words, being greater than 10.
3815 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3816 always false.
3817
3818 We set up things so that the missing bounds are handled in a consistent
3819 manner so neither a missing bound nor "true" and "false" need to be
3820 handled using a special case. */
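
/* A worked instance of this notation: X >= 2 && X <= 5 gives the two
   ranges + [2, -] and + [-, 5]; merging them yields + [2, 5], from
   which build_range_check emits (unsigned) (X - 2) <= 3, the form
   shown at the top of this comment.  */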
3821
3822 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3823 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3824 and UPPER1_P are nonzero if the respective argument is an upper bound
3825 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3826 must be specified for a comparison. ARG1 will be converted to ARG0's
3827 type if both are specified. */
3828
3829 static tree
3830 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3831 tree arg1, int upper1_p)
3832 {
3833 tree tem;
3834 int result;
3835 int sgn0, sgn1;
3836
3837 /* If neither arg represents infinity, do the normal operation.
3838 Else, if not a comparison, return infinity. Else handle the special
3839 comparison rules. Note that most of the cases below won't occur, but
3840 are handled for consistency. */
3841
3842 if (arg0 != 0 && arg1 != 0)
3843 {
3844 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3845 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3846 STRIP_NOPS (tem);
3847 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3848 }
3849
3850 if (TREE_CODE_CLASS (code) != tcc_comparison)
3851 return 0;
3852
3853 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3854 for neither. In real mathematics, we cannot assume open-ended ranges
3855 are comparable. But this is computer arithmetic, where numbers are
3856 finite, so we can identify any unbounded bound with a value Z greater
3857 than any representable number. This permits us to treat unbounded
3858 ranges as equal. */
3859 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3860 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3861 switch (code)
3862 {
3863 case EQ_EXPR:
3864 result = sgn0 == sgn1;
3865 break;
3866 case NE_EXPR:
3867 result = sgn0 != sgn1;
3868 break;
3869 case LT_EXPR:
3870 result = sgn0 < sgn1;
3871 break;
3872 case LE_EXPR:
3873 result = sgn0 <= sgn1;
3874 break;
3875 case GT_EXPR:
3876 result = sgn0 > sgn1;
3877 break;
3878 case GE_EXPR:
3879 result = sgn0 >= sgn1;
3880 break;
3881 default:
3882 gcc_unreachable ();
3883 }
3884
3885 return constant_boolean_node (result, type);
3886 }
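
/* Example of the convention above (informal): a finite ARG0 compared
   LT against an omitted upper bound gets SGN0 == 0 and SGN1 == 1, so
   the comparison folds to true -- every representable value lies
   below the infinite bound Z.  */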
3887 \f
3888 /* Helper routine for make_range. Perform one step for it, return
3889 new expression if the loop should continue or NULL_TREE if it should
3890 stop. */
3891
3892 tree
3893 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3894 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3895 bool *strict_overflow_p)
3896 {
3897 tree arg0_type = TREE_TYPE (arg0);
3898 tree n_low, n_high, low = *p_low, high = *p_high;
3899 int in_p = *p_in_p, n_in_p;
3900
3901 switch (code)
3902 {
3903 case TRUTH_NOT_EXPR:
3904 /* We can only do something if the range is testing for zero. */
3905 if (low == NULL_TREE || high == NULL_TREE
3906 || ! integer_zerop (low) || ! integer_zerop (high))
3907 return NULL_TREE;
3908 *p_in_p = ! in_p;
3909 return arg0;
3910
3911 case EQ_EXPR: case NE_EXPR:
3912 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3913 /* We can only do something if the range is testing for zero
3914 and if the second operand is an integer constant. Note that
3915 saying something is "in" the range we make is done by
3916 complementing IN_P, since it is set in the initial case of
3917 being not equal to zero; "out" is leaving it alone. */
3918 if (low == NULL_TREE || high == NULL_TREE
3919 || ! integer_zerop (low) || ! integer_zerop (high)
3920 || TREE_CODE (arg1) != INTEGER_CST)
3921 return NULL_TREE;
3922
3923 switch (code)
3924 {
3925 case NE_EXPR: /* - [c, c] */
3926 low = high = arg1;
3927 break;
3928 case EQ_EXPR: /* + [c, c] */
3929 in_p = ! in_p, low = high = arg1;
3930 break;
3931 case GT_EXPR: /* - [-, c] */
3932 low = 0, high = arg1;
3933 break;
3934 case GE_EXPR: /* + [c, -] */
3935 in_p = ! in_p, low = arg1, high = 0;
3936 break;
3937 case LT_EXPR: /* - [c, -] */
3938 low = arg1, high = 0;
3939 break;
3940 case LE_EXPR: /* + [-, c] */
3941 in_p = ! in_p, low = 0, high = arg1;
3942 break;
3943 default:
3944 gcc_unreachable ();
3945 }
3946
3947 /* If this is an unsigned comparison, we also know that EXP is
3948 greater than or equal to zero. We base the range tests we make
3949 on that fact, so we record it here so we can parse existing
3950 range tests. We test arg0_type since often the return type
3951 of, e.g. EQ_EXPR, is boolean. */
3952 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3953 {
3954 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3955 in_p, low, high, 1,
3956 build_int_cst (arg0_type, 0),
3957 NULL_TREE))
3958 return NULL_TREE;
3959
3960 in_p = n_in_p, low = n_low, high = n_high;
3961
3962 /* If the high bound is missing, but we have a nonzero low
3963 bound, reverse the range so it goes from zero to the low bound
3964 minus 1. */
3965 if (high == 0 && low && ! integer_zerop (low))
3966 {
3967 in_p = ! in_p;
3968 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3969 build_int_cst (TREE_TYPE (low), 1), 0);
3970 low = build_int_cst (arg0_type, 0);
3971 }
3972 }
3973
3974 *p_low = low;
3975 *p_high = high;
3976 *p_in_p = in_p;
3977 return arg0;
3978
3979 case NEGATE_EXPR:
3980 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3981 low and high are non-NULL, then normalize will DTRT. */
3982 if (!TYPE_UNSIGNED (arg0_type)
3983 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3984 {
3985 if (low == NULL_TREE)
3986 low = TYPE_MIN_VALUE (arg0_type);
3987 if (high == NULL_TREE)
3988 high = TYPE_MAX_VALUE (arg0_type);
3989 }
3990
3991 /* (-x) IN [a,b] -> x in [-b, -a] */
3992 n_low = range_binop (MINUS_EXPR, exp_type,
3993 build_int_cst (exp_type, 0),
3994 0, high, 1);
3995 n_high = range_binop (MINUS_EXPR, exp_type,
3996 build_int_cst (exp_type, 0),
3997 0, low, 0);
3998 if (n_high != 0 && TREE_OVERFLOW (n_high))
3999 return NULL_TREE;
4000 goto normalize;
4001
4002 case BIT_NOT_EXPR:
4003 /* ~ X -> -X - 1 */
4004 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4005 build_int_cst (exp_type, 1));
4006
4007 case PLUS_EXPR:
4008 case MINUS_EXPR:
4009 if (TREE_CODE (arg1) != INTEGER_CST)
4010 return NULL_TREE;
4011
4012 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4013 move a constant to the other side. */
4014 if (!TYPE_UNSIGNED (arg0_type)
4015 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4016 return NULL_TREE;
4017
4018 /* If EXP is signed, any overflow in the computation is undefined,
4019 so we don't worry about it so long as our computations on
4020 the bounds don't overflow. For unsigned, overflow is defined
4021 and this is exactly the right thing. */
4022 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4023 arg0_type, low, 0, arg1, 0);
4024 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4025 arg0_type, high, 1, arg1, 0);
4026 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4027 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4028 return NULL_TREE;
4029
4030 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4031 *strict_overflow_p = true;
4032
4033 normalize:
4034 /* Check for an unsigned range which has wrapped around the maximum
4035 value thus making n_high < n_low, and normalize it. */
4036 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4037 {
4038 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4039 build_int_cst (TREE_TYPE (n_high), 1), 0);
4040 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4041 build_int_cst (TREE_TYPE (n_low), 1), 0);
4042
4043 /* If the range is of the form +/- [ x+1, x ], we won't
4044 be able to normalize it. But then, it represents the
4045 whole range or the empty set, so make it
4046 +/- [ -, - ]. */
4047 if (tree_int_cst_equal (n_low, low)
4048 && tree_int_cst_equal (n_high, high))
4049 low = high = 0;
4050 else
4051 in_p = ! in_p;
4052 }
4053 else
4054 low = n_low, high = n_high;
4055
4056 *p_low = low;
4057 *p_high = high;
4058 *p_in_p = in_p;
4059 return arg0;
4060
4061 CASE_CONVERT:
4062 case NON_LVALUE_EXPR:
4063 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4064 return NULL_TREE;
4065
4066 if (! INTEGRAL_TYPE_P (arg0_type)
4067 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4068 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4069 return NULL_TREE;
4070
4071 n_low = low, n_high = high;
4072
4073 if (n_low != 0)
4074 n_low = fold_convert_loc (loc, arg0_type, n_low);
4075
4076 if (n_high != 0)
4077 n_high = fold_convert_loc (loc, arg0_type, n_high);
4078
4079 /* If we're converting arg0 from an unsigned type to exp's
4080 signed type, we will be doing the comparison as unsigned.
4081 The tests above have already verified that LOW and HIGH
4082 are both positive.
4083
4084 So we have to ensure that we will handle large unsigned
4085 values the same way that the current signed bounds treat
4086 negative values. */
4087
4088 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4089 {
4090 tree high_positive;
4091 tree equiv_type;
4092 /* For fixed-point modes, we need to pass the saturating flag
4093 as the 2nd parameter. */
4094 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4095 equiv_type
4096 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4097 TYPE_SATURATING (arg0_type));
4098 else
4099 equiv_type
4100 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4101
4102 /* A range without an upper bound is, naturally, unbounded.
4103 Since convert would have cropped a very large value, use
4104 the max value for the destination type. */
4105 high_positive
4106 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4107 : TYPE_MAX_VALUE (arg0_type);
4108
4109 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4110 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4111 fold_convert_loc (loc, arg0_type,
4112 high_positive),
4113 build_int_cst (arg0_type, 1));
4114
4115 /* If the low bound is specified, "and" the range with the
4116 range for which the original unsigned value will be
4117 positive. */
4118 if (low != 0)
4119 {
4120 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4121 1, fold_convert_loc (loc, arg0_type,
4122 integer_zero_node),
4123 high_positive))
4124 return NULL_TREE;
4125
4126 in_p = (n_in_p == in_p);
4127 }
4128 else
4129 {
4130 /* Otherwise, "or" the range with the range of the input
4131 that will be interpreted as negative. */
4132 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4133 1, fold_convert_loc (loc, arg0_type,
4134 integer_zero_node),
4135 high_positive))
4136 return NULL_TREE;
4137
4138 in_p = (in_p != n_in_p);
4139 }
4140 }
4141
4142 *p_low = n_low;
4143 *p_high = n_high;
4144 *p_in_p = in_p;
4145 return arg0;
4146
4147 default:
4148 return NULL_TREE;
4149 }
4150 }
4151
4152 /* Given EXP, a logical expression, set the range it is testing into
4153 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4154 actually being tested. *PLOW and *PHIGH will be made of the same
4155 type as the returned expression. If EXP is not a comparison, we
4156 will most likely not be returning a useful value and range. Set
4157 *STRICT_OVERFLOW_P to true if the return value is only valid
4158 because signed overflow is undefined; otherwise, do not change
4159 *STRICT_OVERFLOW_P. */
4160
4161 tree
4162 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4163 bool *strict_overflow_p)
4164 {
4165 enum tree_code code;
4166 tree arg0, arg1 = NULL_TREE;
4167 tree exp_type, nexp;
4168 int in_p;
4169 tree low, high;
4170 location_t loc = EXPR_LOCATION (exp);
4171
4172 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4173 and see if we can refine the range. Some of the cases below may not
4174 happen, but it doesn't seem worth worrying about this. We keep
4175 iterating while make_range_step refines the range; once it returns
4176 NULL_TREE, we leave the loop. */
4177
4178 in_p = 0;
4179 low = high = build_int_cst (TREE_TYPE (exp), 0);
4180
4181 while (1)
4182 {
4183 code = TREE_CODE (exp);
4184 exp_type = TREE_TYPE (exp);
4185 arg0 = NULL_TREE;
4186
4187 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4188 {
4189 if (TREE_OPERAND_LENGTH (exp) > 0)
4190 arg0 = TREE_OPERAND (exp, 0);
4191 if (TREE_CODE_CLASS (code) == tcc_binary
4192 || TREE_CODE_CLASS (code) == tcc_comparison
4193 || (TREE_CODE_CLASS (code) == tcc_expression
4194 && TREE_OPERAND_LENGTH (exp) > 1))
4195 arg1 = TREE_OPERAND (exp, 1);
4196 }
4197 if (arg0 == NULL_TREE)
4198 break;
4199
4200 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4201 &high, &in_p, strict_overflow_p);
4202 if (nexp == NULL_TREE)
4203 break;
4204 exp = nexp;
4205 }
4206
4207 /* If EXP is a constant, we can evaluate whether this is true or false. */
4208 if (TREE_CODE (exp) == INTEGER_CST)
4209 {
4210 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4211 exp, 0, low, 0))
4212 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4213 exp, 1, high, 1)));
4214 low = high = 0;
4215 exp = 0;
4216 }
4217
4218 *pin_p = in_p, *plow = low, *phigh = high;
4219 return exp;
4220 }
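
/* A worked example of make_range (informal): for unsigned X, the
   expression X < 10 is first recorded as - [10, -]; because X is
   known to be non-negative, make_range_step then rewrites this as
   + [0, 9], and X is returned as the expression being tested.  */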
4221 \f
4222 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4223 type, TYPE, return an expression to test if EXP is in (or out of, depending
4224 on IN_P) the range. Return 0 if the test couldn't be created. */
4225
4226 tree
4227 build_range_check (location_t loc, tree type, tree exp, int in_p,
4228 tree low, tree high)
4229 {
4230 tree etype = TREE_TYPE (exp), value;
4231
4232 #ifdef HAVE_canonicalize_funcptr_for_compare
4233 /* Disable this optimization for function pointer expressions
4234 on targets that require function pointer canonicalization. */
4235 if (HAVE_canonicalize_funcptr_for_compare
4236 && TREE_CODE (etype) == POINTER_TYPE
4237 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4238 return NULL_TREE;
4239 #endif
4240
4241 if (! in_p)
4242 {
4243 value = build_range_check (loc, type, exp, 1, low, high);
4244 if (value != 0)
4245 return invert_truthvalue_loc (loc, value);
4246
4247 return 0;
4248 }
4249
4250 if (low == 0 && high == 0)
4251 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4252
4253 if (low == 0)
4254 return fold_build2_loc (loc, LE_EXPR, type, exp,
4255 fold_convert_loc (loc, etype, high));
4256
4257 if (high == 0)
4258 return fold_build2_loc (loc, GE_EXPR, type, exp,
4259 fold_convert_loc (loc, etype, low));
4260
4261 if (operand_equal_p (low, high, 0))
4262 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4263 fold_convert_loc (loc, etype, low));
4264
4265 if (integer_zerop (low))
4266 {
4267 if (! TYPE_UNSIGNED (etype))
4268 {
4269 etype = unsigned_type_for (etype);
4270 high = fold_convert_loc (loc, etype, high);
4271 exp = fold_convert_loc (loc, etype, exp);
4272 }
4273 return build_range_check (loc, type, exp, 1, 0, high);
4274 }
4275
4276 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4277 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4278 {
4279 int prec = TYPE_PRECISION (etype);
4280
4281 if (wi::mask (prec - 1, false, prec) == high)
4282 {
4283 if (TYPE_UNSIGNED (etype))
4284 {
4285 tree signed_etype = signed_type_for (etype);
4286 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4287 etype
4288 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4289 else
4290 etype = signed_etype;
4291 exp = fold_convert_loc (loc, etype, exp);
4292 }
4293 return fold_build2_loc (loc, GT_EXPR, type, exp,
4294 build_int_cst (etype, 0));
4295 }
4296 }
4297
4298 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4299 This requires wrap-around arithmetic for the type of the expression.
4300 First make sure that arithmetic in this type is valid, then make sure
4301 that it wraps around. */
4302 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4303 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4304 TYPE_UNSIGNED (etype));
4305
4306 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4307 {
4308 tree utype, minv, maxv;
4309
4310 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4311 for the type in question, as we rely on this here. */
4312 utype = unsigned_type_for (etype);
4313 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4314 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4315 build_int_cst (TREE_TYPE (maxv), 1), 1);
4316 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4317
4318 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4319 minv, 1, maxv, 1)))
4320 etype = utype;
4321 else
4322 return 0;
4323 }
4324
4325 high = fold_convert_loc (loc, etype, high);
4326 low = fold_convert_loc (loc, etype, low);
4327 exp = fold_convert_loc (loc, etype, exp);
4328
4329 value = const_binop (MINUS_EXPR, high, low);
4330
4331
4332 if (POINTER_TYPE_P (etype))
4333 {
4334 if (value != 0 && !TREE_OVERFLOW (value))
4335 {
4336 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4337 return build_range_check (loc, type,
4338 fold_build_pointer_plus_loc (loc, exp, low),
4339 1, build_int_cst (etype, 0), value);
4340 }
4341 return 0;
4342 }
4343
4344 if (value != 0 && !TREE_OVERFLOW (value))
4345 return build_range_check (loc, type,
4346 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4347 1, build_int_cst (etype, 0), value);
4348
4349 return 0;
4350 }
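
/* Informal example of build_range_check: with IN_P set, LOW == 2 and
   HIGH == 5 for a signed EXP, the subtraction case at the end applies
   and the test becomes (unsigned) (EXP - 2) <= 3, the canonical range
   check form.  */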
4351 \f
4352 /* Return the predecessor of VAL in its type, handling the infinite case. */
4353
4354 static tree
4355 range_predecessor (tree val)
4356 {
4357 tree type = TREE_TYPE (val);
4358
4359 if (INTEGRAL_TYPE_P (type)
4360 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4361 return 0;
4362 else
4363 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4364 build_int_cst (TREE_TYPE (val), 1), 0);
4365 }
4366
4367 /* Return the successor of VAL in its type, handling the infinite case. */
4368
4369 static tree
4370 range_successor (tree val)
4371 {
4372 tree type = TREE_TYPE (val);
4373
4374 if (INTEGRAL_TYPE_P (type)
4375 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4376 return 0;
4377 else
4378 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4379 build_int_cst (TREE_TYPE (val), 1), 0);
4380 }
4381
4382 /* Given two ranges, see if we can merge them into one. Return 1 if we
4383 can, 0 if we can't. Set the output range into the specified parameters. */
4384
4385 bool
4386 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4387 tree high0, int in1_p, tree low1, tree high1)
4388 {
4389 int no_overlap;
4390 int subset;
4391 int temp;
4392 tree tem;
4393 int in_p;
4394 tree low, high;
4395 int lowequal = ((low0 == 0 && low1 == 0)
4396 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4397 low0, 0, low1, 0)));
4398 int highequal = ((high0 == 0 && high1 == 0)
4399 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4400 high0, 1, high1, 1)));
4401
4402 /* Make range 0 be the range that starts first, or ends last if they
4403 start at the same value. Swap them if that isn't already so. */
4404 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4405 low0, 0, low1, 0))
4406 || (lowequal
4407 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4408 high1, 1, high0, 1))))
4409 {
4410 temp = in0_p, in0_p = in1_p, in1_p = temp;
4411 tem = low0, low0 = low1, low1 = tem;
4412 tem = high0, high0 = high1, high1 = tem;
4413 }
4414
4415 /* Now flag two cases, whether the ranges are disjoint or whether the
4416 second range is totally subsumed in the first. Note that the tests
4417 below are simplified by the ones above. */
4418 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4419 high0, 1, low1, 0));
4420 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4421 high1, 1, high0, 1));
4422
4423 /* We now have four cases, depending on whether we are including or
4424 excluding the two ranges. */
4425 if (in0_p && in1_p)
4426 {
4427 /* If they don't overlap, the result is false. If the second range
4428 is a subset it is the result. Otherwise, the range is from the start
4429 of the second to the end of the first. */
4430 if (no_overlap)
4431 in_p = 0, low = high = 0;
4432 else if (subset)
4433 in_p = 1, low = low1, high = high1;
4434 else
4435 in_p = 1, low = low1, high = high0;
4436 }
4437
4438 else if (in0_p && ! in1_p)
4439 {
4440 /* If they don't overlap, the result is the first range. If they are
4441 equal, the result is false. If the second range is a subset of the
4442 first, and the ranges begin at the same place, we go from just after
4443 the end of the second range to the end of the first. If the second
4444 range is not a subset of the first, or if it is a subset and both
4445 ranges end at the same place, the range starts at the start of the
4446 first range and ends just before the second range.
4447 Otherwise, we can't describe this as a single range. */
4448 if (no_overlap)
4449 in_p = 1, low = low0, high = high0;
4450 else if (lowequal && highequal)
4451 in_p = 0, low = high = 0;
4452 else if (subset && lowequal)
4453 {
4454 low = range_successor (high1);
4455 high = high0;
4456 in_p = 1;
4457 if (low == 0)
4458 {
4459 /* We are in the weird situation where high0 > high1 but
4460 high1 has no successor. Punt. */
4461 return 0;
4462 }
4463 }
4464 else if (! subset || highequal)
4465 {
4466 low = low0;
4467 high = range_predecessor (low1);
4468 in_p = 1;
4469 if (high == 0)
4470 {
4471 /* low0 < low1 but low1 has no predecessor. Punt. */
4472 return 0;
4473 }
4474 }
4475 else
4476 return 0;
4477 }
4478
4479 else if (! in0_p && in1_p)
4480 {
4481 /* If they don't overlap, the result is the second range. If the second
4482 is a subset of the first, the result is false. Otherwise,
4483 the range starts just after the first range and ends at the
4484 end of the second. */
4485 if (no_overlap)
4486 in_p = 1, low = low1, high = high1;
4487 else if (subset || highequal)
4488 in_p = 0, low = high = 0;
4489 else
4490 {
4491 low = range_successor (high0);
4492 high = high1;
4493 in_p = 1;
4494 if (low == 0)
4495 {
4496 /* high1 > high0 but high0 has no successor. Punt. */
4497 return 0;
4498 }
4499 }
4500 }
4501
4502 else
4503 {
4504 /* The case where we are excluding both ranges. Here the complex case
4505 is if they don't overlap. In that case, the only time we have a
4506 range is if they are adjacent. If the second is a subset of the
4507 first, the result is the first. Otherwise, the range to exclude
4508 starts at the beginning of the first range and ends at the end of the
4509 second. */
4510 if (no_overlap)
4511 {
4512 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4513 range_successor (high0),
4514 1, low1, 0)))
4515 in_p = 0, low = low0, high = high1;
4516 else
4517 {
4518 /* Canonicalize - [min, x] into - [-, x]. */
4519 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4520 switch (TREE_CODE (TREE_TYPE (low0)))
4521 {
4522 case ENUMERAL_TYPE:
4523 if (TYPE_PRECISION (TREE_TYPE (low0))
4524 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4525 break;
4526 /* FALLTHROUGH */
4527 case INTEGER_TYPE:
4528 if (tree_int_cst_equal (low0,
4529 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4530 low0 = 0;
4531 break;
4532 case POINTER_TYPE:
4533 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4534 && integer_zerop (low0))
4535 low0 = 0;
4536 break;
4537 default:
4538 break;
4539 }
4540
4541 /* Canonicalize - [x, max] into - [x, -]. */
4542 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4543 switch (TREE_CODE (TREE_TYPE (high1)))
4544 {
4545 case ENUMERAL_TYPE:
4546 if (TYPE_PRECISION (TREE_TYPE (high1))
4547 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4548 break;
4549 /* FALLTHROUGH */
4550 case INTEGER_TYPE:
4551 if (tree_int_cst_equal (high1,
4552 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4553 high1 = 0;
4554 break;
4555 case POINTER_TYPE:
4556 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4557 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4558 high1, 1,
4559 build_int_cst (TREE_TYPE (high1), 1),
4560 1)))
4561 high1 = 0;
4562 break;
4563 default:
4564 break;
4565 }
4566
4567 /* The ranges might also be adjacent between the maximum and
4568 minimum values of the given type. For
4569 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4570 return + [x + 1, y - 1]. */
4571 if (low0 == 0 && high1 == 0)
4572 {
4573 low = range_successor (high0);
4574 high = range_predecessor (low1);
4575 if (low == 0 || high == 0)
4576 return 0;
4577
4578 in_p = 1;
4579 }
4580 else
4581 return 0;
4582 }
4583 }
4584 else if (subset)
4585 in_p = 0, low = low0, high = high0;
4586 else
4587 in_p = 0, low = low0, high = high1;
4588 }
4589
4590 *pin_p = in_p, *plow = low, *phigh = high;
4591 return 1;
4592 }
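
/* A small worked case for merge_ranges (informal): merging + [2, 4]
   with + [3, 5] takes the in0_p && in1_p branch; the ranges overlap
   without subsumption, so the result runs from the start of the
   second range to the end of the first: + [3, 4].  */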
4593 \f
4594
4595 /* Subroutine of fold, looking inside expressions of the form
4596 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4597 of the COND_EXPR. This function is being used also to optimize
4598 A op B ? C : A, by reversing the comparison first.
4599
4600 Return a folded expression whose code is not a COND_EXPR
4601 anymore, or NULL_TREE if no folding opportunity is found. */
4602
4603 static tree
4604 fold_cond_expr_with_comparison (location_t loc, tree type,
4605 tree arg0, tree arg1, tree arg2)
4606 {
4607 enum tree_code comp_code = TREE_CODE (arg0);
4608 tree arg00 = TREE_OPERAND (arg0, 0);
4609 tree arg01 = TREE_OPERAND (arg0, 1);
4610 tree arg1_type = TREE_TYPE (arg1);
4611 tree tem;
4612
4613 STRIP_NOPS (arg1);
4614 STRIP_NOPS (arg2);
4615
4616 /* If we have A op 0 ? A : -A, consider applying the following
4617 transformations:
4618
4619 A == 0? A : -A same as -A
4620 A != 0? A : -A same as A
4621 A >= 0? A : -A same as abs (A)
4622 A > 0? A : -A same as abs (A)
4623 A <= 0? A : -A same as -abs (A)
4624 A < 0? A : -A same as -abs (A)
4625
4626 None of these transformations work for modes with signed
4627 zeros. If A is +/-0, the first two transformations will
4628 change the sign of the result (from +0 to -0, or vice
4629 versa). The last four will fix the sign of the result,
4630 even though the original expressions could be positive or
4631 negative, depending on the sign of A.
4632
4633 Note that all these transformations are correct if A is
4634 NaN, since the two alternatives (A and -A) are also NaNs. */
4635 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4636 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4637 ? real_zerop (arg01)
4638 : integer_zerop (arg01))
4639 && ((TREE_CODE (arg2) == NEGATE_EXPR
4640 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4641 /* In the case that A is of the form X-Y, '-A' (arg2) may
4642 have already been folded to Y-X, check for that. */
4643 || (TREE_CODE (arg1) == MINUS_EXPR
4644 && TREE_CODE (arg2) == MINUS_EXPR
4645 && operand_equal_p (TREE_OPERAND (arg1, 0),
4646 TREE_OPERAND (arg2, 1), 0)
4647 && operand_equal_p (TREE_OPERAND (arg1, 1),
4648 TREE_OPERAND (arg2, 0), 0))))
4649 switch (comp_code)
4650 {
4651 case EQ_EXPR:
4652 case UNEQ_EXPR:
4653 tem = fold_convert_loc (loc, arg1_type, arg1);
4654 return pedantic_non_lvalue_loc (loc,
4655 fold_convert_loc (loc, type,
4656 negate_expr (tem)));
4657 case NE_EXPR:
4658 case LTGT_EXPR:
4659 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4660 case UNGE_EXPR:
4661 case UNGT_EXPR:
4662 if (flag_trapping_math)
4663 break;
4664 /* Fall through. */
4665 case GE_EXPR:
4666 case GT_EXPR:
4667 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4668 arg1 = fold_convert_loc (loc, signed_type_for
4669 (TREE_TYPE (arg1)), arg1);
4670 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4671 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4672 case UNLE_EXPR:
4673 case UNLT_EXPR:
4674 if (flag_trapping_math)
4675 break;
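/* Fall through. */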
4676 case LE_EXPR:
4677 case LT_EXPR:
4678 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4679 arg1 = fold_convert_loc (loc, signed_type_for
4680 (TREE_TYPE (arg1)), arg1);
4681 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4682 return negate_expr (fold_convert_loc (loc, type, tem));
4683 default:
4684 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4685 break;
4686 }
4687
4688 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4689 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4690 both transformations are correct when A is NaN: A != 0
4691 is then true, and A == 0 is false. */
4692
4693 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4694 && integer_zerop (arg01) && integer_zerop (arg2))
4695 {
4696 if (comp_code == NE_EXPR)
4697 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4698 else if (comp_code == EQ_EXPR)
4699 return build_zero_cst (type);
4700 }
4701
4702 /* Try some transformations of A op B ? A : B.
4703
4704 A == B? A : B same as B
4705 A != B? A : B same as A
4706 A >= B? A : B same as max (A, B)
4707 A > B? A : B same as max (B, A)
4708 A <= B? A : B same as min (A, B)
4709 A < B? A : B same as min (B, A)
4710
4711 As above, these transformations don't work in the presence
4712 of signed zeros. For example, if A and B are zeros of
4713 opposite sign, the first two transformations will change
4714 the sign of the result. In the last four, the original
4715 expressions give different results for (A=+0, B=-0) and
4716 (A=-0, B=+0), but the transformed expressions do not.
4717
4718 The first two transformations are correct if either A or B
4719 is a NaN. In the first transformation, the condition will
4720 be false, and B will indeed be chosen. In the case of the
4721 second transformation, the condition A != B will be true,
4722 and A will be chosen.
4723
4724 The conversions to max() and min() are not correct if B is
4725 a number and A is not. The conditions in the original
4726 expressions will be false, so all four give B. The min()
4727 and max() versions would give a NaN instead. */
4728 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4729 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4730 /* Avoid these transformations if the COND_EXPR may be used
4731 as an lvalue in the C++ front-end. PR c++/19199. */
4732 && (in_gimple_form
4733 || VECTOR_TYPE_P (type)
4734 || (strcmp (lang_hooks.name, "GNU C++") != 0
4735 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4736 || ! maybe_lvalue_p (arg1)
4737 || ! maybe_lvalue_p (arg2)))
4738 {
4739 tree comp_op0 = arg00;
4740 tree comp_op1 = arg01;
4741 tree comp_type = TREE_TYPE (comp_op0);
4742
4743 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4744 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4745 {
4746 comp_type = type;
4747 comp_op0 = arg1;
4748 comp_op1 = arg2;
4749 }
4750
4751 switch (comp_code)
4752 {
4753 case EQ_EXPR:
4754 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4755 case NE_EXPR:
4756 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4757 case LE_EXPR:
4758 case LT_EXPR:
4759 case UNLE_EXPR:
4760 case UNLT_EXPR:
4761 /* In C++ a ?: expression can be an lvalue, so put the
4762 operand which will be used if they are equal first
4763 so that we can convert this back to the
4764 corresponding COND_EXPR. */
4765 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4766 {
4767 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4768 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4769 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4770 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4771 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4772 comp_op1, comp_op0);
4773 return pedantic_non_lvalue_loc (loc,
4774 fold_convert_loc (loc, type, tem));
4775 }
4776 break;
4777 case GE_EXPR:
4778 case GT_EXPR:
4779 case UNGE_EXPR:
4780 case UNGT_EXPR:
4781 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4782 {
4783 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4784 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4785 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4786 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4787 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4788 comp_op1, comp_op0);
4789 return pedantic_non_lvalue_loc (loc,
4790 fold_convert_loc (loc, type, tem));
4791 }
4792 break;
4793 case UNEQ_EXPR:
4794 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4795 return pedantic_non_lvalue_loc (loc,
4796 fold_convert_loc (loc, type, arg2));
4797 break;
4798 case LTGT_EXPR:
4799 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4800 return pedantic_non_lvalue_loc (loc,
4801 fold_convert_loc (loc, type, arg1));
4802 break;
4803 default:
4804 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4805 break;
4806 }
4807 }
4808
4809 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4810 we might still be able to simplify this. For example,
4811 if C1 is one less or one more than C2, this might have started
4812 out as a MIN or MAX and been transformed by this function.
4813 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4814
4815 if (INTEGRAL_TYPE_P (type)
4816 && TREE_CODE (arg01) == INTEGER_CST
4817 && TREE_CODE (arg2) == INTEGER_CST)
4818 switch (comp_code)
4819 {
4820 case EQ_EXPR:
4821 if (TREE_CODE (arg1) == INTEGER_CST)
4822 break;
4823 /* We can replace A with C1 in this case. */
4824 arg1 = fold_convert_loc (loc, type, arg01);
4825 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4826
4827 case LT_EXPR:
4828 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4829 MIN_EXPR, to preserve the signedness of the comparison. */
4830 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4831 OEP_ONLY_CONST)
4832 && operand_equal_p (arg01,
4833 const_binop (PLUS_EXPR, arg2,
4834 build_int_cst (type, 1)),
4835 OEP_ONLY_CONST))
4836 {
4837 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4838 fold_convert_loc (loc, TREE_TYPE (arg00),
4839 arg2));
4840 return pedantic_non_lvalue_loc (loc,
4841 fold_convert_loc (loc, type, tem));
4842 }
4843 break;
4844
4845 case LE_EXPR:
4846 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4847 as above. */
4848 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4849 OEP_ONLY_CONST)
4850 && operand_equal_p (arg01,
4851 const_binop (MINUS_EXPR, arg2,
4852 build_int_cst (type, 1)),
4853 OEP_ONLY_CONST))
4854 {
4855 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4856 fold_convert_loc (loc, TREE_TYPE (arg00),
4857 arg2));
4858 return pedantic_non_lvalue_loc (loc,
4859 fold_convert_loc (loc, type, tem));
4860 }
4861 break;
4862
4863 case GT_EXPR:
4864 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4865 MAX_EXPR, to preserve the signedness of the comparison. */
4866 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4867 OEP_ONLY_CONST)
4868 && operand_equal_p (arg01,
4869 const_binop (MINUS_EXPR, arg2,
4870 build_int_cst (type, 1)),
4871 OEP_ONLY_CONST))
4872 {
4873 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4874 fold_convert_loc (loc, TREE_TYPE (arg00),
4875 arg2));
4876 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4877 }
4878 break;
4879
4880 case GE_EXPR:
4881 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4882 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4883 OEP_ONLY_CONST)
4884 && operand_equal_p (arg01,
4885 const_binop (PLUS_EXPR, arg2,
4886 build_int_cst (type, 1)),
4887 OEP_ONLY_CONST))
4888 {
4889 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4890 fold_convert_loc (loc, TREE_TYPE (arg00),
4891 arg2));
4892 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4893 }
4894 break;
4895 case NE_EXPR:
4896 break;
4897 default:
4898 gcc_unreachable ();
4899 }
4900
4901 return NULL_TREE;
4902 }
4903
4904
4905 \f
4906 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4907 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4908 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4909 false) >= 2)
4910 #endif
4911
4912 /* EXP is some logical combination of boolean tests. See if we can
4913 merge it into some range test. Return the new tree if so. */
4914
4915 static tree
4916 fold_range_test (location_t loc, enum tree_code code, tree type,
4917 tree op0, tree op1)
4918 {
4919 int or_op = (code == TRUTH_ORIF_EXPR
4920 || code == TRUTH_OR_EXPR);
4921 int in0_p, in1_p, in_p;
4922 tree low0, low1, low, high0, high1, high;
4923 bool strict_overflow_p = false;
4924 tree tem, lhs, rhs;
4925 const char * const warnmsg = G_("assuming signed overflow does not occur "
4926 "when simplifying range test");
4927
4928 if (!INTEGRAL_TYPE_P (type))
4929 return 0;
4930
4931 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4932 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4933
4934 /* If this is an OR operation, invert both sides; we will invert
4935 again at the end. */
4936 if (or_op)
4937 in0_p = ! in0_p, in1_p = ! in1_p;
4938
4939 /* If both expressions are the same, if we can merge the ranges, and we
4940 can build the range test, return it or it inverted. If one of the
4941 ranges is always true or always false, consider it to be the same
4942 expression as the other. */
4943 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4944 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4945 in1_p, low1, high1)
4946 && 0 != (tem = (build_range_check (loc, type,
4947 lhs != 0 ? lhs
4948 : rhs != 0 ? rhs : integer_zero_node,
4949 in_p, low, high))))
4950 {
4951 if (strict_overflow_p)
4952 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4953 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4954 }
4955
4956 /* On machines where the branch cost is expensive, if this is a
4957 short-circuited branch and the underlying object on both sides
4958 is the same, make a non-short-circuit operation. */
4959 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4960 && lhs != 0 && rhs != 0
4961 && (code == TRUTH_ANDIF_EXPR
4962 || code == TRUTH_ORIF_EXPR)
4963 && operand_equal_p (lhs, rhs, 0))
4964 {
4965 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4966 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4967 which cases we can't do this. */
4968 if (simple_operand_p (lhs))
4969 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4970 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4971 type, op0, op1);
4972
4973 else if (!lang_hooks.decls.global_bindings_p ()
4974 && !CONTAINS_PLACEHOLDER_P (lhs))
4975 {
4976 tree common = save_expr (lhs);
4977
4978 if (0 != (lhs = build_range_check (loc, type, common,
4979 or_op ? ! in0_p : in0_p,
4980 low0, high0))
4981 && (0 != (rhs = build_range_check (loc, type, common,
4982 or_op ? ! in1_p : in1_p,
4983 low1, high1))))
4984 {
4985 if (strict_overflow_p)
4986 fold_overflow_warning (warnmsg,
4987 WARN_STRICT_OVERFLOW_COMPARISON);
4988 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4989 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4990 type, lhs, rhs);
4991 }
4992 }
4993 }
4994
4995 return 0;
4996 }
4997 \f
4998 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
4999 P-bit value. Arrange things so the extra bits will be set to zero if and
5000 only if C is sign-extended to its full width. If MASK is nonzero,
5001 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5002
5003 static tree
5004 unextend (tree c, int p, int unsignedp, tree mask)
5005 {
5006 tree type = TREE_TYPE (c);
5007 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5008 tree temp;
5009
5010 if (p == modesize || unsignedp)
5011 return c;
5012
5013 /* We work by getting just the sign bit into the low-order bit, then
5014 into the high-order bit, then sign-extend. We then XOR that value
5015 with C. */
5016 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5017
5018 /* We must use a signed type in order to get an arithmetic right shift.
5019 However, we must also avoid introducing accidental overflows, so that
5020 a subsequent call to integer_zerop will work. Hence we must
5021 do the type conversion here. At this point, the constant is either
5022 zero or one, and the conversion to a signed type can never overflow.
5023 We could get an overflow if this conversion is done anywhere else. */
5024 if (TYPE_UNSIGNED (type))
5025 temp = fold_convert (signed_type_for (type), temp);
5026
5027 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5028 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5029 if (mask != 0)
5030 temp = const_binop (BIT_AND_EXPR, temp,
5031 fold_convert (TREE_TYPE (c), mask));
5032 /* If necessary, convert the type back to match the type of C. */
5033 if (TYPE_UNSIGNED (type))
5034 temp = fold_convert (type, temp);
5035
5036 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5037 }
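
/* Numeric illustration of unextend (informal; 8-bit mode, P == 4,
   MASK zero): the sign-extended constant 0xfc (-4 in a 4-bit field)
   unextends to 0x0c, whose extra bits are zero, while the raw value
   0x0c yields 0xfc, flagging that it was not sign-extended.  */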
5038 \f
5039 /* For an expression that has the form
5040 (A && B) || ~B
5041 or
5042 (A || B) && ~B,
5043 we can drop one of the inner expressions and simplify to
5044 A || ~B
5045 or
5046 A && ~B
5047 LOC is the location of the resulting expression. OP is the inner
5048 logical operation; the left-hand side in the examples above, while CMPOP
5049 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5050 removing a condition that guards another, as in
5051 (A != NULL && A->...) || A == NULL
5052 which we must not transform. If RHS_ONLY is true, only eliminate the
5053 right-most operand of the inner logical operation. */
5054
5055 static tree
5056 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5057 bool rhs_only)
5058 {
5059 tree type = TREE_TYPE (cmpop);
5060 enum tree_code code = TREE_CODE (cmpop);
5061 enum tree_code truthop_code = TREE_CODE (op);
5062 tree lhs = TREE_OPERAND (op, 0);
5063 tree rhs = TREE_OPERAND (op, 1);
5064 tree orig_lhs = lhs, orig_rhs = rhs;
5065 enum tree_code rhs_code = TREE_CODE (rhs);
5066 enum tree_code lhs_code = TREE_CODE (lhs);
5067 enum tree_code inv_code;
5068
5069 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5070 return NULL_TREE;
5071
5072 if (TREE_CODE_CLASS (code) != tcc_comparison)
5073 return NULL_TREE;
5074
5075 if (rhs_code == truthop_code)
5076 {
5077 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5078 if (newrhs != NULL_TREE)
5079 {
5080 rhs = newrhs;
5081 rhs_code = TREE_CODE (rhs);
5082 }
5083 }
5084 if (lhs_code == truthop_code && !rhs_only)
5085 {
5086 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5087 if (newlhs != NULL_TREE)
5088 {
5089 lhs = newlhs;
5090 lhs_code = TREE_CODE (lhs);
5091 }
5092 }
5093
5094 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5095 if (inv_code == rhs_code
5096 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5097 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5098 return lhs;
5099 if (!rhs_only && inv_code == lhs_code
5100 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5101 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5102 return rhs;
5103 if (rhs != orig_rhs || lhs != orig_lhs)
5104 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5105 lhs, rhs);
5106 return NULL_TREE;
5107 }
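
/* A concrete instance for merge_truthop_with_opposite_arm (informal):
   given (a < b && c) || a >= b, the inverted comparison of a >= b
   matches the left arm a < b of the inner TRUTH_ANDIF_EXPR, so that
   arm is dropped and the caller can build c || a >= b.  */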
5108
5109 /* Find ways of folding logical expressions of LHS and RHS:
5110 Try to merge two comparisons to the same innermost item.
5111 Look for range tests like "ch >= '0' && ch <= '9'".
5112 Look for combinations of simple terms on machines with expensive branches
5113 and evaluate the RHS unconditionally.
5114
5115 For example, if we have p->a == 2 && p->b == 4 and we can make an
5116 object large enough to span both A and B, we can do this with a comparison
5117 against the object ANDed with the a mask.
5118
5119 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5120 operations to do this with one comparison.
5121
5122 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5123 function and the one above.
5124
5125 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5126 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5127
5128 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5129 two operands.
5130
5131 We return the simplified tree or 0 if no optimization is possible. */
5132
5133 static tree
5134 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5135 tree lhs, tree rhs)
5136 {
5137 /* If this is the "or" of two comparisons, we can do something if
5138 the comparisons are NE_EXPR. If this is the "and", we can do something
5139 if the comparisons are EQ_EXPR. I.e.,
5140 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5141
5142 WANTED_CODE is this operation code. For single bit fields, we can
5143 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5144 comparison for one-bit fields. */
5145
5146 enum tree_code wanted_code;
5147 enum tree_code lcode, rcode;
5148 tree ll_arg, lr_arg, rl_arg, rr_arg;
5149 tree ll_inner, lr_inner, rl_inner, rr_inner;
5150 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5151 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5152 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5153 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5154 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5155 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5156 enum machine_mode lnmode, rnmode;
5157 tree ll_mask, lr_mask, rl_mask, rr_mask;
5158 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5159 tree l_const, r_const;
5160 tree lntype, rntype, result;
5161 HOST_WIDE_INT first_bit, end_bit;
5162 int volatilep;
5163
5164 /* Start by getting the comparison codes. Fail if anything is volatile.
5165 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5166 it were surrounded with a NE_EXPR. */
5167
5168 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5169 return 0;
5170
5171 lcode = TREE_CODE (lhs);
5172 rcode = TREE_CODE (rhs);
5173
5174 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5175 {
5176 lhs = build2 (NE_EXPR, truth_type, lhs,
5177 build_int_cst (TREE_TYPE (lhs), 0));
5178 lcode = NE_EXPR;
5179 }
5180
5181 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5182 {
5183 rhs = build2 (NE_EXPR, truth_type, rhs,
5184 build_int_cst (TREE_TYPE (rhs), 0));
5185 rcode = NE_EXPR;
5186 }
5187
5188 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5189 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5190 return 0;
5191
5192 ll_arg = TREE_OPERAND (lhs, 0);
5193 lr_arg = TREE_OPERAND (lhs, 1);
5194 rl_arg = TREE_OPERAND (rhs, 0);
5195 rr_arg = TREE_OPERAND (rhs, 1);
5196
5197 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5198 if (simple_operand_p (ll_arg)
5199 && simple_operand_p (lr_arg))
5200 {
5201 if (operand_equal_p (ll_arg, rl_arg, 0)
5202 && operand_equal_p (lr_arg, rr_arg, 0))
5203 {
5204 result = combine_comparisons (loc, code, lcode, rcode,
5205 truth_type, ll_arg, lr_arg);
5206 if (result)
5207 return result;
5208 }
5209 else if (operand_equal_p (ll_arg, rr_arg, 0)
5210 && operand_equal_p (lr_arg, rl_arg, 0))
5211 {
5212 result = combine_comparisons (loc, code, lcode,
5213 swap_tree_comparison (rcode),
5214 truth_type, ll_arg, lr_arg);
5215 if (result)
5216 return result;
5217 }
5218 }
5219
5220 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5221 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5222
5223 /* If the RHS can be evaluated unconditionally and its operands are
5224 simple, it wins to evaluate the RHS unconditionally on machines
5225 with expensive branches. In this case, this isn't a comparison
5226 that can be merged. */
5227
5228 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5229 false) >= 2
5230 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5231 && simple_operand_p (rl_arg)
5232 && simple_operand_p (rr_arg))
5233 {
5234 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5235 if (code == TRUTH_OR_EXPR
5236 && lcode == NE_EXPR && integer_zerop (lr_arg)
5237 && rcode == NE_EXPR && integer_zerop (rr_arg)
5238 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5239 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5240 return build2_loc (loc, NE_EXPR, truth_type,
5241 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5242 ll_arg, rl_arg),
5243 build_int_cst (TREE_TYPE (ll_arg), 0));
5244
5245 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5246 if (code == TRUTH_AND_EXPR
5247 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5248 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5249 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5250 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5251 return build2_loc (loc, EQ_EXPR, truth_type,
5252 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5253 ll_arg, rl_arg),
5254 build_int_cst (TREE_TYPE (ll_arg), 0));
5255 }
5256
5257 /* See if the comparisons can be merged. Then get all the parameters for
5258 each side. */
5259
5260 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5261 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5262 return 0;
5263
5264 volatilep = 0;
5265 ll_inner = decode_field_reference (loc, ll_arg,
5266 &ll_bitsize, &ll_bitpos, &ll_mode,
5267 &ll_unsignedp, &volatilep, &ll_mask,
5268 &ll_and_mask);
5269 lr_inner = decode_field_reference (loc, lr_arg,
5270 &lr_bitsize, &lr_bitpos, &lr_mode,
5271 &lr_unsignedp, &volatilep, &lr_mask,
5272 &lr_and_mask);
5273 rl_inner = decode_field_reference (loc, rl_arg,
5274 &rl_bitsize, &rl_bitpos, &rl_mode,
5275 &rl_unsignedp, &volatilep, &rl_mask,
5276 &rl_and_mask);
5277 rr_inner = decode_field_reference (loc, rr_arg,
5278 &rr_bitsize, &rr_bitpos, &rr_mode,
5279 &rr_unsignedp, &volatilep, &rr_mask,
5280 &rr_and_mask);
5281
5282   /* The inner operation on the lhs of each comparison must be the
5283      same if we are to be able to do anything.  Then see if we have
5284      constants.  If not, the same must be true for
5285      the rhs's.  */
5286 if (volatilep || ll_inner == 0 || rl_inner == 0
5287 || ! operand_equal_p (ll_inner, rl_inner, 0))
5288 return 0;
5289
5290 if (TREE_CODE (lr_arg) == INTEGER_CST
5291 && TREE_CODE (rr_arg) == INTEGER_CST)
5292 l_const = lr_arg, r_const = rr_arg;
5293 else if (lr_inner == 0 || rr_inner == 0
5294 || ! operand_equal_p (lr_inner, rr_inner, 0))
5295 return 0;
5296 else
5297 l_const = r_const = 0;
5298
5299 /* If either comparison code is not correct for our logical operation,
5300 fail. However, we can convert a one-bit comparison against zero into
5301 the opposite comparison against that bit being set in the field. */
5302
5303 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5304 if (lcode != wanted_code)
5305 {
5306 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5307 {
5308 /* Make the left operand unsigned, since we are only interested
5309 in the value of one bit. Otherwise we are doing the wrong
5310 thing below. */
5311 ll_unsignedp = 1;
5312 l_const = ll_mask;
5313 }
5314 else
5315 return 0;
5316 }
5317
5318 /* This is analogous to the code for l_const above. */
5319 if (rcode != wanted_code)
5320 {
5321 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5322 {
5323 rl_unsignedp = 1;
5324 r_const = rl_mask;
5325 }
5326 else
5327 return 0;
5328 }
5329
5330 /* See if we can find a mode that contains both fields being compared on
5331 the left. If we can't, fail. Otherwise, update all constants and masks
5332 to be relative to a field of that size. */
5333 first_bit = MIN (ll_bitpos, rl_bitpos);
5334 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5335 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5336 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5337 volatilep);
5338 if (lnmode == VOIDmode)
5339 return 0;
5340
5341 lnbitsize = GET_MODE_BITSIZE (lnmode);
5342 lnbitpos = first_bit & ~ (lnbitsize - 1);
5343 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5344 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5345
5346 if (BYTES_BIG_ENDIAN)
5347 {
5348 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5349 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5350 }
5351
5352 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5353 size_int (xll_bitpos));
5354 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5355 size_int (xrl_bitpos));
5356
5357 if (l_const)
5358 {
5359 l_const = fold_convert_loc (loc, lntype, l_const);
5360 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5361 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5362 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5363 fold_build1_loc (loc, BIT_NOT_EXPR,
5364 lntype, ll_mask))))
5365 {
5366 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5367
5368 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5369 }
5370 }
5371 if (r_const)
5372 {
5373 r_const = fold_convert_loc (loc, lntype, r_const);
5374 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5375 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5376 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5377 fold_build1_loc (loc, BIT_NOT_EXPR,
5378 lntype, rl_mask))))
5379 {
5380 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5381
5382 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5383 }
5384 }
5385
5386   /* If the right sides are not constant, do the same for them.  Also,
5387 disallow this optimization if a size or signedness mismatch occurs
5388 between the left and right sides. */
5389 if (l_const == 0)
5390 {
5391 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5392 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5393 /* Make sure the two fields on the right
5394 correspond to the left without being swapped. */
5395 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5396 return 0;
5397
5398 first_bit = MIN (lr_bitpos, rr_bitpos);
5399 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5400 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5401 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5402 volatilep);
5403 if (rnmode == VOIDmode)
5404 return 0;
5405
5406 rnbitsize = GET_MODE_BITSIZE (rnmode);
5407 rnbitpos = first_bit & ~ (rnbitsize - 1);
5408 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5409 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5410
5411 if (BYTES_BIG_ENDIAN)
5412 {
5413 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5414 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5415 }
5416
5417 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5418 rntype, lr_mask),
5419 size_int (xlr_bitpos));
5420 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5421 rntype, rr_mask),
5422 size_int (xrr_bitpos));
5423
5424 /* Make a mask that corresponds to both fields being compared.
5425 Do this for both items being compared. If the operands are the
5426 same size and the bits being compared are in the same position
5427 then we can do this by masking both and comparing the masked
5428 results. */
5429 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5430 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5431 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5432 {
5433 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5434 ll_unsignedp || rl_unsignedp);
5435 if (! all_ones_mask_p (ll_mask, lnbitsize))
5436 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5437
5438 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5439 lr_unsignedp || rr_unsignedp);
5440 if (! all_ones_mask_p (lr_mask, rnbitsize))
5441 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5442
5443 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5444 }
5445
5446       /* One more transformation is possible: if both pairs of fields
5447 	  being compared are adjacent, we may be able to make a wider
5448 	  field containing them both.
5449
5450 Note that we still must mask the lhs/rhs expressions. Furthermore,
5451 the mask must be shifted to account for the shift done by
5452 make_bit_field_ref. */
5453 if ((ll_bitsize + ll_bitpos == rl_bitpos
5454 && lr_bitsize + lr_bitpos == rr_bitpos)
5455 || (ll_bitpos == rl_bitpos + rl_bitsize
5456 && lr_bitpos == rr_bitpos + rr_bitsize))
5457 {
5458 tree type;
5459
5460 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5461 ll_bitsize + rl_bitsize,
5462 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5463 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5464 lr_bitsize + rr_bitsize,
5465 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5466
5467 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5468 size_int (MIN (xll_bitpos, xrl_bitpos)));
5469 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5470 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5471
5472 /* Convert to the smaller type before masking out unwanted bits. */
5473 type = lntype;
5474 if (lntype != rntype)
5475 {
5476 if (lnbitsize > rnbitsize)
5477 {
5478 lhs = fold_convert_loc (loc, rntype, lhs);
5479 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5480 type = rntype;
5481 }
5482 else if (lnbitsize < rnbitsize)
5483 {
5484 rhs = fold_convert_loc (loc, lntype, rhs);
5485 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5486 type = lntype;
5487 }
5488 }
5489
5490 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5491 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5492
5493 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5494 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5495
5496 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5497 }
5498
5499 return 0;
5500 }
5501
5502 /* Handle the case of comparisons with constants. If there is something in
5503 common between the masks, those bits of the constants must be the same.
5504 If not, the condition is always false. Test for this to avoid generating
5505 incorrect code below. */
5506 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5507 if (! integer_zerop (result)
5508 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5509 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5510 {
5511 if (wanted_code == NE_EXPR)
5512 {
5513 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5514 return constant_boolean_node (true, truth_type);
5515 }
5516 else
5517 {
5518 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5519 return constant_boolean_node (false, truth_type);
5520 }
5521 }
5522
5523 /* Construct the expression we will return. First get the component
5524 reference we will make. Unless the mask is all ones the width of
5525 that field, perform the mask operation. Then compare with the
5526 merged constant. */
5527 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5528 ll_unsignedp || rl_unsignedp);
5529
5530 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5531 if (! all_ones_mask_p (ll_mask, lnbitsize))
5532 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5533
5534 return build2_loc (loc, wanted_code, truth_type, result,
5535 const_binop (BIT_IOR_EXPR, l_const, r_const));
5536 }
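
/* A sketch of the bit-field merging done by fold_truth_andor_1
   (illustrative only, kept out of the build; the exact mask, shift
   and load width are target- and layout-dependent):  */
#if 0
struct two_fields { unsigned a : 4; unsigned b : 4; };

static int
both_match (struct two_fields s)
{
  /* The two adjacent bit-field tests below can be merged into a
     single load, mask and compare against the combined constant.  */
  return s.a == 2 && s.b == 3;
}
#endif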
5537 \f
5538 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5539 constant. */
5540
5541 static tree
5542 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5543 tree op0, tree op1)
5544 {
5545 tree arg0 = op0;
5546 enum tree_code op_code;
5547 tree comp_const;
5548 tree minmax_const;
5549 int consts_equal, consts_lt;
5550 tree inner;
5551
5552 STRIP_SIGN_NOPS (arg0);
5553
5554 op_code = TREE_CODE (arg0);
5555 minmax_const = TREE_OPERAND (arg0, 1);
5556 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5557 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5558 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5559 inner = TREE_OPERAND (arg0, 0);
5560
5561   /* If something does not permit us to optimize, return NULL_TREE.  */
5562 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5563 || TREE_CODE (comp_const) != INTEGER_CST
5564 || TREE_OVERFLOW (comp_const)
5565 || TREE_CODE (minmax_const) != INTEGER_CST
5566 || TREE_OVERFLOW (minmax_const))
5567 return NULL_TREE;
5568
5569 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5570 and GT_EXPR, doing the rest with recursive calls using logical
5571 simplifications. */
5572 switch (code)
5573 {
5574 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5575 {
5576 tree tem
5577 = optimize_minmax_comparison (loc,
5578 invert_tree_comparison (code, false),
5579 type, op0, op1);
5580 if (tem)
5581 return invert_truthvalue_loc (loc, tem);
5582 return NULL_TREE;
5583 }
5584
5585 case GE_EXPR:
5586 return
5587 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5588 optimize_minmax_comparison
5589 (loc, EQ_EXPR, type, arg0, comp_const),
5590 optimize_minmax_comparison
5591 (loc, GT_EXPR, type, arg0, comp_const));
5592
5593 case EQ_EXPR:
5594 if (op_code == MAX_EXPR && consts_equal)
5595 /* MAX (X, 0) == 0 -> X <= 0 */
5596 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5597
5598 else if (op_code == MAX_EXPR && consts_lt)
5599 /* MAX (X, 0) == 5 -> X == 5 */
5600 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5601
5602 else if (op_code == MAX_EXPR)
5603 /* MAX (X, 0) == -1 -> false */
5604 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5605
5606 else if (consts_equal)
5607 /* MIN (X, 0) == 0 -> X >= 0 */
5608 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5609
5610 else if (consts_lt)
5611 /* MIN (X, 0) == 5 -> false */
5612 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5613
5614 else
5615 /* MIN (X, 0) == -1 -> X == -1 */
5616 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5617
5618 case GT_EXPR:
5619 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5620 /* MAX (X, 0) > 0 -> X > 0
5621 MAX (X, 0) > 5 -> X > 5 */
5622 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5623
5624 else if (op_code == MAX_EXPR)
5625 /* MAX (X, 0) > -1 -> true */
5626 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5627
5628 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5629 /* MIN (X, 0) > 0 -> false
5630 MIN (X, 0) > 5 -> false */
5631 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5632
5633 else
5634 /* MIN (X, 0) > -1 -> X > -1 */
5635 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5636
5637 default:
5638 return NULL_TREE;
5639 }
5640 }
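
/* A small check of the EQ_EXPR case above (illustrative only, kept
   out of the build; the MAX macro here is hypothetical):  */
#if 0
#define MAX(x, y) ((x) > (y) ? (x) : (y))

static int
max_eq_demo (int x)
{
  /* MAX (x, 0) == 0 folds to x <= 0 (the consts_equal case), while
     MAX (x, 0) == -1 folds to constant false.  */
  return MAX (x, 0) == 0;  /* Equivalent to x <= 0.  */
}
#endif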
5641 \f
5642 /* T is an integer expression that is being multiplied by, divided by,
5643    or taken modulo a constant C (CODE says which operation and what kind
5644    of divide or modulus).  See if we can eliminate that operation by folding it with
5645 other operations already in T. WIDE_TYPE, if non-null, is a type that
5646 should be used for the computation if wider than our type.
5647
5648 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5649 (X * 2) + (Y * 4). We must, however, be assured that either the original
5650 expression would not overflow or that overflow is undefined for the type
5651 in the language in question.
5652
5653 If we return a non-null expression, it is an equivalent form of the
5654 original computation, but need not be in the original type.
5655
5656    We set *STRICT_OVERFLOW_P to true if the return value depends on
5657 signed overflow being undefined. Otherwise we do not change
5658 *STRICT_OVERFLOW_P. */
5659
5660 static tree
5661 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5662 bool *strict_overflow_p)
5663 {
5664 /* To avoid exponential search depth, refuse to allow recursion past
5665 three levels. Beyond that (1) it's highly unlikely that we'll find
5666 something interesting and (2) we've probably processed it before
5667 when we built the inner expression. */
5668
5669 static int depth;
5670 tree ret;
5671
5672 if (depth > 3)
5673 return NULL;
5674
5675 depth++;
5676 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5677 depth--;
5678
5679 return ret;
5680 }
5681
5682 static tree
5683 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5684 bool *strict_overflow_p)
5685 {
5686 tree type = TREE_TYPE (t);
5687 enum tree_code tcode = TREE_CODE (t);
5688 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5689 > GET_MODE_SIZE (TYPE_MODE (type)))
5690 ? wide_type : type);
5691 tree t1, t2;
5692 int same_p = tcode == code;
5693 tree op0 = NULL_TREE, op1 = NULL_TREE;
5694 bool sub_strict_overflow_p;
5695
5696 /* Don't deal with constants of zero here; they confuse the code below. */
5697 if (integer_zerop (c))
5698 return NULL_TREE;
5699
5700 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5701 op0 = TREE_OPERAND (t, 0);
5702
5703 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5704 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5705
5706 /* Note that we need not handle conditional operations here since fold
5707 already handles those cases. So just do arithmetic here. */
5708 switch (tcode)
5709 {
5710 case INTEGER_CST:
5711 /* For a constant, we can always simplify if we are a multiply
5712 or (for divide and modulus) if it is a multiple of our constant. */
5713 if (code == MULT_EXPR
5714 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5715 return const_binop (code, fold_convert (ctype, t),
5716 fold_convert (ctype, c));
5717 break;
5718
5719 CASE_CONVERT: case NON_LVALUE_EXPR:
5720 /* If op0 is an expression ... */
5721 if ((COMPARISON_CLASS_P (op0)
5722 || UNARY_CLASS_P (op0)
5723 || BINARY_CLASS_P (op0)
5724 || VL_EXP_CLASS_P (op0)
5725 || EXPRESSION_CLASS_P (op0))
5726 /* ... and has wrapping overflow, and its type is smaller
5727 than ctype, then we cannot pass through as widening. */
5728 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5729 && (TYPE_PRECISION (ctype)
5730 > TYPE_PRECISION (TREE_TYPE (op0))))
5731 /* ... or this is a truncation (t is narrower than op0),
5732 then we cannot pass through this narrowing. */
5733 || (TYPE_PRECISION (type)
5734 < TYPE_PRECISION (TREE_TYPE (op0)))
5735 /* ... or signedness changes for division or modulus,
5736 then we cannot pass through this conversion. */
5737 || (code != MULT_EXPR
5738 && (TYPE_UNSIGNED (ctype)
5739 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5740 /* ... or has undefined overflow while the converted to
5741 type has not, we cannot do the operation in the inner type
5742 as that would introduce undefined overflow. */
5743 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5744 && !TYPE_OVERFLOW_UNDEFINED (type))))
5745 break;
5746
5747 /* Pass the constant down and see if we can make a simplification. If
5748 we can, replace this expression with the inner simplification for
5749 possible later conversion to our or some other type. */
5750 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5751 && TREE_CODE (t2) == INTEGER_CST
5752 && !TREE_OVERFLOW (t2)
5753 && (0 != (t1 = extract_muldiv (op0, t2, code,
5754 code == MULT_EXPR
5755 ? ctype : NULL_TREE,
5756 strict_overflow_p))))
5757 return t1;
5758 break;
5759
5760 case ABS_EXPR:
5761 /* If widening the type changes it from signed to unsigned, then we
5762 must avoid building ABS_EXPR itself as unsigned. */
5763 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5764 {
5765 tree cstype = (*signed_type_for) (ctype);
5766 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5767 != 0)
5768 {
5769 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5770 return fold_convert (ctype, t1);
5771 }
5772 break;
5773 }
5774 /* If the constant is negative, we cannot simplify this. */
5775 if (tree_int_cst_sgn (c) == -1)
5776 break;
5777 /* FALLTHROUGH */
5778 case NEGATE_EXPR:
5779 /* For division and modulus, type can't be unsigned, as e.g.
5780 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5781 For signed types, even with wrapping overflow, this is fine. */
5782 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5783 break;
5784 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5785 != 0)
5786 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5787 break;
5788
5789 case MIN_EXPR: case MAX_EXPR:
5790 /* If widening the type changes the signedness, then we can't perform
5791 this optimization as that changes the result. */
5792 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5793 break;
5794
5795 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5796 sub_strict_overflow_p = false;
5797 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5798 &sub_strict_overflow_p)) != 0
5799 && (t2 = extract_muldiv (op1, c, code, wide_type,
5800 &sub_strict_overflow_p)) != 0)
5801 {
5802 if (tree_int_cst_sgn (c) < 0)
5803 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5804 if (sub_strict_overflow_p)
5805 *strict_overflow_p = true;
5806 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5807 fold_convert (ctype, t2));
5808 }
5809 break;
5810
5811 case LSHIFT_EXPR: case RSHIFT_EXPR:
5812 /* If the second operand is constant, this is a multiplication
5813 	 or floor division by a power of two, so we can treat it that
5814 way unless the multiplier or divisor overflows. Signed
5815 left-shift overflow is implementation-defined rather than
5816 undefined in C90, so do not convert signed left shift into
5817 multiplication. */
5818 if (TREE_CODE (op1) == INTEGER_CST
5819 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5820 /* const_binop may not detect overflow correctly,
5821 so check for it explicitly here. */
5822 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5823 && 0 != (t1 = fold_convert (ctype,
5824 const_binop (LSHIFT_EXPR,
5825 size_one_node,
5826 op1)))
5827 && !TREE_OVERFLOW (t1))
5828 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5829 ? MULT_EXPR : FLOOR_DIV_EXPR,
5830 ctype,
5831 fold_convert (ctype, op0),
5832 t1),
5833 c, code, wide_type, strict_overflow_p);
5834 break;
5835
5836 case PLUS_EXPR: case MINUS_EXPR:
5837 /* See if we can eliminate the operation on both sides. If we can, we
5838 can return a new PLUS or MINUS. If we can't, the only remaining
5839 cases where we can do anything are if the second operand is a
5840 constant. */
5841 sub_strict_overflow_p = false;
5842 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5843 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5844 if (t1 != 0 && t2 != 0
5845 && (code == MULT_EXPR
5846 /* If not multiplication, we can only do this if both operands
5847 are divisible by c. */
5848 || (multiple_of_p (ctype, op0, c)
5849 && multiple_of_p (ctype, op1, c))))
5850 {
5851 if (sub_strict_overflow_p)
5852 *strict_overflow_p = true;
5853 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5854 fold_convert (ctype, t2));
5855 }
5856
5857 /* If this was a subtraction, negate OP1 and set it to be an addition.
5858 This simplifies the logic below. */
5859 if (tcode == MINUS_EXPR)
5860 {
5861 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5862 /* If OP1 was not easily negatable, the constant may be OP0. */
5863 if (TREE_CODE (op0) == INTEGER_CST)
5864 {
5865 tree tem = op0;
5866 op0 = op1;
5867 op1 = tem;
5868 tem = t1;
5869 t1 = t2;
5870 t2 = tem;
5871 }
5872 }
5873
5874 if (TREE_CODE (op1) != INTEGER_CST)
5875 break;
5876
5877 /* If either OP1 or C are negative, this optimization is not safe for
5878 some of the division and remainder types while for others we need
5879 to change the code. */
5880 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5881 {
5882 if (code == CEIL_DIV_EXPR)
5883 code = FLOOR_DIV_EXPR;
5884 else if (code == FLOOR_DIV_EXPR)
5885 code = CEIL_DIV_EXPR;
5886 else if (code != MULT_EXPR
5887 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5888 break;
5889 }
5890
5891 /* If it's a multiply or a division/modulus operation of a multiple
5892 of our constant, do the operation and verify it doesn't overflow. */
5893 if (code == MULT_EXPR
5894 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5895 {
5896 op1 = const_binop (code, fold_convert (ctype, op1),
5897 fold_convert (ctype, c));
5898 /* We allow the constant to overflow with wrapping semantics. */
5899 if (op1 == 0
5900 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5901 break;
5902 }
5903 else
5904 break;
5905
5906 /* If we have an unsigned type, we cannot widen the operation since it
5907 will change the result if the original computation overflowed. */
5908 if (TYPE_UNSIGNED (ctype) && ctype != type)
5909 break;
5910
5911 /* If we were able to eliminate our operation from the first side,
5912 apply our operation to the second side and reform the PLUS. */
5913 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5914 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5915
5916 /* The last case is if we are a multiply. In that case, we can
5917 apply the distributive law to commute the multiply and addition
5918 if the multiplication of the constants doesn't overflow
5919 and overflow is defined. With undefined overflow
5920 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5921 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5922 return fold_build2 (tcode, ctype,
5923 fold_build2 (code, ctype,
5924 fold_convert (ctype, op0),
5925 fold_convert (ctype, c)),
5926 op1);
5927
5928 break;
5929
5930 case MULT_EXPR:
5931 /* We have a special case here if we are doing something like
5932 (C * 8) % 4 since we know that's zero. */
5933 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5934 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5935 /* If the multiplication can overflow we cannot optimize this. */
5936 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5937 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5938 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5939 {
5940 *strict_overflow_p = true;
5941 return omit_one_operand (type, integer_zero_node, op0);
5942 }
5943
5944 /* ... fall through ... */
5945
5946 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5947 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5948 /* If we can extract our operation from the LHS, do so and return a
5949 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5950 do something only if the second operand is a constant. */
5951 if (same_p
5952 && (t1 = extract_muldiv (op0, c, code, wide_type,
5953 strict_overflow_p)) != 0)
5954 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5955 fold_convert (ctype, op1));
5956 else if (tcode == MULT_EXPR && code == MULT_EXPR
5957 && (t1 = extract_muldiv (op1, c, code, wide_type,
5958 strict_overflow_p)) != 0)
5959 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5960 fold_convert (ctype, t1));
5961 else if (TREE_CODE (op1) != INTEGER_CST)
5962 return 0;
5963
5964 /* If these are the same operation types, we can associate them
5965 assuming no overflow. */
5966 if (tcode == code)
5967 {
5968 bool overflow_p = false;
5969 bool overflow_mul_p;
5970 signop sign = TYPE_SIGN (ctype);
5971 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5972 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5973 if (overflow_mul_p
5974 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5975 overflow_p = true;
5976 if (!overflow_p)
5977 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5978 wide_int_to_tree (ctype, mul));
5979 }
5980
5981 /* If these operations "cancel" each other, we have the main
5982 optimizations of this pass, which occur when either constant is a
5983 	 multiple of the other, in which case we replace this with an
5984 	 operation of either CODE or TCODE.
5985
5986 If we have an unsigned type, we cannot do this since it will change
5987 the result if the original computation overflowed. */
5988 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5989 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5990 || (tcode == MULT_EXPR
5991 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5992 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5993 && code != MULT_EXPR)))
5994 {
5995 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5996 {
5997 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5998 *strict_overflow_p = true;
5999 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6000 fold_convert (ctype,
6001 const_binop (TRUNC_DIV_EXPR,
6002 op1, c)));
6003 }
6004 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6005 {
6006 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6007 *strict_overflow_p = true;
6008 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6009 fold_convert (ctype,
6010 const_binop (TRUNC_DIV_EXPR,
6011 c, op1)));
6012 }
6013 }
6014 break;
6015
6016 default:
6017 break;
6018 }
6019
6020 return 0;
6021 }
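
/* The running example from the comment before extract_muldiv, as a
   standalone sketch (illustrative only, kept out of the build;
   assumes signed overflow is undefined so distributing is valid):  */
#if 0
static long
scaled_sum_div (long x, long y)
{
  /* (x * 8 + y * 16) / 4 can be rewritten as x * 2 + y * 4,
     eliminating the division.  */
  return (x * 8 + y * 16) / 4;
}
#endif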
6022 \f
6023 /* Return a node which has the indicated constant VALUE (either 0 or
6024 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6025 and is of the indicated TYPE. */
6026
6027 tree
6028 constant_boolean_node (bool value, tree type)
6029 {
6030 if (type == integer_type_node)
6031 return value ? integer_one_node : integer_zero_node;
6032 else if (type == boolean_type_node)
6033 return value ? boolean_true_node : boolean_false_node;
6034 else if (TREE_CODE (type) == VECTOR_TYPE)
6035 return build_vector_from_val (type,
6036 build_int_cst (TREE_TYPE (type),
6037 value ? -1 : 0));
6038 else
6039 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6040 }
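
/* Why vectors get {-1,...} rather than {1,...} (illustrative only,
   kept out of the build; uses the GNU vector extension):  */
#if 0
typedef int v4si __attribute__ ((vector_size (16)));

static v4si
vector_truth (v4si a, v4si b)
{
  /* A vector comparison yields all-ones (-1) in each true lane and
     0 in each false lane, matching the constants built above.  */
  return a == b;
}
#endif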
6041
6042
6043 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6044    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6045 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6046 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6047 COND is the first argument to CODE; otherwise (as in the example
6048 given here), it is the second argument. TYPE is the type of the
6049 original expression. Return NULL_TREE if no simplification is
6050 possible. */
6051
6052 static tree
6053 fold_binary_op_with_conditional_arg (location_t loc,
6054 enum tree_code code,
6055 tree type, tree op0, tree op1,
6056 tree cond, tree arg, int cond_first_p)
6057 {
6058 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6059 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6060 tree test, true_value, false_value;
6061 tree lhs = NULL_TREE;
6062 tree rhs = NULL_TREE;
6063 enum tree_code cond_code = COND_EXPR;
6064
6065 if (TREE_CODE (cond) == COND_EXPR
6066 || TREE_CODE (cond) == VEC_COND_EXPR)
6067 {
6068 test = TREE_OPERAND (cond, 0);
6069 true_value = TREE_OPERAND (cond, 1);
6070 false_value = TREE_OPERAND (cond, 2);
6071       /* If this operand is a throw expression (and hence has void
6072 	 type), it does not make sense to try to perform a logical or
6073 	 arithmetic operation involving it.  */
6074 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6075 lhs = true_value;
6076 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6077 rhs = false_value;
6078 }
6079 else
6080 {
6081 tree testtype = TREE_TYPE (cond);
6082 test = cond;
6083 true_value = constant_boolean_node (true, testtype);
6084 false_value = constant_boolean_node (false, testtype);
6085 }
6086
6087 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6088 cond_code = VEC_COND_EXPR;
6089
6090 /* This transformation is only worthwhile if we don't have to wrap ARG
6091 in a SAVE_EXPR and the operation can be simplified without recursing
6092      on at least one of the branches once it's pushed inside the COND_EXPR.  */
6093 if (!TREE_CONSTANT (arg)
6094 && (TREE_SIDE_EFFECTS (arg)
6095 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6096 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6097 return NULL_TREE;
6098
6099 arg = fold_convert_loc (loc, arg_type, arg);
6100 if (lhs == 0)
6101 {
6102 true_value = fold_convert_loc (loc, cond_type, true_value);
6103 if (cond_first_p)
6104 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6105 else
6106 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6107 }
6108 if (rhs == 0)
6109 {
6110 false_value = fold_convert_loc (loc, cond_type, false_value);
6111 if (cond_first_p)
6112 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6113 else
6114 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6115 }
6116
6117 /* Check that we have simplified at least one of the branches. */
6118 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6119 return NULL_TREE;
6120
6121 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6122 }
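
/* A case where the transformation above fires (illustrative only,
   kept out of the build): both arms are constant, so at least one
   branch simplifies once the addition is pushed inside.  */
#if 0
static int
add_cond (int n, int b)
{
  /* n + (b ? 1 : 0) becomes b ? n + 1 : n + 0.  */
  return n + (b ? 1 : 0);
}
#endif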
6123
6124 \f
6125 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6126
6127 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6128 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6129 ADDEND is the same as X.
6130
6131 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6132 and finite. The problematic cases are when X is zero, and its mode
6133 has signed zeros. In the case of rounding towards -infinity,
6134 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6135 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6136
6137 bool
6138 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6139 {
6140 if (!real_zerop (addend))
6141 return false;
6142
6143 /* Don't allow the fold with -fsignaling-nans. */
6144 if (HONOR_SNANS (TYPE_MODE (type)))
6145 return false;
6146
6147 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6148 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6149 return true;
6150
6151 /* In a vector or complex, we would need to check the sign of all zeros. */
6152 if (TREE_CODE (addend) != REAL_CST)
6153 return false;
6154
6155 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6156 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6157 negate = !negate;
6158
6159 /* The mode has signed zeros, and we have to honor their sign.
6160 In this situation, there is only one case we can return true for.
6161 X - 0 is the same as X unless rounding towards -infinity is
6162 supported. */
6163 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6164 }
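
/* The signed-zero hazard described above, concretely (illustrative
   only, kept out of the build):  */
#if 0
static double
plus_zero_demo (void)
{
  double x = -0.0;
  /* With signed zeros honored, x + 0.0 is +0.0, not x, so this
     addition must not be folded away; x - 0.0 would be foldable
     absent sign-dependent rounding.  */
  return x + 0.0;
}
#endif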
6165
6166 /* Subroutine of fold() that checks comparisons of built-in math
6167 functions against real constants.
6168
6169 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6170 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6171 is the type of the result and ARG0 and ARG1 are the operands of the
6172 comparison. ARG1 must be a TREE_REAL_CST.
6173
6174 The function returns the constant folded tree if a simplification
6175 can be made, and NULL_TREE otherwise. */
6176
6177 static tree
6178 fold_mathfn_compare (location_t loc,
6179 enum built_in_function fcode, enum tree_code code,
6180 tree type, tree arg0, tree arg1)
6181 {
6182 REAL_VALUE_TYPE c;
6183
6184 if (BUILTIN_SQRT_P (fcode))
6185 {
6186 tree arg = CALL_EXPR_ARG (arg0, 0);
6187 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6188
6189 c = TREE_REAL_CST (arg1);
6190 if (REAL_VALUE_NEGATIVE (c))
6191 {
6192 	  /* sqrt(x) < y (likewise == y and <= y) is always false, if y
6192 	     is negative.  */
6193 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6194 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6195
6196 /* sqrt(x) > y is always true, if y is negative and we
6197 don't care about NaNs, i.e. negative values of x. */
6198 if (code == NE_EXPR || !HONOR_NANS (mode))
6199 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6200
6201 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6202 return fold_build2_loc (loc, GE_EXPR, type, arg,
6203 build_real (TREE_TYPE (arg), dconst0));
6204 }
6205 else if (code == GT_EXPR || code == GE_EXPR)
6206 {
6207 REAL_VALUE_TYPE c2;
6208
6209 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6210 real_convert (&c2, mode, &c2);
6211
6212 if (REAL_VALUE_ISINF (c2))
6213 {
6214 /* sqrt(x) > y is x == +Inf, when y is very large. */
6215 if (HONOR_INFINITIES (mode))
6216 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6217 build_real (TREE_TYPE (arg), c2));
6218
6219 /* sqrt(x) > y is always false, when y is very large
6220 and we don't care about infinities. */
6221 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6222 }
6223
6224 /* sqrt(x) > c is the same as x > c*c. */
6225 return fold_build2_loc (loc, code, type, arg,
6226 build_real (TREE_TYPE (arg), c2));
6227 }
6228 else if (code == LT_EXPR || code == LE_EXPR)
6229 {
6230 REAL_VALUE_TYPE c2;
6231
6232 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6233 real_convert (&c2, mode, &c2);
6234
6235 if (REAL_VALUE_ISINF (c2))
6236 {
6237 /* sqrt(x) < y is always true, when y is a very large
6238 value and we don't care about NaNs or Infinities. */
6239 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6240 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6241
6242 /* sqrt(x) < y is x != +Inf when y is very large and we
6243 don't care about NaNs. */
6244 if (! HONOR_NANS (mode))
6245 return fold_build2_loc (loc, NE_EXPR, type, arg,
6246 build_real (TREE_TYPE (arg), c2));
6247
6248 /* sqrt(x) < y is x >= 0 when y is very large and we
6249 don't care about Infinities. */
6250 if (! HONOR_INFINITIES (mode))
6251 return fold_build2_loc (loc, GE_EXPR, type, arg,
6252 build_real (TREE_TYPE (arg), dconst0));
6253
6254 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6255 arg = save_expr (arg);
6256 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6257 fold_build2_loc (loc, GE_EXPR, type, arg,
6258 build_real (TREE_TYPE (arg),
6259 dconst0)),
6260 fold_build2_loc (loc, NE_EXPR, type, arg,
6261 build_real (TREE_TYPE (arg),
6262 c2)));
6263 }
6264
6265 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6266 if (! HONOR_NANS (mode))
6267 return fold_build2_loc (loc, code, type, arg,
6268 build_real (TREE_TYPE (arg), c2));
6269
6270 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6271 arg = save_expr (arg);
6272 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6273 fold_build2_loc (loc, GE_EXPR, type, arg,
6274 build_real (TREE_TYPE (arg),
6275 dconst0)),
6276 fold_build2_loc (loc, code, type, arg,
6277 build_real (TREE_TYPE (arg),
6278 c2)));
6279 }
6280 }
6281
6282 return NULL_TREE;
6283 }
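
/* The common GT_EXPR case above at the source level (illustrative
   only, kept out of the build):  */
#if 0
#include <math.h>

static int
sqrt_gt (double x)
{
  /* sqrt (x) > 2.0 folds to x > 4.0, squaring the constant rather
     than computing the square root.  */
  return sqrt (x) > 2.0;
}
#endif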
6284
6285 /* Subroutine of fold() that optimizes comparisons against Infinities,
6286 either +Inf or -Inf.
6287
6288 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6289 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6290 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6291
6292 The function returns the constant folded tree if a simplification
6293 can be made, and NULL_TREE otherwise. */
6294
6295 static tree
6296 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6297 tree arg0, tree arg1)
6298 {
6299 enum machine_mode mode;
6300 REAL_VALUE_TYPE max;
6301 tree temp;
6302 bool neg;
6303
6304 mode = TYPE_MODE (TREE_TYPE (arg0));
6305
6306 /* For negative infinity swap the sense of the comparison. */
6307 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6308 if (neg)
6309 code = swap_tree_comparison (code);
6310
6311 switch (code)
6312 {
6313 case GT_EXPR:
6314       /* x > +Inf is always false, if we ignore sNaNs.  */
6315 if (HONOR_SNANS (mode))
6316 return NULL_TREE;
6317 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6318
6319 case LE_EXPR:
6320       /* x <= +Inf is always true, if we don't care about NaNs.  */
6321 if (! HONOR_NANS (mode))
6322 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6323
6324       /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
6325 arg0 = save_expr (arg0);
6326 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6327
6328 case EQ_EXPR:
6329 case GE_EXPR:
6330 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6331 real_maxval (&max, neg, mode);
6332 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6333 arg0, build_real (TREE_TYPE (arg0), max));
6334
6335 case LT_EXPR:
6336 /* x < +Inf is always equal to x <= DBL_MAX. */
6337 real_maxval (&max, neg, mode);
6338 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6339 arg0, build_real (TREE_TYPE (arg0), max));
6340
6341 case NE_EXPR:
6342 /* x != +Inf is always equal to !(x > DBL_MAX). */
6343 real_maxval (&max, neg, mode);
6344 if (! HONOR_NANS (mode))
6345 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6346 arg0, build_real (TREE_TYPE (arg0), max));
6347
6348 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6349 arg0, build_real (TREE_TYPE (arg0), max));
6350 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6351
6352 default:
6353 break;
6354 }
6355
6356 return NULL_TREE;
6357 }
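
/* The LT_EXPR case above, concretely (illustrative only, kept out
   of the build):  */
#if 0
#include <float.h>

static int
below_inf (double x)
{
  /* x < +Inf folds to x <= DBL_MAX.  */
  return x < __builtin_inf ();
}
#endif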
6358
6359 /* Subroutine of fold() that optimizes comparisons of a division by
6360 a nonzero integer constant against an integer constant, i.e.
6361 X/C1 op C2.
6362
6363 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6364 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6365    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6366
6367 The function returns the constant folded tree if a simplification
6368 can be made, and NULL_TREE otherwise. */
6369
6370 static tree
6371 fold_div_compare (location_t loc,
6372 enum tree_code code, tree type, tree arg0, tree arg1)
6373 {
6374 tree prod, tmp, hi, lo;
6375 tree arg00 = TREE_OPERAND (arg0, 0);
6376 tree arg01 = TREE_OPERAND (arg0, 1);
6377 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6378 bool neg_overflow = false;
6379 bool overflow;
6380
6381 /* We have to do this the hard way to detect unsigned overflow.
6382 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6383 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6384 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6386
6387 if (sign == UNSIGNED)
6388 {
6389 tmp = int_const_binop (MINUS_EXPR, arg01,
6390 build_int_cst (TREE_TYPE (arg01), 1));
6391 lo = prod;
6392
6393 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6394 val = wi::add (prod, tmp, sign, &overflow);
6395 hi = force_fit_type (TREE_TYPE (arg00), val,
6396 -1, overflow | TREE_OVERFLOW (prod));
6397 }
6398 else if (tree_int_cst_sgn (arg01) >= 0)
6399 {
6400 tmp = int_const_binop (MINUS_EXPR, arg01,
6401 build_int_cst (TREE_TYPE (arg01), 1));
6402 switch (tree_int_cst_sgn (arg1))
6403 {
6404 case -1:
6405 neg_overflow = true;
6406 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6407 hi = prod;
6408 break;
6409
6410 case 0:
6411 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6412 hi = tmp;
6413 break;
6414
6415 case 1:
6416 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6417 lo = prod;
6418 break;
6419
6420 default:
6421 gcc_unreachable ();
6422 }
6423 }
6424 else
6425 {
6426 /* A negative divisor reverses the relational operators. */
6427 code = swap_tree_comparison (code);
6428
6429 tmp = int_const_binop (PLUS_EXPR, arg01,
6430 build_int_cst (TREE_TYPE (arg01), 1));
6431 switch (tree_int_cst_sgn (arg1))
6432 {
6433 case -1:
6434 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6435 lo = prod;
6436 break;
6437
6438 case 0:
6439 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6440 lo = tmp;
6441 break;
6442
6443 case 1:
6444 neg_overflow = true;
6445 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6446 hi = prod;
6447 break;
6448
6449 default:
6450 gcc_unreachable ();
6451 }
6452 }
6453
6454 switch (code)
6455 {
6456 case EQ_EXPR:
6457 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6458 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6459 if (TREE_OVERFLOW (hi))
6460 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6461 if (TREE_OVERFLOW (lo))
6462 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6463 return build_range_check (loc, type, arg00, 1, lo, hi);
6464
6465 case NE_EXPR:
6466 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6467 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6468 if (TREE_OVERFLOW (hi))
6469 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6470 if (TREE_OVERFLOW (lo))
6471 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6472 return build_range_check (loc, type, arg00, 0, lo, hi);
6473
6474 case LT_EXPR:
6475 if (TREE_OVERFLOW (lo))
6476 {
6477 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6478 return omit_one_operand_loc (loc, type, tmp, arg00);
6479 }
6480 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6481
6482 case LE_EXPR:
6483 if (TREE_OVERFLOW (hi))
6484 {
6485 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6486 return omit_one_operand_loc (loc, type, tmp, arg00);
6487 }
6488 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6489
6490 case GT_EXPR:
6491 if (TREE_OVERFLOW (hi))
6492 {
6493 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6494 return omit_one_operand_loc (loc, type, tmp, arg00);
6495 }
6496 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6497
6498 case GE_EXPR:
6499 if (TREE_OVERFLOW (lo))
6500 {
6501 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6502 return omit_one_operand_loc (loc, type, tmp, arg00);
6503 }
6504 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6505
6506 default:
6507 break;
6508 }
6509
6510 return NULL_TREE;
6511 }
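
/* The range check built above, concretely (illustrative only, kept
   out of the build):  */
#if 0
static int
div_eq (unsigned x)
{
  /* x / 3 == 2 holds exactly for x in [6, 8], so the division is
     replaced by a range check on x.  */
  return x / 3 == 2;
}
#endif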
6512
6513
6514 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6515 equality/inequality test, then return a simplified form of the test
6516    using a sign test.  Otherwise return NULL_TREE.  TYPE is the desired
6517 result type. */
6518
6519 static tree
6520 fold_single_bit_test_into_sign_test (location_t loc,
6521 enum tree_code code, tree arg0, tree arg1,
6522 tree result_type)
6523 {
6524 /* If this is testing a single bit, we can optimize the test. */
6525 if ((code == NE_EXPR || code == EQ_EXPR)
6526 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6527 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6528 {
6529 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6530 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6531 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6532
6533 if (arg00 != NULL_TREE
6534 /* This is only a win if casting to a signed type is cheap,
6535 i.e. when arg00's type is not a partial mode. */
6536 && TYPE_PRECISION (TREE_TYPE (arg00))
6537 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6538 {
6539 tree stype = signed_type_for (TREE_TYPE (arg00));
6540 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6541 result_type,
6542 fold_convert_loc (loc, stype, arg00),
6543 build_int_cst (stype, 0));
6544 }
6545 }
6546
6547 return NULL_TREE;
6548 }
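
/* The sign-bit case above, concretely (illustrative only, kept out
   of the build; assumes a 32-bit unsigned int):  */
#if 0
static int
high_bit (unsigned x)
{
  /* The mask is the sign bit of the operand's type, so the test
     folds to (int) x < 0.  */
  return (x & 0x80000000u) != 0;
}
#endif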
6549
6550 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6551 equality/inequality test, then return a simplified form of
6552 the test using shifts and logical operations. Otherwise return
6553    NULL_TREE.  TYPE is the desired result type.  */
6554
6555 tree
6556 fold_single_bit_test (location_t loc, enum tree_code code,
6557 tree arg0, tree arg1, tree result_type)
6558 {
6559 /* If this is testing a single bit, we can optimize the test. */
6560 if ((code == NE_EXPR || code == EQ_EXPR)
6561 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6562 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6563 {
6564 tree inner = TREE_OPERAND (arg0, 0);
6565 tree type = TREE_TYPE (arg0);
6566 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6567 enum machine_mode operand_mode = TYPE_MODE (type);
6568 int ops_unsigned;
6569 tree signed_type, unsigned_type, intermediate_type;
6570 tree tem, one;
6571
6572 /* First, see if we can fold the single bit test into a sign-bit
6573 test. */
6574 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6575 result_type);
6576 if (tem)
6577 return tem;
6578
6579 /* Otherwise we have (A & C) != 0 where C is a single bit,
6580 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
6581 Similarly for (A & C) == 0. */
6582
6583       /* If INNER is a right shift by a constant and it plus BITNUM does
6584 not overflow, adjust BITNUM and INNER. */
6585 if (TREE_CODE (inner) == RSHIFT_EXPR
6586 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6587 && bitnum < TYPE_PRECISION (type)
6588 && wi::ltu_p (TREE_OPERAND (inner, 1),
6589 TYPE_PRECISION (type) - bitnum))
6590 {
6591 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6592 inner = TREE_OPERAND (inner, 0);
6593 }
6594
6595 /* If we are going to be able to omit the AND below, we must do our
6596 operations as unsigned. If we must use the AND, we have a choice.
6597 Normally unsigned is faster, but for some machines signed is. */
6598 #ifdef LOAD_EXTEND_OP
6599 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6600 && !flag_syntax_only) ? 0 : 1;
6601 #else
6602 ops_unsigned = 1;
6603 #endif
6604
6605 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6606 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6607 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6608 inner = fold_convert_loc (loc, intermediate_type, inner);
6609
6610 if (bitnum != 0)
6611 inner = build2 (RSHIFT_EXPR, intermediate_type,
6612 inner, size_int (bitnum));
6613
6614 one = build_int_cst (intermediate_type, 1);
6615
6616 if (code == EQ_EXPR)
6617 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6618
6619 /* Put the AND last so it can combine with more things. */
6620 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6621
6622 /* Make sure to return the proper type. */
6623 inner = fold_convert_loc (loc, result_type, inner);
6624
6625 return inner;
6626 }
6627 return NULL_TREE;
6628 }
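
/* The shift-and-mask form produced above, concretely (illustrative
   only, kept out of the build):  */
#if 0
static int
bit_three (unsigned x)
{
  /* When the sign-bit form does not apply, (x & 8) != 0 becomes
     (x >> 3) & 1.  */
  return (x & 8) != 0;
}
#endif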
6629
6630 /* Check whether we are allowed to reorder operands arg0 and arg1,
6631    such that the evaluation of arg1 occurs before that of arg0.  */
6632
6633 static bool
6634 reorder_operands_p (const_tree arg0, const_tree arg1)
6635 {
6636 if (! flag_evaluation_order)
6637 return true;
6638 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6639 return true;
6640 return ! TREE_SIDE_EFFECTS (arg0)
6641 && ! TREE_SIDE_EFFECTS (arg1);
6642 }
6643
6644 /* Test whether it is preferable to swap two operands, ARG0 and
6645 ARG1, for example because ARG0 is an integer constant and ARG1
6646 isn't. If REORDER is true, only recommend swapping if we can
6647 evaluate the operands in reverse order. */
6648
6649 bool
6650 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6651 {
6652 if (CONSTANT_CLASS_P (arg1))
6653 return 0;
6654 if (CONSTANT_CLASS_P (arg0))
6655 return 1;
6656
6657 STRIP_SIGN_NOPS (arg0);
6658 STRIP_SIGN_NOPS (arg1);
6659
6660 if (TREE_CONSTANT (arg1))
6661 return 0;
6662 if (TREE_CONSTANT (arg0))
6663 return 1;
6664
6665 if (reorder && flag_evaluation_order
6666 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6667 return 0;
6668
6669 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6670 for commutative and comparison operators. Ensuring a canonical
6671 form allows the optimizers to find additional redundancies without
6672 having to explicitly check for both orderings. */
6673 if (TREE_CODE (arg0) == SSA_NAME
6674 && TREE_CODE (arg1) == SSA_NAME
6675 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6676 return 1;
6677
6678 /* Put SSA_NAMEs last. */
6679 if (TREE_CODE (arg1) == SSA_NAME)
6680 return 0;
6681 if (TREE_CODE (arg0) == SSA_NAME)
6682 return 1;
6683
6684 /* Put variables last. */
6685 if (DECL_P (arg1))
6686 return 0;
6687 if (DECL_P (arg0))
6688 return 1;
6689
6690 return 0;
6691 }
6692
6693 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6694 ARG0 is extended to a wider type. */
6695
6696 static tree
6697 fold_widened_comparison (location_t loc, enum tree_code code,
6698 tree type, tree arg0, tree arg1)
6699 {
6700 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6701 tree arg1_unw;
6702 tree shorter_type, outer_type;
6703 tree min, max;
6704 bool above, below;
6705
6706 if (arg0_unw == arg0)
6707 return NULL_TREE;
6708 shorter_type = TREE_TYPE (arg0_unw);
6709
6710 #ifdef HAVE_canonicalize_funcptr_for_compare
6711 /* Disable this optimization if we're casting a function pointer
6712 type on targets that require function pointer canonicalization. */
6713 if (HAVE_canonicalize_funcptr_for_compare
6714 && TREE_CODE (shorter_type) == POINTER_TYPE
6715 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6716 return NULL_TREE;
6717 #endif
6718
6719 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6720 return NULL_TREE;
6721
6722 arg1_unw = get_unwidened (arg1, NULL_TREE);
6723
6724 /* If possible, express the comparison in the shorter mode. */
6725 if ((code == EQ_EXPR || code == NE_EXPR
6726 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6727 && (TREE_TYPE (arg1_unw) == shorter_type
6728 || ((TYPE_PRECISION (shorter_type)
6729 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6730 && (TYPE_UNSIGNED (shorter_type)
6731 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6732 || (TREE_CODE (arg1_unw) == INTEGER_CST
6733 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6734 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6735 && int_fits_type_p (arg1_unw, shorter_type))))
6736 return fold_build2_loc (loc, code, type, arg0_unw,
6737 fold_convert_loc (loc, shorter_type, arg1_unw));
6738
6739 if (TREE_CODE (arg1_unw) != INTEGER_CST
6740 || TREE_CODE (shorter_type) != INTEGER_TYPE
6741 || !int_fits_type_p (arg1_unw, shorter_type))
6742 return NULL_TREE;
6743
6744   /* If we are comparing with an integer that does not fit into the range
6745 of the shorter type, the result is known. */
6746 outer_type = TREE_TYPE (arg1_unw);
6747 min = lower_bound_in_type (outer_type, shorter_type);
6748 max = upper_bound_in_type (outer_type, shorter_type);
6749
6750 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6751 max, arg1_unw));
6752 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6753 arg1_unw, min));
6754
6755 switch (code)
6756 {
6757 case EQ_EXPR:
6758 if (above || below)
6759 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6760 break;
6761
6762 case NE_EXPR:
6763 if (above || below)
6764 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6765 break;
6766
6767 case LT_EXPR:
6768 case LE_EXPR:
6769 if (above)
6770 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6771 else if (below)
6772 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6773       break;

6774 case GT_EXPR:
6775 case GE_EXPR:
6776 if (above)
6777 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6778 else if (below)
6779 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6780       break;

6781 default:
6782 break;
6783 }
6784
6785 return NULL_TREE;
6786 }
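
/* The known-result case above, concretely (illustrative only, kept
   out of the build):  */
#if 0
static int
widened_lt (unsigned char c)
{
  /* The promoted value always lies in [0, 255], so the comparison
     against 300 folds to constant true.  */
  return (int) c < 300;
}
#endif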
6787
6788 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6789 ARG0 just the signedness is changed. */
6790
6791 static tree
6792 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6793 tree arg0, tree arg1)
6794 {
6795 tree arg0_inner;
6796 tree inner_type, outer_type;
6797
6798 if (!CONVERT_EXPR_P (arg0))
6799 return NULL_TREE;
6800
6801 outer_type = TREE_TYPE (arg0);
6802 arg0_inner = TREE_OPERAND (arg0, 0);
6803 inner_type = TREE_TYPE (arg0_inner);
6804
6805 #ifdef HAVE_canonicalize_funcptr_for_compare
6806 /* Disable this optimization if we're casting a function pointer
6807 type on targets that require function pointer canonicalization. */
6808 if (HAVE_canonicalize_funcptr_for_compare
6809 && TREE_CODE (inner_type) == POINTER_TYPE
6810 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6811 return NULL_TREE;
6812 #endif
6813
6814 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6815 return NULL_TREE;
6816
6817 if (TREE_CODE (arg1) != INTEGER_CST
6818 && !(CONVERT_EXPR_P (arg1)
6819 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6820 return NULL_TREE;
6821
6822 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6823 && code != NE_EXPR
6824 && code != EQ_EXPR)
6825 return NULL_TREE;
6826
6827 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6828 return NULL_TREE;
6829
6830 if (TREE_CODE (arg1) == INTEGER_CST)
6831 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6832 TREE_OVERFLOW (arg1));
6833 else
6834 arg1 = fold_convert_loc (loc, inner_type, arg1);
6835
6836 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6837 }
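
/* The equality case above, concretely (illustrative only, kept out
   of the build):  */
#if 0
static int
sign_change_eq (unsigned u)
{
  /* Only the signedness changes in the cast, so for an equality the
     comparison is done in the inner type: u == 5u.  */
  return (int) u == 5;
}
#endif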
6838
6839
6840 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6841 means A >= Y && A != MAX, but in this case we know that
6842 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6843
6844 static tree
6845 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6846 {
6847 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6848
6849 if (TREE_CODE (bound) == LT_EXPR)
6850 a = TREE_OPERAND (bound, 0);
6851 else if (TREE_CODE (bound) == GT_EXPR)
6852 a = TREE_OPERAND (bound, 1);
6853 else
6854 return NULL_TREE;
6855
6856 typea = TREE_TYPE (a);
6857 if (!INTEGRAL_TYPE_P (typea)
6858 && !POINTER_TYPE_P (typea))
6859 return NULL_TREE;
6860
6861 if (TREE_CODE (ineq) == LT_EXPR)
6862 {
6863 a1 = TREE_OPERAND (ineq, 1);
6864 y = TREE_OPERAND (ineq, 0);
6865 }
6866 else if (TREE_CODE (ineq) == GT_EXPR)
6867 {
6868 a1 = TREE_OPERAND (ineq, 0);
6869 y = TREE_OPERAND (ineq, 1);
6870 }
6871 else
6872 return NULL_TREE;
6873
6874 if (TREE_TYPE (a1) != typea)
6875 return NULL_TREE;
6876
6877 if (POINTER_TYPE_P (typea))
6878 {
6879       /* Convert the pointer types into integers before taking the difference.  */
6880 tree ta = fold_convert_loc (loc, ssizetype, a);
6881 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6882 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6883 }
6884 else
6885 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6886
6887 if (!diff || !integer_onep (diff))
6888 return NULL_TREE;
6889
6890 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6891 }
6892
6893 /* Fold a sum or difference of at least one multiplication.
6894 Returns the folded tree or NULL if no simplification could be made. */
6895
6896 static tree
6897 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6898 tree arg0, tree arg1)
6899 {
6900 tree arg00, arg01, arg10, arg11;
6901 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6902
6903 /* (A * C) +- (B * C) -> (A+-B) * C.
6904 (A * C) +- A -> A * (C+-1).
6905 We are most concerned about the case where C is a constant,
6906 but other combinations show up during loop reduction. Since
6907 it is not difficult, try all four possibilities. */
6908
6909 if (TREE_CODE (arg0) == MULT_EXPR)
6910 {
6911 arg00 = TREE_OPERAND (arg0, 0);
6912 arg01 = TREE_OPERAND (arg0, 1);
6913 }
6914 else if (TREE_CODE (arg0) == INTEGER_CST)
6915 {
6916 arg00 = build_one_cst (type);
6917 arg01 = arg0;
6918 }
6919 else
6920 {
6921 /* We cannot generate constant 1 for fract. */
6922 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6923 return NULL_TREE;
6924 arg00 = arg0;
6925 arg01 = build_one_cst (type);
6926 }
6927 if (TREE_CODE (arg1) == MULT_EXPR)
6928 {
6929 arg10 = TREE_OPERAND (arg1, 0);
6930 arg11 = TREE_OPERAND (arg1, 1);
6931 }
6932 else if (TREE_CODE (arg1) == INTEGER_CST)
6933 {
6934 arg10 = build_one_cst (type);
6935       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6936 	 the purpose of this canonicalization.  */
6937 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6938 && negate_expr_p (arg1)
6939 && code == PLUS_EXPR)
6940 {
6941 arg11 = negate_expr (arg1);
6942 code = MINUS_EXPR;
6943 }
6944 else
6945 arg11 = arg1;
6946 }
6947 else
6948 {
6949 /* We cannot generate constant 1 for fract. */
6950 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6951 return NULL_TREE;
6952 arg10 = arg1;
6953 arg11 = build_one_cst (type);
6954 }
6955 same = NULL_TREE;
6956
6957 if (operand_equal_p (arg01, arg11, 0))
6958 same = arg01, alt0 = arg00, alt1 = arg10;
6959 else if (operand_equal_p (arg00, arg10, 0))
6960 same = arg00, alt0 = arg01, alt1 = arg11;
6961 else if (operand_equal_p (arg00, arg11, 0))
6962 same = arg00, alt0 = arg01, alt1 = arg10;
6963 else if (operand_equal_p (arg01, arg10, 0))
6964 same = arg01, alt0 = arg00, alt1 = arg11;
6965
6966 /* No identical multiplicands; see if we can find a common
6967 power-of-two factor in non-power-of-two multiplies. This
6968 can help in multi-dimensional array access. */
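     For instance (illustrative), "i * 12 + j * 4" becomes
     "(i * 3 + j) * 4": the number of multiplications is unchanged, but
     the common power-of-two factor is exposed to later passes.  */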
6969 else if (tree_fits_shwi_p (arg01)
6970 && tree_fits_shwi_p (arg11))
6971 {
6972 HOST_WIDE_INT int01, int11, tmp;
6973 bool swap = false;
6974 tree maybe_same;
6975 int01 = tree_to_shwi (arg01);
6976 int11 = tree_to_shwi (arg11);
6977
6978 /* Move min of absolute values to int11. */
6979 if (absu_hwi (int01) < absu_hwi (int11))
6980 {
6981 tmp = int01, int01 = int11, int11 = tmp;
6982 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6983 maybe_same = arg01;
6984 swap = true;
6985 }
6986 else
6987 maybe_same = arg11;
6988
6989 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6990 	      /* The remainder should not be a constant, otherwise we
6991 		 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6992 		 increase the number of multiplications necessary.  */
6993 && TREE_CODE (arg10) != INTEGER_CST)
6994 {
6995 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6996 build_int_cst (TREE_TYPE (arg00),
6997 int01 / int11));
6998 alt1 = arg10;
6999 same = maybe_same;
7000 if (swap)
7001 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7002 }
7003 }
7004
7005 if (same)
7006 return fold_build2_loc (loc, MULT_EXPR, type,
7007 fold_build2_loc (loc, code, type,
7008 fold_convert_loc (loc, type, alt0),
7009 fold_convert_loc (loc, type, alt1)),
7010 fold_convert_loc (loc, type, same));
7011
7012 return NULL_TREE;
7013 }
7014
7015 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7016 specified by EXPR into the buffer PTR of length LEN bytes.
7017 Return the number of bytes placed in the buffer, or zero
7018 upon failure. */
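/* For example (assuming 8-bit bytes): encoding the 32-bit constant
   0x01020304 stores the bytes 04 03 02 01 on a little-endian target
   and 01 02 03 04 on a big-endian one (illustration).  */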
7019
7020 static int
7021 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7022 {
7023 tree type = TREE_TYPE (expr);
7024 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7025 int byte, offset, word, words;
7026 unsigned char value;
7027
7028 if ((off == -1 && total_bytes > len)
7029 || off >= total_bytes)
7030 return 0;
7031 if (off == -1)
7032 off = 0;
7033 words = total_bytes / UNITS_PER_WORD;
7034
7035 for (byte = 0; byte < total_bytes; byte++)
7036 {
7037 int bitpos = byte * BITS_PER_UNIT;
7038 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7039 number of bytes. */
7040 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7041
7042 if (total_bytes > UNITS_PER_WORD)
7043 {
7044 word = byte / UNITS_PER_WORD;
7045 if (WORDS_BIG_ENDIAN)
7046 word = (words - 1) - word;
7047 offset = word * UNITS_PER_WORD;
7048 if (BYTES_BIG_ENDIAN)
7049 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7050 else
7051 offset += byte % UNITS_PER_WORD;
7052 }
7053 else
7054 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7055 if (offset >= off
7056 && offset - off < len)
7057 ptr[offset - off] = value;
7058 }
7059 return MIN (len, total_bytes - off);
7060 }
7061
7062
7063 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7064 specified by EXPR into the buffer PTR of length LEN bytes.
7065 Return the number of bytes placed in the buffer, or zero
7066 upon failure. */
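/* (A FIXED_CST is encoded by viewing its payload as an integer of the
   same mode size and reusing native_encode_int.)  */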
7067
7068 static int
7069 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7070 {
7071 tree type = TREE_TYPE (expr);
7072 enum machine_mode mode = TYPE_MODE (type);
7073 int total_bytes = GET_MODE_SIZE (mode);
7074 FIXED_VALUE_TYPE value;
7075 tree i_value, i_type;
7076
7077 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7078 return 0;
7079
7080 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7081
7082 if (NULL_TREE == i_type
7083       || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7084 return 0;
7085
7086 value = TREE_FIXED_CST (expr);
7087 i_value = double_int_to_tree (i_type, value.data);
7088
7089 return native_encode_int (i_value, ptr, len, off);
7090 }
7091
7092
7093 /* Subroutine of native_encode_expr. Encode the REAL_CST
7094 specified by EXPR into the buffer PTR of length LEN bytes.
7095 Return the number of bytes placed in the buffer, or zero
7096 upon failure. */
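/* (real_to_target first splits the value into 32-bit chunks held in
   longs; the loop below then permutes the chunk bytes into target
   memory order.)  */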
7097
7098 static int
7099 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7100 {
7101 tree type = TREE_TYPE (expr);
7102 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7103 int byte, offset, word, words, bitpos;
7104 unsigned char value;
7105
7106   /* There are always 32 bits in each long, no matter the size of
7107      the host's long.  We handle floating point representations with
7108      up to 192 bits.  */
7109 long tmp[6];
7110
7111 if ((off == -1 && total_bytes > len)
7112 || off >= total_bytes)
7113 return 0;
7114 if (off == -1)
7115 off = 0;
7116 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7117
7118 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7119
7120 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7121 bitpos += BITS_PER_UNIT)
7122 {
7123 byte = (bitpos / BITS_PER_UNIT) & 3;
7124 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7125
7126 if (UNITS_PER_WORD < 4)
7127 {
7128 word = byte / UNITS_PER_WORD;
7129 if (WORDS_BIG_ENDIAN)
7130 word = (words - 1) - word;
7131 offset = word * UNITS_PER_WORD;
7132 if (BYTES_BIG_ENDIAN)
7133 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7134 else
7135 offset += byte % UNITS_PER_WORD;
7136 }
7137 else
7138 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7139 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7140 if (offset >= off
7141 && offset - off < len)
7142 ptr[offset - off] = value;
7143 }
7144 return MIN (len, total_bytes - off);
7145 }
7146
7147 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7148 specified by EXPR into the buffer PTR of length LEN bytes.
7149 Return the number of bytes placed in the buffer, or zero
7150 upon failure. */
7151
7152 static int
7153 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7154 {
7155 int rsize, isize;
7156 tree part;
7157
7158 part = TREE_REALPART (expr);
7159 rsize = native_encode_expr (part, ptr, len, off);
7160 if (off == -1
7161 && rsize == 0)
7162 return 0;
7163 part = TREE_IMAGPART (expr);
7164 if (off != -1)
7165 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7166 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7167 if (off == -1
7168 && isize != rsize)
7169 return 0;
7170 return rsize + isize;
7171 }
7172
7173
7174 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7175 specified by EXPR into the buffer PTR of length LEN bytes.
7176 Return the number of bytes placed in the buffer, or zero
7177 upon failure. */
7178
7179 static int
7180 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7181 {
7182 unsigned i, count;
7183 int size, offset;
7184 tree itype, elem;
7185
7186 offset = 0;
7187 count = VECTOR_CST_NELTS (expr);
7188 itype = TREE_TYPE (TREE_TYPE (expr));
7189 size = GET_MODE_SIZE (TYPE_MODE (itype));
7190 for (i = 0; i < count; i++)
7191 {
7192 if (off >= size)
7193 {
7194 off -= size;
7195 continue;
7196 }
7197 elem = VECTOR_CST_ELT (expr, i);
7198 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7199 if ((off == -1 && res != size)
7200 || res == 0)
7201 return 0;
7202 offset += res;
7203 if (offset >= len)
7204 return offset;
7205 if (off != -1)
7206 off = 0;
7207 }
7208 return offset;
7209 }
7210
7211
7212 /* Subroutine of native_encode_expr. Encode the STRING_CST
7213 specified by EXPR into the buffer PTR of length LEN bytes.
7214 Return the number of bytes placed in the buffer, or zero
7215 upon failure. */
7216
7217 static int
7218 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7219 {
7220 tree type = TREE_TYPE (expr);
7221 HOST_WIDE_INT total_bytes;
7222
7223 if (TREE_CODE (type) != ARRAY_TYPE
7224 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7225 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7226 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7227 return 0;
7228 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7229 if ((off == -1 && total_bytes > len)
7230 || off >= total_bytes)
7231 return 0;
7232 if (off == -1)
7233 off = 0;
7234 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7235 {
7236 int written = 0;
7237 if (off < TREE_STRING_LENGTH (expr))
7238 {
7239 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7240 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7241 }
7242 memset (ptr + written, 0,
7243 MIN (total_bytes - written, len - written));
7244 }
7245 else
7246 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7247 return MIN (total_bytes - off, len);
7248 }
7249
7250
7251 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST, REAL_CST,
7252    FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into
7253    the buffer PTR of length LEN bytes.  If OFF is not -1 then start the
7254    encoding at byte offset OFF and encode at most LEN bytes.  Return the
7255    number of bytes placed in the buffer, or zero upon failure.  */
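/* A typical use is a memory round-trip, as in fold_view_convert_expr
   below (sketch; EXPR, TYPE and RESULT stand for caller variables):

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof buf, -1);
     if (len != 0)
       result = native_interpret_expr (type, buf, len);  */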
7256
7257 int
7258 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7259 {
7260 switch (TREE_CODE (expr))
7261 {
7262 case INTEGER_CST:
7263 return native_encode_int (expr, ptr, len, off);
7264
7265 case REAL_CST:
7266 return native_encode_real (expr, ptr, len, off);
7267
7268 case FIXED_CST:
7269 return native_encode_fixed (expr, ptr, len, off);
7270
7271 case COMPLEX_CST:
7272 return native_encode_complex (expr, ptr, len, off);
7273
7274 case VECTOR_CST:
7275 return native_encode_vector (expr, ptr, len, off);
7276
7277 case STRING_CST:
7278 return native_encode_string (expr, ptr, len, off);
7279
7280 default:
7281 return 0;
7282 }
7283 }
7284
7285
7286 /* Subroutine of native_interpret_expr. Interpret the contents of
7287 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7288 If the buffer cannot be interpreted, return NULL_TREE. */
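/* E.g. on a little-endian target the bytes { 0x2a, 0x00, 0x00, 0x00 }
   interpreted as a 32-bit integer yield the constant 42
   (illustration).  */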
7289
7290 static tree
7291 native_interpret_int (tree type, const unsigned char *ptr, int len)
7292 {
7293 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7294
7295 if (total_bytes > len
7296 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7297 return NULL_TREE;
7298
7299 wide_int result = wi::from_buffer (ptr, total_bytes);
7300
7301 return wide_int_to_tree (type, result);
7302 }
7303
7304
7305 /* Subroutine of native_interpret_expr. Interpret the contents of
7306 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7307 If the buffer cannot be interpreted, return NULL_TREE. */
7308
7309 static tree
7310 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7311 {
7312 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7313 double_int result;
7314 FIXED_VALUE_TYPE fixed_value;
7315
7316 if (total_bytes > len
7317 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7318 return NULL_TREE;
7319
7320 result = double_int::from_buffer (ptr, total_bytes);
7321 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7322
7323 return build_fixed (type, fixed_value);
7324 }
7325
7326
7327 /* Subroutine of native_interpret_expr. Interpret the contents of
7328 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7329 If the buffer cannot be interpreted, return NULL_TREE. */
7330
7331 static tree
7332 native_interpret_real (tree type, const unsigned char *ptr, int len)
7333 {
7334 enum machine_mode mode = TYPE_MODE (type);
7335 int total_bytes = GET_MODE_SIZE (mode);
7336 int byte, offset, word, words, bitpos;
7337 unsigned char value;
7338   /* There are always 32 bits in each long, no matter the size of
7339      the host's long.  We handle floating point representations with
7340      up to 192 bits.  */
7341 REAL_VALUE_TYPE r;
7342 long tmp[6];
7343
7345 if (total_bytes > len || total_bytes > 24)
7346 return NULL_TREE;
7347 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7348
7349 memset (tmp, 0, sizeof (tmp));
7350 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7351 bitpos += BITS_PER_UNIT)
7352 {
7353 byte = (bitpos / BITS_PER_UNIT) & 3;
7354 if (UNITS_PER_WORD < 4)
7355 {
7356 word = byte / UNITS_PER_WORD;
7357 if (WORDS_BIG_ENDIAN)
7358 word = (words - 1) - word;
7359 offset = word * UNITS_PER_WORD;
7360 if (BYTES_BIG_ENDIAN)
7361 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7362 else
7363 offset += byte % UNITS_PER_WORD;
7364 }
7365 else
7366 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7367 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7368
7369 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7370 }
7371
7372 real_from_target (&r, tmp, mode);
7373 return build_real (type, r);
7374 }
7375
7376
7377 /* Subroutine of native_interpret_expr. Interpret the contents of
7378 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7379 If the buffer cannot be interpreted, return NULL_TREE. */
7380
7381 static tree
7382 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7383 {
7384 tree etype, rpart, ipart;
7385 int size;
7386
7387 etype = TREE_TYPE (type);
7388 size = GET_MODE_SIZE (TYPE_MODE (etype));
7389 if (size * 2 > len)
7390 return NULL_TREE;
7391 rpart = native_interpret_expr (etype, ptr, size);
7392 if (!rpart)
7393 return NULL_TREE;
7394 ipart = native_interpret_expr (etype, ptr+size, size);
7395 if (!ipart)
7396 return NULL_TREE;
7397 return build_complex (type, rpart, ipart);
7398 }
7399
7400
7401 /* Subroutine of native_interpret_expr. Interpret the contents of
7402 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7403 If the buffer cannot be interpreted, return NULL_TREE. */
7404
7405 static tree
7406 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7407 {
7408 tree etype, elem;
7409 int i, size, count;
7410 tree *elements;
7411
7412 etype = TREE_TYPE (type);
7413 size = GET_MODE_SIZE (TYPE_MODE (etype));
7414 count = TYPE_VECTOR_SUBPARTS (type);
7415 if (size * count > len)
7416 return NULL_TREE;
7417
7418 elements = XALLOCAVEC (tree, count);
7419 for (i = count - 1; i >= 0; i--)
7420 {
7421 elem = native_interpret_expr (etype, ptr+(i*size), size);
7422 if (!elem)
7423 return NULL_TREE;
7424 elements[i] = elem;
7425 }
7426 return build_vector (type, elements);
7427 }
7428
7429
7430 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7431 the buffer PTR of length LEN as a constant of type TYPE. For
7432 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7433 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7434 return NULL_TREE. */
7435
7436 tree
7437 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7438 {
7439 switch (TREE_CODE (type))
7440 {
7441 case INTEGER_TYPE:
7442 case ENUMERAL_TYPE:
7443 case BOOLEAN_TYPE:
7444 case POINTER_TYPE:
7445 case REFERENCE_TYPE:
7446 return native_interpret_int (type, ptr, len);
7447
7448 case REAL_TYPE:
7449 return native_interpret_real (type, ptr, len);
7450
7451 case FIXED_POINT_TYPE:
7452 return native_interpret_fixed (type, ptr, len);
7453
7454 case COMPLEX_TYPE:
7455 return native_interpret_complex (type, ptr, len);
7456
7457 case VECTOR_TYPE:
7458 return native_interpret_vector (type, ptr, len);
7459
7460 default:
7461 return NULL_TREE;
7462 }
7463 }
7464
7465 /* Returns true if we can interpret the contents of a native encoding
7466 as TYPE. */
7467
7468 static bool
7469 can_native_interpret_type_p (tree type)
7470 {
7471 switch (TREE_CODE (type))
7472 {
7473 case INTEGER_TYPE:
7474 case ENUMERAL_TYPE:
7475 case BOOLEAN_TYPE:
7476 case POINTER_TYPE:
7477 case REFERENCE_TYPE:
7478 case FIXED_POINT_TYPE:
7479 case REAL_TYPE:
7480 case COMPLEX_TYPE:
7481 case VECTOR_TYPE:
7482 return true;
7483 default:
7484 return false;
7485 }
7486 }
7487
7488 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7489 TYPE at compile-time. If we're unable to perform the conversion
7490 return NULL_TREE. */
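/* E.g. on a target with IEEE single precision floats,
   VIEW_CONVERT_EXPR<int>(1.0f) folds to the integer constant
   0x3f800000 (illustrative example).  */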
7491
7492 static tree
7493 fold_view_convert_expr (tree type, tree expr)
7494 {
7495 /* We support up to 512-bit values (for V8DFmode). */
7496 unsigned char buffer[64];
7497 int len;
7498
7499 /* Check that the host and target are sane. */
7500 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7501 return NULL_TREE;
7502
7503 len = native_encode_expr (expr, buffer, sizeof (buffer));
7504 if (len == 0)
7505 return NULL_TREE;
7506
7507 return native_interpret_expr (type, buffer, len);
7508 }
7509
7510 /* Build an expression for the address of T. Folds away INDIRECT_REF
7511 to avoid confusing the gimplify process. */
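/* E.g. taking the address of "*p" simply yields "p" (with a cast to
   PTRTYPE if needed) rather than "&*p".  */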
7512
7513 tree
7514 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7515 {
7516 /* The size of the object is not relevant when talking about its address. */
7517 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7518 t = TREE_OPERAND (t, 0);
7519
7520 if (TREE_CODE (t) == INDIRECT_REF)
7521 {
7522 t = TREE_OPERAND (t, 0);
7523
7524 if (TREE_TYPE (t) != ptrtype)
7525 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7526 }
7527 else if (TREE_CODE (t) == MEM_REF
7528 && integer_zerop (TREE_OPERAND (t, 1)))
7529 return TREE_OPERAND (t, 0);
7530 else if (TREE_CODE (t) == MEM_REF
7531 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7532 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7533 TREE_OPERAND (t, 0),
7534 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7535 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7536 {
7537 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7538
7539 if (TREE_TYPE (t) != ptrtype)
7540 t = fold_convert_loc (loc, ptrtype, t);
7541 }
7542 else
7543 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7544
7545 return t;
7546 }
7547
7548 /* Build an expression for the address of T. */
7549
7550 tree
7551 build_fold_addr_expr_loc (location_t loc, tree t)
7552 {
7553 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7554
7555 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7556 }
7557
7558 static bool vec_cst_ctor_to_array (tree, tree *);
7559
7560 /* Fold a unary expression of code CODE and type TYPE with operand
7561 OP0. Return the folded expression if folding is successful.
7562 Otherwise, return NULL_TREE. */
7563
7564 tree
7565 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7566 {
7567 tree tem;
7568 tree arg0;
7569 enum tree_code_class kind = TREE_CODE_CLASS (code);
7570
7571 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7572 && TREE_CODE_LENGTH (code) == 1);
7573
7574 tem = generic_simplify (loc, code, type, op0);
7575 if (tem)
7576 return tem;
7577
7578 arg0 = op0;
7579 if (arg0)
7580 {
7581 if (CONVERT_EXPR_CODE_P (code)
7582 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7583 {
7584 /* Don't use STRIP_NOPS, because signedness of argument type
7585 matters. */
7586 STRIP_SIGN_NOPS (arg0);
7587 }
7588 else
7589 {
7590 /* Strip any conversions that don't change the mode. This
7591 is safe for every expression, except for a comparison
7592 expression because its signedness is derived from its
7593 operands.
7594
7595 Note that this is done as an internal manipulation within
7596 the constant folder, in order to find the simplest
7597 representation of the arguments so that their form can be
7598 	     studied.  In any case, the appropriate type conversions
7599 should be put back in the tree that will get out of the
7600 constant folder. */
7601 STRIP_NOPS (arg0);
7602 }
7603 }
7604
7605 if (TREE_CODE_CLASS (code) == tcc_unary)
7606 {
7607 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7608 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7609 fold_build1_loc (loc, code, type,
7610 fold_convert_loc (loc, TREE_TYPE (op0),
7611 TREE_OPERAND (arg0, 1))));
7612 else if (TREE_CODE (arg0) == COND_EXPR)
7613 {
7614 tree arg01 = TREE_OPERAND (arg0, 1);
7615 tree arg02 = TREE_OPERAND (arg0, 2);
7616 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7617 arg01 = fold_build1_loc (loc, code, type,
7618 fold_convert_loc (loc,
7619 TREE_TYPE (op0), arg01));
7620 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7621 arg02 = fold_build1_loc (loc, code, type,
7622 fold_convert_loc (loc,
7623 TREE_TYPE (op0), arg02));
7624 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7625 arg01, arg02);
7626
7627 	  /* If this was a conversion, and all we did was to move it
7628 	     inside the COND_EXPR, bring it back out.  But leave it if
7629 it is a conversion from integer to integer and the
7630 result precision is no wider than a word since such a
7631 conversion is cheap and may be optimized away by combine,
7632 while it couldn't if it were outside the COND_EXPR. Then return
7633 so we don't get into an infinite recursion loop taking the
7634 conversion out and then back in. */
7635
7636 if ((CONVERT_EXPR_CODE_P (code)
7637 || code == NON_LVALUE_EXPR)
7638 && TREE_CODE (tem) == COND_EXPR
7639 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7640 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7641 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7642 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7643 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7644 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7645 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7646 && (INTEGRAL_TYPE_P
7647 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7648 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7649 || flag_syntax_only))
7650 tem = build1_loc (loc, code, type,
7651 build3 (COND_EXPR,
7652 TREE_TYPE (TREE_OPERAND
7653 (TREE_OPERAND (tem, 1), 0)),
7654 TREE_OPERAND (tem, 0),
7655 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7656 TREE_OPERAND (TREE_OPERAND (tem, 2),
7657 0)));
7658 return tem;
7659 }
7660 }
7661
7662 switch (code)
7663 {
7664 case PAREN_EXPR:
7665 /* Re-association barriers around constants and other re-association
7666 barriers can be removed. */
7667 if (CONSTANT_CLASS_P (op0)
7668 || TREE_CODE (op0) == PAREN_EXPR)
7669 return fold_convert_loc (loc, type, op0);
7670 return NULL_TREE;
7671
7672 case NON_LVALUE_EXPR:
7673 if (!maybe_lvalue_p (op0))
7674 return fold_convert_loc (loc, type, op0);
7675 return NULL_TREE;
7676
7677 CASE_CONVERT:
7678 case FLOAT_EXPR:
7679 case FIX_TRUNC_EXPR:
7680 if (TREE_TYPE (op0) == type)
7681 return op0;
7682
7683 if (COMPARISON_CLASS_P (op0))
7684 {
7685 /* If we have (type) (a CMP b) and type is an integral type, return
7686 new expression involving the new type. Canonicalize
7687 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7688 non-integral type.
7689 	     Do not fold the result as that would not simplify further;
7690 	     folding it again would also lead to infinite recursion.  */
7691 if (TREE_CODE (type) == BOOLEAN_TYPE)
7692 return build2_loc (loc, TREE_CODE (op0), type,
7693 TREE_OPERAND (op0, 0),
7694 TREE_OPERAND (op0, 1));
7695 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7696 && TREE_CODE (type) != VECTOR_TYPE)
7697 return build3_loc (loc, COND_EXPR, type, op0,
7698 constant_boolean_node (true, type),
7699 constant_boolean_node (false, type));
7700 }
7701
7702 /* Handle cases of two conversions in a row. */
7703 if (CONVERT_EXPR_P (op0))
7704 {
7705 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7706 tree inter_type = TREE_TYPE (op0);
7707 int inside_int = INTEGRAL_TYPE_P (inside_type);
7708 int inside_ptr = POINTER_TYPE_P (inside_type);
7709 int inside_float = FLOAT_TYPE_P (inside_type);
7710 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7711 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7712 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7713 int inter_int = INTEGRAL_TYPE_P (inter_type);
7714 int inter_ptr = POINTER_TYPE_P (inter_type);
7715 int inter_float = FLOAT_TYPE_P (inter_type);
7716 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7717 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7718 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7719 int final_int = INTEGRAL_TYPE_P (type);
7720 int final_ptr = POINTER_TYPE_P (type);
7721 int final_float = FLOAT_TYPE_P (type);
7722 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7723 unsigned int final_prec = TYPE_PRECISION (type);
7724 int final_unsignedp = TYPE_UNSIGNED (type);
7725
7726 /* In addition to the cases of two conversions in a row
7727 handled below, if we are converting something to its own
7728 type via an object of identical or wider precision, neither
7729 conversion is needed. */
7730 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7731 && (((inter_int || inter_ptr) && final_int)
7732 || (inter_float && final_float))
7733 && inter_prec >= final_prec)
7734 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7735
7736 /* Likewise, if the intermediate and initial types are either both
7737 float or both integer, we don't need the middle conversion if the
7738 former is wider than the latter and doesn't change the signedness
7739 (for integers). Avoid this if the final type is a pointer since
7740 then we sometimes need the middle conversion. Likewise if the
7741 final type has a precision not equal to the size of its mode. */
7742 if (((inter_int && inside_int)
7743 || (inter_float && inside_float)
7744 || (inter_vec && inside_vec))
7745 && inter_prec >= inside_prec
7746 && (inter_float || inter_vec
7747 || inter_unsignedp == inside_unsignedp)
7748 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7749 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7750 && ! final_ptr
7751 && (! final_vec || inter_prec == inside_prec))
7752 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7753
7754 /* If we have a sign-extension of a zero-extended value, we can
7755 replace that by a single zero-extension. Likewise if the
7756 final conversion does not change precision we can drop the
7757 intermediate conversion. */
7758 if (inside_int && inter_int && final_int
7759 && ((inside_prec < inter_prec && inter_prec < final_prec
7760 && inside_unsignedp && !inter_unsignedp)
7761 || final_prec == inter_prec))
7762 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7763
7764 /* Two conversions in a row are not needed unless:
7765 - some conversion is floating-point (overstrict for now), or
7766 - some conversion is a vector (overstrict for now), or
7767 - the intermediate type is narrower than both initial and
7768 final, or
7769 - the intermediate type and innermost type differ in signedness,
7770 and the outermost type is wider than the intermediate, or
7771 - the initial type is a pointer type and the precisions of the
7772 intermediate and final types differ, or
7773 - the final type is a pointer type and the precisions of the
7774 initial and intermediate types differ. */
7775 if (! inside_float && ! inter_float && ! final_float
7776 && ! inside_vec && ! inter_vec && ! final_vec
7777 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7778 && ! (inside_int && inter_int
7779 && inter_unsignedp != inside_unsignedp
7780 && inter_prec < final_prec)
7781 && ((inter_unsignedp && inter_prec > inside_prec)
7782 == (final_unsignedp && final_prec > inter_prec))
7783 && ! (inside_ptr && inter_prec != final_prec)
7784 && ! (final_ptr && inside_prec != inter_prec)
7785 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7786 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7787 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7788 }
7789
7790 /* Handle (T *)&A.B.C for A being of type T and B and C
7791 living at offset zero. This occurs frequently in
7792 C++ upcasting and then accessing the base. */
7793 if (TREE_CODE (op0) == ADDR_EXPR
7794 && POINTER_TYPE_P (type)
7795 && handled_component_p (TREE_OPERAND (op0, 0)))
7796 {
7797 HOST_WIDE_INT bitsize, bitpos;
7798 tree offset;
7799 enum machine_mode mode;
7800 int unsignedp, volatilep;
7801 tree base = TREE_OPERAND (op0, 0);
7802 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7803 &mode, &unsignedp, &volatilep, false);
7804 /* If the reference was to a (constant) zero offset, we can use
7805 the address of the base if it has the same base type
7806 as the result type and the pointer type is unqualified. */
7807 if (! offset && bitpos == 0
7808 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7809 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7810 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7811 return fold_convert_loc (loc, type,
7812 build_fold_addr_expr_loc (loc, base));
7813 }
7814
7815 if (TREE_CODE (op0) == MODIFY_EXPR
7816 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7817 /* Detect assigning a bitfield. */
7818 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7819 && DECL_BIT_FIELD
7820 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7821 {
7822 /* Don't leave an assignment inside a conversion
7823 unless assigning a bitfield. */
7824 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7825 /* First do the assignment, then return converted constant. */
7826 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7827 TREE_NO_WARNING (tem) = 1;
7828 TREE_USED (tem) = 1;
7829 return tem;
7830 }
7831
7832 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7833 constants (if x has signed type, the sign bit cannot be set
7834 in c). This folds extension into the BIT_AND_EXPR.
7835 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7836 very likely don't have maximal range for their precision and this
7837 transformation effectively doesn't preserve non-maximal ranges. */
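      /* For instance, with "unsigned int x", "(unsigned long) (x & 0xff)"
	 becomes "(unsigned long) x & 0xffUL": the mask has no bits above
	 the narrower precision, so it can be applied after the widening
	 (illustrative example).  */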
7838 if (TREE_CODE (type) == INTEGER_TYPE
7839 && TREE_CODE (op0) == BIT_AND_EXPR
7840 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7841 {
7842 tree and_expr = op0;
7843 tree and0 = TREE_OPERAND (and_expr, 0);
7844 tree and1 = TREE_OPERAND (and_expr, 1);
7845 int change = 0;
7846
7847 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7848 || (TYPE_PRECISION (type)
7849 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7850 change = 1;
7851 else if (TYPE_PRECISION (TREE_TYPE (and1))
7852 <= HOST_BITS_PER_WIDE_INT
7853 && tree_fits_uhwi_p (and1))
7854 {
7855 unsigned HOST_WIDE_INT cst;
7856
7857 cst = tree_to_uhwi (and1);
7858 cst &= HOST_WIDE_INT_M1U
7859 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7860 change = (cst == 0);
7861 #ifdef LOAD_EXTEND_OP
7862 if (change
7863 && !flag_syntax_only
7864 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7865 == ZERO_EXTEND))
7866 {
7867 tree uns = unsigned_type_for (TREE_TYPE (and0));
7868 and0 = fold_convert_loc (loc, uns, and0);
7869 and1 = fold_convert_loc (loc, uns, and1);
7870 }
7871 #endif
7872 }
7873 if (change)
7874 {
7875 tem = force_fit_type (type, wi::to_widest (and1), 0,
7876 TREE_OVERFLOW (and1));
7877 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7878 fold_convert_loc (loc, type, and0), tem);
7879 }
7880 }
7881
7882 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7883 when one of the new casts will fold away. Conservatively we assume
7884 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7885 if (POINTER_TYPE_P (type)
7886 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7887 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7888 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7889 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7890 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7891 {
7892 tree arg00 = TREE_OPERAND (arg0, 0);
7893 tree arg01 = TREE_OPERAND (arg0, 1);
7894
7895 return fold_build_pointer_plus_loc
7896 (loc, fold_convert_loc (loc, type, arg00), arg01);
7897 }
7898
7899 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7900 of the same precision, and X is an integer type not narrower than
7901 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7902 if (INTEGRAL_TYPE_P (type)
7903 && TREE_CODE (op0) == BIT_NOT_EXPR
7904 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7905 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7906 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7907 {
7908 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7909 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7910 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7911 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7912 fold_convert_loc (loc, type, tem));
7913 }
7914
7915 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7916 type of X and Y (integer types only). */
7917 if (INTEGRAL_TYPE_P (type)
7918 && TREE_CODE (op0) == MULT_EXPR
7919 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7920 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7921 {
7922 /* Be careful not to introduce new overflows. */
7923 tree mult_type;
7924 if (TYPE_OVERFLOW_WRAPS (type))
7925 mult_type = type;
7926 else
7927 mult_type = unsigned_type_for (type);
7928
7929 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7930 {
7931 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7932 fold_convert_loc (loc, mult_type,
7933 TREE_OPERAND (op0, 0)),
7934 fold_convert_loc (loc, mult_type,
7935 TREE_OPERAND (op0, 1)));
7936 return fold_convert_loc (loc, type, tem);
7937 }
7938 }
7939
7940 tem = fold_convert_const (code, type, arg0);
7941 return tem ? tem : NULL_TREE;
7942
7943 case ADDR_SPACE_CONVERT_EXPR:
7944 if (integer_zerop (arg0))
7945 return fold_convert_const (code, type, arg0);
7946 return NULL_TREE;
7947
7948 case FIXED_CONVERT_EXPR:
7949 tem = fold_convert_const (code, type, arg0);
7950 return tem ? tem : NULL_TREE;
7951
7952 case VIEW_CONVERT_EXPR:
7953 if (TREE_TYPE (op0) == type)
7954 return op0;
7955 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7956 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7957 type, TREE_OPERAND (op0, 0));
7958 if (TREE_CODE (op0) == MEM_REF)
7959 return fold_build2_loc (loc, MEM_REF, type,
7960 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7961
7962 /* For integral conversions with the same precision or pointer
7963 conversions use a NOP_EXPR instead. */
7964 if ((INTEGRAL_TYPE_P (type)
7965 || POINTER_TYPE_P (type))
7966 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7967 || POINTER_TYPE_P (TREE_TYPE (op0)))
7968 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7969 return fold_convert_loc (loc, type, op0);
7970
7971 /* Strip inner integral conversions that do not change the precision. */
7972 if (CONVERT_EXPR_P (op0)
7973 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7974 || POINTER_TYPE_P (TREE_TYPE (op0)))
7975 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7976 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7977 && (TYPE_PRECISION (TREE_TYPE (op0))
7978 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7979 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7980 type, TREE_OPERAND (op0, 0));
7981
7982 return fold_view_convert_expr (type, op0);
7983
7984 case NEGATE_EXPR:
7985 tem = fold_negate_expr (loc, arg0);
7986 if (tem)
7987 return fold_convert_loc (loc, type, tem);
7988 return NULL_TREE;
7989
7990 case ABS_EXPR:
7991 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7992 return fold_abs_const (arg0, type);
7993 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7994 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7995 /* Convert fabs((double)float) into (double)fabsf(float). */
7996 else if (TREE_CODE (arg0) == NOP_EXPR
7997 && TREE_CODE (type) == REAL_TYPE)
7998 {
7999 tree targ0 = strip_float_extensions (arg0);
8000 if (targ0 != arg0)
8001 return fold_convert_loc (loc, type,
8002 fold_build1_loc (loc, ABS_EXPR,
8003 TREE_TYPE (targ0),
8004 targ0));
8005 }
8006 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8007 else if (TREE_CODE (arg0) == ABS_EXPR)
8008 return arg0;
8009 else if (tree_expr_nonnegative_p (arg0))
8010 return arg0;
8011
8012 /* Strip sign ops from argument. */
8013 if (TREE_CODE (type) == REAL_TYPE)
8014 {
8015 tem = fold_strip_sign_ops (arg0);
8016 if (tem)
8017 return fold_build1_loc (loc, ABS_EXPR, type,
8018 fold_convert_loc (loc, type, tem));
8019 }
8020 return NULL_TREE;
8021
8022 case CONJ_EXPR:
8023 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8024 return fold_convert_loc (loc, type, arg0);
8025 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8026 {
8027 tree itype = TREE_TYPE (type);
8028 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8029 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8030 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8031 negate_expr (ipart));
8032 }
8033 if (TREE_CODE (arg0) == COMPLEX_CST)
8034 {
8035 tree itype = TREE_TYPE (type);
8036 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8037 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8038 return build_complex (type, rpart, negate_expr (ipart));
8039 }
8040 if (TREE_CODE (arg0) == CONJ_EXPR)
8041 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8042 return NULL_TREE;
8043
8044 case BIT_NOT_EXPR:
8045 if (TREE_CODE (arg0) == INTEGER_CST)
8046 return fold_not_const (arg0, type);
8047 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8048 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8049 /* Convert ~ (-A) to A - 1. */
8050 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8051 return fold_build2_loc (loc, MINUS_EXPR, type,
8052 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8053 build_int_cst (type, 1));
8054 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8055 else if (INTEGRAL_TYPE_P (type)
8056 && ((TREE_CODE (arg0) == MINUS_EXPR
8057 && integer_onep (TREE_OPERAND (arg0, 1)))
8058 || (TREE_CODE (arg0) == PLUS_EXPR
8059 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8060 return fold_build1_loc (loc, NEGATE_EXPR, type,
8061 fold_convert_loc (loc, type,
8062 TREE_OPERAND (arg0, 0)));
8063 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8064 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8065 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8066 fold_convert_loc (loc, type,
8067 TREE_OPERAND (arg0, 0)))))
8068 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8069 fold_convert_loc (loc, type,
8070 TREE_OPERAND (arg0, 1)));
8071 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8072 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8073 fold_convert_loc (loc, type,
8074 TREE_OPERAND (arg0, 1)))))
8075 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8076 fold_convert_loc (loc, type,
8077 TREE_OPERAND (arg0, 0)), tem);
8078 /* Perform BIT_NOT_EXPR on each element individually. */
8079 else if (TREE_CODE (arg0) == VECTOR_CST)
8080 {
8081 tree *elements;
8082 tree elem;
8083 unsigned count = VECTOR_CST_NELTS (arg0), i;
8084
8085 elements = XALLOCAVEC (tree, count);
8086 for (i = 0; i < count; i++)
8087 {
8088 elem = VECTOR_CST_ELT (arg0, i);
8089 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8090 if (elem == NULL_TREE)
8091 break;
8092 elements[i] = elem;
8093 }
8094 if (i == count)
8095 return build_vector (type, elements);
8096 }
8097 else if (COMPARISON_CLASS_P (arg0)
8098 && (VECTOR_TYPE_P (type)
8099 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8100 {
8101 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8102 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8103 HONOR_NANS (TYPE_MODE (op_type)));
8104 if (subcode != ERROR_MARK)
8105 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8106 TREE_OPERAND (arg0, 1));
8107 }
8108 
8110 return NULL_TREE;
8111
8112 case TRUTH_NOT_EXPR:
8113 /* Note that the operand of this must be an int
8114 and its values must be 0 or 1.
8115 ("true" is a fixed value perhaps depending on the language,
8116 but we don't handle values other than 1 correctly yet.) */
8117 tem = fold_truth_not_expr (loc, arg0);
8118 if (!tem)
8119 return NULL_TREE;
8120 return fold_convert_loc (loc, type, tem);
8121
8122 case REALPART_EXPR:
8123 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8124 return fold_convert_loc (loc, type, arg0);
8125 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8126 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8127 TREE_OPERAND (arg0, 1));
8128 if (TREE_CODE (arg0) == COMPLEX_CST)
8129 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8130 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8131 {
8132 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8133 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8134 fold_build1_loc (loc, REALPART_EXPR, itype,
8135 TREE_OPERAND (arg0, 0)),
8136 fold_build1_loc (loc, REALPART_EXPR, itype,
8137 TREE_OPERAND (arg0, 1)));
8138 return fold_convert_loc (loc, type, tem);
8139 }
8140 if (TREE_CODE (arg0) == CONJ_EXPR)
8141 {
8142 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8143 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8144 TREE_OPERAND (arg0, 0));
8145 return fold_convert_loc (loc, type, tem);
8146 }
8147 if (TREE_CODE (arg0) == CALL_EXPR)
8148 {
8149 tree fn = get_callee_fndecl (arg0);
8150 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8151 switch (DECL_FUNCTION_CODE (fn))
8152 {
8153 CASE_FLT_FN (BUILT_IN_CEXPI):
8154 fn = mathfn_built_in (type, BUILT_IN_COS);
8155 if (fn)
8156 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8157 break;
8158
8159 default:
8160 break;
8161 }
8162 }
8163 return NULL_TREE;
8164
8165 case IMAGPART_EXPR:
8166 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8167 return build_zero_cst (type);
8168 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8169 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8170 TREE_OPERAND (arg0, 0));
8171 if (TREE_CODE (arg0) == COMPLEX_CST)
8172 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8173 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8174 {
8175 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8176 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8177 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8178 TREE_OPERAND (arg0, 0)),
8179 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8180 TREE_OPERAND (arg0, 1)));
8181 return fold_convert_loc (loc, type, tem);
8182 }
8183 if (TREE_CODE (arg0) == CONJ_EXPR)
8184 {
8185 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8186 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8187 return fold_convert_loc (loc, type, negate_expr (tem));
8188 }
8189 if (TREE_CODE (arg0) == CALL_EXPR)
8190 {
8191 tree fn = get_callee_fndecl (arg0);
8192 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8193 switch (DECL_FUNCTION_CODE (fn))
8194 {
8195 CASE_FLT_FN (BUILT_IN_CEXPI):
8196 fn = mathfn_built_in (type, BUILT_IN_SIN);
8197 if (fn)
8198 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8199 break;
8200
8201 default:
8202 break;
8203 }
8204 }
8205 return NULL_TREE;
8206
8207 case INDIRECT_REF:
8208 /* Fold *&X to X if X is an lvalue. */
8209 if (TREE_CODE (op0) == ADDR_EXPR)
8210 {
8211 tree op00 = TREE_OPERAND (op0, 0);
8212 if ((TREE_CODE (op00) == VAR_DECL
8213 || TREE_CODE (op00) == PARM_DECL
8214 || TREE_CODE (op00) == RESULT_DECL)
8215 && !TREE_READONLY (op00))
8216 return op00;
8217 }
8218 return NULL_TREE;
8219
8220 case VEC_UNPACK_LO_EXPR:
8221 case VEC_UNPACK_HI_EXPR:
8222 case VEC_UNPACK_FLOAT_LO_EXPR:
8223 case VEC_UNPACK_FLOAT_HI_EXPR:
8224 {
8225 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8226 tree *elts;
8227 enum tree_code subcode;
8228
8229 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8230 if (TREE_CODE (arg0) != VECTOR_CST)
8231 return NULL_TREE;
8232
8233 elts = XALLOCAVEC (tree, nelts * 2);
8234 if (!vec_cst_ctor_to_array (arg0, elts))
8235 return NULL_TREE;
8236
8237 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8238 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8239 elts += nelts;
8240
8241 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8242 subcode = NOP_EXPR;
8243 else
8244 subcode = FLOAT_EXPR;
8245
8246 for (i = 0; i < nelts; i++)
8247 {
8248 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8249 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8250 return NULL_TREE;
8251 }
8252
8253 return build_vector (type, elts);
8254 }
8255
8256 case REDUC_MIN_EXPR:
8257 case REDUC_MAX_EXPR:
8258 case REDUC_PLUS_EXPR:
8259 {
8260 unsigned int nelts, i;
8261 tree *elts;
8262 enum tree_code subcode;
8263
8264 if (TREE_CODE (op0) != VECTOR_CST)
8265 return NULL_TREE;
8266 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8267
8268 elts = XALLOCAVEC (tree, nelts);
8269 if (!vec_cst_ctor_to_array (op0, elts))
8270 return NULL_TREE;
8271
8272 switch (code)
8273 {
8274 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8275 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8276 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8277 default: gcc_unreachable ();
8278 }
8279
8280 for (i = 1; i < nelts; i++)
8281 {
8282 elts[0] = const_binop (subcode, elts[0], elts[i]);
8283 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8284 return NULL_TREE;
8285 }
8286
8287 return elts[0];
8288 }
8289
8290 default:
8291 return NULL_TREE;
8292 } /* switch (code) */
8293 }
8294
8295
8296 /* If the operation was a conversion do _not_ mark a resulting constant
8297 with TREE_OVERFLOW if the original constant was not. These conversions
8298 have implementation defined behavior and retaining the TREE_OVERFLOW
8299 flag here would confuse later passes such as VRP. */
8300 tree
8301 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8302 tree type, tree op0)
8303 {
8304 tree res = fold_unary_loc (loc, code, type, op0);
8305 if (res
8306 && TREE_CODE (res) == INTEGER_CST
8307 && TREE_CODE (op0) == INTEGER_CST
8308 && CONVERT_EXPR_CODE_P (code))
8309 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8310
8311 return res;
8312 }
8313
8314 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8315 operands OP0 and OP1. LOC is the location of the resulting expression.
8316    ARG0 and ARG1 are OP0 and OP1 with conversions (NOPs) stripped.
8317 Return the folded expression if folding is successful. Otherwise,
8318 return NULL_TREE. */
8319 static tree
8320 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8321 tree arg0, tree arg1, tree op0, tree op1)
8322 {
8323 tree tem;
8324
8325 /* We only do these simplifications if we are optimizing. */
8326 if (!optimize)
8327 return NULL_TREE;
8328
8329 /* Check for things like (A || B) && (A || C). We can convert this
8330 to A || (B && C). Note that either operator can be any of the four
8331 truth and/or operations and the transformation will still be
8332 valid. Also note that we only care about order for the
8333 ANDIF and ORIF operators. If B contains side effects, this
8334 might change the truth-value of A. */
8335 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8336 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8337 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8338 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8339 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8340 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8341 {
8342 tree a00 = TREE_OPERAND (arg0, 0);
8343 tree a01 = TREE_OPERAND (arg0, 1);
8344 tree a10 = TREE_OPERAND (arg1, 0);
8345 tree a11 = TREE_OPERAND (arg1, 1);
8346 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8347 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8348 && (code == TRUTH_AND_EXPR
8349 || code == TRUTH_OR_EXPR));
8350
8351 if (operand_equal_p (a00, a10, 0))
8352 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8353 fold_build2_loc (loc, code, type, a01, a11));
8354 else if (commutative && operand_equal_p (a00, a11, 0))
8355 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8356 fold_build2_loc (loc, code, type, a01, a10));
8357 else if (commutative && operand_equal_p (a01, a10, 0))
8358 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8359 fold_build2_loc (loc, code, type, a00, a11));
8360
8361       /* This case is tricky because we must either have commutative
8362 operators or else A10 must not have side-effects. */
8363
8364 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8365 && operand_equal_p (a01, a11, 0))
8366 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8367 fold_build2_loc (loc, code, type, a00, a10),
8368 a01);
8369 }
8370
8371 /* See if we can build a range comparison. */
8372 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8373 return tem;
8374
8375 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8376 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8377 {
8378 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8379 if (tem)
8380 return fold_build2_loc (loc, code, type, tem, arg1);
8381 }
8382
8383 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8384 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8385 {
8386 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8387 if (tem)
8388 return fold_build2_loc (loc, code, type, arg0, tem);
8389 }
8390
8391 /* Check for the possibility of merging component references. If our
8392 lhs is another similar operation, try to merge its rhs with our
8393 rhs. Then try to merge our lhs and rhs. */
8394 if (TREE_CODE (arg0) == code
8395 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8396 TREE_OPERAND (arg0, 1), arg1)))
8397 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8398
8399 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8400 return tem;
8401
8402 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8403 && (code == TRUTH_AND_EXPR
8404 || code == TRUTH_ANDIF_EXPR
8405 || code == TRUTH_OR_EXPR
8406 || code == TRUTH_ORIF_EXPR))
8407 {
8408 enum tree_code ncode, icode;
8409
8410 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8411 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8412 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8413
8414 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8415 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8416 	 We don't want to pack more than two leaves into a non-IF AND/OR
8417 	 expression.
8418 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
8419 	 and isn't equal to IF-CODE, then we don't want to add the
8420 	 right-hand operand.  If the inner right-hand side of the left-hand
8421 	 operand has side effects, or isn't simple, then we can't add to
8422 	 it, as otherwise we might destroy the if-sequence.  */
8423 if (TREE_CODE (arg0) == icode
8424 && simple_operand_p_2 (arg1)
8425 	  /* Needed for sequence points, to handle trapping and
8426 	     side effects.  */
8427 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8428 {
8429 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8430 arg1);
8431 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8432 tem);
8433 }
8434       /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8435 	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8436 else if (TREE_CODE (arg1) == icode
8437 && simple_operand_p_2 (arg0)
8438 	       /* Needed for sequence points, to handle trapping and
8439 		  side effects.  */
8440 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8441 {
8442 tem = fold_build2_loc (loc, ncode, type,
8443 arg0, TREE_OPERAND (arg1, 0));
8444 return fold_build2_loc (loc, icode, type, tem,
8445 TREE_OPERAND (arg1, 1));
8446 }
8447 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8448 into (A OR B).
8449 	 For sequence point consistency, we need to check for trapping
8450 	 and side effects.  */
8451 else if (code == icode && simple_operand_p_2 (arg0)
8452 && simple_operand_p_2 (arg1))
8453 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8454 }
8455
8456 return NULL_TREE;
8457 }
8458
8459 /* Fold a binary expression of code CODE and type TYPE with operands
8460 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8461 Return the folded expression if folding is successful. Otherwise,
8462 return NULL_TREE. */
8463
8464 static tree
8465 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8466 {
8467 enum tree_code compl_code;
8468
8469 if (code == MIN_EXPR)
8470 compl_code = MAX_EXPR;
8471 else if (code == MAX_EXPR)
8472 compl_code = MIN_EXPR;
8473 else
8474 gcc_unreachable ();
8475
8476 /* MIN (MAX (a, b), b) == b. */
8477 if (TREE_CODE (op0) == compl_code
8478 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8479 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8480
8481 /* MIN (MAX (b, a), b) == b. */
8482 if (TREE_CODE (op0) == compl_code
8483 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8484 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8485 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8486
8487 /* MIN (a, MAX (a, b)) == a. */
8488 if (TREE_CODE (op1) == compl_code
8489 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8490 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8491 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8492
8493 /* MIN (a, MAX (b, a)) == a. */
8494 if (TREE_CODE (op1) == compl_code
8495 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8496 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8497 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8498
8499 return NULL_TREE;
8500 }
8501
8502 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8503 by changing CODE to reduce the magnitude of constants involved in
8504 ARG0 of the comparison.
8505 Returns a canonicalized comparison tree if a simplification was
8506 possible, otherwise returns NULL_TREE.
8507 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8508 valid if signed overflow is undefined. */
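/* For example, "a - 2 < b" can be canonicalized to "a - 1 <= b" (valid
   only when signed overflow is undefined), and the constant-first
   comparison "3 <= b" to "b > 2".  Both are illustrative instances of
   the rules below.  */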
8509
8510 static tree
8511 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8512 tree arg0, tree arg1,
8513 bool *strict_overflow_p)
8514 {
8515 enum tree_code code0 = TREE_CODE (arg0);
8516 tree t, cst0 = NULL_TREE;
8517 int sgn0;
8518 bool swap = false;
8519
8520 /* Match A +- CST code arg1 and CST code arg1. We can change the
8521 first form only if overflow is undefined. */
8522 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8523 /* In principle pointers also have undefined overflow behavior,
8524 but that causes problems elsewhere. */
8525 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8526 && (code0 == MINUS_EXPR
8527 || code0 == PLUS_EXPR)
8528 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8529 || code0 == INTEGER_CST))
8530 return NULL_TREE;
8531
8532 /* Identify the constant in arg0 and its sign. */
8533 if (code0 == INTEGER_CST)
8534 cst0 = arg0;
8535 else
8536 cst0 = TREE_OPERAND (arg0, 1);
8537 sgn0 = tree_int_cst_sgn (cst0);
8538
8539 /* Overflowed constants and zero will cause problems. */
8540 if (integer_zerop (cst0)
8541 || TREE_OVERFLOW (cst0))
8542 return NULL_TREE;
8543
8544 /* See if we can reduce the magnitude of the constant in
8545 arg0 by changing the comparison code. */
8546 if (code0 == INTEGER_CST)
8547 {
8548 /* CST <= arg1 -> CST-1 < arg1. */
8549 if (code == LE_EXPR && sgn0 == 1)
8550 code = LT_EXPR;
8551 /* -CST < arg1 -> -CST-1 <= arg1. */
8552 else if (code == LT_EXPR && sgn0 == -1)
8553 code = LE_EXPR;
8554 /* CST > arg1 -> CST-1 >= arg1. */
8555 else if (code == GT_EXPR && sgn0 == 1)
8556 code = GE_EXPR;
8557 /* -CST >= arg1 -> -CST-1 > arg1. */
8558 else if (code == GE_EXPR && sgn0 == -1)
8559 code = GT_EXPR;
8560 else
8561 return NULL_TREE;
8562 /* arg1 code' CST' might be more canonical. */
8563 swap = true;
8564 }
8565 else
8566 {
8567 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8568 if (code == LT_EXPR
8569 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8570 code = LE_EXPR;
8571 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8572 else if (code == GT_EXPR
8573 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8574 code = GE_EXPR;
8575 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8576 else if (code == LE_EXPR
8577 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8578 code = LT_EXPR;
8579 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8580 else if (code == GE_EXPR
8581 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8582 code = GT_EXPR;
8583 else
8584 return NULL_TREE;
8585 *strict_overflow_p = true;
8586 }
8587
8588 /* Now build the constant reduced in magnitude. But not if that
8589 would produce one outside of its type's range. */
8590 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8591 && ((sgn0 == 1
8592 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8593 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8594 || (sgn0 == -1
8595 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8596 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8597 /* We cannot swap the comparison here as that would cause us to
8598 endlessly recurse. */
8599 return NULL_TREE;
8600
8601 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8602 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8603 if (code0 != INTEGER_CST)
8604 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8605 t = fold_convert (TREE_TYPE (arg1), t);
8606
8607 /* If swapping might yield a more canonical form, do so. */
8608 if (swap)
8609 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8610 else
8611 return fold_build2_loc (loc, code, type, t, arg1);
8612 }
8613
8614 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8615 overflow further. Try to decrease the magnitude of constants involved
8616 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8617 and put sole constants at the second argument position.
8618 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8619
8620 static tree
8621 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8622 tree arg0, tree arg1)
8623 {
8624 tree t;
8625 bool strict_overflow_p;
8626 const char * const warnmsg = G_("assuming signed overflow does not occur "
8627 "when reducing constant in comparison");
8628
8629 /* Try canonicalization by simplifying arg0. */
8630 strict_overflow_p = false;
8631 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8632 &strict_overflow_p);
8633 if (t)
8634 {
8635 if (strict_overflow_p)
8636 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8637 return t;
8638 }
8639
8640 /* Try canonicalization by simplifying arg1 using the swapped
8641 comparison. */
8642 code = swap_tree_comparison (code);
8643 strict_overflow_p = false;
8644 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8645 &strict_overflow_p);
8646 if (t && strict_overflow_p)
8647 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8648 return t;
8649 }
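
/* Two illustrative rewrites, for signed x and y with undefined
   overflow:

     x + 1 <= y   ->   x < y    (the constant drops to 0 and the
                                 PLUS_EXPR folds away; reported via
                                 fold_overflow_warning)
     y >= 2       ->   y > 1    (pure constant form, reached through
                                 the swapped call; needs no overflow
                                 assumption)  */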
8650
8651 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8652 space. This is used to avoid issuing overflow warnings for
8653 expressions like &p->x which cannot wrap. */
8654
8655 static bool
8656 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8657 {
8658 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8659 return true;
8660
8661 if (bitpos < 0)
8662 return true;
8663
8664 wide_int wi_offset;
8665 int precision = TYPE_PRECISION (TREE_TYPE (base));
8666 if (offset == NULL_TREE)
8667 wi_offset = wi::zero (precision);
8668 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8669 return true;
8670 else
8671 wi_offset = offset;
8672
8673 bool overflow;
8674 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8675 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8676 if (overflow)
8677 return true;
8678
8679 if (!wi::fits_uhwi_p (total))
8680 return true;
8681
8682 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8683 if (size <= 0)
8684 return true;
8685
8686 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8687 array. */
8688 if (TREE_CODE (base) == ADDR_EXPR)
8689 {
8690 HOST_WIDE_INT base_size;
8691
8692 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8693 if (base_size > 0 && size < base_size)
8694 size = base_size;
8695 }
8696
8697 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8698 }
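
/* For example, for an int *p compared as p + 4 CMP p, BASE is p and
   the constant offset has been folded into BITPOS = 32 bits, so
   TOTAL is 4 bytes.  The pointed-to size is also 4 bytes, so TOTAL
   does not exceed it and the function returns false: &p[1] cannot
   wrap around the address space.  A negative BITPOS, a non-constant
   OFFSET or a total beyond the object size conservatively returns
   true.  */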
8699
8700 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8701 of sizetype kind. This makes sure to properly sign-extend the
8702 constant. */
8703
8704 static HOST_WIDE_INT
8705 size_low_cst (const_tree t)
8706 {
8707 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8708 int prec = TYPE_PRECISION (TREE_TYPE (t));
8709 if (prec < HOST_BITS_PER_WIDE_INT)
8710 return sext_hwi (w, prec);
8711 return w;
8712 }
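
/* For example, with 32-bit sizetype and 64-bit HOST_WIDE_INT the
   sizetype constant 0xfffffffc comes back as -4: an offset just
   below the wrap point is treated as a small negative byte offset
   rather than as nearly 4GB, which is what the bitpos computations
   in fold_comparison below rely on.  */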
8713
8714 /* Subroutine of fold_binary. This routine performs all of the
8715 transformations that are common to the equality/inequality
8716 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8717 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8718 fold_binary should go through fold_binary rather than calling
8719 this routine directly. Fold a comparison with tree code CODE and
8720 type TYPE with operands OP0 and OP1. Return the folded comparison or NULL_TREE. */
8721
8722 static tree
8723 fold_comparison (location_t loc, enum tree_code code, tree type,
8724 tree op0, tree op1)
8725 {
8726 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8727 tree arg0, arg1, tem;
8728
8729 arg0 = op0;
8730 arg1 = op1;
8731
8732 STRIP_SIGN_NOPS (arg0);
8733 STRIP_SIGN_NOPS (arg1);
8734
8735 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8736 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8737 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8738 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8739 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8740 && TREE_CODE (arg1) == INTEGER_CST
8741 && !TREE_OVERFLOW (arg1))
8742 {
8743 const enum tree_code
8744 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8745 tree const1 = TREE_OPERAND (arg0, 1);
8746 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8747 tree variable = TREE_OPERAND (arg0, 0);
8748 tree new_const = int_const_binop (reverse_op, const2, const1);
8749
8750 /* If the constant operation overflowed this can be
8751 simplified as a comparison against INT_MAX/INT_MIN. */
8752 if (TREE_OVERFLOW (new_const))
8753 {
8754 int const1_sgn = tree_int_cst_sgn (const1);
8755 enum tree_code code2 = code;
8756
8757 /* Get the sign of the constant on the lhs if the
8758 operation were VARIABLE + CONST1. */
8759 if (TREE_CODE (arg0) == MINUS_EXPR)
8760 const1_sgn = -const1_sgn;
8761
8762 /* The sign of the constant determines if we overflowed
8763 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8764 Canonicalize to the INT_MIN overflow by swapping the comparison
8765 if necessary. */
8766 if (const1_sgn == -1)
8767 code2 = swap_tree_comparison (code);
8768
8769 /* We now can look at the canonicalized case
8770 VARIABLE + 1 CODE2 INT_MIN
8771 and decide on the result. */
8772 switch (code2)
8773 {
8774 case EQ_EXPR:
8775 case LT_EXPR:
8776 case LE_EXPR:
8777 return
8778 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8779
8780 case NE_EXPR:
8781 case GE_EXPR:
8782 case GT_EXPR:
8783 return
8784 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8785
8786 default:
8787 gcc_unreachable ();
8788 }
8789 }
8790 else
8791 {
8792 if (!equality_code)
8793 fold_overflow_warning ("assuming signed overflow does not occur "
8794 "when changing X +- C1 cmp C2 to "
8795 "X cmp C2 -+ C1",
8796 WARN_STRICT_OVERFLOW_COMPARISON);
8797 return fold_build2_loc (loc, code, type, variable, new_const);
8798 }
8799 }
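
/* Two concrete instances of the above, for signed int x:

     x + 10 == 30     ->  x == 20   (C2 - C1 does not overflow)
     x - 1 < INT_MAX  ->  true      (C2 + C1 overflows, so the
                                     canonicalized INT_MIN case decides
                                     the result; x is kept only for its
                                     side effects)

   The second rewrite is valid because signed overflow of x - 1 is
   undefined.  */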
8800
8801 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8802 if (TREE_CODE (arg0) == MINUS_EXPR
8803 && equality_code
8804 && integer_zerop (arg1))
8805 {
8806 /* ??? The transformation is valid for the other operators if overflow
8807 is undefined for the type, but performing it here badly interacts
8808 with the transformation in fold_cond_expr_with_comparison which
8809 attempts to synthesize ABS_EXPR. */
8810 if (!equality_code)
8811 fold_overflow_warning ("assuming signed overflow does not occur "
8812 "when changing X - Y cmp 0 to X cmp Y",
8813 WARN_STRICT_OVERFLOW_COMPARISON);
8814 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8815 TREE_OPERAND (arg0, 1));
8816 }
8817
8818 /* For comparisons of pointers we can decompose them to a compile time
8819 comparison of the base objects and the offsets into the object.
8820 This requires at least one operand being an ADDR_EXPR or a
8821 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8822 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8823 && (TREE_CODE (arg0) == ADDR_EXPR
8824 || TREE_CODE (arg1) == ADDR_EXPR
8825 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8826 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8827 {
8828 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8829 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8830 enum machine_mode mode;
8831 int volatilep, unsignedp;
8832 bool indirect_base0 = false, indirect_base1 = false;
8833
8834 /* Get base and offset for the access. Strip ADDR_EXPR for
8835 get_inner_reference, but put it back by stripping INDIRECT_REF
8836 off the base object if possible. indirect_baseN will be true
8837 if baseN is not an address but refers to the object itself. */
8838 base0 = arg0;
8839 if (TREE_CODE (arg0) == ADDR_EXPR)
8840 {
8841 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8842 &bitsize, &bitpos0, &offset0, &mode,
8843 &unsignedp, &volatilep, false);
8844 if (TREE_CODE (base0) == INDIRECT_REF)
8845 base0 = TREE_OPERAND (base0, 0);
8846 else
8847 indirect_base0 = true;
8848 }
8849 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8850 {
8851 base0 = TREE_OPERAND (arg0, 0);
8852 STRIP_SIGN_NOPS (base0);
8853 if (TREE_CODE (base0) == ADDR_EXPR)
8854 {
8855 base0 = TREE_OPERAND (base0, 0);
8856 indirect_base0 = true;
8857 }
8858 offset0 = TREE_OPERAND (arg0, 1);
8859 if (tree_fits_shwi_p (offset0))
8860 {
8861 HOST_WIDE_INT off = size_low_cst (offset0);
8862 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8863 * BITS_PER_UNIT)
8864 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8865 {
8866 bitpos0 = off * BITS_PER_UNIT;
8867 offset0 = NULL_TREE;
8868 }
8869 }
8870 }
8871
8872 base1 = arg1;
8873 if (TREE_CODE (arg1) == ADDR_EXPR)
8874 {
8875 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8876 &bitsize, &bitpos1, &offset1, &mode,
8877 &unsignedp, &volatilep, false);
8878 if (TREE_CODE (base1) == INDIRECT_REF)
8879 base1 = TREE_OPERAND (base1, 0);
8880 else
8881 indirect_base1 = true;
8882 }
8883 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8884 {
8885 base1 = TREE_OPERAND (arg1, 0);
8886 STRIP_SIGN_NOPS (base1);
8887 if (TREE_CODE (base1) == ADDR_EXPR)
8888 {
8889 base1 = TREE_OPERAND (base1, 0);
8890 indirect_base1 = true;
8891 }
8892 offset1 = TREE_OPERAND (arg1, 1);
8893 if (tree_fits_shwi_p (offset1))
8894 {
8895 HOST_WIDE_INT off = size_low_cst (offset1);
8896 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8897 * BITS_PER_UNIT)
8898 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8899 {
8900 bitpos1 = off * BITS_PER_UNIT;
8901 offset1 = NULL_TREE;
8902 }
8903 }
8904 }
8905
8906 /* A local variable can never be pointed to by
8907 the default SSA name of an incoming parameter. */
8908 if ((TREE_CODE (arg0) == ADDR_EXPR
8909 && indirect_base0
8910 && TREE_CODE (base0) == VAR_DECL
8911 && auto_var_in_fn_p (base0, current_function_decl)
8912 && !indirect_base1
8913 && TREE_CODE (base1) == SSA_NAME
8914 && SSA_NAME_IS_DEFAULT_DEF (base1)
8915 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8916 || (TREE_CODE (arg1) == ADDR_EXPR
8917 && indirect_base1
8918 && TREE_CODE (base1) == VAR_DECL
8919 && auto_var_in_fn_p (base1, current_function_decl)
8920 && !indirect_base0
8921 && TREE_CODE (base0) == SSA_NAME
8922 && SSA_NAME_IS_DEFAULT_DEF (base0)
8923 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8924 {
8925 if (code == NE_EXPR)
8926 return constant_boolean_node (1, type);
8927 else if (code == EQ_EXPR)
8928 return constant_boolean_node (0, type);
8929 }
8930 /* If we have equivalent bases we might be able to simplify. */
8931 else if (indirect_base0 == indirect_base1
8932 && operand_equal_p (base0, base1, 0))
8933 {
8934 /* We can fold this expression to a constant if the non-constant
8935 offset parts are equal. */
8936 if ((offset0 == offset1
8937 || (offset0 && offset1
8938 && operand_equal_p (offset0, offset1, 0)))
8939 && (code == EQ_EXPR
8940 || code == NE_EXPR
8941 || (indirect_base0 && DECL_P (base0))
8942 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8943
8944 {
8945 if (!equality_code
8946 && bitpos0 != bitpos1
8947 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8948 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8949 fold_overflow_warning (("assuming pointer wraparound does not "
8950 "occur when comparing P +- C1 with "
8951 "P +- C2"),
8952 WARN_STRICT_OVERFLOW_CONDITIONAL);
8953
8954 switch (code)
8955 {
8956 case EQ_EXPR:
8957 return constant_boolean_node (bitpos0 == bitpos1, type);
8958 case NE_EXPR:
8959 return constant_boolean_node (bitpos0 != bitpos1, type);
8960 case LT_EXPR:
8961 return constant_boolean_node (bitpos0 < bitpos1, type);
8962 case LE_EXPR:
8963 return constant_boolean_node (bitpos0 <= bitpos1, type);
8964 case GE_EXPR:
8965 return constant_boolean_node (bitpos0 >= bitpos1, type);
8966 case GT_EXPR:
8967 return constant_boolean_node (bitpos0 > bitpos1, type);
8968 default:;
8969 }
8970 }
8971 /* We can simplify the comparison to a comparison of the variable
8972 offset parts if the constant offset parts are equal.
8973 Be careful to use signed sizetype here because otherwise we
8974 mess with array offsets in the wrong way. This is possible
8975 because pointer arithmetic is restricted to remain within an
8976 object and overflow on pointer differences is undefined as of
8977 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8978 else if (bitpos0 == bitpos1
8979 && (equality_code
8980 || (indirect_base0 && DECL_P (base0))
8981 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8982 {
8983 /* By converting to signed sizetype we cover middle-end pointer
8984 arithmetic which operates on unsigned pointer types of size
8985 type size and ARRAY_REF offsets which are properly sign or
8986 zero extended from their type in case it is narrower than
8987 sizetype. */
8988 if (offset0 == NULL_TREE)
8989 offset0 = build_int_cst (ssizetype, 0);
8990 else
8991 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8992 if (offset1 == NULL_TREE)
8993 offset1 = build_int_cst (ssizetype, 0);
8994 else
8995 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8996
8997 if (!equality_code
8998 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8999 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9000 fold_overflow_warning (("assuming pointer wraparound does not "
9001 "occur when comparing P +- C1 with "
9002 "P +- C2"),
9003 WARN_STRICT_OVERFLOW_COMPARISON);
9004
9005 return fold_build2_loc (loc, code, type, offset0, offset1);
9006 }
9007 }
9008 /* For non-equal bases we can simplify if they are addresses
9009 of local binding decls or constants. */
9010 else if (indirect_base0 && indirect_base1
9011 /* We know that !operand_equal_p (base0, base1, 0)
9012 because the if condition was false. But make
9013 sure two decls are not the same. */
9014 && base0 != base1
9015 && TREE_CODE (arg0) == ADDR_EXPR
9016 && TREE_CODE (arg1) == ADDR_EXPR
9017 && (((TREE_CODE (base0) == VAR_DECL
9018 || TREE_CODE (base0) == PARM_DECL)
9019 && (targetm.binds_local_p (base0)
9020 || CONSTANT_CLASS_P (base1)))
9021 || CONSTANT_CLASS_P (base0))
9022 && (((TREE_CODE (base1) == VAR_DECL
9023 || TREE_CODE (base1) == PARM_DECL)
9024 && (targetm.binds_local_p (base1)
9025 || CONSTANT_CLASS_P (base0)))
9026 || CONSTANT_CLASS_P (base1)))
9027 {
9028 if (code == EQ_EXPR)
9029 return omit_two_operands_loc (loc, type, boolean_false_node,
9030 arg0, arg1);
9031 else if (code == NE_EXPR)
9032 return omit_two_operands_loc (loc, type, boolean_true_node,
9033 arg0, arg1);
9034 }
9035 /* For equal offsets we can simplify to a comparison of the
9036 base addresses. */
9037 else if (bitpos0 == bitpos1
9038 && (indirect_base0
9039 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9040 && (indirect_base1
9041 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9042 && ((offset0 == offset1)
9043 || (offset0 && offset1
9044 && operand_equal_p (offset0, offset1, 0))))
9045 {
9046 if (indirect_base0)
9047 base0 = build_fold_addr_expr_loc (loc, base0);
9048 if (indirect_base1)
9049 base1 = build_fold_addr_expr_loc (loc, base1);
9050 return fold_build2_loc (loc, code, type, base0, base1);
9051 }
9052 }
9053
9054 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9055 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9056 the resulting offset is smaller in absolute value than the
9057 original one and has the same sign. */
9058 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9059 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9060 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9061 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9062 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9063 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9064 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9065 {
9066 tree const1 = TREE_OPERAND (arg0, 1);
9067 tree const2 = TREE_OPERAND (arg1, 1);
9068 tree variable1 = TREE_OPERAND (arg0, 0);
9069 tree variable2 = TREE_OPERAND (arg1, 0);
9070 tree cst;
9071 const char * const warnmsg = G_("assuming signed overflow does not "
9072 "occur when combining constants around "
9073 "a comparison");
9074
9075 /* Put the constant on the side where it doesn't overflow and is
9076 of lower absolute value and of the same sign as before. */
9077 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9078 ? MINUS_EXPR : PLUS_EXPR,
9079 const2, const1);
9080 if (!TREE_OVERFLOW (cst)
9081 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9082 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9083 {
9084 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9085 return fold_build2_loc (loc, code, type,
9086 variable1,
9087 fold_build2_loc (loc, TREE_CODE (arg1),
9088 TREE_TYPE (arg1),
9089 variable2, cst));
9090 }
9091
9092 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9093 ? MINUS_EXPR : PLUS_EXPR,
9094 const1, const2);
9095 if (!TREE_OVERFLOW (cst)
9096 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9097 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9098 {
9099 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9100 return fold_build2_loc (loc, code, type,
9101 fold_build2_loc (loc, TREE_CODE (arg0),
9102 TREE_TYPE (arg0),
9103 variable1, cst),
9104 variable2);
9105 }
9106 }
9107
9108 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9109 signed arithmetic case. That form is created by the compiler
9110 often enough for folding it to be of value. One example is in
9111 computing loop trip counts after Operator Strength Reduction. */
9112 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9113 && TREE_CODE (arg0) == MULT_EXPR
9114 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9115 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9116 && integer_zerop (arg1))
9117 {
9118 tree const1 = TREE_OPERAND (arg0, 1);
9119 tree const2 = arg1; /* zero */
9120 tree variable1 = TREE_OPERAND (arg0, 0);
9121 enum tree_code cmp_code = code;
9122
9123 /* Handle unfolded multiplication by zero. */
9124 if (integer_zerop (const1))
9125 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9126
9127 fold_overflow_warning (("assuming signed overflow does not occur when "
9128 "eliminating multiplication in comparison "
9129 "with zero"),
9130 WARN_STRICT_OVERFLOW_COMPARISON);
9131
9132 /* If const1 is negative we swap the sense of the comparison. */
9133 if (tree_int_cst_sgn (const1) < 0)
9134 cmp_code = swap_tree_comparison (cmp_code);
9135
9136 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9137 }
9138
9139 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9140 if (tem)
9141 return tem;
9142
9143 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9144 {
9145 tree targ0 = strip_float_extensions (arg0);
9146 tree targ1 = strip_float_extensions (arg1);
9147 tree newtype = TREE_TYPE (targ0);
9148
9149 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9150 newtype = TREE_TYPE (targ1);
9151
9152 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9153 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9154 return fold_build2_loc (loc, code, type,
9155 fold_convert_loc (loc, newtype, targ0),
9156 fold_convert_loc (loc, newtype, targ1));
9157
9158 /* (-a) CMP (-b) -> b CMP a */
9159 if (TREE_CODE (arg0) == NEGATE_EXPR
9160 && TREE_CODE (arg1) == NEGATE_EXPR)
9161 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9162 TREE_OPERAND (arg0, 0));
9163
9164 if (TREE_CODE (arg1) == REAL_CST)
9165 {
9166 REAL_VALUE_TYPE cst;
9167 cst = TREE_REAL_CST (arg1);
9168
9169 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9170 if (TREE_CODE (arg0) == NEGATE_EXPR)
9171 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9172 TREE_OPERAND (arg0, 0),
9173 build_real (TREE_TYPE (arg1),
9174 real_value_negate (&cst)));
9175
9176 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9177 /* a CMP (-0) -> a CMP 0 */
9178 if (REAL_VALUE_MINUS_ZERO (cst))
9179 return fold_build2_loc (loc, code, type, arg0,
9180 build_real (TREE_TYPE (arg1), dconst0));
9181
9182 /* x != NaN is always true, other ops are always false. */
9183 if (REAL_VALUE_ISNAN (cst)
9184 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9185 {
9186 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9187 return omit_one_operand_loc (loc, type, tem, arg0);
9188 }
9189
9190 /* Fold comparisons against infinity. */
9191 if (REAL_VALUE_ISINF (cst)
9192 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9193 {
9194 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9195 if (tem != NULL_TREE)
9196 return tem;
9197 }
9198 }
9199
9200 /* If this is a comparison of a real constant with a PLUS_EXPR
9201 or a MINUS_EXPR of a real constant, we can convert it into a
9202 comparison with a revised real constant, provided no overflow
9203 occurs; this is only done when unsafe math optimizations are enabled. */
9204 if (flag_unsafe_math_optimizations
9205 && TREE_CODE (arg1) == REAL_CST
9206 && (TREE_CODE (arg0) == PLUS_EXPR
9207 || TREE_CODE (arg0) == MINUS_EXPR)
9208 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9209 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9210 ? MINUS_EXPR : PLUS_EXPR,
9211 arg1, TREE_OPERAND (arg0, 1)))
9212 && !TREE_OVERFLOW (tem))
9213 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9214
9215 /* Likewise, we can simplify a comparison of a real constant with
9216 a MINUS_EXPR whose first operand is also a real constant, i.e.
9217 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9218 floating-point types only if -fassociative-math is set. */
9219 if (flag_associative_math
9220 && TREE_CODE (arg1) == REAL_CST
9221 && TREE_CODE (arg0) == MINUS_EXPR
9222 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9223 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9224 arg1))
9225 && !TREE_OVERFLOW (tem))
9226 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9227 TREE_OPERAND (arg0, 1), tem);
9228
9229 /* Fold comparisons against built-in math functions. */
9230 if (TREE_CODE (arg1) == REAL_CST
9231 && flag_unsafe_math_optimizations
9232 && ! flag_errno_math)
9233 {
9234 enum built_in_function fcode = builtin_mathfn_code (arg0);
9235
9236 if (fcode != END_BUILTINS)
9237 {
9238 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9239 if (tem != NULL_TREE)
9240 return tem;
9241 }
9242 }
9243 }
9244
9245 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9246 && CONVERT_EXPR_P (arg0))
9247 {
9248 /* If we are widening one operand of an integer comparison,
9249 see if the other operand is similarly being widened. Perhaps we
9250 can do the comparison in the narrower type. */
9251 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9252 if (tem)
9253 return tem;
9254
9255 /* Or if we are changing signedness. */
9256 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9257 if (tem)
9258 return tem;
9259 }
9260
9261 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9262 constant, we can simplify it. */
9263 if (TREE_CODE (arg1) == INTEGER_CST
9264 && (TREE_CODE (arg0) == MIN_EXPR
9265 || TREE_CODE (arg0) == MAX_EXPR)
9266 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9267 {
9268 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9269 if (tem)
9270 return tem;
9271 }
9272
9273 /* Simplify comparison of something with itself. (For IEEE
9274 floating-point, we can only do some of these simplifications.) */
9275 if (operand_equal_p (arg0, arg1, 0))
9276 {
9277 switch (code)
9278 {
9279 case EQ_EXPR:
9280 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9281 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9282 return constant_boolean_node (1, type);
9283 break;
9284
9285 case GE_EXPR:
9286 case LE_EXPR:
9287 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9288 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9289 return constant_boolean_node (1, type);
9290 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9291
9292 case NE_EXPR:
9293 /* For NE, we can only do this simplification for integral types,
9294 or when we don't honor IEEE floating point NaNs. */
9295 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9296 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9297 break;
9298 /* ... fall through ... */
9299 case GT_EXPR:
9300 case LT_EXPR:
9301 return constant_boolean_node (0, type);
9302 default:
9303 gcc_unreachable ();
9304 }
9305 }
9306
9307 /* If we are comparing an expression that just has comparisons
9308 of two integer values, arithmetic expressions of those comparisons,
9309 and constants, we can simplify it. There are only three cases
9310 to check: the two values can either be equal, the first can be
9311 greater, or the second can be greater. Fold the expression for
9312 those three values. Since each value must be 0 or 1, we have
9313 eight possibilities, each of which corresponds to the constant 0
9314 or 1 or one of the six possible comparisons.
9315
9316 This handles common cases like (a > b) == 0 but also handles
9317 expressions like ((x > y) - (y > x)) > 0, which supposedly
9318 occur in macroized code. */
9319
9320 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9321 {
9322 tree cval1 = 0, cval2 = 0;
9323 int save_p = 0;
9324
9325 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9326 /* Don't handle degenerate cases here; they should already
9327 have been handled anyway. */
9328 && cval1 != 0 && cval2 != 0
9329 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9330 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9331 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9332 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9333 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9334 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9335 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9336 {
9337 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9338 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9339
9340 /* We can't just pass T to eval_subst in case cval1 or cval2
9341 was the same as ARG1. */
9342
9343 tree high_result
9344 = fold_build2_loc (loc, code, type,
9345 eval_subst (loc, arg0, cval1, maxval,
9346 cval2, minval),
9347 arg1);
9348 tree equal_result
9349 = fold_build2_loc (loc, code, type,
9350 eval_subst (loc, arg0, cval1, maxval,
9351 cval2, maxval),
9352 arg1);
9353 tree low_result
9354 = fold_build2_loc (loc, code, type,
9355 eval_subst (loc, arg0, cval1, minval,
9356 cval2, maxval),
9357 arg1);
9358
9359 /* All three of these results should be 0 or 1. Confirm they are.
9360 Then use those values to select the proper code to use. */
9361
9362 if (TREE_CODE (high_result) == INTEGER_CST
9363 && TREE_CODE (equal_result) == INTEGER_CST
9364 && TREE_CODE (low_result) == INTEGER_CST)
9365 {
9366 /* Make a 3-bit mask with the high-order bit being the
9367 value for `>', the next for `=', and the low for `<'. */
9368 switch ((integer_onep (high_result) * 4)
9369 + (integer_onep (equal_result) * 2)
9370 + integer_onep (low_result))
9371 {
9372 case 0:
9373 /* Always false. */
9374 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9375 case 1:
9376 code = LT_EXPR;
9377 break;
9378 case 2:
9379 code = EQ_EXPR;
9380 break;
9381 case 3:
9382 code = LE_EXPR;
9383 break;
9384 case 4:
9385 code = GT_EXPR;
9386 break;
9387 case 5:
9388 code = NE_EXPR;
9389 break;
9390 case 6:
9391 code = GE_EXPR;
9392 break;
9393 case 7:
9394 /* Always true. */
9395 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9396 }
9397
9398 if (save_p)
9399 {
9400 tem = save_expr (build2 (code, type, cval1, cval2));
9401 SET_EXPR_LOCATION (tem, loc);
9402 return tem;
9403 }
9404 return fold_build2_loc (loc, code, type, cval1, cval2);
9405 }
9406 }
9407 }
9408
9409 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9410 into a single range test. */
9411 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9412 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9413 && TREE_CODE (arg1) == INTEGER_CST
9414 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9415 && !integer_zerop (TREE_OPERAND (arg0, 1))
9416 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9417 && !TREE_OVERFLOW (arg1))
9418 {
9419 tem = fold_div_compare (loc, code, type, arg0, arg1);
9420 if (tem != NULL_TREE)
9421 return tem;
9422 }
9423
9424 /* Fold ~X op ~Y as Y op X. */
9425 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9426 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9427 {
9428 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9429 return fold_build2_loc (loc, code, type,
9430 fold_convert_loc (loc, cmp_type,
9431 TREE_OPERAND (arg1, 0)),
9432 TREE_OPERAND (arg0, 0));
9433 }
9434
9435 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9436 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9437 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9438 {
9439 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9440 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9441 TREE_OPERAND (arg0, 0),
9442 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9443 fold_convert_loc (loc, cmp_type, arg1)));
9444 }
9445
9446 return NULL_TREE;
9447 }
9448
9449
9450 /* Subroutine of fold_binary. Optimize complex multiplications of the
9451 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9452 argument EXPR represents the expression "z" of type TYPE. */
9453
9454 static tree
9455 fold_mult_zconjz (location_t loc, tree type, tree expr)
9456 {
9457 tree itype = TREE_TYPE (type);
9458 tree rpart, ipart, tem;
9459
9460 if (TREE_CODE (expr) == COMPLEX_EXPR)
9461 {
9462 rpart = TREE_OPERAND (expr, 0);
9463 ipart = TREE_OPERAND (expr, 1);
9464 }
9465 else if (TREE_CODE (expr) == COMPLEX_CST)
9466 {
9467 rpart = TREE_REALPART (expr);
9468 ipart = TREE_IMAGPART (expr);
9469 }
9470 else
9471 {
9472 expr = save_expr (expr);
9473 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9474 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9475 }
9476
9477 rpart = save_expr (rpart);
9478 ipart = save_expr (ipart);
9479 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9480 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9481 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9482 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9483 build_zero_cst (itype));
9484 }
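
/* Shape of the result: for z * CONJ_EXPR <z> (spelled z * ~z in GNU
   C), this builds

       COMPLEX_EXPR <r*r + i*i, 0>

   where r and i are the REALPART_EXPR and IMAGPART_EXPR of z wrapped
   in SAVE_EXPRs, so z is evaluated only once and the imaginary part
   is exactly zero.  */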
9485
9486
9487 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9488 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9489 guarantees that P and N have the same least significant log2(M) bits.
9490 N is not otherwise constrained. In particular, N is not normalized to
9491 0 <= N < M as is common. In general, the precise value of P is unknown.
9492 M is chosen as large as possible such that constant N can be determined.
9493
9494 Returns M and sets *RESIDUE to N.
9495
9496 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9497 account. This is not always possible due to PR 35705.
9498 */
9499
9500 static unsigned HOST_WIDE_INT
9501 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9502 bool allow_func_align)
9503 {
9504 enum tree_code code;
9505
9506 *residue = 0;
9507
9508 code = TREE_CODE (expr);
9509 if (code == ADDR_EXPR)
9510 {
9511 unsigned int bitalign;
9512 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9513 *residue /= BITS_PER_UNIT;
9514 return bitalign / BITS_PER_UNIT;
9515 }
9516 else if (code == POINTER_PLUS_EXPR)
9517 {
9518 tree op0, op1;
9519 unsigned HOST_WIDE_INT modulus;
9520 enum tree_code inner_code;
9521
9522 op0 = TREE_OPERAND (expr, 0);
9523 STRIP_NOPS (op0);
9524 modulus = get_pointer_modulus_and_residue (op0, residue,
9525 allow_func_align);
9526
9527 op1 = TREE_OPERAND (expr, 1);
9528 STRIP_NOPS (op1);
9529 inner_code = TREE_CODE (op1);
9530 if (inner_code == INTEGER_CST)
9531 {
9532 *residue += TREE_INT_CST_LOW (op1);
9533 return modulus;
9534 }
9535 else if (inner_code == MULT_EXPR)
9536 {
9537 op1 = TREE_OPERAND (op1, 1);
9538 if (TREE_CODE (op1) == INTEGER_CST)
9539 {
9540 unsigned HOST_WIDE_INT align;
9541
9542 /* Compute the greatest power-of-2 divisor of op1. */
9543 align = TREE_INT_CST_LOW (op1);
9544 align &= -align;
9545
9546 /* If align is non-zero and less than modulus, replace
9547 modulus with align. If align is 0, then either op1 is 0
9548 or the greatest power-of-2 divisor of op1 doesn't fit in an
9549 unsigned HOST_WIDE_INT. In either case, no additional
9550 constraint is imposed. */
9551 if (align)
9552 modulus = MIN (modulus, align);
9553
9554 return modulus;
9555 }
9556 }
9557 }
9558
9559 /* If we get here, we were unable to determine anything useful about the
9560 expression. */
9561 return 1;
9562 }
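
/* For example, if EXPR is &a[1] for a 16-byte-aligned array of
   4-byte ints, the ADDR_EXPR case gives modulus 16 and residue 4:
   every possible value P of the expression satisfies P == 4 (mod 16).
   If EXPR is &a[0] + i * 8, the POINTER_PLUS_EXPR case recurses on
   &a[0] (modulus 16, residue 0) and the MULT_EXPR case then caps the
   modulus at 8, i.e. P == 0 (mod 8) whatever i is.  */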
9563
9564 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9565 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9566
9567 static bool
9568 vec_cst_ctor_to_array (tree arg, tree *elts)
9569 {
9570 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9571
9572 if (TREE_CODE (arg) == VECTOR_CST)
9573 {
9574 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9575 elts[i] = VECTOR_CST_ELT (arg, i);
9576 }
9577 else if (TREE_CODE (arg) == CONSTRUCTOR)
9578 {
9579 constructor_elt *elt;
9580
9581 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9582 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9583 return false;
9584 else
9585 elts[i] = elt->value;
9586 }
9587 else
9588 return false;
9589 for (; i < nelts; i++)
9590 elts[i]
9591 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9592 return true;
9593 }
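
/* For example, a V4SI CONSTRUCTOR with initializers {1, 2} fills
   ELTS as {1, 2, 0, 0}: missing trailing elements become zero of the
   element type, matching GENERIC's semantics for partial vector
   constructors.  A CONSTRUCTOR containing vector-typed elements is
   rejected.  */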
9594
9595 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9596 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9597 NULL_TREE otherwise. */
9598
9599 static tree
9600 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9601 {
9602 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9603 tree *elts;
9604 bool need_ctor = false;
9605
9606 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9607 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9608 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9609 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9610 return NULL_TREE;
9611
9612 elts = XALLOCAVEC (tree, nelts * 3);
9613 if (!vec_cst_ctor_to_array (arg0, elts)
9614 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9615 return NULL_TREE;
9616
9617 for (i = 0; i < nelts; i++)
9618 {
9619 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9620 need_ctor = true;
9621 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9622 }
9623
9624 if (need_ctor)
9625 {
9626 vec<constructor_elt, va_gc> *v;
9627 vec_alloc (v, nelts);
9628 for (i = 0; i < nelts; i++)
9629 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9630 return build_constructor (type, v);
9631 }
9632 else
9633 return build_vector (type, &elts[2 * nelts]);
9634 }
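
/* For example, with nelts == 4, ARG0 == {0, 1, 2, 3},
   ARG1 == {4, 5, 6, 7} and SEL == {0, 4, 1, 5}, the result is the
   VECTOR_CST {0, 4, 1, 5}: selector values below nelts index into
   ARG0 and the rest into ARG1.  A CONSTRUCTOR is returned instead
   whenever some selected element is not constant.  */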
9635
9636 /* Try to fold a pointer difference of type TYPE between two address
9637 expressions of array references AREF0 and AREF1 using location LOC. Return a
9638 simplified expression for the difference or NULL_TREE. */
9639
9640 static tree
9641 fold_addr_of_array_ref_difference (location_t loc, tree type,
9642 tree aref0, tree aref1)
9643 {
9644 tree base0 = TREE_OPERAND (aref0, 0);
9645 tree base1 = TREE_OPERAND (aref1, 0);
9646 tree base_offset = build_int_cst (type, 0);
9647
9648 /* If the bases are array references as well, recurse. If the bases
9649 are pointer indirections compute the difference of the pointers.
9650 If the bases are equal, we are set. */
9651 if ((TREE_CODE (base0) == ARRAY_REF
9652 && TREE_CODE (base1) == ARRAY_REF
9653 && (base_offset
9654 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9655 || (INDIRECT_REF_P (base0)
9656 && INDIRECT_REF_P (base1)
9657 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9658 TREE_OPERAND (base0, 0),
9659 TREE_OPERAND (base1, 0))))
9660 || operand_equal_p (base0, base1, 0))
9661 {
9662 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9663 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9664 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9665 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9666 return fold_build2_loc (loc, PLUS_EXPR, type,
9667 base_offset,
9668 fold_build2_loc (loc, MULT_EXPR, type,
9669 diff, esz));
9670 }
9671 return NULL_TREE;
9672 }
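
/* For example (assuming 4-byte int), for int a[8][8] the address
   difference &a[i][k] - &a[j][l] recurses on the outer ARRAY_REFs
   a[i] and a[j] and folds to the byte difference

       (i - j) * 32 + (k - l) * 4

   with the element sizes taken from array_ref_element_size.  */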
9673
9674 /* If the real or vector real constant CST of type TYPE has an exact
9675 inverse, return it, else return NULL. */
9676
9677 static tree
9678 exact_inverse (tree type, tree cst)
9679 {
9680 REAL_VALUE_TYPE r;
9681 tree unit_type, *elts;
9682 enum machine_mode mode;
9683 unsigned vec_nelts, i;
9684
9685 switch (TREE_CODE (cst))
9686 {
9687 case REAL_CST:
9688 r = TREE_REAL_CST (cst);
9689
9690 if (exact_real_inverse (TYPE_MODE (type), &r))
9691 return build_real (type, r);
9692
9693 return NULL_TREE;
9694
9695 case VECTOR_CST:
9696 vec_nelts = VECTOR_CST_NELTS (cst);
9697 elts = XALLOCAVEC (tree, vec_nelts);
9698 unit_type = TREE_TYPE (type);
9699 mode = TYPE_MODE (unit_type);
9700
9701 for (i = 0; i < vec_nelts; i++)
9702 {
9703 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9704 if (!exact_real_inverse (mode, &r))
9705 return NULL_TREE;
9706 elts[i] = build_real (unit_type, r);
9707 }
9708
9709 return build_vector (type, elts);
9710
9711 default:
9712 return NULL_TREE;
9713 }
9714 }
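
/* For example, for double constants 4.0 yields 0.25 and -0.5 yields
   -2.0, while 3.0 yields NULL_TREE because 1/3 is not exactly
   representable in binary floating point.  For a VECTOR_CST every
   element must have an exact inverse or the whole call fails.  */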
9715
9716 /* Mask out the tz least significant bits of X of type TYPE where
9717 tz is the number of trailing zeroes in Y. */
9718 static wide_int
9719 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9720 {
9721 int tz = wi::ctz (y);
9722 if (tz > 0)
9723 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9724 return x;
9725 }
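
/* For example, with Y == 24 (binary 11000, three trailing zeroes)
   and X == 23 (binary 10111), the result is 16 (binary 10000): the
   low three bits of X are cleared, since they cannot survive a
   BIT_AND_EXPR against a multiple of 8.  */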
9726
9727 /* Return true when T is an address and is known to be nonzero.
9728 For floating point we further ensure that T is not denormal.
9729 Similar logic is present in nonzero_address in rtlanal.h.
9730
9731 If the return value is based on the assumption that signed overflow
9732 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9733 change *STRICT_OVERFLOW_P. */
9734
9735 static bool
9736 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9737 {
9738 tree type = TREE_TYPE (t);
9739 enum tree_code code;
9740
9741 /* Doing something useful for floating point would need more work. */
9742 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9743 return false;
9744
9745 code = TREE_CODE (t);
9746 switch (TREE_CODE_CLASS (code))
9747 {
9748 case tcc_unary:
9749 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9750 strict_overflow_p);
9751 case tcc_binary:
9752 case tcc_comparison:
9753 return tree_binary_nonzero_warnv_p (code, type,
9754 TREE_OPERAND (t, 0),
9755 TREE_OPERAND (t, 1),
9756 strict_overflow_p);
9757 case tcc_constant:
9758 case tcc_declaration:
9759 case tcc_reference:
9760 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9761
9762 default:
9763 break;
9764 }
9765
9766 switch (code)
9767 {
9768 case TRUTH_NOT_EXPR:
9769 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9770 strict_overflow_p);
9771
9772 case TRUTH_AND_EXPR:
9773 case TRUTH_OR_EXPR:
9774 case TRUTH_XOR_EXPR:
9775 return tree_binary_nonzero_warnv_p (code, type,
9776 TREE_OPERAND (t, 0),
9777 TREE_OPERAND (t, 1),
9778 strict_overflow_p);
9779
9780 case COND_EXPR:
9781 case CONSTRUCTOR:
9782 case OBJ_TYPE_REF:
9783 case ASSERT_EXPR:
9784 case ADDR_EXPR:
9785 case WITH_SIZE_EXPR:
9786 case SSA_NAME:
9787 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9788
9789 case COMPOUND_EXPR:
9790 case MODIFY_EXPR:
9791 case BIND_EXPR:
9792 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9793 strict_overflow_p);
9794
9795 case SAVE_EXPR:
9796 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9797 strict_overflow_p);
9798
9799 case CALL_EXPR:
9800 {
9801 tree fndecl = get_callee_fndecl (t);
9802 if (!fndecl) return false;
9803 if (flag_delete_null_pointer_checks && !flag_check_new
9804 && DECL_IS_OPERATOR_NEW (fndecl)
9805 && !TREE_NOTHROW (fndecl))
9806 return true;
9807 if (flag_delete_null_pointer_checks
9808 && lookup_attribute ("returns_nonnull",
9809 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9810 return true;
9811 return alloca_call_p (t);
9812 }
9813
9814 default:
9815 break;
9816 }
9817 return false;
9818 }
9819
9820 /* Return true when T is an address and is known to be nonzero.
9821 Handle warnings about undefined signed overflow. */
9822
9823 static bool
9824 tree_expr_nonzero_p (tree t)
9825 {
9826 bool ret, strict_overflow_p;
9827
9828 strict_overflow_p = false;
9829 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9830 if (strict_overflow_p)
9831 fold_overflow_warning (("assuming signed overflow does not occur when "
9832 "determining that expression is always "
9833 "non-zero"),
9834 WARN_STRICT_OVERFLOW_MISC);
9835 return ret;
9836 }
9837
9838 /* Fold a binary expression of code CODE and type TYPE with operands
9839 OP0 and OP1. LOC is the location of the resulting expression.
9840 Return the folded expression if folding is successful. Otherwise,
9841 return NULL_TREE. */
9842
9843 tree
9844 fold_binary_loc (location_t loc,
9845 enum tree_code code, tree type, tree op0, tree op1)
9846 {
9847 enum tree_code_class kind = TREE_CODE_CLASS (code);
9848 tree arg0, arg1, tem;
9849 tree t1 = NULL_TREE;
9850 bool strict_overflow_p;
9851 unsigned int prec;
9852
9853 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9854 && TREE_CODE_LENGTH (code) == 2
9855 && op0 != NULL_TREE
9856 && op1 != NULL_TREE);
9857
9858 arg0 = op0;
9859 arg1 = op1;
9860
9861 /* Strip any conversions that don't change the mode. This is
9862 safe for every expression, except for a comparison expression
9863 because its signedness is derived from its operands. So, in
9864 the latter case, only strip conversions that don't change the
9865 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9866 preserved.
9867
9868 Note that this is done as an internal manipulation within the
9869 constant folder, in order to find the simplest representation
9870 of the arguments so that their form can be studied. In any
9871 case, the appropriate type conversions should be put back in
9872 the tree that will get out of the constant folder. */
9873
9874 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9875 {
9876 STRIP_SIGN_NOPS (arg0);
9877 STRIP_SIGN_NOPS (arg1);
9878 }
9879 else
9880 {
9881 STRIP_NOPS (arg0);
9882 STRIP_NOPS (arg1);
9883 }
9884
9885 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9886 constant but we can't do arithmetic on them. */
9887 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9888 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9889 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9890 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9891 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9892 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9893 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9894 {
9895 if (kind == tcc_binary)
9896 {
9897 /* Make sure type and arg0 have the same saturating flag. */
9898 gcc_assert (TYPE_SATURATING (type)
9899 == TYPE_SATURATING (TREE_TYPE (arg0)));
9900 tem = const_binop (code, arg0, arg1);
9901 }
9902 else if (kind == tcc_comparison)
9903 tem = fold_relational_const (code, type, arg0, arg1);
9904 else
9905 tem = NULL_TREE;
9906
9907 if (tem != NULL_TREE)
9908 {
9909 if (TREE_TYPE (tem) != type)
9910 tem = fold_convert_loc (loc, type, tem);
9911 return tem;
9912 }
9913 }
9914
9915 /* If this is a commutative operation, and ARG0 is a constant, move it
9916 to ARG1 to reduce the number of tests below. */
9917 if (commutative_tree_code (code)
9918 && tree_swap_operands_p (arg0, arg1, true))
9919 return fold_build2_loc (loc, code, type, op1, op0);
9920
9921 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9922 to ARG1 to reduce the number of tests below. */
9923 if (kind == tcc_comparison
9924 && tree_swap_operands_p (arg0, arg1, true))
9925 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9926
9927 tem = generic_simplify (loc, code, type, op0, op1);
9928 if (tem)
9929 return tem;
9930
9931 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9932
9933 First check for cases where an arithmetic operation is applied to a
9934 compound, conditional, or comparison operation. Push the arithmetic
9935 operation inside the compound or conditional to see if any folding
9936 can then be done. Convert comparison to conditional for this purpose.
9937 This also optimizes non-constant cases that used to be done in
9938 expand_expr.
9939
9940 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9941 one of the operands is a comparison and the other is a comparison, a
9942 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9943 code below would make the expression more complex. Change it to a
9944 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9945 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9946
9947 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9948 || code == EQ_EXPR || code == NE_EXPR)
9949 && TREE_CODE (type) != VECTOR_TYPE
9950 && ((truth_value_p (TREE_CODE (arg0))
9951 && (truth_value_p (TREE_CODE (arg1))
9952 || (TREE_CODE (arg1) == BIT_AND_EXPR
9953 && integer_onep (TREE_OPERAND (arg1, 1)))))
9954 || (truth_value_p (TREE_CODE (arg1))
9955 && (truth_value_p (TREE_CODE (arg0))
9956 || (TREE_CODE (arg0) == BIT_AND_EXPR
9957 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9958 {
9959 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9960 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9961 : TRUTH_XOR_EXPR,
9962 boolean_type_node,
9963 fold_convert_loc (loc, boolean_type_node, arg0),
9964 fold_convert_loc (loc, boolean_type_node, arg1));
9965
9966 if (code == EQ_EXPR)
9967 tem = invert_truthvalue_loc (loc, tem);
9968
9969 return fold_convert_loc (loc, type, tem);
9970 }
9971
9972 if (TREE_CODE_CLASS (code) == tcc_binary
9973 || TREE_CODE_CLASS (code) == tcc_comparison)
9974 {
9975 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9976 {
9977 tem = fold_build2_loc (loc, code, type,
9978 fold_convert_loc (loc, TREE_TYPE (op0),
9979 TREE_OPERAND (arg0, 1)), op1);
9980 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9981 tem);
9982 }
9983 if (TREE_CODE (arg1) == COMPOUND_EXPR
9984 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9985 {
9986 tem = fold_build2_loc (loc, code, type, op0,
9987 fold_convert_loc (loc, TREE_TYPE (op1),
9988 TREE_OPERAND (arg1, 1)));
9989 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9990 tem);
9991 }
9992
9993 if (TREE_CODE (arg0) == COND_EXPR
9994 || TREE_CODE (arg0) == VEC_COND_EXPR
9995 || COMPARISON_CLASS_P (arg0))
9996 {
9997 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9998 arg0, arg1,
9999 /*cond_first_p=*/1);
10000 if (tem != NULL_TREE)
10001 return tem;
10002 }
10003
10004 if (TREE_CODE (arg1) == COND_EXPR
10005 || TREE_CODE (arg1) == VEC_COND_EXPR
10006 || COMPARISON_CLASS_P (arg1))
10007 {
10008 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10009 arg1, arg0,
10010 /*cond_first_p=*/0);
10011 if (tem != NULL_TREE)
10012 return tem;
10013 }
10014 }
10015
10016 switch (code)
10017 {
10018 case MEM_REF:
10019 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10020 if (TREE_CODE (arg0) == ADDR_EXPR
10021 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10022 {
10023 tree iref = TREE_OPERAND (arg0, 0);
10024 return fold_build2 (MEM_REF, type,
10025 TREE_OPERAND (iref, 0),
10026 int_const_binop (PLUS_EXPR, arg1,
10027 TREE_OPERAND (iref, 1)));
10028 }
10029
10030 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10031 if (TREE_CODE (arg0) == ADDR_EXPR
10032 && handled_component_p (TREE_OPERAND (arg0, 0)))
10033 {
10034 tree base;
10035 HOST_WIDE_INT coffset;
10036 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10037 &coffset);
10038 if (!base)
10039 return NULL_TREE;
10040 return fold_build2 (MEM_REF, type,
10041 build_fold_addr_expr (base),
10042 int_const_binop (PLUS_EXPR, arg1,
10043 size_int (coffset)));
10044 }
10045
10046 return NULL_TREE;
10047
10048 case POINTER_PLUS_EXPR:
10049 /* 0 +p index -> (type)index */
10050 if (integer_zerop (arg0))
10051 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10052
10053 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10054 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10055 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10056 return fold_convert_loc (loc, type,
10057 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10058 fold_convert_loc (loc, sizetype,
10059 arg1),
10060 fold_convert_loc (loc, sizetype,
10061 arg0)));
10062
10063 /* (PTR +p B) +p A -> PTR +p (B + A) */
10064 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10065 {
10066 tree inner;
10067 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10068 tree arg00 = TREE_OPERAND (arg0, 0);
10069 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10070 arg01, fold_convert_loc (loc, sizetype, arg1));
10071 return fold_convert_loc (loc, type,
10072 fold_build_pointer_plus_loc (loc,
10073 arg00, inner));
10074 }
10075
10076 /* PTR_CST +p CST -> CST1, i.e. the two constants folded into one. */
10077 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10078 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10079 fold_convert_loc (loc, type, arg1));
10080
10081 return NULL_TREE;
10082
10083 case PLUS_EXPR:
10084 /* A + (-B) -> A - B */
10085 if (TREE_CODE (arg1) == NEGATE_EXPR
10086 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10087 return fold_build2_loc (loc, MINUS_EXPR, type,
10088 fold_convert_loc (loc, type, arg0),
10089 fold_convert_loc (loc, type,
10090 TREE_OPERAND (arg1, 0)));
10091 /* (-A) + B -> B - A */
10092 if (TREE_CODE (arg0) == NEGATE_EXPR
10093 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10094 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10095 return fold_build2_loc (loc, MINUS_EXPR, type,
10096 fold_convert_loc (loc, type, arg1),
10097 fold_convert_loc (loc, type,
10098 TREE_OPERAND (arg0, 0)));
10099
10100 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10101 {
10102 /* Convert ~A + 1 to -A. */
10103 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10104 && integer_each_onep (arg1))
10105 return fold_build1_loc (loc, NEGATE_EXPR, type,
10106 fold_convert_loc (loc, type,
10107 TREE_OPERAND (arg0, 0)));
10108
10109 /* ~X + X is -1. */
10110 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10111 && !TYPE_OVERFLOW_TRAPS (type))
10112 {
10113 tree tem = TREE_OPERAND (arg0, 0);
10114
10115 STRIP_NOPS (tem);
10116 if (operand_equal_p (tem, arg1, 0))
10117 {
10118 t1 = build_all_ones_cst (type);
10119 return omit_one_operand_loc (loc, type, t1, arg1);
10120 }
10121 }
10122
10123 /* X + ~X is -1. */
10124 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10125 && !TYPE_OVERFLOW_TRAPS (type))
10126 {
10127 tree tem = TREE_OPERAND (arg1, 0);
10128
10129 STRIP_NOPS (tem);
10130 if (operand_equal_p (arg0, tem, 0))
10131 {
10132 t1 = build_all_ones_cst (type);
10133 return omit_one_operand_loc (loc, type, t1, arg0);
10134 }
10135 }
10136
10137 /* X + (X / CST) * -CST is X % CST. */
10138 if (TREE_CODE (arg1) == MULT_EXPR
10139 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10140 && operand_equal_p (arg0,
10141 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10142 {
10143 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10144 tree cst1 = TREE_OPERAND (arg1, 1);
10145 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10146 cst1, cst0);
10147 if (sum && integer_zerop (sum))
10148 return fold_convert_loc (loc, type,
10149 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10150 TREE_TYPE (arg0), arg0,
10151 cst0));
10152 }
10153 }
10154
10155 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10156 one. Make sure the type is not saturating and has the signedness of
10157 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10158 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10159 if ((TREE_CODE (arg0) == MULT_EXPR
10160 || TREE_CODE (arg1) == MULT_EXPR)
10161 && !TYPE_SATURATING (type)
10162 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10163 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10164 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10165 {
10166 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10167 if (tem)
10168 return tem;
10169 }
10170
10171 if (! FLOAT_TYPE_P (type))
10172 {
10173 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10174 with a constant, and the two constants have no bits in common,
10175 we should treat this as a BIT_IOR_EXPR since this may produce more
10176 simplifications. */
10177 if (TREE_CODE (arg0) == BIT_AND_EXPR
10178 && TREE_CODE (arg1) == BIT_AND_EXPR
10179 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10180 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10181 && wi::bit_and (TREE_OPERAND (arg0, 1),
10182 TREE_OPERAND (arg1, 1)) == 0)
10183 {
10184 code = BIT_IOR_EXPR;
10185 goto bit_ior;
10186 }
10187
10188 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10189 (plus (plus (mult) (mult)) (foo)) so that we can
10190 take advantage of the factoring cases below. */
10191 if (TYPE_OVERFLOW_WRAPS (type)
10192 && (((TREE_CODE (arg0) == PLUS_EXPR
10193 || TREE_CODE (arg0) == MINUS_EXPR)
10194 && TREE_CODE (arg1) == MULT_EXPR)
10195 || ((TREE_CODE (arg1) == PLUS_EXPR
10196 || TREE_CODE (arg1) == MINUS_EXPR)
10197 && TREE_CODE (arg0) == MULT_EXPR)))
10198 {
10199 tree parg0, parg1, parg, marg;
10200 enum tree_code pcode;
10201
10202 if (TREE_CODE (arg1) == MULT_EXPR)
10203 parg = arg0, marg = arg1;
10204 else
10205 parg = arg1, marg = arg0;
10206 pcode = TREE_CODE (parg);
10207 parg0 = TREE_OPERAND (parg, 0);
10208 parg1 = TREE_OPERAND (parg, 1);
10209 STRIP_NOPS (parg0);
10210 STRIP_NOPS (parg1);
10211
10212 if (TREE_CODE (parg0) == MULT_EXPR
10213 && TREE_CODE (parg1) != MULT_EXPR)
10214 return fold_build2_loc (loc, pcode, type,
10215 fold_build2_loc (loc, PLUS_EXPR, type,
10216 fold_convert_loc (loc, type,
10217 parg0),
10218 fold_convert_loc (loc, type,
10219 marg)),
10220 fold_convert_loc (loc, type, parg1));
10221 if (TREE_CODE (parg0) != MULT_EXPR
10222 && TREE_CODE (parg1) == MULT_EXPR)
10223 return
10224 fold_build2_loc (loc, PLUS_EXPR, type,
10225 fold_convert_loc (loc, type, parg0),
10226 fold_build2_loc (loc, pcode, type,
10227 fold_convert_loc (loc, type, marg),
10228 fold_convert_loc (loc, type,
10229 parg1)));
10230 }
10231 }
10232 else
10233 {
10234 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10235 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10236 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10237
10238 /* Likewise if the operands are reversed. */
10239 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10240 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10241
10242 /* Convert X + -C into X - C. */
10243 if (TREE_CODE (arg1) == REAL_CST
10244 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10245 {
10246 tem = fold_negate_const (arg1, type);
10247 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10248 return fold_build2_loc (loc, MINUS_EXPR, type,
10249 fold_convert_loc (loc, type, arg0),
10250 fold_convert_loc (loc, type, tem));
10251 }
10252
10253 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10254 to __complex__ ( x, y ). This is not the same for SNaNs or
10255 if signed zeros are involved. */
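 /* Illustrative counterexample for signed zeros: with y == -0.0 the
    true imaginary part is 0.0 + -0.0 == +0.0, whereas the folded
    form would keep -0.0.  */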
10256 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10257 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10258 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10259 {
10260 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10261 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10262 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10263 bool arg0rz = false, arg0iz = false;
10264 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10265 || (arg0i && (arg0iz = real_zerop (arg0i))))
10266 {
10267 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10268 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10269 if (arg0rz && arg1i && real_zerop (arg1i))
10270 {
10271 tree rp = arg1r ? arg1r
10272 : build1 (REALPART_EXPR, rtype, arg1);
10273 tree ip = arg0i ? arg0i
10274 : build1 (IMAGPART_EXPR, rtype, arg0);
10275 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10276 }
10277 else if (arg0iz && arg1r && real_zerop (arg1r))
10278 {
10279 tree rp = arg0r ? arg0r
10280 : build1 (REALPART_EXPR, rtype, arg0);
10281 tree ip = arg1i ? arg1i
10282 : build1 (IMAGPART_EXPR, rtype, arg1);
10283 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10284 }
10285 }
10286 }
10287
10288 if (flag_unsafe_math_optimizations
10289 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10290 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10291 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10292 return tem;
10293
10294 /* Convert x+x into x*2.0. */
10295 if (operand_equal_p (arg0, arg1, 0)
10296 && SCALAR_FLOAT_TYPE_P (type))
10297 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10298 build_real (type, dconst2));
10299
10300 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10301 We associate floats only if the user has specified
10302 -fassociative-math. */
10303 if (flag_associative_math
10304 && TREE_CODE (arg1) == PLUS_EXPR
10305 && TREE_CODE (arg0) != MULT_EXPR)
10306 {
10307 tree tree10 = TREE_OPERAND (arg1, 0);
10308 tree tree11 = TREE_OPERAND (arg1, 1);
10309 if (TREE_CODE (tree11) == MULT_EXPR
10310 && TREE_CODE (tree10) == MULT_EXPR)
10311 {
10312 tree tree0;
10313 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10314 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10315 }
10316 }
10317 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10318 We associate floats only if the user has specified
10319 -fassociative-math. */
10320 if (flag_associative_math
10321 && TREE_CODE (arg0) == PLUS_EXPR
10322 && TREE_CODE (arg1) != MULT_EXPR)
10323 {
10324 tree tree00 = TREE_OPERAND (arg0, 0);
10325 tree tree01 = TREE_OPERAND (arg0, 1);
10326 if (TREE_CODE (tree01) == MULT_EXPR
10327 && TREE_CODE (tree00) == MULT_EXPR)
10328 {
10329 tree tree0;
10330 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10331 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10332 }
10333 }
10334 }
10335
10336 bit_rotate:
10337 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10338 is a rotate of A by C1 bits. */
10339 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10340 is a rotate of A by B bits. */
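 /* E.g. (illustrative, assuming a 32-bit unsigned A):
    (A << 3) + (A >> 29) becomes a left rotate of A by 3,
    since 3 + 29 == 32, the precision of A's type.  */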
10341 {
10342 enum tree_code code0, code1;
10343 tree rtype;
10344 code0 = TREE_CODE (arg0);
10345 code1 = TREE_CODE (arg1);
10346 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10347 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10348 && operand_equal_p (TREE_OPERAND (arg0, 0),
10349 TREE_OPERAND (arg1, 0), 0)
10350 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10351 TYPE_UNSIGNED (rtype))
10352 /* Only create rotates in complete modes. Other cases are not
10353 expanded properly. */
10354 && (element_precision (rtype)
10355 == element_precision (TYPE_MODE (rtype))))
10356 {
10357 tree tree01, tree11;
10358 enum tree_code code01, code11;
10359
10360 tree01 = TREE_OPERAND (arg0, 1);
10361 tree11 = TREE_OPERAND (arg1, 1);
10362 STRIP_NOPS (tree01);
10363 STRIP_NOPS (tree11);
10364 code01 = TREE_CODE (tree01);
10365 code11 = TREE_CODE (tree11);
10366 if (code01 == INTEGER_CST
10367 && code11 == INTEGER_CST
10368 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10369 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10370 {
10371 tem = build2_loc (loc, LROTATE_EXPR,
10372 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10373 TREE_OPERAND (arg0, 0),
10374 code0 == LSHIFT_EXPR ? tree01 : tree11);
10375 return fold_convert_loc (loc, type, tem);
10376 }
10377 else if (code11 == MINUS_EXPR)
10378 {
10379 tree tree110, tree111;
10380 tree110 = TREE_OPERAND (tree11, 0);
10381 tree111 = TREE_OPERAND (tree11, 1);
10382 STRIP_NOPS (tree110);
10383 STRIP_NOPS (tree111);
10384 if (TREE_CODE (tree110) == INTEGER_CST
10385 && 0 == compare_tree_int (tree110,
10386 element_precision
10387 (TREE_TYPE (TREE_OPERAND
10388 (arg0, 0))))
10389 && operand_equal_p (tree01, tree111, 0))
10390 return
10391 fold_convert_loc (loc, type,
10392 build2 ((code0 == LSHIFT_EXPR
10393 ? LROTATE_EXPR
10394 : RROTATE_EXPR),
10395 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10396 TREE_OPERAND (arg0, 0), tree01));
10397 }
10398 else if (code01 == MINUS_EXPR)
10399 {
10400 tree tree010, tree011;
10401 tree010 = TREE_OPERAND (tree01, 0);
10402 tree011 = TREE_OPERAND (tree01, 1);
10403 STRIP_NOPS (tree010);
10404 STRIP_NOPS (tree011);
10405 if (TREE_CODE (tree010) == INTEGER_CST
10406 && 0 == compare_tree_int (tree010,
10407 element_precision
10408 (TREE_TYPE (TREE_OPERAND
10409 (arg0, 0))))
10410 && operand_equal_p (tree11, tree011, 0))
10411 return fold_convert_loc
10412 (loc, type,
10413 build2 ((code0 != LSHIFT_EXPR
10414 ? LROTATE_EXPR
10415 : RROTATE_EXPR),
10416 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10417 TREE_OPERAND (arg0, 0), tree11));
10418 }
10419 }
10420 }
10421
10422 associate:
10423 /* In most languages, we can't associate operations on floats through
10424 parentheses. Rather than remember where the parentheses were, we
10425 don't associate floats at all, unless the user has specified
10426 -fassociative-math.
10427 And, we need to make sure the type is not saturating. */
10428
10429 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10430 && !TYPE_SATURATING (type))
10431 {
10432 tree var0, con0, lit0, minus_lit0;
10433 tree var1, con1, lit1, minus_lit1;
10434 tree atype = type;
10435 bool ok = true;
10436
10437 /* Split both trees into variables, constants, and literals. Then
10438 associate each group together, the constants with literals,
10439 then the result with variables. This increases the chances of
10440 literals being recombined later and of generating relocatable
10441 expressions for the sum of a constant and literal. */
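 /* Illustrative: for (X + &a) + 4, X goes to the variable group,
    &a to the constant group, and 4 to the literal group.  */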
10442 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10443 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10444 code == MINUS_EXPR);
10445
10446 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10447 if (code == MINUS_EXPR)
10448 code = PLUS_EXPR;
10449
10450 /* With undefined overflow prefer doing association in a type
10451 which wraps on overflow, if that is one of the operand types. */
10452 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10453 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10454 {
10455 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10456 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10457 atype = TREE_TYPE (arg0);
10458 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10459 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10460 atype = TREE_TYPE (arg1);
10461 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10462 }
10463
10464 /* With undefined overflow we can only associate constants with one
10465 variable, and constants whose association doesn't overflow. */
10466 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10467 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10468 {
10469 if (var0 && var1)
10470 {
10471 tree tmp0 = var0;
10472 tree tmp1 = var1;
10473
10474 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10475 tmp0 = TREE_OPERAND (tmp0, 0);
10476 if (CONVERT_EXPR_P (tmp0)
10477 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10478 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10479 <= TYPE_PRECISION (atype)))
10480 tmp0 = TREE_OPERAND (tmp0, 0);
10481 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10482 tmp1 = TREE_OPERAND (tmp1, 0);
10483 if (CONVERT_EXPR_P (tmp1)
10484 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10485 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10486 <= TYPE_PRECISION (atype)))
10487 tmp1 = TREE_OPERAND (tmp1, 0);
10488 /* The only case we can still associate with two variables
10489 is if they are the same, modulo negation and bit-pattern
10490 preserving conversions. */
10491 if (!operand_equal_p (tmp0, tmp1, 0))
10492 ok = false;
10493 }
10494 }
10495
10496 /* Only do something if we found more than two objects. Otherwise,
10497 nothing has changed and we risk infinite recursion. */
10498 if (ok
10499 && (2 < ((var0 != 0) + (var1 != 0)
10500 + (con0 != 0) + (con1 != 0)
10501 + (lit0 != 0) + (lit1 != 0)
10502 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10503 {
10504 bool any_overflows = false;
10505 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10506 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10507 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10508 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10509 var0 = associate_trees (loc, var0, var1, code, atype);
10510 con0 = associate_trees (loc, con0, con1, code, atype);
10511 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10512 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10513 code, atype);
10514
10515 /* Preserve the MINUS_EXPR if the negative part of the literal is
10516 greater than the positive part. Otherwise, the multiplicative
10517 folding code (i.e. extract_muldiv) may be fooled in case
10518 unsigned constants are subtracted, like in the following
10519 example: ((X*2 + 4) - 8U)/2. */
10520 if (minus_lit0 && lit0)
10521 {
10522 if (TREE_CODE (lit0) == INTEGER_CST
10523 && TREE_CODE (minus_lit0) == INTEGER_CST
10524 && tree_int_cst_lt (lit0, minus_lit0))
10525 {
10526 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10527 MINUS_EXPR, atype);
10528 lit0 = 0;
10529 }
10530 else
10531 {
10532 lit0 = associate_trees (loc, lit0, minus_lit0,
10533 MINUS_EXPR, atype);
10534 minus_lit0 = 0;
10535 }
10536 }
10537
10538 /* Don't introduce overflows through reassociation. */
10539 if (!any_overflows
10540 && ((lit0 && TREE_OVERFLOW (lit0))
10541 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10542 return NULL_TREE;
10543
10544 if (minus_lit0)
10545 {
10546 if (con0 == 0)
10547 return
10548 fold_convert_loc (loc, type,
10549 associate_trees (loc, var0, minus_lit0,
10550 MINUS_EXPR, atype));
10551 else
10552 {
10553 con0 = associate_trees (loc, con0, minus_lit0,
10554 MINUS_EXPR, atype);
10555 return
10556 fold_convert_loc (loc, type,
10557 associate_trees (loc, var0, con0,
10558 PLUS_EXPR, atype));
10559 }
10560 }
10561
10562 con0 = associate_trees (loc, con0, lit0, code, atype);
10563 return
10564 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10565 code, atype));
10566 }
10567 }
10568
10569 return NULL_TREE;
10570
10571 case MINUS_EXPR:
10572 /* Pointer simplifications for subtraction, simple reassociations. */
10573 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10574 {
10575 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
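 /* E.g. (illustrative): (P p+ 4) - (P p+ 10) becomes
    (P - P) + (4 - 10), which folds to -6.  */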
10576 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10577 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10578 {
10579 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10580 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10581 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10582 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10583 return fold_build2_loc (loc, PLUS_EXPR, type,
10584 fold_build2_loc (loc, MINUS_EXPR, type,
10585 arg00, arg10),
10586 fold_build2_loc (loc, MINUS_EXPR, type,
10587 arg01, arg11));
10588 }
10589 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10590 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10591 {
10592 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10593 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10594 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10595 fold_convert_loc (loc, type, arg1));
10596 if (tmp)
10597 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10598 }
10599 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10600 simplifies. */
10601 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10602 {
10603 tree arg10 = fold_convert_loc (loc, type,
10604 TREE_OPERAND (arg1, 0));
10605 tree arg11 = fold_convert_loc (loc, type,
10606 TREE_OPERAND (arg1, 1));
10607 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10608 fold_convert_loc (loc, type, arg0),
10609 arg10);
10610 if (tmp)
10611 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10612 }
10613 }
10614 /* A - (-B) -> A + B */
10615 if (TREE_CODE (arg1) == NEGATE_EXPR)
10616 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10617 fold_convert_loc (loc, type,
10618 TREE_OPERAND (arg1, 0)));
10619 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10620 if (TREE_CODE (arg0) == NEGATE_EXPR
10621 && negate_expr_p (arg1)
10622 && reorder_operands_p (arg0, arg1))
10623 return fold_build2_loc (loc, MINUS_EXPR, type,
10624 fold_convert_loc (loc, type,
10625 negate_expr (arg1)),
10626 fold_convert_loc (loc, type,
10627 TREE_OPERAND (arg0, 0)));
10628 /* Convert -A - 1 to ~A. */
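 /* In two's complement -A == ~A + 1, so -A - 1 == ~A.  The
    TYPE_OVERFLOW_TRAPS check keeps a possibly trapping negation
    (e.g. of INT_MIN) from being silently removed.  */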
10629 if (TREE_CODE (arg0) == NEGATE_EXPR
10630 && integer_each_onep (arg1)
10631 && !TYPE_OVERFLOW_TRAPS (type))
10632 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10633 fold_convert_loc (loc, type,
10634 TREE_OPERAND (arg0, 0)));
10635
10636 /* Convert -1 - A to ~A. */
10637 if (TREE_CODE (type) != COMPLEX_TYPE
10638 && integer_all_onesp (arg0))
10639 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10640
10641
10642 /* X - (X / Y) * Y is X % Y. */
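 /* This is the truncating-division identity
    X == (X / Y) * Y + X % Y; e.g. -7 - (-7 / 2) * 2 == -1 == -7 % 2.  */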
10643 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10644 && TREE_CODE (arg1) == MULT_EXPR
10645 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10646 && operand_equal_p (arg0,
10647 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10648 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10649 TREE_OPERAND (arg1, 1), 0))
10650 return
10651 fold_convert_loc (loc, type,
10652 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10653 arg0, TREE_OPERAND (arg1, 1)));
10654
10655 if (! FLOAT_TYPE_P (type))
10656 {
10657 if (integer_zerop (arg0))
10658 return negate_expr (fold_convert_loc (loc, type, arg1));
10659
10660 /* Fold A - (A & B) into ~B & A. */
10661 if (!TREE_SIDE_EFFECTS (arg0)
10662 && TREE_CODE (arg1) == BIT_AND_EXPR)
10663 {
10664 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10665 {
10666 tree arg10 = fold_convert_loc (loc, type,
10667 TREE_OPERAND (arg1, 0));
10668 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10669 fold_build1_loc (loc, BIT_NOT_EXPR,
10670 type, arg10),
10671 fold_convert_loc (loc, type, arg0));
10672 }
10673 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10674 {
10675 tree arg11 = fold_convert_loc (loc,
10676 type, TREE_OPERAND (arg1, 1));
10677 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10678 fold_build1_loc (loc, BIT_NOT_EXPR,
10679 type, arg11),
10680 fold_convert_loc (loc, type, arg0));
10681 }
10682 }
10683
10684 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10685 any power of 2 minus 1. */
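 /* Illustrative, with B == 7: writing A == 8*h + l (0 <= l < 8),
    both (A & ~7) - (A & 7) and (A ^ 7) - 7 equal 8*h - l.  */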
10686 if (TREE_CODE (arg0) == BIT_AND_EXPR
10687 && TREE_CODE (arg1) == BIT_AND_EXPR
10688 && operand_equal_p (TREE_OPERAND (arg0, 0),
10689 TREE_OPERAND (arg1, 0), 0))
10690 {
10691 tree mask0 = TREE_OPERAND (arg0, 1);
10692 tree mask1 = TREE_OPERAND (arg1, 1);
10693 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10694
10695 if (operand_equal_p (tem, mask1, 0))
10696 {
10697 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10698 TREE_OPERAND (arg0, 0), mask1);
10699 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10700 }
10701 }
10702 }
10703
10704 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10705 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10706 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10707
10708 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10709 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10710 (-ARG1 + ARG0) reduces to -ARG1. */
10711 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10712 return negate_expr (fold_convert_loc (loc, type, arg1));
10713
10714 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10715 __complex__ ( x, -y ). This is not the same for SNaNs or if
10716 signed zeros are involved. */
10717 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10718 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10719 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10720 {
10721 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10722 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10723 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10724 bool arg0rz = false, arg0iz = false;
10725 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10726 || (arg0i && (arg0iz = real_zerop (arg0i))))
10727 {
10728 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10729 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10730 if (arg0rz && arg1i && real_zerop (arg1i))
10731 {
10732 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10733 arg1r ? arg1r
10734 : build1 (REALPART_EXPR, rtype, arg1));
10735 tree ip = arg0i ? arg0i
10736 : build1 (IMAGPART_EXPR, rtype, arg0);
10737 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10738 }
10739 else if (arg0iz && arg1r && real_zerop (arg1r))
10740 {
10741 tree rp = arg0r ? arg0r
10742 : build1 (REALPART_EXPR, rtype, arg0);
10743 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10744 arg1i ? arg1i
10745 : build1 (IMAGPART_EXPR, rtype, arg1));
10746 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10747 }
10748 }
10749 }
10750
10751 /* A - B -> A + (-B) if B is easily negatable. */
10752 if (negate_expr_p (arg1)
10753 && ((FLOAT_TYPE_P (type)
10754 /* Avoid this transformation if B is a positive REAL_CST. */
10755 && (TREE_CODE (arg1) != REAL_CST
10756 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10757 || INTEGRAL_TYPE_P (type)))
10758 return fold_build2_loc (loc, PLUS_EXPR, type,
10759 fold_convert_loc (loc, type, arg0),
10760 fold_convert_loc (loc, type,
10761 negate_expr (arg1)));
10762
10763 /* Try folding difference of addresses. */
10764 {
10765 HOST_WIDE_INT diff;
10766
10767 if ((TREE_CODE (arg0) == ADDR_EXPR
10768 || TREE_CODE (arg1) == ADDR_EXPR)
10769 && ptr_difference_const (arg0, arg1, &diff))
10770 return build_int_cst_type (type, diff);
10771 }
10772
10773 /* Fold &a[i] - &a[j] to i-j. */
10774 if (TREE_CODE (arg0) == ADDR_EXPR
10775 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10776 && TREE_CODE (arg1) == ADDR_EXPR
10777 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10778 {
10779 tree tem = fold_addr_of_array_ref_difference (loc, type,
10780 TREE_OPERAND (arg0, 0),
10781 TREE_OPERAND (arg1, 0));
10782 if (tem)
10783 return tem;
10784 }
10785
10786 if (FLOAT_TYPE_P (type)
10787 && flag_unsafe_math_optimizations
10788 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10789 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10790 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10791 return tem;
10792
10793 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10794 one. Make sure the type is not saturating and has the signedness of
10795 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10796 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10797 if ((TREE_CODE (arg0) == MULT_EXPR
10798 || TREE_CODE (arg1) == MULT_EXPR)
10799 && !TYPE_SATURATING (type)
10800 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10801 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10802 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10803 {
10804 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10805 if (tem)
10806 return tem;
10807 }
10808
10809 goto associate;
10810
10811 case MULT_EXPR:
10812 /* (-A) * (-B) -> A * B */
10813 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10814 return fold_build2_loc (loc, MULT_EXPR, type,
10815 fold_convert_loc (loc, type,
10816 TREE_OPERAND (arg0, 0)),
10817 fold_convert_loc (loc, type,
10818 negate_expr (arg1)));
10819 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10820 return fold_build2_loc (loc, MULT_EXPR, type,
10821 fold_convert_loc (loc, type,
10822 negate_expr (arg0)),
10823 fold_convert_loc (loc, type,
10824 TREE_OPERAND (arg1, 0)));
10825
10826 if (! FLOAT_TYPE_P (type))
10827 {
10828 /* Transform x * -1 into -x. Make sure to do the negation
10829 on the original operand with conversions not stripped
10830 because we can only strip non-sign-changing conversions. */
10831 if (integer_minus_onep (arg1))
10832 return fold_convert_loc (loc, type, negate_expr (op0));
10833 /* Transform x * -C into -x * C if x is easily negatable. */
10834 if (TREE_CODE (arg1) == INTEGER_CST
10835 && tree_int_cst_sgn (arg1) == -1
10836 && negate_expr_p (arg0)
10837 && (tem = negate_expr (arg1)) != arg1
10838 && !TREE_OVERFLOW (tem))
10839 return fold_build2_loc (loc, MULT_EXPR, type,
10840 fold_convert_loc (loc, type,
10841 negate_expr (arg0)),
10842 tem);
10843
10844 /* (a * (1 << b)) is (a << b). */
10845 if (TREE_CODE (arg1) == LSHIFT_EXPR
10846 && integer_onep (TREE_OPERAND (arg1, 0)))
10847 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10848 TREE_OPERAND (arg1, 1));
10849 if (TREE_CODE (arg0) == LSHIFT_EXPR
10850 && integer_onep (TREE_OPERAND (arg0, 0)))
10851 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10852 TREE_OPERAND (arg0, 1));
10853
10854 /* (A + A) * C -> A * 2 * C. */
10855 if (TREE_CODE (arg0) == PLUS_EXPR
10856 && TREE_CODE (arg1) == INTEGER_CST
10857 && operand_equal_p (TREE_OPERAND (arg0, 0),
10858 TREE_OPERAND (arg0, 1), 0))
10859 return fold_build2_loc (loc, MULT_EXPR, type,
10860 omit_one_operand_loc (loc, type,
10861 TREE_OPERAND (arg0, 0),
10862 TREE_OPERAND (arg0, 1)),
10863 fold_build2_loc (loc, MULT_EXPR, type,
10864 build_int_cst (type, 2), arg1));
10865
10866 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10867 sign-changing only. */
10868 if (TREE_CODE (arg1) == INTEGER_CST
10869 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10870 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10871 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10872
10873 strict_overflow_p = false;
10874 if (TREE_CODE (arg1) == INTEGER_CST
10875 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10876 &strict_overflow_p)))
10877 {
10878 if (strict_overflow_p)
10879 fold_overflow_warning (("assuming signed overflow does not "
10880 "occur when simplifying "
10881 "multiplication"),
10882 WARN_STRICT_OVERFLOW_MISC);
10883 return fold_convert_loc (loc, type, tem);
10884 }
10885
10886 /* Optimize z * conj(z) for integer complex numbers. */
10887 if (TREE_CODE (arg0) == CONJ_EXPR
10888 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10889 return fold_mult_zconjz (loc, type, arg1);
10890 if (TREE_CODE (arg1) == CONJ_EXPR
10891 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10892 return fold_mult_zconjz (loc, type, arg0);
10893 }
10894 else
10895 {
10896 /* Maybe fold x * 0 to 0. The expressions aren't the same
10897 when x is NaN, since x * 0 is also NaN. Nor are they the
10898 same in modes with signed zeros, since multiplying a
10899 negative value by 0 gives -0, not +0. */
10900 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10901 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10902 && real_zerop (arg1))
10903 return omit_one_operand_loc (loc, type, arg1, arg0);
10904 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
10905 Likewise for complex arithmetic with signed zeros. */
10906 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10907 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10908 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10909 && real_onep (arg1))
10910 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10911
10912 /* Transform x * -1.0 into -x. */
10913 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10914 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10915 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10916 && real_minus_onep (arg1))
10917 return fold_convert_loc (loc, type, negate_expr (arg0));
10918
10919 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10920 the result for floating point types due to rounding so it is applied
10921 only if -fassociative-math was specified. */
10922 if (flag_associative_math
10923 && TREE_CODE (arg0) == RDIV_EXPR
10924 && TREE_CODE (arg1) == REAL_CST
10925 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10926 {
10927 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10928 arg1);
10929 if (tem)
10930 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10931 TREE_OPERAND (arg0, 1));
10932 }
10933
10934 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10935 if (operand_equal_p (arg0, arg1, 0))
10936 {
10937 tree tem = fold_strip_sign_ops (arg0);
10938 if (tem != NULL_TREE)
10939 {
10940 tem = fold_convert_loc (loc, type, tem);
10941 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10942 }
10943 }
10944
10945 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10946 This is not the same for NaNs or if signed zeros are
10947 involved. */
10948 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10949 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10950 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10951 && TREE_CODE (arg1) == COMPLEX_CST
10952 && real_zerop (TREE_REALPART (arg1)))
10953 {
10954 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10955 if (real_onep (TREE_IMAGPART (arg1)))
10956 return
10957 fold_build2_loc (loc, COMPLEX_EXPR, type,
10958 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10959 rtype, arg0)),
10960 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10961 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10962 return
10963 fold_build2_loc (loc, COMPLEX_EXPR, type,
10964 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10965 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10966 rtype, arg0)));
10967 }
10968
10969 /* Optimize z * conj(z) for floating point complex numbers.
10970 Guarded by flag_unsafe_math_optimizations as non-finite
10971 imaginary components don't produce scalar results. */
10972 if (flag_unsafe_math_optimizations
10973 && TREE_CODE (arg0) == CONJ_EXPR
10974 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10975 return fold_mult_zconjz (loc, type, arg1);
10976 if (flag_unsafe_math_optimizations
10977 && TREE_CODE (arg1) == CONJ_EXPR
10978 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10979 return fold_mult_zconjz (loc, type, arg0);
10980
10981 if (flag_unsafe_math_optimizations)
10982 {
10983 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10984 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10985
10986 /* Optimizations of root(...)*root(...). */
10987 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10988 {
10989 tree rootfn, arg;
10990 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10991 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10992
10993 /* Optimize sqrt(x)*sqrt(x) as x. */
10994 if (BUILTIN_SQRT_P (fcode0)
10995 && operand_equal_p (arg00, arg10, 0)
10996 && ! HONOR_SNANS (TYPE_MODE (type)))
10997 return arg00;
10998
10999 /* Optimize root(x)*root(y) as root(x*y). */
11000 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11001 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11002 return build_call_expr_loc (loc, rootfn, 1, arg);
11003 }
11004
11005 /* Optimize expN(x)*expN(y) as expN(x+y). */
11006 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11007 {
11008 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11009 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11010 CALL_EXPR_ARG (arg0, 0),
11011 CALL_EXPR_ARG (arg1, 0));
11012 return build_call_expr_loc (loc, expfn, 1, arg);
11013 }
11014
11015 /* Optimizations of pow(...)*pow(...). */
11016 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11017 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11018 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11019 {
11020 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11021 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11022 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11023 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11024
11025 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11026 if (operand_equal_p (arg01, arg11, 0))
11027 {
11028 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11029 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11030 arg00, arg10);
11031 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11032 }
11033
11034 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11035 if (operand_equal_p (arg00, arg10, 0))
11036 {
11037 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11038 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11039 arg01, arg11);
11040 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11041 }
11042 }
11043
11044 /* Optimize tan(x)*cos(x) as sin(x). */
11045 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11046 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11047 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11048 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11049 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11050 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11051 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11052 CALL_EXPR_ARG (arg1, 0), 0))
11053 {
11054 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11055
11056 if (sinfn != NULL_TREE)
11057 return build_call_expr_loc (loc, sinfn, 1,
11058 CALL_EXPR_ARG (arg0, 0));
11059 }
11060
11061 /* Optimize x*pow(x,c) as pow(x,c+1). */
11062 if (fcode1 == BUILT_IN_POW
11063 || fcode1 == BUILT_IN_POWF
11064 || fcode1 == BUILT_IN_POWL)
11065 {
11066 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11067 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11068 if (TREE_CODE (arg11) == REAL_CST
11069 && !TREE_OVERFLOW (arg11)
11070 && operand_equal_p (arg0, arg10, 0))
11071 {
11072 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11073 REAL_VALUE_TYPE c;
11074 tree arg;
11075
11076 c = TREE_REAL_CST (arg11);
11077 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11078 arg = build_real (type, c);
11079 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11080 }
11081 }
11082
11083 /* Optimize pow(x,c)*x as pow(x,c+1). */
11084 if (fcode0 == BUILT_IN_POW
11085 || fcode0 == BUILT_IN_POWF
11086 || fcode0 == BUILT_IN_POWL)
11087 {
11088 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11089 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11090 if (TREE_CODE (arg01) == REAL_CST
11091 && !TREE_OVERFLOW (arg01)
11092 && operand_equal_p (arg1, arg00, 0))
11093 {
11094 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11095 REAL_VALUE_TYPE c;
11096 tree arg;
11097
11098 c = TREE_REAL_CST (arg01);
11099 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11100 arg = build_real (type, c);
11101 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11102 }
11103 }
11104
11105 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11106 if (!in_gimple_form
11107 && optimize
11108 && operand_equal_p (arg0, arg1, 0))
11109 {
11110 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11111
11112 if (powfn)
11113 {
11114 tree arg = build_real (type, dconst2);
11115 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11116 }
11117 }
11118 }
11119 }
11120 goto associate;
11121
11122 case BIT_IOR_EXPR:
11123 bit_ior:
11124 if (operand_equal_p (arg0, arg1, 0))
11125 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11126
11127 /* ~X | X is -1. */
11128 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11129 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11130 {
11131 t1 = build_zero_cst (type);
11132 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11133 return omit_one_operand_loc (loc, type, t1, arg1);
11134 }
11135
11136 /* X | ~X is -1. */
11137 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11138 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11139 {
11140 t1 = build_zero_cst (type);
11141 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11142 return omit_one_operand_loc (loc, type, t1, arg0);
11143 }
11144
11145 /* Canonicalize (X & C1) | C2. */
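 /* E.g. (illustrative, for an 8-bit unsigned X): (X & 0x0F) | 0xFF
    becomes 0xFF, and (X & 0xF0) | 0x0F becomes X | 0x0F.  */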
11146 if (TREE_CODE (arg0) == BIT_AND_EXPR
11147 && TREE_CODE (arg1) == INTEGER_CST
11148 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11149 {
11150 int width = TYPE_PRECISION (type), w;
11151 wide_int c1 = TREE_OPERAND (arg0, 1);
11152 wide_int c2 = arg1;
11153
11154 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11155 if ((c1 & c2) == c1)
11156 return omit_one_operand_loc (loc, type, arg1,
11157 TREE_OPERAND (arg0, 0));
11158
11159 wide_int msk = wi::mask (width, false,
11160 TYPE_PRECISION (TREE_TYPE (arg1)));
11161
11162 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11163 if (msk.and_not (c1 | c2) == 0)
11164 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11165 TREE_OPERAND (arg0, 0), arg1);
11166
11167 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11168 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11169 mode which allows further optimizations. */
11170 c1 &= msk;
11171 c2 &= msk;
11172 wide_int c3 = c1.and_not (c2);
11173 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11174 {
11175 wide_int mask = wi::mask (w, false,
11176 TYPE_PRECISION (type));
11177 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11178 {
11179 c3 = mask;
11180 break;
11181 }
11182 }
11183
11184 if (c3 != c1)
11185 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11186 fold_build2_loc (loc, BIT_AND_EXPR, type,
11187 TREE_OPERAND (arg0, 0),
11188 wide_int_to_tree (type,
11189 c3)),
11190 arg1);
11191 }
11192
11193 /* (X & Y) | Y is (X, Y). */
11194 if (TREE_CODE (arg0) == BIT_AND_EXPR
11195 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11196 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11197 /* (X & Y) | X is (Y, X). */
11198 if (TREE_CODE (arg0) == BIT_AND_EXPR
11199 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11200 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11201 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11202 /* X | (X & Y) is (Y, X). */
11203 if (TREE_CODE (arg1) == BIT_AND_EXPR
11204 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11205 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11206 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11207 /* X | (Y & X) is (Y, X). */
11208 if (TREE_CODE (arg1) == BIT_AND_EXPR
11209 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11210 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11211 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11212
11213 /* (X & ~Y) | (~X & Y) is X ^ Y. */
11214 if (TREE_CODE (arg0) == BIT_AND_EXPR
11215 && TREE_CODE (arg1) == BIT_AND_EXPR)
11216 {
11217 tree a0, a1, l0, l1, n0, n1;
11218
11219 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11220 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11221
11222 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11223 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11224
11225 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11226 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11227
11228 if ((operand_equal_p (n0, a0, 0)
11229 && operand_equal_p (n1, a1, 0))
11230 || (operand_equal_p (n0, a1, 0)
11231 && operand_equal_p (n1, a0, 0)))
11232 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11233 }
11234
11235 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11236 if (t1 != NULL_TREE)
11237 return t1;
11238
11239 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11240
11241 This results in more efficient code for machines without a NAND
11242 instruction. Combine will canonicalize to the first form
11243 which will allow use of NAND instructions provided by the
11244 backend if they exist. */
11245 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11246 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11247 {
11248 return
11249 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11250 build2 (BIT_AND_EXPR, type,
11251 fold_convert_loc (loc, type,
11252 TREE_OPERAND (arg0, 0)),
11253 fold_convert_loc (loc, type,
11254 TREE_OPERAND (arg1, 0))));
11255 }
11256
11257 /* See if this can be simplified into a rotate first. If that
11258 is unsuccessful continue in the association code. */
11259 goto bit_rotate;
11260
11261 case BIT_XOR_EXPR:
11262 if (integer_all_onesp (arg1))
11263 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11264
11265 /* ~X ^ X is -1. */
11266 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11267 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11268 {
11269 t1 = build_zero_cst (type);
11270 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11271 return omit_one_operand_loc (loc, type, t1, arg1);
11272 }
11273
11274 /* X ^ ~X is -1. */
11275 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11276 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11277 {
11278 t1 = build_zero_cst (type);
11279 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11280 return omit_one_operand_loc (loc, type, t1, arg0);
11281 }
11282
11283 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11284 with a constant, and the two constants have no bits in common,
11285 we should treat this as a BIT_IOR_EXPR since this may produce more
11286 simplifications. */
11287 if (TREE_CODE (arg0) == BIT_AND_EXPR
11288 && TREE_CODE (arg1) == BIT_AND_EXPR
11289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11290 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11291 && wi::bit_and (TREE_OPERAND (arg0, 1),
11292 TREE_OPERAND (arg1, 1)) == 0)
11293 {
11294 code = BIT_IOR_EXPR;
11295 goto bit_ior;
11296 }
11297
11298 /* (X | Y) ^ X -> Y & ~X. */
11299 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11300 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11301 {
11302 tree t2 = TREE_OPERAND (arg0, 1);
11303 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11304 arg1);
11305 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11306 fold_convert_loc (loc, type, t2),
11307 fold_convert_loc (loc, type, t1));
11308 return t1;
11309 }
11310
11311 /* (Y | X) ^ X -> Y & ~X. */
11312 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11313 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11314 {
11315 tree t2 = TREE_OPERAND (arg0, 0);
11316 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11317 arg1);
11318 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11319 fold_convert_loc (loc, type, t2),
11320 fold_convert_loc (loc, type, t1));
11321 return t1;
11322 }
11323
11324 /* X ^ (X | Y) -> Y & ~X. */
11325 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11326 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11327 {
11328 tree t2 = TREE_OPERAND (arg1, 1);
11329 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11330 arg0);
11331 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11332 fold_convert_loc (loc, type, t2),
11333 fold_convert_loc (loc, type, t1));
11334 return t1;
11335 }
11336
11337 /* X ^ (Y | X) -> Y & ~X. */
11338 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11339 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11340 {
11341 tree t2 = TREE_OPERAND (arg1, 0);
11342 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11343 arg0);
11344 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11345 fold_convert_loc (loc, type, t2),
11346 fold_convert_loc (loc, type, t1));
11347 return t1;
11348 }
11349
11350 /* Convert ~X ^ ~Y to X ^ Y. */
11351 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11352 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11353 return fold_build2_loc (loc, code, type,
11354 fold_convert_loc (loc, type,
11355 TREE_OPERAND (arg0, 0)),
11356 fold_convert_loc (loc, type,
11357 TREE_OPERAND (arg1, 0)));
11358
11359 /* Convert ~X ^ C to X ^ ~C. */
11360 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11361 && TREE_CODE (arg1) == INTEGER_CST)
11362 return fold_build2_loc (loc, code, type,
11363 fold_convert_loc (loc, type,
11364 TREE_OPERAND (arg0, 0)),
11365 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11366
11367 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11368 if (TREE_CODE (arg0) == BIT_AND_EXPR
11369 && INTEGRAL_TYPE_P (type)
11370 && integer_onep (TREE_OPERAND (arg0, 1))
11371 && integer_onep (arg1))
11372 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11373 build_zero_cst (TREE_TYPE (arg0)));
11374
11375 /* Fold (X & Y) ^ Y as ~X & Y. */
11376 if (TREE_CODE (arg0) == BIT_AND_EXPR
11377 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11378 {
11379 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11380 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11381 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11382 fold_convert_loc (loc, type, arg1));
11383 }
11384 /* Fold (X & Y) ^ X as ~Y & X. */
11385 if (TREE_CODE (arg0) == BIT_AND_EXPR
11386 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11387 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11388 {
11389 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11390 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11391 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11392 fold_convert_loc (loc, type, arg1));
11393 }
11394 /* Fold X ^ (X & Y) as X & ~Y. */
11395 if (TREE_CODE (arg1) == BIT_AND_EXPR
11396 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11397 {
11398 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11399 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11400 fold_convert_loc (loc, type, arg0),
11401 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11402 }
11403 /* Fold X ^ (Y & X) as ~Y & X. */
11404 if (TREE_CODE (arg1) == BIT_AND_EXPR
11405 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11406 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11407 {
11408 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11409 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11410 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11411 fold_convert_loc (loc, type, arg0));
11412 }
11413
11414 /* See if this can be simplified into a rotate first. If that
11415 is unsuccessful continue in the association code. */
11416 goto bit_rotate;
11417
11418 case BIT_AND_EXPR:
11419 if (integer_all_onesp (arg1))
11420 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11421 if (operand_equal_p (arg0, arg1, 0))
11422 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11423
11424 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11425 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11426 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11427 || (TREE_CODE (arg0) == EQ_EXPR
11428 && integer_zerop (TREE_OPERAND (arg0, 1))))
11429 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11430 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11431
11432 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11433 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11434 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11435 || (TREE_CODE (arg1) == EQ_EXPR
11436 && integer_zerop (TREE_OPERAND (arg1, 1))))
11437 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11438 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11439
11440 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11441 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11442 && TREE_CODE (arg1) == INTEGER_CST
11443 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11444 {
11445 tree tmp1 = fold_convert_loc (loc, type, arg1);
11446 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11447 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11448 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11449 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11450 return
11451 fold_convert_loc (loc, type,
11452 fold_build2_loc (loc, BIT_IOR_EXPR,
11453 type, tmp2, tmp3));
11454 }
11455
11456 /* (X | Y) & Y is (X, Y). */
11457 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11458 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11459 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11460 /* (X | Y) & X is (Y, X). */
11461 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11462 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11463 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11464 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11465 /* X & (X | Y) is (Y, X). */
11466 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11467 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11468 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11469 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11470 /* X & (Y | X) is (Y, X). */
11471 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11472 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11473 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11474 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11475
11476 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11477 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11478 && INTEGRAL_TYPE_P (type)
11479 && integer_onep (TREE_OPERAND (arg0, 1))
11480 && integer_onep (arg1))
11481 {
11482 tree tem2;
11483 tem = TREE_OPERAND (arg0, 0);
11484 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11485 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11486 tem, tem2);
11487 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11488 build_zero_cst (TREE_TYPE (tem)));
11489 }
11490 /* Fold ~X & 1 as (X & 1) == 0. */
11491 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11492 && INTEGRAL_TYPE_P (type)
11493 && integer_onep (arg1))
11494 {
11495 tree tem2;
11496 tem = TREE_OPERAND (arg0, 0);
11497 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11498 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11499 tem, tem2);
11500 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11501 build_zero_cst (TREE_TYPE (tem)));
11502 }
11503 /* Fold !X & 1 as X == 0. */
11504 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11505 && integer_onep (arg1))
11506 {
11507 tem = TREE_OPERAND (arg0, 0);
11508 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11509 build_zero_cst (TREE_TYPE (tem)));
11510 }
11511
11512 /* Fold (X ^ Y) & Y as ~X & Y. */
11513 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11514 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11515 {
11516 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11517 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11518 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11519 fold_convert_loc (loc, type, arg1));
11520 }
11521 /* Fold (X ^ Y) & X as ~Y & X. */
11522 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11523 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11524 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11525 {
11526 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11527 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11528 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11529 fold_convert_loc (loc, type, arg1));
11530 }
11531 /* Fold X & (X ^ Y) as X & ~Y. */
11532 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11533 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11534 {
11535 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11536 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11537 fold_convert_loc (loc, type, arg0),
11538 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11539 }
11540 /* Fold X & (Y ^ X) as ~Y & X. */
11541 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11542 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11543 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11544 {
11545 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11546 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11547 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11548 fold_convert_loc (loc, type, arg0));
11549 }
11550
11551 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11552 multiple of 1 << CST. */
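 /* E.g. (illustrative): (X * 8) & -8 is X * 8, because X * 8 is a
    multiple of 8 and so already has its low three bits clear.  */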
11553 if (TREE_CODE (arg1) == INTEGER_CST)
11554 {
11555 wide_int cst1 = arg1;
11556 wide_int ncst1 = -cst1;
11557 if ((cst1 & ncst1) == ncst1
11558 && multiple_of_p (type, arg0,
11559 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11560 return fold_convert_loc (loc, type, arg0);
11561 }
11562
11563 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11564 bits from CST2. */
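 /* Illustrative: (X * 4) & 3 is 0, and (X * 4) & 0x3E becomes
    (X * 4) & 0x3C; a mode mask such as 0xFF is left alone by the
    check below since it may enable other optimizations.  */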
11565 if (TREE_CODE (arg1) == INTEGER_CST
11566 && TREE_CODE (arg0) == MULT_EXPR
11567 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11568 {
11569 wide_int warg1 = arg1;
11570 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11571
11572 if (masked == 0)
11573 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11574 arg0, arg1);
11575 else if (masked != warg1)
11576 {
11577 /* Avoid the transform if arg1 is a mask of some
11578 mode which allows further optimizations. */
11579 int pop = wi::popcount (warg1);
11580 if (!(pop >= BITS_PER_UNIT
11581 && exact_log2 (pop) != -1
11582 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11583 return fold_build2_loc (loc, code, type, op0,
11584 wide_int_to_tree (type, masked));
11585 }
11586 }
11587
11588 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11589 ((A & N) + B) & M -> (A + B) & M
11590 Similarly if (N & M) == 0,
11591 ((A | N) + B) & M -> (A + B) & M
11592 and for - instead of + (or unary - instead of +)
11593 and/or ^ instead of |.
11594 If B is constant and (B & M) == 0, fold into A & M. */
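 /* Illustrative, with M == 0xFF: ((A & 0x1FF) + B) & 0xFF becomes
    (A + B) & 0xFF, since carries only propagate upward and the bits
    of A above bit 7 cannot affect the retained low byte.  */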
11595 if (TREE_CODE (arg1) == INTEGER_CST)
11596 {
11597 wide_int cst1 = arg1;
11598 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11599 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11600 && (TREE_CODE (arg0) == PLUS_EXPR
11601 || TREE_CODE (arg0) == MINUS_EXPR
11602 || TREE_CODE (arg0) == NEGATE_EXPR)
11603 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11604 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11605 {
11606 tree pmop[2];
11607 int which = 0;
11608 wide_int cst0;
11609
11610 /* Now we know that arg0 is (C + D) or (C - D) or
11611 -C and arg1 (M) == (1LL << cst) - 1.
11612 Store C into PMOP[0] and D into PMOP[1]. */
11613 pmop[0] = TREE_OPERAND (arg0, 0);
11614 pmop[1] = NULL;
11615 if (TREE_CODE (arg0) != NEGATE_EXPR)
11616 {
11617 pmop[1] = TREE_OPERAND (arg0, 1);
11618 which = 1;
11619 }
11620
11621 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11622 which = -1;
11623
11624 for (; which >= 0; which--)
11625 switch (TREE_CODE (pmop[which]))
11626 {
11627 case BIT_AND_EXPR:
11628 case BIT_IOR_EXPR:
11629 case BIT_XOR_EXPR:
11630 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11631 != INTEGER_CST)
11632 break;
11633 cst0 = TREE_OPERAND (pmop[which], 1);
11634 cst0 &= cst1;
11635 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11636 {
11637 if (cst0 != cst1)
11638 break;
11639 }
11640 else if (cst0 != 0)
11641 break;
11642 /* If C or D is of the form (A & N) where
11643 (N & M) == M, or of the form (A | N) or
11644 (A ^ N) where (N & M) == 0, replace it with A. */
11645 pmop[which] = TREE_OPERAND (pmop[which], 0);
11646 break;
11647 case INTEGER_CST:
11648 /* If C or D is an N where (N & M) == 0, it can be
11649 omitted (assumed 0). */
11650 if ((TREE_CODE (arg0) == PLUS_EXPR
11651 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11652 && (cst1 & pmop[which]) == 0)
11653 pmop[which] = NULL;
11654 break;
11655 default:
11656 break;
11657 }
11658
11659 /* Only build anything new if we optimized one or both arguments
11660 above. */
11661 if (pmop[0] != TREE_OPERAND (arg0, 0)
11662 || (TREE_CODE (arg0) != NEGATE_EXPR
11663 && pmop[1] != TREE_OPERAND (arg0, 1)))
11664 {
11665 tree utype = TREE_TYPE (arg0);
11666 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11667 {
11668 /* Perform the operations in a type that has defined
11669 overflow behavior. */
11670 utype = unsigned_type_for (TREE_TYPE (arg0));
11671 if (pmop[0] != NULL)
11672 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11673 if (pmop[1] != NULL)
11674 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11675 }
11676
11677 if (TREE_CODE (arg0) == NEGATE_EXPR)
11678 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11679 else if (TREE_CODE (arg0) == PLUS_EXPR)
11680 {
11681 if (pmop[0] != NULL && pmop[1] != NULL)
11682 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11683 pmop[0], pmop[1]);
11684 else if (pmop[0] != NULL)
11685 tem = pmop[0];
11686 else if (pmop[1] != NULL)
11687 tem = pmop[1];
11688 else
11689 return build_int_cst (type, 0);
11690 }
11691 else if (pmop[0] == NULL)
11692 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11693 else
11694 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11695 pmop[0], pmop[1]);
11696 /* TEM is now the new binary +, - or unary - replacement. */
11697 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11698 fold_convert_loc (loc, utype, arg1));
11699 return fold_convert_loc (loc, type, tem);
11700 }
11701 }
11702 }
11703
11704 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11705 if (t1 != NULL_TREE)
11706 return t1;
11707 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11708 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11709 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11710 {
11711 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11712
11713 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11714 if (mask == -1)
11715 return
11716 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11717 }
11718
11719 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11720
11721 This results in more efficient code for machines without a NOR
11722 instruction. Combine will canonicalize to the first form
11723 which will allow use of NOR instructions provided by the
11724 backend if they exist. */
11725 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11726 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11727 {
11728 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11729 build2 (BIT_IOR_EXPR, type,
11730 fold_convert_loc (loc, type,
11731 TREE_OPERAND (arg0, 0)),
11732 fold_convert_loc (loc, type,
11733 TREE_OPERAND (arg1, 0))));
11734 }
11735
11736 /* If arg0 is derived from the address of an object or function, we may
11737 be able to fold this expression using the object or function's
11738 alignment. */
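 /* Illustrative: if ARG0 is the address of an object with 8-byte
    alignment, modulus == 8 and residue == 0, so ARG0 & 7 folds
    to 0.  */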
11739 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11740 {
11741 unsigned HOST_WIDE_INT modulus, residue;
11742 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11743
11744 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11745 integer_onep (arg1));
11746
11747 /* This works because modulus is a power of 2. If this weren't the
11748 case, we'd have to replace it by its greatest power-of-2
11749 divisor: modulus & -modulus. */
11750 if (low < modulus)
11751 return build_int_cst (type, residue & low);
11752 }
11753
11754 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11755 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11756 if the new mask might be further optimized. */
11757 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11758 || TREE_CODE (arg0) == RSHIFT_EXPR)
11759 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11760 && TREE_CODE (arg1) == INTEGER_CST
11761 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11762 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11763 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11764 < TYPE_PRECISION (TREE_TYPE (arg0))))
11765 {
11766 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11767 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11768 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11769 tree shift_type = TREE_TYPE (arg0);
11770
11771 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11772 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11773 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11774 && TYPE_PRECISION (TREE_TYPE (arg0))
11775 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11776 {
11777 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11778 tree arg00 = TREE_OPERAND (arg0, 0);
11779 /* See if more bits can be proven as zero because of
11780 zero extension. */
11781 if (TREE_CODE (arg00) == NOP_EXPR
11782 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11783 {
11784 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11785 if (TYPE_PRECISION (inner_type)
11786 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11787 && TYPE_PRECISION (inner_type) < prec)
11788 {
11789 prec = TYPE_PRECISION (inner_type);
11790 /* See if we can shorten the right shift. */
11791 if (shiftc < prec)
11792 shift_type = inner_type;
11793 /* Otherwise X >> C1 is all zeros, so we'll optimize
11794 it into (X, 0) later on by making sure zerobits
11795 is all ones. */
11796 }
11797 }
11798 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11799 if (shiftc < prec)
11800 {
11801 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11802 zerobits <<= prec - shiftc;
11803 }
11804		  /* For an arithmetic shift, if the sign bit could be set,
11805		     zerobits may actually contain sign bits, so no transformation
11806		     is possible unless MASK masks them all away.  In that
11807		     case the shift needs to be converted into a logical shift.  */
11808 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11809 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11810 {
11811 if ((mask & zerobits) == 0)
11812 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11813 else
11814 zerobits = 0;
11815 }
11816 }
11817
11818 /* ((X << 16) & 0xff00) is (X, 0). */
11819 if ((mask & zerobits) == mask)
11820 return omit_one_operand_loc (loc, type,
11821 build_int_cst (type, 0), arg0);
11822
11823 newmask = mask | zerobits;
11824 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11825 {
11826 /* Only do the transformation if NEWMASK is some integer
11827 mode's mask. */
11828 for (prec = BITS_PER_UNIT;
11829 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11830 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11831 break;
11832 if (prec < HOST_BITS_PER_WIDE_INT
11833 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11834 {
11835 tree newmaskt;
11836
11837 if (shift_type != TREE_TYPE (arg0))
11838 {
11839 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11840 fold_convert_loc (loc, shift_type,
11841 TREE_OPERAND (arg0, 0)),
11842 TREE_OPERAND (arg0, 1));
11843 tem = fold_convert_loc (loc, type, tem);
11844 }
11845 else
11846 tem = op0;
11847 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11848 if (!tree_int_cst_equal (newmaskt, arg1))
11849 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11850 }
11851 }
11852 }
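      /* Editor's illustration (not part of the original source): for a
	 32-bit int, the zerobits logic above proves that

	   int f (int x) { return (x << 16) & 0xff00; }

	 always yields 0, because the shift clears every bit the mask can
	 select, so the expression folds to the constant 0.  */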
11853
11854 goto associate;
11855
11856 case RDIV_EXPR:
11857 /* Don't touch a floating-point divide by zero unless the mode
11858 of the constant can represent infinity. */
11859 if (TREE_CODE (arg1) == REAL_CST
11860 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11861 && real_zerop (arg1))
11862 return NULL_TREE;
11863
11864 /* Optimize A / A to 1.0 if we don't care about
11865 NaNs or Infinities. Skip the transformation
11866 for non-real operands. */
11867 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11868 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11869 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11870 && operand_equal_p (arg0, arg1, 0))
11871 {
11872 tree r = build_real (TREE_TYPE (arg0), dconst1);
11873
11874 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11875 }
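      /* Editor's illustration (not part of the original source): when NaNs
	 and infinities are not honored (e.g. -ffinite-math-only),

	   double f (double x) { return x / x; }

	 folds to 1.0; under default IEEE semantics it must stay a division,
	 since x could be 0.0, a NaN, or an infinity.  */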
11876
11877 /* The complex version of the above A / A optimization. */
11878 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11879 && operand_equal_p (arg0, arg1, 0))
11880 {
11881 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11882 if (! HONOR_NANS (TYPE_MODE (elem_type))
11883 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11884 {
11885 tree r = build_real (elem_type, dconst1);
11886 /* omit_two_operands will call fold_convert for us. */
11887 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11888 }
11889 }
11890
11891 /* (-A) / (-B) -> A / B */
11892 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11893 return fold_build2_loc (loc, RDIV_EXPR, type,
11894 TREE_OPERAND (arg0, 0),
11895 negate_expr (arg1));
11896 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11897 return fold_build2_loc (loc, RDIV_EXPR, type,
11898 negate_expr (arg0),
11899 TREE_OPERAND (arg1, 0));
11900
11901 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11902 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11903 && real_onep (arg1))
11904 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11905
11906 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11907 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11908 && real_minus_onep (arg1))
11909 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11910 negate_expr (arg0)));
11911
11912 /* If ARG1 is a constant, we can convert this to a multiply by the
11913 reciprocal. This does not have the same rounding properties,
11914 so only do this if -freciprocal-math. We can actually
11915 always safely do it if ARG1 is a power of two, but it's hard to
11916 tell if it is or not in a portable manner. */
11917 if (optimize
11918 && (TREE_CODE (arg1) == REAL_CST
11919 || (TREE_CODE (arg1) == COMPLEX_CST
11920 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11921 || (TREE_CODE (arg1) == VECTOR_CST
11922 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11923 {
11924 if (flag_reciprocal_math
11925 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11926 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11927 /* Find the reciprocal if optimizing and the result is exact.
11928 TODO: Complex reciprocal not implemented. */
11929 if (TREE_CODE (arg1) != COMPLEX_CST)
11930 {
11931 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11932
11933 if (inverse)
11934 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11935 }
11936 }
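      /* Editor's illustration (not part of the original source): the
	 exact-inverse path rewrites

	   double f (double x) { return x / 2.0; }

	 as x * 0.5 whenever optimizing, since 2.0 is a power of two and its
	 reciprocal is exact; x / 3.0 becomes x * (1.0 / 3.0) only under
	 -freciprocal-math, where rounding may differ.  */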
11937 /* Convert A/B/C to A/(B*C). */
11938 if (flag_reciprocal_math
11939 && TREE_CODE (arg0) == RDIV_EXPR)
11940 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11941 fold_build2_loc (loc, MULT_EXPR, type,
11942 TREE_OPERAND (arg0, 1), arg1));
11943
11944 /* Convert A/(B/C) to (A/B)*C. */
11945 if (flag_reciprocal_math
11946 && TREE_CODE (arg1) == RDIV_EXPR)
11947 return fold_build2_loc (loc, MULT_EXPR, type,
11948 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11949 TREE_OPERAND (arg1, 0)),
11950 TREE_OPERAND (arg1, 1));
11951
11952 /* Convert C1/(X*C2) into (C1/C2)/X. */
11953 if (flag_reciprocal_math
11954 && TREE_CODE (arg1) == MULT_EXPR
11955 && TREE_CODE (arg0) == REAL_CST
11956 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11957 {
11958 tree tem = const_binop (RDIV_EXPR, arg0,
11959 TREE_OPERAND (arg1, 1));
11960 if (tem)
11961 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11962 TREE_OPERAND (arg1, 0));
11963 }
11964
11965 if (flag_unsafe_math_optimizations)
11966 {
11967 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11968 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11969
11970 /* Optimize sin(x)/cos(x) as tan(x). */
11971 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11972 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11973 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11974 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11975 CALL_EXPR_ARG (arg1, 0), 0))
11976 {
11977 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11978
11979 if (tanfn != NULL_TREE)
11980 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11981 }
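	  /* Editor's illustration (not part of the original source): with
	     -funsafe-math-optimizations,

	       #include <math.h>
	       double f (double x) { return sin (x) / cos (x); }

	     is folded into the single call tan (x).  */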
11982
11983 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11984 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11985 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11986 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11987 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11988 CALL_EXPR_ARG (arg1, 0), 0))
11989 {
11990 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11991
11992 if (tanfn != NULL_TREE)
11993 {
11994 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11995 CALL_EXPR_ARG (arg0, 0));
11996 return fold_build2_loc (loc, RDIV_EXPR, type,
11997 build_real (type, dconst1), tmp);
11998 }
11999 }
12000
12001 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12002 NaNs or Infinities. */
12003 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12004 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12005 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12006 {
12007 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12008 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12009
12010 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12011 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12012 && operand_equal_p (arg00, arg01, 0))
12013 {
12014 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12015
12016 if (cosfn != NULL_TREE)
12017 return build_call_expr_loc (loc, cosfn, 1, arg00);
12018 }
12019 }
12020
12021 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12022 NaNs or Infinities. */
12023 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12024 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12025 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12026 {
12027 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12028 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12029
12030 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12031 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12032 && operand_equal_p (arg00, arg01, 0))
12033 {
12034 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12035
12036 if (cosfn != NULL_TREE)
12037 {
12038 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12039 return fold_build2_loc (loc, RDIV_EXPR, type,
12040 build_real (type, dconst1),
12041 tmp);
12042 }
12043 }
12044 }
12045
12046 /* Optimize pow(x,c)/x as pow(x,c-1). */
12047 if (fcode0 == BUILT_IN_POW
12048 || fcode0 == BUILT_IN_POWF
12049 || fcode0 == BUILT_IN_POWL)
12050 {
12051 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12052 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12053 if (TREE_CODE (arg01) == REAL_CST
12054 && !TREE_OVERFLOW (arg01)
12055 && operand_equal_p (arg1, arg00, 0))
12056 {
12057 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12058 REAL_VALUE_TYPE c;
12059 tree arg;
12060
12061 c = TREE_REAL_CST (arg01);
12062 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12063 arg = build_real (type, c);
12064 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12065 }
12066 }
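	  /* Editor's illustration (not part of the original source): with
	     -funsafe-math-optimizations,

	       #include <math.h>
	       double f (double x) { return pow (x, 3.0) / x; }

	     becomes pow (x, 2.0), the exponent constant reduced by one.  */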
12067
12068 /* Optimize a/root(b/c) into a*root(c/b). */
12069 if (BUILTIN_ROOT_P (fcode1))
12070 {
12071 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12072
12073 if (TREE_CODE (rootarg) == RDIV_EXPR)
12074 {
12075 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12076 tree b = TREE_OPERAND (rootarg, 0);
12077 tree c = TREE_OPERAND (rootarg, 1);
12078
12079 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12080
12081 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12082 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12083 }
12084 }
12085
12086 /* Optimize x/expN(y) into x*expN(-y). */
12087 if (BUILTIN_EXPONENT_P (fcode1))
12088 {
12089 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12090 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12091 arg1 = build_call_expr_loc (loc,
12092 expfn, 1,
12093 fold_convert_loc (loc, type, arg));
12094 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12095 }
12096
12097 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12098 if (fcode1 == BUILT_IN_POW
12099 || fcode1 == BUILT_IN_POWF
12100 || fcode1 == BUILT_IN_POWL)
12101 {
12102 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12103 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12104 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12105 tree neg11 = fold_convert_loc (loc, type,
12106 negate_expr (arg11));
12107 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12108 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12109 }
12110 }
12111 return NULL_TREE;
12112
12113 case TRUNC_DIV_EXPR:
12114 /* Optimize (X & (-A)) / A where A is a power of 2,
12115 to X >> log2(A) */
12116 if (TREE_CODE (arg0) == BIT_AND_EXPR
12117 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12118 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12119 {
12120 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12121 arg1, TREE_OPERAND (arg0, 1));
12122	      if (sum && integer_zerop (sum))
		{
12123 tree pow2 = build_int_cst (integer_type_node,
12124 wi::exact_log2 (arg1));
12125 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12126 TREE_OPERAND (arg0, 0), pow2);
12127 }
12128 }
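      /* Editor's illustration (not part of the original source): since
	 x & -4 is always a multiple of 4, the truncating division is exact
	 and

	   int f (int x) { return (x & -4) / 4; }

	 folds to the arithmetic shift x >> 2.  */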
12129
12130 /* Fall through */
12131
12132 case FLOOR_DIV_EXPR:
12133 /* Simplify A / (B << N) where A and B are positive and B is
12134 a power of 2, to A >> (N + log2(B)). */
12135 strict_overflow_p = false;
12136 if (TREE_CODE (arg1) == LSHIFT_EXPR
12137 && (TYPE_UNSIGNED (type)
12138 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12139 {
12140 tree sval = TREE_OPERAND (arg1, 0);
12141 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12142 {
12143 tree sh_cnt = TREE_OPERAND (arg1, 1);
12144 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12145 wi::exact_log2 (sval));
12146
12147 if (strict_overflow_p)
12148 fold_overflow_warning (("assuming signed overflow does not "
12149 "occur when simplifying A / (B << N)"),
12150 WARN_STRICT_OVERFLOW_MISC);
12151
12152 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12153 sh_cnt, pow2);
12154 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12155 fold_convert_loc (loc, type, arg0), sh_cnt);
12156 }
12157 }
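      /* Editor's illustration (not part of the original source): for an
	 unsigned (or provably nonnegative) dividend,

	   unsigned f (unsigned a, int n) { return a / (4u << n); }

	 folds to a >> (n + 2), i.e. the shift count plus log2 of the
	 power-of-two base.  */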
12158
12159 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12160 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12161 if (INTEGRAL_TYPE_P (type)
12162 && TYPE_UNSIGNED (type)
12163 && code == FLOOR_DIV_EXPR)
12164 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12165
12166 /* Fall through */
12167
12168 case ROUND_DIV_EXPR:
12169 case CEIL_DIV_EXPR:
12170 case EXACT_DIV_EXPR:
12171 if (integer_zerop (arg1))
12172 return NULL_TREE;
12173 /* X / -1 is -X. */
12174 if (!TYPE_UNSIGNED (type)
12175 && TREE_CODE (arg1) == INTEGER_CST
12176 && wi::eq_p (arg1, -1))
12177 return fold_convert_loc (loc, type, negate_expr (arg0));
12178
12179 /* Convert -A / -B to A / B when the type is signed and overflow is
12180 undefined. */
12181 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12182 && TREE_CODE (arg0) == NEGATE_EXPR
12183 && negate_expr_p (arg1))
12184 {
12185 if (INTEGRAL_TYPE_P (type))
12186 fold_overflow_warning (("assuming signed overflow does not occur "
12187 "when distributing negation across "
12188 "division"),
12189 WARN_STRICT_OVERFLOW_MISC);
12190 return fold_build2_loc (loc, code, type,
12191 fold_convert_loc (loc, type,
12192 TREE_OPERAND (arg0, 0)),
12193 fold_convert_loc (loc, type,
12194 negate_expr (arg1)));
12195 }
12196 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12197 && TREE_CODE (arg1) == NEGATE_EXPR
12198 && negate_expr_p (arg0))
12199 {
12200 if (INTEGRAL_TYPE_P (type))
12201 fold_overflow_warning (("assuming signed overflow does not occur "
12202 "when distributing negation across "
12203 "division"),
12204 WARN_STRICT_OVERFLOW_MISC);
12205 return fold_build2_loc (loc, code, type,
12206 fold_convert_loc (loc, type,
12207 negate_expr (arg0)),
12208 fold_convert_loc (loc, type,
12209 TREE_OPERAND (arg1, 0)));
12210 }
12211
12212 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12213 operation, EXACT_DIV_EXPR.
12214
12215 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12216		 At one time others generated faster code; it's not clear whether they
12217		 still do after the last round of changes to the DIV code in expmed.c.  */
12218 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12219 && multiple_of_p (type, arg0, arg1))
12220 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12221
12222 strict_overflow_p = false;
12223 if (TREE_CODE (arg1) == INTEGER_CST
12224 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12225 &strict_overflow_p)))
12226 {
12227 if (strict_overflow_p)
12228 fold_overflow_warning (("assuming signed overflow does not occur "
12229 "when simplifying division"),
12230 WARN_STRICT_OVERFLOW_MISC);
12231 return fold_convert_loc (loc, type, tem);
12232 }
12233
12234 return NULL_TREE;
12235
12236 case CEIL_MOD_EXPR:
12237 case FLOOR_MOD_EXPR:
12238 case ROUND_MOD_EXPR:
12239 case TRUNC_MOD_EXPR:
12240 /* X % -1 is zero. */
12241 if (!TYPE_UNSIGNED (type)
12242 && TREE_CODE (arg1) == INTEGER_CST
12243 && wi::eq_p (arg1, -1))
12244 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12245
12246 /* X % -C is the same as X % C. */
12247 if (code == TRUNC_MOD_EXPR
12248 && TYPE_SIGN (type) == SIGNED
12249 && TREE_CODE (arg1) == INTEGER_CST
12250 && !TREE_OVERFLOW (arg1)
12251 && wi::neg_p (arg1)
12252 && !TYPE_OVERFLOW_TRAPS (type)
12253 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12254 && !sign_bit_p (arg1, arg1))
12255 return fold_build2_loc (loc, code, type,
12256 fold_convert_loc (loc, type, arg0),
12257 fold_convert_loc (loc, type,
12258 negate_expr (arg1)));
12259
12260 /* X % -Y is the same as X % Y. */
12261 if (code == TRUNC_MOD_EXPR
12262 && !TYPE_UNSIGNED (type)
12263 && TREE_CODE (arg1) == NEGATE_EXPR
12264 && !TYPE_OVERFLOW_TRAPS (type))
12265 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12266 fold_convert_loc (loc, type,
12267 TREE_OPERAND (arg1, 0)));
12268
12269 strict_overflow_p = false;
12270 if (TREE_CODE (arg1) == INTEGER_CST
12271 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12272 &strict_overflow_p)))
12273 {
12274 if (strict_overflow_p)
12275 fold_overflow_warning (("assuming signed overflow does not occur "
12276 "when simplifying modulus"),
12277 WARN_STRICT_OVERFLOW_MISC);
12278 return fold_convert_loc (loc, type, tem);
12279 }
12280
12281 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12282 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12283 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12284 && (TYPE_UNSIGNED (type)
12285 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12286 {
12287 tree c = arg1;
12288 /* Also optimize A % (C << N) where C is a power of 2,
12289 to A & ((C << N) - 1). */
12290 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12291 c = TREE_OPERAND (arg1, 0);
12292
12293 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12294 {
12295 tree mask
12296 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12297 build_int_cst (TREE_TYPE (arg1), 1));
12298 if (strict_overflow_p)
12299 fold_overflow_warning (("assuming signed overflow does not "
12300 "occur when simplifying "
12301 "X % (power of two)"),
12302 WARN_STRICT_OVERFLOW_MISC);
12303 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12304 fold_convert_loc (loc, type, arg0),
12305 fold_convert_loc (loc, type, mask));
12306 }
12307 }
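      /* Editor's illustration (not part of the original source): for an
	 unsigned operand,

	   unsigned f (unsigned x) { return x % 8; }

	 folds to x & 7; likewise x % (8u << n) folds to
	 x & ((8u << n) - 1).  */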
12308
12309 return NULL_TREE;
12310
12311 case LROTATE_EXPR:
12312 case RROTATE_EXPR:
12313 if (integer_all_onesp (arg0))
12314 return omit_one_operand_loc (loc, type, arg0, arg1);
12315 goto shift;
12316
12317 case RSHIFT_EXPR:
12318 /* Optimize -1 >> x for arithmetic right shifts. */
12319 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12320 && tree_expr_nonnegative_p (arg1))
12321 return omit_one_operand_loc (loc, type, arg0, arg1);
12322 /* ... fall through ... */
12323
12324 case LSHIFT_EXPR:
12325 shift:
12326 if (integer_zerop (arg1))
12327 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12328 if (integer_zerop (arg0))
12329 return omit_one_operand_loc (loc, type, arg0, arg1);
12330
12331 /* Prefer vector1 << scalar to vector1 << vector2
12332 if vector2 is uniform. */
12333 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12334 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12335 return fold_build2_loc (loc, code, type, op0, tem);
12336
12337 /* Since negative shift count is not well-defined,
12338 don't try to compute it in the compiler. */
12339 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12340 return NULL_TREE;
12341
12342 prec = element_precision (type);
12343
12344 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12345 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12346 && tree_to_uhwi (arg1) < prec
12347 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12348 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12349 {
12350 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12351 + tree_to_uhwi (arg1));
12352
12353 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12354 being well defined. */
12355 if (low >= prec)
12356 {
12357 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12358 low = low % prec;
12359 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12360 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12361 TREE_OPERAND (arg0, 0));
12362 else
12363 low = prec - 1;
12364 }
12365
12366 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12367 build_int_cst (TREE_TYPE (arg1), low));
12368 }
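      /* Editor's illustration (not part of the original source): with the
	 shift counts summed as above,

	   int f (int x) { return (x << 3) << 4; }

	 folds to x << 7; had the counts summed to the type precision or
	 more, a left shift would instead fold to 0.  */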
12369
12370 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12371 into x & ((unsigned)-1 >> c) for unsigned types. */
12372 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12373 || (TYPE_UNSIGNED (type)
12374 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12375 && tree_fits_uhwi_p (arg1)
12376 && tree_to_uhwi (arg1) < prec
12377 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12378 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12379 {
12380 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12381 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12382 tree lshift;
12383 tree arg00;
12384
12385 if (low0 == low1)
12386 {
12387 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12388
12389 lshift = build_minus_one_cst (type);
12390 lshift = const_binop (code, lshift, arg1);
12391
12392 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12393 }
12394 }
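      /* Editor's illustration (not part of the original source): when both
	 shift counts match,

	   int f (int x) { return (x >> 4) << 4; }

	 folds to x & -16, clearing the low four bits without any shifts.  */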
12395
12396 /* Rewrite an LROTATE_EXPR by a constant into an
12397 RROTATE_EXPR by a new constant. */
12398 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12399 {
12400 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12401 tem = const_binop (MINUS_EXPR, tem, arg1);
12402 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12403 }
12404
12405 /* If we have a rotate of a bit operation with the rotate count and
12406 the second operand of the bit operation both constant,
12407 permute the two operations. */
12408 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12409 && (TREE_CODE (arg0) == BIT_AND_EXPR
12410 || TREE_CODE (arg0) == BIT_IOR_EXPR
12411 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12412 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12413 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12414 fold_build2_loc (loc, code, type,
12415 TREE_OPERAND (arg0, 0), arg1),
12416 fold_build2_loc (loc, code, type,
12417 TREE_OPERAND (arg0, 1), arg1));
12418
12419	      /* Two consecutive rotates adding up to some integer
12420 multiple of the precision of the type can be ignored. */
12421 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12422 && TREE_CODE (arg0) == RROTATE_EXPR
12423 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12424 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12425 prec) == 0)
12426 return TREE_OPERAND (arg0, 0);
12427
12428 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12429 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12430 if the latter can be further optimized. */
12431 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12432 && TREE_CODE (arg0) == BIT_AND_EXPR
12433 && TREE_CODE (arg1) == INTEGER_CST
12434 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12435 {
12436 tree mask = fold_build2_loc (loc, code, type,
12437 fold_convert_loc (loc, type,
12438 TREE_OPERAND (arg0, 1)),
12439 arg1);
12440 tree shift = fold_build2_loc (loc, code, type,
12441 fold_convert_loc (loc, type,
12442 TREE_OPERAND (arg0, 0)),
12443 arg1);
12444 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12445 if (tem)
12446 return tem;
12447 }
12448
12449 return NULL_TREE;
12450
12451 case MIN_EXPR:
12452 if (operand_equal_p (arg0, arg1, 0))
12453 return omit_one_operand_loc (loc, type, arg0, arg1);
12454 if (INTEGRAL_TYPE_P (type)
12455 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12456 return omit_one_operand_loc (loc, type, arg1, arg0);
12457 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12458 if (tem)
12459 return tem;
12460 goto associate;
12461
12462 case MAX_EXPR:
12463 if (operand_equal_p (arg0, arg1, 0))
12464 return omit_one_operand_loc (loc, type, arg0, arg1);
12465 if (INTEGRAL_TYPE_P (type)
12466 && TYPE_MAX_VALUE (type)
12467 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12468 return omit_one_operand_loc (loc, type, arg1, arg0);
12469 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12470 if (tem)
12471 return tem;
12472 goto associate;
12473
12474 case TRUTH_ANDIF_EXPR:
12475 /* Note that the operands of this must be ints
12476 and their values must be 0 or 1.
12477 ("true" is a fixed value perhaps depending on the language.) */
12478 /* If first arg is constant zero, return it. */
12479 if (integer_zerop (arg0))
12480 return fold_convert_loc (loc, type, arg0);
12481 case TRUTH_AND_EXPR:
12482 /* If either arg is constant true, drop it. */
12483 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12484 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12485 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12486 /* Preserve sequence points. */
12487 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12488 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12489 /* If second arg is constant zero, result is zero, but first arg
12490 must be evaluated. */
12491 if (integer_zerop (arg1))
12492 return omit_one_operand_loc (loc, type, arg1, arg0);
12493 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12494 case will be handled here. */
12495 if (integer_zerop (arg0))
12496 return omit_one_operand_loc (loc, type, arg0, arg1);
12497
12498 /* !X && X is always false. */
12499 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12500 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12501 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12502 /* X && !X is always false. */
12503 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12504 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12505 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12506
12507 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12508 means A >= Y && A != MAX, but in this case we know that
12509 A < X <= MAX. */
12510
12511 if (!TREE_SIDE_EFFECTS (arg0)
12512 && !TREE_SIDE_EFFECTS (arg1))
12513 {
12514 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12515 if (tem && !operand_equal_p (tem, arg0, 0))
12516 return fold_build2_loc (loc, code, type, tem, arg1);
12517
12518 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12519 if (tem && !operand_equal_p (tem, arg1, 0))
12520 return fold_build2_loc (loc, code, type, arg0, tem);
12521 }
12522
12523 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12524 != NULL_TREE)
12525 return tem;
12526
12527 return NULL_TREE;
12528
12529 case TRUTH_ORIF_EXPR:
12530 /* Note that the operands of this must be ints
12531 and their values must be 0 or true.
12532 ("true" is a fixed value perhaps depending on the language.) */
12533 /* If first arg is constant true, return it. */
12534 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12535 return fold_convert_loc (loc, type, arg0);
12536 case TRUTH_OR_EXPR:
12537 /* If either arg is constant zero, drop it. */
12538 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12539 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12540 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12541 /* Preserve sequence points. */
12542 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12543 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12544 /* If second arg is constant true, result is true, but we must
12545 evaluate first arg. */
12546 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12547 return omit_one_operand_loc (loc, type, arg1, arg0);
12548 /* Likewise for first arg, but note this only occurs here for
12549 TRUTH_OR_EXPR. */
12550 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12551 return omit_one_operand_loc (loc, type, arg0, arg1);
12552
12553 /* !X || X is always true. */
12554 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12555 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12556 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12557 /* X || !X is always true. */
12558 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12559 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12560 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12561
12562 /* (X && !Y) || (!X && Y) is X ^ Y */
12563 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12564 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12565 {
12566 tree a0, a1, l0, l1, n0, n1;
12567
12568 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12569 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12570
12571 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12572 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12573
12574 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12575 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12576
12577 if ((operand_equal_p (n0, a0, 0)
12578 && operand_equal_p (n1, a1, 0))
12579 || (operand_equal_p (n0, a1, 0)
12580 && operand_equal_p (n1, a0, 0)))
12581 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12582 }
12583
12584 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12585 != NULL_TREE)
12586 return tem;
12587
12588 return NULL_TREE;
12589
12590 case TRUTH_XOR_EXPR:
12591 /* If the second arg is constant zero, drop it. */
12592 if (integer_zerop (arg1))
12593 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12594 /* If the second arg is constant true, this is a logical inversion. */
12595 if (integer_onep (arg1))
12596 {
12597 tem = invert_truthvalue_loc (loc, arg0);
12598 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12599 }
12600 /* Identical arguments cancel to zero. */
12601 if (operand_equal_p (arg0, arg1, 0))
12602 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12603
12604 /* !X ^ X is always true. */
12605 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12606 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12607 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12608
12609 /* X ^ !X is always true. */
12610 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12611 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12612 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12613
12614 return NULL_TREE;
12615
12616 case EQ_EXPR:
12617 case NE_EXPR:
12618 STRIP_NOPS (arg0);
12619 STRIP_NOPS (arg1);
12620
12621 tem = fold_comparison (loc, code, type, op0, op1);
12622 if (tem != NULL_TREE)
12623 return tem;
12624
12625 /* bool_var != 0 becomes bool_var. */
12626 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12627 && code == NE_EXPR)
12628 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12629
12630 /* bool_var == 1 becomes bool_var. */
12631 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12632 && code == EQ_EXPR)
12633 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12634
12635 /* bool_var != 1 becomes !bool_var. */
12636 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12637 && code == NE_EXPR)
12638 return fold_convert_loc (loc, type,
12639 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12640 TREE_TYPE (arg0), arg0));
12641
12642 /* bool_var == 0 becomes !bool_var. */
12643 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12644 && code == EQ_EXPR)
12645 return fold_convert_loc (loc, type,
12646 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12647 TREE_TYPE (arg0), arg0));
12648
12649 /* !exp != 0 becomes !exp */
12650 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12651 && code == NE_EXPR)
12652 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12653
12654 /* If this is an equality comparison of the address of two non-weak,
12655 unaliased symbols neither of which are extern (since we do not
12656 have access to attributes for externs), then we know the result. */
12657 if (TREE_CODE (arg0) == ADDR_EXPR
12658 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12659 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12660 && ! lookup_attribute ("alias",
12661 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12662 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12663 && TREE_CODE (arg1) == ADDR_EXPR
12664 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12665 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12666 && ! lookup_attribute ("alias",
12667 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12668 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12669 {
12670 /* We know that we're looking at the address of two
12671 non-weak, unaliased, static _DECL nodes.
12672
12673 It is both wasteful and incorrect to call operand_equal_p
12674 to compare the two ADDR_EXPR nodes. It is wasteful in that
12675 all we need to do is test pointer equality for the arguments
12676 to the two ADDR_EXPR nodes. It is incorrect to use
12677 operand_equal_p as that function is NOT equivalent to a
12678 C equality test. It can in fact return false for two
12679 objects which would test as equal using the C equality
12680 operator. */
12681 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12682 return constant_boolean_node (equal
12683 ? code == EQ_EXPR : code != EQ_EXPR,
12684 type);
12685 }
12686
12687 /* Similarly for a NEGATE_EXPR. */
12688 if (TREE_CODE (arg0) == NEGATE_EXPR
12689 && TREE_CODE (arg1) == INTEGER_CST
12690 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12691 arg1)))
12692 && TREE_CODE (tem) == INTEGER_CST
12693 && !TREE_OVERFLOW (tem))
12694 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12695
12696 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12697 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12698 && TREE_CODE (arg1) == INTEGER_CST
12699 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12700 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12701 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12702 fold_convert_loc (loc,
12703 TREE_TYPE (arg0),
12704 arg1),
12705 TREE_OPERAND (arg0, 1)));
12706
12707 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12708 if ((TREE_CODE (arg0) == PLUS_EXPR
12709 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12710 || TREE_CODE (arg0) == MINUS_EXPR)
12711 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12712 0)),
12713 arg1, 0)
12714 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12715 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12716 {
12717 tree val = TREE_OPERAND (arg0, 1);
12718 return omit_two_operands_loc (loc, type,
12719 fold_build2_loc (loc, code, type,
12720 val,
12721 build_int_cst (TREE_TYPE (val),
12722 0)),
12723 TREE_OPERAND (arg0, 0), arg1);
12724 }
12725
12726 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12727 if (TREE_CODE (arg0) == MINUS_EXPR
12728 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12729 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12730 1)),
12731 arg1, 0)
12732 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12733 {
12734 return omit_two_operands_loc (loc, type,
12735 code == NE_EXPR
12736 ? boolean_true_node : boolean_false_node,
12737 TREE_OPERAND (arg0, 1), arg1);
12738 }
12739
12740 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12741 if (TREE_CODE (arg0) == ABS_EXPR
12742 && (integer_zerop (arg1) || real_zerop (arg1)))
12743 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12744
12745 /* If this is an EQ or NE comparison with zero and ARG0 is
12746 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12747 two operations, but the latter can be done in one less insn
12748 on machines that have only two-operand insns or on which a
12749 constant cannot be the first operand. */
12750 if (TREE_CODE (arg0) == BIT_AND_EXPR
12751 && integer_zerop (arg1))
12752 {
12753 tree arg00 = TREE_OPERAND (arg0, 0);
12754 tree arg01 = TREE_OPERAND (arg0, 1);
12755 if (TREE_CODE (arg00) == LSHIFT_EXPR
12756 && integer_onep (TREE_OPERAND (arg00, 0)))
12757 {
12758 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12759 arg01, TREE_OPERAND (arg00, 1));
12760 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12761 build_int_cst (TREE_TYPE (arg0), 1));
12762 return fold_build2_loc (loc, code, type,
12763 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12764 arg1);
12765 }
12766 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12767 && integer_onep (TREE_OPERAND (arg01, 0)))
12768 {
12769 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12770 arg00, TREE_OPERAND (arg01, 1));
12771 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12772 build_int_cst (TREE_TYPE (arg0), 1));
12773 return fold_build2_loc (loc, code, type,
12774 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12775 arg1);
12776 }
12777 }
12778
12779 /* If this is an NE or EQ comparison of zero against the result of a
12780 signed MOD operation whose second operand is a power of 2, make
12781 the MOD operation unsigned since it is simpler and equivalent. */
12782 if (integer_zerop (arg1)
12783 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12784 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12785 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12786 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12787 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12788 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12789 {
12790 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12791 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12792 fold_convert_loc (loc, newtype,
12793 TREE_OPERAND (arg0, 0)),
12794 fold_convert_loc (loc, newtype,
12795 TREE_OPERAND (arg0, 1)));
12796
12797 return fold_build2_loc (loc, code, type, newmod,
12798 fold_convert_loc (loc, newtype, arg1));
12799 }
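	  /* Editor's illustration (not part of the original source):
	     because only the zero/nonzero result matters here,

	       int f (int x) { return x % 4 == 0; }

	     is rewritten with an unsigned modulus, which the power-of-two
	     fold above then reduces to the cheaper test (x & 3) == 0.  */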
12800
12801 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12802 C1 is a valid shift constant, and C2 is a power of two, i.e.
12803 a single bit. */
12804 if (TREE_CODE (arg0) == BIT_AND_EXPR
12805 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12806 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12807 == INTEGER_CST
12808 && integer_pow2p (TREE_OPERAND (arg0, 1))
12809 && integer_zerop (arg1))
12810 {
12811 tree itype = TREE_TYPE (arg0);
12812 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12813 prec = TYPE_PRECISION (itype);
12814
12815 /* Check for a valid shift count. */
12816 if (wi::ltu_p (arg001, prec))
12817 {
12818 tree arg01 = TREE_OPERAND (arg0, 1);
12819 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12820 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12821 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12822 can be rewritten as (X & (C2 << C1)) != 0. */
12823 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12824 {
12825 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12826 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12827 return fold_build2_loc (loc, code, type, tem,
12828 fold_convert_loc (loc, itype, arg1));
12829 }
12830 /* Otherwise, for signed (arithmetic) shifts,
12831 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12832 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12833 else if (!TYPE_UNSIGNED (itype))
12834 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12835 arg000, build_int_cst (itype, 0));
12836		  /* Otherwise, for unsigned (logical) shifts,
12837 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12838 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12839 else
12840 return omit_one_operand_loc (loc, type,
12841 code == EQ_EXPR ? integer_one_node
12842 : integer_zero_node,
12843 arg000);
12844 }
12845 }
12846
12847 /* If we have (A & C) == C where C is a power of 2, convert this into
12848 (A & C) != 0. Similarly for NE_EXPR. */
12849 if (TREE_CODE (arg0) == BIT_AND_EXPR
12850 && integer_pow2p (TREE_OPERAND (arg0, 1))
12851 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12852 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12853 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12854 integer_zero_node));
12855
12856 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12857 bit, then fold the expression into A < 0 or A >= 0. */
12858 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12859 if (tem)
12860 return tem;
12861
12862 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12863 Similarly for NE_EXPR. */
12864 if (TREE_CODE (arg0) == BIT_AND_EXPR
12865 && TREE_CODE (arg1) == INTEGER_CST
12866 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12867 {
12868 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12869 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12870 TREE_OPERAND (arg0, 1));
12871 tree dandnotc
12872 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12873 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12874 notc);
12875 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12876 if (integer_nonzerop (dandnotc))
12877 return omit_one_operand_loc (loc, type, rslt, arg0);
12878 }
12879
12880 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12881 Similarly for NE_EXPR. */
12882 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12883 && TREE_CODE (arg1) == INTEGER_CST
12884 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12885 {
12886 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12887 tree candnotd
12888 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12889 TREE_OPERAND (arg0, 1),
12890 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12891 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12892 if (integer_nonzerop (candnotd))
12893 return omit_one_operand_loc (loc, type, rslt, arg0);
12894 }
12895
12896 /* If this is a comparison of a field, we may be able to simplify it. */
12897 if ((TREE_CODE (arg0) == COMPONENT_REF
12898 || TREE_CODE (arg0) == BIT_FIELD_REF)
12899 /* Handle the constant case even without -O
12900 to make sure the warnings are given. */
12901 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12902 {
12903 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12904 if (t1)
12905 return t1;
12906 }
12907
12908 /* Optimize comparisons of strlen vs zero to a compare of the
12909 first character of the string vs zero. To wit,
12910 strlen(ptr) == 0 => *ptr == 0
12911 strlen(ptr) != 0 => *ptr != 0
12912 Other cases should reduce to one of these two (or a constant)
12913 due to the return value of strlen being unsigned. */
12914 if (TREE_CODE (arg0) == CALL_EXPR
12915 && integer_zerop (arg1))
12916 {
12917 tree fndecl = get_callee_fndecl (arg0);
12918
12919 if (fndecl
12920 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12921 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12922 && call_expr_nargs (arg0) == 1
12923 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12924 {
12925 tree iref = build_fold_indirect_ref_loc (loc,
12926 CALL_EXPR_ARG (arg0, 0));
12927 return fold_build2_loc (loc, code, type, iref,
12928 build_int_cst (TREE_TYPE (iref), 0));
12929 }
12930 }
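	  /* Editor's illustration (not part of the original source):

	       #include <string.h>
	       int f (const char *s) { return strlen (s) == 0; }

	     folds to *s == 0, replacing the call with a one-byte load.  */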
12931
12932 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12933 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12934 if (TREE_CODE (arg0) == RSHIFT_EXPR
12935 && integer_zerop (arg1)
12936 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12937 {
12938 tree arg00 = TREE_OPERAND (arg0, 0);
12939 tree arg01 = TREE_OPERAND (arg0, 1);
12940 tree itype = TREE_TYPE (arg00);
12941 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
12942 {
12943 if (TYPE_UNSIGNED (itype))
12944 {
12945 itype = signed_type_for (itype);
12946 arg00 = fold_convert_loc (loc, itype, arg00);
12947 }
12948 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12949 type, arg00, build_zero_cst (itype));
12950 }
12951 }
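      /* Editor's illustration (not part of the original source): for a
	 32-bit int,

	   int f (int x) { return (x >> 31) != 0; }

	 folds to x < 0, and the == 0 form to x >= 0.  */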
12952
12953 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12954 if (integer_zerop (arg1)
12955 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12956 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12957 TREE_OPERAND (arg0, 1));
12958
12959 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12960 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12961 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12962 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12963 build_zero_cst (TREE_TYPE (arg0)));
12964 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12965 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12966 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12967 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12968 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12969 build_zero_cst (TREE_TYPE (arg0)));
12970
12971 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12972 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12973 && TREE_CODE (arg1) == INTEGER_CST
12974 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12975 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12976 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12977 TREE_OPERAND (arg0, 1), arg1));
12978
12979 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12980 (X & C) == 0 when C is a single bit. */
12981 if (TREE_CODE (arg0) == BIT_AND_EXPR
12982 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12983 && integer_zerop (arg1)
12984 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12985 {
12986 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12987 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12988 TREE_OPERAND (arg0, 1));
12989 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12990 type, tem,
12991 fold_convert_loc (loc, TREE_TYPE (arg0),
12992 arg1));
12993 }
12994
12995 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12996 constant C is a power of two, i.e. a single bit. */
12997 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12998 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12999 && integer_zerop (arg1)
13000 && integer_pow2p (TREE_OPERAND (arg0, 1))
13001 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13002 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13003 {
13004 tree arg00 = TREE_OPERAND (arg0, 0);
13005 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13006 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13007 }
13008
13009 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13010	 when C is a power of two, i.e. a single bit.  */
13011 if (TREE_CODE (arg0) == BIT_AND_EXPR
13012 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13013 && integer_zerop (arg1)
13014 && integer_pow2p (TREE_OPERAND (arg0, 1))
13015 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13016 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13017 {
13018 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13019 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13020 arg000, TREE_OPERAND (arg0, 1));
13021 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13022 tem, build_int_cst (TREE_TYPE (tem), 0));
13023 }
13024
13025 if (integer_zerop (arg1)
13026 && tree_expr_nonzero_p (arg0))
13027 {
13028	  tree res = constant_boolean_node (code == NE_EXPR, type);
13029 return omit_one_operand_loc (loc, type, res, arg0);
13030 }
13031
13032 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13033 if (TREE_CODE (arg0) == NEGATE_EXPR
13034 && TREE_CODE (arg1) == NEGATE_EXPR)
13035 return fold_build2_loc (loc, code, type,
13036 TREE_OPERAND (arg0, 0),
13037 fold_convert_loc (loc, TREE_TYPE (arg0),
13038 TREE_OPERAND (arg1, 0)));
13039
13040       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
13041 if (TREE_CODE (arg0) == BIT_AND_EXPR
13042 && TREE_CODE (arg1) == BIT_AND_EXPR)
13043 {
13044 tree arg00 = TREE_OPERAND (arg0, 0);
13045 tree arg01 = TREE_OPERAND (arg0, 1);
13046 tree arg10 = TREE_OPERAND (arg1, 0);
13047 tree arg11 = TREE_OPERAND (arg1, 1);
13048 tree itype = TREE_TYPE (arg0);
13049
13050 if (operand_equal_p (arg01, arg11, 0))
13051 return fold_build2_loc (loc, code, type,
13052 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13053 fold_build2_loc (loc,
13054 BIT_XOR_EXPR, itype,
13055 arg00, arg10),
13056 arg01),
13057 build_zero_cst (itype));
13058
13059 if (operand_equal_p (arg01, arg10, 0))
13060 return fold_build2_loc (loc, code, type,
13061 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13062 fold_build2_loc (loc,
13063 BIT_XOR_EXPR, itype,
13064 arg00, arg11),
13065 arg01),
13066 build_zero_cst (itype));
13067
13068 if (operand_equal_p (arg00, arg11, 0))
13069 return fold_build2_loc (loc, code, type,
13070 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13071 fold_build2_loc (loc,
13072 BIT_XOR_EXPR, itype,
13073 arg01, arg10),
13074 arg00),
13075 build_zero_cst (itype));
13076
13077 if (operand_equal_p (arg00, arg10, 0))
13078 return fold_build2_loc (loc, code, type,
13079 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13080 fold_build2_loc (loc,
13081 BIT_XOR_EXPR, itype,
13082 arg01, arg11),
13083 arg00),
13084 build_zero_cst (itype));
13085 }
13086
13087 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13088 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13089 {
13090 tree arg00 = TREE_OPERAND (arg0, 0);
13091 tree arg01 = TREE_OPERAND (arg0, 1);
13092 tree arg10 = TREE_OPERAND (arg1, 0);
13093 tree arg11 = TREE_OPERAND (arg1, 1);
13094 tree itype = TREE_TYPE (arg0);
13095
13096 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13097 operand_equal_p guarantees no side-effects so we don't need
13098 to use omit_one_operand on Z. */
13099 if (operand_equal_p (arg01, arg11, 0))
13100 return fold_build2_loc (loc, code, type, arg00,
13101 fold_convert_loc (loc, TREE_TYPE (arg00),
13102 arg10));
13103 if (operand_equal_p (arg01, arg10, 0))
13104 return fold_build2_loc (loc, code, type, arg00,
13105 fold_convert_loc (loc, TREE_TYPE (arg00),
13106 arg11));
13107 if (operand_equal_p (arg00, arg11, 0))
13108 return fold_build2_loc (loc, code, type, arg01,
13109 fold_convert_loc (loc, TREE_TYPE (arg01),
13110 arg10));
13111 if (operand_equal_p (arg00, arg10, 0))
13112 return fold_build2_loc (loc, code, type, arg01,
13113 fold_convert_loc (loc, TREE_TYPE (arg01),
13114 arg11));
13115
13116 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13117 if (TREE_CODE (arg01) == INTEGER_CST
13118 && TREE_CODE (arg11) == INTEGER_CST)
13119 {
13120 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13121 fold_convert_loc (loc, itype, arg11));
13122 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13123 return fold_build2_loc (loc, code, type, tem,
13124 fold_convert_loc (loc, itype, arg10));
13125 }
13126 }
13127
13128 /* Attempt to simplify equality/inequality comparisons of complex
13129 values. Only lower the comparison if the result is known or
13130 can be simplified to a single scalar comparison. */
13131 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13132 || TREE_CODE (arg0) == COMPLEX_CST)
13133 && (TREE_CODE (arg1) == COMPLEX_EXPR
13134 || TREE_CODE (arg1) == COMPLEX_CST))
13135 {
13136 tree real0, imag0, real1, imag1;
13137 tree rcond, icond;
13138
13139 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13140 {
13141 real0 = TREE_OPERAND (arg0, 0);
13142 imag0 = TREE_OPERAND (arg0, 1);
13143 }
13144 else
13145 {
13146 real0 = TREE_REALPART (arg0);
13147 imag0 = TREE_IMAGPART (arg0);
13148 }
13149
13150 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13151 {
13152 real1 = TREE_OPERAND (arg1, 0);
13153 imag1 = TREE_OPERAND (arg1, 1);
13154 }
13155 else
13156 {
13157 real1 = TREE_REALPART (arg1);
13158 imag1 = TREE_IMAGPART (arg1);
13159 }
13160
13161 rcond = fold_binary_loc (loc, code, type, real0, real1);
13162 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13163 {
13164 if (integer_zerop (rcond))
13165 {
13166 if (code == EQ_EXPR)
13167 return omit_two_operands_loc (loc, type, boolean_false_node,
13168 imag0, imag1);
13169 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13170 }
13171 else
13172 {
13173 if (code == NE_EXPR)
13174 return omit_two_operands_loc (loc, type, boolean_true_node,
13175 imag0, imag1);
13176 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13177 }
13178 }
13179
13180 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13181 if (icond && TREE_CODE (icond) == INTEGER_CST)
13182 {
13183 if (integer_zerop (icond))
13184 {
13185 if (code == EQ_EXPR)
13186 return omit_two_operands_loc (loc, type, boolean_false_node,
13187 real0, real1);
13188 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13189 }
13190 else
13191 {
13192 if (code == NE_EXPR)
13193 return omit_two_operands_loc (loc, type, boolean_true_node,
13194 real0, real1);
13195 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13196 }
13197 }
13198 }
13199
13200 return NULL_TREE;
13201
13202 case LT_EXPR:
13203 case GT_EXPR:
13204 case LE_EXPR:
13205 case GE_EXPR:
13206 tem = fold_comparison (loc, code, type, op0, op1);
13207 if (tem != NULL_TREE)
13208 return tem;
13209
13210 /* Transform comparisons of the form X +- C CMP X. */
13211 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13212 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13213 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13214 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13215 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13216 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13217 {
13218 tree arg01 = TREE_OPERAND (arg0, 1);
13219 enum tree_code code0 = TREE_CODE (arg0);
13220 int is_positive;
13221
13222 if (TREE_CODE (arg01) == REAL_CST)
13223 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13224 else
13225 is_positive = tree_int_cst_sgn (arg01);
13226
13227 /* (X - c) > X becomes false. */
13228 if (code == GT_EXPR
13229 && ((code0 == MINUS_EXPR && is_positive >= 0)
13230 || (code0 == PLUS_EXPR && is_positive <= 0)))
13231 {
13232 if (TREE_CODE (arg01) == INTEGER_CST
13233 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13234 fold_overflow_warning (("assuming signed overflow does not "
13235 "occur when assuming that (X - c) > X "
13236 "is always false"),
13237 WARN_STRICT_OVERFLOW_ALL);
13238 return constant_boolean_node (0, type);
13239 }
13240
13241 /* Likewise (X + c) < X becomes false. */
13242 if (code == LT_EXPR
13243 && ((code0 == PLUS_EXPR && is_positive >= 0)
13244 || (code0 == MINUS_EXPR && is_positive <= 0)))
13245 {
13246 if (TREE_CODE (arg01) == INTEGER_CST
13247 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13248 fold_overflow_warning (("assuming signed overflow does not "
13249 "occur when assuming that "
13250 "(X + c) < X is always false"),
13251 WARN_STRICT_OVERFLOW_ALL);
13252 return constant_boolean_node (0, type);
13253 }
13254
13255 /* Convert (X - c) <= X to true. */
13256 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13257 && code == LE_EXPR
13258 && ((code0 == MINUS_EXPR && is_positive >= 0)
13259 || (code0 == PLUS_EXPR && is_positive <= 0)))
13260 {
13261 if (TREE_CODE (arg01) == INTEGER_CST
13262 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13263 fold_overflow_warning (("assuming signed overflow does not "
13264 "occur when assuming that "
13265 "(X - c) <= X is always true"),
13266 WARN_STRICT_OVERFLOW_ALL);
13267 return constant_boolean_node (1, type);
13268 }
13269
13270 /* Convert (X + c) >= X to true. */
13271 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13272 && code == GE_EXPR
13273 && ((code0 == PLUS_EXPR && is_positive >= 0)
13274 || (code0 == MINUS_EXPR && is_positive <= 0)))
13275 {
13276 if (TREE_CODE (arg01) == INTEGER_CST
13277 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13278 fold_overflow_warning (("assuming signed overflow does not "
13279 "occur when assuming that "
13280 "(X + c) >= X is always true"),
13281 WARN_STRICT_OVERFLOW_ALL);
13282 return constant_boolean_node (1, type);
13283 }
13284
13285 if (TREE_CODE (arg01) == INTEGER_CST)
13286 {
13287 /* Convert X + c > X and X - c < X to true for integers. */
13288 if (code == GT_EXPR
13289 && ((code0 == PLUS_EXPR && is_positive > 0)
13290 || (code0 == MINUS_EXPR && is_positive < 0)))
13291 {
13292 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13293 fold_overflow_warning (("assuming signed overflow does "
13294 "not occur when assuming that "
13295 "(X + c) > X is always true"),
13296 WARN_STRICT_OVERFLOW_ALL);
13297 return constant_boolean_node (1, type);
13298 }
13299
13300 if (code == LT_EXPR
13301 && ((code0 == MINUS_EXPR && is_positive > 0)
13302 || (code0 == PLUS_EXPR && is_positive < 0)))
13303 {
13304 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13305 fold_overflow_warning (("assuming signed overflow does "
13306 "not occur when assuming that "
13307 "(X - c) < X is always true"),
13308 WARN_STRICT_OVERFLOW_ALL);
13309 return constant_boolean_node (1, type);
13310 }
13311
13312 /* Convert X + c <= X and X - c >= X to false for integers. */
13313 if (code == LE_EXPR
13314 && ((code0 == PLUS_EXPR && is_positive > 0)
13315 || (code0 == MINUS_EXPR && is_positive < 0)))
13316 {
13317 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13318 fold_overflow_warning (("assuming signed overflow does "
13319 "not occur when assuming that "
13320 "(X + c) <= X is always false"),
13321 WARN_STRICT_OVERFLOW_ALL);
13322 return constant_boolean_node (0, type);
13323 }
13324
13325 if (code == GE_EXPR
13326 && ((code0 == MINUS_EXPR && is_positive > 0)
13327 || (code0 == PLUS_EXPR && is_positive < 0)))
13328 {
13329 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13330 fold_overflow_warning (("assuming signed overflow does "
13331 "not occur when assuming that "
13332 "(X - c) >= X is always false"),
13333 WARN_STRICT_OVERFLOW_ALL);
13334 return constant_boolean_node (0, type);
13335 }
13336 }
13337 }
13338
13339 /* Comparisons with the highest or lowest possible integer of
13340 the specified precision will have known values. */
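	/* For instance, if X has type unsigned char (precision 8):
	     X > 255   folds to false,       X <= 255  folds to true,
	     X >= 255  folds to X == 255,    X < 255   folds to X != 255,
	   and symmetrically for comparisons against the minimum value.  */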
13341 {
13342 tree arg1_type = TREE_TYPE (arg1);
13343 unsigned int prec = TYPE_PRECISION (arg1_type);
13344
13345 if (TREE_CODE (arg1) == INTEGER_CST
13346 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13347 {
13348 wide_int max = wi::max_value (arg1_type);
13349 wide_int signed_max = wi::max_value (prec, SIGNED);
13350 wide_int min = wi::min_value (arg1_type);
13351
13352 if (wi::eq_p (arg1, max))
13353 switch (code)
13354 {
13355 case GT_EXPR:
13356 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13357
13358 case GE_EXPR:
13359 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13360
13361 case LE_EXPR:
13362 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13363
13364 case LT_EXPR:
13365 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13366
13367 /* The GE_EXPR and LT_EXPR cases above are not normally
13368 reached because of previous transformations. */
13369
13370 default:
13371 break;
13372 }
13373 else if (wi::eq_p (arg1, max - 1))
13374 switch (code)
13375 {
13376 case GT_EXPR:
13377 arg1 = const_binop (PLUS_EXPR, arg1,
13378 build_int_cst (TREE_TYPE (arg1), 1));
13379 return fold_build2_loc (loc, EQ_EXPR, type,
13380 fold_convert_loc (loc,
13381 TREE_TYPE (arg1), arg0),
13382 arg1);
13383 case LE_EXPR:
13384 arg1 = const_binop (PLUS_EXPR, arg1,
13385 build_int_cst (TREE_TYPE (arg1), 1));
13386 return fold_build2_loc (loc, NE_EXPR, type,
13387 fold_convert_loc (loc, TREE_TYPE (arg1),
13388 arg0),
13389 arg1);
13390 default:
13391 break;
13392 }
13393 else if (wi::eq_p (arg1, min))
13394 switch (code)
13395 {
13396 case LT_EXPR:
13397 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13398
13399 case LE_EXPR:
13400 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13401
13402 case GE_EXPR:
13403 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13404
13405 case GT_EXPR:
13406 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13407
13408 default:
13409 break;
13410 }
13411 else if (wi::eq_p (arg1, min + 1))
13412 switch (code)
13413 {
13414 case GE_EXPR:
13415 arg1 = const_binop (MINUS_EXPR, arg1,
13416 build_int_cst (TREE_TYPE (arg1), 1));
13417 return fold_build2_loc (loc, NE_EXPR, type,
13418 fold_convert_loc (loc,
13419 TREE_TYPE (arg1), arg0),
13420 arg1);
13421 case LT_EXPR:
13422 arg1 = const_binop (MINUS_EXPR, arg1,
13423 build_int_cst (TREE_TYPE (arg1), 1));
13424 return fold_build2_loc (loc, EQ_EXPR, type,
13425 fold_convert_loc (loc, TREE_TYPE (arg1),
13426 arg0),
13427 arg1);
13428 default:
13429 break;
13430 }
13431
13432 else if (wi::eq_p (arg1, signed_max)
13433 && TYPE_UNSIGNED (arg1_type)
13434 /* We will flip the signedness of the comparison operator
13435 associated with the mode of arg1, so the sign bit is
13436 specified by this mode. Check that arg1 is the signed
13437 max associated with this sign bit. */
13438 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13439 /* signed_type does not work on pointer types. */
13440 && INTEGRAL_TYPE_P (arg1_type))
13441 {
13442 /* The following case also applies to X < signed_max+1
13443 and X >= signed_max+1 because of previous transformations. */
13444 if (code == LE_EXPR || code == GT_EXPR)
13445 {
13446 tree st = signed_type_for (arg1_type);
13447 return fold_build2_loc (loc,
13448 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13449 type, fold_convert_loc (loc, st, arg0),
13450 build_int_cst (st, 0));
13451 }
13452 }
13453 }
13454 }
13455
13456 /* If we are comparing an ABS_EXPR with a constant, we can
13457 convert all the cases into explicit comparisons, but they may
13458 well not be faster than doing the ABS and one comparison.
13459 But ABS (X) <= C is a range comparison, which becomes a subtraction
13460 and a comparison, and is probably faster. */
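	  /* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5, built below as
	     a TRUTH_ANDIF_EXPR of the two comparisons.  */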
13461 if (code == LE_EXPR
13462 && TREE_CODE (arg1) == INTEGER_CST
13463 && TREE_CODE (arg0) == ABS_EXPR
13464 && ! TREE_SIDE_EFFECTS (arg0)
13465 && (0 != (tem = negate_expr (arg1)))
13466 && TREE_CODE (tem) == INTEGER_CST
13467 && !TREE_OVERFLOW (tem))
13468 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13469 build2 (GE_EXPR, type,
13470 TREE_OPERAND (arg0, 0), tem),
13471 build2 (LE_EXPR, type,
13472 TREE_OPERAND (arg0, 0), arg1));
13473
13474 /* Convert ABS_EXPR<x> >= 0 to true. */
13475 strict_overflow_p = false;
13476 if (code == GE_EXPR
13477 && (integer_zerop (arg1)
13478 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13479 && real_zerop (arg1)))
13480 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13481 {
13482 if (strict_overflow_p)
13483 fold_overflow_warning (("assuming signed overflow does not occur "
13484 "when simplifying comparison of "
13485 "absolute value and zero"),
13486 WARN_STRICT_OVERFLOW_CONDITIONAL);
13487 return omit_one_operand_loc (loc, type,
13488 constant_boolean_node (true, type),
13489 arg0);
13490 }
13491
13492 /* Convert ABS_EXPR<x> < 0 to false. */
13493 strict_overflow_p = false;
13494 if (code == LT_EXPR
13495 && (integer_zerop (arg1) || real_zerop (arg1))
13496 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13497 {
13498 if (strict_overflow_p)
13499 fold_overflow_warning (("assuming signed overflow does not occur "
13500 "when simplifying comparison of "
13501 "absolute value and zero"),
13502 WARN_STRICT_OVERFLOW_CONDITIONAL);
13503 return omit_one_operand_loc (loc, type,
13504 constant_boolean_node (false, type),
13505 arg0);
13506 }
13507
13508 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13509 and similarly for >= into !=. */
13510 if ((code == LT_EXPR || code == GE_EXPR)
13511 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13512 && TREE_CODE (arg1) == LSHIFT_EXPR
13513 && integer_onep (TREE_OPERAND (arg1, 0)))
13514 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13515 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13516 TREE_OPERAND (arg1, 1)),
13517 build_zero_cst (TREE_TYPE (arg0)));
13518
13519 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13520 otherwise Y might be >= # of bits in X's type and thus e.g.
13521 (unsigned char) (1 << Y) for Y == 15 might be 0.
13522 If the cast is widening, then 1 << Y should have unsigned type,
13523 otherwise if Y is the number of bits in the signed shift type minus 1,
13524 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13525 == 31 might be 0xffffffff80000000. */
13526 if ((code == LT_EXPR || code == GE_EXPR)
13527 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13528 && CONVERT_EXPR_P (arg1)
13529 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13530 && (TYPE_PRECISION (TREE_TYPE (arg1))
13531 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13532 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13533 || (TYPE_PRECISION (TREE_TYPE (arg1))
13534 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13535 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13536 {
13537 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13538 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13539 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13540 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13541 build_zero_cst (TREE_TYPE (arg0)));
13542 }
13543
13544 return NULL_TREE;
13545
13546 case UNORDERED_EXPR:
13547 case ORDERED_EXPR:
13548 case UNLT_EXPR:
13549 case UNLE_EXPR:
13550 case UNGT_EXPR:
13551 case UNGE_EXPR:
13552 case UNEQ_EXPR:
13553 case LTGT_EXPR:
13554 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13555 {
13556 t1 = fold_relational_const (code, type, arg0, arg1);
13557 if (t1 != NULL_TREE)
13558 return t1;
13559 }
13560
13561 /* If the first operand is NaN, the result is constant. */
13562 if (TREE_CODE (arg0) == REAL_CST
13563 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13564 && (code != LTGT_EXPR || ! flag_trapping_math))
13565 {
13566 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13567 ? integer_zero_node
13568 : integer_one_node;
13569 return omit_one_operand_loc (loc, type, t1, arg1);
13570 }
13571
13572 /* If the second operand is NaN, the result is constant. */
13573 if (TREE_CODE (arg1) == REAL_CST
13574 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13575 && (code != LTGT_EXPR || ! flag_trapping_math))
13576 {
13577 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13578 ? integer_zero_node
13579 : integer_one_node;
13580 return omit_one_operand_loc (loc, type, t1, arg0);
13581 }
13582
13583 /* Simplify unordered comparison of something with itself. */
13584 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13585 && operand_equal_p (arg0, arg1, 0))
13586 return constant_boolean_node (1, type);
13587
13588 if (code == LTGT_EXPR
13589 && !flag_trapping_math
13590 && operand_equal_p (arg0, arg1, 0))
13591 return constant_boolean_node (0, type);
13592
13593 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13594 {
13595 tree targ0 = strip_float_extensions (arg0);
13596 tree targ1 = strip_float_extensions (arg1);
13597 tree newtype = TREE_TYPE (targ0);
13598
13599 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13600 newtype = TREE_TYPE (targ1);
13601
13602 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13603 return fold_build2_loc (loc, code, type,
13604 fold_convert_loc (loc, newtype, targ0),
13605 fold_convert_loc (loc, newtype, targ1));
13606 }
13607
13608 return NULL_TREE;
13609
13610 case COMPOUND_EXPR:
13611 /* When pedantic, a compound expression can be neither an lvalue
13612 nor an integer constant expression. */
13613 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13614 return NULL_TREE;
13615 /* Don't let (0, 0) be a null pointer constant. */
13616 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13617 : fold_convert_loc (loc, type, arg1);
13618 return pedantic_non_lvalue_loc (loc, tem);
13619
13620 case COMPLEX_EXPR:
13621 if ((TREE_CODE (arg0) == REAL_CST
13622 && TREE_CODE (arg1) == REAL_CST)
13623 || (TREE_CODE (arg0) == INTEGER_CST
13624 && TREE_CODE (arg1) == INTEGER_CST))
13625 return build_complex (type, arg0, arg1);
13626 if (TREE_CODE (arg0) == REALPART_EXPR
13627 && TREE_CODE (arg1) == IMAGPART_EXPR
13628 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13629 && operand_equal_p (TREE_OPERAND (arg0, 0),
13630 TREE_OPERAND (arg1, 0), 0))
13631 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13632 TREE_OPERAND (arg1, 0));
13633 return NULL_TREE;
13634
13635 case ASSERT_EXPR:
13636 /* An ASSERT_EXPR should never be passed to fold_binary. */
13637 gcc_unreachable ();
13638
13639 case VEC_PACK_TRUNC_EXPR:
13640 case VEC_PACK_FIX_TRUNC_EXPR:
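	/* E.g. packing the two V2SI constants { 1, 2 } and { 3, 4 }
	   yields the V4HI constant { 1, 2, 3, 4 }, with each element
	   converted (truncated) to the narrower element type.  */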
13641 {
13642 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13643 tree *elts;
13644
13645 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13646 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13647 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13648 return NULL_TREE;
13649
13650 elts = XALLOCAVEC (tree, nelts);
13651 if (!vec_cst_ctor_to_array (arg0, elts)
13652 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13653 return NULL_TREE;
13654
13655 for (i = 0; i < nelts; i++)
13656 {
13657 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13658 ? NOP_EXPR : FIX_TRUNC_EXPR,
13659 TREE_TYPE (type), elts[i]);
13660 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13661 return NULL_TREE;
13662 }
13663
13664 return build_vector (type, elts);
13665 }
13666
13667 case VEC_WIDEN_MULT_LO_EXPR:
13668 case VEC_WIDEN_MULT_HI_EXPR:
13669 case VEC_WIDEN_MULT_EVEN_EXPR:
13670 case VEC_WIDEN_MULT_ODD_EXPR:
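	/* E.g. for the V4HI constants { 1, 2, 3, 4 } and { 5, 6, 7, 8 },
	   VEC_WIDEN_MULT_EVEN_EXPR yields the V2SI constant { 1*5, 3*7 }
	   and VEC_WIDEN_MULT_ODD_EXPR yields { 2*6, 4*8 }; the LO/HI
	   variants multiply the low or high half of the inputs instead.  */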
13671 {
13672 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13673 unsigned int out, ofs, scale;
13674 tree *elts;
13675
13676 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13677 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13678 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13679 return NULL_TREE;
13680
13681 elts = XALLOCAVEC (tree, nelts * 4);
13682 if (!vec_cst_ctor_to_array (arg0, elts)
13683 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13684 return NULL_TREE;
13685
13686 if (code == VEC_WIDEN_MULT_LO_EXPR)
13687 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13688 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13689 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13690 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13691 scale = 1, ofs = 0;
13692 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13693 scale = 1, ofs = 1;
13694
13695 for (out = 0; out < nelts; out++)
13696 {
13697 unsigned int in1 = (out << scale) + ofs;
13698 unsigned int in2 = in1 + nelts * 2;
13699 tree t1, t2;
13700
13701 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13702 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13703
13704 if (t1 == NULL_TREE || t2 == NULL_TREE)
13705 return NULL_TREE;
13706 elts[out] = const_binop (MULT_EXPR, t1, t2);
13707 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13708 return NULL_TREE;
13709 }
13710
13711 return build_vector (type, elts);
13712 }
13713
13714 default:
13715 return NULL_TREE;
13716 } /* switch (code) */
13717 }
13718
13719 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13720 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13721 of GOTO_EXPR. */
13722
13723 static tree
13724 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13725 {
13726 switch (TREE_CODE (*tp))
13727 {
13728 case LABEL_EXPR:
13729 return *tp;
13730
13731 case GOTO_EXPR:
13732 *walk_subtrees = 0;
13733
13734 /* ... fall through ... */
13735
13736 default:
13737 return NULL_TREE;
13738 }
13739 }
13740
13741 /* Return whether the sub-tree ST contains a label which is accessible from
13742 outside the sub-tree. */
13743
13744 static bool
13745 contains_label_p (tree st)
13746 {
13747 return
13748 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13749 }
13750
13751 /* Fold a ternary expression of code CODE and type TYPE with operands
13752 OP0, OP1, and OP2. Return the folded expression if folding is
13753 successful. Otherwise, return NULL_TREE. */
13754
13755 tree
13756 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13757 tree op0, tree op1, tree op2)
13758 {
13759 tree tem;
13760 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13761 enum tree_code_class kind = TREE_CODE_CLASS (code);
13762
13763 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13764 && TREE_CODE_LENGTH (code) == 3);
13765
13766 /* If this is a commutative operation, and OP0 is a constant, move it
13767 to OP1 to reduce the number of tests below. */
13768 if (commutative_ternary_tree_code (code)
13769 && tree_swap_operands_p (op0, op1, true))
13770 return fold_build3_loc (loc, code, type, op1, op0, op2);
13771
13772 tem = generic_simplify (loc, code, type, op0, op1, op2);
13773 if (tem)
13774 return tem;
13775
13776 /* Strip any conversions that don't change the mode. This is safe
13777 for every expression, except for a comparison expression because
13778 its signedness is derived from its operands. So, in the latter
13779 case, only strip conversions that don't change the signedness.
13780
13781 Note that this is done as an internal manipulation within the
13782 constant folder, in order to find the simplest representation of
13783 the arguments so that their form can be studied. In any case,
13784 the appropriate type conversions should be put back in the tree
13785 that will get out of the constant folder. */
13786 if (op0)
13787 {
13788 arg0 = op0;
13789 STRIP_NOPS (arg0);
13790 }
13791
13792 if (op1)
13793 {
13794 arg1 = op1;
13795 STRIP_NOPS (arg1);
13796 }
13797
13798 if (op2)
13799 {
13800 arg2 = op2;
13801 STRIP_NOPS (arg2);
13802 }
13803
13804 switch (code)
13805 {
13806 case COMPONENT_REF:
13807 if (TREE_CODE (arg0) == CONSTRUCTOR
13808 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13809 {
13810 unsigned HOST_WIDE_INT idx;
13811 tree field, value;
13812 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13813 if (field == arg1)
13814 return value;
13815 }
13816 return NULL_TREE;
13817
13818 case COND_EXPR:
13819 case VEC_COND_EXPR:
13820 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13821 so all simple results must be passed through pedantic_non_lvalue. */
13822 if (TREE_CODE (arg0) == INTEGER_CST)
13823 {
13824 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13825 tem = integer_zerop (arg0) ? op2 : op1;
13826 /* Only optimize constant conditions when the selected branch
13827 has the same type as the COND_EXPR. This avoids optimizing
13828 away "c ? x : throw", where the throw has a void type.
13829 Avoid throwing away an operand that contains a label. */
13830 if ((!TREE_SIDE_EFFECTS (unused_op)
13831 || !contains_label_p (unused_op))
13832 && (! VOID_TYPE_P (TREE_TYPE (tem))
13833 || VOID_TYPE_P (type)))
13834 return pedantic_non_lvalue_loc (loc, tem);
13835 return NULL_TREE;
13836 }
13837 else if (TREE_CODE (arg0) == VECTOR_CST)
13838 {
13839 if (integer_all_onesp (arg0))
13840 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13841 if (integer_zerop (arg0))
13842 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13843
13844 if ((TREE_CODE (arg1) == VECTOR_CST
13845 || TREE_CODE (arg1) == CONSTRUCTOR)
13846 && (TREE_CODE (arg2) == VECTOR_CST
13847 || TREE_CODE (arg2) == CONSTRUCTOR))
13848 {
13849 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13850 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13851 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13852 for (i = 0; i < nelts; i++)
13853 {
13854 tree val = VECTOR_CST_ELT (arg0, i);
13855 if (integer_all_onesp (val))
13856 sel[i] = i;
13857 else if (integer_zerop (val))
13858 sel[i] = nelts + i;
13859 else /* Currently unreachable. */
13860 return NULL_TREE;
13861 }
13862 tree t = fold_vec_perm (type, arg1, arg2, sel);
13863 if (t != NULL_TREE)
13864 return t;
13865 }
13866 }
13867
13868 if (operand_equal_p (arg1, op2, 0))
13869 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13870
13871 /* If we have A op B ? A : C, we may be able to convert this to a
13872 simpler expression, depending on the operation and the values
13873 of B and C. Signed zeros prevent all of these transformations,
13874 for reasons given above each one.
13875
13876 Also try swapping the arguments and inverting the conditional. */
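      /* For instance, A > B ? A : B can become MAX_EXPR <A, B>, and
	 A < 0 ? -A : A can become ABS_EXPR <A>; see
	 fold_cond_expr_with_comparison for the full set of cases.  */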
13877 if (COMPARISON_CLASS_P (arg0)
13878 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13879 arg1, TREE_OPERAND (arg0, 1))
13880 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13881 {
13882 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13883 if (tem)
13884 return tem;
13885 }
13886
13887 if (COMPARISON_CLASS_P (arg0)
13888 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13889 op2,
13890 TREE_OPERAND (arg0, 1))
13891 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13892 {
13893 location_t loc0 = expr_location_or (arg0, loc);
13894 tem = fold_invert_truthvalue (loc0, arg0);
13895 if (tem && COMPARISON_CLASS_P (tem))
13896 {
13897 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13898 if (tem)
13899 return tem;
13900 }
13901 }
13902
13903 /* If the second operand is simpler than the third, swap them
13904 since that produces better jump optimization results. */
13905 if (truth_value_p (TREE_CODE (arg0))
13906 && tree_swap_operands_p (op1, op2, false))
13907 {
13908 location_t loc0 = expr_location_or (arg0, loc);
13909 /* See if this can be inverted. If it can't, possibly because
13910 it was a floating-point inequality comparison, don't do
13911 anything. */
13912 tem = fold_invert_truthvalue (loc0, arg0);
13913 if (tem)
13914 return fold_build3_loc (loc, code, type, tem, op2, op1);
13915 }
13916
13917 /* Convert A ? 1 : 0 to simply A. */
13918 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13919 : (integer_onep (op1)
13920 && !VECTOR_TYPE_P (type)))
13921 && integer_zerop (op2)
13922 /* If we try to convert OP0 to our type, the
13923 call to fold will try to move the conversion inside
13924 a COND, which will recurse. In that case, the COND_EXPR
13925 is probably the best choice, so leave it alone. */
13926 && type == TREE_TYPE (arg0))
13927 return pedantic_non_lvalue_loc (loc, arg0);
13928
13929 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13930 over COND_EXPR in cases such as floating point comparisons. */
13931 if (integer_zerop (op1)
13932 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13933 : (integer_onep (op2)
13934 && !VECTOR_TYPE_P (type)))
13935 && truth_value_p (TREE_CODE (arg0)))
13936 return pedantic_non_lvalue_loc (loc,
13937 fold_convert_loc (loc, type,
13938 invert_truthvalue_loc (loc,
13939 arg0)));
13940
13941 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13942 if (TREE_CODE (arg0) == LT_EXPR
13943 && integer_zerop (TREE_OPERAND (arg0, 1))
13944 && integer_zerop (op2)
13945 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13946 {
13947 /* sign_bit_p looks through both zero and sign extensions,
13948 but for this optimization only sign extensions are
13949 usable. */
13950 tree tem2 = TREE_OPERAND (arg0, 0);
13951 while (tem != tem2)
13952 {
13953 if (TREE_CODE (tem2) != NOP_EXPR
13954 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13955 {
13956 tem = NULL_TREE;
13957 break;
13958 }
13959 tem2 = TREE_OPERAND (tem2, 0);
13960 }
13961 /* sign_bit_p only checks ARG1 bits within A's precision.
13962 If <sign bit of A> has a wider type than A, bits outside
13963 of A's precision in <sign bit of A> need to be checked.
13964 If they are all 0, this optimization needs to be done
13965 in unsigned A's type; if they are all 1, in signed A's
13966 type; otherwise this transformation can't be done. */
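	      /* E.g. if A is a signed char and <sign bit of A> is the
		 int constant 0x80, the bits above bit 7 are all zero,
		 so the AND below must be computed in unsigned char;
		 for the sign-extended constant 0xffffff80 it must be
		 computed in signed char instead.  */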
13967 if (tem
13968 && TYPE_PRECISION (TREE_TYPE (tem))
13969 < TYPE_PRECISION (TREE_TYPE (arg1))
13970 && TYPE_PRECISION (TREE_TYPE (tem))
13971 < TYPE_PRECISION (type))
13972 {
13973 int inner_width, outer_width;
13974 tree tem_type;
13975
13976 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13977 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13978 if (outer_width > TYPE_PRECISION (type))
13979 outer_width = TYPE_PRECISION (type);
13980
13981 wide_int mask = wi::shifted_mask
13982 (inner_width, outer_width - inner_width, false,
13983 TYPE_PRECISION (TREE_TYPE (arg1)));
13984
13985 wide_int common = mask & arg1;
13986 if (common == mask)
13987 {
13988 tem_type = signed_type_for (TREE_TYPE (tem));
13989 tem = fold_convert_loc (loc, tem_type, tem);
13990 }
13991 else if (common == 0)
13992 {
13993 tem_type = unsigned_type_for (TREE_TYPE (tem));
13994 tem = fold_convert_loc (loc, tem_type, tem);
13995 }
13996 else
13997 tem = NULL;
13998 }
13999
14000 if (tem)
14001 return
14002 fold_convert_loc (loc, type,
14003 fold_build2_loc (loc, BIT_AND_EXPR,
14004 TREE_TYPE (tem), tem,
14005 fold_convert_loc (loc,
14006 TREE_TYPE (tem),
14007 arg1)));
14008 }
14009
14010 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14011 already handled above. */
14012 if (TREE_CODE (arg0) == BIT_AND_EXPR
14013 && integer_onep (TREE_OPERAND (arg0, 1))
14014 && integer_zerop (op2)
14015 && integer_pow2p (arg1))
14016 {
14017 tree tem = TREE_OPERAND (arg0, 0);
14018 STRIP_NOPS (tem);
14019 if (TREE_CODE (tem) == RSHIFT_EXPR
14020 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14021 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14022 tree_to_uhwi (TREE_OPERAND (tem, 1)))
14023 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14024 TREE_OPERAND (tem, 0), arg1);
14025 }
14026
14027 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14028 is probably obsolete because the first operand should be a
14029 truth value (that's why we have the two cases above), but let's
14030 leave it in until we can confirm this for all front-ends. */
14031 if (integer_zerop (op2)
14032 && TREE_CODE (arg0) == NE_EXPR
14033 && integer_zerop (TREE_OPERAND (arg0, 1))
14034 && integer_pow2p (arg1)
14035 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14036 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14037 arg1, OEP_ONLY_CONST))
14038 return pedantic_non_lvalue_loc (loc,
14039 fold_convert_loc (loc, type,
14040 TREE_OPERAND (arg0, 0)));
14041
14042 /* Disable the transformations below for vectors, since
14043 fold_binary_op_with_conditional_arg may undo them immediately,
14044 yielding an infinite loop. */
14045 if (code == VEC_COND_EXPR)
14046 return NULL_TREE;
14047
14048 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14049 if (integer_zerop (op2)
14050 && truth_value_p (TREE_CODE (arg0))
14051 && truth_value_p (TREE_CODE (arg1))
14052 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14053 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14054 : TRUTH_ANDIF_EXPR,
14055 type, fold_convert_loc (loc, type, arg0), arg1);
14056
14057 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14058 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14059 && truth_value_p (TREE_CODE (arg0))
14060 && truth_value_p (TREE_CODE (arg1))
14061 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14062 {
14063 location_t loc0 = expr_location_or (arg0, loc);
14064 /* Only perform transformation if ARG0 is easily inverted. */
14065 tem = fold_invert_truthvalue (loc0, arg0);
14066 if (tem)
14067 return fold_build2_loc (loc, code == VEC_COND_EXPR
14068 ? BIT_IOR_EXPR
14069 : TRUTH_ORIF_EXPR,
14070 type, fold_convert_loc (loc, type, tem),
14071 arg1);
14072 }
14073
14074 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14075 if (integer_zerop (arg1)
14076 && truth_value_p (TREE_CODE (arg0))
14077 && truth_value_p (TREE_CODE (op2))
14078 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14079 {
14080 location_t loc0 = expr_location_or (arg0, loc);
14081 /* Only perform transformation if ARG0 is easily inverted. */
14082 tem = fold_invert_truthvalue (loc0, arg0);
14083 if (tem)
14084 return fold_build2_loc (loc, code == VEC_COND_EXPR
14085 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14086 type, fold_convert_loc (loc, type, tem),
14087 op2);
14088 }
14089
14090 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14091 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14092 && truth_value_p (TREE_CODE (arg0))
14093 && truth_value_p (TREE_CODE (op2))
14094 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14095 return fold_build2_loc (loc, code == VEC_COND_EXPR
14096 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14097 type, fold_convert_loc (loc, type, arg0), op2);
14098
14099 return NULL_TREE;
14100
14101 case CALL_EXPR:
14102 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14103 of fold_ternary on them. */
14104 gcc_unreachable ();
14105
14106 case BIT_FIELD_REF:
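      /* E.g. BIT_FIELD_REF <V, 32, 64> on a V4SI constant V, where
	 op1 (the width in bits) and op2 (the bit offset) are multiples
	 of the 32-bit element size, extracts element 2 as a scalar.  */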
14107 if ((TREE_CODE (arg0) == VECTOR_CST
14108 || (TREE_CODE (arg0) == CONSTRUCTOR
14109 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14110 && (type == TREE_TYPE (TREE_TYPE (arg0))
14111 || (TREE_CODE (type) == VECTOR_TYPE
14112 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14113 {
14114 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14115 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14116 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14117 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14118
14119 if (n != 0
14120 && (idx % width) == 0
14121 && (n % width) == 0
14122 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14123 {
14124 idx = idx / width;
14125 n = n / width;
14126
14127 if (TREE_CODE (arg0) == VECTOR_CST)
14128 {
14129 if (n == 1)
14130 return VECTOR_CST_ELT (arg0, idx);
14131
14132 tree *vals = XALLOCAVEC (tree, n);
14133 for (unsigned i = 0; i < n; ++i)
14134 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14135 return build_vector (type, vals);
14136 }
14137
14138 /* Constructor elements can be subvectors. */
14139 unsigned HOST_WIDE_INT k = 1;
14140 if (CONSTRUCTOR_NELTS (arg0) != 0)
14141 {
14142 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14143 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14144 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14145 }
14146
14147 /* We keep an exact subset of the constructor elements. */
14148 if ((idx % k) == 0 && (n % k) == 0)
14149 {
14150 if (CONSTRUCTOR_NELTS (arg0) == 0)
14151 return build_constructor (type, NULL);
14152 idx /= k;
14153 n /= k;
14154 if (n == 1)
14155 {
14156 if (idx < CONSTRUCTOR_NELTS (arg0))
14157 return CONSTRUCTOR_ELT (arg0, idx)->value;
14158 return build_zero_cst (type);
14159 }
14160
14161 vec<constructor_elt, va_gc> *vals;
14162 vec_alloc (vals, n);
14163 for (unsigned i = 0;
14164 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14165 ++i)
14166 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14167 CONSTRUCTOR_ELT
14168 (arg0, idx + i)->value);
14169 return build_constructor (type, vals);
14170 }
14171 /* The bitfield references a single constructor element. */
14172 else if (idx + n <= (idx / k + 1) * k)
14173 {
14174 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14175 return build_zero_cst (type);
14176 else if (n == k)
14177 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14178 else
14179 return fold_build3_loc (loc, code, type,
14180 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14181 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14182 }
14183 }
14184 }
14185
14186 /* A bit-field-ref that referenced the full argument can be stripped. */
14187 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14188 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14189 && integer_zerop (op2))
14190 return fold_convert_loc (loc, type, arg0);
14191
14192 /* On constants we can use native encode/interpret to constant
14193 fold (nearly) all BIT_FIELD_REFs. */
14194 if (CONSTANT_CLASS_P (arg0)
14195 && can_native_interpret_type_p (type)
14196 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14197 /* This limitation should not be necessary; we just need to
14198 round this up to mode size. */
14199 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14200 /* Need bit-shifting of the buffer to relax the following. */
14201 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14202 {
14203 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14204 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14205 unsigned HOST_WIDE_INT clen;
14206 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14207 /* ??? We cannot tell native_encode_expr to start at
14208 an arbitrary byte only. So limit ourselves to a reasonable amount
14209 of work. */
14210 if (clen <= 4096)
14211 {
14212 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14213 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14214 if (len > 0
14215 && len * BITS_PER_UNIT >= bitpos + bitsize)
14216 {
14217 tree v = native_interpret_expr (type,
14218 b + bitpos / BITS_PER_UNIT,
14219 bitsize / BITS_PER_UNIT);
14220 if (v)
14221 return v;
14222 }
14223 }
14224 }
14225
14226 return NULL_TREE;
14227
14228 case FMA_EXPR:
14229 /* For integers we can decompose the FMA if possible. */
14230 if (TREE_CODE (arg0) == INTEGER_CST
14231 && TREE_CODE (arg1) == INTEGER_CST)
14232 return fold_build2_loc (loc, PLUS_EXPR, type,
14233 const_binop (MULT_EXPR, arg0, arg1), arg2);
14234 if (integer_zerop (arg2))
14235 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14236
14237 return fold_fma (loc, type, arg0, arg1, arg2);
14238
14239 case VEC_PERM_EXPR:
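      /* E.g. VEC_PERM_EXPR <{1,2,3,4}, {5,6,7,8}, {0,5,2,7}> folds to
	 {1,6,3,8}: selector values below NELTS index into the first
	 vector and the rest into the second.  */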
14240 if (TREE_CODE (arg2) == VECTOR_CST)
14241 {
14242 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14243 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14244 bool need_mask_canon = false;
14245 bool all_in_vec0 = true;
14246 bool all_in_vec1 = true;
14247 bool maybe_identity = true;
14248 bool single_arg = (op0 == op1);
14249 bool changed = false;
14250
14251 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14252 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14253 for (i = 0; i < nelts; i++)
14254 {
14255 tree val = VECTOR_CST_ELT (arg2, i);
14256 if (TREE_CODE (val) != INTEGER_CST)
14257 return NULL_TREE;
14258
14259 /* Make sure that the perm value is in an acceptable
14260 range. */
14261 wide_int t = val;
14262 if (wi::gtu_p (t, mask))
14263 {
14264 need_mask_canon = true;
14265 sel[i] = t.to_uhwi () & mask;
14266 }
14267 else
14268 sel[i] = t.to_uhwi ();
14269
14270 if (sel[i] < nelts)
14271 all_in_vec1 = false;
14272 else
14273 all_in_vec0 = false;
14274
14275 if ((sel[i] & (nelts-1)) != i)
14276 maybe_identity = false;
14277 }
14278
14279 if (maybe_identity)
14280 {
14281 if (all_in_vec0)
14282 return op0;
14283 if (all_in_vec1)
14284 return op1;
14285 }
14286
14287 if (all_in_vec0)
14288 op1 = op0;
14289 else if (all_in_vec1)
14290 {
14291 op0 = op1;
14292 for (i = 0; i < nelts; i++)
14293 sel[i] -= nelts;
14294 need_mask_canon = true;
14295 }
14296
14297 if ((TREE_CODE (op0) == VECTOR_CST
14298 || TREE_CODE (op0) == CONSTRUCTOR)
14299 && (TREE_CODE (op1) == VECTOR_CST
14300 || TREE_CODE (op1) == CONSTRUCTOR))
14301 {
14302 tree t = fold_vec_perm (type, op0, op1, sel);
14303 if (t != NULL_TREE)
14304 return t;
14305 }
14306
14307 if (op0 == op1 && !single_arg)
14308 changed = true;
14309
14310 if (need_mask_canon && arg2 == op2)
14311 {
14312 tree *tsel = XALLOCAVEC (tree, nelts);
14313 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14314 for (i = 0; i < nelts; i++)
14315 tsel[i] = build_int_cst (eltype, sel[i]);
14316 op2 = build_vector (TREE_TYPE (arg2), tsel);
14317 changed = true;
14318 }
14319
14320 if (changed)
14321 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14322 }
14323 return NULL_TREE;
14324
14325 default:
14326 return NULL_TREE;
14327 } /* switch (code) */
14328 }
14329
14330 /* Perform constant folding and related simplification of EXPR.
14331 The related simplifications include x*1 => x, x*0 => 0, etc.,
14332 and application of the associative law.
14333 NOP_EXPR conversions may be removed freely (as long as we
14334 are careful not to change the type of the overall expression).
14335 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14336 but we can constant-fold them if they have constant operands. */
14337
14338 #ifdef ENABLE_FOLD_CHECKING
14339 # define fold(x) fold_1 (x)
14340 static tree fold_1 (tree);
14341 static
14342 #endif
14343 tree
14344 fold (tree expr)
14345 {
14346 const tree t = expr;
14347 enum tree_code code = TREE_CODE (t);
14348 enum tree_code_class kind = TREE_CODE_CLASS (code);
14349 tree tem;
14350 location_t loc = EXPR_LOCATION (expr);
14351
14352 /* Return right away if a constant. */
14353 if (kind == tcc_constant)
14354 return t;
14355
14356 /* CALL_EXPR-like objects with variable numbers of operands are
14357 treated specially. */
14358 if (kind == tcc_vl_exp)
14359 {
14360 if (code == CALL_EXPR)
14361 {
14362 tem = fold_call_expr (loc, expr, false);
14363 return tem ? tem : expr;
14364 }
14365 return expr;
14366 }
14367
14368 if (IS_EXPR_CODE_CLASS (kind))
14369 {
14370 tree type = TREE_TYPE (t);
14371 tree op0, op1, op2;
14372
14373 switch (TREE_CODE_LENGTH (code))
14374 {
14375 case 1:
14376 op0 = TREE_OPERAND (t, 0);
14377 tem = fold_unary_loc (loc, code, type, op0);
14378 return tem ? tem : expr;
14379 case 2:
14380 op0 = TREE_OPERAND (t, 0);
14381 op1 = TREE_OPERAND (t, 1);
14382 tem = fold_binary_loc (loc, code, type, op0, op1);
14383 return tem ? tem : expr;
14384 case 3:
14385 op0 = TREE_OPERAND (t, 0);
14386 op1 = TREE_OPERAND (t, 1);
14387 op2 = TREE_OPERAND (t, 2);
14388 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14389 return tem ? tem : expr;
14390 default:
14391 break;
14392 }
14393 }
14394
14395 switch (code)
14396 {
14397 case ARRAY_REF:
14398 {
14399 tree op0 = TREE_OPERAND (t, 0);
14400 tree op1 = TREE_OPERAND (t, 1);
14401
14402 if (TREE_CODE (op1) == INTEGER_CST
14403 && TREE_CODE (op0) == CONSTRUCTOR
14404 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14405 {
14406 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14407 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14408 unsigned HOST_WIDE_INT begin = 0;
14409
14410 /* Find a matching index by means of a binary search. */
14411 while (begin != end)
14412 {
14413 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14414 tree index = (*elts)[middle].index;
14415
14416 if (TREE_CODE (index) == INTEGER_CST
14417 && tree_int_cst_lt (index, op1))
14418 begin = middle + 1;
14419 else if (TREE_CODE (index) == INTEGER_CST
14420 && tree_int_cst_lt (op1, index))
14421 end = middle;
14422 else if (TREE_CODE (index) == RANGE_EXPR
14423 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14424 begin = middle + 1;
14425 else if (TREE_CODE (index) == RANGE_EXPR
14426 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14427 end = middle;
14428 else
14429 return (*elts)[middle].value;
14430 }
14431 }
14432
14433 return t;
14434 }
14435
14436 /* Return a VECTOR_CST if possible. */
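    /* E.g. a V4SI CONSTRUCTOR { 1, 2 } of constants becomes the
       VECTOR_CST { 1, 2, 0, 0 }; trailing elements are implicitly
       zero and are filled in below.  */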
14437 case CONSTRUCTOR:
14438 {
14439 tree type = TREE_TYPE (t);
14440 if (TREE_CODE (type) != VECTOR_TYPE)
14441 return t;
14442
14443 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14444 unsigned HOST_WIDE_INT idx, pos = 0;
14445 tree value;
14446
14447 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14448 {
14449 if (!CONSTANT_CLASS_P (value))
14450 return t;
14451 if (TREE_CODE (value) == VECTOR_CST)
14452 {
14453 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14454 vec[pos++] = VECTOR_CST_ELT (value, i);
14455 }
14456 else
14457 vec[pos++] = value;
14458 }
14459 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14460 vec[pos] = build_zero_cst (TREE_TYPE (type));
14461
14462 return build_vector (type, vec);
14463 }
14464
14465 case CONST_DECL:
14466 return fold (DECL_INITIAL (t));
14467
14468 default:
14469 return t;
14470 } /* switch (code) */
14471 }
14472
14473 #ifdef ENABLE_FOLD_CHECKING
14474 #undef fold
14475
14476 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14477 hash_table<pointer_hash<const tree_node> > *);
14478 static void fold_check_failed (const_tree, const_tree);
14479 void print_fold_checksum (const_tree);
14480
14481 /* When --enable-checking=fold, compute a digest of expr before
14482 and after the actual fold call to verify that fold did not
14483 accidentally change the original expr. */
14484
14485 tree
14486 fold (tree expr)
14487 {
14488 tree ret;
14489 struct md5_ctx ctx;
14490 unsigned char checksum_before[16], checksum_after[16];
14491 hash_table<pointer_hash<const tree_node> > ht (32);
14492
14493 md5_init_ctx (&ctx);
14494 fold_checksum_tree (expr, &ctx, &ht);
14495 md5_finish_ctx (&ctx, checksum_before);
14496 ht.empty ();
14497
14498 ret = fold_1 (expr);
14499
14500 md5_init_ctx (&ctx);
14501 fold_checksum_tree (expr, &ctx, &ht);
14502 md5_finish_ctx (&ctx, checksum_after);
14503
14504 if (memcmp (checksum_before, checksum_after, 16))
14505 fold_check_failed (expr, ret);
14506
14507 return ret;
14508 }
14509
14510 void
14511 print_fold_checksum (const_tree expr)
14512 {
14513 struct md5_ctx ctx;
14514 unsigned char checksum[16], cnt;
14515 hash_table<pointer_hash<const tree_node> > ht (32);
14516
14517 md5_init_ctx (&ctx);
14518 fold_checksum_tree (expr, &ctx, &ht);
14519 md5_finish_ctx (&ctx, checksum);
14520 for (cnt = 0; cnt < 16; ++cnt)
14521 fprintf (stderr, "%02x", checksum[cnt]);
14522 putc ('\n', stderr);
14523 }
14524
14525 static void
14526 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14527 {
14528 internal_error ("fold check: original tree changed by fold");
14529 }
14530
14531 static void
14532 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14533 hash_table<pointer_hash <const tree_node> > *ht)
14534 {
14535 const tree_node **slot;
14536 enum tree_code code;
14537 union tree_node buf;
14538 int i, len;
14539
14540 recursive_label:
14541 if (expr == NULL)
14542 return;
14543 slot = ht->find_slot (expr, INSERT);
14544 if (*slot != NULL)
14545 return;
14546 *slot = expr;
14547 code = TREE_CODE (expr);
14548 if (TREE_CODE_CLASS (code) == tcc_declaration
14549 && DECL_ASSEMBLER_NAME_SET_P (expr))
14550 {
14551 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14552 memcpy ((char *) &buf, expr, tree_size (expr));
14553 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14554 expr = (tree) &buf;
14555 }
14556 else if (TREE_CODE_CLASS (code) == tcc_type
14557 && (TYPE_POINTER_TO (expr)
14558 || TYPE_REFERENCE_TO (expr)
14559 || TYPE_CACHED_VALUES_P (expr)
14560 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14561 || TYPE_NEXT_VARIANT (expr)))
14562 {
14563 /* Allow these fields to be modified. */
14564 tree tmp;
14565 memcpy ((char *) &buf, expr, tree_size (expr));
14566 expr = tmp = (tree) &buf;
14567 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14568 TYPE_POINTER_TO (tmp) = NULL;
14569 TYPE_REFERENCE_TO (tmp) = NULL;
14570 TYPE_NEXT_VARIANT (tmp) = NULL;
14571 if (TYPE_CACHED_VALUES_P (tmp))
14572 {
14573 TYPE_CACHED_VALUES_P (tmp) = 0;
14574 TYPE_CACHED_VALUES (tmp) = NULL;
14575 }
14576 }
14577 md5_process_bytes (expr, tree_size (expr), ctx);
14578 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14579 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14580 if (TREE_CODE_CLASS (code) != tcc_type
14581 && TREE_CODE_CLASS (code) != tcc_declaration
14582 && code != TREE_LIST
14583 && code != SSA_NAME
14584 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14585 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14586 switch (TREE_CODE_CLASS (code))
14587 {
14588 case tcc_constant:
14589 switch (code)
14590 {
14591 case STRING_CST:
14592 md5_process_bytes (TREE_STRING_POINTER (expr),
14593 TREE_STRING_LENGTH (expr), ctx);
14594 break;
14595 case COMPLEX_CST:
14596 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14597 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14598 break;
14599 case VECTOR_CST:
14600 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14601 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14602 break;
14603 default:
14604 break;
14605 }
14606 break;
14607 case tcc_exceptional:
14608 switch (code)
14609 {
14610 case TREE_LIST:
14611 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14612 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14613 expr = TREE_CHAIN (expr);
14614 goto recursive_label;
14615 break;
14616 case TREE_VEC:
14617 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14618 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14619 break;
14620 default:
14621 break;
14622 }
14623 break;
14624 case tcc_expression:
14625 case tcc_reference:
14626 case tcc_comparison:
14627 case tcc_unary:
14628 case tcc_binary:
14629 case tcc_statement:
14630 case tcc_vl_exp:
14631 len = TREE_OPERAND_LENGTH (expr);
14632 for (i = 0; i < len; ++i)
14633 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14634 break;
14635 case tcc_declaration:
14636 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14637 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14638 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14639 {
14640 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14641 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14642 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14643 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14644 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14645 }
14646
14647 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14648 {
14649 if (TREE_CODE (expr) == FUNCTION_DECL)
14650 {
14651 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14652 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14653 }
14654 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14655 }
14656 break;
14657 case tcc_type:
14658 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14659 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14660 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14661 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14662 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14663 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14664 if (INTEGRAL_TYPE_P (expr)
14665 || SCALAR_FLOAT_TYPE_P (expr))
14666 {
14667 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14668 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14669 }
14670 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14671 if (TREE_CODE (expr) == RECORD_TYPE
14672 || TREE_CODE (expr) == UNION_TYPE
14673 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14674 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14675 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14676 break;
14677 default:
14678 break;
14679 }
14680 }
14681
14682 /* Helper function for outputting the checksum of a tree T. When
14683 debugging with gdb, you can "define mynext" to be "next" followed
14684 by "call debug_fold_checksum (op0)", then just trace down till the
14685 outputs differ. */
14686
14687 DEBUG_FUNCTION void
14688 debug_fold_checksum (const_tree t)
14689 {
14690 int i;
14691 unsigned char checksum[16];
14692 struct md5_ctx ctx;
14693 hash_table<pointer_hash<const tree_node> > ht (32);
14694
14695 md5_init_ctx (&ctx);
14696 fold_checksum_tree (t, &ctx, &ht);
14697 md5_finish_ctx (&ctx, checksum);
14698 ht.empty ();
14699
14700 for (i = 0; i < 16; i++)
14701 fprintf (stderr, "%d ", checksum[i]);
14702
14703 fprintf (stderr, "\n");
14704 }
14705
14706 #endif
14707
14708 /* Fold a unary tree expression with code CODE of type TYPE with an
14709 operand OP0. LOC is the location of the resulting expression.
14710 Return a folded expression if successful. Otherwise, return a tree
14711 expression with code CODE of type TYPE with an operand OP0. */
14712
14713 tree
14714 fold_build1_stat_loc (location_t loc,
14715 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14716 {
14717 tree tem;
14718 #ifdef ENABLE_FOLD_CHECKING
14719 unsigned char checksum_before[16], checksum_after[16];
14720 struct md5_ctx ctx;
14721 hash_table<pointer_hash<const tree_node> > ht (32);
14722
14723 md5_init_ctx (&ctx);
14724 fold_checksum_tree (op0, &ctx, &ht);
14725 md5_finish_ctx (&ctx, checksum_before);
14726 ht.empty ();
14727 #endif
14728
14729 tem = fold_unary_loc (loc, code, type, op0);
14730 if (!tem)
14731 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14732
14733 #ifdef ENABLE_FOLD_CHECKING
14734 md5_init_ctx (&ctx);
14735 fold_checksum_tree (op0, &ctx, &ht);
14736 md5_finish_ctx (&ctx, checksum_after);
14737
14738 if (memcmp (checksum_before, checksum_after, 16))
14739 fold_check_failed (op0, tem);
14740 #endif
14741 return tem;
14742 }
14743
14744 /* Fold a binary tree expression with code CODE of type TYPE with
14745 operands OP0 and OP1. LOC is the location of the resulting
14746 expression. Return a folded expression if successful. Otherwise,
14747 return a tree expression with code CODE of type TYPE with operands
14748 OP0 and OP1. */
14749
14750 tree
14751 fold_build2_stat_loc (location_t loc,
14752 enum tree_code code, tree type, tree op0, tree op1
14753 MEM_STAT_DECL)
14754 {
14755 tree tem;
14756 #ifdef ENABLE_FOLD_CHECKING
14757 unsigned char checksum_before_op0[16],
14758 checksum_before_op1[16],
14759 checksum_after_op0[16],
14760 checksum_after_op1[16];
14761 struct md5_ctx ctx;
14762 hash_table<pointer_hash<const tree_node> > ht (32);
14763
14764 md5_init_ctx (&ctx);
14765 fold_checksum_tree (op0, &ctx, &ht);
14766 md5_finish_ctx (&ctx, checksum_before_op0);
14767 ht.empty ();
14768
14769 md5_init_ctx (&ctx);
14770 fold_checksum_tree (op1, &ctx, &ht);
14771 md5_finish_ctx (&ctx, checksum_before_op1);
14772 ht.empty ();
14773 #endif
14774
14775 tem = fold_binary_loc (loc, code, type, op0, op1);
14776 if (!tem)
14777 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14778
14779 #ifdef ENABLE_FOLD_CHECKING
14780 md5_init_ctx (&ctx);
14781 fold_checksum_tree (op0, &ctx, &ht);
14782 md5_finish_ctx (&ctx, checksum_after_op0);
14783 ht.empty ();
14784
14785 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14786 fold_check_failed (op0, tem);
14787
14788 md5_init_ctx (&ctx);
14789 fold_checksum_tree (op1, &ctx, &ht);
14790 md5_finish_ctx (&ctx, checksum_after_op1);
14791
14792 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14793 fold_check_failed (op1, tem);
14794 #endif
14795 return tem;
14796 }
14797
14798 /* Fold a ternary tree expression with code CODE of type TYPE with
14799 operands OP0, OP1, and OP2. Return a folded expression if
14800 successful. Otherwise, return a tree expression with code CODE of
14801 type TYPE with operands OP0, OP1, and OP2. */
14802
14803 tree
14804 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14805 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14806 {
14807 tree tem;
14808 #ifdef ENABLE_FOLD_CHECKING
14809 unsigned char checksum_before_op0[16],
14810 checksum_before_op1[16],
14811 checksum_before_op2[16],
14812 checksum_after_op0[16],
14813 checksum_after_op1[16],
14814 checksum_after_op2[16];
14815 struct md5_ctx ctx;
14816 hash_table<pointer_hash<const tree_node> > ht (32);
14817
14818 md5_init_ctx (&ctx);
14819 fold_checksum_tree (op0, &ctx, &ht);
14820 md5_finish_ctx (&ctx, checksum_before_op0);
14821 ht.empty ();
14822
14823 md5_init_ctx (&ctx);
14824 fold_checksum_tree (op1, &ctx, &ht);
14825 md5_finish_ctx (&ctx, checksum_before_op1);
14826 ht.empty ();
14827
14828 md5_init_ctx (&ctx);
14829 fold_checksum_tree (op2, &ctx, &ht);
14830 md5_finish_ctx (&ctx, checksum_before_op2);
14831 ht.empty ();
14832 #endif
14833
14834 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14835 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14836 if (!tem)
14837 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14838
14839 #ifdef ENABLE_FOLD_CHECKING
14840 md5_init_ctx (&ctx);
14841 fold_checksum_tree (op0, &ctx, &ht);
14842 md5_finish_ctx (&ctx, checksum_after_op0);
14843 ht.empty ();
14844
14845 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14846 fold_check_failed (op0, tem);
14847
14848 md5_init_ctx (&ctx);
14849 fold_checksum_tree (op1, &ctx, &ht);
14850 md5_finish_ctx (&ctx, checksum_after_op1);
14851 ht.empty ();
14852
14853 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14854 fold_check_failed (op1, tem);
14855
14856 md5_init_ctx (&ctx);
14857 fold_checksum_tree (op2, &ctx, &ht);
14858 md5_finish_ctx (&ctx, checksum_after_op2);
14859
14860 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14861 fold_check_failed (op2, tem);
14862 #endif
14863 return tem;
14864 }
14865
14866 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14867 arguments in ARGARRAY, and a null static chain.
14868 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14869 of type TYPE from the given operands as constructed by build_call_array. */
14870
14871 tree
14872 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14873 int nargs, tree *argarray)
14874 {
14875 tree tem;
14876 #ifdef ENABLE_FOLD_CHECKING
14877 unsigned char checksum_before_fn[16],
14878 checksum_before_arglist[16],
14879 checksum_after_fn[16],
14880 checksum_after_arglist[16];
14881 struct md5_ctx ctx;
14882 hash_table<pointer_hash<const tree_node> > ht (32);
14883 int i;
14884
14885 md5_init_ctx (&ctx);
14886 fold_checksum_tree (fn, &ctx, &ht);
14887 md5_finish_ctx (&ctx, checksum_before_fn);
14888 ht.empty ();
14889
14890 md5_init_ctx (&ctx);
14891 for (i = 0; i < nargs; i++)
14892 fold_checksum_tree (argarray[i], &ctx, &ht);
14893 md5_finish_ctx (&ctx, checksum_before_arglist);
14894 ht.empty ();
14895 #endif
14896
14897 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14898
14899 #ifdef ENABLE_FOLD_CHECKING
14900 md5_init_ctx (&ctx);
14901 fold_checksum_tree (fn, &ctx, &ht);
14902 md5_finish_ctx (&ctx, checksum_after_fn);
14903 ht.empty ();
14904
14905 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14906 fold_check_failed (fn, tem);
14907
14908 md5_init_ctx (&ctx);
14909 for (i = 0; i < nargs; i++)
14910 fold_checksum_tree (argarray[i], &ctx, &ht);
14911 md5_finish_ctx (&ctx, checksum_after_arglist);
14912
14913 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14914 fold_check_failed (NULL_TREE, tem);
14915 #endif
14916 return tem;
14917 }
14918
14919 /* Perform constant folding and related simplification of initializer
14920 expression EXPR. These functions behave identically to "fold_buildN"
14921 but ignore potential run-time traps and exceptions that fold must preserve. */
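/* For example, under -frounding-math fold normally refuses to fold the
   inexact division 1.0 / 3.0, since the run-time rounding mode could
   differ; in a static initializer the division is folded anyway,
   because the value is computed at compile time regardless.  */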
14922
14923 #define START_FOLD_INIT \
14924 int saved_signaling_nans = flag_signaling_nans;\
14925 int saved_trapping_math = flag_trapping_math;\
14926 int saved_rounding_math = flag_rounding_math;\
14927 int saved_trapv = flag_trapv;\
14928 int saved_folding_initializer = folding_initializer;\
14929 flag_signaling_nans = 0;\
14930 flag_trapping_math = 0;\
14931 flag_rounding_math = 0;\
14932 flag_trapv = 0;\
14933 folding_initializer = 1;
14934
14935 #define END_FOLD_INIT \
14936 flag_signaling_nans = saved_signaling_nans;\
14937 flag_trapping_math = saved_trapping_math;\
14938 flag_rounding_math = saved_rounding_math;\
14939 flag_trapv = saved_trapv;\
14940 folding_initializer = saved_folding_initializer;
14941
14942 tree
14943 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14944 tree type, tree op)
14945 {
14946 tree result;
14947 START_FOLD_INIT;
14948
14949 result = fold_build1_loc (loc, code, type, op);
14950
14951 END_FOLD_INIT;
14952 return result;
14953 }
14954
14955 tree
14956 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14957 tree type, tree op0, tree op1)
14958 {
14959 tree result;
14960 START_FOLD_INIT;
14961
14962 result = fold_build2_loc (loc, code, type, op0, op1);
14963
14964 END_FOLD_INIT;
14965 return result;
14966 }
14967
14968 tree
14969 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14970 int nargs, tree *argarray)
14971 {
14972 tree result;
14973 START_FOLD_INIT;
14974
14975 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14976
14977 END_FOLD_INIT;
14978 return result;
14979 }
14980
14981 #undef START_FOLD_INIT
14982 #undef END_FOLD_INIT
14983
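/* A caller-side sketch (the operand trees ONE and THREE below are
   placeholders, not objects defined in this file): a front end folding
   the static initializer "double d = 1.0 / 3.0;" can use

     tree init = fold_build2_initializer_loc (input_location, RDIV_EXPR,
                                              double_type_node, one, three);

   and the division folds even under -frounding-math, because the
   macros above temporarily clear flag_rounding_math and friends.  */
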
14984 /* Determine if first argument is a multiple of second argument. Return 0 if
14985 it is not, or if we cannot easily determine that it is.
14986
14987 An example of the sort of thing we care about (at this point; this routine
14988 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14989 fold cases do now) is discovering that
14990
14991 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14992
14993 is a multiple of
14994
14995 SAVE_EXPR (J * 8)
14996
14997 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14998
14999 This code also handles discovering that
15000
15001 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15002
15003 is a multiple of 8 so we don't have to worry about dealing with a
15004 possible remainder.
15005
15006 Note that we *look* inside a SAVE_EXPR only to determine how it was
15007 calculated; it is not safe for fold to do much of anything else with the
15008 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15009 at run time. For example, the latter example above *cannot* be implemented
15010 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15011 evaluation time of the original SAVE_EXPR is not necessarily the same at
15012 the time the new expression is evaluated. The only optimization of this
15013 sort that would be valid is changing
15014
15015 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15016
15017 divided by 8 to
15018
15019 SAVE_EXPR (I) * SAVE_EXPR (J)
15020
15021 (where the same SAVE_EXPR (J) is used in the original and the
15022 transformed version). */
15023
15024 int
15025 multiple_of_p (tree type, const_tree top, const_tree bottom)
15026 {
15027 if (operand_equal_p (top, bottom, 0))
15028 return 1;
15029
15030 if (TREE_CODE (type) != INTEGER_TYPE)
15031 return 0;
15032
15033 switch (TREE_CODE (top))
15034 {
15035 case BIT_AND_EXPR:
15036 /* We can only show a bitwise AND to be a multiple of BOTTOM when BOTTOM
15037 is a power of two and either operand (e.g. the mask) is such a multiple. */
15038 if (!integer_pow2p (bottom))
15039 return 0;
15040 /* FALLTHRU */
15041
15042 case MULT_EXPR:
15043 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15044 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15045
15046 case PLUS_EXPR:
15047 case MINUS_EXPR:
15048 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15049 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15050
15051 case LSHIFT_EXPR:
15052 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15053 {
15054 tree op1, t1;
15055
15056 op1 = TREE_OPERAND (top, 1);
15057 /* const_binop may not detect overflow correctly,
15058 so check for it explicitly here. */
15059 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15060 && 0 != (t1 = fold_convert (type,
15061 const_binop (LSHIFT_EXPR,
15062 size_one_node,
15063 op1)))
15064 && !TREE_OVERFLOW (t1))
15065 return multiple_of_p (type, t1, bottom);
15066 }
15067 return 0;
15068
15069 case NOP_EXPR:
15070 /* Can't handle conversions from non-integral or wider integral types. */
15071 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15072 || (TYPE_PRECISION (type)
15073 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15074 return 0;
15075
15076 /* ... fall through ... */
15077
15078 case SAVE_EXPR:
15079 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15080
15081 case COND_EXPR:
15082 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15083 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15084
15085 case INTEGER_CST:
15086 if (TREE_CODE (bottom) != INTEGER_CST
15087 || integer_zerop (bottom)
15088 || (TYPE_UNSIGNED (type)
15089 && (tree_int_cst_sgn (top) < 0
15090 || tree_int_cst_sgn (bottom) < 0)))
15091 return 0;
15092 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15093 SIGNED);
15094
15095 default:
15096 return 0;
15097 }
15098 }
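
/* A sketch of the recursion above (I and J8 are hypothetical trees,
   with J8 an INTEGER_CST of value 8): the MULT_EXPR case is what makes

     multiple_of_p (sizetype, build2 (MULT_EXPR, sizetype, i, j8), j8)

   return 1, via operand_equal_p on the second operand, while the
   INTEGER_CST case is what shows the same product is also a multiple
   of, say, 4, via wi::multiple_of_p (8, 4).  */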
15099
15100 /* Return true if CODE or TYPE is known to be non-negative. */
15101
15102 static bool
15103 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15104 {
15105 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15106 && truth_value_p (code))
15107 /* Truth values evaluate to 0 or 1, which are nonnegative unless we
15108 have a signed:1 type (where the values are -1 and 0). */
15109 return true;
15110 return false;
15111 }
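
/* Concretely: a signed one-bit bit-field type represents true as -1,
   so a comparison result of such a type is not known to be
   non-negative; for any other boolean-like type the only possible
   values are 0 and 1.  */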
15112
15113 /* Return true if (CODE OP0) is known to be non-negative. If the return
15114 value is based on the assumption that signed overflow is undefined,
15115 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15116 *STRICT_OVERFLOW_P. */
15117
15118 bool
15119 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15120 bool *strict_overflow_p)
15121 {
15122 if (TYPE_UNSIGNED (type))
15123 return true;
15124
15125 switch (code)
15126 {
15127 case ABS_EXPR:
15128 /* We can't return true when wrapping overflow is in effect (e.g.
15129 flag_wrapv), because then ABS_EXPR<INT_MIN> == INT_MIN. */
15130 if (!INTEGRAL_TYPE_P (type))
15131 return true;
15132 if (TYPE_OVERFLOW_UNDEFINED (type))
15133 {
15134 *strict_overflow_p = true;
15135 return true;
15136 }
15137 break;
15138
15139 case NON_LVALUE_EXPR:
15140 case FLOAT_EXPR:
15141 case FIX_TRUNC_EXPR:
15142 return tree_expr_nonnegative_warnv_p (op0,
15143 strict_overflow_p);
15144
15145 case NOP_EXPR:
15146 {
15147 tree inner_type = TREE_TYPE (op0);
15148 tree outer_type = type;
15149
15150 if (TREE_CODE (outer_type) == REAL_TYPE)
15151 {
15152 if (TREE_CODE (inner_type) == REAL_TYPE)
15153 return tree_expr_nonnegative_warnv_p (op0,
15154 strict_overflow_p);
15155 if (INTEGRAL_TYPE_P (inner_type))
15156 {
15157 if (TYPE_UNSIGNED (inner_type))
15158 return true;
15159 return tree_expr_nonnegative_warnv_p (op0,
15160 strict_overflow_p);
15161 }
15162 }
15163 else if (INTEGRAL_TYPE_P (outer_type))
15164 {
15165 if (TREE_CODE (inner_type) == REAL_TYPE)
15166 return tree_expr_nonnegative_warnv_p (op0,
15167 strict_overflow_p);
15168 if (INTEGRAL_TYPE_P (inner_type))
15169 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15170 && TYPE_UNSIGNED (inner_type);
15171 }
15172 }
15173 break;
15174
15175 default:
15176 return tree_simple_nonnegative_warnv_p (code, type);
15177 }
15178
15179 /* We don't know the sign of `t', so be conservative and return false. */
15180 return false;
15181 }
15182
15183 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15184 value is based on the assumption that signed overflow is undefined,
15185 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15186 *STRICT_OVERFLOW_P. */
15187
15188 bool
15189 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15190 tree op1, bool *strict_overflow_p)
15191 {
15192 if (TYPE_UNSIGNED (type))
15193 return true;
15194
15195 switch (code)
15196 {
15197 case POINTER_PLUS_EXPR:
15198 case PLUS_EXPR:
15199 if (FLOAT_TYPE_P (type))
15200 return (tree_expr_nonnegative_warnv_p (op0,
15201 strict_overflow_p)
15202 && tree_expr_nonnegative_warnv_p (op1,
15203 strict_overflow_p));
15204
15205 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15206 both unsigned and at least 2 bits shorter than the result. */
15207 if (TREE_CODE (type) == INTEGER_TYPE
15208 && TREE_CODE (op0) == NOP_EXPR
15209 && TREE_CODE (op1) == NOP_EXPR)
15210 {
15211 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15212 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15213 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15214 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15215 {
15216 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15217 TYPE_PRECISION (inner2)) + 1;
15218 return prec < TYPE_PRECISION (type);
15219 }
15220 }
15221 break;
15222
15223 case MULT_EXPR:
15224 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15225 {
15226 /* x * x is always non-negative for floating point x, and
15227 likewise for integers when overflow is undefined. */
15228 if (operand_equal_p (op0, op1, 0)
15229 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15230 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15231 {
15232 if (TYPE_OVERFLOW_UNDEFINED (type))
15233 *strict_overflow_p = true;
15234 return true;
15235 }
15236 }
15237
15238 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
15239 unsigned and the sum of their precisions is less than the result's. */
15240 if (TREE_CODE (type) == INTEGER_TYPE
15241 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15242 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15243 {
15244 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15245 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15246 : TREE_TYPE (op0);
15247 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15248 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15249 : TREE_TYPE (op1);
15250
15251 bool unsigned0 = TYPE_UNSIGNED (inner0);
15252 bool unsigned1 = TYPE_UNSIGNED (inner1);
15253
15254 if (TREE_CODE (op0) == INTEGER_CST)
15255 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15256
15257 if (TREE_CODE (op1) == INTEGER_CST)
15258 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15259
15260 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15261 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15262 {
15263 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15264 ? tree_int_cst_min_precision (op0, UNSIGNED)
15265 : TYPE_PRECISION (inner0);
15266
15267 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15268 ? tree_int_cst_min_precision (op1, UNSIGNED)
15269 : TYPE_PRECISION (inner1);
15270
15271 return precision0 + precision1 < TYPE_PRECISION (type);
15272 }
15273 }
15274 return false;
15275
15276 case BIT_AND_EXPR:
15277 case MAX_EXPR:
15278 return (tree_expr_nonnegative_warnv_p (op0,
15279 strict_overflow_p)
15280 || tree_expr_nonnegative_warnv_p (op1,
15281 strict_overflow_p));
15282
15283 case BIT_IOR_EXPR:
15284 case BIT_XOR_EXPR:
15285 case MIN_EXPR:
15286 case RDIV_EXPR:
15287 case TRUNC_DIV_EXPR:
15288 case CEIL_DIV_EXPR:
15289 case FLOOR_DIV_EXPR:
15290 case ROUND_DIV_EXPR:
15291 return (tree_expr_nonnegative_warnv_p (op0,
15292 strict_overflow_p)
15293 && tree_expr_nonnegative_warnv_p (op1,
15294 strict_overflow_p));
15295
15296 case TRUNC_MOD_EXPR:
15297 case CEIL_MOD_EXPR:
15298 case FLOOR_MOD_EXPR:
15299 case ROUND_MOD_EXPR:
15300 return tree_expr_nonnegative_warnv_p (op0,
15301 strict_overflow_p);
15302 default:
15303 return tree_simple_nonnegative_warnv_p (code, type);
15304 }
15305
15306 /* We don't know the sign of `t', so be conservative and return false. */
15307 return false;
15308 }
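
/* A worked instance of the PLUS_EXPR rule above: for unsigned char
   operands A and B zero-extended to 32-bit int, (int) A + (int) B lies
   in [0, 510], and MAX (8, 8) + 1 == 9 < 32, so the sum is known to be
   non-negative with no overflow assumption at all.  */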
15309
15310 /* Return true if T is known to be non-negative. If the return
15311 value is based on the assumption that signed overflow is undefined,
15312 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15313 *STRICT_OVERFLOW_P. */
15314
15315 bool
15316 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15317 {
15318 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15319 return true;
15320
15321 switch (TREE_CODE (t))
15322 {
15323 case INTEGER_CST:
15324 return tree_int_cst_sgn (t) >= 0;
15325
15326 case REAL_CST:
15327 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15328
15329 case FIXED_CST:
15330 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15331
15332 case COND_EXPR:
15333 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15334 strict_overflow_p)
15335 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15336 strict_overflow_p));
15337 default:
15338 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15339 TREE_TYPE (t));
15340 }
15341 /* We don't know the sign of `t', so be conservative and return false. */
15342 return false;
15343 }
15344
15345 /* Return true if T is known to be non-negative. If the return
15346 value is based on the assumption that signed overflow is undefined,
15347 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15348 *STRICT_OVERFLOW_P. */
15349
15350 bool
15351 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15352 tree arg0, tree arg1, bool *strict_overflow_p)
15353 {
15354 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15355 switch (DECL_FUNCTION_CODE (fndecl))
15356 {
15357 CASE_FLT_FN (BUILT_IN_ACOS):
15358 CASE_FLT_FN (BUILT_IN_ACOSH):
15359 CASE_FLT_FN (BUILT_IN_CABS):
15360 CASE_FLT_FN (BUILT_IN_COSH):
15361 CASE_FLT_FN (BUILT_IN_ERFC):
15362 CASE_FLT_FN (BUILT_IN_EXP):
15363 CASE_FLT_FN (BUILT_IN_EXP10):
15364 CASE_FLT_FN (BUILT_IN_EXP2):
15365 CASE_FLT_FN (BUILT_IN_FABS):
15366 CASE_FLT_FN (BUILT_IN_FDIM):
15367 CASE_FLT_FN (BUILT_IN_HYPOT):
15368 CASE_FLT_FN (BUILT_IN_POW10):
15369 CASE_INT_FN (BUILT_IN_FFS):
15370 CASE_INT_FN (BUILT_IN_PARITY):
15371 CASE_INT_FN (BUILT_IN_POPCOUNT):
15372 CASE_INT_FN (BUILT_IN_CLZ):
15373 CASE_INT_FN (BUILT_IN_CLRSB):
15374 case BUILT_IN_BSWAP32:
15375 case BUILT_IN_BSWAP64:
15376 /* Always true. */
15377 return true;
15378
15379 CASE_FLT_FN (BUILT_IN_SQRT):
15380 /* sqrt(-0.0) is -0.0. */
15381 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15382 return true;
15383 return tree_expr_nonnegative_warnv_p (arg0,
15384 strict_overflow_p);
15385
15386 CASE_FLT_FN (BUILT_IN_ASINH):
15387 CASE_FLT_FN (BUILT_IN_ATAN):
15388 CASE_FLT_FN (BUILT_IN_ATANH):
15389 CASE_FLT_FN (BUILT_IN_CBRT):
15390 CASE_FLT_FN (BUILT_IN_CEIL):
15391 CASE_FLT_FN (BUILT_IN_ERF):
15392 CASE_FLT_FN (BUILT_IN_EXPM1):
15393 CASE_FLT_FN (BUILT_IN_FLOOR):
15394 CASE_FLT_FN (BUILT_IN_FMOD):
15395 CASE_FLT_FN (BUILT_IN_FREXP):
15396 CASE_FLT_FN (BUILT_IN_ICEIL):
15397 CASE_FLT_FN (BUILT_IN_IFLOOR):
15398 CASE_FLT_FN (BUILT_IN_IRINT):
15399 CASE_FLT_FN (BUILT_IN_IROUND):
15400 CASE_FLT_FN (BUILT_IN_LCEIL):
15401 CASE_FLT_FN (BUILT_IN_LDEXP):
15402 CASE_FLT_FN (BUILT_IN_LFLOOR):
15403 CASE_FLT_FN (BUILT_IN_LLCEIL):
15404 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15405 CASE_FLT_FN (BUILT_IN_LLRINT):
15406 CASE_FLT_FN (BUILT_IN_LLROUND):
15407 CASE_FLT_FN (BUILT_IN_LRINT):
15408 CASE_FLT_FN (BUILT_IN_LROUND):
15409 CASE_FLT_FN (BUILT_IN_MODF):
15410 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15411 CASE_FLT_FN (BUILT_IN_RINT):
15412 CASE_FLT_FN (BUILT_IN_ROUND):
15413 CASE_FLT_FN (BUILT_IN_SCALB):
15414 CASE_FLT_FN (BUILT_IN_SCALBLN):
15415 CASE_FLT_FN (BUILT_IN_SCALBN):
15416 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15417 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15418 CASE_FLT_FN (BUILT_IN_SINH):
15419 CASE_FLT_FN (BUILT_IN_TANH):
15420 CASE_FLT_FN (BUILT_IN_TRUNC):
15421 /* True if the 1st argument is nonnegative. */
15422 return tree_expr_nonnegative_warnv_p (arg0,
15423 strict_overflow_p);
15424
15425 CASE_FLT_FN (BUILT_IN_FMAX):
15426 /* True if the 1st OR 2nd arguments are nonnegative. */
15427 return (tree_expr_nonnegative_warnv_p (arg0,
15428 strict_overflow_p)
15429 || (tree_expr_nonnegative_warnv_p (arg1,
15430 strict_overflow_p)));
15431
15432 CASE_FLT_FN (BUILT_IN_FMIN):
15433 /* True if the 1st AND 2nd arguments are nonnegative. */
15434 return (tree_expr_nonnegative_warnv_p (arg0,
15435 strict_overflow_p)
15436 && (tree_expr_nonnegative_warnv_p (arg1,
15437 strict_overflow_p)));
15438
15439 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15440 /* True if the 2nd argument is nonnegative. */
15441 return tree_expr_nonnegative_warnv_p (arg1,
15442 strict_overflow_p);
15443
15444 CASE_FLT_FN (BUILT_IN_POWI):
15445 /* True if the 1st argument is nonnegative or the second
15446 argument is an even integer. */
15447 if (TREE_CODE (arg1) == INTEGER_CST
15448 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15449 return true;
15450 return tree_expr_nonnegative_warnv_p (arg0,
15451 strict_overflow_p);
15452
15453 CASE_FLT_FN (BUILT_IN_POW):
15454 /* True if the 1st argument is nonnegative or the second
15455 argument is an even integer valued real. */
15456 if (TREE_CODE (arg1) == REAL_CST)
15457 {
15458 REAL_VALUE_TYPE c;
15459 HOST_WIDE_INT n;
15460
15461 c = TREE_REAL_CST (arg1);
15462 n = real_to_integer (&c);
15463 if ((n & 1) == 0)
15464 {
15465 REAL_VALUE_TYPE cint;
15466 real_from_integer (&cint, VOIDmode, n, SIGNED);
15467 if (real_identical (&c, &cint))
15468 return true;
15469 }
15470 }
15471 return tree_expr_nonnegative_warnv_p (arg0,
15472 strict_overflow_p);
15473
15474 default:
15475 break;
15476 }
15477 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15478 type);
15479 }
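
/* For example, the BUILT_IN_POW case above treats pow (x, 2.0) as
   non-negative for every x, since 2.0 is an even integer valued real,
   while pow (x, 3.0) falls back to asking whether x itself is
   non-negative.  */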
15480
15481 /* Return true if T is known to be non-negative. If the return
15482 value is based on the assumption that signed overflow is undefined,
15483 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15484 *STRICT_OVERFLOW_P. */
15485
15486 static bool
15487 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15488 {
15489 enum tree_code code = TREE_CODE (t);
15490 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15491 return true;
15492
15493 switch (code)
15494 {
15495 case TARGET_EXPR:
15496 {
15497 tree temp = TARGET_EXPR_SLOT (t);
15498 t = TARGET_EXPR_INITIAL (t);
15499
15500 /* If the initializer is non-void, then it's a normal expression
15501 that will be assigned to the slot. */
15502 if (!VOID_TYPE_P (t))
15503 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15504
15505 /* Otherwise, the initializer sets the slot in some way. One common
15506 way is an assignment statement at the end of the initializer. */
15507 while (1)
15508 {
15509 if (TREE_CODE (t) == BIND_EXPR)
15510 t = expr_last (BIND_EXPR_BODY (t));
15511 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15512 || TREE_CODE (t) == TRY_CATCH_EXPR)
15513 t = expr_last (TREE_OPERAND (t, 0));
15514 else if (TREE_CODE (t) == STATEMENT_LIST)
15515 t = expr_last (t);
15516 else
15517 break;
15518 }
15519 if (TREE_CODE (t) == MODIFY_EXPR
15520 && TREE_OPERAND (t, 0) == temp)
15521 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15522 strict_overflow_p);
15523
15524 return false;
15525 }
15526
15527 case CALL_EXPR:
15528 {
15529 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15530 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15531
15532 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15533 get_callee_fndecl (t),
15534 arg0,
15535 arg1,
15536 strict_overflow_p);
15537 }
15538 case COMPOUND_EXPR:
15539 case MODIFY_EXPR:
15540 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15541 strict_overflow_p);
15542 case BIND_EXPR:
15543 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15544 strict_overflow_p);
15545 case SAVE_EXPR:
15546 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15547 strict_overflow_p);
15548
15549 default:
15550 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15551 TREE_TYPE (t));
15552 }
15553
15554 /* We don't know the sign of `t', so be conservative and return false. */
15555 return false;
15556 }
15557
15558 /* Return true if T is known to be non-negative. If the return
15559 value is based on the assumption that signed overflow is undefined,
15560 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15561 *STRICT_OVERFLOW_P. */
15562
15563 bool
15564 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15565 {
15566 enum tree_code code;
15567 if (t == error_mark_node)
15568 return false;
15569
15570 code = TREE_CODE (t);
15571 switch (TREE_CODE_CLASS (code))
15572 {
15573 case tcc_binary:
15574 case tcc_comparison:
15575 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15576 TREE_TYPE (t),
15577 TREE_OPERAND (t, 0),
15578 TREE_OPERAND (t, 1),
15579 strict_overflow_p);
15580
15581 case tcc_unary:
15582 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15583 TREE_TYPE (t),
15584 TREE_OPERAND (t, 0),
15585 strict_overflow_p);
15586
15587 case tcc_constant:
15588 case tcc_declaration:
15589 case tcc_reference:
15590 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15591
15592 default:
15593 break;
15594 }
15595
15596 switch (code)
15597 {
15598 case TRUTH_AND_EXPR:
15599 case TRUTH_OR_EXPR:
15600 case TRUTH_XOR_EXPR:
15601 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15602 TREE_TYPE (t),
15603 TREE_OPERAND (t, 0),
15604 TREE_OPERAND (t, 1),
15605 strict_overflow_p);
15606 case TRUTH_NOT_EXPR:
15607 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15608 TREE_TYPE (t),
15609 TREE_OPERAND (t, 0),
15610 strict_overflow_p);
15611
15612 case COND_EXPR:
15613 case CONSTRUCTOR:
15614 case OBJ_TYPE_REF:
15615 case ASSERT_EXPR:
15616 case ADDR_EXPR:
15617 case WITH_SIZE_EXPR:
15618 case SSA_NAME:
15619 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15620
15621 default:
15622 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15623 }
15624 }
15625
15626 /* Return true if `t' is known to be non-negative. Handle warnings
15627 about undefined signed overflow. */
15628
15629 bool
15630 tree_expr_nonnegative_p (tree t)
15631 {
15632 bool ret, strict_overflow_p;
15633
15634 strict_overflow_p = false;
15635 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15636 if (strict_overflow_p)
15637 fold_overflow_warning (("assuming signed overflow does not occur when "
15638 "determining that expression is always "
15639 "non-negative"),
15640 WARN_STRICT_OVERFLOW_MISC);
15641 return ret;
15642 }
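
/* A sketch of the _warnv calling convention (X and Y are placeholder
   trees): a caller can combine several queries and warn only once,

     bool ovf = false;
     if (tree_expr_nonnegative_warnv_p (x, &ovf)
         && tree_expr_nonnegative_warnv_p (y, &ovf)
         && ovf)
       fold_overflow_warning (("assuming signed overflow does not occur"),
                              WARN_STRICT_OVERFLOW_MISC);

   whereas this wrapper warns immediately for a single expression.  */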
15643
15644
15645 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15646 For floating point we further ensure that T is not denormal.
15647 Similar logic is present in nonzero_address_p in rtlanal.c.
15648
15649 If the return value is based on the assumption that signed overflow
15650 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15651 change *STRICT_OVERFLOW_P. */
15652
15653 bool
15654 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15655 bool *strict_overflow_p)
15656 {
15657 switch (code)
15658 {
15659 case ABS_EXPR:
15660 return tree_expr_nonzero_warnv_p (op0,
15661 strict_overflow_p);
15662
15663 case NOP_EXPR:
15664 {
15665 tree inner_type = TREE_TYPE (op0);
15666 tree outer_type = type;
15667
15668 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15669 && tree_expr_nonzero_warnv_p (op0,
15670 strict_overflow_p));
15671 }
15672 break;
15673
15674 case NON_LVALUE_EXPR:
15675 return tree_expr_nonzero_warnv_p (op0,
15676 strict_overflow_p);
15677
15678 default:
15679 break;
15680 }
15681
15682 return false;
15683 }
15684
15685 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15686 For floating point we further ensure that T is not denormal.
15687 Similar logic is present in nonzero_address_p in rtlanal.c.
15688
15689 If the return value is based on the assumption that signed overflow
15690 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15691 change *STRICT_OVERFLOW_P. */
15692
15693 bool
15694 tree_binary_nonzero_warnv_p (enum tree_code code,
15695 tree type,
15696 tree op0,
15697 tree op1, bool *strict_overflow_p)
15698 {
15699 bool sub_strict_overflow_p;
15700 switch (code)
15701 {
15702 case POINTER_PLUS_EXPR:
15703 case PLUS_EXPR:
15704 if (TYPE_OVERFLOW_UNDEFINED (type))
15705 {
15706 /* In the presence of negative values it is hard
15707 to say anything definite. */
15708 sub_strict_overflow_p = false;
15709 if (!tree_expr_nonnegative_warnv_p (op0,
15710 &sub_strict_overflow_p)
15711 || !tree_expr_nonnegative_warnv_p (op1,
15712 &sub_strict_overflow_p))
15713 return false;
15714 /* One of the operands must be positive and the other non-negative. */
15715 /* We don't set *STRICT_OVERFLOW_P here: even if the addition wraps,
15716 on a two's-complement machine the sum of a positive and a
15717 non-negative number can never wrap around to zero. */
15718 return (tree_expr_nonzero_warnv_p (op0,
15719 strict_overflow_p)
15720 || tree_expr_nonzero_warnv_p (op1,
15721 strict_overflow_p));
15722 }
15723 break;
15724
15725 case MULT_EXPR:
15726 if (TYPE_OVERFLOW_UNDEFINED (type))
15727 {
15728 if (tree_expr_nonzero_warnv_p (op0,
15729 strict_overflow_p)
15730 && tree_expr_nonzero_warnv_p (op1,
15731 strict_overflow_p))
15732 {
15733 *strict_overflow_p = true;
15734 return true;
15735 }
15736 }
15737 break;
15738
15739 case MIN_EXPR:
15740 sub_strict_overflow_p = false;
15741 if (tree_expr_nonzero_warnv_p (op0,
15742 &sub_strict_overflow_p)
15743 && tree_expr_nonzero_warnv_p (op1,
15744 &sub_strict_overflow_p))
15745 {
15746 if (sub_strict_overflow_p)
15747 *strict_overflow_p = true;
15748 }
15749 break;
15750
15751 case MAX_EXPR:
15752 sub_strict_overflow_p = false;
15753 if (tree_expr_nonzero_warnv_p (op0,
15754 &sub_strict_overflow_p))
15755 {
15756 if (sub_strict_overflow_p)
15757 *strict_overflow_p = true;
15758
15759 /* When both operands are nonzero, then MAX must be too. */
15760 if (tree_expr_nonzero_warnv_p (op1,
15761 strict_overflow_p))
15762 return true;
15763
15764 /* MAX where operand 0 is positive is positive. */
15765 return tree_expr_nonnegative_warnv_p (op0,
15766 strict_overflow_p);
15767 }
15768 /* MAX where operand 1 is positive is positive. */
15769 else if (tree_expr_nonzero_warnv_p (op1,
15770 &sub_strict_overflow_p)
15771 && tree_expr_nonnegative_warnv_p (op1,
15772 &sub_strict_overflow_p))
15773 {
15774 if (sub_strict_overflow_p)
15775 *strict_overflow_p = true;
15776 return true;
15777 }
15778 break;
15779
15780 case BIT_IOR_EXPR:
15781 return (tree_expr_nonzero_warnv_p (op1,
15782 strict_overflow_p)
15783 || tree_expr_nonzero_warnv_p (op0,
15784 strict_overflow_p));
15785
15786 default:
15787 break;
15788 }
15789
15790 return false;
15791 }
15792
15793 /* Return true when T is an address and is known to be nonzero.
15794 For floating point we further ensure that T is not denormal.
15795 Similar logic is present in nonzero_address_p in rtlanal.c.
15796
15797 If the return value is based on the assumption that signed overflow
15798 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15799 change *STRICT_OVERFLOW_P. */
15800
15801 bool
15802 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15803 {
15804 bool sub_strict_overflow_p;
15805 switch (TREE_CODE (t))
15806 {
15807 case INTEGER_CST:
15808 return !integer_zerop (t);
15809
15810 case ADDR_EXPR:
15811 {
15812 tree base = TREE_OPERAND (t, 0);
15813
15814 if (!DECL_P (base))
15815 base = get_base_address (base);
15816
15817 if (!base)
15818 return false;
15819
15820 /* For objects in the symbol table, check whether we know they are
15821 nonzero. Don't do anything for variables and functions before the
15822 symtab is built; it is quite possible they will be declared weak later. */
15823 if (DECL_P (base) && decl_in_symtab_p (base))
15824 {
15825 struct symtab_node *symbol;
15826
15827 symbol = symtab_node::get_create (base);
15828 if (symbol)
15829 return symbol->nonzero_address ();
15830 else
15831 return false;
15832 }
15833
15834 /* Function local objects are never NULL. */
15835 if (DECL_P (base)
15836 && (DECL_CONTEXT (base)
15837 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15838 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15839 return true;
15840
15841 /* Constants are never weak. */
15842 if (CONSTANT_CLASS_P (base))
15843 return true;
15844
15845 return false;
15846 }
15847
15848 case COND_EXPR:
15849 sub_strict_overflow_p = false;
15850 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15851 &sub_strict_overflow_p)
15852 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15853 &sub_strict_overflow_p))
15854 {
15855 if (sub_strict_overflow_p)
15856 *strict_overflow_p = true;
15857 return true;
15858 }
15859 break;
15860
15861 default:
15862 break;
15863 }
15864 return false;
15865 }
15866
15867 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15868 attempt to fold the expression to a constant without modifying TYPE,
15869 OP0 or OP1.
15870
15871 If the expression could be simplified to a constant, then return
15872 the constant. If the expression would not be simplified to a
15873 constant, then return NULL_TREE. */
15874
15875 tree
15876 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15877 {
15878 tree tem = fold_binary (code, type, op0, op1);
15879 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15880 }
15881
15882 /* Given the components of a unary expression CODE, TYPE and OP0,
15883 attempt to fold the expression to a constant without modifying
15884 TYPE or OP0.
15885
15886 If the expression could be simplified to a constant, then return
15887 the constant. If the expression would not be simplified to a
15888 constant, then return NULL_TREE. */
15889
15890 tree
15891 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15892 {
15893 tree tem = fold_unary (code, type, op0);
15894 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15895 }
15896
15897 /* If EXP represents referencing an element in a constant string
15898 (either via pointer arithmetic or array indexing), return the
15899 tree representing the value accessed, otherwise return NULL. */
15900
15901 tree
15902 fold_read_from_constant_string (tree exp)
15903 {
15904 if ((TREE_CODE (exp) == INDIRECT_REF
15905 || TREE_CODE (exp) == ARRAY_REF)
15906 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15907 {
15908 tree exp1 = TREE_OPERAND (exp, 0);
15909 tree index;
15910 tree string;
15911 location_t loc = EXPR_LOCATION (exp);
15912
15913 if (TREE_CODE (exp) == INDIRECT_REF)
15914 string = string_constant (exp1, &index);
15915 else
15916 {
15917 tree low_bound = array_ref_low_bound (exp);
15918 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15919
15920 /* Optimize the special case of a zero lower bound.
15921
15922 We convert the low_bound to sizetype to avoid some problems
15923 with constant folding. (E.g. suppose the lower bound is 1,
15924 and its mode is QI. Without the conversion, (ARRAY
15925 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15926 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15927 if (! integer_zerop (low_bound))
15928 index = size_diffop_loc (loc, index,
15929 fold_convert_loc (loc, sizetype, low_bound));
15930
15931 string = exp1;
15932 }
15933
15934 if (string
15935 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15936 && TREE_CODE (string) == STRING_CST
15937 && TREE_CODE (index) == INTEGER_CST
15938 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15939 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15940 == MODE_INT)
15941 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15942 return build_int_cst_type (TREE_TYPE (exp),
15943 (TREE_STRING_POINTER (string)
15944 [TREE_INT_CST_LOW (index)]));
15945 }
15946 return NULL;
15947 }
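
/* For example, for the C expression "abc"[1], EXP is an ARRAY_REF of a
   STRING_CST with index 1, and the result is the INTEGER_CST 'b' in
   the element type; a non-constant or out-of-range index yields NULL.  */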
15948
15949 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15950 an integer constant, real, or fixed-point constant.
15951
15952 TYPE is the type of the result. */
15953
15954 static tree
15955 fold_negate_const (tree arg0, tree type)
15956 {
15957 tree t = NULL_TREE;
15958
15959 switch (TREE_CODE (arg0))
15960 {
15961 case INTEGER_CST:
15962 {
15963 bool overflow;
15964 wide_int val = wi::neg (arg0, &overflow);
15965 t = force_fit_type (type, val, 1,
15966 (overflow | TREE_OVERFLOW (arg0))
15967 && !TYPE_UNSIGNED (type));
15968 break;
15969 }
15970
15971 case REAL_CST:
15972 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15973 break;
15974
15975 case FIXED_CST:
15976 {
15977 FIXED_VALUE_TYPE f;
15978 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15979 &(TREE_FIXED_CST (arg0)), NULL,
15980 TYPE_SATURATING (type));
15981 t = build_fixed (type, f);
15982 /* Propagate overflow flags. */
15983 if (overflow_p | TREE_OVERFLOW (arg0))
15984 TREE_OVERFLOW (t) = 1;
15985 break;
15986 }
15987
15988 default:
15989 gcc_unreachable ();
15990 }
15991
15992 return t;
15993 }
15994
15995 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15996 an integer constant or real constant.
15997
15998 TYPE is the type of the result. */
15999
16000 tree
16001 fold_abs_const (tree arg0, tree type)
16002 {
16003 tree t = NULL_TREE;
16004
16005 switch (TREE_CODE (arg0))
16006 {
16007 case INTEGER_CST:
16008 {
16009 /* If the value is unsigned or non-negative, then the absolute value
16010 is the same as the ordinary value. */
16011 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
16012 t = arg0;
16013
16014 /* If the value is negative, then the absolute value is
16015 its negation. */
16016 else
16017 {
16018 bool overflow;
16019 wide_int val = wi::neg (arg0, &overflow);
16020 t = force_fit_type (type, val, -1,
16021 overflow | TREE_OVERFLOW (arg0));
16022 }
16023 }
16024 break;
16025
16026 case REAL_CST:
16027 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16028 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16029 else
16030 t = arg0;
16031 break;
16032
16033 default:
16034 gcc_unreachable ();
16035 }
16036
16037 return t;
16038 }
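
/* Note that the INTEGER_CST path can set TREE_OVERFLOW: the absolute
   value of INT_MIN in a 32-bit signed type negates back to INT_MIN,
   and force_fit_type marks the resulting constant as overflowed.  */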
16039
16040 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16041 constant. TYPE is the type of the result. */
16042
16043 static tree
16044 fold_not_const (const_tree arg0, tree type)
16045 {
16046 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16047
16048 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
16049 }
16050
16051 /* Given CODE, a relational operator, the target type, TYPE and two
16052 constant operands OP0 and OP1, return the result of the
16053 relational operation. If the result is not a compile time
16054 constant, then return NULL_TREE. */
16055
16056 static tree
16057 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16058 {
16059 int result, invert;
16060
16061 /* From here on, the only cases we handle are when the result is
16062 known to be a constant. */
16063
16064 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16065 {
16066 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16067 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16068
16069 /* Handle the cases where either operand is a NaN. */
16070 if (real_isnan (c0) || real_isnan (c1))
16071 {
16072 switch (code)
16073 {
16074 case EQ_EXPR:
16075 case ORDERED_EXPR:
16076 result = 0;
16077 break;
16078
16079 case NE_EXPR:
16080 case UNORDERED_EXPR:
16081 case UNLT_EXPR:
16082 case UNLE_EXPR:
16083 case UNGT_EXPR:
16084 case UNGE_EXPR:
16085 case UNEQ_EXPR:
16086 result = 1;
16087 break;
16088
16089 case LT_EXPR:
16090 case LE_EXPR:
16091 case GT_EXPR:
16092 case GE_EXPR:
16093 case LTGT_EXPR:
16094 if (flag_trapping_math)
16095 return NULL_TREE;
16096 result = 0;
16097 break;
16098
16099 default:
16100 gcc_unreachable ();
16101 }
16102
16103 return constant_boolean_node (result, type);
16104 }
16105
16106 return constant_boolean_node (real_compare (code, c0, c1), type);
16107 }
16108
16109 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16110 {
16111 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16112 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16113 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16114 }
16115
16116 /* Handle equality/inequality of complex constants. */
16117 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16118 {
16119 tree rcond = fold_relational_const (code, type,
16120 TREE_REALPART (op0),
16121 TREE_REALPART (op1));
16122 tree icond = fold_relational_const (code, type,
16123 TREE_IMAGPART (op0),
16124 TREE_IMAGPART (op1));
16125 if (code == EQ_EXPR)
16126 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16127 else if (code == NE_EXPR)
16128 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16129 else
16130 return NULL_TREE;
16131 }
16132
16133 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16134 {
16135 unsigned count = VECTOR_CST_NELTS (op0);
16136 tree *elts = XALLOCAVEC (tree, count);
16137 gcc_assert (VECTOR_CST_NELTS (op1) == count
16138 && TYPE_VECTOR_SUBPARTS (type) == count);
16139
16140 for (unsigned i = 0; i < count; i++)
16141 {
16142 tree elem_type = TREE_TYPE (type);
16143 tree elem0 = VECTOR_CST_ELT (op0, i);
16144 tree elem1 = VECTOR_CST_ELT (op1, i);
16145
16146 tree tem = fold_relational_const (code, elem_type,
16147 elem0, elem1);
16148
16149 if (tem == NULL_TREE)
16150 return NULL_TREE;
16151
16152 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16153 }
16154
16155 return build_vector (type, elts);
16156 }
16157
16158 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16159
16160 To compute GT, swap the arguments and do LT.
16161 To compute GE, do LT and invert the result.
16162 To compute LE, swap the arguments, do LT and invert the result.
16163 To compute NE, do EQ and invert the result.
16164
16165 Therefore, the code below must handle only EQ and LT. */
16166
16167 if (code == LE_EXPR || code == GT_EXPR)
16168 {
16169 tree tem = op0;
16170 op0 = op1;
16171 op1 = tem;
16172 code = swap_tree_comparison (code);
16173 }
16174
16175 /* Note that it is safe to invert for real values here because we
16176 have already handled the one case where it matters. */
16177
16178 invert = 0;
16179 if (code == NE_EXPR || code == GE_EXPR)
16180 {
16181 invert = 1;
16182 code = invert_tree_comparison (code, false);
16183 }
16184
16185 /* Compute a result for LT or EQ if the arguments permit;
16186 otherwise return NULL_TREE. */
16187 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16188 {
16189 if (code == EQ_EXPR)
16190 result = tree_int_cst_equal (op0, op1);
16191 else
16192 result = tree_int_cst_lt (op0, op1);
16193 }
16194 else
16195 return NULL_TREE;
16196
16197 if (invert)
16198 result ^= 1;
16199 return constant_boolean_node (result, type);
16200 }
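
/* E.g. for INTEGER_CSTs op0 = 3 and op1 = 5, GE_EXPR is computed as
   the inversion of LT_EXPR: 3 < 5 is true, so the inverted result is
   constant_boolean_node (0, type).  */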
16201
16202 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16203 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16204 itself. */
16205
16206 tree
16207 fold_build_cleanup_point_expr (tree type, tree expr)
16208 {
16209 /* If the expression does not have side effects then we don't have to wrap
16210 it with a cleanup point expression. */
16211 if (!TREE_SIDE_EFFECTS (expr))
16212 return expr;
16213
16214 /* If the expression is a return, check whether the expression inside
16215 the return, or the right-hand side of the modify expression inside
16216 the return, has no side effects. If either has none, we don't need
16217 to wrap the expression in a cleanup point expression. Note we don't
16218 check the left-hand side of the modify because it should always be a return decl. */
16219 if (TREE_CODE (expr) == RETURN_EXPR)
16220 {
16221 tree op = TREE_OPERAND (expr, 0);
16222 if (!op || !TREE_SIDE_EFFECTS (op))
16223 return expr;
16224 op = TREE_OPERAND (op, 1);
16225 if (!TREE_SIDE_EFFECTS (op))
16226 return expr;
16227 }
16228
16229 return build1 (CLEANUP_POINT_EXPR, type, expr);
16230 }
16231
16232 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16233 of an indirection through OP0, or NULL_TREE if no simplification is
16234 possible. */
16235
16236 tree
16237 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16238 {
16239 tree sub = op0;
16240 tree subtype;
16241
16242 STRIP_NOPS (sub);
16243 subtype = TREE_TYPE (sub);
16244 if (!POINTER_TYPE_P (subtype))
16245 return NULL_TREE;
16246
16247 if (TREE_CODE (sub) == ADDR_EXPR)
16248 {
16249 tree op = TREE_OPERAND (sub, 0);
16250 tree optype = TREE_TYPE (op);
16251 /* *&CONST_DECL -> to the value of the const decl. */
16252 if (TREE_CODE (op) == CONST_DECL)
16253 return DECL_INITIAL (op);
16254 /* *&p => p; make sure to handle *&"str"[cst] here. */
16255 if (type == optype)
16256 {
16257 tree fop = fold_read_from_constant_string (op);
16258 if (fop)
16259 return fop;
16260 else
16261 return op;
16262 }
16263 /* *(foo *)&fooarray => fooarray[0] */
16264 else if (TREE_CODE (optype) == ARRAY_TYPE
16265 && type == TREE_TYPE (optype)
16266 && (!in_gimple_form
16267 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16268 {
16269 tree type_domain = TYPE_DOMAIN (optype);
16270 tree min_val = size_zero_node;
16271 if (type_domain && TYPE_MIN_VALUE (type_domain))
16272 min_val = TYPE_MIN_VALUE (type_domain);
16273 if (in_gimple_form
16274 && TREE_CODE (min_val) != INTEGER_CST)
16275 return NULL_TREE;
16276 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16277 NULL_TREE, NULL_TREE);
16278 }
16279 /* *(foo *)&complexfoo => __real__ complexfoo */
16280 else if (TREE_CODE (optype) == COMPLEX_TYPE
16281 && type == TREE_TYPE (optype))
16282 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16283 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16284 else if (TREE_CODE (optype) == VECTOR_TYPE
16285 && type == TREE_TYPE (optype))
16286 {
16287 tree part_width = TYPE_SIZE (type);
16288 tree index = bitsize_int (0);
16289 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16290 }
16291 }
16292
16293 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16294 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16295 {
16296 tree op00 = TREE_OPERAND (sub, 0);
16297 tree op01 = TREE_OPERAND (sub, 1);
16298
16299 STRIP_NOPS (op00);
16300 if (TREE_CODE (op00) == ADDR_EXPR)
16301 {
16302 tree op00type;
16303 op00 = TREE_OPERAND (op00, 0);
16304 op00type = TREE_TYPE (op00);
16305
16306 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16307 if (TREE_CODE (op00type) == VECTOR_TYPE
16308 && type == TREE_TYPE (op00type))
16309 {
16310 HOST_WIDE_INT offset = tree_to_shwi (op01);
16311 tree part_width = TYPE_SIZE (type);
16312 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16313 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16314 tree index = bitsize_int (indexi);
16315
16316 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16317 return fold_build3_loc (loc,
16318 BIT_FIELD_REF, type, op00,
16319 part_width, index);
16320
16321 }
16322 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16323 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16324 && type == TREE_TYPE (op00type))
16325 {
16326 tree size = TYPE_SIZE_UNIT (type);
16327 if (tree_int_cst_equal (size, op01))
16328 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16329 }
16330 /* ((foo *)&fooarray)[1] => fooarray[1] */
16331 else if (TREE_CODE (op00type) == ARRAY_TYPE
16332 && type == TREE_TYPE (op00type))
16333 {
16334 tree type_domain = TYPE_DOMAIN (op00type);
16335 tree min_val = size_zero_node;
16336 if (type_domain && TYPE_MIN_VALUE (type_domain))
16337 min_val = TYPE_MIN_VALUE (type_domain);
16338 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16339 TYPE_SIZE_UNIT (type));
16340 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16341 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16342 NULL_TREE, NULL_TREE);
16343 }
16344 }
16345 }
16346
16347 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16348 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16349 && type == TREE_TYPE (TREE_TYPE (subtype))
16350 && (!in_gimple_form
16351 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16352 {
16353 tree type_domain;
16354 tree min_val = size_zero_node;
16355 sub = build_fold_indirect_ref_loc (loc, sub);
16356 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16357 if (type_domain && TYPE_MIN_VALUE (type_domain))
16358 min_val = TYPE_MIN_VALUE (type_domain);
16359 if (in_gimple_form
16360 && TREE_CODE (min_val) != INTEGER_CST)
16361 return NULL_TREE;
16362 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16363 NULL_TREE);
16364 }
16365
16366 return NULL_TREE;
16367 }
16368
16369 /* Builds an expression for an indirection through T, simplifying some
16370 cases. */
16371
16372 tree
16373 build_fold_indirect_ref_loc (location_t loc, tree t)
16374 {
16375 tree type = TREE_TYPE (TREE_TYPE (t));
16376 tree sub = fold_indirect_ref_1 (loc, type, t);
16377
16378 if (sub)
16379 return sub;
16380
16381 return build1_loc (loc, INDIRECT_REF, type, t);
16382 }
16383
16384 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16385
16386 tree
16387 fold_indirect_ref_loc (location_t loc, tree t)
16388 {
16389 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16390
16391 if (sub)
16392 return sub;
16393 else
16394 return t;
16395 }
16396
16397 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16398 whose result is ignored. The type of the returned tree need not be
16399 the same as the original expression. */
16400
16401 tree
16402 fold_ignored_result (tree t)
16403 {
16404 if (!TREE_SIDE_EFFECTS (t))
16405 return integer_zero_node;
16406
16407 for (;;)
16408 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16409 {
16410 case tcc_unary:
16411 t = TREE_OPERAND (t, 0);
16412 break;
16413
16414 case tcc_binary:
16415 case tcc_comparison:
16416 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16417 t = TREE_OPERAND (t, 0);
16418 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16419 t = TREE_OPERAND (t, 1);
16420 else
16421 return t;
16422 break;
16423
16424 case tcc_expression:
16425 switch (TREE_CODE (t))
16426 {
16427 case COMPOUND_EXPR:
16428 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16429 return t;
16430 t = TREE_OPERAND (t, 0);
16431 break;
16432
16433 case COND_EXPR:
16434 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16435 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16436 return t;
16437 t = TREE_OPERAND (t, 0);
16438 break;
16439
16440 default:
16441 return t;
16442 }
16443 break;
16444
16445 default:
16446 return t;
16447 }
16448 }
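
/* For instance, when the value of f () + 1 is ignored, the PLUS_EXPR
   is peeled off (the constant operand has no side effects) and just
   the call f () is returned.  */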
16449
16450 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16451
16452 tree
16453 round_up_loc (location_t loc, tree value, unsigned int divisor)
16454 {
16455 tree div = NULL_TREE;
16456
16457 if (divisor == 1)
16458 return value;
16459
16460 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16461 have to do anything. Only do this when VALUE is not itself a
16462 constant, because for a constant this check is more expensive than
16463 simply doing the rounding. */
16464 if (TREE_CODE (value) != INTEGER_CST)
16465 {
16466 div = build_int_cst (TREE_TYPE (value), divisor);
16467
16468 if (multiple_of_p (TREE_TYPE (value), value, div))
16469 return value;
16470 }
16471
16472 /* If divisor is a power of two, simplify this to bit manipulation. */
16473 if (divisor == (divisor & -divisor))
16474 {
16475 if (TREE_CODE (value) == INTEGER_CST)
16476 {
16477 wide_int val = value;
16478 bool overflow_p;
16479
16480 if ((val & (divisor - 1)) == 0)
16481 return value;
16482
16483 overflow_p = TREE_OVERFLOW (value);
16484 val &= ~(divisor - 1);
16485 val += divisor;
16486 if (val == 0)
16487 overflow_p = true;
16488
16489 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16490 }
16491 else
16492 {
16493 tree t;
16494
16495 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16496 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16497 t = build_int_cst (TREE_TYPE (value), -divisor);
16498 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16499 }
16500 }
16501 else
16502 {
16503 if (!div)
16504 div = build_int_cst (TREE_TYPE (value), divisor);
16505 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16506 value = size_binop_loc (loc, MULT_EXPR, value, div);
16507 }
16508
16509 return value;
16510 }
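
/* The power-of-two path above is the usual (VALUE + DIVISOR - 1)
   & -DIVISOR trick: e.g. rounding 21 up to a multiple of 8 computes
   (21 + 7) & ~7 == 24.  */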
16511
16512 /* Likewise, but round down. */
16513
16514 tree
16515 round_down_loc (location_t loc, tree value, int divisor)
16516 {
16517 tree div = NULL_TREE;
16518
16519 gcc_assert (divisor > 0);
16520 if (divisor == 1)
16521 return value;
16522
16523 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16524 have to do anything. Only do this when VALUE is not itself a
16525 constant, because for a constant this check is more expensive than
16526 simply doing the rounding. */
16527 if (TREE_CODE (value) != INTEGER_CST)
16528 {
16529 div = build_int_cst (TREE_TYPE (value), divisor);
16530
16531 if (multiple_of_p (TREE_TYPE (value), value, div))
16532 return value;
16533 }
16534
16535 /* If divisor is a power of two, simplify this to bit manipulation. */
16536 if (divisor == (divisor & -divisor))
16537 {
16538 tree t;
16539
16540 t = build_int_cst (TREE_TYPE (value), -divisor);
16541 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16542 }
16543 else
16544 {
16545 if (!div)
16546 div = build_int_cst (TREE_TYPE (value), divisor);
16547 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16548 value = size_binop_loc (loc, MULT_EXPR, value, div);
16549 }
16550
16551 return value;
16552 }
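
/* Here the power-of-two path is simply VALUE & -DIVISOR: e.g. rounding
   21 down to a multiple of 8 computes 21 & ~7 == 16.  */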
16553
16554 /* Returns a pointer to the base of the object addressed by EXP and
16555 extracts the information about the offset of the access, storing it
16556 in *PBITPOS and *POFFSET. */
16557
16558 static tree
16559 split_address_to_core_and_offset (tree exp,
16560 HOST_WIDE_INT *pbitpos, tree *poffset)
16561 {
16562 tree core;
16563 enum machine_mode mode;
16564 int unsignedp, volatilep;
16565 HOST_WIDE_INT bitsize;
16566 location_t loc = EXPR_LOCATION (exp);
16567
16568 if (TREE_CODE (exp) == ADDR_EXPR)
16569 {
16570 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16571 poffset, &mode, &unsignedp, &volatilep,
16572 false);
16573 core = build_fold_addr_expr_loc (loc, core);
16574 }
16575 else
16576 {
16577 core = exp;
16578 *pbitpos = 0;
16579 *poffset = NULL_TREE;
16580 }
16581
16582 return core;
16583 }
16584
16585 /* Returns true if addresses of E1 and E2 differ by a constant, false
16586 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16587
16588 bool
16589 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16590 {
16591 tree core1, core2;
16592 HOST_WIDE_INT bitpos1, bitpos2;
16593 tree toffset1, toffset2, tdiff, type;
16594
16595 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16596 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16597
16598 if (bitpos1 % BITS_PER_UNIT != 0
16599 || bitpos2 % BITS_PER_UNIT != 0
16600 || !operand_equal_p (core1, core2, 0))
16601 return false;
16602
16603 if (toffset1 && toffset2)
16604 {
16605 type = TREE_TYPE (toffset1);
16606 if (type != TREE_TYPE (toffset2))
16607 toffset2 = fold_convert (type, toffset2);
16608
16609 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16610 if (!cst_and_fits_in_hwi (tdiff))
16611 return false;
16612
16613 *diff = int_cst_value (tdiff);
16614 }
16615 else if (toffset1 || toffset2)
16616 {
16617 /* If only one of the offsets is non-constant, the difference cannot
16618 be a constant. */
16619 return false;
16620 }
16621 else
16622 *diff = 0;
16623
16624 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16625 return true;
16626 }
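
/* For example, for E1 = &a[3] and E2 = &a[1] with 4-byte elements,
   both cores are &a, both offsets fold into the constant bit
   positions, and *DIFF is set to 8.  */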
16627
16628 /* Simplify the floating point expression EXP when the sign of the
16629 result is not significant. Return NULL_TREE if no simplification
16630 is possible. */
16631
16632 tree
16633 fold_strip_sign_ops (tree exp)
16634 {
16635 tree arg0, arg1;
16636 location_t loc = EXPR_LOCATION (exp);
16637
16638 switch (TREE_CODE (exp))
16639 {
16640 case ABS_EXPR:
16641 case NEGATE_EXPR:
16642 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16643 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16644
16645 case MULT_EXPR:
16646 case RDIV_EXPR:
16647 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16648 return NULL_TREE;
16649 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16650 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16651 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16652 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16653 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16654 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16655 break;
16656
16657 case COMPOUND_EXPR:
16658 arg0 = TREE_OPERAND (exp, 0);
16659 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16660 if (arg1)
16661 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16662 break;
16663
16664 case COND_EXPR:
16665 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16666 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16667 if (arg0 || arg1)
16668 return fold_build3_loc (loc,
16669 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16670 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16671 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16672 break;
16673
16674 case CALL_EXPR:
16675 {
16676 const enum built_in_function fcode = builtin_mathfn_code (exp);
16677 switch (fcode)
16678 {
16679 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16680 /* Strip copysign function call, return the 1st argument. */
16681 arg0 = CALL_EXPR_ARG (exp, 0);
16682 arg1 = CALL_EXPR_ARG (exp, 1);
16683 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16684
16685 default:
16686 /* Strip sign ops from the argument of "odd" math functions. */
16687 if (negate_mathfn_p (fcode))
16688 {
16689 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16690 if (arg0)
16691 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16692 }
16693 break;
16694 }
16695 }
16696 break;
16697
16698 default:
16699 break;
16700 }
16701 return NULL_TREE;
16702 }