[Vectorizer] Make REDUC_xxx_EXPR tree codes produce a scalar result
gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "cgraph.h"
#include "generic-match.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

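/* Illustrative note (not in the original source): the encoding packs
   the four possible outcomes of a comparison into four bits -- LT is
   bit 0, EQ is bit 1, GT is bit 2 and UNORDERED is bit 3.  A comparison
   holds iff the bit for the actual outcome is set, so combining two
   comparisons on the same operands reduces to bitwise arithmetic on
   the codes, e.g.:

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ                 (3 == 1 | 2)
     COMPCODE_ORD  == COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT   (7)
     COMPCODE_UNGE == COMPCODE_UNORD | COMPCODE_GE              (14 == 8 | 6)

   ANDing two predicates ANDs their codes; ORing them ORs the codes.  */
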
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
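
/* Illustrative examples (not in the original source), where the integer
   arguments stand for INTEGER_CST trees of the same type:

     div_if_zero_remainder (12, 4)  ==> 3
     div_if_zero_remainder (12, 5)  ==> NULL_TREE (remainder is 2)  */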
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
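
/* Illustrative usage sketch (not in the original source): a caller that
   wants to fold speculatively, and only warn about reliance on undefined
   signed overflow if it actually keeps the result, would do

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool used = folded != NULL_TREE && TREE_CODE (folded) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, stmt, 0);

   where `expr' and `stmt' stand for the caller's tree and gimple
   statement.  */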
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
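
/* Illustrative examples (not in the original source), assuming `a' and
   `b' are signed ints under default signed-overflow-is-undefined
   semantics:

     negate_expr_p (-a)       ==> true   (NEGATE_EXPR is always cheap)
     negate_expr_p (a - b)    ==> true   (-(a - b) becomes b - a)
     negate_expr_p (INT_MIN)  ==> false  (-INT_MIN overflows)

   The predicate is deliberately conservative: returning false merely
   means fold will not attempt the transformation.  */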

/* Given an expression T, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.  If negate_expr_p would return true
   for T, NULL_TREE is never returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
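
/* Illustrative examples (not in the original source), for integral
   operands:

     negate_expr (a - b)   ==> b - a    (folded by fold_negate_expr)
     negate_expr (a * -b)  ==> a * b    (negation absorbed by an operand)
     negate_expr (x)       ==> -x       (plain NEGATE_EXPR fallback)  */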
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
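
/* Illustrative example (not in the original source): splitting
   IN = (x + 4) with CODE = PLUS_EXPR yields

     return value (variable part) : x
     *conp (TREE_CONSTANT part)   : 0
     *litp (literal part)         : 4
     *minus_litp                  : 0

   while IN = (x - 4) puts the 4 in *minus_litp instead, since the
   literal was subtracted.  */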

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
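
/* Illustrative examples (not in the original source), with integer
   arguments standing for INTEGER_CST trees:

     int_const_binop (PLUS_EXPR, 2, 3)        ==> 5
     int_const_binop (TRUNC_DIV_EXPR, 7, 2)   ==> 3
     int_const_binop (TRUNC_DIV_EXPR, 7, 0)   ==> NULL_TREE
     int_const_binop (PLUS_EXPR, INT_MAX, 1)  ==> wrapped result with
                                                  TREE_OVERFLOW set  */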

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fall through.  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, the compiler always emits VEC_RSHIFT_EXPR;
             for !BYTES_BIG_ENDIAN it picks the first vector element,
             but for BYTES_BIG_ENDIAN the last element of the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
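
/* Illustrative examples (not in the original source).  Element-wise
   folding of vector constants:

     const_binop (PLUS_EXPR, {1,2,3,4}, {10,20,30,40})  ==> {11,22,33,44}

   and a whole-vector shift, with a shift amount that is a multiple of
   the element size (here 32-bit elements):

     const_binop (VEC_RSHIFT_EXPR, {1,2,3,4}, 32)  ==> {2,3,4,0}

   on !BYTES_BIG_ENDIAN targets, per the endianness rule above.  */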

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
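
/* Illustrative examples (not in the original source):

     size_binop (PLUS_EXPR, size_int (4), size_int (8))   ==> 12
     size_binop (MULT_EXPR, size_one_node, size_int (8))  ==> 8

   both of type sizetype; the second form returns its operand without
   building a new node.  */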

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
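
/* Illustrative examples (not in the original source), with sizetype
   operands:

     size_diffop (size_int (8), size_int (3))  ==> 5   (type ssizetype)
     size_diffop (size_int (3), size_int (8))  ==> -5  (type ssizetype)

   The second case is computed as -(8 - 3) so that the unsigned
   subtraction itself never wraps.  */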
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification:
     IEEE NaNs are mapped to zero and values that overflow the target
     precision saturate, i.e. values greater than INT_MAX are mapped
     to INT_MAX, and values less than INT_MIN are mapped to INT_MIN.
     These semantics are allowed by the C and C++ standards, which
     simply state that the behavior of FP-to-integer conversion is
     unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
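
/* Illustrative examples (not in the original source), converting to a
   32-bit signed int:

     (int) 1.9e0   ==> 1        (truncation toward zero)
     (int) 1.0e30  ==> INT_MAX  (saturates; TREE_OVERFLOW set)
     (int) NaN     ==> 0        (TREE_OVERFLOW set)  */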

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any fractional bits were truncated away, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
1864
1865 /* Construct a vector of zero elements of vector type TYPE. */
1866
1867 static tree
1868 build_zero_vector (tree type)
1869 {
1870 tree t;
1871
1872 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1873 return build_vector_from_val (type, t);
1874 }
1875
1876 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1877
1878 bool
1879 fold_convertible_p (const_tree type, const_tree arg)
1880 {
1881 tree orig = TREE_TYPE (arg);
1882
1883 if (type == orig)
1884 return true;
1885
1886 if (TREE_CODE (arg) == ERROR_MARK
1887 || TREE_CODE (type) == ERROR_MARK
1888 || TREE_CODE (orig) == ERROR_MARK)
1889 return false;
1890
1891 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1892 return true;
1893
1894 switch (TREE_CODE (type))
1895 {
1896 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1897 case POINTER_TYPE: case REFERENCE_TYPE:
1898 case OFFSET_TYPE:
1899 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1900 || TREE_CODE (orig) == OFFSET_TYPE)
1901 return true;
1902 return (TREE_CODE (orig) == VECTOR_TYPE
1903 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1904
1905 case REAL_TYPE:
1906 case FIXED_POINT_TYPE:
1907 case COMPLEX_TYPE:
1908 case VECTOR_TYPE:
1909 case VOID_TYPE:
1910 return TREE_CODE (type) == TREE_CODE (orig);
1911
1912 default:
1913 return false;
1914 }
1915 }
1916
1917 /* Convert expression ARG to type TYPE. Used by the middle-end for
1918 simple conversions in preference to calling the front-end's convert. */
1919
1920 tree
1921 fold_convert_loc (location_t loc, tree type, tree arg)
1922 {
1923 tree orig = TREE_TYPE (arg);
1924 tree tem;
1925
1926 if (type == orig)
1927 return arg;
1928
1929 if (TREE_CODE (arg) == ERROR_MARK
1930 || TREE_CODE (type) == ERROR_MARK
1931 || TREE_CODE (orig) == ERROR_MARK)
1932 return error_mark_node;
1933
1934 switch (TREE_CODE (type))
1935 {
1936 case POINTER_TYPE:
1937 case REFERENCE_TYPE:
1938 /* Handle conversions between pointers to different address spaces. */
1939 if (POINTER_TYPE_P (orig)
1940 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1941 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1942 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1943 /* fall through */
1944
1945 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1946 case OFFSET_TYPE:
1947 if (TREE_CODE (arg) == INTEGER_CST)
1948 {
1949 tem = fold_convert_const (NOP_EXPR, type, arg);
1950 if (tem != NULL_TREE)
1951 return tem;
1952 }
1953 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1954 || TREE_CODE (orig) == OFFSET_TYPE)
1955 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1956 if (TREE_CODE (orig) == COMPLEX_TYPE)
1957 return fold_convert_loc (loc, type,
1958 fold_build1_loc (loc, REALPART_EXPR,
1959 TREE_TYPE (orig), arg));
1960 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1961 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1962 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1963
1964 case REAL_TYPE:
1965 if (TREE_CODE (arg) == INTEGER_CST)
1966 {
1967 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1968 if (tem != NULL_TREE)
1969 return tem;
1970 }
1971 else if (TREE_CODE (arg) == REAL_CST)
1972 {
1973 tem = fold_convert_const (NOP_EXPR, type, arg);
1974 if (tem != NULL_TREE)
1975 return tem;
1976 }
1977 else if (TREE_CODE (arg) == FIXED_CST)
1978 {
1979 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1980 if (tem != NULL_TREE)
1981 return tem;
1982 }
1983
1984 switch (TREE_CODE (orig))
1985 {
1986 case INTEGER_TYPE:
1987 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1988 case POINTER_TYPE: case REFERENCE_TYPE:
1989 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1990
1991 case REAL_TYPE:
1992 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1993
1994 case FIXED_POINT_TYPE:
1995 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1996
1997 case COMPLEX_TYPE:
1998 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1999 return fold_convert_loc (loc, type, tem);
2000
2001 default:
2002 gcc_unreachable ();
2003 }
2004
2005 case FIXED_POINT_TYPE:
2006 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2007 || TREE_CODE (arg) == REAL_CST)
2008 {
2009 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2010 if (tem != NULL_TREE)
2011 goto fold_convert_exit;
2012 }
2013
2014 switch (TREE_CODE (orig))
2015 {
2016 case FIXED_POINT_TYPE:
2017 case INTEGER_TYPE:
2018 case ENUMERAL_TYPE:
2019 case BOOLEAN_TYPE:
2020 case REAL_TYPE:
2021 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2022
2023 case COMPLEX_TYPE:
2024 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2025 return fold_convert_loc (loc, type, tem);
2026
2027 default:
2028 gcc_unreachable ();
2029 }
2030
2031 case COMPLEX_TYPE:
2032 switch (TREE_CODE (orig))
2033 {
2034 case INTEGER_TYPE:
2035 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2036 case POINTER_TYPE: case REFERENCE_TYPE:
2037 case REAL_TYPE:
2038 case FIXED_POINT_TYPE:
2039 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2040 fold_convert_loc (loc, TREE_TYPE (type), arg),
2041 fold_convert_loc (loc, TREE_TYPE (type),
2042 integer_zero_node));
2043 case COMPLEX_TYPE:
2044 {
2045 tree rpart, ipart;
2046
2047 if (TREE_CODE (arg) == COMPLEX_EXPR)
2048 {
2049 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2050 TREE_OPERAND (arg, 0));
2051 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2052 TREE_OPERAND (arg, 1));
2053 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2054 }
2055
2056 arg = save_expr (arg);
2057 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2058 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2059 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2060 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2061 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2062 }
2063
2064 default:
2065 gcc_unreachable ();
2066 }
2067
2068 case VECTOR_TYPE:
2069 if (integer_zerop (arg))
2070 return build_zero_vector (type);
2071 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2072 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2073 || TREE_CODE (orig) == VECTOR_TYPE);
2074 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2075
2076 case VOID_TYPE:
2077 tem = fold_ignored_result (arg);
2078 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2079
2080 default:
2081 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2082 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2083 gcc_unreachable ();
2084 }
2085 fold_convert_exit:
2086 protected_set_expr_location_unshare (tem, loc);
2087 return tem;
2088 }
2089 \f
2090 /* Return false if expr can be assumed not to be an lvalue, true
2091 otherwise. */
2092
2093 static bool
2094 maybe_lvalue_p (const_tree x)
2095 {
2096 /* We only need to wrap lvalue tree codes. */
2097 switch (TREE_CODE (x))
2098 {
2099 case VAR_DECL:
2100 case PARM_DECL:
2101 case RESULT_DECL:
2102 case LABEL_DECL:
2103 case FUNCTION_DECL:
2104 case SSA_NAME:
2105
2106 case COMPONENT_REF:
2107 case MEM_REF:
2108 case INDIRECT_REF:
2109 case ARRAY_REF:
2110 case ARRAY_RANGE_REF:
2111 case BIT_FIELD_REF:
2112 case OBJ_TYPE_REF:
2113
2114 case REALPART_EXPR:
2115 case IMAGPART_EXPR:
2116 case PREINCREMENT_EXPR:
2117 case PREDECREMENT_EXPR:
2118 case SAVE_EXPR:
2119 case TRY_CATCH_EXPR:
2120 case WITH_CLEANUP_EXPR:
2121 case COMPOUND_EXPR:
2122 case MODIFY_EXPR:
2123 case TARGET_EXPR:
2124 case COND_EXPR:
2125 case BIND_EXPR:
2126 break;
2127
2128 default:
2129 /* Assume the worst for front-end tree codes. */
2130 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2131 break;
2132 return false;
2133 }
2134
2135 return true;
2136 }
2137
2138 /* Return an expr equal to X but certainly not valid as an lvalue. */
2139
2140 tree
2141 non_lvalue_loc (location_t loc, tree x)
2142 {
2143 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2144 us. */
2145 if (in_gimple_form)
2146 return x;
2147
2148 if (! maybe_lvalue_p (x))
2149 return x;
2150 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2151 }
2152
2153 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2154 Zero means allow extended lvalues. */
2155
2156 int pedantic_lvalues;
2157
2158 /* When pedantic, return an expr equal to X but certainly not valid as a
2159 pedantic lvalue. Otherwise, return X. */
2160
2161 static tree
2162 pedantic_non_lvalue_loc (location_t loc, tree x)
2163 {
2164 if (pedantic_lvalues)
2165 return non_lvalue_loc (loc, x);
2166
2167 return protected_set_expr_location_unshare (x, loc);
2168 }
2169 \f
2170 /* Given a tree comparison code, return the code that is the logical inverse.
2171 It is generally not safe to do this for floating-point comparisons, except
2172 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2173 ERROR_MARK in this case. */
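/* For example, the inverse of LT_EXPR is GE_EXPR when HONOR_NANS is
false, but UNGE_EXPR when it is true, since !(x < y) must also hold
when either operand is a NaN. With -ftrapping-math the UNGE form would
lose the trap on unordered operands, so ERROR_MARK is returned instead. */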
2174
2175 enum tree_code
2176 invert_tree_comparison (enum tree_code code, bool honor_nans)
2177 {
2178 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2179 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2180 return ERROR_MARK;
2181
2182 switch (code)
2183 {
2184 case EQ_EXPR:
2185 return NE_EXPR;
2186 case NE_EXPR:
2187 return EQ_EXPR;
2188 case GT_EXPR:
2189 return honor_nans ? UNLE_EXPR : LE_EXPR;
2190 case GE_EXPR:
2191 return honor_nans ? UNLT_EXPR : LT_EXPR;
2192 case LT_EXPR:
2193 return honor_nans ? UNGE_EXPR : GE_EXPR;
2194 case LE_EXPR:
2195 return honor_nans ? UNGT_EXPR : GT_EXPR;
2196 case LTGT_EXPR:
2197 return UNEQ_EXPR;
2198 case UNEQ_EXPR:
2199 return LTGT_EXPR;
2200 case UNGT_EXPR:
2201 return LE_EXPR;
2202 case UNGE_EXPR:
2203 return LT_EXPR;
2204 case UNLT_EXPR:
2205 return GE_EXPR;
2206 case UNLE_EXPR:
2207 return GT_EXPR;
2208 case ORDERED_EXPR:
2209 return UNORDERED_EXPR;
2210 case UNORDERED_EXPR:
2211 return ORDERED_EXPR;
2212 default:
2213 gcc_unreachable ();
2214 }
2215 }
2216
2217 /* Similar, but return the comparison that results if the operands are
2218 swapped. This is safe for floating-point. */
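/* For example, swapping the operands of x < y gives y > x, and UNGT
becomes UNLT; the symmetric codes (EQ, NE, ORDERED, UNORDERED, LTGT
and UNEQ) are returned unchanged. */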
2219
2220 enum tree_code
2221 swap_tree_comparison (enum tree_code code)
2222 {
2223 switch (code)
2224 {
2225 case EQ_EXPR:
2226 case NE_EXPR:
2227 case ORDERED_EXPR:
2228 case UNORDERED_EXPR:
2229 case LTGT_EXPR:
2230 case UNEQ_EXPR:
2231 return code;
2232 case GT_EXPR:
2233 return LT_EXPR;
2234 case GE_EXPR:
2235 return LE_EXPR;
2236 case LT_EXPR:
2237 return GT_EXPR;
2238 case LE_EXPR:
2239 return GE_EXPR;
2240 case UNGT_EXPR:
2241 return UNLT_EXPR;
2242 case UNGE_EXPR:
2243 return UNLE_EXPR;
2244 case UNLT_EXPR:
2245 return UNGT_EXPR;
2246 case UNLE_EXPR:
2247 return UNGE_EXPR;
2248 default:
2249 gcc_unreachable ();
2250 }
2251 }
2252
2253
2254 /* Convert a comparison tree code from an enum tree_code representation
2255 into a compcode bit-based encoding. This function is the inverse of
2256 compcode_to_comparison. */
2257
2258 static enum comparison_code
2259 comparison_to_compcode (enum tree_code code)
2260 {
2261 switch (code)
2262 {
2263 case LT_EXPR:
2264 return COMPCODE_LT;
2265 case EQ_EXPR:
2266 return COMPCODE_EQ;
2267 case LE_EXPR:
2268 return COMPCODE_LE;
2269 case GT_EXPR:
2270 return COMPCODE_GT;
2271 case NE_EXPR:
2272 return COMPCODE_NE;
2273 case GE_EXPR:
2274 return COMPCODE_GE;
2275 case ORDERED_EXPR:
2276 return COMPCODE_ORD;
2277 case UNORDERED_EXPR:
2278 return COMPCODE_UNORD;
2279 case UNLT_EXPR:
2280 return COMPCODE_UNLT;
2281 case UNEQ_EXPR:
2282 return COMPCODE_UNEQ;
2283 case UNLE_EXPR:
2284 return COMPCODE_UNLE;
2285 case UNGT_EXPR:
2286 return COMPCODE_UNGT;
2287 case LTGT_EXPR:
2288 return COMPCODE_LTGT;
2289 case UNGE_EXPR:
2290 return COMPCODE_UNGE;
2291 default:
2292 gcc_unreachable ();
2293 }
2294 }
2295
2296 /* Convert a compcode bit-based encoding of a comparison operator back
2297 to GCC's enum tree_code representation. This function is the
2298 inverse of comparison_to_compcode. */
2299
2300 static enum tree_code
2301 compcode_to_comparison (enum comparison_code code)
2302 {
2303 switch (code)
2304 {
2305 case COMPCODE_LT:
2306 return LT_EXPR;
2307 case COMPCODE_EQ:
2308 return EQ_EXPR;
2309 case COMPCODE_LE:
2310 return LE_EXPR;
2311 case COMPCODE_GT:
2312 return GT_EXPR;
2313 case COMPCODE_NE:
2314 return NE_EXPR;
2315 case COMPCODE_GE:
2316 return GE_EXPR;
2317 case COMPCODE_ORD:
2318 return ORDERED_EXPR;
2319 case COMPCODE_UNORD:
2320 return UNORDERED_EXPR;
2321 case COMPCODE_UNLT:
2322 return UNLT_EXPR;
2323 case COMPCODE_UNEQ:
2324 return UNEQ_EXPR;
2325 case COMPCODE_UNLE:
2326 return UNLE_EXPR;
2327 case COMPCODE_UNGT:
2328 return UNGT_EXPR;
2329 case COMPCODE_LTGT:
2330 return LTGT_EXPR;
2331 case COMPCODE_UNGE:
2332 return UNGE_EXPR;
2333 default:
2334 gcc_unreachable ();
2335 }
2336 }
2337
2338 /* Return a tree for the comparison which is the combination of
2339 doing the AND or OR (depending on CODE) of the two operations LCODE
2340 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2341 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2342 if this makes the transformation invalid. */
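/* For example, ORing (x < y) with (x == y) combines COMPCODE_LT and
COMPCODE_EQ into COMPCODE_LE, so the result folds to x <= y, while
ANDing (x < y) with (x > y) yields COMPCODE_FALSE and folds to a
constant false. */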
2343
2344 tree
2345 combine_comparisons (location_t loc,
2346 enum tree_code code, enum tree_code lcode,
2347 enum tree_code rcode, tree truth_type,
2348 tree ll_arg, tree lr_arg)
2349 {
2350 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2351 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2352 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2353 int compcode;
2354
2355 switch (code)
2356 {
2357 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2358 compcode = lcompcode & rcompcode;
2359 break;
2360
2361 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2362 compcode = lcompcode | rcompcode;
2363 break;
2364
2365 default:
2366 return NULL_TREE;
2367 }
2368
2369 if (!honor_nans)
2370 {
2371 /* Eliminate unordered comparisons, as well as LTGT and ORD
2372 which are not used unless the mode has NaNs. */
2373 compcode &= ~COMPCODE_UNORD;
2374 if (compcode == COMPCODE_LTGT)
2375 compcode = COMPCODE_NE;
2376 else if (compcode == COMPCODE_ORD)
2377 compcode = COMPCODE_TRUE;
2378 }
2379 else if (flag_trapping_math)
2380 {
2381 /* Check that the original operation and the optimized ones will trap
2382 under the same condition. */
2383 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2384 && (lcompcode != COMPCODE_EQ)
2385 && (lcompcode != COMPCODE_ORD);
2386 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2387 && (rcompcode != COMPCODE_EQ)
2388 && (rcompcode != COMPCODE_ORD);
2389 bool trap = (compcode & COMPCODE_UNORD) == 0
2390 && (compcode != COMPCODE_EQ)
2391 && (compcode != COMPCODE_ORD);
2392
2393 /* In a short-circuited boolean expression the LHS might be
2394 such that the RHS, if evaluated, will never trap. For
2395 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2396 if neither x nor y is NaN. (This is a mixed blessing: for
2397 example, the expression above will never trap, hence
2398 optimizing it to x < y would be invalid). */
2399 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2400 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2401 rtrap = false;
2402
2403 /* If the comparison was short-circuited, and only the RHS
2404 trapped, we may now generate a spurious trap. */
2405 if (rtrap && !ltrap
2406 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2407 return NULL_TREE;
2408
2409 /* If we changed the conditions that cause a trap, we lose. */
2410 if ((ltrap || rtrap) != trap)
2411 return NULL_TREE;
2412 }
2413
2414 if (compcode == COMPCODE_TRUE)
2415 return constant_boolean_node (true, truth_type);
2416 else if (compcode == COMPCODE_FALSE)
2417 return constant_boolean_node (false, truth_type);
2418 else
2419 {
2420 enum tree_code tcode;
2421
2422 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2423 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2424 }
2425 }
2426 \f
2427 /* Return nonzero if two operands (typically of the same tree node)
2428 are necessarily equal. If either argument has side-effects this
2429 function returns zero. FLAGS modifies behavior as follows:
2430
2431 If OEP_ONLY_CONST is set, only return nonzero for constants.
2432 This function tests whether the operands are indistinguishable;
2433 it does not test whether they are equal using C's == operation.
2434 The distinction is important for IEEE floating point, because
2435 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2436 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2437
2438 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2439 even though it may hold multiple values during a function.
2440 This is because a GCC tree node guarantees that nothing else is
2441 executed between the evaluation of its "operands" (which may often
2442 be evaluated in arbitrary order). Hence if the operands themselves
2443 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2444 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2445 unset means assuming isochronic (or instantaneous) tree equivalence.
2446 Unless comparing arbitrary expression trees, such as from different
2447 statements, this flag can usually be left unset.
2448
2449 If OEP_PURE_SAME is set, then pure functions with identical arguments
2450 are considered the same. It is used when the caller has other ways
2451 to ensure that global memory is unchanged in between. */
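/* For example, the REAL_CSTs 0.0 and -0.0 compare equal here only when
the mode does not honor signed zeros, and under OEP_ONLY_CONST a
non-constant expression such as a + b never matches, not even against
itself. */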
2452
2453 int
2454 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2455 {
2456 /* If either is ERROR_MARK, they aren't equal. */
2457 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2458 || TREE_TYPE (arg0) == error_mark_node
2459 || TREE_TYPE (arg1) == error_mark_node)
2460 return 0;
2461
2462 /* Similarly, if either does not have a type (like a released SSA name),
2463 they aren't equal. */
2464 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2465 return 0;
2466
2467 /* Check equality of integer constants before bailing out due to
2468 precision differences. */
2469 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2470 return tree_int_cst_equal (arg0, arg1);
2471
2472 /* If the two types don't have the same signedness, then we can't consider
2473 them equal. We must check this before the STRIP_NOPS calls
2474 because they may change the signedness of the arguments. As pointers
2475 strictly don't have a signedness, require either two pointers or
2476 two non-pointers as well. */
2477 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2478 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2479 return 0;
2480
2481 /* We cannot consider pointers to different address spaces equal. */
2482 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2483 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2484 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2485 return 0;
2486
2487 /* If the two types don't have the same precision, then it is not safe
2488 to strip NOPs. */
2489 if (element_precision (TREE_TYPE (arg0))
2490 != element_precision (TREE_TYPE (arg1)))
2491 return 0;
2492
2493 STRIP_NOPS (arg0);
2494 STRIP_NOPS (arg1);
2495
2496 /* In case both args are comparisons but with different comparison
2497 code, try to swap the comparison operands of one arg to produce
2498 a match and compare that variant. */
2499 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2500 && COMPARISON_CLASS_P (arg0)
2501 && COMPARISON_CLASS_P (arg1))
2502 {
2503 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2504
2505 if (TREE_CODE (arg0) == swap_code)
2506 return operand_equal_p (TREE_OPERAND (arg0, 0),
2507 TREE_OPERAND (arg1, 1), flags)
2508 && operand_equal_p (TREE_OPERAND (arg0, 1),
2509 TREE_OPERAND (arg1, 0), flags);
2510 }
2511
2512 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2513 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2514 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2515 return 0;
2516
2517 /* This is needed for conversions and for COMPONENT_REF.
2518 Might as well play it safe and always test this. */
2519 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2520 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2521 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2522 return 0;
2523
2524 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2525 We don't care about side effects in that case because the SAVE_EXPR
2526 takes care of that for us. In all other cases, two expressions are
2527 equal if they have no side effects. If we have two identical
2528 expressions with side effects that should be treated the same due
2529 to the only side effects being identical SAVE_EXPR's, that will
2530 be detected in the recursive calls below.
2531 If we are taking an invariant address of two identical objects
2532 they are necessarily equal as well. */
2533 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2534 && (TREE_CODE (arg0) == SAVE_EXPR
2535 || (flags & OEP_CONSTANT_ADDRESS_OF)
2536 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2537 return 1;
2538
2539 /* Next handle constant cases, those for which we can return 1 even
2540 if ONLY_CONST is set. */
2541 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2542 switch (TREE_CODE (arg0))
2543 {
2544 case INTEGER_CST:
2545 return tree_int_cst_equal (arg0, arg1);
2546
2547 case FIXED_CST:
2548 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2549 TREE_FIXED_CST (arg1));
2550
2551 case REAL_CST:
2552 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2553 TREE_REAL_CST (arg1)))
2554 return 1;
2555
2557 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2558 {
2559 /* If we do not distinguish between signed and unsigned zero,
2560 consider them equal. */
2561 if (real_zerop (arg0) && real_zerop (arg1))
2562 return 1;
2563 }
2564 return 0;
2565
2566 case VECTOR_CST:
2567 {
2568 unsigned i;
2569
2570 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2571 return 0;
2572
2573 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2574 {
2575 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2576 VECTOR_CST_ELT (arg1, i), flags))
2577 return 0;
2578 }
2579 return 1;
2580 }
2581
2582 case COMPLEX_CST:
2583 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2584 flags)
2585 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2586 flags));
2587
2588 case STRING_CST:
2589 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2590 && ! memcmp (TREE_STRING_POINTER (arg0),
2591 TREE_STRING_POINTER (arg1),
2592 TREE_STRING_LENGTH (arg0)));
2593
2594 case ADDR_EXPR:
2595 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2596 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2597 ? OEP_CONSTANT_ADDRESS_OF : 0);
2598 default:
2599 break;
2600 }
2601
2602 if (flags & OEP_ONLY_CONST)
2603 return 0;
2604
2605 /* Define macros to test an operand from arg0 and arg1 for equality and a
2606 variant that allows null and views null as being different from any
2607 non-null value. In the latter case, if either is null, then both
2608 must be; otherwise, do the normal comparison. */
2609 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2610 TREE_OPERAND (arg1, N), flags)
2611
2612 #define OP_SAME_WITH_NULL(N) \
2613 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2614 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2615
2616 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2617 {
2618 case tcc_unary:
2619 /* Two conversions are equal only if signedness and modes match. */
2620 switch (TREE_CODE (arg0))
2621 {
2622 CASE_CONVERT:
2623 case FIX_TRUNC_EXPR:
2624 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2625 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2626 return 0;
2627 break;
2628 default:
2629 break;
2630 }
2631
2632 return OP_SAME (0);
2633
2635 case tcc_comparison:
2636 case tcc_binary:
2637 if (OP_SAME (0) && OP_SAME (1))
2638 return 1;
2639
2640 /* For commutative ops, allow the other order. */
2641 return (commutative_tree_code (TREE_CODE (arg0))
2642 && operand_equal_p (TREE_OPERAND (arg0, 0),
2643 TREE_OPERAND (arg1, 1), flags)
2644 && operand_equal_p (TREE_OPERAND (arg0, 1),
2645 TREE_OPERAND (arg1, 0), flags));
2646
2647 case tcc_reference:
2648 /* If either of the pointer (or reference) expressions we are
2649 dereferencing contain a side effect, these cannot be equal,
2650 but their addresses can be. */
2651 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2652 && (TREE_SIDE_EFFECTS (arg0)
2653 || TREE_SIDE_EFFECTS (arg1)))
2654 return 0;
2655
2656 switch (TREE_CODE (arg0))
2657 {
2658 case INDIRECT_REF:
2659 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2660 return OP_SAME (0);
2661
2662 case REALPART_EXPR:
2663 case IMAGPART_EXPR:
2664 return OP_SAME (0);
2665
2666 case TARGET_MEM_REF:
2667 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2668 /* Require equal extra operands and then fall through to MEM_REF
2669 handling of the two common operands. */
2670 if (!OP_SAME_WITH_NULL (2)
2671 || !OP_SAME_WITH_NULL (3)
2672 || !OP_SAME_WITH_NULL (4))
2673 return 0;
2674 /* Fallthru. */
2675 case MEM_REF:
2676 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2677 /* Require equal access sizes, and similar pointer types.
2678 We can have incomplete types for array references of
2679 variable-sized arrays from the Fortran frontend
2680 though. Also verify the types are compatible. */
2681 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2682 || (TYPE_SIZE (TREE_TYPE (arg0))
2683 && TYPE_SIZE (TREE_TYPE (arg1))
2684 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2685 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2686 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2687 && alias_ptr_types_compatible_p
2688 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2689 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2690 && OP_SAME (0) && OP_SAME (1));
2691
2692 case ARRAY_REF:
2693 case ARRAY_RANGE_REF:
2694 /* Operands 2 and 3 may be null.
2695 Compare the array index by value first if it is constant, as we
2696 may have different types but the same value here. */
2697 if (!OP_SAME (0))
2698 return 0;
2699 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2700 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2701 TREE_OPERAND (arg1, 1))
2702 || OP_SAME (1))
2703 && OP_SAME_WITH_NULL (2)
2704 && OP_SAME_WITH_NULL (3));
2705
2706 case COMPONENT_REF:
2707 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2708 may be NULL when we're called to compare MEM_EXPRs. */
2709 if (!OP_SAME_WITH_NULL (0)
2710 || !OP_SAME (1))
2711 return 0;
2712 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2713 return OP_SAME_WITH_NULL (2);
2714
2715 case BIT_FIELD_REF:
2716 if (!OP_SAME (0))
2717 return 0;
2718 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2719 return OP_SAME (1) && OP_SAME (2);
2720
2721 default:
2722 return 0;
2723 }
2724
2725 case tcc_expression:
2726 switch (TREE_CODE (arg0))
2727 {
2728 case ADDR_EXPR:
2729 case TRUTH_NOT_EXPR:
2730 return OP_SAME (0);
2731
2732 case TRUTH_ANDIF_EXPR:
2733 case TRUTH_ORIF_EXPR:
2734 return OP_SAME (0) && OP_SAME (1);
2735
2736 case FMA_EXPR:
2737 case WIDEN_MULT_PLUS_EXPR:
2738 case WIDEN_MULT_MINUS_EXPR:
2739 if (!OP_SAME (2))
2740 return 0;
2741 /* The multiplication operands are commutative. */
2742 /* FALLTHRU */
2743
2744 case TRUTH_AND_EXPR:
2745 case TRUTH_OR_EXPR:
2746 case TRUTH_XOR_EXPR:
2747 if (OP_SAME (0) && OP_SAME (1))
2748 return 1;
2749
2750 /* Otherwise take into account this is a commutative operation. */
2751 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2752 TREE_OPERAND (arg1, 1), flags)
2753 && operand_equal_p (TREE_OPERAND (arg0, 1),
2754 TREE_OPERAND (arg1, 0), flags));
2755
2756 case COND_EXPR:
2757 case VEC_COND_EXPR:
2758 case DOT_PROD_EXPR:
2759 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2760
2761 default:
2762 return 0;
2763 }
2764
2765 case tcc_vl_exp:
2766 switch (TREE_CODE (arg0))
2767 {
2768 case CALL_EXPR:
2769 /* If the CALL_EXPRs call different functions, then they
2770 clearly cannot be equal. */
2771 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2772 flags))
2773 return 0;
2774
2775 {
2776 unsigned int cef = call_expr_flags (arg0);
2777 if (flags & OEP_PURE_SAME)
2778 cef &= ECF_CONST | ECF_PURE;
2779 else
2780 cef &= ECF_CONST;
2781 if (!cef)
2782 return 0;
2783 }
2784
2785 /* Now see if all the arguments are the same. */
2786 {
2787 const_call_expr_arg_iterator iter0, iter1;
2788 const_tree a0, a1;
2789 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2790 a1 = first_const_call_expr_arg (arg1, &iter1);
2791 a0 && a1;
2792 a0 = next_const_call_expr_arg (&iter0),
2793 a1 = next_const_call_expr_arg (&iter1))
2794 if (! operand_equal_p (a0, a1, flags))
2795 return 0;
2796
2797 /* If we get here and both argument lists are exhausted
2798 then the CALL_EXPRs are equal. */
2799 return ! (a0 || a1);
2800 }
2801 default:
2802 return 0;
2803 }
2804
2805 case tcc_declaration:
2806 /* Consider __builtin_sqrt equal to sqrt. */
2807 return (TREE_CODE (arg0) == FUNCTION_DECL
2808 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2809 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2810 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2811
2812 default:
2813 return 0;
2814 }
2815
2816 #undef OP_SAME
2817 #undef OP_SAME_WITH_NULL
2818 }
2819 \f
2820 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2821 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2822
2823 When in doubt, return 0. */
2824
2825 static int
2826 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2827 {
2828 int unsignedp1, unsignedpo;
2829 tree primarg0, primarg1, primother;
2830 unsigned int correct_width;
2831
2832 if (operand_equal_p (arg0, arg1, 0))
2833 return 1;
2834
2835 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2836 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2837 return 0;
2838
2839 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2840 and see if the inner values are the same. This removes any
2841 signedness comparison, which doesn't matter here. */
2842 primarg0 = arg0, primarg1 = arg1;
2843 STRIP_NOPS (primarg0);
2844 STRIP_NOPS (primarg1);
2845 if (operand_equal_p (primarg0, primarg1, 0))
2846 return 1;
2847
2848 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2849 actual comparison operand, ARG0.
2850
2851 First throw away any conversions to wider types
2852 already present in the operands. */
2853
2854 primarg1 = get_narrower (arg1, &unsignedp1);
2855 primother = get_narrower (other, &unsignedpo);
2856
2857 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2858 if (unsignedp1 == unsignedpo
2859 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2860 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2861 {
2862 tree type = TREE_TYPE (arg0);
2863
2864 /* Make sure shorter operand is extended the right way
2865 to match the longer operand. */
2866 primarg1 = fold_convert (signed_or_unsigned_type_for
2867 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2868
2869 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2870 return 1;
2871 }
2872
2873 return 0;
2874 }
2875 \f
2876 /* See if ARG is an expression that is either a comparison or is performing
2877 arithmetic on comparisons. The comparisons must only be comparing
2878 two different values, which will be stored in *CVAL1 and *CVAL2; if
2879 they are nonzero it means that some operands have already been found.
2880 No variables may be used anywhere else in the expression except in the
2881 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2882 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2883
2884 If this is true, return 1. Otherwise, return zero. */
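/* For example, (x < y) && (x == y) satisfies this predicate with
*CVAL1 == x and *CVAL2 == y, whereas (x < y) && (x == z) fails
because three distinct values appear in the comparisons. */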
2885
2886 static int
2887 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2888 {
2889 enum tree_code code = TREE_CODE (arg);
2890 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2891
2892 /* We can handle some of the tcc_expression cases here. */
2893 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2894 tclass = tcc_unary;
2895 else if (tclass == tcc_expression
2896 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2897 || code == COMPOUND_EXPR))
2898 tclass = tcc_binary;
2899
2900 else if (tclass == tcc_expression && code == SAVE_EXPR
2901 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2902 {
2903 /* If we've already found a CVAL1 or CVAL2, this expression is
2904 too complex to handle. */
2905 if (*cval1 || *cval2)
2906 return 0;
2907
2908 tclass = tcc_unary;
2909 *save_p = 1;
2910 }
2911
2912 switch (tclass)
2913 {
2914 case tcc_unary:
2915 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2916
2917 case tcc_binary:
2918 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2919 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2920 cval1, cval2, save_p));
2921
2922 case tcc_constant:
2923 return 1;
2924
2925 case tcc_expression:
2926 if (code == COND_EXPR)
2927 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2928 cval1, cval2, save_p)
2929 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2930 cval1, cval2, save_p)
2931 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2932 cval1, cval2, save_p));
2933 return 0;
2934
2935 case tcc_comparison:
2936 /* First see if we can handle the first operand, then the second. For
2937 the second operand, we know *CVAL1 can't be zero. Each of the two
2938 values must appear on one side of the comparison; test for the
2939 case where this isn't true by failing if the two operands
2940 are the same. */
2941
2942 if (operand_equal_p (TREE_OPERAND (arg, 0),
2943 TREE_OPERAND (arg, 1), 0))
2944 return 0;
2945
2946 if (*cval1 == 0)
2947 *cval1 = TREE_OPERAND (arg, 0);
2948 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2949 ;
2950 else if (*cval2 == 0)
2951 *cval2 = TREE_OPERAND (arg, 0);
2952 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2953 ;
2954 else
2955 return 0;
2956
2957 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2958 ;
2959 else if (*cval2 == 0)
2960 *cval2 = TREE_OPERAND (arg, 1);
2961 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2962 ;
2963 else
2964 return 0;
2965
2966 return 1;
2967
2968 default:
2969 return 0;
2970 }
2971 }
2972 \f
2973 /* ARG is a tree that is known to contain just arithmetic operations and
2974 comparisons. Evaluate the operations in the tree substituting NEW0 for
2975 any occurrence of OLD0 as an operand of a comparison and likewise for
2976 NEW1 and OLD1. */
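/* For example, with ARG == (x < y) || (x == y), calling eval_subst with
OLD0 == x, NEW0 == 0, OLD1 == y, NEW1 == 1 rebuilds the expression as
(0 < 1) || (0 == 1), which the fold_build* calls then reduce to a
constant. */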
2977
2978 static tree
2979 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2980 tree old1, tree new1)
2981 {
2982 tree type = TREE_TYPE (arg);
2983 enum tree_code code = TREE_CODE (arg);
2984 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2985
2986 /* We can handle some of the tcc_expression cases here. */
2987 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2988 tclass = tcc_unary;
2989 else if (tclass == tcc_expression
2990 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2991 tclass = tcc_binary;
2992
2993 switch (tclass)
2994 {
2995 case tcc_unary:
2996 return fold_build1_loc (loc, code, type,
2997 eval_subst (loc, TREE_OPERAND (arg, 0),
2998 old0, new0, old1, new1));
2999
3000 case tcc_binary:
3001 return fold_build2_loc (loc, code, type,
3002 eval_subst (loc, TREE_OPERAND (arg, 0),
3003 old0, new0, old1, new1),
3004 eval_subst (loc, TREE_OPERAND (arg, 1),
3005 old0, new0, old1, new1));
3006
3007 case tcc_expression:
3008 switch (code)
3009 {
3010 case SAVE_EXPR:
3011 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3012 old1, new1);
3013
3014 case COMPOUND_EXPR:
3015 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3016 old1, new1);
3017
3018 case COND_EXPR:
3019 return fold_build3_loc (loc, code, type,
3020 eval_subst (loc, TREE_OPERAND (arg, 0),
3021 old0, new0, old1, new1),
3022 eval_subst (loc, TREE_OPERAND (arg, 1),
3023 old0, new0, old1, new1),
3024 eval_subst (loc, TREE_OPERAND (arg, 2),
3025 old0, new0, old1, new1));
3026 default:
3027 break;
3028 }
3029 /* Fall through - ??? */
3030
3031 case tcc_comparison:
3032 {
3033 tree arg0 = TREE_OPERAND (arg, 0);
3034 tree arg1 = TREE_OPERAND (arg, 1);
3035
3036 /* We need to check both for exact equality and tree equality. The
3037 former will be true if the operand has a side-effect. In that
3038 case, we know the operand occurred exactly once. */
3039
3040 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3041 arg0 = new0;
3042 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3043 arg0 = new1;
3044
3045 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3046 arg1 = new0;
3047 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3048 arg1 = new1;
3049
3050 return fold_build2_loc (loc, code, type, arg0, arg1);
3051 }
3052
3053 default:
3054 return arg;
3055 }
3056 }
3057 \f
3058 /* Return a tree for the case when the result of an expression is RESULT
3059 converted to TYPE and OMITTED was previously an operand of the expression
3060 but is now not needed (e.g., we folded OMITTED * 0).
3061
3062 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3063 the conversion of RESULT to TYPE. */
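/* For example, when f () * 0 is folded to 0 but the call has side
effects, the result is COMPOUND_EXPR <f (), 0>, so f is still called
exactly once. */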
3064
3065 tree
3066 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3067 {
3068 tree t = fold_convert_loc (loc, type, result);
3069
3070 /* If the resulting operand is an empty statement, just return the omitted
3071 statement cast to void. */
3072 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3073 return build1_loc (loc, NOP_EXPR, void_type_node,
3074 fold_ignored_result (omitted));
3075
3076 if (TREE_SIDE_EFFECTS (omitted))
3077 return build2_loc (loc, COMPOUND_EXPR, type,
3078 fold_ignored_result (omitted), t);
3079
3080 return non_lvalue_loc (loc, t);
3081 }
3082
3083 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3084
3085 static tree
3086 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3087 tree omitted)
3088 {
3089 tree t = fold_convert_loc (loc, type, result);
3090
3091 /* If the resulting operand is an empty statement, just return the omitted
3092 statement cast to void. */
3093 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3094 return build1_loc (loc, NOP_EXPR, void_type_node,
3095 fold_ignored_result (omitted));
3096
3097 if (TREE_SIDE_EFFECTS (omitted))
3098 return build2_loc (loc, COMPOUND_EXPR, type,
3099 fold_ignored_result (omitted), t);
3100
3101 return pedantic_non_lvalue_loc (loc, t);
3102 }
3103
3104 /* Return a tree for the case when the result of an expression is RESULT
3105 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3106 of the expression but are now not needed.
3107
3108 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3109 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3110 evaluated before OMITTED2. Otherwise, if neither has side effects,
3111 just do the conversion of RESULT to TYPE. */
3112
3113 tree
3114 omit_two_operands_loc (location_t loc, tree type, tree result,
3115 tree omitted1, tree omitted2)
3116 {
3117 tree t = fold_convert_loc (loc, type, result);
3118
3119 if (TREE_SIDE_EFFECTS (omitted2))
3120 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3121 if (TREE_SIDE_EFFECTS (omitted1))
3122 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3123
3124 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3125 }
3126
3127 \f
3128 /* Return a simplified tree node for the truth-negation of ARG. This
3129 never alters ARG itself. We assume that ARG is an operation that
3130 returns a truth value (0 or 1).
3131
3132 FIXME: one would think we would fold the result, but it causes
3133 problems with the dominator optimizer. */
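/* For example, !(a && b) becomes !a || !b, !(x < y) becomes x >= y when
inverting the comparison is safe, and !(c ? a : b) becomes
c ? !a : !b, leaving any void (throwing) arm untouched. */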
3134
3135 static tree
3136 fold_truth_not_expr (location_t loc, tree arg)
3137 {
3138 tree type = TREE_TYPE (arg);
3139 enum tree_code code = TREE_CODE (arg);
3140 location_t loc1, loc2;
3141
3142 /* If this is a comparison, we can simply invert it, except for
3143 floating-point non-equality comparisons, in which case we just
3144 enclose a TRUTH_NOT_EXPR around what we have. */
3145
3146 if (TREE_CODE_CLASS (code) == tcc_comparison)
3147 {
3148 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3149 if (FLOAT_TYPE_P (op_type)
3150 && flag_trapping_math
3151 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3152 && code != NE_EXPR && code != EQ_EXPR)
3153 return NULL_TREE;
3154
3155 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3156 if (code == ERROR_MARK)
3157 return NULL_TREE;
3158
3159 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3160 TREE_OPERAND (arg, 1));
3161 }
3162
3163 switch (code)
3164 {
3165 case INTEGER_CST:
3166 return constant_boolean_node (integer_zerop (arg), type);
3167
3168 case TRUTH_AND_EXPR:
3169 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3170 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3171 return build2_loc (loc, TRUTH_OR_EXPR, type,
3172 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3173 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3174
3175 case TRUTH_OR_EXPR:
3176 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3177 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3178 return build2_loc (loc, TRUTH_AND_EXPR, type,
3179 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3180 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3181
3182 case TRUTH_XOR_EXPR:
3183 /* Here we can invert either operand. We invert the first operand
3184 unless the second operand is a TRUTH_NOT_EXPR in which case our
3185 result is the XOR of the first operand with the inside of the
3186 negation of the second operand. */
3187
3188 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3189 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3190 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3191 else
3192 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3193 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3194 TREE_OPERAND (arg, 1));
3195
3196 case TRUTH_ANDIF_EXPR:
3197 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3198 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3199 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3200 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3201 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3202
3203 case TRUTH_ORIF_EXPR:
3204 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3205 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3206 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3207 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3208 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3209
3210 case TRUTH_NOT_EXPR:
3211 return TREE_OPERAND (arg, 0);
3212
3213 case COND_EXPR:
3214 {
3215 tree arg1 = TREE_OPERAND (arg, 1);
3216 tree arg2 = TREE_OPERAND (arg, 2);
3217
3218 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3219 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3220
3221 /* A COND_EXPR may have a throw as one operand, which
3222 then has void type. Just leave void operands
3223 as they are. */
3224 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3225 VOID_TYPE_P (TREE_TYPE (arg1))
3226 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3227 VOID_TYPE_P (TREE_TYPE (arg2))
3228 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3229 }
3230
3231 case COMPOUND_EXPR:
3232 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3233 return build2_loc (loc, COMPOUND_EXPR, type,
3234 TREE_OPERAND (arg, 0),
3235 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3236
3237 case NON_LVALUE_EXPR:
3238 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3239 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3240
3241 CASE_CONVERT:
3242 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3243 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3244
3245 /* ... fall through ... */
3246
3247 case FLOAT_EXPR:
3248 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3249 return build1_loc (loc, TREE_CODE (arg), type,
3250 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3251
3252 case BIT_AND_EXPR:
3253 if (!integer_onep (TREE_OPERAND (arg, 1)))
3254 return NULL_TREE;
3255 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3256
3257 case SAVE_EXPR:
3258 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3259
3260 case CLEANUP_POINT_EXPR:
3261 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3262 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3263 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3264
3265 default:
3266 return NULL_TREE;
3267 }
3268 }
3269
3270 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3271 assume that ARG is an operation that returns a truth value (0 or 1
3272 for scalars, 0 or -1 for vectors). Return the folded expression if
3273 folding is successful. Otherwise, return NULL_TREE. */
3274
3275 static tree
3276 fold_invert_truthvalue (location_t loc, tree arg)
3277 {
3278 tree type = TREE_TYPE (arg);
3279 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3280 ? BIT_NOT_EXPR
3281 : TRUTH_NOT_EXPR,
3282 type, arg);
3283 }
3284
3285 /* Return a simplified tree node for the truth-negation of ARG. This
3286 never alters ARG itself. We assume that ARG is an operation that
3287 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3288
3289 tree
3290 invert_truthvalue_loc (location_t loc, tree arg)
3291 {
3292 if (TREE_CODE (arg) == ERROR_MARK)
3293 return arg;
3294
3295 tree type = TREE_TYPE (arg);
3296 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3297 ? BIT_NOT_EXPR
3298 : TRUTH_NOT_EXPR,
3299 type, arg);
3300 }
3301
3302 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3303 operands are another bit-wise operation with a common input. If so,
3304 distribute the bit operations to save an operation and possibly two if
3305 constants are involved. For example, convert
3306 (A | B) & (A | C) into A | (B & C)
3307 Further simplification will occur if B and C are constants.
3308
3309 If this optimization cannot be done, 0 will be returned. */
3310
3311 static tree
3312 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3313 tree arg0, tree arg1)
3314 {
3315 tree common;
3316 tree left, right;
3317
3318 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3319 || TREE_CODE (arg0) == code
3320 || (TREE_CODE (arg0) != BIT_AND_EXPR
3321 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3322 return 0;
3323
3324 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3325 {
3326 common = TREE_OPERAND (arg0, 0);
3327 left = TREE_OPERAND (arg0, 1);
3328 right = TREE_OPERAND (arg1, 1);
3329 }
3330 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3331 {
3332 common = TREE_OPERAND (arg0, 0);
3333 left = TREE_OPERAND (arg0, 1);
3334 right = TREE_OPERAND (arg1, 0);
3335 }
3336 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3337 {
3338 common = TREE_OPERAND (arg0, 1);
3339 left = TREE_OPERAND (arg0, 0);
3340 right = TREE_OPERAND (arg1, 1);
3341 }
3342 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3343 {
3344 common = TREE_OPERAND (arg0, 1);
3345 left = TREE_OPERAND (arg0, 0);
3346 right = TREE_OPERAND (arg1, 0);
3347 }
3348 else
3349 return 0;
3350
3351 common = fold_convert_loc (loc, type, common);
3352 left = fold_convert_loc (loc, type, left);
3353 right = fold_convert_loc (loc, type, right);
3354 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3355 fold_build2_loc (loc, code, type, left, right));
3356 }
3357
3358 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3359 with code CODE. This optimization is unsafe. */
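/* For example, a/x + b/x becomes (a + b)/x, and a/2.0 - a/4.0 becomes
a * 0.25; either rewrite can change rounding and the raising of
floating-point exceptions, which is why it is marked unsafe. */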
3360 static tree
3361 distribute_real_division (location_t loc, enum tree_code code, tree type,
3362 tree arg0, tree arg1)
3363 {
3364 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3365 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3366
3367 /* (A / C) +- (B / C) -> (A +- B) / C. */
3368 if (mul0 == mul1
3369 && operand_equal_p (TREE_OPERAND (arg0, 1),
3370 TREE_OPERAND (arg1, 1), 0))
3371 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3372 fold_build2_loc (loc, code, type,
3373 TREE_OPERAND (arg0, 0),
3374 TREE_OPERAND (arg1, 0)),
3375 TREE_OPERAND (arg0, 1));
3376
3377 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3378 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3379 TREE_OPERAND (arg1, 0), 0)
3380 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3381 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3382 {
3383 REAL_VALUE_TYPE r0, r1;
3384 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3385 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3386 if (!mul0)
3387 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3388 if (!mul1)
3389 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3390 real_arithmetic (&r0, code, &r0, &r1);
3391 return fold_build2_loc (loc, MULT_EXPR, type,
3392 TREE_OPERAND (arg0, 0),
3393 build_real (type, r0));
3394 }
3395
3396 return NULL_TREE;
3397 }
3398 \f
3399 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3400 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3401
3402 static tree
3403 make_bit_field_ref (location_t loc, tree inner, tree type,
3404 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3405 {
3406 tree result, bftype;
3407
3408 if (bitpos == 0)
3409 {
3410 tree size = TYPE_SIZE (TREE_TYPE (inner));
3411 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3412 || POINTER_TYPE_P (TREE_TYPE (inner)))
3413 && tree_fits_shwi_p (size)
3414 && tree_to_shwi (size) == bitsize)
3415 return fold_convert_loc (loc, type, inner);
3416 }
3417
3418 bftype = type;
3419 if (TYPE_PRECISION (bftype) != bitsize
3420 || TYPE_UNSIGNED (bftype) == !unsignedp)
3421 bftype = build_nonstandard_integer_type (bitsize, 0);
3422
3423 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3424 size_int (bitsize), bitsize_int (bitpos));
3425
3426 if (bftype != type)
3427 result = fold_convert_loc (loc, type, result);
3428
3429 return result;
3430 }
3431
3432 /* Optimize a bit-field compare.
3433
3434 There are two cases: First is a compare against a constant and the
3435 second is a comparison of two items where the fields are at the same
3436 bit position relative to the start of a chunk (byte, halfword, word)
3437 large enough to contain it. In these cases we can avoid the shift
3438 implicit in bitfield extractions.
3439
3440 For constants, we emit a compare of the shifted constant with the
3441 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3442 compared. For two fields at the same position, we do the ANDs with the
3443 similar mask and compare the result of the ANDs.
3444
3445 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3446 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3447 are the left and right operands of the comparison, respectively.
3448
3449 If the optimization described above can be done, we return the resulting
3450 tree. Otherwise we return zero. */
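/* As an illustration, for struct S { unsigned a : 3; unsigned b : 5; }
the test s.b == 7 can become (WORD & MASK) == CST, where WORD is a
mode-sized load covering the field and MASK and CST are the suitably
shifted mask and constant (the names WORD, MASK and CST here are
illustrative only). */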
3451
3452 static tree
3453 optimize_bit_field_compare (location_t loc, enum tree_code code,
3454 tree compare_type, tree lhs, tree rhs)
3455 {
3456 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3457 tree type = TREE_TYPE (lhs);
3458 tree unsigned_type;
3459 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3460 enum machine_mode lmode, rmode, nmode;
3461 int lunsignedp, runsignedp;
3462 int lvolatilep = 0, rvolatilep = 0;
3463 tree linner, rinner = NULL_TREE;
3464 tree mask;
3465 tree offset;
3466
3467 /* Get all the information about the extractions being done. If the bit size
3468 is the same as the size of the underlying object, we aren't doing an
3469 extraction at all and so can do nothing. We also don't want to
3470 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3471 then will no longer be able to replace it. */
3472 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3473 &lunsignedp, &lvolatilep, false);
3474 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3475 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3476 return 0;
3477
3478 if (!const_p)
3479 {
3480 /* If this is not a constant, we can only do something if bit positions,
3481 sizes, and signedness are the same. */
3482 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3483 &runsignedp, &rvolatilep, false);
3484
3485 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3486 || lunsignedp != runsignedp || offset != 0
3487 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3488 return 0;
3489 }
3490
3491 /* See if we can find a mode to refer to this field. We should be able to,
3492 but fail if we can't. */
3493 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3494 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3495 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3496 TYPE_ALIGN (TREE_TYPE (rinner))),
3497 word_mode, false);
3498 if (nmode == VOIDmode)
3499 return 0;
3500
3501 /* Set signed and unsigned types of the precision of this mode for the
3502 shifts below. */
3503 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3504
3505 /* Compute the bit position and size for the new reference and our offset
3506 within it. If the new reference is the same size as the original, we
3507 won't optimize anything, so return zero. */
3508 nbitsize = GET_MODE_BITSIZE (nmode);
3509 nbitpos = lbitpos & ~ (nbitsize - 1);
3510 lbitpos -= nbitpos;
3511 if (nbitsize == lbitsize)
3512 return 0;
3513
3514 if (BYTES_BIG_ENDIAN)
3515 lbitpos = nbitsize - lbitsize - lbitpos;
3516
3517 /* Make the mask to be used against the extracted field. */
3518 mask = build_int_cst_type (unsigned_type, -1);
3519 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3520 mask = const_binop (RSHIFT_EXPR, mask,
3521 size_int (nbitsize - lbitsize - lbitpos));
3522
3523 if (! const_p)
3524 /* If not comparing with constant, just rework the comparison
3525 and return. */
3526 return fold_build2_loc (loc, code, compare_type,
3527 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3528 make_bit_field_ref (loc, linner,
3529 unsigned_type,
3530 nbitsize, nbitpos,
3531 1),
3532 mask),
3533 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3534 make_bit_field_ref (loc, rinner,
3535 unsigned_type,
3536 nbitsize, nbitpos,
3537 1),
3538 mask));
3539
3540 /* Otherwise, we are handling the constant case. See if the constant is too
3541 big for the field. Warn and return a tree for 0 (false) if so. We do
3542 this not only for its own sake, but to avoid having to test for this
3543 error case below. If we didn't, we might generate wrong code.
3544
3545 For unsigned fields, the constant shifted right by the field length should
3546 be all zero. For signed fields, the high-order bits should agree with
3547 the sign bit. */
3548
3549 if (lunsignedp)
3550 {
3551 if (wi::lrshift (rhs, lbitsize) != 0)
3552 {
3553 warning (0, "comparison is always %d due to width of bit-field",
3554 code == NE_EXPR);
3555 return constant_boolean_node (code == NE_EXPR, compare_type);
3556 }
3557 }
3558 else
3559 {
3560 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3561 if (tem != 0 && tem != -1)
3562 {
3563 warning (0, "comparison is always %d due to width of bit-field",
3564 code == NE_EXPR);
3565 return constant_boolean_node (code == NE_EXPR, compare_type);
3566 }
3567 }
3568
3569 /* Single-bit compares should always be against zero. */
3570 if (lbitsize == 1 && ! integer_zerop (rhs))
3571 {
3572 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3573 rhs = build_int_cst (type, 0);
3574 }
3575
3576 /* Make a new bitfield reference, shift the constant over the
3577 appropriate number of bits and mask it with the computed mask
3578 (in case this was a signed field). If we changed it, make a new one. */
3579 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3580
3581 rhs = const_binop (BIT_AND_EXPR,
3582 const_binop (LSHIFT_EXPR,
3583 fold_convert_loc (loc, unsigned_type, rhs),
3584 size_int (lbitpos)),
3585 mask);
3586
3587 lhs = build2_loc (loc, code, compare_type,
3588 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3589 return lhs;
3590 }
3591 \f
3592 /* Subroutine for fold_truth_andor_1: decode a field reference.
3593
3594 If EXP is a comparison reference, we return the innermost reference.
3595
3596 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3597 set to the starting bit number.
3598
3599 If the innermost field can be completely contained in a mode-sized
3600 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3601
3602 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3603 otherwise it is not changed.
3604
3605 *PUNSIGNEDP is set to the signedness of the field.
3606
3607 *PMASK is set to the mask used. This is either contained in a
3608 BIT_AND_EXPR or derived from the width of the field.
3609
3610 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3611
3612 Return 0 if this is not a component reference or is one that we can't
3613 do anything with. */
3614
3615 static tree
3616 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3617 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3618 int *punsignedp, int *pvolatilep,
3619 tree *pmask, tree *pand_mask)
3620 {
3621 tree outer_type = 0;
3622 tree and_mask = 0;
3623 tree mask, inner, offset;
3624 tree unsigned_type;
3625 unsigned int precision;
3626
3627 /* All the optimizations using this function assume integer fields.
3628 There are problems with FP fields since the type_for_size call
3629 below can fail for, e.g., XFmode. */
3630 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3631 return 0;
3632
3633 /* We are interested in the bare arrangement of bits, so strip everything
3634 that doesn't affect the machine mode. However, record the type of the
3635 outermost expression if it may matter below. */
3636 if (CONVERT_EXPR_P (exp)
3637 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3638 outer_type = TREE_TYPE (exp);
3639 STRIP_NOPS (exp);
3640
3641 if (TREE_CODE (exp) == BIT_AND_EXPR)
3642 {
3643 and_mask = TREE_OPERAND (exp, 1);
3644 exp = TREE_OPERAND (exp, 0);
3645 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3646 if (TREE_CODE (and_mask) != INTEGER_CST)
3647 return 0;
3648 }
3649
3650 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3651 punsignedp, pvolatilep, false);
3652 if ((inner == exp && and_mask == 0)
3653 || *pbitsize < 0 || offset != 0
3654 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3655 return 0;
3656
3657 /* If the number of bits in the reference is the same as the bitsize of
3658 the outer type, then the outer type gives the signedness. Otherwise
3659 (in case of a small bitfield) the signedness is unchanged. */
3660 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3661 *punsignedp = TYPE_UNSIGNED (outer_type);
3662
3663 /* Compute the mask to access the bitfield. */
3664 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3665 precision = TYPE_PRECISION (unsigned_type);
3666
3667 mask = build_int_cst_type (unsigned_type, -1);
3668
3669 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3670 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3671
3672 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3673 if (and_mask != 0)
3674 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3675 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3676
3677 *pmask = mask;
3678 *pand_mask = and_mask;
3679 return inner;
3680 }
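
/* Illustrative sketch, not part of GCC: the mask computation above in
   plain C, assuming an 8-bit unsigned type and a 4-bit field.  */
#if 0
static unsigned char
field_mask (void)
{
  unsigned char mask = (unsigned char) -1;    /* 0xff: all ones.  */
  mask = (unsigned char) (mask << (8 - 4));   /* 0xf0.  */
  mask = (unsigned char) (mask >> (8 - 4));   /* 0x0f: four low-order ones,
                                                 covering the field.  */
  return mask;
}
#endif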
3681
3682 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3683 bit positions and the type of MASK is signed. */
3684
3685 static int
3686 all_ones_mask_p (const_tree mask, unsigned int size)
3687 {
3688 tree type = TREE_TYPE (mask);
3689 unsigned int precision = TYPE_PRECISION (type);
3690
3691 /* If this function returns true when the type of the mask is
3692 UNSIGNED, then there will be errors. In particular see
3693 gcc.c-torture/execute/990326-1.c. There does not appear to be
3694 any documentation paper trail as to why this is so. But the
3695 pre-wide-int code worked with that restriction and it has been preserved
3696 here. */
3697 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3698 return false;
3699
3700 return wi::mask (size, false, precision) == mask;
3701 }
3702
3703 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3704 represents the sign bit of EXP's type. If EXP represents a sign
3705 or zero extension, also test VAL against the unextended type.
3706 The return value is the (sub)expression whose sign bit is VAL,
3707 or NULL_TREE otherwise. */
3708
3709 static tree
3710 sign_bit_p (tree exp, const_tree val)
3711 {
3712 int width;
3713 tree t;
3714
3715 /* Tree EXP must have an integral type. */
3716 t = TREE_TYPE (exp);
3717 if (! INTEGRAL_TYPE_P (t))
3718 return NULL_TREE;
3719
3720 /* Tree VAL must be an integer constant. */
3721 if (TREE_CODE (val) != INTEGER_CST
3722 || TREE_OVERFLOW (val))
3723 return NULL_TREE;
3724
3725 width = TYPE_PRECISION (t);
3726 if (wi::only_sign_bit_p (val, width))
3727 return exp;
3728
3729 /* Handle extension from a narrower type. */
3730 if (TREE_CODE (exp) == NOP_EXPR
3731 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3732 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3733
3734 return NULL_TREE;
3735 }
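
/* Illustrative example, not part of GCC: for a signed 8-bit type the only
   value sign_bit_p accepts is 0x80, so a test like (x & 0x80) != 0 is
   recognized as a sign-bit test, while 0x40 or 0xc0 would be rejected.  */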
3736
3737 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3738 to be evaluated unconditionally. */
3739
3740 static int
3741 simple_operand_p (const_tree exp)
3742 {
3743 /* Strip any conversions that don't change the machine mode. */
3744 STRIP_NOPS (exp);
3745
3746 return (CONSTANT_CLASS_P (exp)
3747 || TREE_CODE (exp) == SSA_NAME
3748 || (DECL_P (exp)
3749 && ! TREE_ADDRESSABLE (exp)
3750 && ! TREE_THIS_VOLATILE (exp)
3751 && ! DECL_NONLOCAL (exp)
3752 /* Don't regard global variables as simple. They may be
3753 allocated in ways unknown to the compiler (shared memory,
3754 #pragma weak, etc). */
3755 && ! TREE_PUBLIC (exp)
3756 && ! DECL_EXTERNAL (exp)
3757 /* Weakrefs are not safe to be read, since they can be NULL.
3758 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3759 have DECL_WEAK flag set. */
3760 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3761 /* Loading a static variable is unduly expensive, but global
3762 registers aren't expensive. */
3763 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3764 }
3765
3766 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3767 to be evaluated unconditionally.
3768 In addition to simple_operand_p, we assume that comparisons, conversions,
3769 and logic-not operations are simple, if their operands are simple, too. */
3770
3771 static bool
3772 simple_operand_p_2 (tree exp)
3773 {
3774 enum tree_code code;
3775
3776 if (TREE_SIDE_EFFECTS (exp)
3777 || tree_could_trap_p (exp))
3778 return false;
3779
3780 while (CONVERT_EXPR_P (exp))
3781 exp = TREE_OPERAND (exp, 0);
3782
3783 code = TREE_CODE (exp);
3784
3785 if (TREE_CODE_CLASS (code) == tcc_comparison)
3786 return (simple_operand_p (TREE_OPERAND (exp, 0))
3787 && simple_operand_p (TREE_OPERAND (exp, 1)));
3788
3789 if (code == TRUTH_NOT_EXPR)
3790 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3791
3792 return simple_operand_p (exp);
3793 }
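
/* Illustrative examples, not part of GCC: a constant, an SSA name or a
   local non-volatile, non-addressable variable is simple, and so is a
   comparison or logical not built from such operands; a global, weak or
   volatile object is not, nor is anything that could trap.  */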
3794
3795 \f
3796 /* The following functions are subroutines to fold_range_test and allow it to
3797 try to change a logical combination of comparisons into a range test.
3798
3799 For example, both
3800 X == 2 || X == 3 || X == 4 || X == 5
3801 and
3802 X >= 2 && X <= 5
3803 are converted to
3804 (unsigned) (X - 2) <= 3
3805
3806 We describe each set of comparisons as being either inside or outside
3807 a range, using a variable named like IN_P, and then describe the
3808 range with a lower and upper bound. If one of the bounds is omitted,
3809 it represents either the highest or lowest value of the type.
3810
3811 In the comments below, we represent a range by two numbers in brackets
3812 preceded by a "+" to designate being inside that range, or a "-" to
3813 designate being outside that range, so the condition can be inverted by
3814 flipping the prefix. An omitted bound is represented by a "-". For
3815 example, "- [-, 10]" means being outside the range starting at the lowest
3816 possible value and ending at 10, in other words, being greater than 10.
3817 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3818 always false.
3819
3820 We set up things so that the missing bounds are handled in a consistent
3821 manner so neither a missing bound nor "true" and "false" need to be
3822 handled using a special case. */
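
/* Illustrative sketch, not part of GCC: the unsigned-subtraction trick
   described above, written out in plain C.  The function name is
   hypothetical.  */
#if 0
static int
in_range_2_to_5 (unsigned int x)
{
  /* X == 2 || X == 3 || X == 4 || X == 5 and X >= 2 && X <= 5 both
     collapse to a single compare: when x < 2 the subtraction wraps
     around to a huge unsigned value, so one test covers both bounds.  */
  return x - 2u <= 3u;
}
#endif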
3823
3824 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3825 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3826 and UPPER1_P are nonzero if the respective argument is an upper bound
3827 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3828 must be specified for a comparison. ARG1 will be converted to ARG0's
3829 type if both are specified. */
3830
3831 static tree
3832 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3833 tree arg1, int upper1_p)
3834 {
3835 tree tem;
3836 int result;
3837 int sgn0, sgn1;
3838
3839 /* If neither arg represents infinity, do the normal operation.
3840 Else, if not a comparison, return infinity. Else handle the special
3841 comparison rules. Note that most of the cases below won't occur, but
3842 are handled for consistency. */
3843
3844 if (arg0 != 0 && arg1 != 0)
3845 {
3846 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3847 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3848 STRIP_NOPS (tem);
3849 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3850 }
3851
3852 if (TREE_CODE_CLASS (code) != tcc_comparison)
3853 return 0;
3854
3855 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3856 for neither. In real maths, we cannot assume open-ended ranges are
3857 the same. But this is computer arithmetic, where numbers are finite.
3858 We can therefore stand in for any missing bound with a value Z,
3859 Z being greater than any representable number. This permits
3860 us to treat unbounded ranges as equal. */
3861 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3862 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3863 switch (code)
3864 {
3865 case EQ_EXPR:
3866 result = sgn0 == sgn1;
3867 break;
3868 case NE_EXPR:
3869 result = sgn0 != sgn1;
3870 break;
3871 case LT_EXPR:
3872 result = sgn0 < sgn1;
3873 break;
3874 case LE_EXPR:
3875 result = sgn0 <= sgn1;
3876 break;
3877 case GT_EXPR:
3878 result = sgn0 > sgn1;
3879 break;
3880 case GE_EXPR:
3881 result = sgn0 >= sgn1;
3882 break;
3883 default:
3884 gcc_unreachable ();
3885 }
3886
3887 return constant_boolean_node (result, type);
3888 }
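
/* Illustrative example, not part of GCC: with ARG0 omitted as an upper
   bound (SGN0 = 1, acting as +infinity) and ARG1 = 5 (SGN1 = 0), LT_EXPR
   yields false and GT_EXPR yields true, matching "+infinity < 5" and
   "+infinity > 5".  */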
3889 \f
3890 /* Helper routine for make_range. Perform one step for it, return
3891 new expression if the loop should continue or NULL_TREE if it should
3892 stop. */
3893
3894 tree
3895 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3896 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3897 bool *strict_overflow_p)
3898 {
3899 tree arg0_type = TREE_TYPE (arg0);
3900 tree n_low, n_high, low = *p_low, high = *p_high;
3901 int in_p = *p_in_p, n_in_p;
3902
3903 switch (code)
3904 {
3905 case TRUTH_NOT_EXPR:
3906 /* We can only do something if the range is testing for zero. */
3907 if (low == NULL_TREE || high == NULL_TREE
3908 || ! integer_zerop (low) || ! integer_zerop (high))
3909 return NULL_TREE;
3910 *p_in_p = ! in_p;
3911 return arg0;
3912
3913 case EQ_EXPR: case NE_EXPR:
3914 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3915 /* We can only do something if the range is testing for zero
3916 and if the second operand is an integer constant. Note that
3917 saying something is "in" the range we make is done by
3918 complementing IN_P, since IN_P is set for the initial case of
3919 being not equal to zero; "out" is leaving it alone. */
3920 if (low == NULL_TREE || high == NULL_TREE
3921 || ! integer_zerop (low) || ! integer_zerop (high)
3922 || TREE_CODE (arg1) != INTEGER_CST)
3923 return NULL_TREE;
3924
3925 switch (code)
3926 {
3927 case NE_EXPR: /* - [c, c] */
3928 low = high = arg1;
3929 break;
3930 case EQ_EXPR: /* + [c, c] */
3931 in_p = ! in_p, low = high = arg1;
3932 break;
3933 case GT_EXPR: /* - [-, c] */
3934 low = 0, high = arg1;
3935 break;
3936 case GE_EXPR: /* + [c, -] */
3937 in_p = ! in_p, low = arg1, high = 0;
3938 break;
3939 case LT_EXPR: /* - [c, -] */
3940 low = arg1, high = 0;
3941 break;
3942 case LE_EXPR: /* + [-, c] */
3943 in_p = ! in_p, low = 0, high = arg1;
3944 break;
3945 default:
3946 gcc_unreachable ();
3947 }
3948
3949 /* If this is an unsigned comparison, we also know that EXP is
3950 greater than or equal to zero. We base the range tests we make
3951 on that fact, so we record it here so we can parse existing
3952 range tests. We test arg0_type since often the return type
3953 of, e.g. EQ_EXPR, is boolean. */
3954 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3955 {
3956 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3957 in_p, low, high, 1,
3958 build_int_cst (arg0_type, 0),
3959 NULL_TREE))
3960 return NULL_TREE;
3961
3962 in_p = n_in_p, low = n_low, high = n_high;
3963
3964 /* If the high bound is missing, but we have a nonzero low
3965 bound, reverse the range so it goes from zero to the low bound
3966 minus 1. */
3967 if (high == 0 && low && ! integer_zerop (low))
3968 {
3969 in_p = ! in_p;
3970 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3971 build_int_cst (TREE_TYPE (low), 1), 0);
3972 low = build_int_cst (arg0_type, 0);
3973 }
3974 }
3975
3976 *p_low = low;
3977 *p_high = high;
3978 *p_in_p = in_p;
3979 return arg0;
3980
3981 case NEGATE_EXPR:
3982 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3983 low and high are non-NULL, then normalize will DTRT. */
3984 if (!TYPE_UNSIGNED (arg0_type)
3985 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3986 {
3987 if (low == NULL_TREE)
3988 low = TYPE_MIN_VALUE (arg0_type);
3989 if (high == NULL_TREE)
3990 high = TYPE_MAX_VALUE (arg0_type);
3991 }
3992
3993 /* (-x) IN [a,b] -> x in [-b, -a] */
3994 n_low = range_binop (MINUS_EXPR, exp_type,
3995 build_int_cst (exp_type, 0),
3996 0, high, 1);
3997 n_high = range_binop (MINUS_EXPR, exp_type,
3998 build_int_cst (exp_type, 0),
3999 0, low, 0);
4000 if (n_high != 0 && TREE_OVERFLOW (n_high))
4001 return NULL_TREE;
4002 goto normalize;
4003
4004 case BIT_NOT_EXPR:
4005 /* ~ X -> -X - 1 */
4006 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4007 build_int_cst (exp_type, 1));
4008
4009 case PLUS_EXPR:
4010 case MINUS_EXPR:
4011 if (TREE_CODE (arg1) != INTEGER_CST)
4012 return NULL_TREE;
4013
4014 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4015 move a constant to the other side. */
4016 if (!TYPE_UNSIGNED (arg0_type)
4017 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4018 return NULL_TREE;
4019
4020 /* If EXP is signed, any overflow in the computation is undefined,
4021 so we don't worry about it so long as our computations on
4022 the bounds don't overflow. For unsigned, overflow is defined
4023 and this is exactly the right thing. */
4024 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4025 arg0_type, low, 0, arg1, 0);
4026 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4027 arg0_type, high, 1, arg1, 0);
4028 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4029 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4030 return NULL_TREE;
4031
4032 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4033 *strict_overflow_p = true;
4034
4035 normalize:
4036 /* Check for an unsigned range which has wrapped around the maximum
4037 value thus making n_high < n_low, and normalize it. */
4038 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4039 {
4040 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4041 build_int_cst (TREE_TYPE (n_high), 1), 0);
4042 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4043 build_int_cst (TREE_TYPE (n_low), 1), 0);
4044
4045 /* If the range is of the form +/- [ x+1, x ], we won't
4046 be able to normalize it. But then, it represents the
4047 whole range or the empty set, so make it
4048 +/- [ -, - ]. */
4049 if (tree_int_cst_equal (n_low, low)
4050 && tree_int_cst_equal (n_high, high))
4051 low = high = 0;
4052 else
4053 in_p = ! in_p;
4054 }
4055 else
4056 low = n_low, high = n_high;
4057
4058 *p_low = low;
4059 *p_high = high;
4060 *p_in_p = in_p;
4061 return arg0;
4062
4063 CASE_CONVERT:
4064 case NON_LVALUE_EXPR:
4065 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4066 return NULL_TREE;
4067
4068 if (! INTEGRAL_TYPE_P (arg0_type)
4069 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4070 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4071 return NULL_TREE;
4072
4073 n_low = low, n_high = high;
4074
4075 if (n_low != 0)
4076 n_low = fold_convert_loc (loc, arg0_type, n_low);
4077
4078 if (n_high != 0)
4079 n_high = fold_convert_loc (loc, arg0_type, n_high);
4080
4081 /* If we're converting arg0 from an unsigned type to exp's
4082 signed type, we will be doing the comparison as unsigned.
4083 The tests above have already verified that LOW and HIGH
4084 are both positive.
4085
4086 So we have to ensure that we will handle large unsigned
4087 values the same way that the current signed bounds treat
4088 negative values. */
4089
4090 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4091 {
4092 tree high_positive;
4093 tree equiv_type;
4094 /* For fixed-point modes, we need to pass the saturating flag
4095 as the 2nd parameter. */
4096 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4097 equiv_type
4098 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4099 TYPE_SATURATING (arg0_type));
4100 else
4101 equiv_type
4102 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4103
4104 /* A range without an upper bound is, naturally, unbounded.
4105 Since convert would have cropped a very large value, use
4106 the max value for the destination type. */
4107 high_positive
4108 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4109 : TYPE_MAX_VALUE (arg0_type);
4110
4111 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4112 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4113 fold_convert_loc (loc, arg0_type,
4114 high_positive),
4115 build_int_cst (arg0_type, 1));
4116
4117 /* If the low bound is specified, "and" the range with the
4118 range for which the original unsigned value will be
4119 positive. */
4120 if (low != 0)
4121 {
4122 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4123 1, fold_convert_loc (loc, arg0_type,
4124 integer_zero_node),
4125 high_positive))
4126 return NULL_TREE;
4127
4128 in_p = (n_in_p == in_p);
4129 }
4130 else
4131 {
4132 /* Otherwise, "or" the range with the range of the input
4133 that will be interpreted as negative. */
4134 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4135 1, fold_convert_loc (loc, arg0_type,
4136 integer_zero_node),
4137 high_positive))
4138 return NULL_TREE;
4139
4140 in_p = (in_p != n_in_p);
4141 }
4142 }
4143
4144 *p_low = n_low;
4145 *p_high = n_high;
4146 *p_in_p = in_p;
4147 return arg0;
4148
4149 default:
4150 return NULL_TREE;
4151 }
4152 }
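
/* Illustrative example, not part of GCC: one PLUS_EXPR step on x + 10,
   with x of unsigned 8-bit type and an incoming range + [0, 4].  The
   constant moves to the bounds: n_low = 0 - 10 = 246 and
   n_high = 4 - 10 = 250, giving "x in + [246, 250]"; indeed x + 10 lands
   in [0, 4] modulo 256 exactly for those values of x.  */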
4153
4154 /* Given EXP, a logical expression, set the range it is testing into
4155 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4156 actually being tested. *PLOW and *PHIGH will be made of the same
4157 type as the returned expression. If EXP is not a comparison, we
4158 will most likely not be returning a useful value and range. Set
4159 *STRICT_OVERFLOW_P to true if the return value is only valid
4160 because signed overflow is undefined; otherwise, do not change
4161 *STRICT_OVERFLOW_P. */
4162
4163 tree
4164 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4165 bool *strict_overflow_p)
4166 {
4167 enum tree_code code;
4168 tree arg0, arg1 = NULL_TREE;
4169 tree exp_type, nexp;
4170 int in_p;
4171 tree low, high;
4172 location_t loc = EXPR_LOCATION (exp);
4173
4174 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4175 and see if we can refine the range. Some of the cases below may not
4176 happen, but it doesn't seem worth worrying about this. We keep
4177 stepping with make_range_step as long as it refines the range;
4178 when it returns NULL_TREE we stop. */
4179
4180 in_p = 0;
4181 low = high = build_int_cst (TREE_TYPE (exp), 0);
4182
4183 while (1)
4184 {
4185 code = TREE_CODE (exp);
4186 exp_type = TREE_TYPE (exp);
4187 arg0 = NULL_TREE;
4188
4189 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4190 {
4191 if (TREE_OPERAND_LENGTH (exp) > 0)
4192 arg0 = TREE_OPERAND (exp, 0);
4193 if (TREE_CODE_CLASS (code) == tcc_binary
4194 || TREE_CODE_CLASS (code) == tcc_comparison
4195 || (TREE_CODE_CLASS (code) == tcc_expression
4196 && TREE_OPERAND_LENGTH (exp) > 1))
4197 arg1 = TREE_OPERAND (exp, 1);
4198 }
4199 if (arg0 == NULL_TREE)
4200 break;
4201
4202 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4203 &high, &in_p, strict_overflow_p);
4204 if (nexp == NULL_TREE)
4205 break;
4206 exp = nexp;
4207 }
4208
4209 /* If EXP is a constant, we can evaluate whether this is true or false. */
4210 if (TREE_CODE (exp) == INTEGER_CST)
4211 {
4212 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4213 exp, 0, low, 0))
4214 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4215 exp, 1, high, 1)));
4216 low = high = 0;
4217 exp = 0;
4218 }
4219
4220 *pin_p = in_p, *plow = low, *phigh = high;
4221 return exp;
4222 }
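
/* Illustrative example, not part of GCC: for an unsigned char x and
   EXP = (unsigned char) (x - 2) <= 3, make_range peels off the
   comparison and then the subtraction in successive steps and returns x
   with *PIN_P = 1, *PLOW = 2 and *PHIGH = 5, i.e. the test
   "x in + [2, 5]".  */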
4223 \f
4224 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4225 type, TYPE, return an expression to test if EXP is in (or out of, depending
4226 on IN_P) the range. Return 0 if the test couldn't be created. */
4227
4228 tree
4229 build_range_check (location_t loc, tree type, tree exp, int in_p,
4230 tree low, tree high)
4231 {
4232 tree etype = TREE_TYPE (exp), value;
4233
4234 #ifdef HAVE_canonicalize_funcptr_for_compare
4235 /* Disable this optimization for function pointer expressions
4236 on targets that require function pointer canonicalization. */
4237 if (HAVE_canonicalize_funcptr_for_compare
4238 && TREE_CODE (etype) == POINTER_TYPE
4239 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4240 return NULL_TREE;
4241 #endif
4242
4243 if (! in_p)
4244 {
4245 value = build_range_check (loc, type, exp, 1, low, high);
4246 if (value != 0)
4247 return invert_truthvalue_loc (loc, value);
4248
4249 return 0;
4250 }
4251
4252 if (low == 0 && high == 0)
4253 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4254
4255 if (low == 0)
4256 return fold_build2_loc (loc, LE_EXPR, type, exp,
4257 fold_convert_loc (loc, etype, high));
4258
4259 if (high == 0)
4260 return fold_build2_loc (loc, GE_EXPR, type, exp,
4261 fold_convert_loc (loc, etype, low));
4262
4263 if (operand_equal_p (low, high, 0))
4264 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4265 fold_convert_loc (loc, etype, low));
4266
4267 if (integer_zerop (low))
4268 {
4269 if (! TYPE_UNSIGNED (etype))
4270 {
4271 etype = unsigned_type_for (etype);
4272 high = fold_convert_loc (loc, etype, high);
4273 exp = fold_convert_loc (loc, etype, exp);
4274 }
4275 return build_range_check (loc, type, exp, 1, 0, high);
4276 }
4277
4278 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4279 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4280 {
4281 int prec = TYPE_PRECISION (etype);
4282
4283 if (wi::mask (prec - 1, false, prec) == high)
4284 {
4285 if (TYPE_UNSIGNED (etype))
4286 {
4287 tree signed_etype = signed_type_for (etype);
4288 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4289 etype
4290 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4291 else
4292 etype = signed_etype;
4293 exp = fold_convert_loc (loc, etype, exp);
4294 }
4295 return fold_build2_loc (loc, GT_EXPR, type, exp,
4296 build_int_cst (etype, 0));
4297 }
4298 }
4299
4300 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4301 This requires wrap-around arithmetic for the type of the expression.
4302 First make sure that arithmetic in this type is valid, then make sure
4303 that it wraps around. */
4304 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4305 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4306 TYPE_UNSIGNED (etype));
4307
4308 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4309 {
4310 tree utype, minv, maxv;
4311
4312 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4313 for the type in question, as we rely on this here. */
4314 utype = unsigned_type_for (etype);
4315 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4316 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4317 build_int_cst (TREE_TYPE (maxv), 1), 1);
4318 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4319
4320 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4321 minv, 1, maxv, 1)))
4322 etype = utype;
4323 else
4324 return 0;
4325 }
4326
4327 high = fold_convert_loc (loc, etype, high);
4328 low = fold_convert_loc (loc, etype, low);
4329 exp = fold_convert_loc (loc, etype, exp);
4330
4331 value = const_binop (MINUS_EXPR, high, low);
4332
4333
4334 if (POINTER_TYPE_P (etype))
4335 {
4336 if (value != 0 && !TREE_OVERFLOW (value))
4337 {
4338 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4339 return build_range_check (loc, type,
4340 fold_build_pointer_plus_loc (loc, exp, low),
4341 1, build_int_cst (etype, 0), value);
4342 }
4343 return 0;
4344 }
4345
4346 if (value != 0 && !TREE_OVERFLOW (value))
4347 return build_range_check (loc, type,
4348 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4349 1, build_int_cst (etype, 0), value);
4350
4351 return 0;
4352 }
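
/* Illustrative sketch, not part of GCC: the unsigned rewrite that
   build_range_check produces for a signed operand.  The bounds are
   hypothetical.  */
#if 0
static int
in_range (int c)
{
  /* c >= -10 && c <= 10 becomes, after conversion to the unsigned type,
     (unsigned int) c - (unsigned int) -10 <= 20u, i.e.:  */
  return (unsigned int) c + 10u <= 20u;
}
#endif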
4353 \f
4354 /* Return the predecessor of VAL in its type, handling the infinite case. */
4355
4356 static tree
4357 range_predecessor (tree val)
4358 {
4359 tree type = TREE_TYPE (val);
4360
4361 if (INTEGRAL_TYPE_P (type)
4362 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4363 return 0;
4364 else
4365 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4366 build_int_cst (TREE_TYPE (val), 1), 0);
4367 }
4368
4369 /* Return the successor of VAL in its type, handling the infinite case. */
4370
4371 static tree
4372 range_successor (tree val)
4373 {
4374 tree type = TREE_TYPE (val);
4375
4376 if (INTEGRAL_TYPE_P (type)
4377 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4378 return 0;
4379 else
4380 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4381 build_int_cst (TREE_TYPE (val), 1), 0);
4382 }
4383
4384 /* Given two ranges, see if we can merge them into one. Return 1 if we
4385 can, 0 if we can't. Set the output range into the specified parameters. */
4386
4387 bool
4388 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4389 tree high0, int in1_p, tree low1, tree high1)
4390 {
4391 int no_overlap;
4392 int subset;
4393 int temp;
4394 tree tem;
4395 int in_p;
4396 tree low, high;
4397 int lowequal = ((low0 == 0 && low1 == 0)
4398 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4399 low0, 0, low1, 0)));
4400 int highequal = ((high0 == 0 && high1 == 0)
4401 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4402 high0, 1, high1, 1)));
4403
4404 /* Make range 0 be the range that starts first, or ends last if they
4405 start at the same value; swap them if that is not already the case. */
4406 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4407 low0, 0, low1, 0))
4408 || (lowequal
4409 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4410 high1, 1, high0, 1))))
4411 {
4412 temp = in0_p, in0_p = in1_p, in1_p = temp;
4413 tem = low0, low0 = low1, low1 = tem;
4414 tem = high0, high0 = high1, high1 = tem;
4415 }
4416
4417 /* Now flag two cases, whether the ranges are disjoint or whether the
4418 second range is totally subsumed in the first. Note that the tests
4419 below are simplified by the ones above. */
4420 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4421 high0, 1, low1, 0));
4422 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4423 high1, 1, high0, 1));
4424
4425 /* We now have four cases, depending on whether we are including or
4426 excluding the two ranges. */
4427 if (in0_p && in1_p)
4428 {
4429 /* If they don't overlap, the result is false. If the second range
4430 is a subset it is the result. Otherwise, the range is from the start
4431 of the second to the end of the first. */
4432 if (no_overlap)
4433 in_p = 0, low = high = 0;
4434 else if (subset)
4435 in_p = 1, low = low1, high = high1;
4436 else
4437 in_p = 1, low = low1, high = high0;
4438 }
4439
4440 else if (in0_p && ! in1_p)
4441 {
4442 /* If they don't overlap, the result is the first range. If they are
4443 equal, the result is false. If the second range is a subset of the
4444 first, and the ranges begin at the same place, we go from just after
4445 the end of the second range to the end of the first. If the second
4446 range is not a subset of the first, or if it is a subset and both
4447 ranges end at the same place, the range starts at the start of the
4448 first range and ends just before the second range.
4449 Otherwise, we can't describe this as a single range. */
4450 if (no_overlap)
4451 in_p = 1, low = low0, high = high0;
4452 else if (lowequal && highequal)
4453 in_p = 0, low = high = 0;
4454 else if (subset && lowequal)
4455 {
4456 low = range_successor (high1);
4457 high = high0;
4458 in_p = 1;
4459 if (low == 0)
4460 {
4461 /* We are in the weird situation where high0 > high1 but
4462 high1 has no successor. Punt. */
4463 return 0;
4464 }
4465 }
4466 else if (! subset || highequal)
4467 {
4468 low = low0;
4469 high = range_predecessor (low1);
4470 in_p = 1;
4471 if (high == 0)
4472 {
4473 /* low0 < low1 but low1 has no predecessor. Punt. */
4474 return 0;
4475 }
4476 }
4477 else
4478 return 0;
4479 }
4480
4481 else if (! in0_p && in1_p)
4482 {
4483 /* If they don't overlap, the result is the second range. If the second
4484 is a subset of the first, the result is false. Otherwise,
4485 the range starts just after the first range and ends at the
4486 end of the second. */
4487 if (no_overlap)
4488 in_p = 1, low = low1, high = high1;
4489 else if (subset || highequal)
4490 in_p = 0, low = high = 0;
4491 else
4492 {
4493 low = range_successor (high0);
4494 high = high1;
4495 in_p = 1;
4496 if (low == 0)
4497 {
4498 /* high1 > high0 but high0 has no successor. Punt. */
4499 return 0;
4500 }
4501 }
4502 }
4503
4504 else
4505 {
4506 /* The case where we are excluding both ranges. Here the complex case
4507 is if they don't overlap. In that case, the only time we have a
4508 range is if they are adjacent. If the second is a subset of the
4509 first, the result is the first. Otherwise, the range to exclude
4510 starts at the beginning of the first range and ends at the end of the
4511 second. */
4512 if (no_overlap)
4513 {
4514 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4515 range_successor (high0),
4516 1, low1, 0)))
4517 in_p = 0, low = low0, high = high1;
4518 else
4519 {
4520 /* Canonicalize - [min, x] into - [-, x]. */
4521 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4522 switch (TREE_CODE (TREE_TYPE (low0)))
4523 {
4524 case ENUMERAL_TYPE:
4525 if (TYPE_PRECISION (TREE_TYPE (low0))
4526 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4527 break;
4528 /* FALLTHROUGH */
4529 case INTEGER_TYPE:
4530 if (tree_int_cst_equal (low0,
4531 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4532 low0 = 0;
4533 break;
4534 case POINTER_TYPE:
4535 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4536 && integer_zerop (low0))
4537 low0 = 0;
4538 break;
4539 default:
4540 break;
4541 }
4542
4543 /* Canonicalize - [x, max] into - [x, -]. */
4544 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4545 switch (TREE_CODE (TREE_TYPE (high1)))
4546 {
4547 case ENUMERAL_TYPE:
4548 if (TYPE_PRECISION (TREE_TYPE (high1))
4549 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4550 break;
4551 /* FALLTHROUGH */
4552 case INTEGER_TYPE:
4553 if (tree_int_cst_equal (high1,
4554 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4555 high1 = 0;
4556 break;
4557 case POINTER_TYPE:
4558 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4559 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4560 high1, 1,
4561 build_int_cst (TREE_TYPE (high1), 1),
4562 1)))
4563 high1 = 0;
4564 break;
4565 default:
4566 break;
4567 }
4568
4569 /* The ranges might also be adjacent between the maximum and
4570 minimum values of the given type. For
4571 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4572 return + [x + 1, y - 1]. */
4573 if (low0 == 0 && high1 == 0)
4574 {
4575 low = range_successor (high0);
4576 high = range_predecessor (low1);
4577 if (low == 0 || high == 0)
4578 return 0;
4579
4580 in_p = 1;
4581 }
4582 else
4583 return 0;
4584 }
4585 }
4586 else if (subset)
4587 in_p = 0, low = low0, high = high0;
4588 else
4589 in_p = 0, low = low0, high = high1;
4590 }
4591
4592 *pin_p = in_p, *plow = low, *phigh = high;
4593 return 1;
4594 }
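
/* Illustrative examples, not part of GCC: merging + [2, 5] with + [4, 9]
   (the value must be inside both) yields the intersection + [4, 5];
   merging + [2, 5] with - [4, 9] (inside the first, outside the second)
   yields + [2, 3].  */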
4595 \f
4596
4597 /* Subroutine of fold, looking inside expressions of the form
4598 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4599 of the COND_EXPR. This function is also used to optimize
4600 A op B ? C : A, by reversing the comparison first.
4601
4602 Return a folded expression whose code is not a COND_EXPR
4603 anymore, or NULL_TREE if no folding opportunity is found. */
4604
4605 static tree
4606 fold_cond_expr_with_comparison (location_t loc, tree type,
4607 tree arg0, tree arg1, tree arg2)
4608 {
4609 enum tree_code comp_code = TREE_CODE (arg0);
4610 tree arg00 = TREE_OPERAND (arg0, 0);
4611 tree arg01 = TREE_OPERAND (arg0, 1);
4612 tree arg1_type = TREE_TYPE (arg1);
4613 tree tem;
4614
4615 STRIP_NOPS (arg1);
4616 STRIP_NOPS (arg2);
4617
4618 /* If we have A op 0 ? A : -A, consider applying the following
4619 transformations:
4620
4621 A == 0? A : -A same as -A
4622 A != 0? A : -A same as A
4623 A >= 0? A : -A same as abs (A)
4624 A > 0? A : -A same as abs (A)
4625 A <= 0? A : -A same as -abs (A)
4626 A < 0? A : -A same as -abs (A)
4627
4628 None of these transformations work for modes with signed
4629 zeros. If A is +/-0, the first two transformations will
4630 change the sign of the result (from +0 to -0, or vice
4631 versa). The last four will fix the sign of the result,
4632 even though the original expressions could be positive or
4633 negative, depending on the sign of A.
4634
4635 Note that all these transformations are correct if A is
4636 NaN, since the two alternatives (A and -A) are also NaNs. */
4637 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4638 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4639 ? real_zerop (arg01)
4640 : integer_zerop (arg01))
4641 && ((TREE_CODE (arg2) == NEGATE_EXPR
4642 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4643 /* In the case that A is of the form X-Y, '-A' (arg2) may
4644 have already been folded to Y-X, check for that. */
4645 || (TREE_CODE (arg1) == MINUS_EXPR
4646 && TREE_CODE (arg2) == MINUS_EXPR
4647 && operand_equal_p (TREE_OPERAND (arg1, 0),
4648 TREE_OPERAND (arg2, 1), 0)
4649 && operand_equal_p (TREE_OPERAND (arg1, 1),
4650 TREE_OPERAND (arg2, 0), 0))))
4651 switch (comp_code)
4652 {
4653 case EQ_EXPR:
4654 case UNEQ_EXPR:
4655 tem = fold_convert_loc (loc, arg1_type, arg1);
4656 return pedantic_non_lvalue_loc (loc,
4657 fold_convert_loc (loc, type,
4658 negate_expr (tem)));
4659 case NE_EXPR:
4660 case LTGT_EXPR:
4661 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4662 case UNGE_EXPR:
4663 case UNGT_EXPR:
4664 if (flag_trapping_math)
4665 break;
4666 /* Fall through. */
4667 case GE_EXPR:
4668 case GT_EXPR:
4669 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4670 arg1 = fold_convert_loc (loc, signed_type_for
4671 (TREE_TYPE (arg1)), arg1);
4672 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4673 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4674 case UNLE_EXPR:
4675 case UNLT_EXPR:
4676 if (flag_trapping_math)
4677 break;
4678 case LE_EXPR:
4679 case LT_EXPR:
4680 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4681 arg1 = fold_convert_loc (loc, signed_type_for
4682 (TREE_TYPE (arg1)), arg1);
4683 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4684 return negate_expr (fold_convert_loc (loc, type, tem));
4685 default:
4686 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4687 break;
4688 }
4689
4690 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4691 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4692 both transformations are correct when A is NaN: A != 0
4693 is then true, and A == 0 is false. */
4694
4695 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4696 && integer_zerop (arg01) && integer_zerop (arg2))
4697 {
4698 if (comp_code == NE_EXPR)
4699 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4700 else if (comp_code == EQ_EXPR)
4701 return build_zero_cst (type);
4702 }
4703
4704 /* Try some transformations of A op B ? A : B.
4705
4706 A == B? A : B same as B
4707 A != B? A : B same as A
4708 A >= B? A : B same as max (A, B)
4709 A > B? A : B same as max (B, A)
4710 A <= B? A : B same as min (A, B)
4711 A < B? A : B same as min (B, A)
4712
4713 As above, these transformations don't work in the presence
4714 of signed zeros. For example, if A and B are zeros of
4715 opposite sign, the first two transformations will change
4716 the sign of the result. In the last four, the original
4717 expressions give different results for (A=+0, B=-0) and
4718 (A=-0, B=+0), but the transformed expressions do not.
4719
4720 The first two transformations are correct if either A or B
4721 is a NaN. In the first transformation, the condition will
4722 be false, and B will indeed be chosen. In the case of the
4723 second transformation, the condition A != B will be true,
4724 and A will be chosen.
4725
4726 The conversions to max() and min() are not correct if B is
4727 a number and A is not. The conditions in the original
4728 expressions will be false, so all four give B. The min()
4729 and max() versions would give a NaN instead. */
4730 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4731 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4732 /* Avoid these transformations if the COND_EXPR may be used
4733 as an lvalue in the C++ front-end. PR c++/19199. */
4734 && (in_gimple_form
4735 || VECTOR_TYPE_P (type)
4736 || (strcmp (lang_hooks.name, "GNU C++") != 0
4737 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4738 || ! maybe_lvalue_p (arg1)
4739 || ! maybe_lvalue_p (arg2)))
4740 {
4741 tree comp_op0 = arg00;
4742 tree comp_op1 = arg01;
4743 tree comp_type = TREE_TYPE (comp_op0);
4744
4745 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4746 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4747 {
4748 comp_type = type;
4749 comp_op0 = arg1;
4750 comp_op1 = arg2;
4751 }
4752
4753 switch (comp_code)
4754 {
4755 case EQ_EXPR:
4756 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4757 case NE_EXPR:
4758 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4759 case LE_EXPR:
4760 case LT_EXPR:
4761 case UNLE_EXPR:
4762 case UNLT_EXPR:
4763 /* In C++ a ?: expression can be an lvalue, so put the
4764 operand which will be used if they are equal first
4765 so that we can convert this back to the
4766 corresponding COND_EXPR. */
4767 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4768 {
4769 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4770 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4771 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4772 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4773 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4774 comp_op1, comp_op0);
4775 return pedantic_non_lvalue_loc (loc,
4776 fold_convert_loc (loc, type, tem));
4777 }
4778 break;
4779 case GE_EXPR:
4780 case GT_EXPR:
4781 case UNGE_EXPR:
4782 case UNGT_EXPR:
4783 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4784 {
4785 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4786 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4787 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4788 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4789 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4790 comp_op1, comp_op0);
4791 return pedantic_non_lvalue_loc (loc,
4792 fold_convert_loc (loc, type, tem));
4793 }
4794 break;
4795 case UNEQ_EXPR:
4796 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4797 return pedantic_non_lvalue_loc (loc,
4798 fold_convert_loc (loc, type, arg2));
4799 break;
4800 case LTGT_EXPR:
4801 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4802 return pedantic_non_lvalue_loc (loc,
4803 fold_convert_loc (loc, type, arg1));
4804 break;
4805 default:
4806 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4807 break;
4808 }
4809 }
4810
4811 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4812 we might still be able to simplify this. For example,
4813 if C1 is one less or one more than C2, this might have started
4814 out as a MIN or MAX and been transformed by this function.
4815 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4816
4817 if (INTEGRAL_TYPE_P (type)
4818 && TREE_CODE (arg01) == INTEGER_CST
4819 && TREE_CODE (arg2) == INTEGER_CST)
4820 switch (comp_code)
4821 {
4822 case EQ_EXPR:
4823 if (TREE_CODE (arg1) == INTEGER_CST)
4824 break;
4825 /* We can replace A with C1 in this case. */
4826 arg1 = fold_convert_loc (loc, type, arg01);
4827 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4828
4829 case LT_EXPR:
4830 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4831 MIN_EXPR, to preserve the signedness of the comparison. */
4832 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4833 OEP_ONLY_CONST)
4834 && operand_equal_p (arg01,
4835 const_binop (PLUS_EXPR, arg2,
4836 build_int_cst (type, 1)),
4837 OEP_ONLY_CONST))
4838 {
4839 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4840 fold_convert_loc (loc, TREE_TYPE (arg00),
4841 arg2));
4842 return pedantic_non_lvalue_loc (loc,
4843 fold_convert_loc (loc, type, tem));
4844 }
4845 break;
4846
4847 case LE_EXPR:
4848 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4849 as above. */
4850 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4851 OEP_ONLY_CONST)
4852 && operand_equal_p (arg01,
4853 const_binop (MINUS_EXPR, arg2,
4854 build_int_cst (type, 1)),
4855 OEP_ONLY_CONST))
4856 {
4857 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4858 fold_convert_loc (loc, TREE_TYPE (arg00),
4859 arg2));
4860 return pedantic_non_lvalue_loc (loc,
4861 fold_convert_loc (loc, type, tem));
4862 }
4863 break;
4864
4865 case GT_EXPR:
4866 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4867 MAX_EXPR, to preserve the signedness of the comparison. */
4868 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4869 OEP_ONLY_CONST)
4870 && operand_equal_p (arg01,
4871 const_binop (MINUS_EXPR, arg2,
4872 build_int_cst (type, 1)),
4873 OEP_ONLY_CONST))
4874 {
4875 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4876 fold_convert_loc (loc, TREE_TYPE (arg00),
4877 arg2));
4878 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4879 }
4880 break;
4881
4882 case GE_EXPR:
4883 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4884 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4885 OEP_ONLY_CONST)
4886 && operand_equal_p (arg01,
4887 const_binop (PLUS_EXPR, arg2,
4888 build_int_cst (type, 1)),
4889 OEP_ONLY_CONST))
4890 {
4891 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4892 fold_convert_loc (loc, TREE_TYPE (arg00),
4893 arg2));
4894 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4895 }
4896 break;
4897 case NE_EXPR:
4898 break;
4899 default:
4900 gcc_unreachable ();
4901 }
4902
4903 return NULL_TREE;
4904 }
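
/* Illustrative sketch, not part of GCC: two of the rewrites tabulated
   above, at the source level.  The function names are hypothetical.  */
#if 0
static double
my_fabs (double a)
{
  return a > 0 ? a : -a;	/* A > 0 ? A : -A  ->  abs (A).  */
}

static int
my_max (int a, int b)
{
  return a >= b ? a : b;	/* A >= B ? A : B  ->  max (A, B).  */
}
#endif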
4905
4906
4907 \f
4908 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4909 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4910 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4911 false) >= 2)
4912 #endif
4913
4914 /* EXP is some logical combination of boolean tests. See if we can
4915 merge it into some range test. Return the new tree if so. */
4916
4917 static tree
4918 fold_range_test (location_t loc, enum tree_code code, tree type,
4919 tree op0, tree op1)
4920 {
4921 int or_op = (code == TRUTH_ORIF_EXPR
4922 || code == TRUTH_OR_EXPR);
4923 int in0_p, in1_p, in_p;
4924 tree low0, low1, low, high0, high1, high;
4925 bool strict_overflow_p = false;
4926 tree tem, lhs, rhs;
4927 const char * const warnmsg = G_("assuming signed overflow does not occur "
4928 "when simplifying range test");
4929
4930 if (!INTEGRAL_TYPE_P (type))
4931 return 0;
4932
4933 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4934 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4935
4936 /* If this is an OR operation, invert both sides; we will invert
4937 again at the end. */
4938 if (or_op)
4939 in0_p = ! in0_p, in1_p = ! in1_p;
4940
4941 /* If both expressions are the same, if we can merge the ranges, and we
4942 can build the range test, return it or it inverted. If one of the
4943 ranges is always true or always false, consider it to be the same
4944 expression as the other. */
4945 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4946 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4947 in1_p, low1, high1)
4948 && 0 != (tem = (build_range_check (loc, type,
4949 lhs != 0 ? lhs
4950 : rhs != 0 ? rhs : integer_zero_node,
4951 in_p, low, high))))
4952 {
4953 if (strict_overflow_p)
4954 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4955 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4956 }
4957
4958 /* On machines where the branch cost is expensive, if this is a
4959 short-circuited branch and the underlying object on both sides
4960 is the same, make a non-short-circuit operation. */
4961 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4962 && lhs != 0 && rhs != 0
4963 && (code == TRUTH_ANDIF_EXPR
4964 || code == TRUTH_ORIF_EXPR)
4965 && operand_equal_p (lhs, rhs, 0))
4966 {
4967 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4968 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4969 which cases we can't do this. */
4970 if (simple_operand_p (lhs))
4971 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4972 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4973 type, op0, op1);
4974
4975 else if (!lang_hooks.decls.global_bindings_p ()
4976 && !CONTAINS_PLACEHOLDER_P (lhs))
4977 {
4978 tree common = save_expr (lhs);
4979
4980 if (0 != (lhs = build_range_check (loc, type, common,
4981 or_op ? ! in0_p : in0_p,
4982 low0, high0))
4983 && (0 != (rhs = build_range_check (loc, type, common,
4984 or_op ? ! in1_p : in1_p,
4985 low1, high1))))
4986 {
4987 if (strict_overflow_p)
4988 fold_overflow_warning (warnmsg,
4989 WARN_STRICT_OVERFLOW_COMPARISON);
4990 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4991 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4992 type, lhs, rhs);
4993 }
4994 }
4995 }
4996
4997 return 0;
4998 }
4999 \f
5000 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5001 P-bit value. Arrange things so the extra bits will be set to zero if and
5002 only if C is sign-extended to its full width. If MASK is nonzero,
5003 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5004
5005 static tree
5006 unextend (tree c, int p, int unsignedp, tree mask)
5007 {
5008 tree type = TREE_TYPE (c);
5009 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5010 tree temp;
5011
5012 if (p == modesize || unsignedp)
5013 return c;
5014
5015 /* We work by getting just the sign bit into the low-order bit, then
5016 into the high-order bit, then sign-extend. We then XOR that value
5017 with C. */
5018 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5019
5020 /* We must use a signed type in order to get an arithmetic right shift.
5021 However, we must also avoid introducing accidental overflows, so that
5022 a subsequent call to integer_zerop will work. Hence we must
5023 do the type conversion here. At this point, the constant is either
5024 zero or one, and the conversion to a signed type can never overflow.
5025 We could get an overflow if this conversion is done anywhere else. */
5026 if (TYPE_UNSIGNED (type))
5027 temp = fold_convert (signed_type_for (type), temp);
5028
5029 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5030 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5031 if (mask != 0)
5032 temp = const_binop (BIT_AND_EXPR, temp,
5033 fold_convert (TREE_TYPE (c), mask));
5034 /* If necessary, convert the type back to match the type of C. */
5035 if (TYPE_UNSIGNED (type))
5036 temp = fold_convert (type, temp);
5037
5038 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5039 }
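
/* Illustrative example, not part of GCC: with P = 4 in an 8-bit mode,
   C = 0xfb (the field value 0xb correctly sign-extended) becomes 0x0b,
   all extra bits zero, whereas C = 0x0b (not sign-extended) becomes 0xfb,
   so a later equality test against the masked field is known to fail.  */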
5040 \f
5041 /* For an expression that has the form
5042 (A && B) || ~B
5043 or
5044 (A || B) && ~B,
5045 we can drop one of the inner expressions and simplify to
5046 A || ~B
5047 or
5048 A && ~B
5049 LOC is the location of the resulting expression. OP is the inner
5050 logical operation, i.e. the left-hand side in the examples above, while CMPOP
5051 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5052 removing a condition that guards another, as in
5053 (A != NULL && A->...) || A == NULL
5054 which we must not transform. If RHS_ONLY is true, only eliminate the
5055 right-most operand of the inner logical operation. */
5056
5057 static tree
5058 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5059 bool rhs_only)
5060 {
5061 tree type = TREE_TYPE (cmpop);
5062 enum tree_code code = TREE_CODE (cmpop);
5063 enum tree_code truthop_code = TREE_CODE (op);
5064 tree lhs = TREE_OPERAND (op, 0);
5065 tree rhs = TREE_OPERAND (op, 1);
5066 tree orig_lhs = lhs, orig_rhs = rhs;
5067 enum tree_code rhs_code = TREE_CODE (rhs);
5068 enum tree_code lhs_code = TREE_CODE (lhs);
5069 enum tree_code inv_code;
5070
5071 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5072 return NULL_TREE;
5073
5074 if (TREE_CODE_CLASS (code) != tcc_comparison)
5075 return NULL_TREE;
5076
5077 if (rhs_code == truthop_code)
5078 {
5079 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5080 if (newrhs != NULL_TREE)
5081 {
5082 rhs = newrhs;
5083 rhs_code = TREE_CODE (rhs);
5084 }
5085 }
5086 if (lhs_code == truthop_code && !rhs_only)
5087 {
5088 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5089 if (newlhs != NULL_TREE)
5090 {
5091 lhs = newlhs;
5092 lhs_code = TREE_CODE (lhs);
5093 }
5094 }
5095
5096 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5097 if (inv_code == rhs_code
5098 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5099 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5100 return lhs;
5101 if (!rhs_only && inv_code == lhs_code
5102 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5103 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5104 return rhs;
5105 if (rhs != orig_rhs || lhs != orig_lhs)
5106 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5107 lhs, rhs);
5108 return NULL_TREE;
5109 }
5110
5111 /* Find ways of folding logical expressions of LHS and RHS:
5112 Try to merge two comparisons to the same innermost item.
5113 Look for range tests like "ch >= '0' && ch <= '9'".
5114 Look for combinations of simple terms on machines with expensive branches
5115 and evaluate the RHS unconditionally.
5116
5117 For example, if we have p->a == 2 && p->b == 4 and we can make an
5118 object large enough to span both A and B, we can do this with a comparison
5119 against the object ANDed with a mask.
5120
5121 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5122 operations to do this with one comparison.
5123
5124 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5125 function and the one above.
5126
5127 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5128 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5129
5130 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5131 two operands.
5132
5133 We return the simplified tree or 0 if no optimization is possible. */
5134
5135 static tree
5136 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5137 tree lhs, tree rhs)
5138 {
5139 /* If this is the "or" of two comparisons, we can do something if
5140 the comparisons are NE_EXPR. If this is the "and", we can do something
5141 if the comparisons are EQ_EXPR. I.e.,
5142 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5143
5144 WANTED_CODE is this operation code. For single bit fields, we can
5145 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5146 comparison for one-bit fields. */
5147
5148 enum tree_code wanted_code;
5149 enum tree_code lcode, rcode;
5150 tree ll_arg, lr_arg, rl_arg, rr_arg;
5151 tree ll_inner, lr_inner, rl_inner, rr_inner;
5152 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5153 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5154 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5155 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5156 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5157 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5158 enum machine_mode lnmode, rnmode;
5159 tree ll_mask, lr_mask, rl_mask, rr_mask;
5160 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5161 tree l_const, r_const;
5162 tree lntype, rntype, result;
5163 HOST_WIDE_INT first_bit, end_bit;
5164 int volatilep;
5165
5166 /* Start by getting the comparison codes. Fail if anything is volatile.
5167 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5168 it were surrounded with a NE_EXPR. */
5169
5170 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5171 return 0;
5172
5173 lcode = TREE_CODE (lhs);
5174 rcode = TREE_CODE (rhs);
5175
5176 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5177 {
5178 lhs = build2 (NE_EXPR, truth_type, lhs,
5179 build_int_cst (TREE_TYPE (lhs), 0));
5180 lcode = NE_EXPR;
5181 }
5182
5183 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5184 {
5185 rhs = build2 (NE_EXPR, truth_type, rhs,
5186 build_int_cst (TREE_TYPE (rhs), 0));
5187 rcode = NE_EXPR;
5188 }
5189
5190 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5191 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5192 return 0;
5193
5194 ll_arg = TREE_OPERAND (lhs, 0);
5195 lr_arg = TREE_OPERAND (lhs, 1);
5196 rl_arg = TREE_OPERAND (rhs, 0);
5197 rr_arg = TREE_OPERAND (rhs, 1);
5198
5199 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5200 if (simple_operand_p (ll_arg)
5201 && simple_operand_p (lr_arg))
5202 {
5203 if (operand_equal_p (ll_arg, rl_arg, 0)
5204 && operand_equal_p (lr_arg, rr_arg, 0))
5205 {
5206 result = combine_comparisons (loc, code, lcode, rcode,
5207 truth_type, ll_arg, lr_arg);
5208 if (result)
5209 return result;
5210 }
5211 else if (operand_equal_p (ll_arg, rr_arg, 0)
5212 && operand_equal_p (lr_arg, rl_arg, 0))
5213 {
5214 result = combine_comparisons (loc, code, lcode,
5215 swap_tree_comparison (rcode),
5216 truth_type, ll_arg, lr_arg);
5217 if (result)
5218 return result;
5219 }
5220 }
5221
5222 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5223 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5224
5225 /* If the RHS can be evaluated unconditionally and its operands are
5226 simple, it wins to evaluate the RHS unconditionally on machines
5227 with expensive branches. In this case, this isn't a comparison
5228 that can be merged. */
5229
5230 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5231 false) >= 2
5232 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5233 && simple_operand_p (rl_arg)
5234 && simple_operand_p (rr_arg))
5235 {
5236 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5237 if (code == TRUTH_OR_EXPR
5238 && lcode == NE_EXPR && integer_zerop (lr_arg)
5239 && rcode == NE_EXPR && integer_zerop (rr_arg)
5240 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5241 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5242 return build2_loc (loc, NE_EXPR, truth_type,
5243 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5244 ll_arg, rl_arg),
5245 build_int_cst (TREE_TYPE (ll_arg), 0));
5246
5247 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5248 if (code == TRUTH_AND_EXPR
5249 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5250 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5251 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5252 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5253 return build2_loc (loc, EQ_EXPR, truth_type,
5254 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5255 ll_arg, rl_arg),
5256 build_int_cst (TREE_TYPE (ll_arg), 0));
5257 }
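/* As a source-level illustration (a hypothetical example, not code the
   folder itself uses): with int operands, "a != 0 || b != 0" becomes
   "(a | b) != 0" and "a == 0 && b == 0" becomes "(a | b) == 0",
   trading the second branch for one cheap bitwise OR.  */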
5258
5259 /* See if the comparisons can be merged. Then get all the parameters for
5260 each side. */
5261
5262 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5263 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5264 return 0;
5265
5266 volatilep = 0;
5267 ll_inner = decode_field_reference (loc, ll_arg,
5268 &ll_bitsize, &ll_bitpos, &ll_mode,
5269 &ll_unsignedp, &volatilep, &ll_mask,
5270 &ll_and_mask);
5271 lr_inner = decode_field_reference (loc, lr_arg,
5272 &lr_bitsize, &lr_bitpos, &lr_mode,
5273 &lr_unsignedp, &volatilep, &lr_mask,
5274 &lr_and_mask);
5275 rl_inner = decode_field_reference (loc, rl_arg,
5276 &rl_bitsize, &rl_bitpos, &rl_mode,
5277 &rl_unsignedp, &volatilep, &rl_mask,
5278 &rl_and_mask);
5279 rr_inner = decode_field_reference (loc, rr_arg,
5280 &rr_bitsize, &rr_bitpos, &rr_mode,
5281 &rr_unsignedp, &volatilep, &rr_mask,
5282 &rr_and_mask);
5283
5284 /* The inner operation on the lhs of each comparison must be the same
5285 if we are to be able to do anything. Then see if we have constants.
5286 If not, the same must be true for the rhs's. */
5288 if (volatilep || ll_inner == 0 || rl_inner == 0
5289 || ! operand_equal_p (ll_inner, rl_inner, 0))
5290 return 0;
5291
5292 if (TREE_CODE (lr_arg) == INTEGER_CST
5293 && TREE_CODE (rr_arg) == INTEGER_CST)
5294 l_const = lr_arg, r_const = rr_arg;
5295 else if (lr_inner == 0 || rr_inner == 0
5296 || ! operand_equal_p (lr_inner, rr_inner, 0))
5297 return 0;
5298 else
5299 l_const = r_const = 0;
5300
5301 /* If either comparison code is not correct for our logical operation,
5302 fail. However, we can convert a one-bit comparison against zero into
5303 the opposite comparison against that bit being set in the field. */
5304
5305 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5306 if (lcode != wanted_code)
5307 {
5308 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5309 {
5310 /* Make the left operand unsigned, since we are only interested
5311 in the value of one bit. Otherwise we are doing the wrong
5312 thing below. */
5313 ll_unsignedp = 1;
5314 l_const = ll_mask;
5315 }
5316 else
5317 return 0;
5318 }
5319
5320 /* This is analogous to the code for l_const above. */
5321 if (rcode != wanted_code)
5322 {
5323 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5324 {
5325 rl_unsignedp = 1;
5326 r_const = rl_mask;
5327 }
5328 else
5329 return 0;
5330 }
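/* Illustration (hypothetical): when EQ_EXPR is wanted, a test such as
   "(x & 8) != 0" is recast here as "(x & 8) == 8" by using the
   power-of-two mask itself as the constant, so it can still be merged
   with the other comparison below.  */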
5331
5332 /* See if we can find a mode that contains both fields being compared on
5333 the left. If we can't, fail. Otherwise, update all constants and masks
5334 to be relative to a field of that size. */
5335 first_bit = MIN (ll_bitpos, rl_bitpos);
5336 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5337 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5338 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5339 volatilep);
5340 if (lnmode == VOIDmode)
5341 return 0;
5342
5343 lnbitsize = GET_MODE_BITSIZE (lnmode);
5344 lnbitpos = first_bit & ~ (lnbitsize - 1);
5345 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5346 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5347
5348 if (BYTES_BIG_ENDIAN)
5349 {
5350 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5351 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5352 }
5353
5354 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5355 size_int (xll_bitpos));
5356 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5357 size_int (xrl_bitpos));
5358
5359 if (l_const)
5360 {
5361 l_const = fold_convert_loc (loc, lntype, l_const);
5362 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5363 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5364 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5365 fold_build1_loc (loc, BIT_NOT_EXPR,
5366 lntype, ll_mask))))
5367 {
5368 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5369
5370 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5371 }
5372 }
5373 if (r_const)
5374 {
5375 r_const = fold_convert_loc (loc, lntype, r_const);
5376 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5377 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5378 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5379 fold_build1_loc (loc, BIT_NOT_EXPR,
5380 lntype, rl_mask))))
5381 {
5382 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5383
5384 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5385 }
5386 }
5387
5388 /* If the right sides are not constant, do the same for them. Also,
5389 disallow this optimization if a size or signedness mismatch occurs
5390 between the left and right sides. */
5391 if (l_const == 0)
5392 {
5393 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5394 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5395 /* Make sure the two fields on the right
5396 correspond to the left without being swapped. */
5397 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5398 return 0;
5399
5400 first_bit = MIN (lr_bitpos, rr_bitpos);
5401 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5402 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5403 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5404 volatilep);
5405 if (rnmode == VOIDmode)
5406 return 0;
5407
5408 rnbitsize = GET_MODE_BITSIZE (rnmode);
5409 rnbitpos = first_bit & ~ (rnbitsize - 1);
5410 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5411 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5412
5413 if (BYTES_BIG_ENDIAN)
5414 {
5415 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5416 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5417 }
5418
5419 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5420 rntype, lr_mask),
5421 size_int (xlr_bitpos));
5422 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5423 rntype, rr_mask),
5424 size_int (xrr_bitpos));
5425
5426 /* Make a mask that corresponds to both fields being compared.
5427 Do this for both items being compared. If the operands are the
5428 same size and the bits being compared are in the same position
5429 then we can do this by masking both and comparing the masked
5430 results. */
5431 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5432 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5433 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5434 {
5435 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5436 ll_unsignedp || rl_unsignedp);
5437 if (! all_ones_mask_p (ll_mask, lnbitsize))
5438 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5439
5440 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5441 lr_unsignedp || rr_unsignedp);
5442 if (! all_ones_mask_p (lr_mask, rnbitsize))
5443 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5444
5445 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5446 }
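/* Illustration (hypothetical): for bit-fields of equal size and
   position, "x.a == y.a && x.b == y.b" becomes the single masked
   compare of the containing words of x and y built just above.  */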
5447
5448 /* There is still another way we can do something: If both pairs of
5449 fields being compared are adjacent, we may be able to make a wider
5450 field containing them both.
5451
5452 Note that we still must mask the lhs/rhs expressions. Furthermore,
5453 the mask must be shifted to account for the shift done by
5454 make_bit_field_ref. */
5455 if ((ll_bitsize + ll_bitpos == rl_bitpos
5456 && lr_bitsize + lr_bitpos == rr_bitpos)
5457 || (ll_bitpos == rl_bitpos + rl_bitsize
5458 && lr_bitpos == rr_bitpos + rr_bitsize))
5459 {
5460 tree type;
5461
5462 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5463 ll_bitsize + rl_bitsize,
5464 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5465 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5466 lr_bitsize + rr_bitsize,
5467 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5468
5469 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5470 size_int (MIN (xll_bitpos, xrl_bitpos)));
5471 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5472 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5473
5474 /* Convert to the smaller type before masking out unwanted bits. */
5475 type = lntype;
5476 if (lntype != rntype)
5477 {
5478 if (lnbitsize > rnbitsize)
5479 {
5480 lhs = fold_convert_loc (loc, rntype, lhs);
5481 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5482 type = rntype;
5483 }
5484 else if (lnbitsize < rnbitsize)
5485 {
5486 rhs = fold_convert_loc (loc, lntype, rhs);
5487 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5488 type = lntype;
5489 }
5490 }
5491
5492 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5493 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5494
5495 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5496 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5497
5498 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5499 }
5500
5501 return 0;
5502 }
5503
5504 /* Handle the case of comparisons with constants. If there is something in
5505 common between the masks, those bits of the constants must be the same.
5506 If not, the condition is always false. Test for this to avoid generating
5507 incorrect code below. */
5508 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5509 if (! integer_zerop (result)
5510 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5511 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5512 {
5513 if (wanted_code == NE_EXPR)
5514 {
5515 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5516 return constant_boolean_node (true, truth_type);
5517 }
5518 else
5519 {
5520 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5521 return constant_boolean_node (false, truth_type);
5522 }
5523 }
5524
5525 /* Construct the expression we will return. First get the component
5526 reference we will make. Unless the mask is all ones for the width
5527 of that field, perform the mask operation. Then compare with the
5528 merged constant. */
5529 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5530 ll_unsignedp || rl_unsignedp);
5531
5532 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5533 if (! all_ones_mask_p (ll_mask, lnbitsize))
5534 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5535
5536 return build2_loc (loc, wanted_code, truth_type, result,
5537 const_binop (BIT_IOR_EXPR, l_const, r_const));
5538 }
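/* A minimal, self-contained sketch (hypothetical code, not used by the
   folder) of what fold_truth_andor_1 achieves: both tests below read
   the same word, so they merge into one masked compare against the
   merged constant (the exact mask and constant are layout-dependent).  */

struct fold_demo_s { unsigned a : 4; unsigned b : 4; };

static int
truth_andor_demo (struct fold_demo_s x)
{
  return x.a == 1 && x.b == 2;	/* One load, one mask, one compare.  */
}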
5539 \f
5540 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5541 constant. */
5542
5543 static tree
5544 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5545 tree op0, tree op1)
5546 {
5547 tree arg0 = op0;
5548 enum tree_code op_code;
5549 tree comp_const;
5550 tree minmax_const;
5551 int consts_equal, consts_lt;
5552 tree inner;
5553
5554 STRIP_SIGN_NOPS (arg0);
5555
5556 op_code = TREE_CODE (arg0);
5557 minmax_const = TREE_OPERAND (arg0, 1);
5558 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5559 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5560 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5561 inner = TREE_OPERAND (arg0, 0);
5562
5563 /* If something does not permit us to optimize, return the original tree. */
5564 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5565 || TREE_CODE (comp_const) != INTEGER_CST
5566 || TREE_OVERFLOW (comp_const)
5567 || TREE_CODE (minmax_const) != INTEGER_CST
5568 || TREE_OVERFLOW (minmax_const))
5569 return NULL_TREE;
5570
5571 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5572 and GT_EXPR, doing the rest with recursive calls using logical
5573 simplifications. */
5574 switch (code)
5575 {
5576 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5577 {
5578 tree tem
5579 = optimize_minmax_comparison (loc,
5580 invert_tree_comparison (code, false),
5581 type, op0, op1);
5582 if (tem)
5583 return invert_truthvalue_loc (loc, tem);
5584 return NULL_TREE;
5585 }
5586
5587 case GE_EXPR:
5588 return
5589 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5590 optimize_minmax_comparison
5591 (loc, EQ_EXPR, type, arg0, comp_const),
5592 optimize_minmax_comparison
5593 (loc, GT_EXPR, type, arg0, comp_const));
5594
5595 case EQ_EXPR:
5596 if (op_code == MAX_EXPR && consts_equal)
5597 /* MAX (X, 0) == 0 -> X <= 0 */
5598 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5599
5600 else if (op_code == MAX_EXPR && consts_lt)
5601 /* MAX (X, 0) == 5 -> X == 5 */
5602 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5603
5604 else if (op_code == MAX_EXPR)
5605 /* MAX (X, 0) == -1 -> false */
5606 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5607
5608 else if (consts_equal)
5609 /* MIN (X, 0) == 0 -> X >= 0 */
5610 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5611
5612 else if (consts_lt)
5613 /* MIN (X, 0) == 5 -> false */
5614 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5615
5616 else
5617 /* MIN (X, 0) == -1 -> X == -1 */
5618 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5619
5620 case GT_EXPR:
5621 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5622 /* MAX (X, 0) > 0 -> X > 0
5623 MAX (X, 0) > 5 -> X > 5 */
5624 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5625
5626 else if (op_code == MAX_EXPR)
5627 /* MAX (X, 0) > -1 -> true */
5628 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5629
5630 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5631 /* MIN (X, 0) > 0 -> false
5632 MIN (X, 0) > 5 -> false */
5633 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5634
5635 else
5636 /* MIN (X, 0) > -1 -> X > -1 */
5637 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5638
5639 default:
5640 return NULL_TREE;
5641 }
5642 }
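/* A minimal sketch (hypothetical, not called anywhere) of the recursive
   decomposition above: GE_EXPR splits into EQ_EXPR || GT_EXPR, and each
   arm folds on its own by the cases just handled.  */

static int
minmax_compare_demo (int x)
{
  int m = x > 3 ? x : 3;	/* MAX_EXPR <x, 3> after folding.  */
  /* MAX (x, 3) >= 5 -> MAX (x, 3) == 5 || MAX (x, 3) > 5
     -> x == 5 || x > 5, i.e. x >= 5.  */
  return m >= 5;
}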
5643 \f
5644 /* T is an integer expression that is being multiplied, divided, or taken a
5645 modulus (CODE says which and what kind of divide or modulus) by a
5646 constant C. See if we can eliminate that operation by folding it with
5647 other operations already in T. WIDE_TYPE, if non-null, is a type that
5648 should be used for the computation if wider than our type.
5649
5650 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5651 (X * 2) + (Y * 4). We must, however, be assured that either the original
5652 expression would not overflow or that overflow is undefined for the type
5653 in the language in question.
5654
5655 If we return a non-null expression, it is an equivalent form of the
5656 original computation, but need not be in the original type.
5657
5658 We set *STRICT_OVERFLOW_P to true if the return values depends on
5659 signed overflow being undefined. Otherwise we do not change
5660 *STRICT_OVERFLOW_P. */
5661
5662 static tree
5663 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5664 bool *strict_overflow_p)
5665 {
5666 /* To avoid exponential search depth, refuse to allow recursion past
5667 three levels. Beyond that (1) it's highly unlikely that we'll find
5668 something interesting and (2) we've probably processed it before
5669 when we built the inner expression. */
5670
5671 static int depth;
5672 tree ret;
5673
5674 if (depth > 3)
5675 return NULL;
5676
5677 depth++;
5678 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5679 depth--;
5680
5681 return ret;
5682 }
5683
5684 static tree
5685 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5686 bool *strict_overflow_p)
5687 {
5688 tree type = TREE_TYPE (t);
5689 enum tree_code tcode = TREE_CODE (t);
5690 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5691 > GET_MODE_SIZE (TYPE_MODE (type)))
5692 ? wide_type : type);
5693 tree t1, t2;
5694 int same_p = tcode == code;
5695 tree op0 = NULL_TREE, op1 = NULL_TREE;
5696 bool sub_strict_overflow_p;
5697
5698 /* Don't deal with constants of zero here; they confuse the code below. */
5699 if (integer_zerop (c))
5700 return NULL_TREE;
5701
5702 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5703 op0 = TREE_OPERAND (t, 0);
5704
5705 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5706 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5707
5708 /* Note that we need not handle conditional operations here since fold
5709 already handles those cases. So just do arithmetic here. */
5710 switch (tcode)
5711 {
5712 case INTEGER_CST:
5713 /* For a constant, we can always simplify if we are a multiply
5714 or (for divide and modulus) if it is a multiple of our constant. */
5715 if (code == MULT_EXPR
5716 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5717 return const_binop (code, fold_convert (ctype, t),
5718 fold_convert (ctype, c));
5719 break;
5720
5721 CASE_CONVERT: case NON_LVALUE_EXPR:
5722 /* If op0 is an expression ... */
5723 if ((COMPARISON_CLASS_P (op0)
5724 || UNARY_CLASS_P (op0)
5725 || BINARY_CLASS_P (op0)
5726 || VL_EXP_CLASS_P (op0)
5727 || EXPRESSION_CLASS_P (op0))
5728 /* ... and has wrapping overflow, and its type is smaller
5729 than ctype, then we cannot pass through as widening. */
5730 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5731 && (TYPE_PRECISION (ctype)
5732 > TYPE_PRECISION (TREE_TYPE (op0))))
5733 /* ... or this is a truncation (t is narrower than op0),
5734 then we cannot pass through this narrowing. */
5735 || (TYPE_PRECISION (type)
5736 < TYPE_PRECISION (TREE_TYPE (op0)))
5737 /* ... or signedness changes for division or modulus,
5738 then we cannot pass through this conversion. */
5739 || (code != MULT_EXPR
5740 && (TYPE_UNSIGNED (ctype)
5741 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5742 /* ... or has undefined overflow while the converted to
5743 type has not, we cannot do the operation in the inner type
5744 as that would introduce undefined overflow. */
5745 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5746 && !TYPE_OVERFLOW_UNDEFINED (type))))
5747 break;
5748
5749 /* Pass the constant down and see if we can make a simplification. If
5750 we can, replace this expression with the inner simplification for
5751 possible later conversion to our or some other type. */
5752 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5753 && TREE_CODE (t2) == INTEGER_CST
5754 && !TREE_OVERFLOW (t2)
5755 && (0 != (t1 = extract_muldiv (op0, t2, code,
5756 code == MULT_EXPR
5757 ? ctype : NULL_TREE,
5758 strict_overflow_p))))
5759 return t1;
5760 break;
5761
5762 case ABS_EXPR:
5763 /* If widening the type changes it from signed to unsigned, then we
5764 must avoid building ABS_EXPR itself as unsigned. */
5765 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5766 {
5767 tree cstype = (*signed_type_for) (ctype);
5768 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5769 != 0)
5770 {
5771 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5772 return fold_convert (ctype, t1);
5773 }
5774 break;
5775 }
5776 /* If the constant is negative, we cannot simplify this. */
5777 if (tree_int_cst_sgn (c) == -1)
5778 break;
5779 /* FALLTHROUGH */
5780 case NEGATE_EXPR:
5781 /* For division and modulus, type can't be unsigned, as e.g.
5782 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5783 For signed types, even with wrapping overflow, this is fine. */
5784 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5785 break;
5786 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5787 != 0)
5788 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5789 break;
5790
5791 case MIN_EXPR: case MAX_EXPR:
5792 /* If widening the type changes the signedness, then we can't perform
5793 this optimization as that changes the result. */
5794 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5795 break;
5796
5797 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5798 sub_strict_overflow_p = false;
5799 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5800 &sub_strict_overflow_p)) != 0
5801 && (t2 = extract_muldiv (op1, c, code, wide_type,
5802 &sub_strict_overflow_p)) != 0)
5803 {
5804 if (tree_int_cst_sgn (c) < 0)
5805 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5806 if (sub_strict_overflow_p)
5807 *strict_overflow_p = true;
5808 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5809 fold_convert (ctype, t2));
5810 }
5811 break;
5812
5813 case LSHIFT_EXPR: case RSHIFT_EXPR:
5814 /* If the second operand is constant, this is a multiplication
5815 or floor division, by a power of two, so we can treat it that
5816 way unless the multiplier or divisor overflows. Signed
5817 left-shift overflow is implementation-defined rather than
5818 undefined in C90, so do not convert signed left shift into
5819 multiplication. */
5820 if (TREE_CODE (op1) == INTEGER_CST
5821 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5822 /* const_binop may not detect overflow correctly,
5823 so check for it explicitly here. */
5824 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5825 && 0 != (t1 = fold_convert (ctype,
5826 const_binop (LSHIFT_EXPR,
5827 size_one_node,
5828 op1)))
5829 && !TREE_OVERFLOW (t1))
5830 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5831 ? MULT_EXPR : FLOOR_DIV_EXPR,
5832 ctype,
5833 fold_convert (ctype, op0),
5834 t1),
5835 c, code, wide_type, strict_overflow_p);
5836 break;
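/* Illustration (hypothetical): with unsigned X, (X << 3) * 4 is
   rewritten here as (X * 8) * 4, and the recursive call then combines
   the constants into X * 32.  */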
5837
5838 case PLUS_EXPR: case MINUS_EXPR:
5839 /* See if we can eliminate the operation on both sides. If we can, we
5840 can return a new PLUS or MINUS. If we can't, the only remaining
5841 cases where we can do anything are if the second operand is a
5842 constant. */
5843 sub_strict_overflow_p = false;
5844 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5845 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5846 if (t1 != 0 && t2 != 0
5847 && (code == MULT_EXPR
5848 /* If not multiplication, we can only do this if both operands
5849 are divisible by c. */
5850 || (multiple_of_p (ctype, op0, c)
5851 && multiple_of_p (ctype, op1, c))))
5852 {
5853 if (sub_strict_overflow_p)
5854 *strict_overflow_p = true;
5855 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5856 fold_convert (ctype, t2));
5857 }
5858
5859 /* If this was a subtraction, negate OP1 and set it to be an addition.
5860 This simplifies the logic below. */
5861 if (tcode == MINUS_EXPR)
5862 {
5863 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5864 /* If OP1 was not easily negatable, the constant may be OP0. */
5865 if (TREE_CODE (op0) == INTEGER_CST)
5866 {
5867 tree tem = op0;
5868 op0 = op1;
5869 op1 = tem;
5870 tem = t1;
5871 t1 = t2;
5872 t2 = tem;
5873 }
5874 }
5875
5876 if (TREE_CODE (op1) != INTEGER_CST)
5877 break;
5878
5879 /* If either OP1 or C are negative, this optimization is not safe for
5880 some of the division and remainder types while for others we need
5881 to change the code. */
5882 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5883 {
5884 if (code == CEIL_DIV_EXPR)
5885 code = FLOOR_DIV_EXPR;
5886 else if (code == FLOOR_DIV_EXPR)
5887 code = CEIL_DIV_EXPR;
5888 else if (code != MULT_EXPR
5889 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5890 break;
5891 }
5892
5893 /* If it's a multiply or a division/modulus operation of a multiple
5894 of our constant, do the operation and verify it doesn't overflow. */
5895 if (code == MULT_EXPR
5896 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5897 {
5898 op1 = const_binop (code, fold_convert (ctype, op1),
5899 fold_convert (ctype, c));
5900 /* We allow the constant to overflow with wrapping semantics. */
5901 if (op1 == 0
5902 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5903 break;
5904 }
5905 else
5906 break;
5907
5908 /* If we have an unsigned type, we cannot widen the operation since it
5909 will change the result if the original computation overflowed. */
5910 if (TYPE_UNSIGNED (ctype) && ctype != type)
5911 break;
5912
5913 /* If we were able to eliminate our operation from the first side,
5914 apply our operation to the second side and reform the PLUS. */
5915 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5916 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5917
5918 /* The last case is if we are a multiply. In that case, we can
5919 apply the distributive law to commute the multiply and addition
5920 if the multiplication of the constants doesn't overflow
5921 and overflow is defined. With undefined overflow
5922 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5923 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5924 return fold_build2 (tcode, ctype,
5925 fold_build2 (code, ctype,
5926 fold_convert (ctype, op0),
5927 fold_convert (ctype, c)),
5928 op1);
5929
5930 break;
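/* Illustration (hypothetical): with unsigned X, whose overflow wraps,
   (X + 4) * 2 distributes to X * 2 + 8 via the MULT_EXPR case just
   above.  */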
5931
5932 case MULT_EXPR:
5933 /* We have a special case here if we are doing something like
5934 (C * 8) % 4 since we know that's zero. */
5935 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5936 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5937 /* If the multiplication can overflow we cannot optimize this. */
5938 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5939 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5940 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5941 {
5942 *strict_overflow_p = true;
5943 return omit_one_operand (type, integer_zero_node, op0);
5944 }
5945
5946 /* ... fall through ... */
5947
5948 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5949 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5950 /* If we can extract our operation from the LHS, do so and return a
5951 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5952 do something only if the second operand is a constant. */
5953 if (same_p
5954 && (t1 = extract_muldiv (op0, c, code, wide_type,
5955 strict_overflow_p)) != 0)
5956 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5957 fold_convert (ctype, op1));
5958 else if (tcode == MULT_EXPR && code == MULT_EXPR
5959 && (t1 = extract_muldiv (op1, c, code, wide_type,
5960 strict_overflow_p)) != 0)
5961 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5962 fold_convert (ctype, t1));
5963 else if (TREE_CODE (op1) != INTEGER_CST)
5964 return 0;
5965
5966 /* If these are the same operation types, we can associate them
5967 assuming no overflow. */
5968 if (tcode == code)
5969 {
5970 bool overflow_p = false;
5971 bool overflow_mul_p;
5972 signop sign = TYPE_SIGN (ctype);
5973 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5974 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5975 if (overflow_mul_p
5976 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5977 overflow_p = true;
5978 if (!overflow_p)
5979 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5980 wide_int_to_tree (ctype, mul));
5981 }
5982
5983 /* If these operations "cancel" each other, we have the main
5984 optimizations of this pass, which occur when either constant is a
5985 multiple of the other, in which case we replace this with an
5986 operation of either CODE or TCODE.
5987
5988 If we have an unsigned type, we cannot do this since it will change
5989 the result if the original computation overflowed. */
5990 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5991 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5992 || (tcode == MULT_EXPR
5993 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5994 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5995 && code != MULT_EXPR)))
5996 {
5997 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5998 {
5999 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6000 *strict_overflow_p = true;
6001 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6002 fold_convert (ctype,
6003 const_binop (TRUNC_DIV_EXPR,
6004 op1, c)));
6005 }
6006 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6007 {
6008 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6009 *strict_overflow_p = true;
6010 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6011 fold_convert (ctype,
6012 const_binop (TRUNC_DIV_EXPR,
6013 c, op1)));
6014 }
6015 }
6016 break;
6017
6018 default:
6019 break;
6020 }
6021
6022 return 0;
6023 }
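/* A minimal sketch (hypothetical, not used by the folder) of the
   cancellation above: with signed overflow undefined, the constants of
   a multiplication under a division cancel.  */

static int
muldiv_cancel_demo (int x)
{
  /* Folds to x * 2, recording that the result relies on signed
     overflow being undefined.  */
  return (x * 8) / 4;
}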
6024 \f
6025 /* Return a node which has the indicated constant VALUE (either 0 or
6026 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6027 and is of the indicated TYPE. */
6028
6029 tree
6030 constant_boolean_node (bool value, tree type)
6031 {
6032 if (type == integer_type_node)
6033 return value ? integer_one_node : integer_zero_node;
6034 else if (type == boolean_type_node)
6035 return value ? boolean_true_node : boolean_false_node;
6036 else if (TREE_CODE (type) == VECTOR_TYPE)
6037 return build_vector_from_val (type,
6038 build_int_cst (TREE_TYPE (type),
6039 value ? -1 : 0));
6040 else
6041 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6042 }
6043
6044
6045 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6046 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6047 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6048 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6049 COND is the first argument to CODE; otherwise (as in the example
6050 given here), it is the second argument. TYPE is the type of the
6051 original expression. Return NULL_TREE if no simplification is
6052 possible. */
6053
6054 static tree
6055 fold_binary_op_with_conditional_arg (location_t loc,
6056 enum tree_code code,
6057 tree type, tree op0, tree op1,
6058 tree cond, tree arg, int cond_first_p)
6059 {
6060 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6061 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6062 tree test, true_value, false_value;
6063 tree lhs = NULL_TREE;
6064 tree rhs = NULL_TREE;
6065 enum tree_code cond_code = COND_EXPR;
6066
6067 if (TREE_CODE (cond) == COND_EXPR
6068 || TREE_CODE (cond) == VEC_COND_EXPR)
6069 {
6070 test = TREE_OPERAND (cond, 0);
6071 true_value = TREE_OPERAND (cond, 1);
6072 false_value = TREE_OPERAND (cond, 2);
6073 /* If this operand throws an exception, then it does not make
6074 sense to try to perform a logical or arithmetic operation
6075 involving it. */
6076 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6077 lhs = true_value;
6078 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6079 rhs = false_value;
6080 }
6081 else
6082 {
6083 tree testtype = TREE_TYPE (cond);
6084 test = cond;
6085 true_value = constant_boolean_node (true, testtype);
6086 false_value = constant_boolean_node (false, testtype);
6087 }
6088
6089 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6090 cond_code = VEC_COND_EXPR;
6091
6092 /* This transformation is only worthwhile if we don't have to wrap ARG
6093 in a SAVE_EXPR and the operation can be simplified without recursing
6094 on at least one of the branches once it's pushed inside the COND_EXPR. */
6095 if (!TREE_CONSTANT (arg)
6096 && (TREE_SIDE_EFFECTS (arg)
6097 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6098 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6099 return NULL_TREE;
6100
6101 arg = fold_convert_loc (loc, arg_type, arg);
6102 if (lhs == 0)
6103 {
6104 true_value = fold_convert_loc (loc, cond_type, true_value);
6105 if (cond_first_p)
6106 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6107 else
6108 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6109 }
6110 if (rhs == 0)
6111 {
6112 false_value = fold_convert_loc (loc, cond_type, false_value);
6113 if (cond_first_p)
6114 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6115 else
6116 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6117 }
6118
6119 /* Check that we have simplified at least one of the branches. */
6120 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6121 return NULL_TREE;
6122
6123 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6124 }
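/* A minimal sketch (hypothetical) of the transformation above: the
   binary operation is distributed over the arms of the condition.  */

static int
cond_arg_demo (int a, int x, int y)
{
  return a + (x < y);	/* Becomes (x < y) ? a + 1 : a + 0.  */
}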
6125
6126 \f
6127 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6128
6129 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6130 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6131 ADDEND is the same as X.
6132
6133 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6134 and finite. The problematic cases are when X is zero, and its mode
6135 has signed zeros. In the case of rounding towards -infinity,
6136 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6137 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6138
6139 bool
6140 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6141 {
6142 if (!real_zerop (addend))
6143 return false;
6144
6145 /* Don't allow the fold with -fsignaling-nans. */
6146 if (HONOR_SNANS (TYPE_MODE (type)))
6147 return false;
6148
6149 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6150 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6151 return true;
6152
6153 /* In a vector or complex, we would need to check the sign of all zeros. */
6154 if (TREE_CODE (addend) != REAL_CST)
6155 return false;
6156
6157 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6158 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6159 negate = !negate;
6160
6161 /* The mode has signed zeros, and we have to honor their sign.
6162 In this situation, there is only one case we can return true for.
6163 X - 0 is the same as X unless rounding towards -infinity is
6164 supported. */
6165 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6166 }
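/* A minimal sketch (hypothetical) of why the sign of zero matters: when
   signed zeros are honored, x + 0.0 must not fold to x, since
   (-0.0) + 0.0 is +0.0, while x - 0.0 may still fold (unless rounding
   toward -infinity is in effect).  */

static double
real_zero_demo (double x)
{
  return x - 0.0;	/* The direction fold_real_zero_addition_p allows.  */
}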
6167
6168 /* Subroutine of fold() that checks comparisons of built-in math
6169 functions against real constants.
6170
6171 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6172 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6173 is the type of the result and ARG0 and ARG1 are the operands of the
6174 comparison. ARG1 must be a TREE_REAL_CST.
6175
6176 The function returns the constant folded tree if a simplification
6177 can be made, and NULL_TREE otherwise. */
6178
6179 static tree
6180 fold_mathfn_compare (location_t loc,
6181 enum built_in_function fcode, enum tree_code code,
6182 tree type, tree arg0, tree arg1)
6183 {
6184 REAL_VALUE_TYPE c;
6185
6186 if (BUILTIN_SQRT_P (fcode))
6187 {
6188 tree arg = CALL_EXPR_ARG (arg0, 0);
6189 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6190
6191 c = TREE_REAL_CST (arg1);
6192 if (REAL_VALUE_NEGATIVE (c))
6193 {
6194 /* sqrt(x) < y is always false, if y is negative. */
6195 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6196 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6197
6198 /* sqrt(x) > y is always true, if y is negative and we
6199 don't care about NaNs, i.e. negative values of x. */
6200 if (code == NE_EXPR || !HONOR_NANS (mode))
6201 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6202
6203 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6204 return fold_build2_loc (loc, GE_EXPR, type, arg,
6205 build_real (TREE_TYPE (arg), dconst0));
6206 }
6207 else if (code == GT_EXPR || code == GE_EXPR)
6208 {
6209 REAL_VALUE_TYPE c2;
6210
6211 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6212 real_convert (&c2, mode, &c2);
6213
6214 if (REAL_VALUE_ISINF (c2))
6215 {
6216 /* sqrt(x) > y is x == +Inf, when y is very large. */
6217 if (HONOR_INFINITIES (mode))
6218 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6219 build_real (TREE_TYPE (arg), c2));
6220
6221 /* sqrt(x) > y is always false, when y is very large
6222 and we don't care about infinities. */
6223 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6224 }
6225
6226 /* sqrt(x) > c is the same as x > c*c. */
6227 return fold_build2_loc (loc, code, type, arg,
6228 build_real (TREE_TYPE (arg), c2));
6229 }
6230 else if (code == LT_EXPR || code == LE_EXPR)
6231 {
6232 REAL_VALUE_TYPE c2;
6233
6234 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6235 real_convert (&c2, mode, &c2);
6236
6237 if (REAL_VALUE_ISINF (c2))
6238 {
6239 /* sqrt(x) < y is always true, when y is a very large
6240 value and we don't care about NaNs or Infinities. */
6241 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6242 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6243
6244 /* sqrt(x) < y is x != +Inf when y is very large and we
6245 don't care about NaNs. */
6246 if (! HONOR_NANS (mode))
6247 return fold_build2_loc (loc, NE_EXPR, type, arg,
6248 build_real (TREE_TYPE (arg), c2));
6249
6250 /* sqrt(x) < y is x >= 0 when y is very large and we
6251 don't care about Infinities. */
6252 if (! HONOR_INFINITIES (mode))
6253 return fold_build2_loc (loc, GE_EXPR, type, arg,
6254 build_real (TREE_TYPE (arg), dconst0));
6255
6256 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6257 arg = save_expr (arg);
6258 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6259 fold_build2_loc (loc, GE_EXPR, type, arg,
6260 build_real (TREE_TYPE (arg),
6261 dconst0)),
6262 fold_build2_loc (loc, NE_EXPR, type, arg,
6263 build_real (TREE_TYPE (arg),
6264 c2)));
6265 }
6266
6267 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6268 if (! HONOR_NANS (mode))
6269 return fold_build2_loc (loc, code, type, arg,
6270 build_real (TREE_TYPE (arg), c2));
6271
6272 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6273 arg = save_expr (arg);
6274 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6275 fold_build2_loc (loc, GE_EXPR, type, arg,
6276 build_real (TREE_TYPE (arg),
6277 dconst0)),
6278 fold_build2_loc (loc, code, type, arg,
6279 build_real (TREE_TYPE (arg),
6280 c2)));
6281 }
6282 }
6283
6284 return NULL_TREE;
6285 }
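/* A minimal sketch (hypothetical) of the sqrt comparison folds above,
   assuming NaNs and infinities need not be honored (e.g. -ffast-math).  */

extern double sqrt (double);

static int
sqrt_compare_demo (double x)
{
  return sqrt (x) > 2.0;	/* Folds to x > 4.0.  */
}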
6286
6287 /* Subroutine of fold() that optimizes comparisons against Infinities,
6288 either +Inf or -Inf.
6289
6290 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6291 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6292 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6293
6294 The function returns the constant folded tree if a simplification
6295 can be made, and NULL_TREE otherwise. */
6296
6297 static tree
6298 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6299 tree arg0, tree arg1)
6300 {
6301 enum machine_mode mode;
6302 REAL_VALUE_TYPE max;
6303 tree temp;
6304 bool neg;
6305
6306 mode = TYPE_MODE (TREE_TYPE (arg0));
6307
6308 /* For negative infinity swap the sense of the comparison. */
6309 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6310 if (neg)
6311 code = swap_tree_comparison (code);
6312
6313 switch (code)
6314 {
6315 case GT_EXPR:
6316 /* x > +Inf is always false, if we ignore sNaNs. */
6317 if (HONOR_SNANS (mode))
6318 return NULL_TREE;
6319 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6320
6321 case LE_EXPR:
6322 /* x <= +Inf is always true, if we don't care about NaNs. */
6323 if (! HONOR_NANS (mode))
6324 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6325
6326 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6327 arg0 = save_expr (arg0);
6328 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6329
6330 case EQ_EXPR:
6331 case GE_EXPR:
6332 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6333 real_maxval (&max, neg, mode);
6334 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6335 arg0, build_real (TREE_TYPE (arg0), max));
6336
6337 case LT_EXPR:
6338 /* x < +Inf is always equal to x <= DBL_MAX. */
6339 real_maxval (&max, neg, mode);
6340 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6341 arg0, build_real (TREE_TYPE (arg0), max));
6342
6343 case NE_EXPR:
6344 /* x != +Inf is always equal to !(x > DBL_MAX). */
6345 real_maxval (&max, neg, mode);
6346 if (! HONOR_NANS (mode))
6347 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6348 arg0, build_real (TREE_TYPE (arg0), max));
6349
6350 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6351 arg0, build_real (TREE_TYPE (arg0), max));
6352 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6353
6354 default:
6355 break;
6356 }
6357
6358 return NULL_TREE;
6359 }
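/* A minimal sketch (hypothetical) of the infinity folds above.  */

static int
inf_compare_demo (double x)
{
  return x < __builtin_inf ();	/* Folds to x <= DBL_MAX.  */
}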
6360
6361 /* Subroutine of fold() that optimizes comparisons of a division by
6362 a nonzero integer constant against an integer constant, i.e.
6363 X/C1 op C2.
6364
6365 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6366 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6367 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6368
6369 The function returns the constant folded tree if a simplification
6370 can be made, and NULL_TREE otherwise. */
6371
6372 static tree
6373 fold_div_compare (location_t loc,
6374 enum tree_code code, tree type, tree arg0, tree arg1)
6375 {
6376 tree prod, tmp, hi, lo;
6377 tree arg00 = TREE_OPERAND (arg0, 0);
6378 tree arg01 = TREE_OPERAND (arg0, 1);
6379 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6380 bool neg_overflow = false;
6381 bool overflow;
6382
6383 /* We have to do this the hard way to detect unsigned overflow.
6384 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6385 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6386 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6387 neg_overflow = false;
6388
6389 if (sign == UNSIGNED)
6390 {
6391 tmp = int_const_binop (MINUS_EXPR, arg01,
6392 build_int_cst (TREE_TYPE (arg01), 1));
6393 lo = prod;
6394
6395 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6396 val = wi::add (prod, tmp, sign, &overflow);
6397 hi = force_fit_type (TREE_TYPE (arg00), val,
6398 -1, overflow | TREE_OVERFLOW (prod));
6399 }
6400 else if (tree_int_cst_sgn (arg01) >= 0)
6401 {
6402 tmp = int_const_binop (MINUS_EXPR, arg01,
6403 build_int_cst (TREE_TYPE (arg01), 1));
6404 switch (tree_int_cst_sgn (arg1))
6405 {
6406 case -1:
6407 neg_overflow = true;
6408 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6409 hi = prod;
6410 break;
6411
6412 case 0:
6413 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6414 hi = tmp;
6415 break;
6416
6417 case 1:
6418 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6419 lo = prod;
6420 break;
6421
6422 default:
6423 gcc_unreachable ();
6424 }
6425 }
6426 else
6427 {
6428 /* A negative divisor reverses the relational operators. */
6429 code = swap_tree_comparison (code);
6430
6431 tmp = int_const_binop (PLUS_EXPR, arg01,
6432 build_int_cst (TREE_TYPE (arg01), 1));
6433 switch (tree_int_cst_sgn (arg1))
6434 {
6435 case -1:
6436 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6437 lo = prod;
6438 break;
6439
6440 case 0:
6441 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6442 lo = tmp;
6443 break;
6444
6445 case 1:
6446 neg_overflow = true;
6447 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6448 hi = prod;
6449 break;
6450
6451 default:
6452 gcc_unreachable ();
6453 }
6454 }
6455
6456 switch (code)
6457 {
6458 case EQ_EXPR:
6459 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6460 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6461 if (TREE_OVERFLOW (hi))
6462 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6463 if (TREE_OVERFLOW (lo))
6464 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6465 return build_range_check (loc, type, arg00, 1, lo, hi);
6466
6467 case NE_EXPR:
6468 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6469 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6470 if (TREE_OVERFLOW (hi))
6471 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6472 if (TREE_OVERFLOW (lo))
6473 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6474 return build_range_check (loc, type, arg00, 0, lo, hi);
6475
6476 case LT_EXPR:
6477 if (TREE_OVERFLOW (lo))
6478 {
6479 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6480 return omit_one_operand_loc (loc, type, tmp, arg00);
6481 }
6482 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6483
6484 case LE_EXPR:
6485 if (TREE_OVERFLOW (hi))
6486 {
6487 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6488 return omit_one_operand_loc (loc, type, tmp, arg00);
6489 }
6490 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6491
6492 case GT_EXPR:
6493 if (TREE_OVERFLOW (hi))
6494 {
6495 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6496 return omit_one_operand_loc (loc, type, tmp, arg00);
6497 }
6498 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6499
6500 case GE_EXPR:
6501 if (TREE_OVERFLOW (lo))
6502 {
6503 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6504 return omit_one_operand_loc (loc, type, tmp, arg00);
6505 }
6506 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6507
6508 default:
6509 break;
6510 }
6511
6512 return NULL_TREE;
6513 }
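/* A minimal sketch (hypothetical) of fold_div_compare: X / C1 op C2
   becomes a range check on X.  */

static int
div_compare_demo (unsigned x)
{
  return x / 3 == 2;	/* Folds to the range check 6 <= x && x <= 8.  */
}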
6514
6515
6516 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6517 equality/inequality test, then return a simplified form of the test
6518 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
6519 result type. */
6520
6521 static tree
6522 fold_single_bit_test_into_sign_test (location_t loc,
6523 enum tree_code code, tree arg0, tree arg1,
6524 tree result_type)
6525 {
6526 /* If this is testing a single bit, we can optimize the test. */
6527 if ((code == NE_EXPR || code == EQ_EXPR)
6528 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6529 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6530 {
6531 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6532 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6533 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6534
6535 if (arg00 != NULL_TREE
6536 /* This is only a win if casting to a signed type is cheap,
6537 i.e. when arg00's type is not a partial mode. */
6538 && TYPE_PRECISION (TREE_TYPE (arg00))
6539 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6540 {
6541 tree stype = signed_type_for (TREE_TYPE (arg00));
6542 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6543 result_type,
6544 fold_convert_loc (loc, stype, arg00),
6545 build_int_cst (stype, 0));
6546 }
6547 }
6548
6549 return NULL_TREE;
6550 }
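/* A minimal sketch (hypothetical; assumes 32-bit int, whose sign bit is
   -2147483647 - 1, i.e. INT_MIN) of the sign-test rewrite above.  */

static int
sign_bit_test_demo (int x)
{
  return (x & (-2147483647 - 1)) != 0;	/* Folds to x < 0.  */
}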
6551
6552 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6553 equality/inequality test, then return a simplified form of
6554 the test using shifts and logical operations. Otherwise return
6555 NULL. RESULT_TYPE is the desired result type. */
6556
6557 tree
6558 fold_single_bit_test (location_t loc, enum tree_code code,
6559 tree arg0, tree arg1, tree result_type)
6560 {
6561 /* If this is testing a single bit, we can optimize the test. */
6562 if ((code == NE_EXPR || code == EQ_EXPR)
6563 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6564 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6565 {
6566 tree inner = TREE_OPERAND (arg0, 0);
6567 tree type = TREE_TYPE (arg0);
6568 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6569 enum machine_mode operand_mode = TYPE_MODE (type);
6570 int ops_unsigned;
6571 tree signed_type, unsigned_type, intermediate_type;
6572 tree tem, one;
6573
6574 /* First, see if we can fold the single bit test into a sign-bit
6575 test. */
6576 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6577 result_type);
6578 if (tem)
6579 return tem;
6580
6581 /* Otherwise we have (A & C) != 0 where C is a single bit,
6582 convert that into ((A >> C2) & 1), where C2 = log2(C).
6583 Similarly for (A & C) == 0. */
6584
6585 /* If INNER is a right shift of a constant and it plus BITNUM does
6586 not overflow, adjust BITNUM and INNER. */
6587 if (TREE_CODE (inner) == RSHIFT_EXPR
6588 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6589 && bitnum < TYPE_PRECISION (type)
6590 && wi::ltu_p (TREE_OPERAND (inner, 1),
6591 TYPE_PRECISION (type) - bitnum))
6592 {
6593 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6594 inner = TREE_OPERAND (inner, 0);
6595 }
6596
6597 /* If we are going to be able to omit the AND below, we must do our
6598 operations as unsigned. If we must use the AND, we have a choice.
6599 Normally unsigned is faster, but for some machines signed is. */
6600 #ifdef LOAD_EXTEND_OP
6601 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6602 && !flag_syntax_only) ? 0 : 1;
6603 #else
6604 ops_unsigned = 1;
6605 #endif
6606
6607 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6608 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6609 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6610 inner = fold_convert_loc (loc, intermediate_type, inner);
6611
6612 if (bitnum != 0)
6613 inner = build2 (RSHIFT_EXPR, intermediate_type,
6614 inner, size_int (bitnum));
6615
6616 one = build_int_cst (intermediate_type, 1);
6617
6618 if (code == EQ_EXPR)
6619 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6620
6621 /* Put the AND last so it can combine with more things. */
6622 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6623
6624 /* Make sure to return the proper type. */
6625 inner = fold_convert_loc (loc, result_type, inner);
6626
6627 return inner;
6628 }
6629 return NULL_TREE;
6630 }
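/* A minimal sketch (hypothetical) of the shift form above.  */

static unsigned
single_bit_test_demo (unsigned x)
{
  return (x & 8) != 0;	/* Folds to (x >> 3) & 1.  */
}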
6631
6632 /* Check whether we are allowed to reorder operands arg0 and arg1,
6633 such that the evaluation of arg1 occurs before arg0. */
6634
6635 static bool
6636 reorder_operands_p (const_tree arg0, const_tree arg1)
6637 {
6638 if (! flag_evaluation_order)
6639 return true;
6640 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6641 return true;
6642 return ! TREE_SIDE_EFFECTS (arg0)
6643 && ! TREE_SIDE_EFFECTS (arg1);
6644 }
6645
6646 /* Test whether it is preferable to swap two operands, ARG0 and
6647 ARG1, for example because ARG0 is an integer constant and ARG1
6648 isn't. If REORDER is true, only recommend swapping if we can
6649 evaluate the operands in reverse order. */
6650
6651 bool
6652 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6653 {
6654 if (CONSTANT_CLASS_P (arg1))
6655 return 0;
6656 if (CONSTANT_CLASS_P (arg0))
6657 return 1;
6658
6659 STRIP_SIGN_NOPS (arg0);
6660 STRIP_SIGN_NOPS (arg1);
6661
6662 if (TREE_CONSTANT (arg1))
6663 return 0;
6664 if (TREE_CONSTANT (arg0))
6665 return 1;
6666
6667 if (reorder && flag_evaluation_order
6668 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6669 return 0;
6670
6671 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6672 for commutative and comparison operators. Ensuring a canonical
6673 form allows the optimizers to find additional redundancies without
6674 having to explicitly check for both orderings. */
6675 if (TREE_CODE (arg0) == SSA_NAME
6676 && TREE_CODE (arg1) == SSA_NAME
6677 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6678 return 1;
6679
6680 /* Put SSA_NAMEs last. */
6681 if (TREE_CODE (arg1) == SSA_NAME)
6682 return 0;
6683 if (TREE_CODE (arg0) == SSA_NAME)
6684 return 1;
6685
6686 /* Put variables last. */
6687 if (DECL_P (arg1))
6688 return 0;
6689 if (DECL_P (arg0))
6690 return 1;
6691
6692 return 0;
6693 }
6694
6695 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6696 ARG0 is extended to a wider type. */
6697
6698 static tree
6699 fold_widened_comparison (location_t loc, enum tree_code code,
6700 tree type, tree arg0, tree arg1)
6701 {
6702 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6703 tree arg1_unw;
6704 tree shorter_type, outer_type;
6705 tree min, max;
6706 bool above, below;
6707
6708 if (arg0_unw == arg0)
6709 return NULL_TREE;
6710 shorter_type = TREE_TYPE (arg0_unw);
6711
6712 #ifdef HAVE_canonicalize_funcptr_for_compare
6713 /* Disable this optimization if we're casting a function pointer
6714 type on targets that require function pointer canonicalization. */
6715 if (HAVE_canonicalize_funcptr_for_compare
6716 && TREE_CODE (shorter_type) == POINTER_TYPE
6717 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6718 return NULL_TREE;
6719 #endif
6720
6721 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6722 return NULL_TREE;
6723
6724 arg1_unw = get_unwidened (arg1, NULL_TREE);
6725
6726 /* If possible, express the comparison in the shorter mode. */
6727 if ((code == EQ_EXPR || code == NE_EXPR
6728 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6729 && (TREE_TYPE (arg1_unw) == shorter_type
6730 || ((TYPE_PRECISION (shorter_type)
6731 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6732 && (TYPE_UNSIGNED (shorter_type)
6733 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6734 || (TREE_CODE (arg1_unw) == INTEGER_CST
6735 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6736 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6737 && int_fits_type_p (arg1_unw, shorter_type))))
6738 return fold_build2_loc (loc, code, type, arg0_unw,
6739 fold_convert_loc (loc, shorter_type, arg1_unw));
6740
6741 if (TREE_CODE (arg1_unw) != INTEGER_CST
6742 || TREE_CODE (shorter_type) != INTEGER_TYPE
6743 || !int_fits_type_p (arg1_unw, TREE_TYPE (arg0)))
6744 return NULL_TREE;
6745
6746 /* If we are comparing with an integer that does not fit into the range
6747 of the shorter type, the result is known. */
6748 outer_type = TREE_TYPE (arg1_unw);
6749 min = lower_bound_in_type (outer_type, shorter_type);
6750 max = upper_bound_in_type (outer_type, shorter_type);
6751
6752 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6753 max, arg1_unw));
6754 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6755 arg1_unw, min));
6756
6757 switch (code)
6758 {
6759 case EQ_EXPR:
6760 if (above || below)
6761 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6762 break;
6763
6764 case NE_EXPR:
6765 if (above || below)
6766 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6767 break;
6768
6769 case LT_EXPR:
6770 case LE_EXPR:
6771 if (above)
6772 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6773 else if (below)
6774 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6775
6776 case GT_EXPR:
6777 case GE_EXPR:
6778 if (above)
6779 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6780 else if (below)
6781 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6782
6783 default:
6784 break;
6785 }
6786
6787 return NULL_TREE;
6788 }
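/* A minimal sketch (hypothetical) of the known-result case above: the
   constant does not fit in the range of the unwidened type.  */

static int
widened_compare_demo (unsigned char c)
{
  return (int) c == 300;	/* 300 > UCHAR_MAX, so this folds to 0.  */
}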
6789
6790 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6791 ARG0 just the signedness is changed. */
6792
6793 static tree
6794 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6795 tree arg0, tree arg1)
6796 {
6797 tree arg0_inner;
6798 tree inner_type, outer_type;
6799
6800 if (!CONVERT_EXPR_P (arg0))
6801 return NULL_TREE;
6802
6803 outer_type = TREE_TYPE (arg0);
6804 arg0_inner = TREE_OPERAND (arg0, 0);
6805 inner_type = TREE_TYPE (arg0_inner);
6806
6807 #ifdef HAVE_canonicalize_funcptr_for_compare
6808 /* Disable this optimization if we're casting a function pointer
6809 type on targets that require function pointer canonicalization. */
6810 if (HAVE_canonicalize_funcptr_for_compare
6811 && TREE_CODE (inner_type) == POINTER_TYPE
6812 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6813 return NULL_TREE;
6814 #endif
6815
6816 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6817 return NULL_TREE;
6818
6819 if (TREE_CODE (arg1) != INTEGER_CST
6820 && !(CONVERT_EXPR_P (arg1)
6821 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6822 return NULL_TREE;
6823
6824 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6825 && code != NE_EXPR
6826 && code != EQ_EXPR)
6827 return NULL_TREE;
6828
6829 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6830 return NULL_TREE;
6831
6832 if (TREE_CODE (arg1) == INTEGER_CST)
6833 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6834 TREE_OVERFLOW (arg1));
6835 else
6836 arg1 = fold_convert_loc (loc, inner_type, arg1);
6837
6838 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6839 }
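/* A minimal sketch (hypothetical): only the signedness of the operand
   changes across the conversion, so the comparison is redone in the
   inner type.  */

static int
sign_changed_compare_demo (int x)
{
  return (unsigned) x == 5u;	/* Folds to x == 5.  */
}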
6840
6841
6842 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6843 means A >= Y && A != MAX, but in this case we know that
6844 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6845
6846 static tree
6847 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6848 {
6849 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6850
6851 if (TREE_CODE (bound) == LT_EXPR)
6852 a = TREE_OPERAND (bound, 0);
6853 else if (TREE_CODE (bound) == GT_EXPR)
6854 a = TREE_OPERAND (bound, 1);
6855 else
6856 return NULL_TREE;
6857
6858 typea = TREE_TYPE (a);
6859 if (!INTEGRAL_TYPE_P (typea)
6860 && !POINTER_TYPE_P (typea))
6861 return NULL_TREE;
6862
6863 if (TREE_CODE (ineq) == LT_EXPR)
6864 {
6865 a1 = TREE_OPERAND (ineq, 1);
6866 y = TREE_OPERAND (ineq, 0);
6867 }
6868 else if (TREE_CODE (ineq) == GT_EXPR)
6869 {
6870 a1 = TREE_OPERAND (ineq, 0);
6871 y = TREE_OPERAND (ineq, 1);
6872 }
6873 else
6874 return NULL_TREE;
6875
6876 if (TREE_TYPE (a1) != typea)
6877 return NULL_TREE;
6878
6879 if (POINTER_TYPE_P (typea))
6880 {
6881 /* Convert the pointer types into integers before taking the difference. */
6882 tree ta = fold_convert_loc (loc, ssizetype, a);
6883 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6884 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6885 }
6886 else
6887 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6888
6889 if (!diff || !integer_onep (diff))
6890 return NULL_TREE;
6891
6892 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6893 }
6894
6895 /* Fold a sum or difference of at least one multiplication.
6896 Returns the folded tree or NULL if no simplification could be made. */
6897
6898 static tree
6899 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6900 tree arg0, tree arg1)
6901 {
6902 tree arg00, arg01, arg10, arg11;
6903 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6904
6905 /* (A * C) +- (B * C) -> (A+-B) * C.
6906 (A * C) +- A -> A * (C+-1).
6907 We are most concerned about the case where C is a constant,
6908 but other combinations show up during loop reduction. Since
6909 it is not difficult, try all four possibilities. */
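/* E.g. the common power-of-two factoring below rewrites
   (i * 12) + (j * 4) to (i * 3 + j) * 4, saving a multiplication. */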
6910
6911 if (TREE_CODE (arg0) == MULT_EXPR)
6912 {
6913 arg00 = TREE_OPERAND (arg0, 0);
6914 arg01 = TREE_OPERAND (arg0, 1);
6915 }
6916 else if (TREE_CODE (arg0) == INTEGER_CST)
6917 {
6918 arg00 = build_one_cst (type);
6919 arg01 = arg0;
6920 }
6921 else
6922 {
6923 /* We cannot generate constant 1 for fract. */
6924 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6925 return NULL_TREE;
6926 arg00 = arg0;
6927 arg01 = build_one_cst (type);
6928 }
6929 if (TREE_CODE (arg1) == MULT_EXPR)
6930 {
6931 arg10 = TREE_OPERAND (arg1, 0);
6932 arg11 = TREE_OPERAND (arg1, 1);
6933 }
6934 else if (TREE_CODE (arg1) == INTEGER_CST)
6935 {
6936 arg10 = build_one_cst (type);
6937 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6938 the purpose of this canonicalization. */
6939 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6940 && negate_expr_p (arg1)
6941 && code == PLUS_EXPR)
6942 {
6943 arg11 = negate_expr (arg1);
6944 code = MINUS_EXPR;
6945 }
6946 else
6947 arg11 = arg1;
6948 }
6949 else
6950 {
6951 /* We cannot generate constant 1 for fract. */
6952 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6953 return NULL_TREE;
6954 arg10 = arg1;
6955 arg11 = build_one_cst (type);
6956 }
6957 same = NULL_TREE;
6958
6959 if (operand_equal_p (arg01, arg11, 0))
6960 same = arg01, alt0 = arg00, alt1 = arg10;
6961 else if (operand_equal_p (arg00, arg10, 0))
6962 same = arg00, alt0 = arg01, alt1 = arg11;
6963 else if (operand_equal_p (arg00, arg11, 0))
6964 same = arg00, alt0 = arg01, alt1 = arg10;
6965 else if (operand_equal_p (arg01, arg10, 0))
6966 same = arg01, alt0 = arg00, alt1 = arg11;
6967
6968 /* No identical multiplicands; see if we can find a common
6969 power-of-two factor in non-power-of-two multiplies. This
6970 can help in multi-dimensional array access. */
6971 else if (tree_fits_shwi_p (arg01)
6972 && tree_fits_shwi_p (arg11))
6973 {
6974 HOST_WIDE_INT int01, int11, tmp;
6975 bool swap = false;
6976 tree maybe_same;
6977 int01 = tree_to_shwi (arg01);
6978 int11 = tree_to_shwi (arg11);
6979
6980 /* Move min of absolute values to int11. */
6981 if (absu_hwi (int01) < absu_hwi (int11))
6982 {
6983 tmp = int01, int01 = int11, int11 = tmp;
6984 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6985 maybe_same = arg01;
6986 swap = true;
6987 }
6988 else
6989 maybe_same = arg11;
6990
6991 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6992 /* The remainder should not be a constant, otherwise we
6993 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6994 increased the number of multiplications necessary. */
6995 && TREE_CODE (arg10) != INTEGER_CST)
6996 {
6997 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6998 build_int_cst (TREE_TYPE (arg00),
6999 int01 / int11));
7000 alt1 = arg10;
7001 same = maybe_same;
7002 if (swap)
7003 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7004 }
7005 }
7006
7007 if (same)
7008 return fold_build2_loc (loc, MULT_EXPR, type,
7009 fold_build2_loc (loc, code, type,
7010 fold_convert_loc (loc, type, alt0),
7011 fold_convert_loc (loc, type, alt1)),
7012 fold_convert_loc (loc, type, same));
7013
7014 return NULL_TREE;
7015 }
7016
7017 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7018 specified by EXPR into the buffer PTR of length LEN bytes.
7019 Return the number of bytes placed in the buffer, or zero
7020 upon failure. */
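/* For example, on a little-endian target the 32-bit INTEGER_CST
   0x01020304 is emitted as the bytes { 0x04, 0x03, 0x02, 0x01 };
   a big-endian target stores them in the opposite order. */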
7021
7022 static int
7023 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7024 {
7025 tree type = TREE_TYPE (expr);
7026 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7027 int byte, offset, word, words;
7028 unsigned char value;
7029
7030 if ((off == -1 && total_bytes > len)
7031 || off >= total_bytes)
7032 return 0;
7033 if (off == -1)
7034 off = 0;
7035 words = total_bytes / UNITS_PER_WORD;
7036
7037 for (byte = 0; byte < total_bytes; byte++)
7038 {
7039 int bitpos = byte * BITS_PER_UNIT;
7040 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7041 number of bytes. */
7042 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7043
7044 if (total_bytes > UNITS_PER_WORD)
7045 {
7046 word = byte / UNITS_PER_WORD;
7047 if (WORDS_BIG_ENDIAN)
7048 word = (words - 1) - word;
7049 offset = word * UNITS_PER_WORD;
7050 if (BYTES_BIG_ENDIAN)
7051 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7052 else
7053 offset += byte % UNITS_PER_WORD;
7054 }
7055 else
7056 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7057 if (offset >= off
7058 && offset - off < len)
7059 ptr[offset - off] = value;
7060 }
7061 return MIN (len, total_bytes - off);
7062 }
7063
7064
7065 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7066 specified by EXPR into the buffer PTR of length LEN bytes.
7067 Return the number of bytes placed in the buffer, or zero
7068 upon failure. */
7069
7070 static int
7071 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7072 {
7073 tree type = TREE_TYPE (expr);
7074 enum machine_mode mode = TYPE_MODE (type);
7075 int total_bytes = GET_MODE_SIZE (mode);
7076 FIXED_VALUE_TYPE value;
7077 tree i_value, i_type;
7078
7079 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7080 return 0;
7081
7082 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7083
7084 if (NULL_TREE == i_type
7085 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7086 return 0;
7087
7088 value = TREE_FIXED_CST (expr);
7089 i_value = double_int_to_tree (i_type, value.data);
7090
7091 return native_encode_int (i_value, ptr, len, off);
7092 }
7093
7094
7095 /* Subroutine of native_encode_expr. Encode the REAL_CST
7096 specified by EXPR into the buffer PTR of length LEN bytes.
7097 Return the number of bytes placed in the buffer, or zero
7098 upon failure. */
7099
7100 static int
7101 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7102 {
7103 tree type = TREE_TYPE (expr);
7104 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7105 int byte, offset, word, words, bitpos;
7106 unsigned char value;
7107
7108 /* There are always 32 bits in each long, no matter the size of
7109 the host's long. We handle floating point representations with
7110 up to 192 bits. */
7111 long tmp[6];
7112
7113 if ((off == -1 && total_bytes > len)
7114 || off >= total_bytes)
7115 return 0;
7116 if (off == -1)
7117 off = 0;
7118 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7119
7120 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7121
7122 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7123 bitpos += BITS_PER_UNIT)
7124 {
7125 byte = (bitpos / BITS_PER_UNIT) & 3;
7126 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7127
7128 if (UNITS_PER_WORD < 4)
7129 {
7130 word = byte / UNITS_PER_WORD;
7131 if (WORDS_BIG_ENDIAN)
7132 word = (words - 1) - word;
7133 offset = word * UNITS_PER_WORD;
7134 if (BYTES_BIG_ENDIAN)
7135 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7136 else
7137 offset += byte % UNITS_PER_WORD;
7138 }
7139 else
7140 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7141 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7142 if (offset >= off
7143 && offset - off < len)
7144 ptr[offset - off] = value;
7145 }
7146 return MIN (len, total_bytes - off);
7147 }
7148
7149 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7150 specified by EXPR into the buffer PTR of length LEN bytes.
7151 Return the number of bytes placed in the buffer, or zero
7152 upon failure. */
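/* The real part is encoded first, at offset 0; the imaginary part
   follows at the byte offset given by the element mode size. */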
7153
7154 static int
7155 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7156 {
7157 int rsize, isize;
7158 tree part;
7159
7160 part = TREE_REALPART (expr);
7161 rsize = native_encode_expr (part, ptr, len, off);
7162 if (off == -1
7163 && rsize == 0)
7164 return 0;
7165 part = TREE_IMAGPART (expr);
7166 if (off != -1)
7167 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7168 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7169 if (off == -1
7170 && isize != rsize)
7171 return 0;
7172 return rsize + isize;
7173 }
7174
7175
7176 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7177 specified by EXPR into the buffer PTR of length LEN bytes.
7178 Return the number of bytes placed in the buffer, or zero
7179 upon failure. */
7180
7181 static int
7182 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7183 {
7184 unsigned i, count;
7185 int size, offset;
7186 tree itype, elem;
7187
7188 offset = 0;
7189 count = VECTOR_CST_NELTS (expr);
7190 itype = TREE_TYPE (TREE_TYPE (expr));
7191 size = GET_MODE_SIZE (TYPE_MODE (itype));
7192 for (i = 0; i < count; i++)
7193 {
7194 if (off >= size)
7195 {
7196 off -= size;
7197 continue;
7198 }
7199 elem = VECTOR_CST_ELT (expr, i);
7200 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7201 if ((off == -1 && res != size)
7202 || res == 0)
7203 return 0;
7204 offset += res;
7205 if (offset >= len)
7206 return offset;
7207 if (off != -1)
7208 off = 0;
7209 }
7210 return offset;
7211 }
7212
7213
7214 /* Subroutine of native_encode_expr. Encode the STRING_CST
7215 specified by EXPR into the buffer PTR of length LEN bytes.
7216 Return the number of bytes placed in the buffer, or zero
7217 upon failure. */
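/* If the STRING_CST is shorter than its array type, the remaining
   bytes are zero-filled; e.g. char buf[8] = "ab" encodes as
   { 'a', 'b', 0, 0, 0, 0, 0, 0 }. */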
7218
7219 static int
7220 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7221 {
7222 tree type = TREE_TYPE (expr);
7223 HOST_WIDE_INT total_bytes;
7224
7225 if (TREE_CODE (type) != ARRAY_TYPE
7226 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7227 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7228 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7229 return 0;
7230 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7231 if ((off == -1 && total_bytes > len)
7232 || off >= total_bytes)
7233 return 0;
7234 if (off == -1)
7235 off = 0;
7236 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7237 {
7238 int written = 0;
7239 if (off < TREE_STRING_LENGTH (expr))
7240 {
7241 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7242 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7243 }
7244 memset (ptr + written, 0,
7245 MIN (total_bytes - written, len - written));
7246 }
7247 else
7248 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7249 return MIN (total_bytes - off, len);
7250 }
7251
7252
7253 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7254 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7255 buffer PTR of length LEN bytes. If OFF is not -1 then start
7256 the encoding at byte offset OFF and encode at most LEN bytes.
7257 Return the number of bytes placed in the buffer, or zero upon failure. */
7258
7259 int
7260 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7261 {
7262 switch (TREE_CODE (expr))
7263 {
7264 case INTEGER_CST:
7265 return native_encode_int (expr, ptr, len, off);
7266
7267 case REAL_CST:
7268 return native_encode_real (expr, ptr, len, off);
7269
7270 case FIXED_CST:
7271 return native_encode_fixed (expr, ptr, len, off);
7272
7273 case COMPLEX_CST:
7274 return native_encode_complex (expr, ptr, len, off);
7275
7276 case VECTOR_CST:
7277 return native_encode_vector (expr, ptr, len, off);
7278
7279 case STRING_CST:
7280 return native_encode_string (expr, ptr, len, off);
7281
7282 default:
7283 return 0;
7284 }
7285 }
7286
7287
7288 /* Subroutine of native_interpret_expr. Interpret the contents of
7289 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7290 If the buffer cannot be interpreted, return NULL_TREE. */
7291
7292 static tree
7293 native_interpret_int (tree type, const unsigned char *ptr, int len)
7294 {
7295 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7296
7297 if (total_bytes > len
7298 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7299 return NULL_TREE;
7300
7301 wide_int result = wi::from_buffer (ptr, total_bytes);
7302
7303 return wide_int_to_tree (type, result);
7304 }
7305
7306
7307 /* Subroutine of native_interpret_expr. Interpret the contents of
7308 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7309 If the buffer cannot be interpreted, return NULL_TREE. */
7310
7311 static tree
7312 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7313 {
7314 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7315 double_int result;
7316 FIXED_VALUE_TYPE fixed_value;
7317
7318 if (total_bytes > len
7319 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7320 return NULL_TREE;
7321
7322 result = double_int::from_buffer (ptr, total_bytes);
7323 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7324
7325 return build_fixed (type, fixed_value);
7326 }
7327
7328
7329 /* Subroutine of native_interpret_expr. Interpret the contents of
7330 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7331 If the buffer cannot be interpreted, return NULL_TREE. */
7332
7333 static tree
7334 native_interpret_real (tree type, const unsigned char *ptr, int len)
7335 {
7336 enum machine_mode mode = TYPE_MODE (type);
7337 int total_bytes = GET_MODE_SIZE (mode);
7338 int byte, offset, word, words, bitpos;
7339 unsigned char value;
7340 /* There are always 32 bits in each long, no matter the size of
7341 the host's long. We handle floating point representations with
7342 up to 192 bits. */
7343 REAL_VALUE_TYPE r;
7344 long tmp[6];
7345
7346 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7347 if (total_bytes > len || total_bytes > 24)
7348 return NULL_TREE;
7349 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7350
7351 memset (tmp, 0, sizeof (tmp));
7352 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7353 bitpos += BITS_PER_UNIT)
7354 {
7355 byte = (bitpos / BITS_PER_UNIT) & 3;
7356 if (UNITS_PER_WORD < 4)
7357 {
7358 word = byte / UNITS_PER_WORD;
7359 if (WORDS_BIG_ENDIAN)
7360 word = (words - 1) - word;
7361 offset = word * UNITS_PER_WORD;
7362 if (BYTES_BIG_ENDIAN)
7363 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7364 else
7365 offset += byte % UNITS_PER_WORD;
7366 }
7367 else
7368 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7369 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7370
7371 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7372 }
7373
7374 real_from_target (&r, tmp, mode);
7375 return build_real (type, r);
7376 }
7377
7378
7379 /* Subroutine of native_interpret_expr. Interpret the contents of
7380 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7381 If the buffer cannot be interpreted, return NULL_TREE. */
7382
7383 static tree
7384 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7385 {
7386 tree etype, rpart, ipart;
7387 int size;
7388
7389 etype = TREE_TYPE (type);
7390 size = GET_MODE_SIZE (TYPE_MODE (etype));
7391 if (size * 2 > len)
7392 return NULL_TREE;
7393 rpart = native_interpret_expr (etype, ptr, size);
7394 if (!rpart)
7395 return NULL_TREE;
7396 ipart = native_interpret_expr (etype, ptr+size, size);
7397 if (!ipart)
7398 return NULL_TREE;
7399 return build_complex (type, rpart, ipart);
7400 }
7401
7402
7403 /* Subroutine of native_interpret_expr. Interpret the contents of
7404 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7405 If the buffer cannot be interpreted, return NULL_TREE. */
7406
7407 static tree
7408 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7409 {
7410 tree etype, elem;
7411 int i, size, count;
7412 tree *elements;
7413
7414 etype = TREE_TYPE (type);
7415 size = GET_MODE_SIZE (TYPE_MODE (etype));
7416 count = TYPE_VECTOR_SUBPARTS (type);
7417 if (size * count > len)
7418 return NULL_TREE;
7419
7420 elements = XALLOCAVEC (tree, count);
7421 for (i = count - 1; i >= 0; i--)
7422 {
7423 elem = native_interpret_expr (etype, ptr+(i*size), size);
7424 if (!elem)
7425 return NULL_TREE;
7426 elements[i] = elem;
7427 }
7428 return build_vector (type, elements);
7429 }
7430
7431
7432 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7433 the buffer PTR of length LEN as a constant of type TYPE. For
7434 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7435 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7436 return NULL_TREE. */
7437
7438 tree
7439 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7440 {
7441 switch (TREE_CODE (type))
7442 {
7443 case INTEGER_TYPE:
7444 case ENUMERAL_TYPE:
7445 case BOOLEAN_TYPE:
7446 case POINTER_TYPE:
7447 case REFERENCE_TYPE:
7448 return native_interpret_int (type, ptr, len);
7449
7450 case REAL_TYPE:
7451 return native_interpret_real (type, ptr, len);
7452
7453 case FIXED_POINT_TYPE:
7454 return native_interpret_fixed (type, ptr, len);
7455
7456 case COMPLEX_TYPE:
7457 return native_interpret_complex (type, ptr, len);
7458
7459 case VECTOR_TYPE:
7460 return native_interpret_vector (type, ptr, len);
7461
7462 default:
7463 return NULL_TREE;
7464 }
7465 }
7466
7467 /* Returns true if we can interpret the contents of a native encoding
7468 as TYPE. */
7469
7470 static bool
7471 can_native_interpret_type_p (tree type)
7472 {
7473 switch (TREE_CODE (type))
7474 {
7475 case INTEGER_TYPE:
7476 case ENUMERAL_TYPE:
7477 case BOOLEAN_TYPE:
7478 case POINTER_TYPE:
7479 case REFERENCE_TYPE:
7480 case FIXED_POINT_TYPE:
7481 case REAL_TYPE:
7482 case COMPLEX_TYPE:
7483 case VECTOR_TYPE:
7484 return true;
7485 default:
7486 return false;
7487 }
7488 }
7489
7490 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7491 TYPE at compile-time. If we're unable to perform the conversion
7492 return NULL_TREE. */
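/* For example, on a target using IEEE single precision,
   VIEW_CONVERT_EXPR<int>(1.0f) folds to the INTEGER_CST 0x3f800000:
   the REAL_CST is byte-encoded and reinterpreted as an int. */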
7493
7494 static tree
7495 fold_view_convert_expr (tree type, tree expr)
7496 {
7497 /* We support up to 512-bit values (for V8DFmode). */
7498 unsigned char buffer[64];
7499 int len;
7500
7501 /* Check that the host and target are sane. */
7502 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7503 return NULL_TREE;
7504
7505 len = native_encode_expr (expr, buffer, sizeof (buffer));
7506 if (len == 0)
7507 return NULL_TREE;
7508
7509 return native_interpret_expr (type, buffer, len);
7510 }
7511
7512 /* Build an expression for the address of T. Folds away INDIRECT_REF
7513 to avoid confusing the gimplify process. */
7514
7515 tree
7516 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7517 {
7518 /* The size of the object is not relevant when talking about its address. */
7519 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7520 t = TREE_OPERAND (t, 0);
7521
7522 if (TREE_CODE (t) == INDIRECT_REF)
7523 {
7524 t = TREE_OPERAND (t, 0);
7525
7526 if (TREE_TYPE (t) != ptrtype)
7527 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7528 }
7529 else if (TREE_CODE (t) == MEM_REF
7530 && integer_zerop (TREE_OPERAND (t, 1)))
7531 return TREE_OPERAND (t, 0);
7532 else if (TREE_CODE (t) == MEM_REF
7533 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7534 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7535 TREE_OPERAND (t, 0),
7536 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7537 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7538 {
7539 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7540
7541 if (TREE_TYPE (t) != ptrtype)
7542 t = fold_convert_loc (loc, ptrtype, t);
7543 }
7544 else
7545 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7546
7547 return t;
7548 }
7549
7550 /* Build an expression for the address of T. */
7551
7552 tree
7553 build_fold_addr_expr_loc (location_t loc, tree t)
7554 {
7555 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7556
7557 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7558 }
7559
7560 static bool vec_cst_ctor_to_array (tree, tree *);
7561
7562 /* Fold a unary expression of code CODE and type TYPE with operand
7563 OP0. Return the folded expression if folding is successful.
7564 Otherwise, return NULL_TREE. */
7565
7566 tree
7567 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7568 {
7569 tree tem;
7570 tree arg0;
7571 enum tree_code_class kind = TREE_CODE_CLASS (code);
7572
7573 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7574 && TREE_CODE_LENGTH (code) == 1);
7575
7576 tem = generic_simplify (loc, code, type, op0);
7577 if (tem)
7578 return tem;
7579
7580 arg0 = op0;
7581 if (arg0)
7582 {
7583 if (CONVERT_EXPR_CODE_P (code)
7584 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7585 {
7586 /* Don't use STRIP_NOPS, because signedness of argument type
7587 matters. */
7588 STRIP_SIGN_NOPS (arg0);
7589 }
7590 else
7591 {
7592 /* Strip any conversions that don't change the mode. This
7593 is safe for every expression, except for a comparison
7594 expression because its signedness is derived from its
7595 operands.
7596
7597 Note that this is done as an internal manipulation within
7598 the constant folder, in order to find the simplest
7599 representation of the arguments so that their form can be
7600 studied. In any case, the appropriate type conversions
7601 should be put back in the tree that leaves the
7602 constant folder. */
7603 STRIP_NOPS (arg0);
7604 }
7605 }
7606
7607 if (TREE_CODE_CLASS (code) == tcc_unary)
7608 {
7609 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7610 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7611 fold_build1_loc (loc, code, type,
7612 fold_convert_loc (loc, TREE_TYPE (op0),
7613 TREE_OPERAND (arg0, 1))));
7614 else if (TREE_CODE (arg0) == COND_EXPR)
7615 {
7616 tree arg01 = TREE_OPERAND (arg0, 1);
7617 tree arg02 = TREE_OPERAND (arg0, 2);
7618 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7619 arg01 = fold_build1_loc (loc, code, type,
7620 fold_convert_loc (loc,
7621 TREE_TYPE (op0), arg01));
7622 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7623 arg02 = fold_build1_loc (loc, code, type,
7624 fold_convert_loc (loc,
7625 TREE_TYPE (op0), arg02));
7626 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7627 arg01, arg02);
7628
7629 /* If this was a conversion, and all we did was to move it
7630 inside the COND_EXPR, bring it back out. But leave it if
7631 it is a conversion from integer to integer and the
7632 result precision is no wider than a word since such a
7633 conversion is cheap and may be optimized away by combine,
7634 while it couldn't if it were outside the COND_EXPR. Then return
7635 so we don't get into an infinite recursion loop taking the
7636 conversion out and then back in. */
7637
7638 if ((CONVERT_EXPR_CODE_P (code)
7639 || code == NON_LVALUE_EXPR)
7640 && TREE_CODE (tem) == COND_EXPR
7641 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7642 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7643 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7644 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7645 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7646 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7647 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7648 && (INTEGRAL_TYPE_P
7649 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7650 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7651 || flag_syntax_only))
7652 tem = build1_loc (loc, code, type,
7653 build3 (COND_EXPR,
7654 TREE_TYPE (TREE_OPERAND
7655 (TREE_OPERAND (tem, 1), 0)),
7656 TREE_OPERAND (tem, 0),
7657 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7658 TREE_OPERAND (TREE_OPERAND (tem, 2),
7659 0)));
7660 return tem;
7661 }
7662 }
7663
7664 switch (code)
7665 {
7666 case PAREN_EXPR:
7667 /* Re-association barriers around constants and other re-association
7668 barriers can be removed. */
7669 if (CONSTANT_CLASS_P (op0)
7670 || TREE_CODE (op0) == PAREN_EXPR)
7671 return fold_convert_loc (loc, type, op0);
7672 return NULL_TREE;
7673
7674 case NON_LVALUE_EXPR:
7675 if (!maybe_lvalue_p (op0))
7676 return fold_convert_loc (loc, type, op0);
7677 return NULL_TREE;
7678
7679 CASE_CONVERT:
7680 case FLOAT_EXPR:
7681 case FIX_TRUNC_EXPR:
7682 if (TREE_TYPE (op0) == type)
7683 return op0;
7684
7685 if (COMPARISON_CLASS_P (op0))
7686 {
7687 /* If we have (type) (a CMP b) and type is an integral type, return
7688 a new expression involving the new type. Canonicalize
7689 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7690 a non-integral type.
7691 Do not fold the result, as that would not simplify further; also,
7692 folding again would result in infinite recursion. */
7693 if (TREE_CODE (type) == BOOLEAN_TYPE)
7694 return build2_loc (loc, TREE_CODE (op0), type,
7695 TREE_OPERAND (op0, 0),
7696 TREE_OPERAND (op0, 1));
7697 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7698 && TREE_CODE (type) != VECTOR_TYPE)
7699 return build3_loc (loc, COND_EXPR, type, op0,
7700 constant_boolean_node (true, type),
7701 constant_boolean_node (false, type));
7702 }
7703
7704 /* Handle cases of two conversions in a row. */
7705 if (CONVERT_EXPR_P (op0))
7706 {
7707 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7708 tree inter_type = TREE_TYPE (op0);
7709 int inside_int = INTEGRAL_TYPE_P (inside_type);
7710 int inside_ptr = POINTER_TYPE_P (inside_type);
7711 int inside_float = FLOAT_TYPE_P (inside_type);
7712 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7713 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7714 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7715 int inter_int = INTEGRAL_TYPE_P (inter_type);
7716 int inter_ptr = POINTER_TYPE_P (inter_type);
7717 int inter_float = FLOAT_TYPE_P (inter_type);
7718 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7719 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7720 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7721 int final_int = INTEGRAL_TYPE_P (type);
7722 int final_ptr = POINTER_TYPE_P (type);
7723 int final_float = FLOAT_TYPE_P (type);
7724 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7725 unsigned int final_prec = TYPE_PRECISION (type);
7726 int final_unsignedp = TYPE_UNSIGNED (type);
7727
7728 /* In addition to the cases of two conversions in a row
7729 handled below, if we are converting something to its own
7730 type via an object of identical or wider precision, neither
7731 conversion is needed. */
7732 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7733 && (((inter_int || inter_ptr) && final_int)
7734 || (inter_float && final_float))
7735 && inter_prec >= final_prec)
7736 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7737
7738 /* Likewise, if the intermediate and initial types are either both
7739 float or both integer, we don't need the middle conversion if the
7740 former is wider than the latter and doesn't change the signedness
7741 (for integers). Avoid this if the final type is a pointer since
7742 then we sometimes need the middle conversion. Likewise if the
7743 final type has a precision not equal to the size of its mode. */
7744 if (((inter_int && inside_int)
7745 || (inter_float && inside_float)
7746 || (inter_vec && inside_vec))
7747 && inter_prec >= inside_prec
7748 && (inter_float || inter_vec
7749 || inter_unsignedp == inside_unsignedp)
7750 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7751 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7752 && ! final_ptr
7753 && (! final_vec || inter_prec == inside_prec))
7754 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7755
7756 /* If we have a sign-extension of a zero-extended value, we can
7757 replace that by a single zero-extension. Likewise if the
7758 final conversion does not change precision we can drop the
7759 intermediate conversion. */
7760 if (inside_int && inter_int && final_int
7761 && ((inside_prec < inter_prec && inter_prec < final_prec
7762 && inside_unsignedp && !inter_unsignedp)
7763 || final_prec == inter_prec))
7764 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7765
7766 /* Two conversions in a row are not needed unless:
7767 - some conversion is floating-point (overstrict for now), or
7768 - some conversion is a vector (overstrict for now), or
7769 - the intermediate type is narrower than both initial and
7770 final, or
7771 - the intermediate type and innermost type differ in signedness,
7772 and the outermost type is wider than the intermediate, or
7773 - the initial type is a pointer type and the precisions of the
7774 intermediate and final types differ, or
7775 - the final type is a pointer type and the precisions of the
7776 initial and intermediate types differ. */
7777 if (! inside_float && ! inter_float && ! final_float
7778 && ! inside_vec && ! inter_vec && ! final_vec
7779 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7780 && ! (inside_int && inter_int
7781 && inter_unsignedp != inside_unsignedp
7782 && inter_prec < final_prec)
7783 && ((inter_unsignedp && inter_prec > inside_prec)
7784 == (final_unsignedp && final_prec > inter_prec))
7785 && ! (inside_ptr && inter_prec != final_prec)
7786 && ! (final_ptr && inside_prec != inter_prec)
7787 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7788 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7789 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7790 }
7791
7792 /* Handle (T *)&A.B.C for A being of type T and B and C
7793 living at offset zero. This occurs frequently in
7794 C++ upcasting and then accessing the base. */
7795 if (TREE_CODE (op0) == ADDR_EXPR
7796 && POINTER_TYPE_P (type)
7797 && handled_component_p (TREE_OPERAND (op0, 0)))
7798 {
7799 HOST_WIDE_INT bitsize, bitpos;
7800 tree offset;
7801 enum machine_mode mode;
7802 int unsignedp, volatilep;
7803 tree base = TREE_OPERAND (op0, 0);
7804 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7805 &mode, &unsignedp, &volatilep, false);
7806 /* If the reference was to a (constant) zero offset, we can use
7807 the address of the base if it has the same base type
7808 as the result type and the pointer type is unqualified. */
7809 if (! offset && bitpos == 0
7810 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7811 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7812 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7813 return fold_convert_loc (loc, type,
7814 build_fold_addr_expr_loc (loc, base));
7815 }
7816
7817 if (TREE_CODE (op0) == MODIFY_EXPR
7818 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7819 /* Detect assigning a bitfield. */
7820 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7821 && DECL_BIT_FIELD
7822 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7823 {
7824 /* Don't leave an assignment inside a conversion
7825 unless assigning a bitfield. */
7826 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7827 /* First do the assignment, then return converted constant. */
7828 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7829 TREE_NO_WARNING (tem) = 1;
7830 TREE_USED (tem) = 1;
7831 return tem;
7832 }
7833
7834 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7835 constants (if x has signed type, the sign bit cannot be set
7836 in c). This folds extension into the BIT_AND_EXPR.
7837 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7838 very likely don't have maximal range for their precision and this
7839 transformation effectively doesn't preserve non-maximal ranges. */
7840 if (TREE_CODE (type) == INTEGER_TYPE
7841 && TREE_CODE (op0) == BIT_AND_EXPR
7842 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7843 {
7844 tree and_expr = op0;
7845 tree and0 = TREE_OPERAND (and_expr, 0);
7846 tree and1 = TREE_OPERAND (and_expr, 1);
7847 int change = 0;
7848
7849 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7850 || (TYPE_PRECISION (type)
7851 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7852 change = 1;
7853 else if (TYPE_PRECISION (TREE_TYPE (and1))
7854 <= HOST_BITS_PER_WIDE_INT
7855 && tree_fits_uhwi_p (and1))
7856 {
7857 unsigned HOST_WIDE_INT cst;
7858
7859 cst = tree_to_uhwi (and1);
7860 cst &= HOST_WIDE_INT_M1U
7861 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7862 change = (cst == 0);
7863 #ifdef LOAD_EXTEND_OP
7864 if (change
7865 && !flag_syntax_only
7866 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7867 == ZERO_EXTEND))
7868 {
7869 tree uns = unsigned_type_for (TREE_TYPE (and0));
7870 and0 = fold_convert_loc (loc, uns, and0);
7871 and1 = fold_convert_loc (loc, uns, and1);
7872 }
7873 #endif
7874 }
7875 if (change)
7876 {
7877 tem = force_fit_type (type, wi::to_widest (and1), 0,
7878 TREE_OVERFLOW (and1));
7879 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7880 fold_convert_loc (loc, type, and0), tem);
7881 }
7882 }
7883
7884 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7885 when one of the new casts will fold away. Conservatively we assume
7886 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7887 if (POINTER_TYPE_P (type)
7888 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7889 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7890 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7891 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7892 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7893 {
7894 tree arg00 = TREE_OPERAND (arg0, 0);
7895 tree arg01 = TREE_OPERAND (arg0, 1);
7896
7897 return fold_build_pointer_plus_loc
7898 (loc, fold_convert_loc (loc, type, arg00), arg01);
7899 }
7900
7901 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7902 of the same precision, and X is an integer type not narrower than
7903 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7904 if (INTEGRAL_TYPE_P (type)
7905 && TREE_CODE (op0) == BIT_NOT_EXPR
7906 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7907 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7908 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7909 {
7910 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7911 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7912 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7913 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7914 fold_convert_loc (loc, type, tem));
7915 }
7916
7917 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7918 type of X and Y (integer types only). */
7919 if (INTEGRAL_TYPE_P (type)
7920 && TREE_CODE (op0) == MULT_EXPR
7921 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7922 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7923 {
7924 /* Be careful not to introduce new overflows. */
7925 tree mult_type;
7926 if (TYPE_OVERFLOW_WRAPS (type))
7927 mult_type = type;
7928 else
7929 mult_type = unsigned_type_for (type);
7930
7931 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7932 {
7933 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7934 fold_convert_loc (loc, mult_type,
7935 TREE_OPERAND (op0, 0)),
7936 fold_convert_loc (loc, mult_type,
7937 TREE_OPERAND (op0, 1)));
7938 return fold_convert_loc (loc, type, tem);
7939 }
7940 }
7941
7942 tem = fold_convert_const (code, type, arg0);
7943 return tem ? tem : NULL_TREE;
7944
7945 case ADDR_SPACE_CONVERT_EXPR:
7946 if (integer_zerop (arg0))
7947 return fold_convert_const (code, type, arg0);
7948 return NULL_TREE;
7949
7950 case FIXED_CONVERT_EXPR:
7951 tem = fold_convert_const (code, type, arg0);
7952 return tem ? tem : NULL_TREE;
7953
7954 case VIEW_CONVERT_EXPR:
7955 if (TREE_TYPE (op0) == type)
7956 return op0;
7957 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7958 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7959 type, TREE_OPERAND (op0, 0));
7960 if (TREE_CODE (op0) == MEM_REF)
7961 return fold_build2_loc (loc, MEM_REF, type,
7962 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7963
7964 /* For integral conversions with the same precision or pointer
7965 conversions use a NOP_EXPR instead. */
7966 if ((INTEGRAL_TYPE_P (type)
7967 || POINTER_TYPE_P (type))
7968 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7969 || POINTER_TYPE_P (TREE_TYPE (op0)))
7970 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7971 return fold_convert_loc (loc, type, op0);
7972
7973 /* Strip inner integral conversions that do not change the precision. */
7974 if (CONVERT_EXPR_P (op0)
7975 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
7976 || POINTER_TYPE_P (TREE_TYPE (op0)))
7977 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
7978 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
7979 && (TYPE_PRECISION (TREE_TYPE (op0))
7980 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
7981 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
7982 type, TREE_OPERAND (op0, 0));
7983
7984 return fold_view_convert_expr (type, op0);
7985
7986 case NEGATE_EXPR:
7987 tem = fold_negate_expr (loc, arg0);
7988 if (tem)
7989 return fold_convert_loc (loc, type, tem);
7990 return NULL_TREE;
7991
7992 case ABS_EXPR:
7993 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7994 return fold_abs_const (arg0, type);
7995 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7996 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7997 /* Convert fabs((double)float) into (double)fabsf(float). */
7998 else if (TREE_CODE (arg0) == NOP_EXPR
7999 && TREE_CODE (type) == REAL_TYPE)
8000 {
8001 tree targ0 = strip_float_extensions (arg0);
8002 if (targ0 != arg0)
8003 return fold_convert_loc (loc, type,
8004 fold_build1_loc (loc, ABS_EXPR,
8005 TREE_TYPE (targ0),
8006 targ0));
8007 }
8008 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8009 else if (TREE_CODE (arg0) == ABS_EXPR)
8010 return arg0;
8011 else if (tree_expr_nonnegative_p (arg0))
8012 return arg0;
8013
8014 /* Strip sign ops from argument. */
8015 if (TREE_CODE (type) == REAL_TYPE)
8016 {
8017 tem = fold_strip_sign_ops (arg0);
8018 if (tem)
8019 return fold_build1_loc (loc, ABS_EXPR, type,
8020 fold_convert_loc (loc, type, tem));
8021 }
8022 return NULL_TREE;
8023
8024 case CONJ_EXPR:
8025 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8026 return fold_convert_loc (loc, type, arg0);
8027 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8028 {
8029 tree itype = TREE_TYPE (type);
8030 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8031 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8032 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8033 negate_expr (ipart));
8034 }
8035 if (TREE_CODE (arg0) == COMPLEX_CST)
8036 {
8037 tree itype = TREE_TYPE (type);
8038 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8039 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8040 return build_complex (type, rpart, negate_expr (ipart));
8041 }
8042 if (TREE_CODE (arg0) == CONJ_EXPR)
8043 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8044 return NULL_TREE;
8045
8046 case BIT_NOT_EXPR:
8047 if (TREE_CODE (arg0) == INTEGER_CST)
8048 return fold_not_const (arg0, type);
8049 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8050 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8051 /* Convert ~ (-A) to A - 1. */
8052 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8053 return fold_build2_loc (loc, MINUS_EXPR, type,
8054 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8055 build_int_cst (type, 1));
8056 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8057 else if (INTEGRAL_TYPE_P (type)
8058 && ((TREE_CODE (arg0) == MINUS_EXPR
8059 && integer_onep (TREE_OPERAND (arg0, 1)))
8060 || (TREE_CODE (arg0) == PLUS_EXPR
8061 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8062 return fold_build1_loc (loc, NEGATE_EXPR, type,
8063 fold_convert_loc (loc, type,
8064 TREE_OPERAND (arg0, 0)));
8065 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8066 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8067 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8068 fold_convert_loc (loc, type,
8069 TREE_OPERAND (arg0, 0)))))
8070 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8071 fold_convert_loc (loc, type,
8072 TREE_OPERAND (arg0, 1)));
8073 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8074 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8075 fold_convert_loc (loc, type,
8076 TREE_OPERAND (arg0, 1)))))
8077 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8078 fold_convert_loc (loc, type,
8079 TREE_OPERAND (arg0, 0)), tem);
8080 /* Perform BIT_NOT_EXPR on each element individually. */
8081 else if (TREE_CODE (arg0) == VECTOR_CST)
8082 {
8083 tree *elements;
8084 tree elem;
8085 unsigned count = VECTOR_CST_NELTS (arg0), i;
8086
8087 elements = XALLOCAVEC (tree, count);
8088 for (i = 0; i < count; i++)
8089 {
8090 elem = VECTOR_CST_ELT (arg0, i);
8091 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8092 if (elem == NULL_TREE)
8093 break;
8094 elements[i] = elem;
8095 }
8096 if (i == count)
8097 return build_vector (type, elements);
8098 }
8099 else if (COMPARISON_CLASS_P (arg0)
8100 && (VECTOR_TYPE_P (type)
8101 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8102 {
8103 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8104 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8105 HONOR_NANS (TYPE_MODE (op_type)));
8106 if (subcode != ERROR_MARK)
8107 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8108 TREE_OPERAND (arg0, 1));
8109 }
8110
8111
8112 return NULL_TREE;
8113
8114 case TRUTH_NOT_EXPR:
8115 /* Note that the operand of this must be an int
8116 and its values must be 0 or 1.
8117 ("true" is a fixed value perhaps depending on the language,
8118 but we don't handle values other than 1 correctly yet.) */
8119 tem = fold_truth_not_expr (loc, arg0);
8120 if (!tem)
8121 return NULL_TREE;
8122 return fold_convert_loc (loc, type, tem);
8123
8124 case REALPART_EXPR:
8125 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8126 return fold_convert_loc (loc, type, arg0);
8127 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8128 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8129 TREE_OPERAND (arg0, 1));
8130 if (TREE_CODE (arg0) == COMPLEX_CST)
8131 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8132 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8133 {
8134 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8135 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8136 fold_build1_loc (loc, REALPART_EXPR, itype,
8137 TREE_OPERAND (arg0, 0)),
8138 fold_build1_loc (loc, REALPART_EXPR, itype,
8139 TREE_OPERAND (arg0, 1)));
8140 return fold_convert_loc (loc, type, tem);
8141 }
8142 if (TREE_CODE (arg0) == CONJ_EXPR)
8143 {
8144 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8145 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8146 TREE_OPERAND (arg0, 0));
8147 return fold_convert_loc (loc, type, tem);
8148 }
8149 if (TREE_CODE (arg0) == CALL_EXPR)
8150 {
8151 tree fn = get_callee_fndecl (arg0);
8152 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8153 switch (DECL_FUNCTION_CODE (fn))
8154 {
8155 CASE_FLT_FN (BUILT_IN_CEXPI):
8156 fn = mathfn_built_in (type, BUILT_IN_COS);
8157 if (fn)
8158 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8159 break;
8160
8161 default:
8162 break;
8163 }
8164 }
8165 return NULL_TREE;
8166
8167 case IMAGPART_EXPR:
8168 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8169 return build_zero_cst (type);
8170 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8171 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8172 TREE_OPERAND (arg0, 0));
8173 if (TREE_CODE (arg0) == COMPLEX_CST)
8174 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8175 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8176 {
8177 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8178 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8179 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8180 TREE_OPERAND (arg0, 0)),
8181 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8182 TREE_OPERAND (arg0, 1)));
8183 return fold_convert_loc (loc, type, tem);
8184 }
8185 if (TREE_CODE (arg0) == CONJ_EXPR)
8186 {
8187 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8188 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8189 return fold_convert_loc (loc, type, negate_expr (tem));
8190 }
8191 if (TREE_CODE (arg0) == CALL_EXPR)
8192 {
8193 tree fn = get_callee_fndecl (arg0);
8194 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8195 switch (DECL_FUNCTION_CODE (fn))
8196 {
8197 CASE_FLT_FN (BUILT_IN_CEXPI):
8198 fn = mathfn_built_in (type, BUILT_IN_SIN);
8199 if (fn)
8200 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8201 break;
8202
8203 default:
8204 break;
8205 }
8206 }
8207 return NULL_TREE;
8208
8209 case INDIRECT_REF:
8210 /* Fold *&X to X if X is an lvalue. */
8211 if (TREE_CODE (op0) == ADDR_EXPR)
8212 {
8213 tree op00 = TREE_OPERAND (op0, 0);
8214 if ((TREE_CODE (op00) == VAR_DECL
8215 || TREE_CODE (op00) == PARM_DECL
8216 || TREE_CODE (op00) == RESULT_DECL)
8217 && !TREE_READONLY (op00))
8218 return op00;
8219 }
8220 return NULL_TREE;
8221
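/* Fold constant vector unpacks; e.g. on a little-endian target,
   VEC_UNPACK_LO_EXPR of the V4SI constant { 1, 2, 3, 4 } yields
   the widened two-element vector { 1, 2 }. */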
8222 case VEC_UNPACK_LO_EXPR:
8223 case VEC_UNPACK_HI_EXPR:
8224 case VEC_UNPACK_FLOAT_LO_EXPR:
8225 case VEC_UNPACK_FLOAT_HI_EXPR:
8226 {
8227 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8228 tree *elts;
8229 enum tree_code subcode;
8230
8231 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8232 if (TREE_CODE (arg0) != VECTOR_CST)
8233 return NULL_TREE;
8234
8235 elts = XALLOCAVEC (tree, nelts * 2);
8236 if (!vec_cst_ctor_to_array (arg0, elts))
8237 return NULL_TREE;
8238
8239 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8240 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8241 elts += nelts;
8242
8243 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8244 subcode = NOP_EXPR;
8245 else
8246 subcode = FLOAT_EXPR;
8247
8248 for (i = 0; i < nelts; i++)
8249 {
8250 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8251 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8252 return NULL_TREE;
8253 }
8254
8255 return build_vector (type, elts);
8256 }
8257
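/* Fold a reduction of a constant vector to a scalar; e.g.
   REDUC_PLUS_EXPR of { 1, 2, 3, 4 } folds to the scalar 10. */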
8258 case REDUC_MIN_EXPR:
8259 case REDUC_MAX_EXPR:
8260 case REDUC_PLUS_EXPR:
8261 {
8262 unsigned int nelts, i;
8263 tree *elts;
8264 enum tree_code subcode;
8265
8266 if (TREE_CODE (op0) != VECTOR_CST)
8267 return NULL_TREE;
8268 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8269
8270 elts = XALLOCAVEC (tree, nelts);
8271 if (!vec_cst_ctor_to_array (op0, elts))
8272 return NULL_TREE;
8273
8274 switch (code)
8275 {
8276 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8277 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8278 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8279 default: gcc_unreachable ();
8280 }
8281
8282 for (i = 1; i < nelts; i++)
8283 {
8284 elts[0] = const_binop (subcode, elts[0], elts[i]);
8285 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8286 return NULL_TREE;
8287 }
8288
8289 return elts[0];
8290 }
8291
8292 default:
8293 return NULL_TREE;
8294 } /* switch (code) */
8295 }
8296
8297
8298 /* If the operation was a conversion do _not_ mark a resulting constant
8299 with TREE_OVERFLOW if the original constant was not. These conversions
8300 have implementation defined behavior and retaining the TREE_OVERFLOW
8301 flag here would confuse later passes such as VRP. */
8302 tree
8303 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8304 tree type, tree op0)
8305 {
8306 tree res = fold_unary_loc (loc, code, type, op0);
8307 if (res
8308 && TREE_CODE (res) == INTEGER_CST
8309 && TREE_CODE (op0) == INTEGER_CST
8310 && CONVERT_EXPR_CODE_P (code))
8311 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8312
8313 return res;
8314 }
8315
8316 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8317 operands OP0 and OP1. LOC is the location of the resulting expression.
8318 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8319 Return the folded expression if folding is successful. Otherwise,
8320 return NULL_TREE. */
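/* For example, (a || b) && (a || c) is rewritten to a || (b && c),
   and when LOGICAL_OP_NON_SHORT_CIRCUIT holds, a && b with simple,
   side-effect-free operands becomes the non-short-circuiting
   TRUTH_AND_EXPR form. */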
8321 static tree
8322 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8323 tree arg0, tree arg1, tree op0, tree op1)
8324 {
8325 tree tem;
8326
8327 /* We only do these simplifications if we are optimizing. */
8328 if (!optimize)
8329 return NULL_TREE;
8330
8331 /* Check for things like (A || B) && (A || C). We can convert this
8332 to A || (B && C). Note that either operator can be any of the four
8333 truth and/or operations and the transformation will still be
8334 valid. Also note that we only care about order for the
8335 ANDIF and ORIF operators. If B contains side effects, this
8336 might change the truth-value of A. */
8337 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8338 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8339 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8340 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8341 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8342 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8343 {
8344 tree a00 = TREE_OPERAND (arg0, 0);
8345 tree a01 = TREE_OPERAND (arg0, 1);
8346 tree a10 = TREE_OPERAND (arg1, 0);
8347 tree a11 = TREE_OPERAND (arg1, 1);
8348 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8349 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8350 && (code == TRUTH_AND_EXPR
8351 || code == TRUTH_OR_EXPR));
8352
8353 if (operand_equal_p (a00, a10, 0))
8354 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8355 fold_build2_loc (loc, code, type, a01, a11));
8356 else if (commutative && operand_equal_p (a00, a11, 0))
8357 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8358 fold_build2_loc (loc, code, type, a01, a10));
8359 else if (commutative && operand_equal_p (a01, a10, 0))
8360 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8361 fold_build2_loc (loc, code, type, a00, a11));
8362
8363 /* This case is tricky because we must either have commutative
8364 operators or else A10 must not have side-effects. */
8365
8366 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8367 && operand_equal_p (a01, a11, 0))
8368 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8369 fold_build2_loc (loc, code, type, a00, a10),
8370 a01);
8371 }
8372
8373 /* See if we can build a range comparison. */
8374 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8375 return tem;
8376
8377 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8378 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8379 {
8380 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8381 if (tem)
8382 return fold_build2_loc (loc, code, type, tem, arg1);
8383 }
8384
8385 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8386 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8387 {
8388 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8389 if (tem)
8390 return fold_build2_loc (loc, code, type, arg0, tem);
8391 }
8392
8393 /* Check for the possibility of merging component references. If our
8394 lhs is another similar operation, try to merge its rhs with our
8395 rhs. Then try to merge our lhs and rhs. */
8396 if (TREE_CODE (arg0) == code
8397 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8398 TREE_OPERAND (arg0, 1), arg1)))
8399 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8400
8401 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8402 return tem;
8403
8404 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8405 && (code == TRUTH_AND_EXPR
8406 || code == TRUTH_ANDIF_EXPR
8407 || code == TRUTH_OR_EXPR
8408 || code == TRUTH_ORIF_EXPR))
8409 {
8410 enum tree_code ncode, icode;
8411
8412 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8413 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8414 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8415
8416 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8417 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8418 We don't want to pack more than two leaves into a non-IF AND/OR
8419 expression.
8420 If the tree code of the left-hand operand isn't an AND/OR-IF code
8421 and isn't equal to IF-CODE, then we don't want to add the
8422 right-hand operand. If the inner right-hand side of the left-hand
8423 operand has side effects, or isn't simple, then we can't add to it,
8424 as otherwise we might destroy the if-sequence. */
8425 if (TREE_CODE (arg0) == icode
8426 && simple_operand_p_2 (arg1)
8427 /* Needed for sequence points to handle trapping and
8428 side effects. */
8429 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8430 {
8431 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8432 arg1);
8433 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8434 tem);
8435 }
8436 /* Same as above, but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8437 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8438 else if (TREE_CODE (arg1) == icode
8439 && simple_operand_p_2 (arg0)
8440 /* Needed for sequence points to handle trapping and
8441 side effects. */
8442 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8443 {
8444 tem = fold_build2_loc (loc, ncode, type,
8445 arg0, TREE_OPERAND (arg1, 0));
8446 return fold_build2_loc (loc, icode, type, tem,
8447 TREE_OPERAND (arg1, 1));
8448 }
8449 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8450 into (A OR B).
8451 For sequence-point consistency, we need to check for trapping
8452 and side effects. */
8453 else if (code == icode && simple_operand_p_2 (arg0)
8454 && simple_operand_p_2 (arg1))
8455 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8456 }
8457
8458 return NULL_TREE;
8459 }
8460
8461 /* Fold a binary expression of code CODE and type TYPE with operands
8462 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8463 Return the folded expression if folding is successful. Otherwise,
8464 return NULL_TREE. */
8465
8466 static tree
8467 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8468 {
8469 enum tree_code compl_code;
8470
8471 if (code == MIN_EXPR)
8472 compl_code = MAX_EXPR;
8473 else if (code == MAX_EXPR)
8474 compl_code = MIN_EXPR;
8475 else
8476 gcc_unreachable ();
8477
8478 /* MIN (MAX (a, b), b) == b. */
8479 if (TREE_CODE (op0) == compl_code
8480 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8481 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8482
8483 /* MIN (MAX (b, a), b) == b. */
8484 if (TREE_CODE (op0) == compl_code
8485 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8486 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8487 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8488
8489 /* MIN (a, MAX (a, b)) == a. */
8490 if (TREE_CODE (op1) == compl_code
8491 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8492 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8493 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8494
8495 /* MIN (a, MAX (b, a)) == a. */
8496 if (TREE_CODE (op1) == compl_code
8497 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8498 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8499 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8500
8501 return NULL_TREE;
8502 }
8503
8504 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8505 by changing CODE to reduce the magnitude of constants involved in
8506 ARG0 of the comparison.
8507 Returns a canonicalized comparison tree if a simplification was
8508 possible, otherwise returns NULL_TREE.
8509 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8510 valid if signed overflow is undefined. */
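/* For instance, a - 5 < b is canonicalized to a - 4 <= b (valid only
   when signed overflow is undefined), and 10 <= b becomes b > 9. */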
8511
8512 static tree
8513 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8514 tree arg0, tree arg1,
8515 bool *strict_overflow_p)
8516 {
8517 enum tree_code code0 = TREE_CODE (arg0);
8518 tree t, cst0 = NULL_TREE;
8519 int sgn0;
8520 bool swap = false;
8521
8522 /* Match A +- CST code arg1 and CST code arg1. We can change the
8523 first form only if overflow is undefined. */
8524 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8525 /* In principle pointers also have undefined overflow behavior,
8526 but that causes problems elsewhere. */
8527 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8528 && (code0 == MINUS_EXPR
8529 || code0 == PLUS_EXPR)
8530 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8531 || code0 == INTEGER_CST))
8532 return NULL_TREE;
8533
8534 /* Identify the constant in arg0 and its sign. */
8535 if (code0 == INTEGER_CST)
8536 cst0 = arg0;
8537 else
8538 cst0 = TREE_OPERAND (arg0, 1);
8539 sgn0 = tree_int_cst_sgn (cst0);
8540
8541 /* Overflowed constants and zero will cause problems. */
8542 if (integer_zerop (cst0)
8543 || TREE_OVERFLOW (cst0))
8544 return NULL_TREE;
8545
8546 /* See if we can reduce the magnitude of the constant in
8547 arg0 by changing the comparison code. */
8548 if (code0 == INTEGER_CST)
8549 {
8550 /* CST <= arg1 -> CST-1 < arg1. */
8551 if (code == LE_EXPR && sgn0 == 1)
8552 code = LT_EXPR;
8553 /* -CST < arg1 -> -CST-1 <= arg1. */
8554 else if (code == LT_EXPR && sgn0 == -1)
8555 code = LE_EXPR;
8556 /* CST > arg1 -> CST-1 >= arg1. */
8557 else if (code == GT_EXPR && sgn0 == 1)
8558 code = GE_EXPR;
8559 /* -CST >= arg1 -> -CST-1 > arg1. */
8560 else if (code == GE_EXPR && sgn0 == -1)
8561 code = GT_EXPR;
8562 else
8563 return NULL_TREE;
8564 /* arg1 code' CST' might be more canonical. */
8565 swap = true;
8566 }
8567 else
8568 {
8569 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8570 if (code == LT_EXPR
8571 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8572 code = LE_EXPR;
8573 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8574 else if (code == GT_EXPR
8575 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8576 code = GE_EXPR;
8577 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8578 else if (code == LE_EXPR
8579 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8580 code = LT_EXPR;
8581 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8582 else if (code == GE_EXPR
8583 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8584 code = GT_EXPR;
8585 else
8586 return NULL_TREE;
8587 *strict_overflow_p = true;
8588 }
8589
8590 /* Now build the constant reduced in magnitude. But not if that
8591 	     would produce one outside of its type's range.  */
8592 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8593 && ((sgn0 == 1
8594 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8595 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8596 || (sgn0 == -1
8597 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8598 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8599 /* We cannot swap the comparison here as that would cause us to
8600 endlessly recurse. */
8601 return NULL_TREE;
8602
8603 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8604 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8605 if (code0 != INTEGER_CST)
8606 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8607 t = fold_convert (TREE_TYPE (arg1), t);
8608
8609 	  /* If swapping might yield a more canonical form, do so.  */
8610 if (swap)
8611 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8612 else
8613 return fold_build2_loc (loc, code, type, t, arg1);
8614 }
8615
8616 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8617 overflow further. Try to decrease the magnitude of constants involved
8618 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8619 and put sole constants at the second argument position.
8620 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8621
8622 static tree
8623 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8624 tree arg0, tree arg1)
8625 {
8626 tree t;
8627 bool strict_overflow_p;
8628 const char * const warnmsg = G_("assuming signed overflow does not occur "
8629 "when reducing constant in comparison");
8630
8631 /* Try canonicalization by simplifying arg0. */
8632 strict_overflow_p = false;
8633 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8634 &strict_overflow_p);
8635 if (t)
8636 {
8637 if (strict_overflow_p)
8638 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8639 return t;
8640 }
8641
8642 /* Try canonicalization by simplifying arg1 using the swapped
8643 comparison. */
8644 code = swap_tree_comparison (code);
8645 strict_overflow_p = false;
8646 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8647 &strict_overflow_p);
8648 if (t && strict_overflow_p)
8649 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8650 return t;
8651 }
8652
8653 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8654 space. This is used to avoid issuing overflow warnings for
8655 	   expressions like &p->x which cannot wrap.  */
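/* For instance, when BASE is a pointer to a structure and BITPOS is the
   bit offset of one of its members, TOTAL is bounded by the structure
   size and the function returns false: the member address cannot wrap.  */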
8656
8657 static bool
8658 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8659 {
8660 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8661 return true;
8662
8663 if (bitpos < 0)
8664 return true;
8665
8666 wide_int wi_offset;
8667 int precision = TYPE_PRECISION (TREE_TYPE (base));
8668 if (offset == NULL_TREE)
8669 wi_offset = wi::zero (precision);
8670 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8671 return true;
8672 else
8673 wi_offset = offset;
8674
8675 bool overflow;
8676 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8677 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8678 if (overflow)
8679 return true;
8680
8681 if (!wi::fits_uhwi_p (total))
8682 return true;
8683
8684 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8685 if (size <= 0)
8686 return true;
8687
8688 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8689 array. */
8690 if (TREE_CODE (base) == ADDR_EXPR)
8691 {
8692 HOST_WIDE_INT base_size;
8693
8694 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8695 if (base_size > 0 && size < base_size)
8696 size = base_size;
8697 }
8698
8699 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8700 }
8701
8702 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8703 	   of sizetype kind.  This makes sure to properly sign-extend the
8704 	   constant.  */
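/* E.g. with a 64-bit HOST_WIDE_INT and a 32-bit sizetype, the sizetype
   constant 0xffffffff is returned as -1 rather than 4294967295.  */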
8705
8706 static HOST_WIDE_INT
8707 size_low_cst (const_tree t)
8708 {
8709 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8710 int prec = TYPE_PRECISION (TREE_TYPE (t));
8711 if (prec < HOST_BITS_PER_WIDE_INT)
8712 return sext_hwi (w, prec);
8713 return w;
8714 }
8715
8716 /* Subroutine of fold_binary. This routine performs all of the
8717 transformations that are common to the equality/inequality
8718 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8719 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8720 	   fold_binary should call fold_binary instead.  Fold a comparison with
8721 tree code CODE and type TYPE with operands OP0 and OP1. Return
8722 the folded comparison or NULL_TREE. */
8723
8724 static tree
8725 fold_comparison (location_t loc, enum tree_code code, tree type,
8726 tree op0, tree op1)
8727 {
8728 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8729 tree arg0, arg1, tem;
8730
8731 arg0 = op0;
8732 arg1 = op1;
8733
8734 STRIP_SIGN_NOPS (arg0);
8735 STRIP_SIGN_NOPS (arg1);
8736
8737 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
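  /* E.g. X + 5 < 10 becomes X < 5.  If the combined constant overflows,
     as in X + 1 < INT_MIN for signed X, the comparison can never be
     true and is folded to a constant instead.  */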
8738 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8739 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8740 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8741 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8742 && TREE_CODE (arg1) == INTEGER_CST
8743 && !TREE_OVERFLOW (arg1))
8744 {
8745 const enum tree_code
8746 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8747 tree const1 = TREE_OPERAND (arg0, 1);
8748 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8749 tree variable = TREE_OPERAND (arg0, 0);
8750 tree new_const = int_const_binop (reverse_op, const2, const1);
8751
8752 	      /* If the constant operation overflowed, this can be
8753 simplified as a comparison against INT_MAX/INT_MIN. */
8754 if (TREE_OVERFLOW (new_const))
8755 {
8756 int const1_sgn = tree_int_cst_sgn (const1);
8757 enum tree_code code2 = code;
8758
8759 /* Get the sign of the constant on the lhs if the
8760 operation were VARIABLE + CONST1. */
8761 if (TREE_CODE (arg0) == MINUS_EXPR)
8762 const1_sgn = -const1_sgn;
8763
8764 /* The sign of the constant determines if we overflowed
8765 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8766 Canonicalize to the INT_MIN overflow by swapping the comparison
8767 if necessary. */
8768 if (const1_sgn == -1)
8769 code2 = swap_tree_comparison (code);
8770
8771 /* We now can look at the canonicalized case
8772 VARIABLE + 1 CODE2 INT_MIN
8773 and decide on the result. */
8774 switch (code2)
8775 {
8776 case EQ_EXPR:
8777 case LT_EXPR:
8778 case LE_EXPR:
8779 return
8780 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8781
8782 case NE_EXPR:
8783 case GE_EXPR:
8784 case GT_EXPR:
8785 return
8786 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8787
8788 default:
8789 gcc_unreachable ();
8790 }
8791 }
8792 else
8793 {
8794 if (!equality_code)
8795 fold_overflow_warning ("assuming signed overflow does not occur "
8796 "when changing X +- C1 cmp C2 to "
8797 "X cmp C2 -+ C1",
8798 WARN_STRICT_OVERFLOW_COMPARISON);
8799 return fold_build2_loc (loc, code, type, variable, new_const);
8800 }
8801 }
8802
8803 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8804 if (TREE_CODE (arg0) == MINUS_EXPR
8805 && equality_code
8806 && integer_zerop (arg1))
8807 {
8808 /* ??? The transformation is valid for the other operators if overflow
8809 is undefined for the type, but performing it here badly interacts
8810 with the transformation in fold_cond_expr_with_comparison which
8811 	         attempts to synthesize ABS_EXPR.  */
8812 if (!equality_code)
8813 fold_overflow_warning ("assuming signed overflow does not occur "
8814 "when changing X - Y cmp 0 to X cmp Y",
8815 WARN_STRICT_OVERFLOW_COMPARISON);
8816 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8817 TREE_OPERAND (arg0, 1));
8818 }
8819
8820 	  /* For comparisons of pointers we can decompose them to a compile-time
8821 	     comparison of the base objects and the offsets into the object.
8822 This requires at least one operand being an ADDR_EXPR or a
8823 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8824 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8825 && (TREE_CODE (arg0) == ADDR_EXPR
8826 || TREE_CODE (arg1) == ADDR_EXPR
8827 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8828 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8829 {
8830 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8831 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8832 enum machine_mode mode;
8833 int volatilep, unsignedp;
8834 bool indirect_base0 = false, indirect_base1 = false;
8835
8836 /* Get base and offset for the access. Strip ADDR_EXPR for
8837 get_inner_reference, but put it back by stripping INDIRECT_REF
8838 off the base object if possible. indirect_baseN will be true
8839 if baseN is not an address but refers to the object itself. */
8840 base0 = arg0;
8841 if (TREE_CODE (arg0) == ADDR_EXPR)
8842 {
8843 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8844 &bitsize, &bitpos0, &offset0, &mode,
8845 &unsignedp, &volatilep, false);
8846 if (TREE_CODE (base0) == INDIRECT_REF)
8847 base0 = TREE_OPERAND (base0, 0);
8848 else
8849 indirect_base0 = true;
8850 }
8851 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8852 {
8853 base0 = TREE_OPERAND (arg0, 0);
8854 STRIP_SIGN_NOPS (base0);
8855 if (TREE_CODE (base0) == ADDR_EXPR)
8856 {
8857 base0 = TREE_OPERAND (base0, 0);
8858 indirect_base0 = true;
8859 }
8860 offset0 = TREE_OPERAND (arg0, 1);
8861 if (tree_fits_shwi_p (offset0))
8862 {
8863 HOST_WIDE_INT off = size_low_cst (offset0);
8864 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8865 * BITS_PER_UNIT)
8866 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8867 {
8868 bitpos0 = off * BITS_PER_UNIT;
8869 offset0 = NULL_TREE;
8870 }
8871 }
8872 }
8873
8874 base1 = arg1;
8875 if (TREE_CODE (arg1) == ADDR_EXPR)
8876 {
8877 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8878 &bitsize, &bitpos1, &offset1, &mode,
8879 &unsignedp, &volatilep, false);
8880 if (TREE_CODE (base1) == INDIRECT_REF)
8881 base1 = TREE_OPERAND (base1, 0);
8882 else
8883 indirect_base1 = true;
8884 }
8885 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8886 {
8887 base1 = TREE_OPERAND (arg1, 0);
8888 STRIP_SIGN_NOPS (base1);
8889 if (TREE_CODE (base1) == ADDR_EXPR)
8890 {
8891 base1 = TREE_OPERAND (base1, 0);
8892 indirect_base1 = true;
8893 }
8894 offset1 = TREE_OPERAND (arg1, 1);
8895 if (tree_fits_shwi_p (offset1))
8896 {
8897 HOST_WIDE_INT off = size_low_cst (offset1);
8898 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8899 * BITS_PER_UNIT)
8900 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8901 {
8902 bitpos1 = off * BITS_PER_UNIT;
8903 offset1 = NULL_TREE;
8904 }
8905 }
8906 }
8907
8908 /* A local variable can never be pointed to by
8909 the default SSA name of an incoming parameter. */
8910 if ((TREE_CODE (arg0) == ADDR_EXPR
8911 && indirect_base0
8912 && TREE_CODE (base0) == VAR_DECL
8913 && auto_var_in_fn_p (base0, current_function_decl)
8914 && !indirect_base1
8915 && TREE_CODE (base1) == SSA_NAME
8916 && SSA_NAME_IS_DEFAULT_DEF (base1)
8917 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8918 || (TREE_CODE (arg1) == ADDR_EXPR
8919 && indirect_base1
8920 && TREE_CODE (base1) == VAR_DECL
8921 && auto_var_in_fn_p (base1, current_function_decl)
8922 && !indirect_base0
8923 && TREE_CODE (base0) == SSA_NAME
8924 && SSA_NAME_IS_DEFAULT_DEF (base0)
8925 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8926 {
8927 if (code == NE_EXPR)
8928 return constant_boolean_node (1, type);
8929 else if (code == EQ_EXPR)
8930 return constant_boolean_node (0, type);
8931 }
8932 /* If we have equivalent bases we might be able to simplify. */
8933 else if (indirect_base0 == indirect_base1
8934 && operand_equal_p (base0, base1, 0))
8935 {
8936 /* We can fold this expression to a constant if the non-constant
8937 offset parts are equal. */
8938 if ((offset0 == offset1
8939 || (offset0 && offset1
8940 && operand_equal_p (offset0, offset1, 0)))
8941 && (code == EQ_EXPR
8942 || code == NE_EXPR
8943 || (indirect_base0 && DECL_P (base0))
8944 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8945
8946 {
8947 if (!equality_code
8948 && bitpos0 != bitpos1
8949 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8950 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8951 fold_overflow_warning (("assuming pointer wraparound does not "
8952 "occur when comparing P +- C1 with "
8953 "P +- C2"),
8954 WARN_STRICT_OVERFLOW_CONDITIONAL);
8955
8956 switch (code)
8957 {
8958 case EQ_EXPR:
8959 return constant_boolean_node (bitpos0 == bitpos1, type);
8960 case NE_EXPR:
8961 return constant_boolean_node (bitpos0 != bitpos1, type);
8962 case LT_EXPR:
8963 return constant_boolean_node (bitpos0 < bitpos1, type);
8964 case LE_EXPR:
8965 return constant_boolean_node (bitpos0 <= bitpos1, type);
8966 case GE_EXPR:
8967 return constant_boolean_node (bitpos0 >= bitpos1, type);
8968 case GT_EXPR:
8969 return constant_boolean_node (bitpos0 > bitpos1, type);
8970 default:;
8971 }
8972 }
8973 /* We can simplify the comparison to a comparison of the variable
8974 offset parts if the constant offset parts are equal.
8975 Be careful to use signed sizetype here because otherwise we
8976 mess with array offsets in the wrong way. This is possible
8977 	         because pointer arithmetic is restricted to remain within an
8978 object and overflow on pointer differences is undefined as of
8979 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8980 else if (bitpos0 == bitpos1
8981 && (equality_code
8982 || (indirect_base0 && DECL_P (base0))
8983 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8984 {
8985 /* By converting to signed sizetype we cover middle-end pointer
8986 arithmetic which operates on unsigned pointer types of size
8987 type size and ARRAY_REF offsets which are properly sign or
8988 zero extended from their type in case it is narrower than
8989 sizetype. */
8990 if (offset0 == NULL_TREE)
8991 offset0 = build_int_cst (ssizetype, 0);
8992 else
8993 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8994 if (offset1 == NULL_TREE)
8995 offset1 = build_int_cst (ssizetype, 0);
8996 else
8997 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8998
8999 if (!equality_code
9000 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9001 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9002 fold_overflow_warning (("assuming pointer wraparound does not "
9003 "occur when comparing P +- C1 with "
9004 "P +- C2"),
9005 WARN_STRICT_OVERFLOW_COMPARISON);
9006
9007 return fold_build2_loc (loc, code, type, offset0, offset1);
9008 }
9009 }
9010 /* For non-equal bases we can simplify if they are addresses
9011 of local binding decls or constants. */
9012 else if (indirect_base0 && indirect_base1
9013 /* We know that !operand_equal_p (base0, base1, 0)
9014 because the if condition was false. But make
9015 sure two decls are not the same. */
9016 && base0 != base1
9017 && TREE_CODE (arg0) == ADDR_EXPR
9018 && TREE_CODE (arg1) == ADDR_EXPR
9019 && (((TREE_CODE (base0) == VAR_DECL
9020 || TREE_CODE (base0) == PARM_DECL)
9021 && (targetm.binds_local_p (base0)
9022 || CONSTANT_CLASS_P (base1)))
9023 || CONSTANT_CLASS_P (base0))
9024 && (((TREE_CODE (base1) == VAR_DECL
9025 || TREE_CODE (base1) == PARM_DECL)
9026 && (targetm.binds_local_p (base1)
9027 || CONSTANT_CLASS_P (base0)))
9028 || CONSTANT_CLASS_P (base1)))
9029 {
9030 if (code == EQ_EXPR)
9031 return omit_two_operands_loc (loc, type, boolean_false_node,
9032 arg0, arg1);
9033 else if (code == NE_EXPR)
9034 return omit_two_operands_loc (loc, type, boolean_true_node,
9035 arg0, arg1);
9036 }
9037 /* For equal offsets we can simplify to a comparison of the
9038 base addresses. */
9039 else if (bitpos0 == bitpos1
9040 && (indirect_base0
9041 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9042 && (indirect_base1
9043 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9044 && ((offset0 == offset1)
9045 || (offset0 && offset1
9046 && operand_equal_p (offset0, offset1, 0))))
9047 {
9048 if (indirect_base0)
9049 base0 = build_fold_addr_expr_loc (loc, base0);
9050 if (indirect_base1)
9051 base1 = build_fold_addr_expr_loc (loc, base1);
9052 return fold_build2_loc (loc, code, type, base0, base1);
9053 }
9054 }
9055
9056 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9057 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9058 the resulting offset is smaller in absolute value than the
9059 original one and has the same sign. */
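  /* E.g. X + 3 < Y + 5 becomes X < Y + 2; the combined constant 2 is
     smaller in absolute value than the original 5 and has the same
     sign.  */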
9060 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9061 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9062 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9063 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9064 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9065 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9066 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9067 {
9068 tree const1 = TREE_OPERAND (arg0, 1);
9069 tree const2 = TREE_OPERAND (arg1, 1);
9070 tree variable1 = TREE_OPERAND (arg0, 0);
9071 tree variable2 = TREE_OPERAND (arg1, 0);
9072 tree cst;
9073 const char * const warnmsg = G_("assuming signed overflow does not "
9074 "occur when combining constants around "
9075 "a comparison");
9076
9077 /* Put the constant on the side where it doesn't overflow and is
9078 	         of lower absolute value and of the same sign as before.  */
9079 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9080 ? MINUS_EXPR : PLUS_EXPR,
9081 const2, const1);
9082 if (!TREE_OVERFLOW (cst)
9083 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9084 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9085 {
9086 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9087 return fold_build2_loc (loc, code, type,
9088 variable1,
9089 fold_build2_loc (loc, TREE_CODE (arg1),
9090 TREE_TYPE (arg1),
9091 variable2, cst));
9092 }
9093
9094 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9095 ? MINUS_EXPR : PLUS_EXPR,
9096 const1, const2);
9097 if (!TREE_OVERFLOW (cst)
9098 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9099 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9100 {
9101 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9102 return fold_build2_loc (loc, code, type,
9103 fold_build2_loc (loc, TREE_CODE (arg0),
9104 TREE_TYPE (arg0),
9105 variable1, cst),
9106 variable2);
9107 }
9108 }
9109
9110 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9111 signed arithmetic case. That form is created by the compiler
9112 often enough for folding it to be of value. One example is in
9113 computing loop trip counts after Operator Strength Reduction. */
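  /* E.g. X * 4 < 0 becomes X < 0, and X * -4 < 0 becomes X > 0.  */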
9114 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9115 && TREE_CODE (arg0) == MULT_EXPR
9116 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9117 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9118 && integer_zerop (arg1))
9119 {
9120 tree const1 = TREE_OPERAND (arg0, 1);
9121 tree const2 = arg1; /* zero */
9122 tree variable1 = TREE_OPERAND (arg0, 0);
9123 enum tree_code cmp_code = code;
9124
9125 /* Handle unfolded multiplication by zero. */
9126 if (integer_zerop (const1))
9127 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9128
9129 fold_overflow_warning (("assuming signed overflow does not occur when "
9130 "eliminating multiplication in comparison "
9131 "with zero"),
9132 WARN_STRICT_OVERFLOW_COMPARISON);
9133
9134 /* If const1 is negative we swap the sense of the comparison. */
9135 if (tree_int_cst_sgn (const1) < 0)
9136 cmp_code = swap_tree_comparison (cmp_code);
9137
9138 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9139 }
9140
9141 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9142 if (tem)
9143 return tem;
9144
9145 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9146 {
9147 tree targ0 = strip_float_extensions (arg0);
9148 tree targ1 = strip_float_extensions (arg1);
9149 tree newtype = TREE_TYPE (targ0);
9150
9151 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9152 newtype = TREE_TYPE (targ1);
9153
9154 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9155 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9156 return fold_build2_loc (loc, code, type,
9157 fold_convert_loc (loc, newtype, targ0),
9158 fold_convert_loc (loc, newtype, targ1));
9159
9160 /* (-a) CMP (-b) -> b CMP a */
9161 if (TREE_CODE (arg0) == NEGATE_EXPR
9162 && TREE_CODE (arg1) == NEGATE_EXPR)
9163 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9164 TREE_OPERAND (arg0, 0));
9165
9166 if (TREE_CODE (arg1) == REAL_CST)
9167 {
9168 REAL_VALUE_TYPE cst;
9169 cst = TREE_REAL_CST (arg1);
9170
9171 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9172 if (TREE_CODE (arg0) == NEGATE_EXPR)
9173 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9174 TREE_OPERAND (arg0, 0),
9175 build_real (TREE_TYPE (arg1),
9176 real_value_negate (&cst)));
9177
9178 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9179 /* a CMP (-0) -> a CMP 0 */
9180 if (REAL_VALUE_MINUS_ZERO (cst))
9181 return fold_build2_loc (loc, code, type, arg0,
9182 build_real (TREE_TYPE (arg1), dconst0));
9183
9184 /* x != NaN is always true, other ops are always false. */
9185 if (REAL_VALUE_ISNAN (cst)
9186 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9187 {
9188 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9189 return omit_one_operand_loc (loc, type, tem, arg0);
9190 }
9191
9192 /* Fold comparisons against infinity. */
9193 if (REAL_VALUE_ISINF (cst)
9194 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9195 {
9196 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9197 if (tem != NULL_TREE)
9198 return tem;
9199 }
9200 }
9201
9202 /* If this is a comparison of a real constant with a PLUS_EXPR
9203 or a MINUS_EXPR of a real constant, we can convert it into a
9204 comparison with a revised real constant as long as no overflow
9205 occurs when unsafe_math_optimizations are enabled. */
9206 if (flag_unsafe_math_optimizations
9207 && TREE_CODE (arg1) == REAL_CST
9208 && (TREE_CODE (arg0) == PLUS_EXPR
9209 || TREE_CODE (arg0) == MINUS_EXPR)
9210 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9211 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9212 ? MINUS_EXPR : PLUS_EXPR,
9213 arg1, TREE_OPERAND (arg0, 1)))
9214 && !TREE_OVERFLOW (tem))
9215 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9216
9217 /* Likewise, we can simplify a comparison of a real constant with
9218 a MINUS_EXPR whose first operand is also a real constant, i.e.
9219 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9220 floating-point types only if -fassociative-math is set. */
9221 if (flag_associative_math
9222 && TREE_CODE (arg1) == REAL_CST
9223 && TREE_CODE (arg0) == MINUS_EXPR
9224 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9225 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9226 arg1))
9227 && !TREE_OVERFLOW (tem))
9228 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9229 TREE_OPERAND (arg0, 1), tem);
9230
9231 /* Fold comparisons against built-in math functions. */
9232 if (TREE_CODE (arg1) == REAL_CST
9233 && flag_unsafe_math_optimizations
9234 && ! flag_errno_math)
9235 {
9236 enum built_in_function fcode = builtin_mathfn_code (arg0);
9237
9238 if (fcode != END_BUILTINS)
9239 {
9240 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9241 if (tem != NULL_TREE)
9242 return tem;
9243 }
9244 }
9245 }
9246
9247 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9248 && CONVERT_EXPR_P (arg0))
9249 {
9250 /* If we are widening one operand of an integer comparison,
9251 see if the other operand is similarly being widened. Perhaps we
9252 can do the comparison in the narrower type. */
9253 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9254 if (tem)
9255 return tem;
9256
9257 /* Or if we are changing signedness. */
9258 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9259 if (tem)
9260 return tem;
9261 }
9262
9263 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9264 constant, we can simplify it. */
9265 if (TREE_CODE (arg1) == INTEGER_CST
9266 && (TREE_CODE (arg0) == MIN_EXPR
9267 || TREE_CODE (arg0) == MAX_EXPR)
9268 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9269 {
9270 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9271 if (tem)
9272 return tem;
9273 }
9274
9275 /* Simplify comparison of something with itself. (For IEEE
9276 floating-point, we can only do some of these simplifications.) */
9277 if (operand_equal_p (arg0, arg1, 0))
9278 {
9279 switch (code)
9280 {
9281 case EQ_EXPR:
9282 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9283 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9284 return constant_boolean_node (1, type);
9285 break;
9286
9287 case GE_EXPR:
9288 case LE_EXPR:
9289 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9290 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9291 return constant_boolean_node (1, type);
9292 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9293
9294 case NE_EXPR:
9295 /* For NE, we can only do this simplification if integer
9296 or we don't honor IEEE floating point NaNs. */
9297 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9298 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9299 break;
9300 /* ... fall through ... */
9301 case GT_EXPR:
9302 case LT_EXPR:
9303 return constant_boolean_node (0, type);
9304 default:
9305 gcc_unreachable ();
9306 }
9307 }
9308
9309 /* If we are comparing an expression that just has comparisons
9310 of two integer values, arithmetic expressions of those comparisons,
9311 and constants, we can simplify it. There are only three cases
9312 to check: the two values can either be equal, the first can be
9313 greater, or the second can be greater. Fold the expression for
9314 those three values. Since each value must be 0 or 1, we have
9315 eight possibilities, each of which corresponds to the constant 0
9316 or 1 or one of the six possible comparisons.
9317
9318 This handles common cases like (a > b) == 0 but also handles
9319 expressions like ((x > y) - (y > x)) > 0, which supposedly
9320 occur in macroized code. */
9321
9322 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9323 {
9324 tree cval1 = 0, cval2 = 0;
9325 int save_p = 0;
9326
9327 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9328 /* Don't handle degenerate cases here; they should already
9329 have been handled anyway. */
9330 && cval1 != 0 && cval2 != 0
9331 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9332 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9333 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9334 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9335 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9336 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9337 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9338 {
9339 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9340 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9341
9342 /* We can't just pass T to eval_subst in case cval1 or cval2
9343 was the same as ARG1. */
9344
9345 tree high_result
9346 = fold_build2_loc (loc, code, type,
9347 eval_subst (loc, arg0, cval1, maxval,
9348 cval2, minval),
9349 arg1);
9350 tree equal_result
9351 = fold_build2_loc (loc, code, type,
9352 eval_subst (loc, arg0, cval1, maxval,
9353 cval2, maxval),
9354 arg1);
9355 tree low_result
9356 = fold_build2_loc (loc, code, type,
9357 eval_subst (loc, arg0, cval1, minval,
9358 cval2, maxval),
9359 arg1);
9360
9361 /* All three of these results should be 0 or 1. Confirm they are.
9362 Then use those values to select the proper code to use. */
9363
9364 if (TREE_CODE (high_result) == INTEGER_CST
9365 && TREE_CODE (equal_result) == INTEGER_CST
9366 && TREE_CODE (low_result) == INTEGER_CST)
9367 {
9368 /* Make a 3-bit mask with the high-order bit being the
9369 	             value for `>', the next for `=', and the low for `<'.  */
9370 switch ((integer_onep (high_result) * 4)
9371 + (integer_onep (equal_result) * 2)
9372 + integer_onep (low_result))
9373 {
9374 case 0:
9375 /* Always false. */
9376 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9377 case 1:
9378 code = LT_EXPR;
9379 break;
9380 case 2:
9381 code = EQ_EXPR;
9382 break;
9383 case 3:
9384 code = LE_EXPR;
9385 break;
9386 case 4:
9387 code = GT_EXPR;
9388 break;
9389 case 5:
9390 code = NE_EXPR;
9391 break;
9392 case 6:
9393 code = GE_EXPR;
9394 break;
9395 case 7:
9396 /* Always true. */
9397 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9398 }
9399
9400 if (save_p)
9401 {
9402 tem = save_expr (build2 (code, type, cval1, cval2));
9403 SET_EXPR_LOCATION (tem, loc);
9404 return tem;
9405 }
9406 return fold_build2_loc (loc, code, type, cval1, cval2);
9407 }
9408 }
9409 }
9410
9411 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9412 into a single range test. */
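  /* E.g. for signed X, X / 4 == 2 holds exactly for 8 <= X && X <= 11,
     which fold_div_compare should express as a single range check.  */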
9413 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9414 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9415 && TREE_CODE (arg1) == INTEGER_CST
9416 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9417 && !integer_zerop (TREE_OPERAND (arg0, 1))
9418 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9419 && !TREE_OVERFLOW (arg1))
9420 {
9421 tem = fold_div_compare (loc, code, type, arg0, arg1);
9422 if (tem != NULL_TREE)
9423 return tem;
9424 }
9425
9426 /* Fold ~X op ~Y as Y op X. */
9427 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9428 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9429 {
9430 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9431 return fold_build2_loc (loc, code, type,
9432 fold_convert_loc (loc, cmp_type,
9433 TREE_OPERAND (arg1, 0)),
9434 TREE_OPERAND (arg0, 0));
9435 }
9436
9437 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9438 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9439 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9440 {
9441 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9442 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9443 TREE_OPERAND (arg0, 0),
9444 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9445 fold_convert_loc (loc, cmp_type, arg1)));
9446 }
9447
9448 return NULL_TREE;
9449 }
9450
9451
9452 /* Subroutine of fold_binary. Optimize complex multiplications of the
9453 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9454 argument EXPR represents the expression "z" of type TYPE. */
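/* The underlying identity: for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i.  */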
9455
9456 static tree
9457 fold_mult_zconjz (location_t loc, tree type, tree expr)
9458 {
9459 tree itype = TREE_TYPE (type);
9460 tree rpart, ipart, tem;
9461
9462 if (TREE_CODE (expr) == COMPLEX_EXPR)
9463 {
9464 rpart = TREE_OPERAND (expr, 0);
9465 ipart = TREE_OPERAND (expr, 1);
9466 }
9467 else if (TREE_CODE (expr) == COMPLEX_CST)
9468 {
9469 rpart = TREE_REALPART (expr);
9470 ipart = TREE_IMAGPART (expr);
9471 }
9472 else
9473 {
9474 expr = save_expr (expr);
9475 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9476 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9477 }
9478
9479 rpart = save_expr (rpart);
9480 ipart = save_expr (ipart);
9481 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9482 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9483 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9484 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9485 build_zero_cst (itype));
9486 }
9487
9488
9489 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9490 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9491 guarantees that P and N have the same least significant log2(M) bits.
9492 N is not otherwise constrained. In particular, N is not normalized to
9493 0 <= N < M as is common. In general, the precise value of P is unknown.
9494 M is chosen as large as possible such that constant N can be determined.
9495
9496 Returns M and sets *RESIDUE to N.
9497
9498 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9499 account. This is not always possible due to PR 35705.
9500 */
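/* E.g. for EXPR == &buf[5], with buf known to be 16-byte aligned, the
   result is M == 16 and *RESIDUE == 5: the pointer value is congruent
   to 5 modulo 16.  */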
9501
9502 static unsigned HOST_WIDE_INT
9503 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9504 bool allow_func_align)
9505 {
9506 enum tree_code code;
9507
9508 *residue = 0;
9509
9510 code = TREE_CODE (expr);
9511 if (code == ADDR_EXPR)
9512 {
9513 unsigned int bitalign;
9514 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9515 *residue /= BITS_PER_UNIT;
9516 return bitalign / BITS_PER_UNIT;
9517 }
9518 else if (code == POINTER_PLUS_EXPR)
9519 {
9520 tree op0, op1;
9521 unsigned HOST_WIDE_INT modulus;
9522 enum tree_code inner_code;
9523
9524 op0 = TREE_OPERAND (expr, 0);
9525 STRIP_NOPS (op0);
9526 modulus = get_pointer_modulus_and_residue (op0, residue,
9527 allow_func_align);
9528
9529 op1 = TREE_OPERAND (expr, 1);
9530 STRIP_NOPS (op1);
9531 inner_code = TREE_CODE (op1);
9532 if (inner_code == INTEGER_CST)
9533 {
9534 *residue += TREE_INT_CST_LOW (op1);
9535 return modulus;
9536 }
9537 else if (inner_code == MULT_EXPR)
9538 {
9539 op1 = TREE_OPERAND (op1, 1);
9540 if (TREE_CODE (op1) == INTEGER_CST)
9541 {
9542 unsigned HOST_WIDE_INT align;
9543
9544 /* Compute the greatest power-of-2 divisor of op1. */
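          /* E.g. 24 & -24 == 8 in two's complement arithmetic.  */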
9545 align = TREE_INT_CST_LOW (op1);
9546 align &= -align;
9547
9548 	              /* If align is non-zero and less than modulus, replace
9549 	                 modulus with align.  If align is 0, then either op1 is 0
9550 or the greatest power-of-2 divisor of op1 doesn't fit in an
9551 unsigned HOST_WIDE_INT. In either case, no additional
9552 constraint is imposed. */
9553 if (align)
9554 modulus = MIN (modulus, align);
9555
9556 return modulus;
9557 }
9558 }
9559 }
9560
9561 /* If we get here, we were unable to determine anything useful about the
9562 expression. */
9563 return 1;
9564 }
9565
9566 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9567 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9568
9569 static bool
9570 vec_cst_ctor_to_array (tree arg, tree *elts)
9571 {
9572 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9573
9574 if (TREE_CODE (arg) == VECTOR_CST)
9575 {
9576 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9577 elts[i] = VECTOR_CST_ELT (arg, i);
9578 }
9579 else if (TREE_CODE (arg) == CONSTRUCTOR)
9580 {
9581 constructor_elt *elt;
9582
9583 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9584 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9585 return false;
9586 else
9587 elts[i] = elt->value;
9588 }
9589 else
9590 return false;
9591 for (; i < nelts; i++)
9592 elts[i]
9593 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9594 return true;
9595 }
9596
9597 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9598 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9599 NULL_TREE otherwise. */
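/* E.g. with four elements per vector, ARG0 = {a,b,c,d},
   ARG1 = {e,f,g,h} and SEL = {0,5,2,7} select {a,f,c,h}: selector
   values below the element count index ARG0, the rest index ARG1.  */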
9600
9601 static tree
9602 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9603 {
9604 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9605 tree *elts;
9606 bool need_ctor = false;
9607
9608 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9609 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9610 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9611 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9612 return NULL_TREE;
9613
9614 elts = XALLOCAVEC (tree, nelts * 3);
9615 if (!vec_cst_ctor_to_array (arg0, elts)
9616 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9617 return NULL_TREE;
9618
9619 for (i = 0; i < nelts; i++)
9620 {
9621 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9622 need_ctor = true;
9623 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9624 }
9625
9626 if (need_ctor)
9627 {
9628 vec<constructor_elt, va_gc> *v;
9629 vec_alloc (v, nelts);
9630 for (i = 0; i < nelts; i++)
9631 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9632 return build_constructor (type, v);
9633 }
9634 else
9635 return build_vector (type, &elts[2 * nelts]);
9636 }
9637
9638 /* Try to fold a pointer difference of type TYPE between two address
9639 	   expressions of array references AREF0 and AREF1 using location LOC.
9640 	   Return a simplified expression for the difference or NULL_TREE.  */
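/* E.g. &a[i] - &a[j] with equal bases folds to (i - j) * sizeof (a[0]),
   computed in TYPE.  */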
9641
9642 static tree
9643 fold_addr_of_array_ref_difference (location_t loc, tree type,
9644 tree aref0, tree aref1)
9645 {
9646 tree base0 = TREE_OPERAND (aref0, 0);
9647 tree base1 = TREE_OPERAND (aref1, 0);
9648 tree base_offset = build_int_cst (type, 0);
9649
9650 /* If the bases are array references as well, recurse. If the bases
9651 are pointer indirections compute the difference of the pointers.
9652 If the bases are equal, we are set. */
9653 if ((TREE_CODE (base0) == ARRAY_REF
9654 && TREE_CODE (base1) == ARRAY_REF
9655 && (base_offset
9656 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9657 || (INDIRECT_REF_P (base0)
9658 && INDIRECT_REF_P (base1)
9659 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9660 TREE_OPERAND (base0, 0),
9661 TREE_OPERAND (base1, 0))))
9662 || operand_equal_p (base0, base1, 0))
9663 {
9664 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9665 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9666 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9667 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9668 return fold_build2_loc (loc, PLUS_EXPR, type,
9669 base_offset,
9670 fold_build2_loc (loc, MULT_EXPR, type,
9671 diff, esz));
9672 }
9673 return NULL_TREE;
9674 }
9675
9676 /* If the real or vector real constant CST of type TYPE has an exact
9677 inverse, return it, else return NULL. */
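/* E.g. 4.0 yields 0.25, while 3.0 yields NULL because 1/3 has no exact
   representation in binary floating point.  */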
9678
9679 static tree
9680 exact_inverse (tree type, tree cst)
9681 {
9682 REAL_VALUE_TYPE r;
9683 tree unit_type, *elts;
9684 enum machine_mode mode;
9685 unsigned vec_nelts, i;
9686
9687 switch (TREE_CODE (cst))
9688 {
9689 case REAL_CST:
9690 r = TREE_REAL_CST (cst);
9691
9692 if (exact_real_inverse (TYPE_MODE (type), &r))
9693 return build_real (type, r);
9694
9695 return NULL_TREE;
9696
9697 case VECTOR_CST:
9698 vec_nelts = VECTOR_CST_NELTS (cst);
9699 elts = XALLOCAVEC (tree, vec_nelts);
9700 unit_type = TREE_TYPE (type);
9701 mode = TYPE_MODE (unit_type);
9702
9703 for (i = 0; i < vec_nelts; i++)
9704 {
9705 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9706 if (!exact_real_inverse (mode, &r))
9707 return NULL_TREE;
9708 elts[i] = build_real (unit_type, r);
9709 }
9710
9711 return build_vector (type, elts);
9712
9713 default:
9714 return NULL_TREE;
9715 }
9716 }
9717
9718 /* Mask out the tz least significant bits of X of type TYPE where
9719 tz is the number of trailing zeroes in Y. */
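/* E.g. for Y == 24 (binary 11000, three trailing zeroes) the result is
   X & ~7, i.e. X with its three least significant bits cleared.  */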
9720 static wide_int
9721 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9722 {
9723 int tz = wi::ctz (y);
9724 if (tz > 0)
9725 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9726 return x;
9727 }
9728
9729 /* Return true when T is an address and is known to be nonzero.
9730 For floating point we further ensure that T is not denormal.
9731 	   Similar logic is present in nonzero_address_p in rtlanal.c.
9732
9733 If the return value is based on the assumption that signed overflow
9734 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9735 change *STRICT_OVERFLOW_P. */
9736
9737 static bool
9738 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9739 {
9740 tree type = TREE_TYPE (t);
9741 enum tree_code code;
9742
9743 /* Doing something useful for floating point would need more work. */
9744 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9745 return false;
9746
9747 code = TREE_CODE (t);
9748 switch (TREE_CODE_CLASS (code))
9749 {
9750 case tcc_unary:
9751 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9752 strict_overflow_p);
9753 case tcc_binary:
9754 case tcc_comparison:
9755 return tree_binary_nonzero_warnv_p (code, type,
9756 TREE_OPERAND (t, 0),
9757 TREE_OPERAND (t, 1),
9758 strict_overflow_p);
9759 case tcc_constant:
9760 case tcc_declaration:
9761 case tcc_reference:
9762 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9763
9764 default:
9765 break;
9766 }
9767
9768 switch (code)
9769 {
9770 case TRUTH_NOT_EXPR:
9771 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9772 strict_overflow_p);
9773
9774 case TRUTH_AND_EXPR:
9775 case TRUTH_OR_EXPR:
9776 case TRUTH_XOR_EXPR:
9777 return tree_binary_nonzero_warnv_p (code, type,
9778 TREE_OPERAND (t, 0),
9779 TREE_OPERAND (t, 1),
9780 strict_overflow_p);
9781
9782 case COND_EXPR:
9783 case CONSTRUCTOR:
9784 case OBJ_TYPE_REF:
9785 case ASSERT_EXPR:
9786 case ADDR_EXPR:
9787 case WITH_SIZE_EXPR:
9788 case SSA_NAME:
9789 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9790
9791 case COMPOUND_EXPR:
9792 case MODIFY_EXPR:
9793 case BIND_EXPR:
9794 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9795 strict_overflow_p);
9796
9797 case SAVE_EXPR:
9798 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9799 strict_overflow_p);
9800
9801 case CALL_EXPR:
9802 {
9803 tree fndecl = get_callee_fndecl (t);
9804 if (!fndecl) return false;
9805 if (flag_delete_null_pointer_checks && !flag_check_new
9806 && DECL_IS_OPERATOR_NEW (fndecl)
9807 && !TREE_NOTHROW (fndecl))
9808 return true;
9809 if (flag_delete_null_pointer_checks
9810 && lookup_attribute ("returns_nonnull",
9811 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9812 return true;
9813 return alloca_call_p (t);
9814 }
9815
9816 default:
9817 break;
9818 }
9819 return false;
9820 }
9821
9822 /* Return true when T is an address and is known to be nonzero.
9823 Handle warnings about undefined signed overflow. */
9824
9825 static bool
9826 tree_expr_nonzero_p (tree t)
9827 {
9828 bool ret, strict_overflow_p;
9829
9830 strict_overflow_p = false;
9831 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9832 if (strict_overflow_p)
9833 fold_overflow_warning (("assuming signed overflow does not occur when "
9834 "determining that expression is always "
9835 "non-zero"),
9836 WARN_STRICT_OVERFLOW_MISC);
9837 return ret;
9838 }
9839
9840 /* Fold a binary expression of code CODE and type TYPE with operands
9841 OP0 and OP1. LOC is the location of the resulting expression.
9842 Return the folded expression if folding is successful. Otherwise,
9843 return NULL_TREE. */
9844
9845 tree
9846 fold_binary_loc (location_t loc,
9847 enum tree_code code, tree type, tree op0, tree op1)
9848 {
9849 enum tree_code_class kind = TREE_CODE_CLASS (code);
9850 tree arg0, arg1, tem;
9851 tree t1 = NULL_TREE;
9852 bool strict_overflow_p;
9853 unsigned int prec;
9854
9855 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9856 && TREE_CODE_LENGTH (code) == 2
9857 && op0 != NULL_TREE
9858 && op1 != NULL_TREE);
9859
9860 arg0 = op0;
9861 arg1 = op1;
9862
9863 /* Strip any conversions that don't change the mode. This is
9864 safe for every expression, except for a comparison expression
9865 because its signedness is derived from its operands. So, in
9866 the latter case, only strip conversions that don't change the
9867 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9868 preserved.
9869
9870 Note that this is done as an internal manipulation within the
9871 constant folder, in order to find the simplest representation
9872 of the arguments so that their form can be studied. In any
9873 	     case, the appropriate type conversions should be put back in
9874 the tree that will get out of the constant folder. */
9875
9876 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9877 {
9878 STRIP_SIGN_NOPS (arg0);
9879 STRIP_SIGN_NOPS (arg1);
9880 }
9881 else
9882 {
9883 STRIP_NOPS (arg0);
9884 STRIP_NOPS (arg1);
9885 }
9886
9887 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9888 constant but we can't do arithmetic on them. */
9889 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9890 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9891 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9892 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9893 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9894 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9895 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9896 {
9897 if (kind == tcc_binary)
9898 {
9899 /* Make sure type and arg0 have the same saturating flag. */
9900 gcc_assert (TYPE_SATURATING (type)
9901 == TYPE_SATURATING (TREE_TYPE (arg0)));
9902 tem = const_binop (code, arg0, arg1);
9903 }
9904 else if (kind == tcc_comparison)
9905 tem = fold_relational_const (code, type, arg0, arg1);
9906 else
9907 tem = NULL_TREE;
9908
9909 if (tem != NULL_TREE)
9910 {
9911 if (TREE_TYPE (tem) != type)
9912 tem = fold_convert_loc (loc, type, tem);
9913 return tem;
9914 }
9915 }
9916
9917 /* If this is a commutative operation, and ARG0 is a constant, move it
9918 to ARG1 to reduce the number of tests below. */
9919 if (commutative_tree_code (code)
9920 && tree_swap_operands_p (arg0, arg1, true))
9921 return fold_build2_loc (loc, code, type, op1, op0);
9922
9923 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9924 to ARG1 to reduce the number of tests below. */
9925 if (kind == tcc_comparison
9926 && tree_swap_operands_p (arg0, arg1, true))
9927 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9928
9929 tem = generic_simplify (loc, code, type, op0, op1);
9930 if (tem)
9931 return tem;
9932
9933 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9934
9935 First check for cases where an arithmetic operation is applied to a
9936 compound, conditional, or comparison operation. Push the arithmetic
9937 operation inside the compound or conditional to see if any folding
9938 can then be done. Convert comparison to conditional for this purpose.
9939 	     This also optimizes non-constant cases that used to be done in
9940 expand_expr.
9941
9942 	     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9943 	     one of the operands is a comparison and the other is a comparison, a
9944 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9945 code below would make the expression more complex. Change it to a
9946 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9947 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
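  /* E.g. for boolean-valued operands, (a < b) & (c < d) is rewritten as
     the TRUTH_AND_EXPR of the two comparisons, and (a < b) == (c < d)
     becomes the inversion of their TRUTH_XOR_EXPR.  */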
9948
9949 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9950 || code == EQ_EXPR || code == NE_EXPR)
9951 && TREE_CODE (type) != VECTOR_TYPE
9952 && ((truth_value_p (TREE_CODE (arg0))
9953 && (truth_value_p (TREE_CODE (arg1))
9954 || (TREE_CODE (arg1) == BIT_AND_EXPR
9955 && integer_onep (TREE_OPERAND (arg1, 1)))))
9956 || (truth_value_p (TREE_CODE (arg1))
9957 && (truth_value_p (TREE_CODE (arg0))
9958 || (TREE_CODE (arg0) == BIT_AND_EXPR
9959 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9960 {
9961 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9962 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9963 : TRUTH_XOR_EXPR,
9964 boolean_type_node,
9965 fold_convert_loc (loc, boolean_type_node, arg0),
9966 fold_convert_loc (loc, boolean_type_node, arg1));
9967
9968 if (code == EQ_EXPR)
9969 tem = invert_truthvalue_loc (loc, tem);
9970
9971 return fold_convert_loc (loc, type, tem);
9972 }
9973
9974 if (TREE_CODE_CLASS (code) == tcc_binary
9975 || TREE_CODE_CLASS (code) == tcc_comparison)
9976 {
9977 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9978 {
9979 tem = fold_build2_loc (loc, code, type,
9980 fold_convert_loc (loc, TREE_TYPE (op0),
9981 TREE_OPERAND (arg0, 1)), op1);
9982 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9983 tem);
9984 }
9985 if (TREE_CODE (arg1) == COMPOUND_EXPR
9986 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9987 {
9988 tem = fold_build2_loc (loc, code, type, op0,
9989 fold_convert_loc (loc, TREE_TYPE (op1),
9990 TREE_OPERAND (arg1, 1)));
9991 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9992 tem);
9993 }
9994
9995 if (TREE_CODE (arg0) == COND_EXPR
9996 || TREE_CODE (arg0) == VEC_COND_EXPR
9997 || COMPARISON_CLASS_P (arg0))
9998 {
9999 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10000 arg0, arg1,
10001 /*cond_first_p=*/1);
10002 if (tem != NULL_TREE)
10003 return tem;
10004 }
10005
10006 if (TREE_CODE (arg1) == COND_EXPR
10007 || TREE_CODE (arg1) == VEC_COND_EXPR
10008 || COMPARISON_CLASS_P (arg1))
10009 {
10010 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10011 arg1, arg0,
10012 /*cond_first_p=*/0);
10013 if (tem != NULL_TREE)
10014 return tem;
10015 }
10016 }
10017
10018 switch (code)
10019 {
10020 case MEM_REF:
10021 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10022 if (TREE_CODE (arg0) == ADDR_EXPR
10023 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10024 {
10025 tree iref = TREE_OPERAND (arg0, 0);
10026 return fold_build2 (MEM_REF, type,
10027 TREE_OPERAND (iref, 0),
10028 int_const_binop (PLUS_EXPR, arg1,
10029 TREE_OPERAND (iref, 1)));
10030 }
10031
10032 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10033 if (TREE_CODE (arg0) == ADDR_EXPR
10034 && handled_component_p (TREE_OPERAND (arg0, 0)))
10035 {
10036 tree base;
10037 HOST_WIDE_INT coffset;
10038 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10039 &coffset);
10040 if (!base)
10041 return NULL_TREE;
10042 return fold_build2 (MEM_REF, type,
10043 build_fold_addr_expr (base),
10044 int_const_binop (PLUS_EXPR, arg1,
10045 size_int (coffset)));
10046 }
10047
10048 return NULL_TREE;
10049
10050 case POINTER_PLUS_EXPR:
10051 /* 0 +p index -> (type)index */
10052 if (integer_zerop (arg0))
10053 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10054
10055 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10056 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10057 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10058 return fold_convert_loc (loc, type,
10059 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10060 fold_convert_loc (loc, sizetype,
10061 arg1),
10062 fold_convert_loc (loc, sizetype,
10063 arg0)));
10064
10065 /* (PTR +p B) +p A -> PTR +p (B + A) */
10066 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10067 {
10068 tree inner;
10069 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10070 tree arg00 = TREE_OPERAND (arg0, 0);
10071 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10072 arg01, fold_convert_loc (loc, sizetype, arg1));
10073 return fold_convert_loc (loc, type,
10074 fold_build_pointer_plus_loc (loc,
10075 arg00, inner));
10076 }
10077
10078 /* PTR_CST +p CST -> CST1 */
10079 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10080 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10081 fold_convert_loc (loc, type, arg1));
10082
10083 return NULL_TREE;
10084
10085 case PLUS_EXPR:
10086 /* A + (-B) -> A - B */
10087 if (TREE_CODE (arg1) == NEGATE_EXPR
10088 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10089 return fold_build2_loc (loc, MINUS_EXPR, type,
10090 fold_convert_loc (loc, type, arg0),
10091 fold_convert_loc (loc, type,
10092 TREE_OPERAND (arg1, 0)));
10093 /* (-A) + B -> B - A */
10094 if (TREE_CODE (arg0) == NEGATE_EXPR
10095 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10096 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10097 return fold_build2_loc (loc, MINUS_EXPR, type,
10098 fold_convert_loc (loc, type, arg1),
10099 fold_convert_loc (loc, type,
10100 TREE_OPERAND (arg0, 0)));
10101
10102 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10103 {
10104 /* Convert ~A + 1 to -A. */
10105 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10106 && integer_each_onep (arg1))
10107 return fold_build1_loc (loc, NEGATE_EXPR, type,
10108 fold_convert_loc (loc, type,
10109 TREE_OPERAND (arg0, 0)));
10110
10111 /* ~X + X is -1. */
10112 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10113 && !TYPE_OVERFLOW_TRAPS (type))
10114 {
10115 tree tem = TREE_OPERAND (arg0, 0);
10116
10117 STRIP_NOPS (tem);
10118 if (operand_equal_p (tem, arg1, 0))
10119 {
10120 t1 = build_all_ones_cst (type);
10121 return omit_one_operand_loc (loc, type, t1, arg1);
10122 }
10123 }
10124
10125 /* X + ~X is -1. */
10126 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10127 && !TYPE_OVERFLOW_TRAPS (type))
10128 {
10129 tree tem = TREE_OPERAND (arg1, 0);
10130
10131 STRIP_NOPS (tem);
10132 if (operand_equal_p (arg0, tem, 0))
10133 {
10134 t1 = build_all_ones_cst (type);
10135 return omit_one_operand_loc (loc, type, t1, arg0);
10136 }
10137 }
10138
10139 /* X + (X / CST) * -CST is X % CST. */
10140 if (TREE_CODE (arg1) == MULT_EXPR
10141 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10142 && operand_equal_p (arg0,
10143 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10144 {
10145 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10146 tree cst1 = TREE_OPERAND (arg1, 1);
10147 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10148 cst1, cst0);
10149 if (sum && integer_zerop (sum))
10150 return fold_convert_loc (loc, type,
10151 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10152 TREE_TYPE (arg0), arg0,
10153 cst0));
10154 }
10155 }
10156
10157 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10158 one. Make sure the type is not saturating and has the signedness of
10159 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10160 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10161 if ((TREE_CODE (arg0) == MULT_EXPR
10162 || TREE_CODE (arg1) == MULT_EXPR)
10163 && !TYPE_SATURATING (type)
10164 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10165 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10166 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10167 {
10168 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10169 if (tem)
10170 return tem;
10171 }
10172
10173 if (! FLOAT_TYPE_P (type))
10174 {
10175 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10176 with a constant, and the two constants have no bits in common,
10177 we should treat this as a BIT_IOR_EXPR since this may produce more
10178 simplifications. */
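	      /* E.g. (X & 0xf0) + (Y & 0x0f) can become
	         (X & 0xf0) | (Y & 0x0f).  */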
10179 if (TREE_CODE (arg0) == BIT_AND_EXPR
10180 && TREE_CODE (arg1) == BIT_AND_EXPR
10181 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10182 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10183 && wi::bit_and (TREE_OPERAND (arg0, 1),
10184 TREE_OPERAND (arg1, 1)) == 0)
10185 {
10186 code = BIT_IOR_EXPR;
10187 goto bit_ior;
10188 }
10189
10190 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10191 (plus (plus (mult) (mult)) (foo)) so that we can
10192 take advantage of the factoring cases below. */
10193 if (TYPE_OVERFLOW_WRAPS (type)
10194 && (((TREE_CODE (arg0) == PLUS_EXPR
10195 || TREE_CODE (arg0) == MINUS_EXPR)
10196 && TREE_CODE (arg1) == MULT_EXPR)
10197 || ((TREE_CODE (arg1) == PLUS_EXPR
10198 || TREE_CODE (arg1) == MINUS_EXPR)
10199 && TREE_CODE (arg0) == MULT_EXPR)))
10200 {
10201 tree parg0, parg1, parg, marg;
10202 enum tree_code pcode;
10203
10204 if (TREE_CODE (arg1) == MULT_EXPR)
10205 parg = arg0, marg = arg1;
10206 else
10207 parg = arg1, marg = arg0;
10208 pcode = TREE_CODE (parg);
10209 parg0 = TREE_OPERAND (parg, 0);
10210 parg1 = TREE_OPERAND (parg, 1);
10211 STRIP_NOPS (parg0);
10212 STRIP_NOPS (parg1);
10213
10214 if (TREE_CODE (parg0) == MULT_EXPR
10215 && TREE_CODE (parg1) != MULT_EXPR)
10216 return fold_build2_loc (loc, pcode, type,
10217 fold_build2_loc (loc, PLUS_EXPR, type,
10218 fold_convert_loc (loc, type,
10219 parg0),
10220 fold_convert_loc (loc, type,
10221 marg)),
10222 fold_convert_loc (loc, type, parg1));
10223 if (TREE_CODE (parg0) != MULT_EXPR
10224 && TREE_CODE (parg1) == MULT_EXPR)
10225 return
10226 fold_build2_loc (loc, PLUS_EXPR, type,
10227 fold_convert_loc (loc, type, parg0),
10228 fold_build2_loc (loc, pcode, type,
10229 fold_convert_loc (loc, type, marg),
10230 fold_convert_loc (loc, type,
10231 parg1)));
10232 }
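/* E.g. for a wrapping (say unsigned) type, ((a*b + c) + d*e) is
   rebuilt above as ((a*b + d*e) + c), so that the two MULT_EXPRs
   become adjacent for the factoring code.  */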
10233 }
10234 else
10235 {
10236 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10237 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10238 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10239
10240 /* Likewise if the operands are reversed. */
10241 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10242 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10243
10244 /* Convert X + -C into X - C. */
10245 if (TREE_CODE (arg1) == REAL_CST
10246 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10247 {
10248 tem = fold_negate_const (arg1, type);
10249 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10250 return fold_build2_loc (loc, MINUS_EXPR, type,
10251 fold_convert_loc (loc, type, arg0),
10252 fold_convert_loc (loc, type, tem));
10253 }
10254
10255 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10256 to __complex__ ( x, y ). This is not the same for SNaNs or
10257 if signed zeros are involved. */
10258 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10259 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10260 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10261 {
10262 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10263 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10264 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10265 bool arg0rz = false, arg0iz = false;
10266 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10267 || (arg0i && (arg0iz = real_zerop (arg0i))))
10268 {
10269 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10270 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10271 if (arg0rz && arg1i && real_zerop (arg1i))
10272 {
10273 tree rp = arg1r ? arg1r
10274 : build1 (REALPART_EXPR, rtype, arg1);
10275 tree ip = arg0i ? arg0i
10276 : build1 (IMAGPART_EXPR, rtype, arg0);
10277 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10278 }
10279 else if (arg0iz && arg1r && real_zerop (arg1r))
10280 {
10281 tree rp = arg0r ? arg0r
10282 : build1 (REALPART_EXPR, rtype, arg0);
10283 tree ip = arg1i ? arg1i
10284 : build1 (IMAGPART_EXPR, rtype, arg1);
10285 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10286 }
10287 }
10288 }
10289
10290 if (flag_unsafe_math_optimizations
10291 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10292 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10293 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10294 return tem;
10295
10296 /* Convert x+x into x*2.0. */
10297 if (operand_equal_p (arg0, arg1, 0)
10298 && SCALAR_FLOAT_TYPE_P (type))
10299 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10300 build_real (type, dconst2));
10301
10302 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10303 We associate floats only if the user has specified
10304 -fassociative-math. */
10305 if (flag_associative_math
10306 && TREE_CODE (arg1) == PLUS_EXPR
10307 && TREE_CODE (arg0) != MULT_EXPR)
10308 {
10309 tree tree10 = TREE_OPERAND (arg1, 0);
10310 tree tree11 = TREE_OPERAND (arg1, 1);
10311 if (TREE_CODE (tree11) == MULT_EXPR
10312 && TREE_CODE (tree10) == MULT_EXPR)
10313 {
10314 tree tree0;
10315 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10316 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10317 }
10318 }
10319 /* Convert (b*c + d*e) + a into b*c + (d*e + a).

10320 We associate floats only if the user has specified
10321 -fassociative-math. */
10322 if (flag_associative_math
10323 && TREE_CODE (arg0) == PLUS_EXPR
10324 && TREE_CODE (arg1) != MULT_EXPR)
10325 {
10326 tree tree00 = TREE_OPERAND (arg0, 0);
10327 tree tree01 = TREE_OPERAND (arg0, 1);
10328 if (TREE_CODE (tree01) == MULT_EXPR
10329 && TREE_CODE (tree00) == MULT_EXPR)
10330 {
10331 tree tree0;
10332 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10333 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10334 }
10335 }
10336 }
10337
10338 bit_rotate:
10339 /* (A << C1) + (A >> C2), if A is unsigned and C1 + C2 is the size of A,
10340 is a rotate of A by C1 bits.  */
10341 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
10342 is a rotate of A by B bits.  */
10343 {
10344 enum tree_code code0, code1;
10345 tree rtype;
10346 code0 = TREE_CODE (arg0);
10347 code1 = TREE_CODE (arg1);
10348 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10349 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10350 && operand_equal_p (TREE_OPERAND (arg0, 0),
10351 TREE_OPERAND (arg1, 0), 0)
10352 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10353 TYPE_UNSIGNED (rtype))
10354 /* Only create rotates in complete modes. Other cases are not
10355 expanded properly. */
10356 && (element_precision (rtype)
10357 == element_precision (TYPE_MODE (rtype))))
10358 {
10359 tree tree01, tree11;
10360 enum tree_code code01, code11;
10361
10362 tree01 = TREE_OPERAND (arg0, 1);
10363 tree11 = TREE_OPERAND (arg1, 1);
10364 STRIP_NOPS (tree01);
10365 STRIP_NOPS (tree11);
10366 code01 = TREE_CODE (tree01);
10367 code11 = TREE_CODE (tree11);
10368 if (code01 == INTEGER_CST
10369 && code11 == INTEGER_CST
10370 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10371 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10372 {
10373 tem = build2_loc (loc, LROTATE_EXPR,
10374 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10375 TREE_OPERAND (arg0, 0),
10376 code0 == LSHIFT_EXPR ? tree01 : tree11);
10377 return fold_convert_loc (loc, type, tem);
10378 }
10379 else if (code11 == MINUS_EXPR)
10380 {
10381 tree tree110, tree111;
10382 tree110 = TREE_OPERAND (tree11, 0);
10383 tree111 = TREE_OPERAND (tree11, 1);
10384 STRIP_NOPS (tree110);
10385 STRIP_NOPS (tree111);
10386 if (TREE_CODE (tree110) == INTEGER_CST
10387 && 0 == compare_tree_int (tree110,
10388 element_precision
10389 (TREE_TYPE (TREE_OPERAND
10390 (arg0, 0))))
10391 && operand_equal_p (tree01, tree111, 0))
10392 return
10393 fold_convert_loc (loc, type,
10394 build2 ((code0 == LSHIFT_EXPR
10395 ? LROTATE_EXPR
10396 : RROTATE_EXPR),
10397 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10398 TREE_OPERAND (arg0, 0), tree01));
10399 }
10400 else if (code01 == MINUS_EXPR)
10401 {
10402 tree tree010, tree011;
10403 tree010 = TREE_OPERAND (tree01, 0);
10404 tree011 = TREE_OPERAND (tree01, 1);
10405 STRIP_NOPS (tree010);
10406 STRIP_NOPS (tree011);
10407 if (TREE_CODE (tree010) == INTEGER_CST
10408 && 0 == compare_tree_int (tree010,
10409 element_precision
10410 (TREE_TYPE (TREE_OPERAND
10411 (arg0, 0))))
10412 && operand_equal_p (tree11, tree011, 0))
10413 return fold_convert_loc
10414 (loc, type,
10415 build2 ((code0 != LSHIFT_EXPR
10416 ? LROTATE_EXPR
10417 : RROTATE_EXPR),
10418 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10419 TREE_OPERAND (arg0, 0), tree11));
10420 }
10421 }
10422 }
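/* Illustration of the rotate match above, assuming a 32-bit unsigned A:
   (A << 3) + (A >> 29) has C1 + C2 == 32 and folds to
   LROTATE_EXPR <A, 3>; likewise (A << B) + (A >> (32 - B)) folds to a
   variable left-rotate by B.  */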
10423
10424 associate:
10425 /* In most languages, we can't associate operations on floats through
10426 parentheses. Rather than remember where the parentheses were, we
10427 don't associate floats at all, unless the user has specified
10428 -fassociative-math.
10429 Also, we need to make sure the type is not saturating.  */
10430
10431 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10432 && !TYPE_SATURATING (type))
10433 {
10434 tree var0, con0, lit0, minus_lit0;
10435 tree var1, con1, lit1, minus_lit1;
10436 tree atype = type;
10437 bool ok = true;
10438
10439 /* Split both trees into variables, constants, and literals. Then
10440 associate each group together, the constants with literals,
10441 then the result with variables. This increases the chances of
10442 literals being recombined later and of generating relocatable
10443 expressions for the sum of a constant and literal. */
10444 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10445 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10446 code == MINUS_EXPR);
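/* E.g. folding (x + 3) + (y + 5) splits into variables x and y and
   literals 3 and 5; the literals associate to 8, and the sum is
   rebuilt as (x + y) + 8.  */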
10447
10448 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10449 if (code == MINUS_EXPR)
10450 code = PLUS_EXPR;
10451
10452 /* With undefined overflow prefer doing association in a type
10453 which wraps on overflow, if that is one of the operand types. */
10454 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10455 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10456 {
10457 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10458 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10459 atype = TREE_TYPE (arg0);
10460 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10461 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10462 atype = TREE_TYPE (arg1);
10463 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10464 }
10465
10466 /* With undefined overflow we can only associate constants with one
10467 variable, and constants whose association doesn't overflow. */
10468 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10469 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10470 {
10471 if (var0 && var1)
10472 {
10473 tree tmp0 = var0;
10474 tree tmp1 = var1;
10475
10476 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10477 tmp0 = TREE_OPERAND (tmp0, 0);
10478 if (CONVERT_EXPR_P (tmp0)
10479 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10480 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10481 <= TYPE_PRECISION (atype)))
10482 tmp0 = TREE_OPERAND (tmp0, 0);
10483 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10484 tmp1 = TREE_OPERAND (tmp1, 0);
10485 if (CONVERT_EXPR_P (tmp1)
10486 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10487 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10488 <= TYPE_PRECISION (atype)))
10489 tmp1 = TREE_OPERAND (tmp1, 0);
10490 /* The only case we can still associate with two variables
10491 is if they are the same, modulo negation and bit-pattern
10492 preserving conversions. */
10493 if (!operand_equal_p (tmp0, tmp1, 0))
10494 ok = false;
10495 }
10496 }
10497
10498 /* Only do something if we found more than two objects. Otherwise,
10499 nothing has changed and we risk infinite recursion. */
10500 if (ok
10501 && (2 < ((var0 != 0) + (var1 != 0)
10502 + (con0 != 0) + (con1 != 0)
10503 + (lit0 != 0) + (lit1 != 0)
10504 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10505 {
10506 bool any_overflows = false;
10507 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10508 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10509 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10510 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10511 var0 = associate_trees (loc, var0, var1, code, atype);
10512 con0 = associate_trees (loc, con0, con1, code, atype);
10513 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10514 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10515 code, atype);
10516
10517 /* Preserve the MINUS_EXPR if the negative part of the literal is
10518 greater than the positive part. Otherwise, the multiplicative
10519 folding code (i.e. extract_muldiv) may be fooled when
10520 unsigned constants are subtracted, as in the following
10521 example: ((X*2 + 4) - 8U)/2. */
10522 if (minus_lit0 && lit0)
10523 {
10524 if (TREE_CODE (lit0) == INTEGER_CST
10525 && TREE_CODE (minus_lit0) == INTEGER_CST
10526 && tree_int_cst_lt (lit0, minus_lit0))
10527 {
10528 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10529 MINUS_EXPR, atype);
10530 lit0 = 0;
10531 }
10532 else
10533 {
10534 lit0 = associate_trees (loc, lit0, minus_lit0,
10535 MINUS_EXPR, atype);
10536 minus_lit0 = 0;
10537 }
10538 }
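/* In the example above, lit0 == 4 and minus_lit0 == 8U, so the result
   is kept as X*2 - 4 rather than folding 4 - 8U into the wrapped
   constant 0xFFFFFFFCU (assuming 32-bit unsigned), which
   extract_muldiv could then mis-simplify when dividing by 2.  */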
10539
10540 /* Don't introduce overflows through reassociation. */
10541 if (!any_overflows
10542 && ((lit0 && TREE_OVERFLOW (lit0))
10543 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10544 return NULL_TREE;
10545
10546 if (minus_lit0)
10547 {
10548 if (con0 == 0)
10549 return
10550 fold_convert_loc (loc, type,
10551 associate_trees (loc, var0, minus_lit0,
10552 MINUS_EXPR, atype));
10553 else
10554 {
10555 con0 = associate_trees (loc, con0, minus_lit0,
10556 MINUS_EXPR, atype);
10557 return
10558 fold_convert_loc (loc, type,
10559 associate_trees (loc, var0, con0,
10560 PLUS_EXPR, atype));
10561 }
10562 }
10563
10564 con0 = associate_trees (loc, con0, lit0, code, atype);
10565 return
10566 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10567 code, atype));
10568 }
10569 }
10570
10571 return NULL_TREE;
10572
10573 case MINUS_EXPR:
10574 /* Pointer simplifications for subtraction, simple reassociations. */
10575 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10576 {
10577 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10578 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10579 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10580 {
10581 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10582 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10583 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10584 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10585 return fold_build2_loc (loc, PLUS_EXPR, type,
10586 fold_build2_loc (loc, MINUS_EXPR, type,
10587 arg00, arg10),
10588 fold_build2_loc (loc, MINUS_EXPR, type,
10589 arg01, arg11));
10590 }
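/* E.g. with char *p: (p p+ 4) - (p p+ 12) becomes
   (p - p) + (4 - 12), which folds further to -8.  */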
10591 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10592 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10593 {
10594 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10595 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10596 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10597 fold_convert_loc (loc, type, arg1));
10598 if (tmp)
10599 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10600 }
10601 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10602 simplifies. */
10603 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10604 {
10605 tree arg10 = fold_convert_loc (loc, type,
10606 TREE_OPERAND (arg1, 0));
10607 tree arg11 = fold_convert_loc (loc, type,
10608 TREE_OPERAND (arg1, 1));
10609 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10610 fold_convert_loc (loc, type, arg0),
10611 arg10);
10612 if (tmp)
10613 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10614 }
10615 }
10616 /* A - (-B) -> A + B */
10617 if (TREE_CODE (arg1) == NEGATE_EXPR)
10618 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10619 fold_convert_loc (loc, type,
10620 TREE_OPERAND (arg1, 0)));
10621 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10622 if (TREE_CODE (arg0) == NEGATE_EXPR
10623 && negate_expr_p (arg1)
10624 && reorder_operands_p (arg0, arg1))
10625 return fold_build2_loc (loc, MINUS_EXPR, type,
10626 fold_convert_loc (loc, type,
10627 negate_expr (arg1)),
10628 fold_convert_loc (loc, type,
10629 TREE_OPERAND (arg0, 0)));
10630 /* Convert -A - 1 to ~A. */
10631 if (TREE_CODE (arg0) == NEGATE_EXPR
10632 && integer_each_onep (arg1)
10633 && !TYPE_OVERFLOW_TRAPS (type))
10634 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10635 fold_convert_loc (loc, type,
10636 TREE_OPERAND (arg0, 0)));
10637
10638 /* Convert -1 - A to ~A. */
10639 if (TREE_CODE (type) != COMPLEX_TYPE
10640 && integer_all_onesp (arg0))
10641 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10642
10643
10644 /* X - (X / Y) * Y is X % Y. */
10645 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10646 && TREE_CODE (arg1) == MULT_EXPR
10647 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10648 && operand_equal_p (arg0,
10649 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10650 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10651 TREE_OPERAND (arg1, 1), 0))
10652 return
10653 fold_convert_loc (loc, type,
10654 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10655 arg0, TREE_OPERAND (arg1, 1)));
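/* E.g. X - (X / 8) * 8 folds to X % 8; both occurrences must use the
   same divisor for the truncating-division identity to apply.  */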
10656
10657 if (! FLOAT_TYPE_P (type))
10658 {
10659 if (integer_zerop (arg0))
10660 return negate_expr (fold_convert_loc (loc, type, arg1));
10661
10662 /* Fold A - (A & B) into ~B & A. */
10663 if (!TREE_SIDE_EFFECTS (arg0)
10664 && TREE_CODE (arg1) == BIT_AND_EXPR)
10665 {
10666 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10667 {
10668 tree arg10 = fold_convert_loc (loc, type,
10669 TREE_OPERAND (arg1, 0));
10670 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10671 fold_build1_loc (loc, BIT_NOT_EXPR,
10672 type, arg10),
10673 fold_convert_loc (loc, type, arg0));
10674 }
10675 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10676 {
10677 tree arg11 = fold_convert_loc (loc,
10678 type, TREE_OPERAND (arg1, 1));
10679 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10680 fold_build1_loc (loc, BIT_NOT_EXPR,
10681 type, arg11),
10682 fold_convert_loc (loc, type, arg0));
10683 }
10684 }
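/* The fold above holds bitwise: every bit set in A & B is also set
   in A, so subtracting just clears those bits, which is exactly
   ~B & A; e.g. 14 - (14 & 6) == 8 == ~6 & 14.  */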
10685
10686 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10687 any power of 2 minus 1. */
10688 if (TREE_CODE (arg0) == BIT_AND_EXPR
10689 && TREE_CODE (arg1) == BIT_AND_EXPR
10690 && operand_equal_p (TREE_OPERAND (arg0, 0),
10691 TREE_OPERAND (arg1, 0), 0))
10692 {
10693 tree mask0 = TREE_OPERAND (arg0, 1);
10694 tree mask1 = TREE_OPERAND (arg1, 1);
10695 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10696
10697 if (operand_equal_p (tem, mask1, 0))
10698 {
10699 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10700 TREE_OPERAND (arg0, 0), mask1);
10701 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10702 }
10703 }
10704 }
10705
10706 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10707 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10708 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10709
10710 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10711 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10712 (-ARG1 + ARG0) reduces to -ARG1. */
10713 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10714 return negate_expr (fold_convert_loc (loc, type, arg1));
10715
10716 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10717 __complex__ ( x, -y ). This is not the same for SNaNs or if
10718 signed zeros are involved. */
10719 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10720 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10721 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10722 {
10723 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10724 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10725 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10726 bool arg0rz = false, arg0iz = false;
10727 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10728 || (arg0i && (arg0iz = real_zerop (arg0i))))
10729 {
10730 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10731 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10732 if (arg0rz && arg1i && real_zerop (arg1i))
10733 {
10734 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10735 arg1r ? arg1r
10736 : build1 (REALPART_EXPR, rtype, arg1));
10737 tree ip = arg0i ? arg0i
10738 : build1 (IMAGPART_EXPR, rtype, arg0);
10739 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10740 }
10741 else if (arg0iz && arg1r && real_zerop (arg1r))
10742 {
10743 tree rp = arg0r ? arg0r
10744 : build1 (REALPART_EXPR, rtype, arg0);
10745 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10746 arg1i ? arg1i
10747 : build1 (IMAGPART_EXPR, rtype, arg1));
10748 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10749 }
10750 }
10751 }
10752
10753 /* A - B -> A + (-B) if B is easily negatable. */
10754 if (negate_expr_p (arg1)
10755 && ((FLOAT_TYPE_P (type)
10756 /* Avoid this transformation if B is a positive REAL_CST. */
10757 && (TREE_CODE (arg1) != REAL_CST
10758 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10759 || INTEGRAL_TYPE_P (type)))
10760 return fold_build2_loc (loc, PLUS_EXPR, type,
10761 fold_convert_loc (loc, type, arg0),
10762 fold_convert_loc (loc, type,
10763 negate_expr (arg1)));
10764
10765 /* Try folding difference of addresses. */
10766 {
10767 HOST_WIDE_INT diff;
10768
10769 if ((TREE_CODE (arg0) == ADDR_EXPR
10770 || TREE_CODE (arg1) == ADDR_EXPR)
10771 && ptr_difference_const (arg0, arg1, &diff))
10772 return build_int_cst_type (type, diff);
10773 }
10774
10775 /* Fold &a[i] - &a[j] to i-j. */
10776 if (TREE_CODE (arg0) == ADDR_EXPR
10777 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10778 && TREE_CODE (arg1) == ADDR_EXPR
10779 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10780 {
10781 tree tem = fold_addr_of_array_ref_difference (loc, type,
10782 TREE_OPERAND (arg0, 0),
10783 TREE_OPERAND (arg1, 0));
10784 if (tem)
10785 return tem;
10786 }
10787
10788 if (FLOAT_TYPE_P (type)
10789 && flag_unsafe_math_optimizations
10790 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10791 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10792 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10793 return tem;
10794
10795 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10796 one. Make sure the type is not saturating and has the signedness of
10797 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10798 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10799 if ((TREE_CODE (arg0) == MULT_EXPR
10800 || TREE_CODE (arg1) == MULT_EXPR)
10801 && !TYPE_SATURATING (type)
10802 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10803 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10804 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10805 {
10806 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10807 if (tem)
10808 return tem;
10809 }
10810
10811 goto associate;
10812
10813 case MULT_EXPR:
10814 /* (-A) * (-B) -> A * B */
10815 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10816 return fold_build2_loc (loc, MULT_EXPR, type,
10817 fold_convert_loc (loc, type,
10818 TREE_OPERAND (arg0, 0)),
10819 fold_convert_loc (loc, type,
10820 negate_expr (arg1)));
10821 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10822 return fold_build2_loc (loc, MULT_EXPR, type,
10823 fold_convert_loc (loc, type,
10824 negate_expr (arg0)),
10825 fold_convert_loc (loc, type,
10826 TREE_OPERAND (arg1, 0)));
10827
10828 if (! FLOAT_TYPE_P (type))
10829 {
10830 /* Transform x * -1 into -x. Make sure to do the negation
10831 on the original operand with conversions not stripped
10832 because we can only strip non-sign-changing conversions. */
10833 if (integer_minus_onep (arg1))
10834 return fold_convert_loc (loc, type, negate_expr (op0));
10835 /* Transform x * -C into -x * C if x is easily negatable. */
10836 if (TREE_CODE (arg1) == INTEGER_CST
10837 && tree_int_cst_sgn (arg1) == -1
10838 && negate_expr_p (arg0)
10839 && (tem = negate_expr (arg1)) != arg1
10840 && !TREE_OVERFLOW (tem))
10841 return fold_build2_loc (loc, MULT_EXPR, type,
10842 fold_convert_loc (loc, type,
10843 negate_expr (arg0)),
10844 tem);
10845
10846 /* (a * (1 << b)) is (a << b) */
10847 if (TREE_CODE (arg1) == LSHIFT_EXPR
10848 && integer_onep (TREE_OPERAND (arg1, 0)))
10849 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10850 TREE_OPERAND (arg1, 1));
10851 if (TREE_CODE (arg0) == LSHIFT_EXPR
10852 && integer_onep (TREE_OPERAND (arg0, 0)))
10853 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10854 TREE_OPERAND (arg0, 1));
10855
10856 /* (A + A) * C -> A * 2 * C */
10857 if (TREE_CODE (arg0) == PLUS_EXPR
10858 && TREE_CODE (arg1) == INTEGER_CST
10859 && operand_equal_p (TREE_OPERAND (arg0, 0),
10860 TREE_OPERAND (arg0, 1), 0))
10861 return fold_build2_loc (loc, MULT_EXPR, type,
10862 omit_one_operand_loc (loc, type,
10863 TREE_OPERAND (arg0, 0),
10864 TREE_OPERAND (arg0, 1)),
10865 fold_build2_loc (loc, MULT_EXPR, type,
10866 build_int_cst (type, 2), arg1));
10867
10868 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10869 sign-changing only. */
10870 if (TREE_CODE (arg1) == INTEGER_CST
10871 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10872 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10873 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10874
10875 strict_overflow_p = false;
10876 if (TREE_CODE (arg1) == INTEGER_CST
10877 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10878 &strict_overflow_p)))
10879 {
10880 if (strict_overflow_p)
10881 fold_overflow_warning (("assuming signed overflow does not "
10882 "occur when simplifying "
10883 "multiplication"),
10884 WARN_STRICT_OVERFLOW_MISC);
10885 return fold_convert_loc (loc, type, tem);
10886 }
10887
10888 /* Optimize z * conj(z) for integer complex numbers. */
10889 if (TREE_CODE (arg0) == CONJ_EXPR
10890 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10891 return fold_mult_zconjz (loc, type, arg1);
10892 if (TREE_CODE (arg1) == CONJ_EXPR
10893 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10894 return fold_mult_zconjz (loc, type, arg0);
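/* E.g. for z == a + b*i, z * conj(z) == (a + b*i) * (a - b*i)
   == a*a + b*b, a purely real value, so fold_mult_zconjz can
   build the result with a zero imaginary part.  */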
10895 }
10896 else
10897 {
10898 /* Maybe fold x * 0 to 0. The expressions aren't the same
10899 when x is NaN, since x * 0 is also NaN. Nor are they the
10900 same in modes with signed zeros, since multiplying a
10901 negative value by 0 gives -0, not +0. */
10902 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10903 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10904 && real_zerop (arg1))
10905 return omit_one_operand_loc (loc, type, arg1, arg0);
10906 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10907 Likewise for complex arithmetic with signed zeros. */
10908 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10909 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10910 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10911 && real_onep (arg1))
10912 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10913
10914 /* Transform x * -1.0 into -x. */
10915 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10916 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10917 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10918 && real_minus_onep (arg1))
10919 return fold_convert_loc (loc, type, negate_expr (arg0));
10920
10921 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10922 the result for floating point types due to rounding, so it is
10923 applied only if -fassociative-math was specified.  */
10924 if (flag_associative_math
10925 && TREE_CODE (arg0) == RDIV_EXPR
10926 && TREE_CODE (arg1) == REAL_CST
10927 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10928 {
10929 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10930 arg1);
10931 if (tem)
10932 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10933 TREE_OPERAND (arg0, 1));
10934 }
10935
10936 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10937 if (operand_equal_p (arg0, arg1, 0))
10938 {
10939 tree tem = fold_strip_sign_ops (arg0);
10940 if (tem != NULL_TREE)
10941 {
10942 tem = fold_convert_loc (loc, type, tem);
10943 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10944 }
10945 }
10946
10947 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10948 This is not the same for NaNs or if signed zeros are
10949 involved. */
10950 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10951 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10952 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10953 && TREE_CODE (arg1) == COMPLEX_CST
10954 && real_zerop (TREE_REALPART (arg1)))
10955 {
10956 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10957 if (real_onep (TREE_IMAGPART (arg1)))
10958 return
10959 fold_build2_loc (loc, COMPLEX_EXPR, type,
10960 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10961 rtype, arg0)),
10962 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10963 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10964 return
10965 fold_build2_loc (loc, COMPLEX_EXPR, type,
10966 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10967 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10968 rtype, arg0)));
10969 }
10970
10971 /* Optimize z * conj(z) for floating point complex numbers.
10972 Guarded by flag_unsafe_math_optimizations as non-finite
10973 imaginary components don't produce scalar results. */
10974 if (flag_unsafe_math_optimizations
10975 && TREE_CODE (arg0) == CONJ_EXPR
10976 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10977 return fold_mult_zconjz (loc, type, arg1);
10978 if (flag_unsafe_math_optimizations
10979 && TREE_CODE (arg1) == CONJ_EXPR
10980 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10981 return fold_mult_zconjz (loc, type, arg0);
10982
10983 if (flag_unsafe_math_optimizations)
10984 {
10985 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10986 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10987
10988 /* Optimizations of root(...)*root(...). */
10989 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10990 {
10991 tree rootfn, arg;
10992 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10993 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10994
10995 /* Optimize sqrt(x)*sqrt(x) as x. */
10996 if (BUILTIN_SQRT_P (fcode0)
10997 && operand_equal_p (arg00, arg10, 0)
10998 && ! HONOR_SNANS (TYPE_MODE (type)))
10999 return arg00;
11000
11001 /* Optimize root(x)*root(y) as root(x*y). */
11002 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11003 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11004 return build_call_expr_loc (loc, rootfn, 1, arg);
11005 }
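/* Note that sqrt(x)*sqrt(y) -> sqrt(x*y) above needs
   -funsafe-math-optimizations: e.g. for x == -2.0 and y == -8.0 the
   original is NaN * NaN == NaN, while sqrt(16.0) == 4.0.  */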
11006
11007 /* Optimize expN(x)*expN(y) as expN(x+y). */
11008 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11009 {
11010 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11011 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11012 CALL_EXPR_ARG (arg0, 0),
11013 CALL_EXPR_ARG (arg1, 0));
11014 return build_call_expr_loc (loc, expfn, 1, arg);
11015 }
11016
11017 /* Optimizations of pow(...)*pow(...). */
11018 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11019 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11020 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11021 {
11022 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11023 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11024 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11025 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11026
11027 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11028 if (operand_equal_p (arg01, arg11, 0))
11029 {
11030 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11031 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11032 arg00, arg10);
11033 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11034 }
11035
11036 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11037 if (operand_equal_p (arg00, arg10, 0))
11038 {
11039 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11040 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11041 arg01, arg11);
11042 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11043 }
11044 }
11045
11046 /* Optimize tan(x)*cos(x) as sin(x). */
11047 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11048 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11049 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11050 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11051 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11052 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11053 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11054 CALL_EXPR_ARG (arg1, 0), 0))
11055 {
11056 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11057
11058 if (sinfn != NULL_TREE)
11059 return build_call_expr_loc (loc, sinfn, 1,
11060 CALL_EXPR_ARG (arg0, 0));
11061 }
11062
11063 /* Optimize x*pow(x,c) as pow(x,c+1). */
11064 if (fcode1 == BUILT_IN_POW
11065 || fcode1 == BUILT_IN_POWF
11066 || fcode1 == BUILT_IN_POWL)
11067 {
11068 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11069 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11070 if (TREE_CODE (arg11) == REAL_CST
11071 && !TREE_OVERFLOW (arg11)
11072 && operand_equal_p (arg0, arg10, 0))
11073 {
11074 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11075 REAL_VALUE_TYPE c;
11076 tree arg;
11077
11078 c = TREE_REAL_CST (arg11);
11079 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11080 arg = build_real (type, c);
11081 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11082 }
11083 }
11084
11085 /* Optimize pow(x,c)*x as pow(x,c+1). */
11086 if (fcode0 == BUILT_IN_POW
11087 || fcode0 == BUILT_IN_POWF
11088 || fcode0 == BUILT_IN_POWL)
11089 {
11090 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11091 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11092 if (TREE_CODE (arg01) == REAL_CST
11093 && !TREE_OVERFLOW (arg01)
11094 && operand_equal_p (arg1, arg00, 0))
11095 {
11096 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11097 REAL_VALUE_TYPE c;
11098 tree arg;
11099
11100 c = TREE_REAL_CST (arg01);
11101 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11102 arg = build_real (type, c);
11103 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11104 }
11105 }
11106
11107 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11108 if (!in_gimple_form
11109 && optimize
11110 && operand_equal_p (arg0, arg1, 0))
11111 {
11112 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11113
11114 if (powfn)
11115 {
11116 tree arg = build_real (type, dconst2);
11117 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11118 }
11119 }
11120 }
11121 }
11122 goto associate;
11123
11124 case BIT_IOR_EXPR:
11125 bit_ior:
11126 if (operand_equal_p (arg0, arg1, 0))
11127 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11128
11129 /* ~X | X is -1. */
11130 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11131 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11132 {
11133 t1 = build_zero_cst (type);
11134 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11135 return omit_one_operand_loc (loc, type, t1, arg1);
11136 }
11137
11138 /* X | ~X is -1. */
11139 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11140 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11141 {
11142 t1 = build_zero_cst (type);
11143 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11144 return omit_one_operand_loc (loc, type, t1, arg0);
11145 }
11146
11147 /* Canonicalize (X & C1) | C2. */
11148 if (TREE_CODE (arg0) == BIT_AND_EXPR
11149 && TREE_CODE (arg1) == INTEGER_CST
11150 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11151 {
11152 int width = TYPE_PRECISION (type), w;
11153 wide_int c1 = TREE_OPERAND (arg0, 1);
11154 wide_int c2 = arg1;
11155
11156 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11157 if ((c1 & c2) == c1)
11158 return omit_one_operand_loc (loc, type, arg1,
11159 TREE_OPERAND (arg0, 0));
11160
11161 wide_int msk = wi::mask (width, false,
11162 TYPE_PRECISION (TREE_TYPE (arg1)));
11163
11164 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11165 if (msk.and_not (c1 | c2) == 0)
11166 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11167 TREE_OPERAND (arg0, 0), arg1);
11168
11169 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11170 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11171 mode which allows further optimizations. */
11172 c1 &= msk;
11173 c2 &= msk;
11174 wide_int c3 = c1.and_not (c2);
11175 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11176 {
11177 wide_int mask = wi::mask (w, false,
11178 TYPE_PRECISION (type));
11179 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11180 {
11181 c3 = mask;
11182 break;
11183 }
11184 }
11185
11186 if (c3 != c1)
11187 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11188 fold_build2_loc (loc, BIT_AND_EXPR, type,
11189 TREE_OPERAND (arg0, 0),
11190 wide_int_to_tree (type,
11191 c3)),
11192 arg1);
11193 }
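/* Worked example, assuming a 32-bit type: (X & 0x0F) | 0x3C has
   (C1 & C2) != C1 and (C1 | C2) != ~0, so C1 is minimized to
   C1 & ~C2 == 0x03 and the result is (X & 0x03) | 0x3C.  */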
11194
11195 /* (X & Y) | Y is (X, Y). */
11196 if (TREE_CODE (arg0) == BIT_AND_EXPR
11197 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11198 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11199 /* (X & Y) | X is (Y, X). */
11200 if (TREE_CODE (arg0) == BIT_AND_EXPR
11201 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11202 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11203 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11204 /* X | (X & Y) is (Y, X). */
11205 if (TREE_CODE (arg1) == BIT_AND_EXPR
11206 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11207 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11208 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11209 /* X | (Y & X) is (Y, X). */
11210 if (TREE_CODE (arg1) == BIT_AND_EXPR
11211 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11212 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11213 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11214
11215 /* (X & ~Y) | (~X & Y) is X ^ Y */
11216 if (TREE_CODE (arg0) == BIT_AND_EXPR
11217 && TREE_CODE (arg1) == BIT_AND_EXPR)
11218 {
11219 tree a0, a1, l0, l1, n0, n1;
11220
11221 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11222 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11223
11224 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11225 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11226
11227 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11228 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11229
11230 if ((operand_equal_p (n0, a0, 0)
11231 && operand_equal_p (n1, a1, 0))
11232 || (operand_equal_p (n0, a1, 0)
11233 && operand_equal_p (n1, a0, 0)))
11234 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11235 }
11236
11237 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11238 if (t1 != NULL_TREE)
11239 return t1;
11240
11241 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11242
11243 This results in more efficient code for machines without a NAND
11244 instruction. Combine will canonicalize to the first form
11245 which will allow use of NAND instructions provided by the
11246 backend if they exist. */
11247 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11248 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11249 {
11250 return
11251 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11252 build2 (BIT_AND_EXPR, type,
11253 fold_convert_loc (loc, type,
11254 TREE_OPERAND (arg0, 0)),
11255 fold_convert_loc (loc, type,
11256 TREE_OPERAND (arg1, 0))));
11257 }
11258
11259 /* See if this can be simplified into a rotate first. If that
11260 is unsuccessful continue in the association code. */
11261 goto bit_rotate;
11262
11263 case BIT_XOR_EXPR:
11264 if (integer_all_onesp (arg1))
11265 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11266
11267 /* ~X ^ X is -1. */
11268 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11269 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11270 {
11271 t1 = build_zero_cst (type);
11272 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11273 return omit_one_operand_loc (loc, type, t1, arg1);
11274 }
11275
11276 /* X ^ ~X is -1. */
11277 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11278 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11279 {
11280 t1 = build_zero_cst (type);
11281 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11282 return omit_one_operand_loc (loc, type, t1, arg0);
11283 }
11284
11285 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11286 with a constant, and the two constants have no bits in common,
11287 we should treat this as a BIT_IOR_EXPR since this may produce more
11288 simplifications. */
11289 if (TREE_CODE (arg0) == BIT_AND_EXPR
11290 && TREE_CODE (arg1) == BIT_AND_EXPR
11291 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11292 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11293 && wi::bit_and (TREE_OPERAND (arg0, 1),
11294 TREE_OPERAND (arg1, 1)) == 0)
11295 {
11296 code = BIT_IOR_EXPR;
11297 goto bit_ior;
11298 }
11299
11300 /* (X | Y) ^ X -> Y & ~X.  */
11301 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11302 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11303 {
11304 tree t2 = TREE_OPERAND (arg0, 1);
11305 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11306 arg1);
11307 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11308 fold_convert_loc (loc, type, t2),
11309 fold_convert_loc (loc, type, t1));
11310 return t1;
11311 }
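/* The identity holds because the bits of X cancel in the XOR,
   leaving exactly the bits set in Y but not in X; e.g.
   (0xA | 0x6) ^ 0xA == 0xE ^ 0xA == 0x4 == 0x6 & ~0xA.  */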
11312
11313 /* (Y | X) ^ X -> Y & ~X.  */
11314 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11315 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11316 {
11317 tree t2 = TREE_OPERAND (arg0, 0);
11318 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11319 arg1);
11320 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11321 fold_convert_loc (loc, type, t2),
11322 fold_convert_loc (loc, type, t1));
11323 return t1;
11324 }
11325
11326 /* X ^ (X | Y) -> Y & ~X.  */
11327 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11328 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11329 {
11330 tree t2 = TREE_OPERAND (arg1, 1);
11331 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11332 arg0);
11333 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11334 fold_convert_loc (loc, type, t2),
11335 fold_convert_loc (loc, type, t1));
11336 return t1;
11337 }
11338
11339 /* X ^ (Y | X) -> Y & ~X.  */
11340 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11341 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11342 {
11343 tree t2 = TREE_OPERAND (arg1, 0);
11344 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11345 arg0);
11346 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11347 fold_convert_loc (loc, type, t2),
11348 fold_convert_loc (loc, type, t1));
11349 return t1;
11350 }
11351
11352 /* Convert ~X ^ ~Y to X ^ Y. */
11353 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11354 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11355 return fold_build2_loc (loc, code, type,
11356 fold_convert_loc (loc, type,
11357 TREE_OPERAND (arg0, 0)),
11358 fold_convert_loc (loc, type,
11359 TREE_OPERAND (arg1, 0)));
11360
11361 /* Convert ~X ^ C to X ^ ~C. */
11362 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11363 && TREE_CODE (arg1) == INTEGER_CST)
11364 return fold_build2_loc (loc, code, type,
11365 fold_convert_loc (loc, type,
11366 TREE_OPERAND (arg0, 0)),
11367 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11368
11369 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11370 if (TREE_CODE (arg0) == BIT_AND_EXPR
11371 && INTEGRAL_TYPE_P (type)
11372 && integer_onep (TREE_OPERAND (arg0, 1))
11373 && integer_onep (arg1))
11374 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11375 build_zero_cst (TREE_TYPE (arg0)));
11376
11377 /* Fold (X & Y) ^ Y as ~X & Y. */
11378 if (TREE_CODE (arg0) == BIT_AND_EXPR
11379 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11380 {
11381 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11382 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11383 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11384 fold_convert_loc (loc, type, arg1));
11385 }
11386 /* Fold (X & Y) ^ X as ~Y & X. */
11387 if (TREE_CODE (arg0) == BIT_AND_EXPR
11388 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11389 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11390 {
11391 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11392 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11393 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11394 fold_convert_loc (loc, type, arg1));
11395 }
11396 /* Fold X ^ (X & Y) as X & ~Y. */
11397 if (TREE_CODE (arg1) == BIT_AND_EXPR
11398 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11399 {
11400 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11401 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11402 fold_convert_loc (loc, type, arg0),
11403 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11404 }
11405 /* Fold X ^ (Y & X) as ~Y & X. */
11406 if (TREE_CODE (arg1) == BIT_AND_EXPR
11407 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11408 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11409 {
11410 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11411 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11412 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11413 fold_convert_loc (loc, type, arg0));
11414 }
11415
11416 /* See if this can be simplified into a rotate first. If that
11417 is unsuccessful continue in the association code. */
11418 goto bit_rotate;
11419
11420 case BIT_AND_EXPR:
11421 if (integer_all_onesp (arg1))
11422 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11423 if (operand_equal_p (arg0, arg1, 0))
11424 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11425
11426 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11427 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11428 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11429 || (TREE_CODE (arg0) == EQ_EXPR
11430 && integer_zerop (TREE_OPERAND (arg0, 1))))
11431 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11432 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11433
11434 /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11435 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11436 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11437 || (TREE_CODE (arg1) == EQ_EXPR
11438 && integer_zerop (TREE_OPERAND (arg1, 1))))
11439 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11440 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11441
11442 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11443 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11444 && TREE_CODE (arg1) == INTEGER_CST
11445 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11446 {
11447 tree tmp1 = fold_convert_loc (loc, type, arg1);
11448 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11449 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11450 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11451 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11452 return
11453 fold_convert_loc (loc, type,
11454 fold_build2_loc (loc, BIT_IOR_EXPR,
11455 type, tmp2, tmp3));
11456 }
11457
11458 /* (X | Y) & Y is (X, Y). */
11459 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11460 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11461 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11462 /* (X | Y) & X is (Y, X). */
11463 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11464 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11465 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11466 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11467 /* X & (X | Y) is (Y, X). */
11468 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11469 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11470 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11471 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11472 /* X & (Y | X) is (Y, X). */
11473 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11474 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11475 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11476 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11477
11478 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11479 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11480 && INTEGRAL_TYPE_P (type)
11481 && integer_onep (TREE_OPERAND (arg0, 1))
11482 && integer_onep (arg1))
11483 {
11484 tree tem2;
11485 tem = TREE_OPERAND (arg0, 0);
11486 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11487 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11488 tem, tem2);
11489 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11490 build_zero_cst (TREE_TYPE (tem)));
11491 }
11492 /* Fold ~X & 1 as (X & 1) == 0. */
11493 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11494 && INTEGRAL_TYPE_P (type)
11495 && integer_onep (arg1))
11496 {
11497 tree tem2;
11498 tem = TREE_OPERAND (arg0, 0);
11499 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11500 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11501 tem, tem2);
11502 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11503 build_zero_cst (TREE_TYPE (tem)));
11504 }
11505 /* Fold !X & 1 as X == 0. */
11506 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11507 && integer_onep (arg1))
11508 {
11509 tem = TREE_OPERAND (arg0, 0);
11510 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11511 build_zero_cst (TREE_TYPE (tem)));
11512 }
11513
11514 /* Fold (X ^ Y) & Y as ~X & Y. */
11515 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11516 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11517 {
11518 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11519 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11520 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11521 fold_convert_loc (loc, type, arg1));
11522 }
11523 /* Fold (X ^ Y) & X as ~Y & X. */
11524 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11525 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11526 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11527 {
11528 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11529 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11530 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11531 fold_convert_loc (loc, type, arg1));
11532 }
11533 /* Fold X & (X ^ Y) as X & ~Y. */
11534 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11535 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11536 {
11537 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11538 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11539 fold_convert_loc (loc, type, arg0),
11540 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11541 }
11542 /* Fold X & (Y ^ X) as ~Y & X. */
11543 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11544 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11545 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11546 {
11547 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11548 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11549 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11550 fold_convert_loc (loc, type, arg0));
11551 }
11552
11553 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11554 multiple of 1 << CST. */
11555 if (TREE_CODE (arg1) == INTEGER_CST)
11556 {
11557 wide_int cst1 = arg1;
11558 wide_int ncst1 = -cst1;
11559 if ((cst1 & ncst1) == ncst1
11560 && multiple_of_p (type, arg0,
11561 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11562 return fold_convert_loc (loc, type, arg0);
11563 }
11564
11565 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11566 bits from CST2. */
11567 if (TREE_CODE (arg1) == INTEGER_CST
11568 && TREE_CODE (arg0) == MULT_EXPR
11569 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11570 {
11571 wide_int warg1 = arg1;
11572 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11573
11574 if (masked == 0)
11575 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11576 arg0, arg1);
11577 else if (masked != warg1)
11578 {
11579 /* Avoid the transform if arg1 is a mask of some
11580 mode which allows further optimizations. */
11581 int pop = wi::popcount (warg1);
11582 if (!(pop >= BITS_PER_UNIT
11583 && exact_log2 (pop) != -1
11584 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11585 return fold_build2_loc (loc, code, type, op0,
11586 wide_int_to_tree (type, masked));
11587 }
11588 }
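/* E.g. (X * 4) & 5: the product always has its two low bits clear,
   so bit 0 of the mask is dropped and the expression becomes
   (X * 4) & 4.  */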
11589
11590 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11591 ((A & N) + B) & M -> (A + B) & M
11592 Similarly if (N & M) == 0,
11593 ((A | N) + B) & M -> (A + B) & M
11594 and for - instead of + (or unary - instead of +)
11595 and/or ^ instead of |.
11596 If B is constant and (B & M) == 0, fold into A & M. */
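/* Worked example of the first rule, assuming 32-bit operands: with
   M == 0xff and N == 0xffff, ((A & 0xffff) + B) & 0xff folds to
   (A + B) & 0xff, since the bits of A above N cannot affect the low
   byte of the sum (carries only propagate upward).  */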
11597 if (TREE_CODE (arg1) == INTEGER_CST)
11598 {
11599 wide_int cst1 = arg1;
11600 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11601 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11602 && (TREE_CODE (arg0) == PLUS_EXPR
11603 || TREE_CODE (arg0) == MINUS_EXPR
11604 || TREE_CODE (arg0) == NEGATE_EXPR)
11605 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11606 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11607 {
11608 tree pmop[2];
11609 int which = 0;
11610 wide_int cst0;
11611
11612 /* Now we know that arg0 is (C + D) or (C - D) or
11613 -C and arg1 (M) == (1LL << cst) - 1.
11614 Store C into PMOP[0] and D into PMOP[1]. */
11615 pmop[0] = TREE_OPERAND (arg0, 0);
11616 pmop[1] = NULL;
11617 if (TREE_CODE (arg0) != NEGATE_EXPR)
11618 {
11619 pmop[1] = TREE_OPERAND (arg0, 1);
11620 which = 1;
11621 }
11622
11623 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11624 which = -1;
11625
11626 for (; which >= 0; which--)
11627 switch (TREE_CODE (pmop[which]))
11628 {
11629 case BIT_AND_EXPR:
11630 case BIT_IOR_EXPR:
11631 case BIT_XOR_EXPR:
11632 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11633 != INTEGER_CST)
11634 break;
11635 cst0 = TREE_OPERAND (pmop[which], 1);
11636 cst0 &= cst1;
11637 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11638 {
11639 if (cst0 != cst1)
11640 break;
11641 }
11642 else if (cst0 != 0)
11643 break;
11644 /* If C or D is of the form (A & N) where
11645 (N & M) == M, or of the form (A | N) or
11646 (A ^ N) where (N & M) == 0, replace it with A. */
11647 pmop[which] = TREE_OPERAND (pmop[which], 0);
11648 break;
11649 case INTEGER_CST:
11650 /* If C or D is a constant N where (N & M) == 0, it can be
11651 omitted (assumed to be 0).  */
11652 if ((TREE_CODE (arg0) == PLUS_EXPR
11653 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11654 && (cst1 & pmop[which]) == 0)
11655 pmop[which] = NULL;
11656 break;
11657 default:
11658 break;
11659 }
11660
11661 /* Only build anything new if we optimized one or both arguments
11662 above. */
11663 if (pmop[0] != TREE_OPERAND (arg0, 0)
11664 || (TREE_CODE (arg0) != NEGATE_EXPR
11665 && pmop[1] != TREE_OPERAND (arg0, 1)))
11666 {
11667 tree utype = TREE_TYPE (arg0);
11668 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11669 {
11670 /* Perform the operations in a type that has defined
11671 overflow behavior. */
11672 utype = unsigned_type_for (TREE_TYPE (arg0));
11673 if (pmop[0] != NULL)
11674 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11675 if (pmop[1] != NULL)
11676 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11677 }
11678
11679 if (TREE_CODE (arg0) == NEGATE_EXPR)
11680 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11681 else if (TREE_CODE (arg0) == PLUS_EXPR)
11682 {
11683 if (pmop[0] != NULL && pmop[1] != NULL)
11684 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11685 pmop[0], pmop[1]);
11686 else if (pmop[0] != NULL)
11687 tem = pmop[0];
11688 else if (pmop[1] != NULL)
11689 tem = pmop[1];
11690 else
11691 return build_int_cst (type, 0);
11692 }
11693 else if (pmop[0] == NULL)
11694 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11695 else
11696 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11697 pmop[0], pmop[1]);
11698 /* TEM is now the new binary +, - or unary - replacement. */
11699 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11700 fold_convert_loc (loc, utype, arg1));
11701 return fold_convert_loc (loc, type, tem);
11702 }
11703 }
11704 }
11705
11706 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11707 if (t1 != NULL_TREE)
11708 return t1;
11709 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11710 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11711 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11712 {
11713 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11714
11715 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11716 if (mask == -1)
11717 return
11718 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11719 }
11720
11721 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11722
11723 This results in more efficient code for machines without a NOR
11724 instruction. Combine will canonicalize to the first form
11725 which will allow use of NOR instructions provided by the
11726 backend if they exist. */
11727 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11728 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11729 {
11730 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11731 build2 (BIT_IOR_EXPR, type,
11732 fold_convert_loc (loc, type,
11733 TREE_OPERAND (arg0, 0)),
11734 fold_convert_loc (loc, type,
11735 TREE_OPERAND (arg1, 0))));
11736 }
11737
11738 /* If arg0 is derived from the address of an object or function, we may
11739 be able to fold this expression using the object or function's
11740 alignment. */
11741 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11742 {
11743 unsigned HOST_WIDE_INT modulus, residue;
11744 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11745
11746 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11747 integer_onep (arg1));
11748
11749 /* This works because modulus is a power of 2. If this weren't the
11750 case, we'd have to replace it by its greatest power-of-2
11751 divisor: modulus & -modulus. */
11752 if (low < modulus)
11753 return build_int_cst (type, residue & low);
11754 }
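/* [Editorial sketch, not part of fold-const.c]  What the alignment-based
   fold above buys us, in hypothetical C.  An 8-aligned object has
   modulus 8 and residue 0, so masking its address with any constant
   below 8 yields a compile-time constant (here 0).  Assumes pointers
   fit in unsigned long, as on typical GCC targets.  */
static unsigned long
aligned_address_mask (void)
{
  static int obj __attribute__ ((aligned (8)));
  return (unsigned long) &obj & 7;	/* folds to 0 */
}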
11755
11756 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11757 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11758 if the new mask might be further optimized. */
11759 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11760 || TREE_CODE (arg0) == RSHIFT_EXPR)
11761 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11762 && TREE_CODE (arg1) == INTEGER_CST
11763 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11764 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11765 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11766 < TYPE_PRECISION (TREE_TYPE (arg0))))
11767 {
11768 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11769 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11770 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11771 tree shift_type = TREE_TYPE (arg0);
11772
11773 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11774 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11775 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11776 && TYPE_PRECISION (TREE_TYPE (arg0))
11777 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11778 {
11779 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11780 tree arg00 = TREE_OPERAND (arg0, 0);
11781 /* See if more bits can be proven as zero because of
11782 zero extension. */
11783 if (TREE_CODE (arg00) == NOP_EXPR
11784 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11785 {
11786 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11787 if (TYPE_PRECISION (inner_type)
11788 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11789 && TYPE_PRECISION (inner_type) < prec)
11790 {
11791 prec = TYPE_PRECISION (inner_type);
11792 /* See if we can shorten the right shift. */
11793 if (shiftc < prec)
11794 shift_type = inner_type;
11795 /* Otherwise X >> C1 is all zeros, so we'll optimize
11796 it into (X, 0) later on by making sure zerobits
11797 is all ones. */
11798 }
11799 }
11800 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11801 if (shiftc < prec)
11802 {
11803 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11804 zerobits <<= prec - shiftc;
11805 }
11806		  /* For an arithmetic shift, if the sign bit could be set,
11807		     zerobits can actually contain sign bits, so no transformation
11808		     is possible unless MASK masks them all away.  In that case
11809		     the shift needs to be converted into a logical shift.  */
11810 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11811 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11812 {
11813 if ((mask & zerobits) == 0)
11814 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11815 else
11816 zerobits = 0;
11817 }
11818 }
11819
11820 /* ((X << 16) & 0xff00) is (X, 0). */
11821 if ((mask & zerobits) == mask)
11822 return omit_one_operand_loc (loc, type,
11823 build_int_cst (type, 0), arg0);
11824
11825 newmask = mask | zerobits;
11826 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11827 {
11828 /* Only do the transformation if NEWMASK is some integer
11829 mode's mask. */
11830 for (prec = BITS_PER_UNIT;
11831 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11832 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11833 break;
11834 if (prec < HOST_BITS_PER_WIDE_INT
11835 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11836 {
11837 tree newmaskt;
11838
11839 if (shift_type != TREE_TYPE (arg0))
11840 {
11841 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11842 fold_convert_loc (loc, shift_type,
11843 TREE_OPERAND (arg0, 0)),
11844 TREE_OPERAND (arg0, 1));
11845 tem = fold_convert_loc (loc, type, tem);
11846 }
11847 else
11848 tem = op0;
11849 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11850 if (!tree_int_cst_equal (newmaskt, arg1))
11851 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11852 }
11853 }
11854 }
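/* [Editorial sketch, not part of fold-const.c]  A hypothetical case of
   the shift-and-mask fold above, assuming 32-bit unsigned int.  After
   x << 16 the low 16 bits are known zero (ZEROBITS == 0xffff), so a
   mask selecting only those bits makes the whole expression 0.  */
static unsigned int
shift_then_mask (unsigned int x)
{
  return (x << 16) & 0xff00;	/* (MASK & ZEROBITS) == MASK: folds to 0 */
}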
11855
11856 goto associate;
11857
11858 case RDIV_EXPR:
11859 /* Don't touch a floating-point divide by zero unless the mode
11860 of the constant can represent infinity. */
11861 if (TREE_CODE (arg1) == REAL_CST
11862 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11863 && real_zerop (arg1))
11864 return NULL_TREE;
11865
11866 /* Optimize A / A to 1.0 if we don't care about
11867 NaNs or Infinities. Skip the transformation
11868 for non-real operands. */
11869 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11870 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11871 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11872 && operand_equal_p (arg0, arg1, 0))
11873 {
11874 tree r = build_real (TREE_TYPE (arg0), dconst1);
11875
11876 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11877 }
11878
11879 /* The complex version of the above A / A optimization. */
11880 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11881 && operand_equal_p (arg0, arg1, 0))
11882 {
11883 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11884 if (! HONOR_NANS (TYPE_MODE (elem_type))
11885 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11886 {
11887 tree r = build_real (elem_type, dconst1);
11888 /* omit_two_operands will call fold_convert for us. */
11889 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11890 }
11891 }
11892
11893 /* (-A) / (-B) -> A / B */
11894 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11895 return fold_build2_loc (loc, RDIV_EXPR, type,
11896 TREE_OPERAND (arg0, 0),
11897 negate_expr (arg1));
11898 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11899 return fold_build2_loc (loc, RDIV_EXPR, type,
11900 negate_expr (arg0),
11901 TREE_OPERAND (arg1, 0));
11902
11903 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11904 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11905 && real_onep (arg1))
11906 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11907
11908 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11909 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11910 && real_minus_onep (arg1))
11911 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11912 negate_expr (arg0)));
11913
11914 /* If ARG1 is a constant, we can convert this to a multiply by the
11915 reciprocal. This does not have the same rounding properties,
11916 so only do this if -freciprocal-math. We can actually
11917 always safely do it if ARG1 is a power of two, but it's hard to
11918 tell if it is or not in a portable manner. */
11919 if (optimize
11920 && (TREE_CODE (arg1) == REAL_CST
11921 || (TREE_CODE (arg1) == COMPLEX_CST
11922 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11923 || (TREE_CODE (arg1) == VECTOR_CST
11924 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11925 {
11926 if (flag_reciprocal_math
11927 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11928 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11929 /* Find the reciprocal if optimizing and the result is exact.
11930 TODO: Complex reciprocal not implemented. */
11931 if (TREE_CODE (arg1) != COMPLEX_CST)
11932 {
11933 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11934
11935 if (inverse)
11936 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11937 }
11938 }
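/* [Editorial sketch, not part of fold-const.c]  The reciprocal rewrite
   above in hypothetical C: x / 4.0 becomes x * 0.25.  Since 0.25 is
   exactly representable, this particular case is always safe; for
   non-power-of-two constants it is gated on -freciprocal-math.  */
static double
div_to_reciprocal (double x)
{
  return x * 0.25;	/* replaces x / 4.0 */
}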
11939 /* Convert A/B/C to A/(B*C). */
11940 if (flag_reciprocal_math
11941 && TREE_CODE (arg0) == RDIV_EXPR)
11942 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11943 fold_build2_loc (loc, MULT_EXPR, type,
11944 TREE_OPERAND (arg0, 1), arg1));
11945
11946 /* Convert A/(B/C) to (A/B)*C. */
11947 if (flag_reciprocal_math
11948 && TREE_CODE (arg1) == RDIV_EXPR)
11949 return fold_build2_loc (loc, MULT_EXPR, type,
11950 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11951 TREE_OPERAND (arg1, 0)),
11952 TREE_OPERAND (arg1, 1));
11953
11954 /* Convert C1/(X*C2) into (C1/C2)/X. */
11955 if (flag_reciprocal_math
11956 && TREE_CODE (arg1) == MULT_EXPR
11957 && TREE_CODE (arg0) == REAL_CST
11958 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11959 {
11960 tree tem = const_binop (RDIV_EXPR, arg0,
11961 TREE_OPERAND (arg1, 1));
11962 if (tem)
11963 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11964 TREE_OPERAND (arg1, 0));
11965 }
11966
11967 if (flag_unsafe_math_optimizations)
11968 {
11969 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11970 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11971
11972 /* Optimize sin(x)/cos(x) as tan(x). */
11973 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11974 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11975 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11976 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11977 CALL_EXPR_ARG (arg1, 0), 0))
11978 {
11979 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11980
11981 if (tanfn != NULL_TREE)
11982 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11983 }
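/* [Editorial sketch, not part of fold-const.c]  Under
   -funsafe-math-optimizations the division of the two libm calls below
   collapses into a single call; the usual prototypes are assumed.  */
double sin (double), cos (double), tan (double);

static double
sin_over_cos (double x)
{
  return tan (x);	/* replaces sin (x) / cos (x) */
}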
11984
11985 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11986 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11987 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11988 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11989 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11990 CALL_EXPR_ARG (arg1, 0), 0))
11991 {
11992 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11993
11994 if (tanfn != NULL_TREE)
11995 {
11996 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11997 CALL_EXPR_ARG (arg0, 0));
11998 return fold_build2_loc (loc, RDIV_EXPR, type,
11999 build_real (type, dconst1), tmp);
12000 }
12001 }
12002
12003 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12004 NaNs or Infinities. */
12005 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12006 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12007 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12008 {
12009 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12010 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12011
12012 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12013 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12014 && operand_equal_p (arg00, arg01, 0))
12015 {
12016 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12017
12018 if (cosfn != NULL_TREE)
12019 return build_call_expr_loc (loc, cosfn, 1, arg00);
12020 }
12021 }
12022
12023 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12024 NaNs or Infinities. */
12025 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12026 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12027 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12028 {
12029 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12030 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12031
12032 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12033 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12034 && operand_equal_p (arg00, arg01, 0))
12035 {
12036 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12037
12038 if (cosfn != NULL_TREE)
12039 {
12040 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12041 return fold_build2_loc (loc, RDIV_EXPR, type,
12042 build_real (type, dconst1),
12043 tmp);
12044 }
12045 }
12046 }
12047
12048 /* Optimize pow(x,c)/x as pow(x,c-1). */
12049 if (fcode0 == BUILT_IN_POW
12050 || fcode0 == BUILT_IN_POWF
12051 || fcode0 == BUILT_IN_POWL)
12052 {
12053 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12054 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12055 if (TREE_CODE (arg01) == REAL_CST
12056 && !TREE_OVERFLOW (arg01)
12057 && operand_equal_p (arg1, arg00, 0))
12058 {
12059 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12060 REAL_VALUE_TYPE c;
12061 tree arg;
12062
12063 c = TREE_REAL_CST (arg01);
12064 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12065 arg = build_real (type, c);
12066 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12067 }
12068 }
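/* [Editorial sketch, not part of fold-const.c]  The pow(x,c)/x rewrite
   above in hypothetical C: one libm call and no division remain.  */
double pow (double, double);

static double
pow_div_base (double x)
{
  return pow (x, 2.5);	/* replaces pow (x, 3.5) / x */
}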
12069
12070 /* Optimize a/root(b/c) into a*root(c/b). */
12071 if (BUILTIN_ROOT_P (fcode1))
12072 {
12073 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12074
12075 if (TREE_CODE (rootarg) == RDIV_EXPR)
12076 {
12077 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12078 tree b = TREE_OPERAND (rootarg, 0);
12079 tree c = TREE_OPERAND (rootarg, 1);
12080
12081 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12082
12083 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12084 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12085 }
12086 }
12087
12088 /* Optimize x/expN(y) into x*expN(-y). */
12089 if (BUILTIN_EXPONENT_P (fcode1))
12090 {
12091 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12092 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12093 arg1 = build_call_expr_loc (loc,
12094 expfn, 1,
12095 fold_convert_loc (loc, type, arg));
12096 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12097 }
12098
12099 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12100 if (fcode1 == BUILT_IN_POW
12101 || fcode1 == BUILT_IN_POWF
12102 || fcode1 == BUILT_IN_POWL)
12103 {
12104 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12105 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12106 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12107 tree neg11 = fold_convert_loc (loc, type,
12108 negate_expr (arg11));
12109 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12110 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12111 }
12112 }
12113 return NULL_TREE;
12114
12115 case TRUNC_DIV_EXPR:
12116 /* Optimize (X & (-A)) / A where A is a power of 2,
12117	 to X >> log2(A).  */
12118 if (TREE_CODE (arg0) == BIT_AND_EXPR
12119 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12120 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12121 {
12122 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12123 arg1, TREE_OPERAND (arg0, 1));
12124 if (sum && integer_zerop (sum)) {
12125 tree pow2 = build_int_cst (integer_type_node,
12126 wi::exact_log2 (arg1));
12127 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12128 TREE_OPERAND (arg0, 0), pow2);
12129 }
12130 }
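/* [Editorial sketch, not part of fold-const.c]  The fold above in
   hypothetical C: x & -8 rounds x down to a multiple of 8, so the
   signed division by 8 is exact and equals an arithmetic shift.  */
static int
masked_exact_div (int x)
{
  return x >> 3;	/* replaces (x & -8) / 8 */
}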
12131
12132 /* Fall through */
12133
12134 case FLOOR_DIV_EXPR:
12135 /* Simplify A / (B << N) where A and B are positive and B is
12136 a power of 2, to A >> (N + log2(B)). */
12137 strict_overflow_p = false;
12138 if (TREE_CODE (arg1) == LSHIFT_EXPR
12139 && (TYPE_UNSIGNED (type)
12140 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12141 {
12142 tree sval = TREE_OPERAND (arg1, 0);
12143 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12144 {
12145 tree sh_cnt = TREE_OPERAND (arg1, 1);
12146 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12147 wi::exact_log2 (sval));
12148
12149 if (strict_overflow_p)
12150 fold_overflow_warning (("assuming signed overflow does not "
12151 "occur when simplifying A / (B << N)"),
12152 WARN_STRICT_OVERFLOW_MISC);
12153
12154 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12155 sh_cnt, pow2);
12156 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12157 fold_convert_loc (loc, type, arg0), sh_cnt);
12158 }
12159 }
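/* [Editorial sketch, not part of fold-const.c]  A / (B << N) with
   unsigned A and power-of-two B becomes a single right shift; here
   B == 4, so log2(B) == 2.  Assumes n + 2 stays below the precision.  */
static unsigned int
div_by_shifted_pow2 (unsigned int a, unsigned int n)
{
  return a >> (n + 2);	/* replaces a / (4u << n) */
}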
12160
12161 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12162 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12163 if (INTEGRAL_TYPE_P (type)
12164 && TYPE_UNSIGNED (type)
12165 && code == FLOOR_DIV_EXPR)
12166 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12167
12168 /* Fall through */
12169
12170 case ROUND_DIV_EXPR:
12171 case CEIL_DIV_EXPR:
12172 case EXACT_DIV_EXPR:
12173 if (integer_zerop (arg1))
12174 return NULL_TREE;
12175 /* X / -1 is -X. */
12176 if (!TYPE_UNSIGNED (type)
12177 && TREE_CODE (arg1) == INTEGER_CST
12178 && wi::eq_p (arg1, -1))
12179 return fold_convert_loc (loc, type, negate_expr (arg0));
12180
12181 /* Convert -A / -B to A / B when the type is signed and overflow is
12182 undefined. */
12183 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12184 && TREE_CODE (arg0) == NEGATE_EXPR
12185 && negate_expr_p (arg1))
12186 {
12187 if (INTEGRAL_TYPE_P (type))
12188 fold_overflow_warning (("assuming signed overflow does not occur "
12189 "when distributing negation across "
12190 "division"),
12191 WARN_STRICT_OVERFLOW_MISC);
12192 return fold_build2_loc (loc, code, type,
12193 fold_convert_loc (loc, type,
12194 TREE_OPERAND (arg0, 0)),
12195 fold_convert_loc (loc, type,
12196 negate_expr (arg1)));
12197 }
12198 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12199 && TREE_CODE (arg1) == NEGATE_EXPR
12200 && negate_expr_p (arg0))
12201 {
12202 if (INTEGRAL_TYPE_P (type))
12203 fold_overflow_warning (("assuming signed overflow does not occur "
12204 "when distributing negation across "
12205 "division"),
12206 WARN_STRICT_OVERFLOW_MISC);
12207 return fold_build2_loc (loc, code, type,
12208 fold_convert_loc (loc, type,
12209 negate_expr (arg0)),
12210 fold_convert_loc (loc, type,
12211 TREE_OPERAND (arg1, 0)));
12212 }
12213
12214 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12215 operation, EXACT_DIV_EXPR.
12216
12217 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12218	 At one time others generated faster code; it's not clear if they do
12219	 after the last round of changes to the DIV code in expmed.c.  */
12220 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12221 && multiple_of_p (type, arg0, arg1))
12222 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12223
12224 strict_overflow_p = false;
12225 if (TREE_CODE (arg1) == INTEGER_CST
12226 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12227 &strict_overflow_p)))
12228 {
12229 if (strict_overflow_p)
12230 fold_overflow_warning (("assuming signed overflow does not occur "
12231 "when simplifying division"),
12232 WARN_STRICT_OVERFLOW_MISC);
12233 return fold_convert_loc (loc, type, tem);
12234 }
12235
12236 return NULL_TREE;
12237
12238 case CEIL_MOD_EXPR:
12239 case FLOOR_MOD_EXPR:
12240 case ROUND_MOD_EXPR:
12241 case TRUNC_MOD_EXPR:
12242 /* X % -1 is zero. */
12243 if (!TYPE_UNSIGNED (type)
12244 && TREE_CODE (arg1) == INTEGER_CST
12245 && wi::eq_p (arg1, -1))
12246 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12247
12248 /* X % -C is the same as X % C. */
12249 if (code == TRUNC_MOD_EXPR
12250 && TYPE_SIGN (type) == SIGNED
12251 && TREE_CODE (arg1) == INTEGER_CST
12252 && !TREE_OVERFLOW (arg1)
12253 && wi::neg_p (arg1)
12254 && !TYPE_OVERFLOW_TRAPS (type)
12255 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12256 && !sign_bit_p (arg1, arg1))
12257 return fold_build2_loc (loc, code, type,
12258 fold_convert_loc (loc, type, arg0),
12259 fold_convert_loc (loc, type,
12260 negate_expr (arg1)));
12261
12262 /* X % -Y is the same as X % Y. */
12263 if (code == TRUNC_MOD_EXPR
12264 && !TYPE_UNSIGNED (type)
12265 && TREE_CODE (arg1) == NEGATE_EXPR
12266 && !TYPE_OVERFLOW_TRAPS (type))
12267 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12268 fold_convert_loc (loc, type,
12269 TREE_OPERAND (arg1, 0)));
12270
12271 strict_overflow_p = false;
12272 if (TREE_CODE (arg1) == INTEGER_CST
12273 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12274 &strict_overflow_p)))
12275 {
12276 if (strict_overflow_p)
12277 fold_overflow_warning (("assuming signed overflow does not occur "
12278 "when simplifying modulus"),
12279 WARN_STRICT_OVERFLOW_MISC);
12280 return fold_convert_loc (loc, type, tem);
12281 }
12282
12283 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12284 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12285 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12286 && (TYPE_UNSIGNED (type)
12287 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12288 {
12289 tree c = arg1;
12290 /* Also optimize A % (C << N) where C is a power of 2,
12291 to A & ((C << N) - 1). */
12292 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12293 c = TREE_OPERAND (arg1, 0);
12294
12295 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12296 {
12297 tree mask
12298 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12299 build_int_cst (TREE_TYPE (arg1), 1));
12300 if (strict_overflow_p)
12301 fold_overflow_warning (("assuming signed overflow does not "
12302 "occur when simplifying "
12303 "X % (power of two)"),
12304 WARN_STRICT_OVERFLOW_MISC);
12305 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12306 fold_convert_loc (loc, type, arg0),
12307 fold_convert_loc (loc, type, mask));
12308 }
12309 }
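/* [Editorial sketch, not part of fold-const.c]  The remainder fold above
   in hypothetical C: for unsigned (or provably nonnegative) operands,
   the remainder by a power of two is just the low bits.  */
static unsigned int
mod_pow2 (unsigned int x)
{
  return x & 7;	/* replaces x % 8 */
}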
12310
12311 return NULL_TREE;
12312
12313 case LROTATE_EXPR:
12314 case RROTATE_EXPR:
12315 if (integer_all_onesp (arg0))
12316 return omit_one_operand_loc (loc, type, arg0, arg1);
12317 goto shift;
12318
12319 case RSHIFT_EXPR:
12320 /* Optimize -1 >> x for arithmetic right shifts. */
12321 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12322 && tree_expr_nonnegative_p (arg1))
12323 return omit_one_operand_loc (loc, type, arg0, arg1);
12324 /* ... fall through ... */
12325
12326 case LSHIFT_EXPR:
12327 shift:
12328 if (integer_zerop (arg1))
12329 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12330 if (integer_zerop (arg0))
12331 return omit_one_operand_loc (loc, type, arg0, arg1);
12332
12333 /* Prefer vector1 << scalar to vector1 << vector2
12334 if vector2 is uniform. */
12335 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12336 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12337 return fold_build2_loc (loc, code, type, op0, tem);
12338
12339	      /* Since a negative shift count is not well-defined,
12340		 don't try to compute it in the compiler.  */
12341 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12342 return NULL_TREE;
12343
12344 prec = element_precision (type);
12345
12346 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12347 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12348 && tree_to_uhwi (arg1) < prec
12349 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12350 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12351 {
12352 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12353 + tree_to_uhwi (arg1));
12354
12355 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12356 being well defined. */
12357 if (low >= prec)
12358 {
12359 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12360 low = low % prec;
12361 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12362 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12363 TREE_OPERAND (arg0, 0));
12364 else
12365 low = prec - 1;
12366 }
12367
12368 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12369 build_int_cst (TREE_TYPE (arg1), low));
12370 }
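/* [Editorial sketch, not part of fold-const.c]  The shift-combining fold
   above in hypothetical C, assuming 32-bit unsigned int: the counts add
   because 3 + 5 is still below the precision.  */
static unsigned int
combine_shift_counts (unsigned int x)
{
  return x << 8;	/* replaces (x << 3) << 5 */
}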
12371
12372 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12373 into x & ((unsigned)-1 >> c) for unsigned types. */
12374 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12375 || (TYPE_UNSIGNED (type)
12376 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12377 && tree_fits_uhwi_p (arg1)
12378 && tree_to_uhwi (arg1) < prec
12379 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12380 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12381 {
12382 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12383 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12384 tree lshift;
12385 tree arg00;
12386
12387 if (low0 == low1)
12388 {
12389 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12390
12391 lshift = build_minus_one_cst (type);
12392 lshift = const_binop (code, lshift, arg1);
12393
12394 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12395 }
12396 }
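/* [Editorial sketch, not part of fold-const.c]  For unsigned x the
   shift pair only clears the high bits, so it folds into a mask;
   assumes 32-bit unsigned int.  */
static unsigned int
shift_pair_to_mask (unsigned int x)
{
  return x & (~0u >> 8);	/* replaces (x << 8) >> 8 */
}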
12397
12398 /* Rewrite an LROTATE_EXPR by a constant into an
12399 RROTATE_EXPR by a new constant. */
12400 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12401 {
12402 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12403 tem = const_binop (MINUS_EXPR, tem, arg1);
12404 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12405 }
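/* [Editorial sketch, not part of fold-const.c]  The canonicalization
   above, written with the usual C rotate idiom for 32-bit values:
   a left rotate by 8 is the same bits as a right rotate by 24.  */
static unsigned int
rotl8_as_rotr24 (unsigned int x)
{
  return (x >> 24) | (x << 8);	/* rotr (x, 24) == rotl (x, 8) */
}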
12406
12407 /* If we have a rotate of a bit operation with the rotate count and
12408 the second operand of the bit operation both constant,
12409 permute the two operations. */
12410 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12411 && (TREE_CODE (arg0) == BIT_AND_EXPR
12412 || TREE_CODE (arg0) == BIT_IOR_EXPR
12413 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12414 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12415 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12416 fold_build2_loc (loc, code, type,
12417 TREE_OPERAND (arg0, 0), arg1),
12418 fold_build2_loc (loc, code, type,
12419 TREE_OPERAND (arg0, 1), arg1));
12420
12421      /* Two consecutive rotates adding up to some integer
12422	 multiple of the precision of the type can be ignored.  */
12423 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12424 && TREE_CODE (arg0) == RROTATE_EXPR
12425 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12426 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12427 prec) == 0)
12428 return TREE_OPERAND (arg0, 0);
12429
12430 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12431 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12432 if the latter can be further optimized. */
12433 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12434 && TREE_CODE (arg0) == BIT_AND_EXPR
12435 && TREE_CODE (arg1) == INTEGER_CST
12436 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12437 {
12438 tree mask = fold_build2_loc (loc, code, type,
12439 fold_convert_loc (loc, type,
12440 TREE_OPERAND (arg0, 1)),
12441 arg1);
12442 tree shift = fold_build2_loc (loc, code, type,
12443 fold_convert_loc (loc, type,
12444 TREE_OPERAND (arg0, 0)),
12445 arg1);
12446 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12447 if (tem)
12448 return tem;
12449 }
12450
12451 return NULL_TREE;
12452
12453 case MIN_EXPR:
12454 if (operand_equal_p (arg0, arg1, 0))
12455 return omit_one_operand_loc (loc, type, arg0, arg1);
12456 if (INTEGRAL_TYPE_P (type)
12457 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12458 return omit_one_operand_loc (loc, type, arg1, arg0);
12459 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12460 if (tem)
12461 return tem;
12462 goto associate;
12463
12464 case MAX_EXPR:
12465 if (operand_equal_p (arg0, arg1, 0))
12466 return omit_one_operand_loc (loc, type, arg0, arg1);
12467 if (INTEGRAL_TYPE_P (type)
12468 && TYPE_MAX_VALUE (type)
12469 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12470 return omit_one_operand_loc (loc, type, arg1, arg0);
12471 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12472 if (tem)
12473 return tem;
12474 goto associate;
12475
12476 case TRUTH_ANDIF_EXPR:
12477 /* Note that the operands of this must be ints
12478 and their values must be 0 or 1.
12479 ("true" is a fixed value perhaps depending on the language.) */
12480 /* If first arg is constant zero, return it. */
12481 if (integer_zerop (arg0))
12482 return fold_convert_loc (loc, type, arg0);
12483 case TRUTH_AND_EXPR:
12484 /* If either arg is constant true, drop it. */
12485 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12486 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12487 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12488 /* Preserve sequence points. */
12489 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12490 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12491 /* If second arg is constant zero, result is zero, but first arg
12492 must be evaluated. */
12493 if (integer_zerop (arg1))
12494 return omit_one_operand_loc (loc, type, arg1, arg0);
12495 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12496 case will be handled here. */
12497 if (integer_zerop (arg0))
12498 return omit_one_operand_loc (loc, type, arg0, arg1);
12499
12500 /* !X && X is always false. */
12501 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12502 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12503 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12504 /* X && !X is always false. */
12505 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12506 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12507 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12508
12509 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12510 means A >= Y && A != MAX, but in this case we know that
12511 A < X <= MAX. */
12512
12513 if (!TREE_SIDE_EFFECTS (arg0)
12514 && !TREE_SIDE_EFFECTS (arg1))
12515 {
12516 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12517 if (tem && !operand_equal_p (tem, arg0, 0))
12518 return fold_build2_loc (loc, code, type, tem, arg1);
12519
12520 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12521 if (tem && !operand_equal_p (tem, arg1, 0))
12522 return fold_build2_loc (loc, code, type, arg0, tem);
12523 }
12524
12525 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12526 != NULL_TREE)
12527 return tem;
12528
12529 return NULL_TREE;
12530
12531 case TRUTH_ORIF_EXPR:
12532 /* Note that the operands of this must be ints
12533 and their values must be 0 or true.
12534 ("true" is a fixed value perhaps depending on the language.) */
12535 /* If first arg is constant true, return it. */
12536 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12537 return fold_convert_loc (loc, type, arg0);
12538 case TRUTH_OR_EXPR:
12539 /* If either arg is constant zero, drop it. */
12540 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12541 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12542 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12543 /* Preserve sequence points. */
12544 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12545 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12546 /* If second arg is constant true, result is true, but we must
12547 evaluate first arg. */
12548 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12549 return omit_one_operand_loc (loc, type, arg1, arg0);
12550 /* Likewise for first arg, but note this only occurs here for
12551 TRUTH_OR_EXPR. */
12552 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12553 return omit_one_operand_loc (loc, type, arg0, arg1);
12554
12555 /* !X || X is always true. */
12556 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12557 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12558 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12559 /* X || !X is always true. */
12560 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12561 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12562 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12563
12564 /* (X && !Y) || (!X && Y) is X ^ Y */
12565 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12566 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12567 {
12568 tree a0, a1, l0, l1, n0, n1;
12569
12570 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12571 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12572
12573 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12574 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12575
12576 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12577 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12578
12579 if ((operand_equal_p (n0, a0, 0)
12580 && operand_equal_p (n1, a1, 0))
12581 || (operand_equal_p (n0, a1, 0)
12582 && operand_equal_p (n1, a0, 0)))
12583 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12584 }
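/* [Editorial sketch, not part of fold-const.c]  The boolean identity
   used above: for 0/1 truth values, (x && !y) || (!x && y) is exactly
   the exclusive or of x and y.  */
static int
truth_xor (int x, int y)
{
  return x ^ y;	/* replaces (x && !y) || (!x && y) for 0/1 inputs */
}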
12585
12586 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12587 != NULL_TREE)
12588 return tem;
12589
12590 return NULL_TREE;
12591
12592 case TRUTH_XOR_EXPR:
12593 /* If the second arg is constant zero, drop it. */
12594 if (integer_zerop (arg1))
12595 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12596 /* If the second arg is constant true, this is a logical inversion. */
12597 if (integer_onep (arg1))
12598 {
12599 tem = invert_truthvalue_loc (loc, arg0);
12600 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12601 }
12602 /* Identical arguments cancel to zero. */
12603 if (operand_equal_p (arg0, arg1, 0))
12604 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12605
12606 /* !X ^ X is always true. */
12607 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12608 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12609 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12610
12611 /* X ^ !X is always true. */
12612 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12613 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12614 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12615
12616 return NULL_TREE;
12617
12618 case EQ_EXPR:
12619 case NE_EXPR:
12620 STRIP_NOPS (arg0);
12621 STRIP_NOPS (arg1);
12622
12623 tem = fold_comparison (loc, code, type, op0, op1);
12624 if (tem != NULL_TREE)
12625 return tem;
12626
12627 /* bool_var != 0 becomes bool_var. */
12628 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12629 && code == NE_EXPR)
12630 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12631
12632 /* bool_var == 1 becomes bool_var. */
12633 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12634 && code == EQ_EXPR)
12635 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12636
12637 /* bool_var != 1 becomes !bool_var. */
12638 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12639 && code == NE_EXPR)
12640 return fold_convert_loc (loc, type,
12641 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12642 TREE_TYPE (arg0), arg0));
12643
12644 /* bool_var == 0 becomes !bool_var. */
12645 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12646 && code == EQ_EXPR)
12647 return fold_convert_loc (loc, type,
12648 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12649 TREE_TYPE (arg0), arg0));
12650
12651 /* !exp != 0 becomes !exp */
12652 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12653 && code == NE_EXPR)
12654 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12655
12656 /* If this is an equality comparison of the address of two non-weak,
12657 unaliased symbols neither of which are extern (since we do not
12658 have access to attributes for externs), then we know the result. */
12659 if (TREE_CODE (arg0) == ADDR_EXPR
12660 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12661 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12662 && ! lookup_attribute ("alias",
12663 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12664 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12665 && TREE_CODE (arg1) == ADDR_EXPR
12666 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12667 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12668 && ! lookup_attribute ("alias",
12669 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12670 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12671 {
12672 /* We know that we're looking at the address of two
12673 non-weak, unaliased, static _DECL nodes.
12674
12675 It is both wasteful and incorrect to call operand_equal_p
12676 to compare the two ADDR_EXPR nodes. It is wasteful in that
12677 all we need to do is test pointer equality for the arguments
12678 to the two ADDR_EXPR nodes. It is incorrect to use
12679 operand_equal_p as that function is NOT equivalent to a
12680 C equality test. It can in fact return false for two
12681 objects which would test as equal using the C equality
12682 operator. */
12683 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12684 return constant_boolean_node (equal
12685 ? code == EQ_EXPR : code != EQ_EXPR,
12686 type);
12687 }
12688
12689 /* Similarly for a NEGATE_EXPR. */
12690 if (TREE_CODE (arg0) == NEGATE_EXPR
12691 && TREE_CODE (arg1) == INTEGER_CST
12692 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12693 arg1)))
12694 && TREE_CODE (tem) == INTEGER_CST
12695 && !TREE_OVERFLOW (tem))
12696 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12697
12698 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12699 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12700 && TREE_CODE (arg1) == INTEGER_CST
12701 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12702 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12703 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12704 fold_convert_loc (loc,
12705 TREE_TYPE (arg0),
12706 arg1),
12707 TREE_OPERAND (arg0, 1)));
12708
12709 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12710 if ((TREE_CODE (arg0) == PLUS_EXPR
12711 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12712 || TREE_CODE (arg0) == MINUS_EXPR)
12713 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12714 0)),
12715 arg1, 0)
12716 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12717 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12718 {
12719 tree val = TREE_OPERAND (arg0, 1);
12720 return omit_two_operands_loc (loc, type,
12721 fold_build2_loc (loc, code, type,
12722 val,
12723 build_int_cst (TREE_TYPE (val),
12724 0)),
12725 TREE_OPERAND (arg0, 0), arg1);
12726 }
12727
12728 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12729 if (TREE_CODE (arg0) == MINUS_EXPR
12730 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12731 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12732 1)),
12733 arg1, 0)
12734 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12735 {
12736 return omit_two_operands_loc (loc, type,
12737 code == NE_EXPR
12738 ? boolean_true_node : boolean_false_node,
12739 TREE_OPERAND (arg0, 1), arg1);
12740 }
12741
12742 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12743 if (TREE_CODE (arg0) == ABS_EXPR
12744 && (integer_zerop (arg1) || real_zerop (arg1)))
12745 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12746
12747 /* If this is an EQ or NE comparison with zero and ARG0 is
12748 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12749 two operations, but the latter can be done in one less insn
12750 on machines that have only two-operand insns or on which a
12751 constant cannot be the first operand. */
12752 if (TREE_CODE (arg0) == BIT_AND_EXPR
12753 && integer_zerop (arg1))
12754 {
12755 tree arg00 = TREE_OPERAND (arg0, 0);
12756 tree arg01 = TREE_OPERAND (arg0, 1);
12757 if (TREE_CODE (arg00) == LSHIFT_EXPR
12758 && integer_onep (TREE_OPERAND (arg00, 0)))
12759 {
12760 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12761 arg01, TREE_OPERAND (arg00, 1));
12762 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12763 build_int_cst (TREE_TYPE (arg0), 1));
12764 return fold_build2_loc (loc, code, type,
12765 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12766 arg1);
12767 }
12768 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12769 && integer_onep (TREE_OPERAND (arg01, 0)))
12770 {
12771 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12772 arg00, TREE_OPERAND (arg01, 1));
12773 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12774 build_int_cst (TREE_TYPE (arg0), 1));
12775 return fold_build2_loc (loc, code, type,
12776 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12777 arg1);
12778 }
12779 }
12780
12781 /* If this is an NE or EQ comparison of zero against the result of a
12782 signed MOD operation whose second operand is a power of 2, make
12783 the MOD operation unsigned since it is simpler and equivalent. */
12784 if (integer_zerop (arg1)
12785 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12786 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12787 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12788 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12789 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12790 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12791 {
12792 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12793 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12794 fold_convert_loc (loc, newtype,
12795 TREE_OPERAND (arg0, 0)),
12796 fold_convert_loc (loc, newtype,
12797 TREE_OPERAND (arg0, 1)));
12798
12799 return fold_build2_loc (loc, code, type, newmod,
12800 fold_convert_loc (loc, newtype, arg1));
12801 }
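/* [Editorial sketch, not part of fold-const.c]  Whether a remainder by
   a power of two is zero does not depend on signedness, so the cheaper
   unsigned form is used; both tests below agree for every int x.  */
static int
mod_sign_irrelevant (int x)
{
  return (unsigned int) x % 8u == 0;	/* same truth value as x % 8 == 0 */
}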
12802
12803 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12804 C1 is a valid shift constant, and C2 is a power of two, i.e.
12805 a single bit. */
12806 if (TREE_CODE (arg0) == BIT_AND_EXPR
12807 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12808 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12809 == INTEGER_CST
12810 && integer_pow2p (TREE_OPERAND (arg0, 1))
12811 && integer_zerop (arg1))
12812 {
12813 tree itype = TREE_TYPE (arg0);
12814 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12815 prec = TYPE_PRECISION (itype);
12816
12817 /* Check for a valid shift count. */
12818 if (wi::ltu_p (arg001, prec))
12819 {
12820 tree arg01 = TREE_OPERAND (arg0, 1);
12821 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12822 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12823 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12824 can be rewritten as (X & (C2 << C1)) != 0. */
12825 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12826 {
12827 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12828 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12829 return fold_build2_loc (loc, code, type, tem,
12830 fold_convert_loc (loc, itype, arg1));
12831 }
12832 /* Otherwise, for signed (arithmetic) shifts,
12833 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12834 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12835 else if (!TYPE_UNSIGNED (itype))
12836 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12837 arg000, build_int_cst (itype, 0));
12838		    /* Otherwise, for unsigned (logical) shifts,
12839 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12840 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12841 else
12842 return omit_one_operand_loc (loc, type,
12843 code == EQ_EXPR ? integer_one_node
12844 : integer_zero_node,
12845 arg000);
12846 }
12847 }
12848
12849 /* If we have (A & C) == C where C is a power of 2, convert this into
12850 (A & C) != 0. Similarly for NE_EXPR. */
12851 if (TREE_CODE (arg0) == BIT_AND_EXPR
12852 && integer_pow2p (TREE_OPERAND (arg0, 1))
12853 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12854 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12855 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12856 integer_zero_node));
12857
12858 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12859 bit, then fold the expression into A < 0 or A >= 0. */
12860 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12861 if (tem)
12862 return tem;
12863
12864 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12865 Similarly for NE_EXPR. */
12866 if (TREE_CODE (arg0) == BIT_AND_EXPR
12867 && TREE_CODE (arg1) == INTEGER_CST
12868 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12869 {
12870 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12871 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12872 TREE_OPERAND (arg0, 1));
12873 tree dandnotc
12874 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12875 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12876 notc);
12877 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12878 if (integer_nonzerop (dandnotc))
12879 return omit_one_operand_loc (loc, type, rslt, arg0);
12880 }
12881
12882 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12883 Similarly for NE_EXPR. */
12884 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12885 && TREE_CODE (arg1) == INTEGER_CST
12886 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12887 {
12888 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12889 tree candnotd
12890 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12891 TREE_OPERAND (arg0, 1),
12892 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12893 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12894 if (integer_nonzerop (candnotd))
12895 return omit_one_operand_loc (loc, type, rslt, arg0);
12896 }
12897
12898 /* If this is a comparison of a field, we may be able to simplify it. */
12899 if ((TREE_CODE (arg0) == COMPONENT_REF
12900 || TREE_CODE (arg0) == BIT_FIELD_REF)
12901 /* Handle the constant case even without -O
12902 to make sure the warnings are given. */
12903 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12904 {
12905 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12906 if (t1)
12907 return t1;
12908 }
12909
12910 /* Optimize comparisons of strlen vs zero to a compare of the
12911 first character of the string vs zero. To wit,
12912 strlen(ptr) == 0 => *ptr == 0
12913 strlen(ptr) != 0 => *ptr != 0
12914 Other cases should reduce to one of these two (or a constant)
12915 due to the return value of strlen being unsigned. */
12916 if (TREE_CODE (arg0) == CALL_EXPR
12917 && integer_zerop (arg1))
12918 {
12919 tree fndecl = get_callee_fndecl (arg0);
12920
12921 if (fndecl
12922 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12923 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12924 && call_expr_nargs (arg0) == 1
12925 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12926 {
12927 tree iref = build_fold_indirect_ref_loc (loc,
12928 CALL_EXPR_ARG (arg0, 0));
12929 return fold_build2_loc (loc, code, type, iref,
12930 build_int_cst (TREE_TYPE (iref), 0));
12931 }
12932 }
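/* [Editorial sketch, not part of fold-const.c]  The strlen fold above in
   hypothetical C: a string is empty exactly when its first character is
   the terminating NUL, so no call is needed.  */
static int
strlen_is_zero (const char *p)
{
  return *p == 0;	/* replaces strlen (p) == 0 */
}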
12933
12934 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12935 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12936 if (TREE_CODE (arg0) == RSHIFT_EXPR
12937 && integer_zerop (arg1)
12938 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12939 {
12940 tree arg00 = TREE_OPERAND (arg0, 0);
12941 tree arg01 = TREE_OPERAND (arg0, 1);
12942 tree itype = TREE_TYPE (arg00);
12943 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
12944 {
12945 if (TYPE_UNSIGNED (itype))
12946 {
12947 itype = signed_type_for (itype);
12948 arg00 = fold_convert_loc (loc, itype, arg00);
12949 }
12950 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12951 type, arg00, build_zero_cst (itype));
12952 }
12953 }
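/* [Editorial sketch, not part of fold-const.c]  The sign-test fold above
   for 32-bit int: shifting right by precision - 1 leaves only the sign
   bit, so comparing the result with zero is just a sign test.  */
static int
shift_is_sign_test (int x)
{
  return x < 0;	/* replaces (x >> 31) != 0 */
}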
12954
12955 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12956 if (integer_zerop (arg1)
12957 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12958 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12959 TREE_OPERAND (arg0, 1));
12960
12961 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12962 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12963 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12964 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12965 build_zero_cst (TREE_TYPE (arg0)));
12966 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12967 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12968 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12969 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12970 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12971 build_zero_cst (TREE_TYPE (arg0)));
12972
12973 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12974 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12975 && TREE_CODE (arg1) == INTEGER_CST
12976 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12977 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12978 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12979 TREE_OPERAND (arg0, 1), arg1));
12980
12981 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12982 (X & C) == 0 when C is a single bit. */
12983 if (TREE_CODE (arg0) == BIT_AND_EXPR
12984 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12985 && integer_zerop (arg1)
12986 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12987 {
12988 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12989 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12990 TREE_OPERAND (arg0, 1));
12991 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12992 type, tem,
12993 fold_convert_loc (loc, TREE_TYPE (arg0),
12994 arg1));
12995 }
12996
12997 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12998 constant C is a power of two, i.e. a single bit. */
12999 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13000 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13001 && integer_zerop (arg1)
13002 && integer_pow2p (TREE_OPERAND (arg0, 1))
13003 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13004 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13005 {
13006 tree arg00 = TREE_OPERAND (arg0, 0);
13007 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13008 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13009 }
13010
13011 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13012	 when C is a power of two, i.e. a single bit.  */
13013 if (TREE_CODE (arg0) == BIT_AND_EXPR
13014 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13015 && integer_zerop (arg1)
13016 && integer_pow2p (TREE_OPERAND (arg0, 1))
13017 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13018 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13019 {
13020 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13021 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13022 arg000, TREE_OPERAND (arg0, 1));
13023 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13024 tem, build_int_cst (TREE_TYPE (tem), 0));
13025 }
13026
13027 if (integer_zerop (arg1)
13028 && tree_expr_nonzero_p (arg0))
13029 {
13030	  tree res = constant_boolean_node (code == NE_EXPR, type);
13031 return omit_one_operand_loc (loc, type, res, arg0);
13032 }
13033
13034 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13035 if (TREE_CODE (arg0) == NEGATE_EXPR
13036 && TREE_CODE (arg1) == NEGATE_EXPR)
13037 return fold_build2_loc (loc, code, type,
13038 TREE_OPERAND (arg0, 0),
13039 fold_convert_loc (loc, TREE_TYPE (arg0),
13040 TREE_OPERAND (arg1, 0)));
13041
13042      /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries.  */
13043 if (TREE_CODE (arg0) == BIT_AND_EXPR
13044 && TREE_CODE (arg1) == BIT_AND_EXPR)
13045 {
13046 tree arg00 = TREE_OPERAND (arg0, 0);
13047 tree arg01 = TREE_OPERAND (arg0, 1);
13048 tree arg10 = TREE_OPERAND (arg1, 0);
13049 tree arg11 = TREE_OPERAND (arg1, 1);
13050 tree itype = TREE_TYPE (arg0);
13051
13052 if (operand_equal_p (arg01, arg11, 0))
13053 return fold_build2_loc (loc, code, type,
13054 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13055 fold_build2_loc (loc,
13056 BIT_XOR_EXPR, itype,
13057 arg00, arg10),
13058 arg01),
13059 build_zero_cst (itype));
13060
13061 if (operand_equal_p (arg01, arg10, 0))
13062 return fold_build2_loc (loc, code, type,
13063 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13064 fold_build2_loc (loc,
13065 BIT_XOR_EXPR, itype,
13066 arg00, arg11),
13067 arg01),
13068 build_zero_cst (itype));
13069
13070 if (operand_equal_p (arg00, arg11, 0))
13071 return fold_build2_loc (loc, code, type,
13072 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13073 fold_build2_loc (loc,
13074 BIT_XOR_EXPR, itype,
13075 arg01, arg10),
13076 arg00),
13077 build_zero_cst (itype));
13078
13079 if (operand_equal_p (arg00, arg10, 0))
13080 return fold_build2_loc (loc, code, type,
13081 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13082 fold_build2_loc (loc,
13083 BIT_XOR_EXPR, itype,
13084 arg01, arg11),
13085 arg00),
13086 build_zero_cst (itype));
13087 }
13088
13089 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13090 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13091 {
13092 tree arg00 = TREE_OPERAND (arg0, 0);
13093 tree arg01 = TREE_OPERAND (arg0, 1);
13094 tree arg10 = TREE_OPERAND (arg1, 0);
13095 tree arg11 = TREE_OPERAND (arg1, 1);
13096 tree itype = TREE_TYPE (arg0);
13097
13098 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13099 operand_equal_p guarantees no side-effects so we don't need
13100 to use omit_one_operand on Z. */
13101 if (operand_equal_p (arg01, arg11, 0))
13102 return fold_build2_loc (loc, code, type, arg00,
13103 fold_convert_loc (loc, TREE_TYPE (arg00),
13104 arg10));
13105 if (operand_equal_p (arg01, arg10, 0))
13106 return fold_build2_loc (loc, code, type, arg00,
13107 fold_convert_loc (loc, TREE_TYPE (arg00),
13108 arg11));
13109 if (operand_equal_p (arg00, arg11, 0))
13110 return fold_build2_loc (loc, code, type, arg01,
13111 fold_convert_loc (loc, TREE_TYPE (arg01),
13112 arg10));
13113 if (operand_equal_p (arg00, arg10, 0))
13114 return fold_build2_loc (loc, code, type, arg01,
13115 fold_convert_loc (loc, TREE_TYPE (arg01),
13116 arg11));
13117
13118 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13119 if (TREE_CODE (arg01) == INTEGER_CST
13120 && TREE_CODE (arg11) == INTEGER_CST)
13121 {
13122 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13123 fold_convert_loc (loc, itype, arg11));
13124 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13125 return fold_build2_loc (loc, code, type, tem,
13126 fold_convert_loc (loc, itype, arg10));
13127 }
13128 }
13129
13130 /* Attempt to simplify equality/inequality comparisons of complex
13131 values. Only lower the comparison if the result is known or
13132 can be simplified to a single scalar comparison. */
13133 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13134 || TREE_CODE (arg0) == COMPLEX_CST)
13135 && (TREE_CODE (arg1) == COMPLEX_EXPR
13136 || TREE_CODE (arg1) == COMPLEX_CST))
13137 {
13138 tree real0, imag0, real1, imag1;
13139 tree rcond, icond;
13140
13141 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13142 {
13143 real0 = TREE_OPERAND (arg0, 0);
13144 imag0 = TREE_OPERAND (arg0, 1);
13145 }
13146 else
13147 {
13148 real0 = TREE_REALPART (arg0);
13149 imag0 = TREE_IMAGPART (arg0);
13150 }
13151
13152 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13153 {
13154 real1 = TREE_OPERAND (arg1, 0);
13155 imag1 = TREE_OPERAND (arg1, 1);
13156 }
13157 else
13158 {
13159 real1 = TREE_REALPART (arg1);
13160 imag1 = TREE_IMAGPART (arg1);
13161 }
13162
13163 rcond = fold_binary_loc (loc, code, type, real0, real1);
13164 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13165 {
13166 if (integer_zerop (rcond))
13167 {
13168 if (code == EQ_EXPR)
13169 return omit_two_operands_loc (loc, type, boolean_false_node,
13170 imag0, imag1);
13171 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13172 }
13173 else
13174 {
13175 if (code == NE_EXPR)
13176 return omit_two_operands_loc (loc, type, boolean_true_node,
13177 imag0, imag1);
13178 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13179 }
13180 }
13181
13182 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13183 if (icond && TREE_CODE (icond) == INTEGER_CST)
13184 {
13185 if (integer_zerop (icond))
13186 {
13187 if (code == EQ_EXPR)
13188 return omit_two_operands_loc (loc, type, boolean_false_node,
13189 real0, real1);
13190 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13191 }
13192 else
13193 {
13194 if (code == NE_EXPR)
13195 return omit_two_operands_loc (loc, type, boolean_true_node,
13196 real0, real1);
13197 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13198 }
13199 }
13200 }
13201
13202 return NULL_TREE;
13203
13204 case LT_EXPR:
13205 case GT_EXPR:
13206 case LE_EXPR:
13207 case GE_EXPR:
13208 tem = fold_comparison (loc, code, type, op0, op1);
13209 if (tem != NULL_TREE)
13210 return tem;
13211
13212 /* Transform comparisons of the form X +- C CMP X. */
13213 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13214 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13215 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13216 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13217 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13218 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13219 {
13220 tree arg01 = TREE_OPERAND (arg0, 1);
13221 enum tree_code code0 = TREE_CODE (arg0);
13222 int is_positive;
13223
13224 if (TREE_CODE (arg01) == REAL_CST)
13225 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13226 else
13227 is_positive = tree_int_cst_sgn (arg01);
13228
13229 /* (X - c) > X becomes false. */
13230 if (code == GT_EXPR
13231 && ((code0 == MINUS_EXPR && is_positive >= 0)
13232 || (code0 == PLUS_EXPR && is_positive <= 0)))
13233 {
13234 if (TREE_CODE (arg01) == INTEGER_CST
13235 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13236 fold_overflow_warning (("assuming signed overflow does not "
13237 "occur when assuming that (X - c) > X "
13238 "is always false"),
13239 WARN_STRICT_OVERFLOW_ALL);
13240 return constant_boolean_node (0, type);
13241 }
13242
13243 /* Likewise (X + c) < X becomes false. */
13244 if (code == LT_EXPR
13245 && ((code0 == PLUS_EXPR && is_positive >= 0)
13246 || (code0 == MINUS_EXPR && is_positive <= 0)))
13247 {
13248 if (TREE_CODE (arg01) == INTEGER_CST
13249 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13250 fold_overflow_warning (("assuming signed overflow does not "
13251 "occur when assuming that "
13252 "(X + c) < X is always false"),
13253 WARN_STRICT_OVERFLOW_ALL);
13254 return constant_boolean_node (0, type);
13255 }
13256
13257 /* Convert (X - c) <= X to true. */
13258 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13259 && code == LE_EXPR
13260 && ((code0 == MINUS_EXPR && is_positive >= 0)
13261 || (code0 == PLUS_EXPR && is_positive <= 0)))
13262 {
13263 if (TREE_CODE (arg01) == INTEGER_CST
13264 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13265 fold_overflow_warning (("assuming signed overflow does not "
13266 "occur when assuming that "
13267 "(X - c) <= X is always true"),
13268 WARN_STRICT_OVERFLOW_ALL);
13269 return constant_boolean_node (1, type);
13270 }
13271
13272 /* Convert (X + c) >= X to true. */
13273 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13274 && code == GE_EXPR
13275 && ((code0 == PLUS_EXPR && is_positive >= 0)
13276 || (code0 == MINUS_EXPR && is_positive <= 0)))
13277 {
13278 if (TREE_CODE (arg01) == INTEGER_CST
13279 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13280 fold_overflow_warning (("assuming signed overflow does not "
13281 "occur when assuming that "
13282 "(X + c) >= X is always true"),
13283 WARN_STRICT_OVERFLOW_ALL);
13284 return constant_boolean_node (1, type);
13285 }
13286
13287 if (TREE_CODE (arg01) == INTEGER_CST)
13288 {
13289 /* Convert X + c > X and X - c < X to true for integers. */
13290 if (code == GT_EXPR
13291 && ((code0 == PLUS_EXPR && is_positive > 0)
13292 || (code0 == MINUS_EXPR && is_positive < 0)))
13293 {
13294 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13295 fold_overflow_warning (("assuming signed overflow does "
13296 "not occur when assuming that "
13297 "(X + c) > X is always true"),
13298 WARN_STRICT_OVERFLOW_ALL);
13299 return constant_boolean_node (1, type);
13300 }
13301
13302 if (code == LT_EXPR
13303 && ((code0 == MINUS_EXPR && is_positive > 0)
13304 || (code0 == PLUS_EXPR && is_positive < 0)))
13305 {
13306 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13307 fold_overflow_warning (("assuming signed overflow does "
13308 "not occur when assuming that "
13309 "(X - c) < X is always true"),
13310 WARN_STRICT_OVERFLOW_ALL);
13311 return constant_boolean_node (1, type);
13312 }
13313
13314 /* Convert X + c <= X and X - c >= X to false for integers. */
13315 if (code == LE_EXPR
13316 && ((code0 == PLUS_EXPR && is_positive > 0)
13317 || (code0 == MINUS_EXPR && is_positive < 0)))
13318 {
13319 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13320 fold_overflow_warning (("assuming signed overflow does "
13321 "not occur when assuming that "
13322 "(X + c) <= X is always false"),
13323 WARN_STRICT_OVERFLOW_ALL);
13324 return constant_boolean_node (0, type);
13325 }
13326
13327 if (code == GE_EXPR
13328 && ((code0 == MINUS_EXPR && is_positive > 0)
13329 || (code0 == PLUS_EXPR && is_positive < 0)))
13330 {
13331 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13332 fold_overflow_warning (("assuming signed overflow does "
13333 "not occur when assuming that "
13334 "(X - c) >= X is always false"),
13335 WARN_STRICT_OVERFLOW_ALL);
13336 return constant_boolean_node (0, type);
13337 }
13338 }
13339 }
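/* Editor's illustration (a sketch, not part of the original sources):
   for signed int x with undefined overflow (-fstrict-overflow), the
   folds above give
     x + 1 > x   ->  true         x + 1 <= x  ->  false
     x - 1 < x   ->  true         x - 1 >= x  ->  false
   each emitting a -Wstrict-overflow note when that warning is
   enabled.  */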
13340
13341 /* Comparisons with the highest or lowest possible integer of
13342 the specified precision will have known values. */
13343 {
13344 tree arg1_type = TREE_TYPE (arg1);
13345 unsigned int prec = TYPE_PRECISION (arg1_type);
13346
13347 if (TREE_CODE (arg1) == INTEGER_CST
13348 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13349 {
13350 wide_int max = wi::max_value (arg1_type);
13351 wide_int signed_max = wi::max_value (prec, SIGNED);
13352 wide_int min = wi::min_value (arg1_type);
13353
13354 if (wi::eq_p (arg1, max))
13355 switch (code)
13356 {
13357 case GT_EXPR:
13358 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13359
13360 case GE_EXPR:
13361 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13362
13363 case LE_EXPR:
13364 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13365
13366 case LT_EXPR:
13367 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13368
13369 /* The GE_EXPR and LT_EXPR cases above are not normally
13370 reached because of previous transformations. */
13371
13372 default:
13373 break;
13374 }
13375 else if (wi::eq_p (arg1, max - 1))
13376 switch (code)
13377 {
13378 case GT_EXPR:
13379 arg1 = const_binop (PLUS_EXPR, arg1,
13380 build_int_cst (TREE_TYPE (arg1), 1));
13381 return fold_build2_loc (loc, EQ_EXPR, type,
13382 fold_convert_loc (loc,
13383 TREE_TYPE (arg1), arg0),
13384 arg1);
13385 case LE_EXPR:
13386 arg1 = const_binop (PLUS_EXPR, arg1,
13387 build_int_cst (TREE_TYPE (arg1), 1));
13388 return fold_build2_loc (loc, NE_EXPR, type,
13389 fold_convert_loc (loc, TREE_TYPE (arg1),
13390 arg0),
13391 arg1);
13392 default:
13393 break;
13394 }
13395 else if (wi::eq_p (arg1, min))
13396 switch (code)
13397 {
13398 case LT_EXPR:
13399 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13400
13401 case LE_EXPR:
13402 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13403
13404 case GE_EXPR:
13405 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13406
13407 case GT_EXPR:
13408 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13409
13410 default:
13411 break;
13412 }
13413 else if (wi::eq_p (arg1, min + 1))
13414 switch (code)
13415 {
13416 case GE_EXPR:
13417 arg1 = const_binop (MINUS_EXPR, arg1,
13418 build_int_cst (TREE_TYPE (arg1), 1));
13419 return fold_build2_loc (loc, NE_EXPR, type,
13420 fold_convert_loc (loc,
13421 TREE_TYPE (arg1), arg0),
13422 arg1);
13423 case LT_EXPR:
13424 arg1 = const_binop (MINUS_EXPR, arg1,
13425 build_int_cst (TREE_TYPE (arg1), 1));
13426 return fold_build2_loc (loc, EQ_EXPR, type,
13427 fold_convert_loc (loc, TREE_TYPE (arg1),
13428 arg0),
13429 arg1);
13430 default:
13431 break;
13432 }
13433
13434 else if (wi::eq_p (arg1, signed_max)
13435 && TYPE_UNSIGNED (arg1_type)
13436 /* We will flip the signedness of the comparison operator
13437 associated with the mode of arg1, so the sign bit is
13438 specified by this mode. Check that arg1 is the signed
13439 max associated with this sign bit. */
13440 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13441 /* signed_type does not work on pointer types. */
13442 && INTEGRAL_TYPE_P (arg1_type))
13443 {
13444 /* The following case also applies to X < signed_max+1
13445 and X >= signed_max+1 because of previous transformations. */
13446 if (code == LE_EXPR || code == GT_EXPR)
13447 {
13448 tree st = signed_type_for (arg1_type);
13449 return fold_build2_loc (loc,
13450 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13451 type, fold_convert_loc (loc, st, arg0),
13452 build_int_cst (st, 0));
13453 }
13454 }
13455 }
13456 }
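/* Editor's illustration (a sketch, not part of the original sources):
   for unsigned char x (precision 8, maximum 255), the cases above give
     x > 255  ->  false           x <= 255  ->  true
     x > 254  ->  x == 255        x <= 254  ->  x != 255
     x < 0    ->  false           x >= 0    ->  true
   and, against the signed maximum, x > 127 becomes
   (signed char) x < 0, testing the sign bit directly.  */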
13457
13458 /* If we are comparing an ABS_EXPR with a constant, we can
13459 convert all the cases into explicit comparisons, but they may
13460 well not be faster than doing the ABS and one comparison.
13461 But ABS (X) <= C is a range comparison, which becomes a subtraction
13462 and a comparison, and is probably faster. */
13463 if (code == LE_EXPR
13464 && TREE_CODE (arg1) == INTEGER_CST
13465 && TREE_CODE (arg0) == ABS_EXPR
13466 && ! TREE_SIDE_EFFECTS (arg0)
13467 && (0 != (tem = negate_expr (arg1)))
13468 && TREE_CODE (tem) == INTEGER_CST
13469 && !TREE_OVERFLOW (tem))
13470 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13471 build2 (GE_EXPR, type,
13472 TREE_OPERAND (arg0, 0), tem),
13473 build2 (LE_EXPR, type,
13474 TREE_OPERAND (arg0, 0), arg1));
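/* Editor's illustration (a sketch, not part of the original sources):
   for int x, ABS_EXPR <x> <= 5 becomes the range test
     x >= -5 && x <= 5
   which, per the comment above, can be carried out as one subtraction
   and one unsigned comparison: (unsigned) (x + 5) <= 10.  */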
13475
13476 /* Convert ABS_EXPR<x> >= 0 to true. */
13477 strict_overflow_p = false;
13478 if (code == GE_EXPR
13479 && (integer_zerop (arg1)
13480 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13481 && real_zerop (arg1)))
13482 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13483 {
13484 if (strict_overflow_p)
13485 fold_overflow_warning (("assuming signed overflow does not occur "
13486 "when simplifying comparison of "
13487 "absolute value and zero"),
13488 WARN_STRICT_OVERFLOW_CONDITIONAL);
13489 return omit_one_operand_loc (loc, type,
13490 constant_boolean_node (true, type),
13491 arg0);
13492 }
13493
13494 /* Convert ABS_EXPR<x> < 0 to false. */
13495 strict_overflow_p = false;
13496 if (code == LT_EXPR
13497 && (integer_zerop (arg1) || real_zerop (arg1))
13498 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13499 {
13500 if (strict_overflow_p)
13501 fold_overflow_warning (("assuming signed overflow does not occur "
13502 "when simplifying comparison of "
13503 "absolute value and zero"),
13504 WARN_STRICT_OVERFLOW_CONDITIONAL);
13505 return omit_one_operand_loc (loc, type,
13506 constant_boolean_node (false, type),
13507 arg0);
13508 }
13509
13510 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13511 and similarly for >= into !=. */
13512 if ((code == LT_EXPR || code == GE_EXPR)
13513 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13514 && TREE_CODE (arg1) == LSHIFT_EXPR
13515 && integer_onep (TREE_OPERAND (arg1, 0)))
13516 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13517 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13518 TREE_OPERAND (arg1, 1)),
13519 build_zero_cst (TREE_TYPE (arg0)));
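/* Editor's illustration (a sketch, not part of the original sources):
   for unsigned int x and a variable shift count y, the fold above
   rewrites
     x <  (1U << y)   into   (x >> y) == 0
     x >= (1U << y)   into   (x >> y) != 0
   so the left shift never needs to be carried out.  */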
13520
13521 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13522 otherwise Y might be >= # of bits in X's type and thus e.g.
13523 (unsigned char) (1 << Y) for Y == 15 might be 0.
13524 If the cast is widening, then 1 << Y should have unsigned type,
13525 otherwise if Y is the number of bits in the signed shift type minus 1,
13526 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13527 Y == 31 might be 0xffffffff80000000. */
13528 if ((code == LT_EXPR || code == GE_EXPR)
13529 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13530 && CONVERT_EXPR_P (arg1)
13531 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13532 && (TYPE_PRECISION (TREE_TYPE (arg1))
13533 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13534 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13535 || (TYPE_PRECISION (TREE_TYPE (arg1))
13536 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13537 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13538 {
13539 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13540 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13541 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13542 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13543 build_zero_cst (TREE_TYPE (arg0)));
13544 }
13545
13546 return NULL_TREE;
13547
13548 case UNORDERED_EXPR:
13549 case ORDERED_EXPR:
13550 case UNLT_EXPR:
13551 case UNLE_EXPR:
13552 case UNGT_EXPR:
13553 case UNGE_EXPR:
13554 case UNEQ_EXPR:
13555 case LTGT_EXPR:
13556 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13557 {
13558 t1 = fold_relational_const (code, type, arg0, arg1);
13559 if (t1 != NULL_TREE)
13560 return t1;
13561 }
13562
13563 /* If the first operand is NaN, the result is constant. */
13564 if (TREE_CODE (arg0) == REAL_CST
13565 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13566 && (code != LTGT_EXPR || ! flag_trapping_math))
13567 {
13568 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13569 ? integer_zero_node
13570 : integer_one_node;
13571 return omit_one_operand_loc (loc, type, t1, arg1);
13572 }
13573
13574 /* If the second operand is NaN, the result is constant. */
13575 if (TREE_CODE (arg1) == REAL_CST
13576 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13577 && (code != LTGT_EXPR || ! flag_trapping_math))
13578 {
13579 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13580 ? integer_zero_node
13581 : integer_one_node;
13582 return omit_one_operand_loc (loc, type, t1, arg0);
13583 }
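/* Editor's illustration (a sketch, not part of the original sources):
   once either operand is a NaN constant, every unordered predicate has
   a known value, e.g.
     UNLT_EXPR (NaN, x)     ->  1
     ORDERED_EXPR (NaN, x)  ->  0
     LTGT_EXPR (NaN, x)     ->  0, but only with -fno-trapping-math,
   since LTGT may raise an invalid-operation exception on NaN.  */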
13584
13585 /* Simplify unordered comparison of something with itself. */
13586 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13587 && operand_equal_p (arg0, arg1, 0))
13588 return constant_boolean_node (1, type);
13589
13590 if (code == LTGT_EXPR
13591 && !flag_trapping_math
13592 && operand_equal_p (arg0, arg1, 0))
13593 return constant_boolean_node (0, type);
13594
13595 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13596 {
13597 tree targ0 = strip_float_extensions (arg0);
13598 tree targ1 = strip_float_extensions (arg1);
13599 tree newtype = TREE_TYPE (targ0);
13600
13601 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13602 newtype = TREE_TYPE (targ1);
13603
13604 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13605 return fold_build2_loc (loc, code, type,
13606 fold_convert_loc (loc, newtype, targ0),
13607 fold_convert_loc (loc, newtype, targ1));
13608 }
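/* Editor's illustration (a sketch, not part of the original sources):
   for float f1, f2, the widened comparison
     (double) f1 < (double) f2
   folds back to f1 < f2, since extending both operands to a wider
   format cannot change the outcome.  */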
13609
13610 return NULL_TREE;
13611
13612 case COMPOUND_EXPR:
13613 /* When pedantic, a compound expression can be neither an lvalue
13614 nor an integer constant expression. */
13615 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13616 return NULL_TREE;
13617 /* Don't let (0, 0) be a null pointer constant. */
13618 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13619 : fold_convert_loc (loc, type, arg1);
13620 return pedantic_non_lvalue_loc (loc, tem);
13621
13622 case COMPLEX_EXPR:
13623 if ((TREE_CODE (arg0) == REAL_CST
13624 && TREE_CODE (arg1) == REAL_CST)
13625 || (TREE_CODE (arg0) == INTEGER_CST
13626 && TREE_CODE (arg1) == INTEGER_CST))
13627 return build_complex (type, arg0, arg1);
13628 if (TREE_CODE (arg0) == REALPART_EXPR
13629 && TREE_CODE (arg1) == IMAGPART_EXPR
13630 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13631 && operand_equal_p (TREE_OPERAND (arg0, 0),
13632 TREE_OPERAND (arg1, 0), 0))
13633 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13634 TREE_OPERAND (arg1, 0));
13635 return NULL_TREE;
13636
13637 case ASSERT_EXPR:
13638 /* An ASSERT_EXPR should never be passed to fold_binary. */
13639 gcc_unreachable ();
13640
13641 case VEC_PACK_TRUNC_EXPR:
13642 case VEC_PACK_FIX_TRUNC_EXPR:
13643 {
13644 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13645 tree *elts;
13646
13647 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13648 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13649 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13650 return NULL_TREE;
13651
13652 elts = XALLOCAVEC (tree, nelts);
13653 if (!vec_cst_ctor_to_array (arg0, elts)
13654 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13655 return NULL_TREE;
13656
13657 for (i = 0; i < nelts; i++)
13658 {
13659 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13660 ? NOP_EXPR : FIX_TRUNC_EXPR,
13661 TREE_TYPE (type), elts[i]);
13662 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13663 return NULL_TREE;
13664 }
13665
13666 return build_vector (type, elts);
13667 }
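/* Editor's illustration (a sketch, not part of the original sources):
   packing the constant V2SI vectors { 1, 2 } and { 3, 4 } with
   VEC_PACK_TRUNC_EXPR into a V4HI result yields { 1, 2, 3, 4 },
   each element truncated to the narrower element type.  */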
13668
13669 case VEC_WIDEN_MULT_LO_EXPR:
13670 case VEC_WIDEN_MULT_HI_EXPR:
13671 case VEC_WIDEN_MULT_EVEN_EXPR:
13672 case VEC_WIDEN_MULT_ODD_EXPR:
13673 {
13674 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13675 unsigned int out, ofs, scale;
13676 tree *elts;
13677
13678 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13679 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13680 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13681 return NULL_TREE;
13682
13683 elts = XALLOCAVEC (tree, nelts * 4);
13684 if (!vec_cst_ctor_to_array (arg0, elts)
13685 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13686 return NULL_TREE;
13687
13688 if (code == VEC_WIDEN_MULT_LO_EXPR)
13689 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13690 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13691 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13692 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13693 scale = 1, ofs = 0;
13694 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13695 scale = 1, ofs = 1;
13696
13697 for (out = 0; out < nelts; out++)
13698 {
13699 unsigned int in1 = (out << scale) + ofs;
13700 unsigned int in2 = in1 + nelts * 2;
13701 tree t1, t2;
13702
13703 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13704 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13705
13706 if (t1 == NULL_TREE || t2 == NULL_TREE)
13707 return NULL_TREE;
13708 elts[out] = const_binop (MULT_EXPR, t1, t2);
13709 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13710 return NULL_TREE;
13711 }
13712
13713 return build_vector (type, elts);
13714 }
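/* Editor's illustration (a sketch, not part of the original sources):
   for constant V4QI operands { 1, 2, 3, 4 } and { 5, 6, 7, 8 },
   VEC_WIDEN_MULT_EVEN_EXPR producing a V2HI result selects elements
   0 and 2 of each input: { 1*5, 3*7 } = { 5, 21 }, while the ODD
   variant selects elements 1 and 3: { 2*6, 4*8 } = { 12, 32 }.  */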
13715
13716 default:
13717 return NULL_TREE;
13718 } /* switch (code) */
13719 }
13720
13721 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13722 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13723 of GOTO_EXPR. */
13724
13725 static tree
13726 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13727 {
13728 switch (TREE_CODE (*tp))
13729 {
13730 case LABEL_EXPR:
13731 return *tp;
13732
13733 case GOTO_EXPR:
13734 *walk_subtrees = 0;
13735
13736 /* ... fall through ... */
13737
13738 default:
13739 return NULL_TREE;
13740 }
13741 }
13742
13743 /* Return whether the sub-tree ST contains a label which is accessible from
13744 outside the sub-tree. */
13745
13746 static bool
13747 contains_label_p (tree st)
13748 {
13749 return
13750 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13751 }
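/* Editor's illustration (a sketch, not part of the original sources):
   a COND_EXPR arm that is dead under a constant condition may still
   contain a LABEL_EXPR targeted by a GOTO_EXPR elsewhere in the
   function; contains_label_p lets the constant-condition fold below
   keep such an arm instead of discarding it.  */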
13752
13753 /* Fold a ternary expression of code CODE and type TYPE with operands
13754 OP0, OP1, and OP2. Return the folded expression if folding is
13755 successful. Otherwise, return NULL_TREE. */
13756
13757 tree
13758 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13759 tree op0, tree op1, tree op2)
13760 {
13761 tree tem;
13762 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13763 enum tree_code_class kind = TREE_CODE_CLASS (code);
13764
13765 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13766 && TREE_CODE_LENGTH (code) == 3);
13767
13768 /* If this is a commutative operation, and OP0 is a constant, move it
13769 to OP1 to reduce the number of tests below. */
13770 if (commutative_ternary_tree_code (code)
13771 && tree_swap_operands_p (op0, op1, true))
13772 return fold_build3_loc (loc, code, type, op1, op0, op2);
13773
13774 tem = generic_simplify (loc, code, type, op0, op1, op2);
13775 if (tem)
13776 return tem;
13777
13778 /* Strip any conversions that don't change the mode. This is safe
13779 for every expression, except for a comparison expression because
13780 its signedness is derived from its operands. So, in the latter
13781 case, only strip conversions that don't change the signedness.
13782
13783 Note that this is done as an internal manipulation within the
13784 constant folder, in order to find the simplest representation of
13785 the arguments so that their form can be studied. In any case,
13786 the appropriate type conversions should be put back in the tree
13787 that will get out of the constant folder. */
13788 if (op0)
13789 {
13790 arg0 = op0;
13791 STRIP_NOPS (arg0);
13792 }
13793
13794 if (op1)
13795 {
13796 arg1 = op1;
13797 STRIP_NOPS (arg1);
13798 }
13799
13800 if (op2)
13801 {
13802 arg2 = op2;
13803 STRIP_NOPS (arg2);
13804 }
13805
13806 switch (code)
13807 {
13808 case COMPONENT_REF:
13809 if (TREE_CODE (arg0) == CONSTRUCTOR
13810 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13811 {
13812 unsigned HOST_WIDE_INT idx;
13813 tree field, value;
13814 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13815 if (field == arg1)
13816 return value;
13817 }
13818 return NULL_TREE;
13819
13820 case COND_EXPR:
13821 case VEC_COND_EXPR:
13822 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13823 so all simple results must be passed through pedantic_non_lvalue. */
13824 if (TREE_CODE (arg0) == INTEGER_CST)
13825 {
13826 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13827 tem = integer_zerop (arg0) ? op2 : op1;
13828 /* Only optimize constant conditions when the selected branch
13829 has the same type as the COND_EXPR. This avoids optimizing
13830 away "c ? x : throw", where the throw has a void type.
13831 Avoid throwing away an operand that contains a label. */
13832 if ((!TREE_SIDE_EFFECTS (unused_op)
13833 || !contains_label_p (unused_op))
13834 && (! VOID_TYPE_P (TREE_TYPE (tem))
13835 || VOID_TYPE_P (type)))
13836 return pedantic_non_lvalue_loc (loc, tem);
13837 return NULL_TREE;
13838 }
13839 else if (TREE_CODE (arg0) == VECTOR_CST)
13840 {
13841 if (integer_all_onesp (arg0))
13842 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13843 if (integer_zerop (arg0))
13844 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13845
13846 if ((TREE_CODE (arg1) == VECTOR_CST
13847 || TREE_CODE (arg1) == CONSTRUCTOR)
13848 && (TREE_CODE (arg2) == VECTOR_CST
13849 || TREE_CODE (arg2) == CONSTRUCTOR))
13850 {
13851 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13852 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13853 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13854 for (i = 0; i < nelts; i++)
13855 {
13856 tree val = VECTOR_CST_ELT (arg0, i);
13857 if (integer_all_onesp (val))
13858 sel[i] = i;
13859 else if (integer_zerop (val))
13860 sel[i] = nelts + i;
13861 else /* Currently unreachable. */
13862 return NULL_TREE;
13863 }
13864 tree t = fold_vec_perm (type, arg1, arg2, sel);
13865 if (t != NULL_TREE)
13866 return t;
13867 }
13868 }
13869
13870 if (operand_equal_p (arg1, op2, 0))
13871 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13872
13873 /* If we have A op B ? A : C, we may be able to convert this to a
13874 simpler expression, depending on the operation and the values
13875 of B and C. Signed zeros prevent all of these transformations,
13876 for reasons given above each one.
13877
13878 Also try swapping the arguments and inverting the conditional. */
13879 if (COMPARISON_CLASS_P (arg0)
13880 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13881 arg1, TREE_OPERAND (arg0, 1))
13882 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13883 {
13884 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13885 if (tem)
13886 return tem;
13887 }
13888
13889 if (COMPARISON_CLASS_P (arg0)
13890 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13891 op2,
13892 TREE_OPERAND (arg0, 1))
13893 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13894 {
13895 location_t loc0 = expr_location_or (arg0, loc);
13896 tem = fold_invert_truthvalue (loc0, arg0);
13897 if (tem && COMPARISON_CLASS_P (tem))
13898 {
13899 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13900 if (tem)
13901 return tem;
13902 }
13903 }
13904
13905 /* If the second operand is simpler than the third, swap them
13906 since that produces better jump optimization results. */
13907 if (truth_value_p (TREE_CODE (arg0))
13908 && tree_swap_operands_p (op1, op2, false))
13909 {
13910 location_t loc0 = expr_location_or (arg0, loc);
13911 /* See if this can be inverted. If it can't, possibly because
13912 it was a floating-point inequality comparison, don't do
13913 anything. */
13914 tem = fold_invert_truthvalue (loc0, arg0);
13915 if (tem)
13916 return fold_build3_loc (loc, code, type, tem, op2, op1);
13917 }
13918
13919 /* Convert A ? 1 : 0 to simply A. */
13920 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13921 : (integer_onep (op1)
13922 && !VECTOR_TYPE_P (type)))
13923 && integer_zerop (op2)
13924 /* If we try to convert OP0 to our type, the
13925 call to fold will try to move the conversion inside
13926 a COND, which will recurse. In that case, the COND_EXPR
13927 is probably the best choice, so leave it alone. */
13928 && type == TREE_TYPE (arg0))
13929 return pedantic_non_lvalue_loc (loc, arg0);
13930
13931 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13932 over COND_EXPR in cases such as floating point comparisons. */
13933 if (integer_zerop (op1)
13934 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13935 : (integer_onep (op2)
13936 && !VECTOR_TYPE_P (type)))
13937 && truth_value_p (TREE_CODE (arg0)))
13938 return pedantic_non_lvalue_loc (loc,
13939 fold_convert_loc (loc, type,
13940 invert_truthvalue_loc (loc,
13941 arg0)));
13942
13943 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13944 if (TREE_CODE (arg0) == LT_EXPR
13945 && integer_zerop (TREE_OPERAND (arg0, 1))
13946 && integer_zerop (op2)
13947 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13948 {
13949 /* sign_bit_p looks through both zero and sign extensions,
13950 but for this optimization only sign extensions are
13951 usable. */
13952 tree tem2 = TREE_OPERAND (arg0, 0);
13953 while (tem != tem2)
13954 {
13955 if (TREE_CODE (tem2) != NOP_EXPR
13956 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13957 {
13958 tem = NULL_TREE;
13959 break;
13960 }
13961 tem2 = TREE_OPERAND (tem2, 0);
13962 }
13963 /* sign_bit_p only checks ARG1 bits within A's precision.
13964 If <sign bit of A> has a wider type than A, bits outside
13965 of A's precision in <sign bit of A> need to be checked.
13966 If they are all 0, this optimization needs to be done
13967 in unsigned A's type; if they are all 1, in signed A's type;
13968 otherwise this can't be done. */
13969 if (tem
13970 && TYPE_PRECISION (TREE_TYPE (tem))
13971 < TYPE_PRECISION (TREE_TYPE (arg1))
13972 && TYPE_PRECISION (TREE_TYPE (tem))
13973 < TYPE_PRECISION (type))
13974 {
13975 int inner_width, outer_width;
13976 tree tem_type;
13977
13978 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13979 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13980 if (outer_width > TYPE_PRECISION (type))
13981 outer_width = TYPE_PRECISION (type);
13982
13983 wide_int mask = wi::shifted_mask
13984 (inner_width, outer_width - inner_width, false,
13985 TYPE_PRECISION (TREE_TYPE (arg1)));
13986
13987 wide_int common = mask & arg1;
13988 if (common == mask)
13989 {
13990 tem_type = signed_type_for (TREE_TYPE (tem));
13991 tem = fold_convert_loc (loc, tem_type, tem);
13992 }
13993 else if (common == 0)
13994 {
13995 tem_type = unsigned_type_for (TREE_TYPE (tem));
13996 tem = fold_convert_loc (loc, tem_type, tem);
13997 }
13998 else
13999 tem = NULL;
14000 }
14001
14002 if (tem)
14003 return
14004 fold_convert_loc (loc, type,
14005 fold_build2_loc (loc, BIT_AND_EXPR,
14006 TREE_TYPE (tem), tem,
14007 fold_convert_loc (loc,
14008 TREE_TYPE (tem),
14009 arg1)));
14010 }
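/* Editor's illustration (a sketch, not part of the original sources):
   for 32-bit int x, the conditional
     x < 0 ? 0x80000000 : 0
   selects exactly the sign bit of x and therefore folds to
     x & 0x80000000
   with the precision checks above guarding the case where the
   selected constant is wider than x.  */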
14011
14012 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14013 already handled above. */
14014 if (TREE_CODE (arg0) == BIT_AND_EXPR
14015 && integer_onep (TREE_OPERAND (arg0, 1))
14016 && integer_zerop (op2)
14017 && integer_pow2p (arg1))
14018 {
14019 tree tem = TREE_OPERAND (arg0, 0);
14020 STRIP_NOPS (tem);
14021 if (TREE_CODE (tem) == RSHIFT_EXPR
14022 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14023 && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
14024 == tree_to_uhwi (TREE_OPERAND (tem, 1))))
14025 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14026 TREE_OPERAND (tem, 0), arg1);
14027 }
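/* Editor's illustration (a sketch, not part of the original sources):
   testing a bit and then rebuilding it is redundant; for int a,
     ((a >> 3) & 1) ? 8 : 0
   folds to a & 8, because 8 == 1 << 3 matches the shift count.  */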
14028
14029 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14030 is probably obsolete because the first operand should be a
14031 truth value (that's why we have the two cases above), but let's
14032 leave it in until we can confirm this for all front-ends. */
14033 if (integer_zerop (op2)
14034 && TREE_CODE (arg0) == NE_EXPR
14035 && integer_zerop (TREE_OPERAND (arg0, 1))
14036 && integer_pow2p (arg1)
14037 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14038 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14039 arg1, OEP_ONLY_CONST))
14040 return pedantic_non_lvalue_loc (loc,
14041 fold_convert_loc (loc, type,
14042 TREE_OPERAND (arg0, 0)));
14043
14044 /* Disable the transformations below for vectors, since
14045 fold_binary_op_with_conditional_arg may undo them immediately,
14046 yielding an infinite loop. */
14047 if (code == VEC_COND_EXPR)
14048 return NULL_TREE;
14049
14050 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14051 if (integer_zerop (op2)
14052 && truth_value_p (TREE_CODE (arg0))
14053 && truth_value_p (TREE_CODE (arg1))
14054 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14055 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14056 : TRUTH_ANDIF_EXPR,
14057 type, fold_convert_loc (loc, type, arg0), arg1);
14058
14059 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14060 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14061 && truth_value_p (TREE_CODE (arg0))
14062 && truth_value_p (TREE_CODE (arg1))
14063 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14064 {
14065 location_t loc0 = expr_location_or (arg0, loc);
14066 /* Only perform transformation if ARG0 is easily inverted. */
14067 tem = fold_invert_truthvalue (loc0, arg0);
14068 if (tem)
14069 return fold_build2_loc (loc, code == VEC_COND_EXPR
14070 ? BIT_IOR_EXPR
14071 : TRUTH_ORIF_EXPR,
14072 type, fold_convert_loc (loc, type, tem),
14073 arg1);
14074 }
14075
14076 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14077 if (integer_zerop (arg1)
14078 && truth_value_p (TREE_CODE (arg0))
14079 && truth_value_p (TREE_CODE (op2))
14080 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14081 {
14082 location_t loc0 = expr_location_or (arg0, loc);
14083 /* Only perform transformation if ARG0 is easily inverted. */
14084 tem = fold_invert_truthvalue (loc0, arg0);
14085 if (tem)
14086 return fold_build2_loc (loc, code == VEC_COND_EXPR
14087 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14088 type, fold_convert_loc (loc, type, tem),
14089 op2);
14090 }
14091
14092 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14093 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14094 && truth_value_p (TREE_CODE (arg0))
14095 && truth_value_p (TREE_CODE (op2))
14096 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14097 return fold_build2_loc (loc, code == VEC_COND_EXPR
14098 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14099 type, fold_convert_loc (loc, type, arg0), op2);
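/* Editor's illustration (a sketch, not part of the original sources):
   with truth-valued a and b, the four conversions above are
     a ? b : 0  ->   a && b        a ? b : 1  ->  !a || b
     a ? 0 : b  ->  !a && b        a ? 1 : b  ->   a || b
   using the short-circuiting TRUTH_*IF_EXPR forms in the scalar
   case.  */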
14100
14101 return NULL_TREE;
14102
14103 case CALL_EXPR:
14104 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14105 of fold_ternary on them. */
14106 gcc_unreachable ();
14107
14108 case BIT_FIELD_REF:
14109 if ((TREE_CODE (arg0) == VECTOR_CST
14110 || (TREE_CODE (arg0) == CONSTRUCTOR
14111 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14112 && (type == TREE_TYPE (TREE_TYPE (arg0))
14113 || (TREE_CODE (type) == VECTOR_TYPE
14114 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14115 {
14116 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14117 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14118 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14119 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14120
14121 if (n != 0
14122 && (idx % width) == 0
14123 && (n % width) == 0
14124 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14125 {
14126 idx = idx / width;
14127 n = n / width;
14128
14129 if (TREE_CODE (arg0) == VECTOR_CST)
14130 {
14131 if (n == 1)
14132 return VECTOR_CST_ELT (arg0, idx);
14133
14134 tree *vals = XALLOCAVEC (tree, n);
14135 for (unsigned i = 0; i < n; ++i)
14136 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14137 return build_vector (type, vals);
14138 }
14139
14140 /* Constructor elements can be subvectors. */
14141 unsigned HOST_WIDE_INT k = 1;
14142 if (CONSTRUCTOR_NELTS (arg0) != 0)
14143 {
14144 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14145 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14146 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14147 }
14148
14149 /* We keep an exact subset of the constructor elements. */
14150 if ((idx % k) == 0 && (n % k) == 0)
14151 {
14152 if (CONSTRUCTOR_NELTS (arg0) == 0)
14153 return build_constructor (type, NULL);
14154 idx /= k;
14155 n /= k;
14156 if (n == 1)
14157 {
14158 if (idx < CONSTRUCTOR_NELTS (arg0))
14159 return CONSTRUCTOR_ELT (arg0, idx)->value;
14160 return build_zero_cst (type);
14161 }
14162
14163 vec<constructor_elt, va_gc> *vals;
14164 vec_alloc (vals, n);
14165 for (unsigned i = 0;
14166 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14167 ++i)
14168 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14169 CONSTRUCTOR_ELT
14170 (arg0, idx + i)->value);
14171 return build_constructor (type, vals);
14172 }
14173 /* The bitfield references a single constructor element. */
14174 else if (idx + n <= (idx / k + 1) * k)
14175 {
14176 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14177 return build_zero_cst (type);
14178 else if (n == k)
14179 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14180 else
14181 return fold_build3_loc (loc, code, type,
14182 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14183 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14184 }
14185 }
14186 }
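/* Editor's illustration (a sketch, not part of the original sources):
   a BIT_FIELD_REF taking 32 bits at offset 32 from a constant V4SI
   { 1, 2, 3, 4 } is an element extraction and folds to 2; a 64-bit
   reference at offset 64 folds to the subvector { 3, 4 }.  */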
14187
14188 /* A bit-field-ref that referenced the full argument can be stripped. */
14189 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14190 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14191 && integer_zerop (op2))
14192 return fold_convert_loc (loc, type, arg0);
14193
14194 /* On constants we can use native encode/interpret to constant
14195 fold (nearly) all BIT_FIELD_REFs. */
14196 if (CONSTANT_CLASS_P (arg0)
14197 && can_native_interpret_type_p (type)
14198 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14199 /* This limitation should not be necessary; we just need to
14200 round this up to mode size. */
14201 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14202 /* Need bit-shifting of the buffer to relax the following. */
14203 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14204 {
14205 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14206 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14207 unsigned HOST_WIDE_INT clen;
14208 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14209 /* ??? We cannot tell native_encode_expr to start at
14210 an arbitrary byte only. So limit ourselves to a reasonable
14211 amount of work. */
14212 if (clen <= 4096)
14213 {
14214 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14215 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14216 if (len > 0
14217 && len * BITS_PER_UNIT >= bitpos + bitsize)
14218 {
14219 tree v = native_interpret_expr (type,
14220 b + bitpos / BITS_PER_UNIT,
14221 bitsize / BITS_PER_UNIT);
14222 if (v)
14223 return v;
14224 }
14225 }
14226 }
14227
14228 return NULL_TREE;
14229
14230 case FMA_EXPR:
14231 /* For integers we can decompose the FMA if possible. */
14232 if (TREE_CODE (arg0) == INTEGER_CST
14233 && TREE_CODE (arg1) == INTEGER_CST)
14234 return fold_build2_loc (loc, PLUS_EXPR, type,
14235 const_binop (MULT_EXPR, arg0, arg1), arg2);
14236 if (integer_zerop (arg2))
14237 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14238
14239 return fold_fma (loc, type, arg0, arg1, arg2);
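/* Editor's illustration (a sketch, not part of the original sources):
   an integer FMA_EXPR <2, 3, 4> decomposes into 2*3 + 4 and
   constant-folds to 10, while FMA_EXPR <a, b, 0> degenerates to
   the plain product a * b.  */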
14240
14241 case VEC_PERM_EXPR:
14242 if (TREE_CODE (arg2) == VECTOR_CST)
14243 {
14244 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14245 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14246 bool need_mask_canon = false;
14247 bool all_in_vec0 = true;
14248 bool all_in_vec1 = true;
14249 bool maybe_identity = true;
14250 bool single_arg = (op0 == op1);
14251 bool changed = false;
14252
14253 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14254 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14255 for (i = 0; i < nelts; i++)
14256 {
14257 tree val = VECTOR_CST_ELT (arg2, i);
14258 if (TREE_CODE (val) != INTEGER_CST)
14259 return NULL_TREE;
14260
14261 /* Make sure that the perm value is in an acceptable
14262 range. */
14263 wide_int t = val;
14264 if (wi::gtu_p (t, mask))
14265 {
14266 need_mask_canon = true;
14267 sel[i] = t.to_uhwi () & mask;
14268 }
14269 else
14270 sel[i] = t.to_uhwi ();
14271
14272 if (sel[i] < nelts)
14273 all_in_vec1 = false;
14274 else
14275 all_in_vec0 = false;
14276
14277 if ((sel[i] & (nelts-1)) != i)
14278 maybe_identity = false;
14279 }
14280
14281 if (maybe_identity)
14282 {
14283 if (all_in_vec0)
14284 return op0;
14285 if (all_in_vec1)
14286 return op1;
14287 }
14288
14289 if (all_in_vec0)
14290 op1 = op0;
14291 else if (all_in_vec1)
14292 {
14293 op0 = op1;
14294 for (i = 0; i < nelts; i++)
14295 sel[i] -= nelts;
14296 need_mask_canon = true;
14297 }
14298
14299 if ((TREE_CODE (op0) == VECTOR_CST
14300 || TREE_CODE (op0) == CONSTRUCTOR)
14301 && (TREE_CODE (op1) == VECTOR_CST
14302 || TREE_CODE (op1) == CONSTRUCTOR))
14303 {
14304 tree t = fold_vec_perm (type, op0, op1, sel);
14305 if (t != NULL_TREE)
14306 return t;
14307 }
14308
14309 if (op0 == op1 && !single_arg)
14310 changed = true;
14311
14312 if (need_mask_canon && arg2 == op2)
14313 {
14314 tree *tsel = XALLOCAVEC (tree, nelts);
14315 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14316 for (i = 0; i < nelts; i++)
14317 tsel[i] = build_int_cst (eltype, sel[i]);
14318 op2 = build_vector (TREE_TYPE (arg2), tsel);
14319 changed = true;
14320 }
14321
14322 if (changed)
14323 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14324 }
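/* Editor's illustration (a sketch, not part of the original sources):
   VEC_PERM_EXPR <{ 1, 2, 3, 4 }, { 5, 6, 7, 8 }, { 0, 1, 2, 3 }>
   selects only from the first operand with an identity mask and
   folds to { 1, 2, 3, 4 }; out-of-range selector values are first
   reduced modulo twice the element count.  */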
14325 return NULL_TREE;
14326
14327 default:
14328 return NULL_TREE;
14329 } /* switch (code) */
14330 }
14331
14332 /* Perform constant folding and related simplification of EXPR.
14333 The related simplifications include x*1 => x, x*0 => 0, etc.,
14334 and application of the associative law.
14335 NOP_EXPR conversions may be removed freely (as long as we
14336 are careful not to change the type of the overall expression).
14337 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14338 but we can constant-fold them if they have constant operands. */
14339
14340 #ifdef ENABLE_FOLD_CHECKING
14341 # define fold(x) fold_1 (x)
14342 static tree fold_1 (tree);
14343 static
14344 #endif
14345 tree
14346 fold (tree expr)
14347 {
14348 const tree t = expr;
14349 enum tree_code code = TREE_CODE (t);
14350 enum tree_code_class kind = TREE_CODE_CLASS (code);
14351 tree tem;
14352 location_t loc = EXPR_LOCATION (expr);
14353
14354 /* Return right away if a constant. */
14355 if (kind == tcc_constant)
14356 return t;
14357
14358 /* CALL_EXPR-like objects with variable numbers of operands are
14359 treated specially. */
14360 if (kind == tcc_vl_exp)
14361 {
14362 if (code == CALL_EXPR)
14363 {
14364 tem = fold_call_expr (loc, expr, false);
14365 return tem ? tem : expr;
14366 }
14367 return expr;
14368 }
14369
14370 if (IS_EXPR_CODE_CLASS (kind))
14371 {
14372 tree type = TREE_TYPE (t);
14373 tree op0, op1, op2;
14374
14375 switch (TREE_CODE_LENGTH (code))
14376 {
14377 case 1:
14378 op0 = TREE_OPERAND (t, 0);
14379 tem = fold_unary_loc (loc, code, type, op0);
14380 return tem ? tem : expr;
14381 case 2:
14382 op0 = TREE_OPERAND (t, 0);
14383 op1 = TREE_OPERAND (t, 1);
14384 tem = fold_binary_loc (loc, code, type, op0, op1);
14385 return tem ? tem : expr;
14386 case 3:
14387 op0 = TREE_OPERAND (t, 0);
14388 op1 = TREE_OPERAND (t, 1);
14389 op2 = TREE_OPERAND (t, 2);
14390 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14391 return tem ? tem : expr;
14392 default:
14393 break;
14394 }
14395 }
14396
14397 switch (code)
14398 {
14399 case ARRAY_REF:
14400 {
14401 tree op0 = TREE_OPERAND (t, 0);
14402 tree op1 = TREE_OPERAND (t, 1);
14403
14404 if (TREE_CODE (op1) == INTEGER_CST
14405 && TREE_CODE (op0) == CONSTRUCTOR
14406 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14407 {
14408 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14409 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14410 unsigned HOST_WIDE_INT begin = 0;
14411
14412 /* Find a matching index by means of a binary search. */
14413 while (begin != end)
14414 {
14415 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14416 tree index = (*elts)[middle].index;
14417
14418 if (TREE_CODE (index) == INTEGER_CST
14419 && tree_int_cst_lt (index, op1))
14420 begin = middle + 1;
14421 else if (TREE_CODE (index) == INTEGER_CST
14422 && tree_int_cst_lt (op1, index))
14423 end = middle;
14424 else if (TREE_CODE (index) == RANGE_EXPR
14425 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14426 begin = middle + 1;
14427 else if (TREE_CODE (index) == RANGE_EXPR
14428 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14429 end = middle;
14430 else
14431 return (*elts)[middle].value;
14432 }
14433 }
14434
14435 return t;
14436 }
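/* Editor's illustration (a sketch, not part of the original sources):
   indexing a CONSTRUCTOR such as the initializer of
     static const int t[] = { [0] = 10, [4] = 50 };
   with the constant subscript 4 binary-searches the sorted index
   list and folds to 50 without scanning every element.  */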
14437
14438 /* Return a VECTOR_CST if possible. */
14439 case CONSTRUCTOR:
14440 {
14441 tree type = TREE_TYPE (t);
14442 if (TREE_CODE (type) != VECTOR_TYPE)
14443 return t;
14444
14445 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14446 unsigned HOST_WIDE_INT idx, pos = 0;
14447 tree value;
14448
14449 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14450 {
14451 if (!CONSTANT_CLASS_P (value))
14452 return t;
14453 if (TREE_CODE (value) == VECTOR_CST)
14454 {
14455 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14456 vec[pos++] = VECTOR_CST_ELT (value, i);
14457 }
14458 else
14459 vec[pos++] = value;
14460 }
14461 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14462 vec[pos] = build_zero_cst (TREE_TYPE (type));
14463
14464 return build_vector (type, vec);
14465 }
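/* Editor's illustration (a sketch, not part of the original sources):
   a vector CONSTRUCTOR whose elements are all constants, e.g.
   { 1, 2 } for a V4SI type, becomes the VECTOR_CST { 1, 2, 0, 0 },
   the trailing elements being implicitly zero-filled.  */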
14466
14467 case CONST_DECL:
14468 return fold (DECL_INITIAL (t));
14469
14470 default:
14471 return t;
14472 } /* switch (code) */
14473 }
14474
14475 #ifdef ENABLE_FOLD_CHECKING
14476 #undef fold
14477
14478 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14479 hash_table<pointer_hash<const tree_node> > *);
14480 static void fold_check_failed (const_tree, const_tree);
14481 void print_fold_checksum (const_tree);
14482
14483 /* When --enable-checking=fold, compute a digest of expr before
14484 and after the actual fold call to verify that fold did not
14485 accidentally change the original expr. */
14486
14487 tree
14488 fold (tree expr)
14489 {
14490 tree ret;
14491 struct md5_ctx ctx;
14492 unsigned char checksum_before[16], checksum_after[16];
14493 hash_table<pointer_hash<const tree_node> > ht (32);
14494
14495 md5_init_ctx (&ctx);
14496 fold_checksum_tree (expr, &ctx, &ht);
14497 md5_finish_ctx (&ctx, checksum_before);
14498 ht.empty ();
14499
14500 ret = fold_1 (expr);
14501
14502 md5_init_ctx (&ctx);
14503 fold_checksum_tree (expr, &ctx, &ht);
14504 md5_finish_ctx (&ctx, checksum_after);
14505
14506 if (memcmp (checksum_before, checksum_after, 16))
14507 fold_check_failed (expr, ret);
14508
14509 return ret;
14510 }
14511
14512 void
14513 print_fold_checksum (const_tree expr)
14514 {
14515 struct md5_ctx ctx;
14516 unsigned char checksum[16], cnt;
14517 hash_table<pointer_hash<const tree_node> > ht (32);
14518
14519 md5_init_ctx (&ctx);
14520 fold_checksum_tree (expr, &ctx, &ht);
14521 md5_finish_ctx (&ctx, checksum);
14522 for (cnt = 0; cnt < 16; ++cnt)
14523 fprintf (stderr, "%02x", checksum[cnt]);
14524 putc ('\n', stderr);
14525 }
14526
14527 static void
14528 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14529 {
14530 internal_error ("fold check: original tree changed by fold");
14531 }
14532
14533 static void
14534 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14535 hash_table<pointer_hash <const tree_node> > *ht)
14536 {
14537 const tree_node **slot;
14538 enum tree_code code;
14539 union tree_node buf;
14540 int i, len;
14541
14542 recursive_label:
14543 if (expr == NULL)
14544 return;
14545 slot = ht->find_slot (expr, INSERT);
14546 if (*slot != NULL)
14547 return;
14548 *slot = expr;
14549 code = TREE_CODE (expr);
14550 if (TREE_CODE_CLASS (code) == tcc_declaration
14551 && DECL_ASSEMBLER_NAME_SET_P (expr))
14552 {
14553 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14554 memcpy ((char *) &buf, expr, tree_size (expr));
14555 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14556 expr = (tree) &buf;
14557 }
14558 else if (TREE_CODE_CLASS (code) == tcc_type
14559 && (TYPE_POINTER_TO (expr)
14560 || TYPE_REFERENCE_TO (expr)
14561 || TYPE_CACHED_VALUES_P (expr)
14562 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14563 || TYPE_NEXT_VARIANT (expr)))
14564 {
14565 /* Allow these fields to be modified. */
14566 tree tmp;
14567 memcpy ((char *) &buf, expr, tree_size (expr));
14568 expr = tmp = (tree) &buf;
14569 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14570 TYPE_POINTER_TO (tmp) = NULL;
14571 TYPE_REFERENCE_TO (tmp) = NULL;
14572 TYPE_NEXT_VARIANT (tmp) = NULL;
14573 if (TYPE_CACHED_VALUES_P (tmp))
14574 {
14575 TYPE_CACHED_VALUES_P (tmp) = 0;
14576 TYPE_CACHED_VALUES (tmp) = NULL;
14577 }
14578 }
14579 md5_process_bytes (expr, tree_size (expr), ctx);
14580 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14581 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14582 if (TREE_CODE_CLASS (code) != tcc_type
14583 && TREE_CODE_CLASS (code) != tcc_declaration
14584 && code != TREE_LIST
14585 && code != SSA_NAME
14586 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14587 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14588 switch (TREE_CODE_CLASS (code))
14589 {
14590 case tcc_constant:
14591 switch (code)
14592 {
14593 case STRING_CST:
14594 md5_process_bytes (TREE_STRING_POINTER (expr),
14595 TREE_STRING_LENGTH (expr), ctx);
14596 break;
14597 case COMPLEX_CST:
14598 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14599 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14600 break;
14601 case VECTOR_CST:
14602 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14603 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14604 break;
14605 default:
14606 break;
14607 }
14608 break;
14609 case tcc_exceptional:
14610 switch (code)
14611 {
14612 case TREE_LIST:
14613 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14614 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14615 expr = TREE_CHAIN (expr);
14616 goto recursive_label;
14617 break;
14618 case TREE_VEC:
14619 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14620 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14621 break;
14622 default:
14623 break;
14624 }
14625 break;
14626 case tcc_expression:
14627 case tcc_reference:
14628 case tcc_comparison:
14629 case tcc_unary:
14630 case tcc_binary:
14631 case tcc_statement:
14632 case tcc_vl_exp:
14633 len = TREE_OPERAND_LENGTH (expr);
14634 for (i = 0; i < len; ++i)
14635 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14636 break;
14637 case tcc_declaration:
14638 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14639 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14640 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14641 {
14642 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14643 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14644 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14645 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14646 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14647 }
14648
14649 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14650 {
14651 if (TREE_CODE (expr) == FUNCTION_DECL)
14652 {
14653 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14654 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14655 }
14656 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14657 }
14658 break;
14659 case tcc_type:
14660 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14661 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14662 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14663 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14664 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14665 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14666 if (INTEGRAL_TYPE_P (expr)
14667 || SCALAR_FLOAT_TYPE_P (expr))
14668 {
14669 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14670 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14671 }
14672 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14673 if (TREE_CODE (expr) == RECORD_TYPE
14674 || TREE_CODE (expr) == UNION_TYPE
14675 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14676 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14677 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14678 break;
14679 default:
14680 break;
14681 }
14682 }
14683
14684 /* Helper function for outputting the checksum of a tree T. When
14685 debugging with gdb, you can "define mynext" to be "next" followed
14686 by "call debug_fold_checksum (op0)", then just trace down till the
14687 outputs differ. */
14688
14689 DEBUG_FUNCTION void
14690 debug_fold_checksum (const_tree t)
14691 {
14692 int i;
14693 unsigned char checksum[16];
14694 struct md5_ctx ctx;
14695 hash_table<pointer_hash<const tree_node> > ht (32);
14696
14697 md5_init_ctx (&ctx);
14698 fold_checksum_tree (t, &ctx, &ht);
14699 md5_finish_ctx (&ctx, checksum);
14700 ht.empty ();
14701
14702 for (i = 0; i < 16; i++)
14703 fprintf (stderr, "%d ", checksum[i]);
14704
14705 fprintf (stderr, "\n");
14706 }
14707
14708 #endif
14709
14710 /* Fold a unary tree expression with code CODE of type TYPE with an
14711 operand OP0. LOC is the location of the resulting expression.
14712 Return a folded expression if successful. Otherwise, return a tree
14713 expression with code CODE of type TYPE with an operand OP0. */
14714
14715 tree
14716 fold_build1_stat_loc (location_t loc,
14717 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14718 {
14719 tree tem;
14720 #ifdef ENABLE_FOLD_CHECKING
14721 unsigned char checksum_before[16], checksum_after[16];
14722 struct md5_ctx ctx;
14723 hash_table<pointer_hash<const tree_node> > ht (32);
14724
14725 md5_init_ctx (&ctx);
14726 fold_checksum_tree (op0, &ctx, &ht);
14727 md5_finish_ctx (&ctx, checksum_before);
14728 ht.empty ();
14729 #endif
14730
14731 tem = fold_unary_loc (loc, code, type, op0);
14732 if (!tem)
14733 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14734
14735 #ifdef ENABLE_FOLD_CHECKING
14736 md5_init_ctx (&ctx);
14737 fold_checksum_tree (op0, &ctx, &ht);
14738 md5_finish_ctx (&ctx, checksum_after);
14739
14740 if (memcmp (checksum_before, checksum_after, 16))
14741 fold_check_failed (op0, tem);
14742 #endif
14743 return tem;
14744 }
14745
14746 /* Fold a binary tree expression with code CODE of type TYPE with
14747 operands OP0 and OP1. LOC is the location of the resulting
14748 expression. Return a folded expression if successful. Otherwise,
14749 return a tree expression with code CODE of type TYPE with operands
14750 OP0 and OP1. */
14751
14752 tree
14753 fold_build2_stat_loc (location_t loc,
14754 enum tree_code code, tree type, tree op0, tree op1
14755 MEM_STAT_DECL)
14756 {
14757 tree tem;
14758 #ifdef ENABLE_FOLD_CHECKING
14759 unsigned char checksum_before_op0[16],
14760 checksum_before_op1[16],
14761 checksum_after_op0[16],
14762 checksum_after_op1[16];
14763 struct md5_ctx ctx;
14764 hash_table<pointer_hash<const tree_node> > ht (32);
14765
14766 md5_init_ctx (&ctx);
14767 fold_checksum_tree (op0, &ctx, &ht);
14768 md5_finish_ctx (&ctx, checksum_before_op0);
14769 ht.empty ();
14770
14771 md5_init_ctx (&ctx);
14772 fold_checksum_tree (op1, &ctx, &ht);
14773 md5_finish_ctx (&ctx, checksum_before_op1);
14774 ht.empty ();
14775 #endif
14776
14777 tem = fold_binary_loc (loc, code, type, op0, op1);
14778 if (!tem)
14779 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14780
14781 #ifdef ENABLE_FOLD_CHECKING
14782 md5_init_ctx (&ctx);
14783 fold_checksum_tree (op0, &ctx, &ht);
14784 md5_finish_ctx (&ctx, checksum_after_op0);
14785 ht.empty ();
14786
14787 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14788 fold_check_failed (op0, tem);
14789
14790 md5_init_ctx (&ctx);
14791 fold_checksum_tree (op1, &ctx, &ht);
14792 md5_finish_ctx (&ctx, checksum_after_op1);
14793
14794 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14795 fold_check_failed (op1, tem);
14796 #endif
14797 return tem;
14798 }
14799
14800 /* Fold a ternary tree expression with code CODE of type TYPE with
14801 operands OP0, OP1, and OP2. Return a folded expression if
14802 successful. Otherwise, return a tree expression with code CODE of
14803 type TYPE with operands OP0, OP1, and OP2. */
14804
14805 tree
14806 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14807 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14808 {
14809 tree tem;
14810 #ifdef ENABLE_FOLD_CHECKING
14811 unsigned char checksum_before_op0[16],
14812 checksum_before_op1[16],
14813 checksum_before_op2[16],
14814 checksum_after_op0[16],
14815 checksum_after_op1[16],
14816 checksum_after_op2[16];
14817 struct md5_ctx ctx;
14818 hash_table<pointer_hash<const tree_node> > ht (32);
14819
14820 md5_init_ctx (&ctx);
14821 fold_checksum_tree (op0, &ctx, &ht);
14822 md5_finish_ctx (&ctx, checksum_before_op0);
14823 ht.empty ();
14824
14825 md5_init_ctx (&ctx);
14826 fold_checksum_tree (op1, &ctx, &ht);
14827 md5_finish_ctx (&ctx, checksum_before_op1);
14828 ht.empty ();
14829
14830 md5_init_ctx (&ctx);
14831 fold_checksum_tree (op2, &ctx, &ht);
14832 md5_finish_ctx (&ctx, checksum_before_op2);
14833 ht.empty ();
14834 #endif
14835
14836 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14837 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14838 if (!tem)
14839 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14840
14841 #ifdef ENABLE_FOLD_CHECKING
14842 md5_init_ctx (&ctx);
14843 fold_checksum_tree (op0, &ctx, &ht);
14844 md5_finish_ctx (&ctx, checksum_after_op0);
14845 ht.empty ();
14846
14847 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14848 fold_check_failed (op0, tem);
14849
14850 md5_init_ctx (&ctx);
14851 fold_checksum_tree (op1, &ctx, &ht);
14852 md5_finish_ctx (&ctx, checksum_after_op1);
14853 ht.empty ();
14854
14855 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14856 fold_check_failed (op1, tem);
14857
14858 md5_init_ctx (&ctx);
14859 fold_checksum_tree (op2, &ctx, &ht);
14860 md5_finish_ctx (&ctx, checksum_after_op2);
14861
14862 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14863 fold_check_failed (op2, tem);
14864 #endif
14865 return tem;
14866 }
14867
14868 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
14869 arguments in ARGARRAY, and a null static chain.
14870 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14871 of type TYPE from the given operands as constructed by build_call_array. */
14872
14873 tree
14874 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14875 int nargs, tree *argarray)
14876 {
14877 tree tem;
14878 #ifdef ENABLE_FOLD_CHECKING
14879 unsigned char checksum_before_fn[16],
14880 checksum_before_arglist[16],
14881 checksum_after_fn[16],
14882 checksum_after_arglist[16];
14883 struct md5_ctx ctx;
14884 hash_table<pointer_hash<const tree_node> > ht (32);
14885 int i;
14886
14887 md5_init_ctx (&ctx);
14888 fold_checksum_tree (fn, &ctx, &ht);
14889 md5_finish_ctx (&ctx, checksum_before_fn);
14890 ht.empty ();
14891
14892 md5_init_ctx (&ctx);
14893 for (i = 0; i < nargs; i++)
14894 fold_checksum_tree (argarray[i], &ctx, &ht);
14895 md5_finish_ctx (&ctx, checksum_before_arglist);
14896 ht.empty ();
14897 #endif
14898
14899 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14900
14901 #ifdef ENABLE_FOLD_CHECKING
14902 md5_init_ctx (&ctx);
14903 fold_checksum_tree (fn, &ctx, &ht);
14904 md5_finish_ctx (&ctx, checksum_after_fn);
14905 ht.empty ();
14906
14907 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14908 fold_check_failed (fn, tem);
14909
14910 md5_init_ctx (&ctx);
14911 for (i = 0; i < nargs; i++)
14912 fold_checksum_tree (argarray[i], &ctx, &ht);
14913 md5_finish_ctx (&ctx, checksum_after_arglist);
14914
14915 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14916 fold_check_failed (NULL_TREE, tem);
14917 #endif
14918 return tem;
14919 }
14920
14921 /* Perform constant folding and related simplification of initializer
14922 expression EXPR. These behave identically to "fold_buildN" but ignore
14923 potential run-time traps and exceptions that fold must preserve. */
14924
14925 #define START_FOLD_INIT \
14926 int saved_signaling_nans = flag_signaling_nans;\
14927 int saved_trapping_math = flag_trapping_math;\
14928 int saved_rounding_math = flag_rounding_math;\
14929 int saved_trapv = flag_trapv;\
14930 int saved_folding_initializer = folding_initializer;\
14931 flag_signaling_nans = 0;\
14932 flag_trapping_math = 0;\
14933 flag_rounding_math = 0;\
14934 flag_trapv = 0;\
14935 folding_initializer = 1;
14936
14937 #define END_FOLD_INIT \
14938 flag_signaling_nans = saved_signaling_nans;\
14939 flag_trapping_math = saved_trapping_math;\
14940 flag_rounding_math = saved_rounding_math;\
14941 flag_trapv = saved_trapv;\
14942 folding_initializer = saved_folding_initializer;
14943
14944 tree
14945 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14946 tree type, tree op)
14947 {
14948 tree result;
14949 START_FOLD_INIT;
14950
14951 result = fold_build1_loc (loc, code, type, op);
14952
14953 END_FOLD_INIT;
14954 return result;
14955 }
14956
14957 tree
14958 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14959 tree type, tree op0, tree op1)
14960 {
14961 tree result;
14962 START_FOLD_INIT;
14963
14964 result = fold_build2_loc (loc, code, type, op0, op1);
14965
14966 END_FOLD_INIT;
14967 return result;
14968 }
14969
14970 tree
14971 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14972 int nargs, tree *argarray)
14973 {
14974 tree result;
14975 START_FOLD_INIT;
14976
14977 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14978
14979 END_FOLD_INIT;
14980 return result;
14981 }
14982
14983 #undef START_FOLD_INIT
14984 #undef END_FOLD_INIT
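
/* Illustrative sketch (guarded out of compilation) of what the
   *_initializer_loc wrappers buy us. Under -frounding-math, const_binop
   refuses to fold an inexact division such as 1.0/3.0 because the
   run-time rounding mode is unknown; inside a static initializer the
   operation happens once at translation time, so the wrapper clears
   flag_rounding_math and the division folds to a REAL_CST anyway. */
#if 0
  tree three = build_real_from_int_cst (double_type_node,
					build_int_cst (integer_type_node, 3));
  tree third = fold_build2_initializer_loc (input_location, RDIV_EXPR,
					    double_type_node,
					    build_real (double_type_node,
							dconst1),
					    three);
#endif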
14985
14986 /* Determine if the first argument is a multiple of the second argument.
14987 Return 0 if it is not, or if we cannot easily determine it to be.
14988
14989 An example of the sort of thing we care about (at this point; this routine
14990 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14991 fold cases do now) is discovering that
14992
14993 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14994
14995 is a multiple of
14996
14997 SAVE_EXPR (J * 8)
14998
14999 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15000
15001 This code also handles discovering that
15002
15003 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15004
15005 is a multiple of 8 so we don't have to worry about dealing with a
15006 possible remainder.
15007
15008 Note that we *look* inside a SAVE_EXPR only to determine how it was
15009 calculated; it is not safe for fold to do much of anything else with the
15010 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15011 at run time. For example, the latter example above *cannot* be implemented
15012 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15013 evaluation time of the original SAVE_EXPR is not necessarily the same at
15014 the time the new expression is evaluated. The only optimization of this
15015 sort that would be valid is changing
15016
15017 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15018
15019 divided by 8 to
15020
15021 SAVE_EXPR (I) * SAVE_EXPR (J)
15022
15023 (where the same SAVE_EXPR (J) is used in the original and the
15024 transformed version). */
15025
15026 int
15027 multiple_of_p (tree type, const_tree top, const_tree bottom)
15028 {
15029 if (operand_equal_p (top, bottom, 0))
15030 return 1;
15031
15032 if (TREE_CODE (type) != INTEGER_TYPE)
15033 return 0;
15034
15035 switch (TREE_CODE (top))
15036 {
15037 case BIT_AND_EXPR:
15038 /* Bitwise and provides a power of two multiple. If the mask is
15039 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15040 if (!integer_pow2p (bottom))
15041 return 0;
15042 /* FALLTHRU */
15043
15044 case MULT_EXPR:
15045 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15046 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15047
15048 case PLUS_EXPR:
15049 case MINUS_EXPR:
15050 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15051 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15052
15053 case LSHIFT_EXPR:
15054 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15055 {
15056 tree op1, t1;
15057
15058 op1 = TREE_OPERAND (top, 1);
15059 /* const_binop may not detect overflow correctly,
15060 so check for it explicitly here. */
15061 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15062 && 0 != (t1 = fold_convert (type,
15063 const_binop (LSHIFT_EXPR,
15064 size_one_node,
15065 op1)))
15066 && !TREE_OVERFLOW (t1))
15067 return multiple_of_p (type, t1, bottom);
15068 }
15069 return 0;
15070
15071 case NOP_EXPR:
15072 /* Can't handle conversions from non-integral or wider integral type. */
15073 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15074 || (TYPE_PRECISION (type)
15075 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15076 return 0;
15077
15078 /* ... fall through ... */
15079
15080 case SAVE_EXPR:
15081 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15082
15083 case COND_EXPR:
15084 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15085 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15086
15087 case INTEGER_CST:
15088 if (TREE_CODE (bottom) != INTEGER_CST
15089 || integer_zerop (bottom)
15090 || (TYPE_UNSIGNED (type)
15091 && (tree_int_cst_sgn (top) < 0
15092 || tree_int_cst_sgn (bottom) < 0)))
15093 return 0;
15094 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15095 SIGNED);
15096
15097 default:
15098 return 0;
15099 }
15100 }
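
/* A minimal usage sketch, with I standing for a hypothetical int-typed
   tree: I * 8 + 16 is recognized as a multiple of 8 through the
   MULT_EXPR, PLUS_EXPR and INTEGER_CST cases above, since 8 divides
   both addends. */
#if 0
  tree type = integer_type_node;
  tree eight = build_int_cst (type, 8);
  tree top = build2 (PLUS_EXPR, type,
		     build2 (MULT_EXPR, type, i, eight),
		     build_int_cst (type, 16));
  gcc_checking_assert (multiple_of_p (type, top, eight));
#endif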
15101
15102 /* Return true if CODE or TYPE is known to be non-negative. */
15103
15104 static bool
15105 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15106 {
15107 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15108 && truth_value_p (code))
15109 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15110 have a signed:1 type (where the values are -1 and 0). */
15111 return true;
15112 return false;
15113 }
15114
15115 /* Return true if (CODE OP0) is known to be non-negative. If the return
15116 value is based on the assumption that signed overflow is undefined,
15117 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15118 *STRICT_OVERFLOW_P. */
15119
15120 bool
15121 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15122 bool *strict_overflow_p)
15123 {
15124 if (TYPE_UNSIGNED (type))
15125 return true;
15126
15127 switch (code)
15128 {
15129 case ABS_EXPR:
15130 /* We can't return 1 if flag_wrapv is set because
15131 ABS_EXPR<INT_MIN> = INT_MIN. */
15132 if (!INTEGRAL_TYPE_P (type))
15133 return true;
15134 if (TYPE_OVERFLOW_UNDEFINED (type))
15135 {
15136 *strict_overflow_p = true;
15137 return true;
15138 }
15139 break;
15140
15141 case NON_LVALUE_EXPR:
15142 case FLOAT_EXPR:
15143 case FIX_TRUNC_EXPR:
15144 return tree_expr_nonnegative_warnv_p (op0,
15145 strict_overflow_p);
15146
15147 case NOP_EXPR:
15148 {
15149 tree inner_type = TREE_TYPE (op0);
15150 tree outer_type = type;
15151
15152 if (TREE_CODE (outer_type) == REAL_TYPE)
15153 {
15154 if (TREE_CODE (inner_type) == REAL_TYPE)
15155 return tree_expr_nonnegative_warnv_p (op0,
15156 strict_overflow_p);
15157 if (INTEGRAL_TYPE_P (inner_type))
15158 {
15159 if (TYPE_UNSIGNED (inner_type))
15160 return true;
15161 return tree_expr_nonnegative_warnv_p (op0,
15162 strict_overflow_p);
15163 }
15164 }
15165 else if (INTEGRAL_TYPE_P (outer_type))
15166 {
15167 if (TREE_CODE (inner_type) == REAL_TYPE)
15168 return tree_expr_nonnegative_warnv_p (op0,
15169 strict_overflow_p);
15170 if (INTEGRAL_TYPE_P (inner_type))
15171 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15172 && TYPE_UNSIGNED (inner_type);
15173 }
15174 }
15175 break;
15176
15177 default:
15178 return tree_simple_nonnegative_warnv_p (code, type);
15179 }
15180
15181 /* We don't know sign of `t', so be conservative and return false. */
15182 return false;
15183 }
15184
15185 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15186 value is based on the assumption that signed overflow is undefined,
15187 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15188 *STRICT_OVERFLOW_P. */
15189
15190 bool
15191 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15192 tree op1, bool *strict_overflow_p)
15193 {
15194 if (TYPE_UNSIGNED (type))
15195 return true;
15196
15197 switch (code)
15198 {
15199 case POINTER_PLUS_EXPR:
15200 case PLUS_EXPR:
15201 if (FLOAT_TYPE_P (type))
15202 return (tree_expr_nonnegative_warnv_p (op0,
15203 strict_overflow_p)
15204 && tree_expr_nonnegative_warnv_p (op1,
15205 strict_overflow_p));
15206
15207 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15208 both unsigned and at least 2 bits shorter than the result. */
15209 if (TREE_CODE (type) == INTEGER_TYPE
15210 && TREE_CODE (op0) == NOP_EXPR
15211 && TREE_CODE (op1) == NOP_EXPR)
15212 {
15213 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15214 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15215 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15216 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15217 {
15218 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15219 TYPE_PRECISION (inner2)) + 1;
15220 return prec < TYPE_PRECISION (type);
15221 }
15222 }
15223 break;
15224
15225 case MULT_EXPR:
15226 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15227 {
15228 /* x * x is always non-negative for floating point x,
15229 or when signed overflow is undefined. */
15230 if (operand_equal_p (op0, op1, 0)
15231 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15232 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15233 {
15234 if (TYPE_OVERFLOW_UNDEFINED (type))
15235 *strict_overflow_p = true;
15236 return true;
15237 }
15238 }
15239
15240 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15241 both unsigned and the sum of their precisions is less than that of the result. */
15242 if (TREE_CODE (type) == INTEGER_TYPE
15243 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15244 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15245 {
15246 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15247 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15248 : TREE_TYPE (op0);
15249 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15250 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15251 : TREE_TYPE (op1);
15252
15253 bool unsigned0 = TYPE_UNSIGNED (inner0);
15254 bool unsigned1 = TYPE_UNSIGNED (inner1);
15255
15256 if (TREE_CODE (op0) == INTEGER_CST)
15257 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15258
15259 if (TREE_CODE (op1) == INTEGER_CST)
15260 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15261
15262 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15263 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15264 {
15265 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15266 ? tree_int_cst_min_precision (op0, UNSIGNED)
15267 : TYPE_PRECISION (inner0);
15268
15269 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15270 ? tree_int_cst_min_precision (op1, UNSIGNED)
15271 : TYPE_PRECISION (inner1);
15272
15273 return precision0 + precision1 < TYPE_PRECISION (type);
15274 }
15275 }
15276 return false;
15277
15278 case BIT_AND_EXPR:
15279 case MAX_EXPR:
15280 return (tree_expr_nonnegative_warnv_p (op0,
15281 strict_overflow_p)
15282 || tree_expr_nonnegative_warnv_p (op1,
15283 strict_overflow_p));
15284
15285 case BIT_IOR_EXPR:
15286 case BIT_XOR_EXPR:
15287 case MIN_EXPR:
15288 case RDIV_EXPR:
15289 case TRUNC_DIV_EXPR:
15290 case CEIL_DIV_EXPR:
15291 case FLOOR_DIV_EXPR:
15292 case ROUND_DIV_EXPR:
15293 return (tree_expr_nonnegative_warnv_p (op0,
15294 strict_overflow_p)
15295 && tree_expr_nonnegative_warnv_p (op1,
15296 strict_overflow_p));
15297
15298 case TRUNC_MOD_EXPR:
15299 case CEIL_MOD_EXPR:
15300 case FLOOR_MOD_EXPR:
15301 case ROUND_MOD_EXPR:
15302 return tree_expr_nonnegative_warnv_p (op0,
15303 strict_overflow_p);
15304 default:
15305 return tree_simple_nonnegative_warnv_p (code, type);
15306 }
15307
15308 /* We don't know sign of `t', so be conservative and return false. */
15309 return false;
15310 }
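
/* A sketch of the zero-extension rule above, assuming A and B are
   hypothetical unsigned char variables widened to int: the sum is at
   most 255 + 255 = 510, far below 2^31, and the precision test
   MAX (8, 8) + 1 = 9 < 32 certifies that, so the PLUS is known
   non-negative. */
#if 0
  tree a_w = fold_convert (integer_type_node, a);
  tree b_w = fold_convert (integer_type_node, b);
  bool strict = false;
  bool nonneg = tree_binary_nonnegative_warnv_p (PLUS_EXPR,
						 integer_type_node,
						 a_w, b_w, &strict);
#endif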
15311
15312 /* Return true if T is known to be non-negative. If the return
15313 value is based on the assumption that signed overflow is undefined,
15314 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15315 *STRICT_OVERFLOW_P. */
15316
15317 bool
15318 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15319 {
15320 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15321 return true;
15322
15323 switch (TREE_CODE (t))
15324 {
15325 case INTEGER_CST:
15326 return tree_int_cst_sgn (t) >= 0;
15327
15328 case REAL_CST:
15329 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15330
15331 case FIXED_CST:
15332 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15333
15334 case COND_EXPR:
15335 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15336 strict_overflow_p)
15337 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15338 strict_overflow_p));
15339 default:
15340 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15341 TREE_TYPE (t));
15342 }
15343 /* We don't know sign of `t', so be conservative and return false. */
15344 return false;
15345 }
15346
15347 /* Return true if T is known to be non-negative. If the return
15348 value is based on the assumption that signed overflow is undefined,
15349 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15350 *STRICT_OVERFLOW_P. */
15351
15352 bool
15353 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15354 tree arg0, tree arg1, bool *strict_overflow_p)
15355 {
15356 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15357 switch (DECL_FUNCTION_CODE (fndecl))
15358 {
15359 CASE_FLT_FN (BUILT_IN_ACOS):
15360 CASE_FLT_FN (BUILT_IN_ACOSH):
15361 CASE_FLT_FN (BUILT_IN_CABS):
15362 CASE_FLT_FN (BUILT_IN_COSH):
15363 CASE_FLT_FN (BUILT_IN_ERFC):
15364 CASE_FLT_FN (BUILT_IN_EXP):
15365 CASE_FLT_FN (BUILT_IN_EXP10):
15366 CASE_FLT_FN (BUILT_IN_EXP2):
15367 CASE_FLT_FN (BUILT_IN_FABS):
15368 CASE_FLT_FN (BUILT_IN_FDIM):
15369 CASE_FLT_FN (BUILT_IN_HYPOT):
15370 CASE_FLT_FN (BUILT_IN_POW10):
15371 CASE_INT_FN (BUILT_IN_FFS):
15372 CASE_INT_FN (BUILT_IN_PARITY):
15373 CASE_INT_FN (BUILT_IN_POPCOUNT):
15374 CASE_INT_FN (BUILT_IN_CLZ):
15375 CASE_INT_FN (BUILT_IN_CLRSB):
15376 case BUILT_IN_BSWAP32:
15377 case BUILT_IN_BSWAP64:
15378 /* Always true. */
15379 return true;
15380
15381 CASE_FLT_FN (BUILT_IN_SQRT):
15382 /* sqrt(-0.0) is -0.0. */
15383 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15384 return true;
15385 return tree_expr_nonnegative_warnv_p (arg0,
15386 strict_overflow_p);
15387
15388 CASE_FLT_FN (BUILT_IN_ASINH):
15389 CASE_FLT_FN (BUILT_IN_ATAN):
15390 CASE_FLT_FN (BUILT_IN_ATANH):
15391 CASE_FLT_FN (BUILT_IN_CBRT):
15392 CASE_FLT_FN (BUILT_IN_CEIL):
15393 CASE_FLT_FN (BUILT_IN_ERF):
15394 CASE_FLT_FN (BUILT_IN_EXPM1):
15395 CASE_FLT_FN (BUILT_IN_FLOOR):
15396 CASE_FLT_FN (BUILT_IN_FMOD):
15397 CASE_FLT_FN (BUILT_IN_FREXP):
15398 CASE_FLT_FN (BUILT_IN_ICEIL):
15399 CASE_FLT_FN (BUILT_IN_IFLOOR):
15400 CASE_FLT_FN (BUILT_IN_IRINT):
15401 CASE_FLT_FN (BUILT_IN_IROUND):
15402 CASE_FLT_FN (BUILT_IN_LCEIL):
15403 CASE_FLT_FN (BUILT_IN_LDEXP):
15404 CASE_FLT_FN (BUILT_IN_LFLOOR):
15405 CASE_FLT_FN (BUILT_IN_LLCEIL):
15406 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15407 CASE_FLT_FN (BUILT_IN_LLRINT):
15408 CASE_FLT_FN (BUILT_IN_LLROUND):
15409 CASE_FLT_FN (BUILT_IN_LRINT):
15410 CASE_FLT_FN (BUILT_IN_LROUND):
15411 CASE_FLT_FN (BUILT_IN_MODF):
15412 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15413 CASE_FLT_FN (BUILT_IN_RINT):
15414 CASE_FLT_FN (BUILT_IN_ROUND):
15415 CASE_FLT_FN (BUILT_IN_SCALB):
15416 CASE_FLT_FN (BUILT_IN_SCALBLN):
15417 CASE_FLT_FN (BUILT_IN_SCALBN):
15418 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15419 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15420 CASE_FLT_FN (BUILT_IN_SINH):
15421 CASE_FLT_FN (BUILT_IN_TANH):
15422 CASE_FLT_FN (BUILT_IN_TRUNC):
15423 /* True if the 1st argument is nonnegative. */
15424 return tree_expr_nonnegative_warnv_p (arg0,
15425 strict_overflow_p);
15426
15427 CASE_FLT_FN (BUILT_IN_FMAX):
15428 /* True if the 1st OR 2nd arguments are nonnegative. */
15429 return (tree_expr_nonnegative_warnv_p (arg0,
15430 strict_overflow_p)
15431 || (tree_expr_nonnegative_warnv_p (arg1,
15432 strict_overflow_p)));
15433
15434 CASE_FLT_FN (BUILT_IN_FMIN):
15435 /* True if the 1st AND 2nd arguments are nonnegative. */
15436 return (tree_expr_nonnegative_warnv_p (arg0,
15437 strict_overflow_p)
15438 && (tree_expr_nonnegative_warnv_p (arg1,
15439 strict_overflow_p)));
15440
15441 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15442 /* True if the 2nd argument is nonnegative. */
15443 return tree_expr_nonnegative_warnv_p (arg1,
15444 strict_overflow_p);
15445
15446 CASE_FLT_FN (BUILT_IN_POWI):
15447 /* True if the 1st argument is nonnegative or the second
15448 argument is an even integer. */
15449 if (TREE_CODE (arg1) == INTEGER_CST
15450 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15451 return true;
15452 return tree_expr_nonnegative_warnv_p (arg0,
15453 strict_overflow_p);
15454
15455 CASE_FLT_FN (BUILT_IN_POW):
15456 /* True if the 1st argument is nonnegative or the second
15457 argument is an even integer valued real. */
15458 if (TREE_CODE (arg1) == REAL_CST)
15459 {
15460 REAL_VALUE_TYPE c;
15461 HOST_WIDE_INT n;
15462
15463 c = TREE_REAL_CST (arg1);
15464 n = real_to_integer (&c);
15465 if ((n & 1) == 0)
15466 {
15467 REAL_VALUE_TYPE cint;
15468 real_from_integer (&cint, VOIDmode, n, SIGNED);
15469 if (real_identical (&c, &cint))
15470 return true;
15471 }
15472 }
15473 return tree_expr_nonnegative_warnv_p (arg0,
15474 strict_overflow_p);
15475
15476 default:
15477 break;
15478 }
15479 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15480 type);
15481 }
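
/* For instance, under the BUILT_IN_POW rule above, pow (x, 2.0) is known
   non-negative for every x, because 2.0 is an even integer valued real,
   while pow (x, 3.0) is known non-negative only when x itself is. */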
15482
15483 /* Return true if T is known to be non-negative. If the return
15484 value is based on the assumption that signed overflow is undefined,
15485 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15486 *STRICT_OVERFLOW_P. */
15487
15488 static bool
15489 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15490 {
15491 enum tree_code code = TREE_CODE (t);
15492 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15493 return true;
15494
15495 switch (code)
15496 {
15497 case TARGET_EXPR:
15498 {
15499 tree temp = TARGET_EXPR_SLOT (t);
15500 t = TARGET_EXPR_INITIAL (t);
15501
15502 /* If the initializer is non-void, then it's a normal expression
15503 that will be assigned to the slot. */
15504 if (!VOID_TYPE_P (t))
15505 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15506
15507 /* Otherwise, the initializer sets the slot in some way. One common
15508 way is an assignment statement at the end of the initializer. */
15509 while (1)
15510 {
15511 if (TREE_CODE (t) == BIND_EXPR)
15512 t = expr_last (BIND_EXPR_BODY (t));
15513 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15514 || TREE_CODE (t) == TRY_CATCH_EXPR)
15515 t = expr_last (TREE_OPERAND (t, 0));
15516 else if (TREE_CODE (t) == STATEMENT_LIST)
15517 t = expr_last (t);
15518 else
15519 break;
15520 }
15521 if (TREE_CODE (t) == MODIFY_EXPR
15522 && TREE_OPERAND (t, 0) == temp)
15523 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15524 strict_overflow_p);
15525
15526 return false;
15527 }
15528
15529 case CALL_EXPR:
15530 {
15531 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15532 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15533
15534 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15535 get_callee_fndecl (t),
15536 arg0,
15537 arg1,
15538 strict_overflow_p);
15539 }
15540 case COMPOUND_EXPR:
15541 case MODIFY_EXPR:
15542 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15543 strict_overflow_p);
15544 case BIND_EXPR:
15545 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15546 strict_overflow_p);
15547 case SAVE_EXPR:
15548 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15549 strict_overflow_p);
15550
15551 default:
15552 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15553 TREE_TYPE (t));
15554 }
15555
15556 /* We don't know sign of `t', so be conservative and return false. */
15557 return false;
15558 }
15559
15560 /* Return true if T is known to be non-negative. If the return
15561 value is based on the assumption that signed overflow is undefined,
15562 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15563 *STRICT_OVERFLOW_P. */
15564
15565 bool
15566 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15567 {
15568 enum tree_code code;
15569 if (t == error_mark_node)
15570 return false;
15571
15572 code = TREE_CODE (t);
15573 switch (TREE_CODE_CLASS (code))
15574 {
15575 case tcc_binary:
15576 case tcc_comparison:
15577 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15578 TREE_TYPE (t),
15579 TREE_OPERAND (t, 0),
15580 TREE_OPERAND (t, 1),
15581 strict_overflow_p);
15582
15583 case tcc_unary:
15584 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15585 TREE_TYPE (t),
15586 TREE_OPERAND (t, 0),
15587 strict_overflow_p);
15588
15589 case tcc_constant:
15590 case tcc_declaration:
15591 case tcc_reference:
15592 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15593
15594 default:
15595 break;
15596 }
15597
15598 switch (code)
15599 {
15600 case TRUTH_AND_EXPR:
15601 case TRUTH_OR_EXPR:
15602 case TRUTH_XOR_EXPR:
15603 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15604 TREE_TYPE (t),
15605 TREE_OPERAND (t, 0),
15606 TREE_OPERAND (t, 1),
15607 strict_overflow_p);
15608 case TRUTH_NOT_EXPR:
15609 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15610 TREE_TYPE (t),
15611 TREE_OPERAND (t, 0),
15612 strict_overflow_p);
15613
15614 case COND_EXPR:
15615 case CONSTRUCTOR:
15616 case OBJ_TYPE_REF:
15617 case ASSERT_EXPR:
15618 case ADDR_EXPR:
15619 case WITH_SIZE_EXPR:
15620 case SSA_NAME:
15621 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15622
15623 default:
15624 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15625 }
15626 }
15627
15628 /* Return true if `t' is known to be non-negative. Handle warnings
15629 about undefined signed overflow. */
15630
15631 bool
15632 tree_expr_nonnegative_p (tree t)
15633 {
15634 bool ret, strict_overflow_p;
15635
15636 strict_overflow_p = false;
15637 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15638 if (strict_overflow_p)
15639 fold_overflow_warning (("assuming signed overflow does not occur when "
15640 "determining that expression is always "
15641 "non-negative"),
15642 WARN_STRICT_OVERFLOW_MISC);
15643 return ret;
15644 }
15645
15646
15647 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15648 For floating point we further ensure that T is not denormal.
15649 Similar logic is present in nonzero_address in rtlanal.c.
15650
15651 If the return value is based on the assumption that signed overflow
15652 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15653 change *STRICT_OVERFLOW_P. */
15654
15655 bool
15656 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15657 bool *strict_overflow_p)
15658 {
15659 switch (code)
15660 {
15661 case ABS_EXPR:
15662 return tree_expr_nonzero_warnv_p (op0,
15663 strict_overflow_p);
15664
15665 case NOP_EXPR:
15666 {
15667 tree inner_type = TREE_TYPE (op0);
15668 tree outer_type = type;
15669
15670 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15671 && tree_expr_nonzero_warnv_p (op0,
15672 strict_overflow_p));
15673 }
15674 break;
15675
15676 case NON_LVALUE_EXPR:
15677 return tree_expr_nonzero_warnv_p (op0,
15678 strict_overflow_p);
15679
15680 default:
15681 break;
15682 }
15683
15684 return false;
15685 }
15686
15687 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15688 For floating point we further ensure that T is not denormal.
15689 Similar logic is present in nonzero_address in rtlanal.c.
15690
15691 If the return value is based on the assumption that signed overflow
15692 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15693 change *STRICT_OVERFLOW_P. */
15694
15695 bool
15696 tree_binary_nonzero_warnv_p (enum tree_code code,
15697 tree type,
15698 tree op0,
15699 tree op1, bool *strict_overflow_p)
15700 {
15701 bool sub_strict_overflow_p;
15702 switch (code)
15703 {
15704 case POINTER_PLUS_EXPR:
15705 case PLUS_EXPR:
15706 if (TYPE_OVERFLOW_UNDEFINED (type))
15707 {
15708 /* With the presence of negative values it is hard
15709 to say something. */
15710 sub_strict_overflow_p = false;
15711 if (!tree_expr_nonnegative_warnv_p (op0,
15712 &sub_strict_overflow_p)
15713 || !tree_expr_nonnegative_warnv_p (op1,
15714 &sub_strict_overflow_p))
15715 return false;
15716 /* One of the operands must be positive and the other non-negative. */
15717 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15718 overflows, on a twos-complement machine the sum of two
15719 nonnegative numbers can never be zero. */
15720 return (tree_expr_nonzero_warnv_p (op0,
15721 strict_overflow_p)
15722 || tree_expr_nonzero_warnv_p (op1,
15723 strict_overflow_p));
15724 }
15725 break;
15726
15727 case MULT_EXPR:
15728 if (TYPE_OVERFLOW_UNDEFINED (type))
15729 {
15730 if (tree_expr_nonzero_warnv_p (op0,
15731 strict_overflow_p)
15732 && tree_expr_nonzero_warnv_p (op1,
15733 strict_overflow_p))
15734 {
15735 *strict_overflow_p = true;
15736 return true;
15737 }
15738 }
15739 break;
15740
15741 case MIN_EXPR:
15742 sub_strict_overflow_p = false;
15743 if (tree_expr_nonzero_warnv_p (op0,
15744 &sub_strict_overflow_p)
15745 && tree_expr_nonzero_warnv_p (op1,
15746 &sub_strict_overflow_p))
15747 {
15748 if (sub_strict_overflow_p)
15749 *strict_overflow_p = true;
/* The minimum of two nonzero operands is one of them, hence itself nonzero. */
return true;
15750 }
15751 break;
15752
15753 case MAX_EXPR:
15754 sub_strict_overflow_p = false;
15755 if (tree_expr_nonzero_warnv_p (op0,
15756 &sub_strict_overflow_p))
15757 {
15758 if (sub_strict_overflow_p)
15759 *strict_overflow_p = true;
15760
15761 /* When both operands are nonzero, then MAX must be too. */
15762 if (tree_expr_nonzero_warnv_p (op1,
15763 strict_overflow_p))
15764 return true;
15765
15766 /* MAX where operand 0 is positive is positive. */
15767 return tree_expr_nonnegative_warnv_p (op0,
15768 strict_overflow_p);
15769 }
15770 /* MAX where operand 1 is positive is positive. */
15771 else if (tree_expr_nonzero_warnv_p (op1,
15772 &sub_strict_overflow_p)
15773 && tree_expr_nonnegative_warnv_p (op1,
15774 &sub_strict_overflow_p))
15775 {
15776 if (sub_strict_overflow_p)
15777 *strict_overflow_p = true;
15778 return true;
15779 }
15780 break;
15781
15782 case BIT_IOR_EXPR:
15783 return (tree_expr_nonzero_warnv_p (op1,
15784 strict_overflow_p)
15785 || tree_expr_nonzero_warnv_p (op0,
15786 strict_overflow_p));
15787
15788 default:
15789 break;
15790 }
15791
15792 return false;
15793 }
15794
15795 /* Return true when T is an address and is known to be nonzero.
15796 For floating point we further ensure that T is not denormal.
15797 Similar logic is present in nonzero_address in rtlanal.c.
15798
15799 If the return value is based on the assumption that signed overflow
15800 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15801 change *STRICT_OVERFLOW_P. */
15802
15803 bool
15804 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15805 {
15806 bool sub_strict_overflow_p;
15807 switch (TREE_CODE (t))
15808 {
15809 case INTEGER_CST:
15810 return !integer_zerop (t);
15811
15812 case ADDR_EXPR:
15813 {
15814 tree base = TREE_OPERAND (t, 0);
15815
15816 if (!DECL_P (base))
15817 base = get_base_address (base);
15818
15819 if (!base)
15820 return false;
15821
15822 /* For objects in the symbol table, check whether we know they are non-zero.
15823 Don't do anything for variables and functions before the symtab is built;
15824 it is quite possible that they will be declared weak later. */
15825 if (DECL_P (base) && decl_in_symtab_p (base))
15826 {
15827 struct symtab_node *symbol;
15828
15829 symbol = symtab_node::get_create (base);
15830 if (symbol)
15831 return symbol->nonzero_address ();
15832 else
15833 return false;
15834 }
15835
15836 /* Function local objects are never NULL. */
15837 if (DECL_P (base)
15838 && (DECL_CONTEXT (base)
15839 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15840 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15841 return true;
15842
15843 /* Constants are never weak. */
15844 if (CONSTANT_CLASS_P (base))
15845 return true;
15846
15847 return false;
15848 }
15849
15850 case COND_EXPR:
15851 sub_strict_overflow_p = false;
15852 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15853 &sub_strict_overflow_p)
15854 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15855 &sub_strict_overflow_p))
15856 {
15857 if (sub_strict_overflow_p)
15858 *strict_overflow_p = true;
15859 return true;
15860 }
15861 break;
15862
15863 default:
15864 break;
15865 }
15866 return false;
15867 }
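
/* A sketch of the ADDR_EXPR case above, with I_DECL a hypothetical
   automatic VAR_DECL of the current function: its address is a
   function-local object and therefore never NULL, so a test such as
   "int i; if (&i) ..." folds to true. */
#if 0
  bool strict = false;
  bool nz = tree_single_nonzero_warnv_p (build_fold_addr_expr (i_decl),
					 &strict);	/* nz == true */
#endif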
15868
15869 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15870 attempt to fold the expression to a constant without modifying TYPE,
15871 OP0 or OP1.
15872
15873 If the expression could be simplified to a constant, then return
15874 the constant. If the expression would not be simplified to a
15875 constant, then return NULL_TREE. */
15876
15877 tree
15878 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15879 {
15880 tree tem = fold_binary (code, type, op0, op1);
15881 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15882 }
15883
15884 /* Given the components of a unary expression CODE, TYPE and OP0,
15885 attempt to fold the expression to a constant without modifying
15886 TYPE or OP0.
15887
15888 If the expression could be simplified to a constant, then return
15889 the constant. If the expression would not be simplified to a
15890 constant, then return NULL_TREE. */
15891
15892 tree
15893 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15894 {
15895 tree tem = fold_unary (code, type, op0);
15896 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15897 }
15898
15899 /* If EXP represents referencing an element in a constant string
15900 (either via pointer arithmetic or array indexing), return the
15901 tree representing the value accessed, otherwise return NULL. */
15902
15903 tree
15904 fold_read_from_constant_string (tree exp)
15905 {
15906 if ((TREE_CODE (exp) == INDIRECT_REF
15907 || TREE_CODE (exp) == ARRAY_REF)
15908 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15909 {
15910 tree exp1 = TREE_OPERAND (exp, 0);
15911 tree index;
15912 tree string;
15913 location_t loc = EXPR_LOCATION (exp);
15914
15915 if (TREE_CODE (exp) == INDIRECT_REF)
15916 string = string_constant (exp1, &index);
15917 else
15918 {
15919 tree low_bound = array_ref_low_bound (exp);
15920 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15921
15922 /* Optimize the special-case of a zero lower bound.
15923
15924 We convert the low_bound to sizetype to avoid some problems
15925 with constant folding. (E.g. suppose the lower bound is 1,
15926 and its mode is QI. Without the conversion, (ARRAY
15927 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15928 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15929 if (! integer_zerop (low_bound))
15930 index = size_diffop_loc (loc, index,
15931 fold_convert_loc (loc, sizetype, low_bound));
15932
15933 string = exp1;
15934 }
15935
15936 if (string
15937 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15938 && TREE_CODE (string) == STRING_CST
15939 && TREE_CODE (index) == INTEGER_CST
15940 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15941 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15942 == MODE_INT)
15943 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15944 return build_int_cst_type (TREE_TYPE (exp),
15945 (TREE_STRING_POINTER (string)
15946 [TREE_INT_CST_LOW (index)]));
15947 }
15948 return NULL;
15949 }
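
/* For instance, the INTEGER_TYPE read "abc"[1] -- an ARRAY_REF of a
   STRING_CST with a zero lower bound and an in-range constant index --
   passes all of the checks above and folds to the character constant
   'b', i.e. 98. */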
15950
15951 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15952 an integer constant, real, or fixed-point constant.
15953
15954 TYPE is the type of the result. */
15955
15956 static tree
15957 fold_negate_const (tree arg0, tree type)
15958 {
15959 tree t = NULL_TREE;
15960
15961 switch (TREE_CODE (arg0))
15962 {
15963 case INTEGER_CST:
15964 {
15965 bool overflow;
15966 wide_int val = wi::neg (arg0, &overflow);
15967 t = force_fit_type (type, val, 1,
15968 (overflow | TREE_OVERFLOW (arg0))
15969 && !TYPE_UNSIGNED (type));
15970 break;
15971 }
15972
15973 case REAL_CST:
15974 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15975 break;
15976
15977 case FIXED_CST:
15978 {
15979 FIXED_VALUE_TYPE f;
15980 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15981 &(TREE_FIXED_CST (arg0)), NULL,
15982 TYPE_SATURATING (type));
15983 t = build_fixed (type, f);
15984 /* Propagate overflow flags. */
15985 if (overflow_p | TREE_OVERFLOW (arg0))
15986 TREE_OVERFLOW (t) = 1;
15987 break;
15988 }
15989
15990 default:
15991 gcc_unreachable ();
15992 }
15993
15994 return t;
15995 }
15996
15997 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15998 an integer constant or real constant.
15999
16000 TYPE is the type of the result. */
16001
16002 tree
16003 fold_abs_const (tree arg0, tree type)
16004 {
16005 tree t = NULL_TREE;
16006
16007 switch (TREE_CODE (arg0))
16008 {
16009 case INTEGER_CST:
16010 {
16011 /* If the value is unsigned or non-negative, then the absolute value
16012 is the same as the ordinary value. */
16013 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
16014 t = arg0;
16015
16016 /* If the value is negative, then the absolute value is
16017 its negation. */
16018 else
16019 {
16020 bool overflow;
16021 wide_int val = wi::neg (arg0, &overflow);
16022 t = force_fit_type (type, val, -1,
16023 overflow | TREE_OVERFLOW (arg0));
16024 }
16025 }
16026 break;
16027
16028 case REAL_CST:
16029 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16030 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16031 else
16032 t = arg0;
16033 break;
16034
16035 default:
16036 gcc_unreachable ();
16037 }
16038
16039 return t;
16040 }
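
/* Worked example for the INTEGER_CST branch: with a 32-bit signed type,
   fold_abs_const on INT_MIN takes the negation path; -INT_MIN does not
   fit in the type, so force_fit_type returns INT_MIN again with
   TREE_OVERFLOW set on the result. */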
16041
16042 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16043 constant. TYPE is the type of the result. */
16044
16045 static tree
16046 fold_not_const (const_tree arg0, tree type)
16047 {
16048 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16049
16050 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
16051 }
16052
16053 /* Given CODE, a relational operator, the target type, TYPE and two
16054 constant operands OP0 and OP1, return the result of the
16055 relational operation. If the result is not a compile time
16056 constant, then return NULL_TREE. */
16057
16058 static tree
16059 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16060 {
16061 int result, invert;
16062
16063 /* From here on, the only cases we handle are when the result is
16064 known to be a constant. */
16065
16066 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16067 {
16068 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16069 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16070
16071 /* Handle the cases where either operand is a NaN. */
16072 if (real_isnan (c0) || real_isnan (c1))
16073 {
16074 switch (code)
16075 {
16076 case EQ_EXPR:
16077 case ORDERED_EXPR:
16078 result = 0;
16079 break;
16080
16081 case NE_EXPR:
16082 case UNORDERED_EXPR:
16083 case UNLT_EXPR:
16084 case UNLE_EXPR:
16085 case UNGT_EXPR:
16086 case UNGE_EXPR:
16087 case UNEQ_EXPR:
16088 result = 1;
16089 break;
16090
16091 case LT_EXPR:
16092 case LE_EXPR:
16093 case GT_EXPR:
16094 case GE_EXPR:
16095 case LTGT_EXPR:
16096 if (flag_trapping_math)
16097 return NULL_TREE;
16098 result = 0;
16099 break;
16100
16101 default:
16102 gcc_unreachable ();
16103 }
16104
16105 return constant_boolean_node (result, type);
16106 }
16107
16108 return constant_boolean_node (real_compare (code, c0, c1), type);
16109 }
16110
16111 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16112 {
16113 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16114 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16115 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16116 }
16117
16118 /* Handle equality/inequality of complex constants. */
16119 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16120 {
16121 tree rcond = fold_relational_const (code, type,
16122 TREE_REALPART (op0),
16123 TREE_REALPART (op1));
16124 tree icond = fold_relational_const (code, type,
16125 TREE_IMAGPART (op0),
16126 TREE_IMAGPART (op1));
16127 if (code == EQ_EXPR)
16128 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16129 else if (code == NE_EXPR)
16130 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16131 else
16132 return NULL_TREE;
16133 }
16134
16135 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16136 {
16137 unsigned count = VECTOR_CST_NELTS (op0);
16138 tree *elts = XALLOCAVEC (tree, count);
16139 gcc_assert (VECTOR_CST_NELTS (op1) == count
16140 && TYPE_VECTOR_SUBPARTS (type) == count);
16141
16142 for (unsigned i = 0; i < count; i++)
16143 {
16144 tree elem_type = TREE_TYPE (type);
16145 tree elem0 = VECTOR_CST_ELT (op0, i);
16146 tree elem1 = VECTOR_CST_ELT (op1, i);
16147
16148 tree tem = fold_relational_const (code, elem_type,
16149 elem0, elem1);
16150
16151 if (tem == NULL_TREE)
16152 return NULL_TREE;
16153
16154 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16155 }
16156
16157 return build_vector (type, elts);
16158 }
16159
16160 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16161
16162 To compute GT, swap the arguments and do LT.
16163 To compute GE, do LT and invert the result.
16164 To compute LE, swap the arguments, do LT and invert the result.
16165 To compute NE, do EQ and invert the result.
16166
16167 Therefore, the code below must handle only EQ and LT. */
16168
16169 if (code == LE_EXPR || code == GT_EXPR)
16170 {
16171 tree tem = op0;
16172 op0 = op1;
16173 op1 = tem;
16174 code = swap_tree_comparison (code);
16175 }
16176
16177 /* Note that it is safe to invert for real values here because we
16178 have already handled the one case where it matters. */
16179
16180 invert = 0;
16181 if (code == NE_EXPR || code == GE_EXPR)
16182 {
16183 invert = 1;
16184 code = invert_tree_comparison (code, false);
16185 }
16186
16187 /* Compute a result for LT or EQ if the args permit;
16188 otherwise return NULL_TREE. */
16189 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16190 {
16191 if (code == EQ_EXPR)
16192 result = tree_int_cst_equal (op0, op1);
16193 else
16194 result = tree_int_cst_lt (op0, op1);
16195 }
16196 else
16197 return NULL_TREE;
16198
16199 if (invert)
16200 result ^= 1;
16201 return constant_boolean_node (result, type);
16202 }
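
/* A sketch of the canonicalization above: GE is computed as LT plus an
   inversion, so 3 >= 2 becomes !(3 < 2), which is true. */
#if 0
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree res = fold_relational_const (GE_EXPR, boolean_type_node,
				    three, two);
  gcc_checking_assert (integer_onep (res));
#endif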
16203
16204 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16205 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16206 itself. */
16207
16208 tree
16209 fold_build_cleanup_point_expr (tree type, tree expr)
16210 {
16211 /* If the expression does not have side effects then we don't have to wrap
16212 it with a cleanup point expression. */
16213 if (!TREE_SIDE_EFFECTS (expr))
16214 return expr;
16215
16216 /* If the expression is a return, check whether the expression inside the
16217 return, or the right-hand side of the modify expression inside the return,
16218 is free of side effects. If either is, we don't need to wrap the expression
16219 in a cleanup point expression. Note we don't check the left-hand side of
16220 the modify because it should always be the return decl. */
16221 if (TREE_CODE (expr) == RETURN_EXPR)
16222 {
16223 tree op = TREE_OPERAND (expr, 0);
16224 if (!op || !TREE_SIDE_EFFECTS (op))
16225 return expr;
16226 op = TREE_OPERAND (op, 1);
16227 if (!TREE_SIDE_EFFECTS (op))
16228 return expr;
16229 }
16230
16231 return build1 (CLEANUP_POINT_EXPR, type, expr);
16232 }
16233
16234 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16235 of an indirection through OP0, or NULL_TREE if no simplification is
16236 possible. */
16237
16238 tree
16239 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16240 {
16241 tree sub = op0;
16242 tree subtype;
16243
16244 STRIP_NOPS (sub);
16245 subtype = TREE_TYPE (sub);
16246 if (!POINTER_TYPE_P (subtype))
16247 return NULL_TREE;
16248
16249 if (TREE_CODE (sub) == ADDR_EXPR)
16250 {
16251 tree op = TREE_OPERAND (sub, 0);
16252 tree optype = TREE_TYPE (op);
16253 /* *&CONST_DECL -> to the value of the const decl. */
16254 if (TREE_CODE (op) == CONST_DECL)
16255 return DECL_INITIAL (op);
16256 /* *&p => p; make sure to handle *&"str"[cst] here. */
16257 if (type == optype)
16258 {
16259 tree fop = fold_read_from_constant_string (op);
16260 if (fop)
16261 return fop;
16262 else
16263 return op;
16264 }
16265 /* *(foo *)&fooarray => fooarray[0] */
16266 else if (TREE_CODE (optype) == ARRAY_TYPE
16267 && type == TREE_TYPE (optype)
16268 && (!in_gimple_form
16269 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16270 {
16271 tree type_domain = TYPE_DOMAIN (optype);
16272 tree min_val = size_zero_node;
16273 if (type_domain && TYPE_MIN_VALUE (type_domain))
16274 min_val = TYPE_MIN_VALUE (type_domain);
16275 if (in_gimple_form
16276 && TREE_CODE (min_val) != INTEGER_CST)
16277 return NULL_TREE;
16278 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16279 NULL_TREE, NULL_TREE);
16280 }
16281 /* *(foo *)&complexfoo => __real__ complexfoo */
16282 else if (TREE_CODE (optype) == COMPLEX_TYPE
16283 && type == TREE_TYPE (optype))
16284 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16285 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16286 else if (TREE_CODE (optype) == VECTOR_TYPE
16287 && type == TREE_TYPE (optype))
16288 {
16289 tree part_width = TYPE_SIZE (type);
16290 tree index = bitsize_int (0);
16291 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16292 }
16293 }
16294
16295 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16296 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16297 {
16298 tree op00 = TREE_OPERAND (sub, 0);
16299 tree op01 = TREE_OPERAND (sub, 1);
16300
16301 STRIP_NOPS (op00);
16302 if (TREE_CODE (op00) == ADDR_EXPR)
16303 {
16304 tree op00type;
16305 op00 = TREE_OPERAND (op00, 0);
16306 op00type = TREE_TYPE (op00);
16307
16308 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16309 if (TREE_CODE (op00type) == VECTOR_TYPE
16310 && type == TREE_TYPE (op00type))
16311 {
16312 HOST_WIDE_INT offset = tree_to_shwi (op01);
16313 tree part_width = TYPE_SIZE (type);
16314 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16315 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16316 tree index = bitsize_int (indexi);
16317
16318 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16319 return fold_build3_loc (loc,
16320 BIT_FIELD_REF, type, op00,
16321 part_width, index);
16322
16323 }
16324 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16325 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16326 && type == TREE_TYPE (op00type))
16327 {
16328 tree size = TYPE_SIZE_UNIT (type);
16329 if (tree_int_cst_equal (size, op01))
16330 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16331 }
16332 /* ((foo *)&fooarray)[1] => fooarray[1] */
16333 else if (TREE_CODE (op00type) == ARRAY_TYPE
16334 && type == TREE_TYPE (op00type))
16335 {
16336 tree type_domain = TYPE_DOMAIN (op00type);
16337 tree min_val = size_zero_node;
16338 if (type_domain && TYPE_MIN_VALUE (type_domain))
16339 min_val = TYPE_MIN_VALUE (type_domain);
16340 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16341 TYPE_SIZE_UNIT (type));
16342 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16343 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16344 NULL_TREE, NULL_TREE);
16345 }
16346 }
16347 }
16348
16349 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16350 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16351 && type == TREE_TYPE (TREE_TYPE (subtype))
16352 && (!in_gimple_form
16353 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16354 {
16355 tree type_domain;
16356 tree min_val = size_zero_node;
16357 sub = build_fold_indirect_ref_loc (loc, sub);
16358 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16359 if (type_domain && TYPE_MIN_VALUE (type_domain))
16360 min_val = TYPE_MIN_VALUE (type_domain);
16361 if (in_gimple_form
16362 && TREE_CODE (min_val) != INTEGER_CST)
16363 return NULL_TREE;
16364 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16365 NULL_TREE);
16366 }
16367
16368 return NULL_TREE;
16369 }
16370
16371 /* Builds an expression for an indirection through T, simplifying some
16372 cases. */
16373
16374 tree
16375 build_fold_indirect_ref_loc (location_t loc, tree t)
16376 {
16377 tree type = TREE_TYPE (TREE_TYPE (t));
16378 tree sub = fold_indirect_ref_1 (loc, type, t);
16379
16380 if (sub)
16381 return sub;
16382
16383 return build1_loc (loc, INDIRECT_REF, type, t);
16384 }
16385
16386 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16387
16388 tree
16389 fold_indirect_ref_loc (location_t loc, tree t)
16390 {
16391 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16392
16393 if (sub)
16394 return sub;
16395 else
16396 return t;
16397 }
16398
16399 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16400 whose result is ignored. The type of the returned tree need not be
16401 the same as the original expression. */
16402
16403 tree
16404 fold_ignored_result (tree t)
16405 {
16406 if (!TREE_SIDE_EFFECTS (t))
16407 return integer_zero_node;
16408
16409 for (;;)
16410 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16411 {
16412 case tcc_unary:
16413 t = TREE_OPERAND (t, 0);
16414 break;
16415
16416 case tcc_binary:
16417 case tcc_comparison:
16418 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16419 t = TREE_OPERAND (t, 0);
16420 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16421 t = TREE_OPERAND (t, 1);
16422 else
16423 return t;
16424 break;
16425
16426 case tcc_expression:
16427 switch (TREE_CODE (t))
16428 {
16429 case COMPOUND_EXPR:
16430 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16431 return t;
16432 t = TREE_OPERAND (t, 0);
16433 break;
16434
16435 case COND_EXPR:
16436 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16437 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16438 return t;
16439 t = TREE_OPERAND (t, 0);
16440 break;
16441
16442 default:
16443 return t;
16444 }
16445 break;
16446
16447 default:
16448 return t;
16449 }
16450 }
16451
16452 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16453
16454 tree
16455 round_up_loc (location_t loc, tree value, unsigned int divisor)
16456 {
16457 tree div = NULL_TREE;
16458
16459 if (divisor == 1)
16460 return value;
16461
16462 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16463 have to do anything. Only do this when we are not given a const,
16464 because in that case, this check is more expensive than just
16465 doing it. */
16466 if (TREE_CODE (value) != INTEGER_CST)
16467 {
16468 div = build_int_cst (TREE_TYPE (value), divisor);
16469
16470 if (multiple_of_p (TREE_TYPE (value), value, div))
16471 return value;
16472 }
16473
16474 /* If divisor is a power of two, simplify this to bit manipulation. */
16475 if (divisor == (divisor & -divisor))
16476 {
16477 if (TREE_CODE (value) == INTEGER_CST)
16478 {
16479 wide_int val = value;
16480 bool overflow_p;
16481
16482 if ((val & (divisor - 1)) == 0)
16483 return value;
16484
16485 overflow_p = TREE_OVERFLOW (value);
16486 val &= ~(divisor - 1);
16487 val += divisor;
16488 if (val == 0)
16489 overflow_p = true;
16490
16491 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16492 }
16493 else
16494 {
16495 tree t;
16496
16497 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16498 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16499 t = build_int_cst (TREE_TYPE (value), -divisor);
16500 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16501 }
16502 }
16503 else
16504 {
16505 if (!div)
16506 div = build_int_cst (TREE_TYPE (value), divisor);
16507 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16508 value = size_binop_loc (loc, MULT_EXPR, value, div);
16509 }
16510
16511 return value;
16512 }
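
/* Worked example of the power-of-two paths above, with VALUE 13 and
   DIVISOR 8: in the constant case 13 & 7 != 0, so 13 & ~7 = 8 and then
   8 + 8 = 16; in the non-constant case (VALUE + 7) & -8 computes the
   same bound, since -8 == ~7 in two's complement. */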
16513
16514 /* Likewise, but round down. */
16515
16516 tree
16517 round_down_loc (location_t loc, tree value, int divisor)
16518 {
16519 tree div = NULL_TREE;
16520
16521 gcc_assert (divisor > 0);
16522 if (divisor == 1)
16523 return value;
16524
16525 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16526 have to do anything. Only do this when we are not given a const,
16527 because in that case, this check is more expensive than just
16528 doing it. */
16529 if (TREE_CODE (value) != INTEGER_CST)
16530 {
16531 div = build_int_cst (TREE_TYPE (value), divisor);
16532
16533 if (multiple_of_p (TREE_TYPE (value), value, div))
16534 return value;
16535 }
16536
16537 /* If divisor is a power of two, simplify this to bit manipulation. */
16538 if (divisor == (divisor & -divisor))
16539 {
16540 tree t;
16541
16542 t = build_int_cst (TREE_TYPE (value), -divisor);
16543 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16544 }
16545 else
16546 {
16547 if (!div)
16548 div = build_int_cst (TREE_TYPE (value), divisor);
16549 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16550 value = size_binop_loc (loc, MULT_EXPR, value, div);
16551 }
16552
16553 return value;
16554 }
16555
16556 /* Returns the pointer to the base of the object addressed by EXP and
16557 extracts the information about the offset of the access, storing it
16558 in *PBITPOS and *POFFSET. */
16559
16560 static tree
16561 split_address_to_core_and_offset (tree exp,
16562 HOST_WIDE_INT *pbitpos, tree *poffset)
16563 {
16564 tree core;
16565 enum machine_mode mode;
16566 int unsignedp, volatilep;
16567 HOST_WIDE_INT bitsize;
16568 location_t loc = EXPR_LOCATION (exp);
16569
16570 if (TREE_CODE (exp) == ADDR_EXPR)
16571 {
16572 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16573 poffset, &mode, &unsignedp, &volatilep,
16574 false);
16575 core = build_fold_addr_expr_loc (loc, core);
16576 }
16577 else
16578 {
16579 core = exp;
16580 *pbitpos = 0;
16581 *poffset = NULL_TREE;
16582 }
16583
16584 return core;
16585 }
16586
16587 /* Returns true if addresses of E1 and E2 differ by a constant, false
16588 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16589
16590 bool
16591 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16592 {
16593 tree core1, core2;
16594 HOST_WIDE_INT bitpos1, bitpos2;
16595 tree toffset1, toffset2, tdiff, type;
16596
16597 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16598 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16599
16600 if (bitpos1 % BITS_PER_UNIT != 0
16601 || bitpos2 % BITS_PER_UNIT != 0
16602 || !operand_equal_p (core1, core2, 0))
16603 return false;
16604
16605 if (toffset1 && toffset2)
16606 {
16607 type = TREE_TYPE (toffset1);
16608 if (type != TREE_TYPE (toffset2))
16609 toffset2 = fold_convert (type, toffset2);
16610
16611 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16612 if (!cst_and_fits_in_hwi (tdiff))
16613 return false;
16614
16615 *diff = int_cst_value (tdiff);
16616 }
16617 else if (toffset1 || toffset2)
16618 {
16619 /* If only one of the offsets is non-constant, the difference cannot
16620 be a constant. */
16621 return false;
16622 }
16623 else
16624 *diff = 0;
16625
16626 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16627 return true;
16628 }
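
/* A sketch with A a hypothetical VAR_DECL of type int[10]: &A[3] and
   &A[1] share the core &A, their bit positions are 96 and 32, and so
   the difference folds to (96 - 32) / 8 = 8 bytes, i.e. two 4-byte
   elements. */
#if 0
  HOST_WIDE_INT diff;
  if (ptr_difference_const (e1, e2, &diff))	/* e1 = &A[3], e2 = &A[1] */
    gcc_checking_assert (diff == 8);
#endif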
16629
16630 /* Simplify the floating point expression EXP when the sign of the
16631 result is not significant. Return NULL_TREE if no simplification
16632 is possible. */
16633
16634 tree
16635 fold_strip_sign_ops (tree exp)
16636 {
16637 tree arg0, arg1;
16638 location_t loc = EXPR_LOCATION (exp);
16639
16640 switch (TREE_CODE (exp))
16641 {
16642 case ABS_EXPR:
16643 case NEGATE_EXPR:
16644 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16645 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16646
16647 case MULT_EXPR:
16648 case RDIV_EXPR:
16649 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16650 return NULL_TREE;
16651 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16652 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16653 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16654 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16655 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16656 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16657 break;
16658
16659 case COMPOUND_EXPR:
16660 arg0 = TREE_OPERAND (exp, 0);
16661 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16662 if (arg1)
16663 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16664 break;
16665
16666 case COND_EXPR:
16667 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16668 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16669 if (arg0 || arg1)
16670 return fold_build3_loc (loc,
16671 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16672 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16673 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16674 break;
16675
16676 case CALL_EXPR:
16677 {
16678 const enum built_in_function fcode = builtin_mathfn_code (exp);
16679 switch (fcode)
16680 {
16681 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16682 /* Strip copysign function call, return the 1st argument. */
16683 arg0 = CALL_EXPR_ARG (exp, 0);
16684 arg1 = CALL_EXPR_ARG (exp, 1);
16685 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16686
16687 default:
16688 /* Strip sign ops from the argument of "odd" math functions. */
16689 if (negate_mathfn_p (fcode))
16690 {
16691 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16692 if (arg0)
16693 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16694 }
16695 break;
16696 }
16697 }
16698 break;
16699
16700 default:
16701 break;
16702 }
16703 return NULL_TREE;
16704 }
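
/* A sketch of the NEGATE_EXPR handling above, with X and Y hypothetical
   double-typed trees and sign-dependent rounding not honored (the
   default): when only the magnitude of the result matters, for example
   under fabs, -X * Y simplifies to X * Y. */
#if 0
  tree stripped = fold_strip_sign_ops (build2 (MULT_EXPR, double_type_node,
					       build1 (NEGATE_EXPR,
						       double_type_node, x),
					       y));
  /* stripped is x * y, or NULL_TREE if nothing could be stripped. */
#endif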