1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "basic-block.h"
62 #include "tree-ssa-alias.h"
63 #include "internal-fn.h"
64 #include "tree-eh.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
71
72 /* Nonzero if we are folding constants inside an initializer; zero
73 otherwise. */
74 int folding_initializer = 0;
75
76 /* The following constants represent a bit based encoding of GCC's
77 comparison operators. This encoding simplifies transformations
78 on relational comparison operators, such as AND and OR. */
79 enum comparison_code {
80 COMPCODE_FALSE = 0,
81 COMPCODE_LT = 1,
82 COMPCODE_EQ = 2,
83 COMPCODE_LE = 3,
84 COMPCODE_GT = 4,
85 COMPCODE_LTGT = 5,
86 COMPCODE_GE = 6,
87 COMPCODE_ORD = 7,
88 COMPCODE_UNORD = 8,
89 COMPCODE_UNLT = 9,
90 COMPCODE_UNEQ = 10,
91 COMPCODE_UNLE = 11,
92 COMPCODE_UNGT = 12,
93 COMPCODE_NE = 13,
94 COMPCODE_UNGE = 14,
95 COMPCODE_TRUE = 15
96 };
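/* For illustration: each code above is a bit mask over the four
   primitive outcomes LT (1), EQ (2), GT (4) and UNORD (8), so
   combining comparisons is bitwise arithmetic.  For example:

     COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ                  (1 | 2 == 3)
     COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD (13)

   and ANDing two comparisons of the same operands intersects their
   masks:

     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ.  */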
97
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
110 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (location_t, tree, tree,
112 HOST_WIDE_INT, HOST_WIDE_INT, int);
113 static tree optimize_bit_field_compare (location_t, enum tree_code,
114 tree, tree, tree);
115 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
116 HOST_WIDE_INT *,
117 enum machine_mode *, int *, int *,
118 tree *, tree *);
119 static tree sign_bit_p (tree, const_tree);
120 static int simple_operand_p (const_tree);
121 static bool simple_operand_p_2 (tree);
122 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
123 static tree range_predecessor (tree);
124 static tree range_successor (tree);
125 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
126 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
127 static tree unextend (tree, int, int, tree);
128 static tree optimize_minmax_comparison (location_t, enum tree_code,
129 tree, tree, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
132 static tree fold_binary_op_with_conditional_arg (location_t,
133 enum tree_code, tree,
134 tree, tree,
135 tree, tree, int);
136 static tree fold_mathfn_compare (location_t,
137 enum built_in_function, enum tree_code,
138 tree, tree, tree);
139 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
140 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
141 static bool reorder_operands_p (const_tree, const_tree);
142 static tree fold_negate_const (tree, tree);
143 static tree fold_not_const (const_tree, tree);
144 static tree fold_relational_const (enum tree_code, tree, tree, tree);
145 static tree fold_convert_const (enum tree_code, tree, tree);
146
147 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
148 Otherwise, return LOC. */
149
150 static location_t
151 expr_location_or (tree t, location_t loc)
152 {
153 location_t tloc = EXPR_LOCATION (t);
154 return tloc == UNKNOWN_LOCATION ? loc : tloc;
155 }
156
157 /* Similar to protected_set_expr_location, but never modify x in place;
158 if the location can and needs to be set, unshare it. */
159
160 static inline tree
161 protected_set_expr_location_unshare (tree x, location_t loc)
162 {
163 if (CAN_HAVE_LOCATION_P (x)
164 && EXPR_LOCATION (x) != loc
165 && !(TREE_CODE (x) == SAVE_EXPR
166 || TREE_CODE (x) == TARGET_EXPR
167 || TREE_CODE (x) == BIND_EXPR))
168 {
169 x = copy_node (x);
170 SET_EXPR_LOCATION (x, loc);
171 }
172 return x;
173 }
174 \f
175 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
176 division and returns the quotient. Otherwise returns
177 NULL_TREE. */
178
179 tree
180 div_if_zero_remainder (const_tree arg1, const_tree arg2)
181 {
182 widest_int quo;
183
184 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
185 SIGNED, &quo))
186 return wide_int_to_tree (TREE_TYPE (arg1), quo);
187
188 return NULL_TREE;
189 }
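/* For example: with INTEGER_CST operands ARG1 = 12 and ARG2 = 4 this
   returns the INTEGER_CST 3, while for ARG1 = 13 and ARG2 = 4 the
   remainder is nonzero and the result is NULL_TREE.  */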
190 \f
191 /* This is nonzero if we should defer warnings about undefined
192 overflow. This facility exists because these warnings are a
193 special case. The code to estimate loop iterations does not want
194 to issue any warnings, since it works with expressions which do not
195 occur in user code. Various bits of cleanup code call fold(), but
196 only use the result if it has certain characteristics (e.g., is a
197 constant); that code only wants to issue a warning if the result is
198 used. */
199
200 static int fold_deferring_overflow_warnings;
201
202 /* If a warning about undefined overflow is deferred, this is the
203 warning. Note that this may cause us to turn two warnings into
204 one, but that is fine since it is sufficient to only give one
205 warning per expression. */
206
207 static const char* fold_deferred_overflow_warning;
208
209 /* If a warning about undefined overflow is deferred, this is the
210 level at which the warning should be emitted. */
211
212 static enum warn_strict_overflow_code fold_deferred_overflow_code;
213
214 /* Start deferring overflow warnings. We could use a stack here to
215 permit nested calls, but at present it is not necessary. */
216
217 void
218 fold_defer_overflow_warnings (void)
219 {
220 ++fold_deferring_overflow_warnings;
221 }
222
223 /* Stop deferring overflow warnings. If there is a pending warning,
224 and ISSUE is true, then issue the warning if appropriate. STMT is
225 the statement with which the warning should be associated (used for
226 location information); STMT may be NULL. CODE is the level of the
227 warning--a warn_strict_overflow_code value. This function will use
228 the smaller of CODE and the deferred code when deciding whether to
229 issue the warning. CODE may be zero to mean to always use the
230 deferred code. */
231
232 void
233 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
234 {
235 const char *warnmsg;
236 location_t locus;
237
238 gcc_assert (fold_deferring_overflow_warnings > 0);
239 --fold_deferring_overflow_warnings;
240 if (fold_deferring_overflow_warnings > 0)
241 {
242 if (fold_deferred_overflow_warning != NULL
243 && code != 0
244 && code < (int) fold_deferred_overflow_code)
245 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
246 return;
247 }
248
249 warnmsg = fold_deferred_overflow_warning;
250 fold_deferred_overflow_warning = NULL;
251
252 if (!issue || warnmsg == NULL)
253 return;
254
255 if (gimple_no_warning_p (stmt))
256 return;
257
258 /* Use the smallest code level when deciding to issue the
259 warning. */
260 if (code == 0 || code > (int) fold_deferred_overflow_code)
261 code = fold_deferred_overflow_code;
262
263 if (!issue_strict_overflow_warning (code))
264 return;
265
266 if (stmt == NULL)
267 locus = input_location;
268 else
269 locus = gimple_location (stmt);
270 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
271 }
272
273 /* Stop deferring overflow warnings, ignoring any deferred
274 warnings. */
275
276 void
277 fold_undefer_and_ignore_overflow_warnings (void)
278 {
279 fold_undefer_overflow_warnings (false, NULL, 0);
280 }
281
282 /* Whether we are deferring overflow warnings. */
283
284 bool
285 fold_deferring_overflow_warnings_p (void)
286 {
287 return fold_deferring_overflow_warnings > 0;
288 }
289
290 /* This is called when we fold something based on the fact that signed
291 overflow is undefined. */
292
293 static void
294 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
295 {
296 if (fold_deferring_overflow_warnings > 0)
297 {
298 if (fold_deferred_overflow_warning == NULL
299 || wc < fold_deferred_overflow_code)
300 {
301 fold_deferred_overflow_warning = gmsgid;
302 fold_deferred_overflow_code = wc;
303 }
304 }
305 else if (issue_strict_overflow_warning (wc))
306 warning (OPT_Wstrict_overflow, gmsgid);
307 }
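/* A minimal usage sketch of the deferral facility above (hypothetical
   caller, not code from this file): fold speculatively and only emit
   the warning if the folded result is actually used.

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     bool used = res != NULL_TREE && TREE_CODE (res) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, stmt, 0);

   Any warning recorded while folding is then emitted only when USED is
   true, at the location of STMT.  */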
308 \f
309 /* Return true if the built-in mathematical function specified by CODE
310 is odd, i.e. -f(x) == f(-x). */
311
312 static bool
313 negate_mathfn_p (enum built_in_function code)
314 {
315 switch (code)
316 {
317 CASE_FLT_FN (BUILT_IN_ASIN):
318 CASE_FLT_FN (BUILT_IN_ASINH):
319 CASE_FLT_FN (BUILT_IN_ATAN):
320 CASE_FLT_FN (BUILT_IN_ATANH):
321 CASE_FLT_FN (BUILT_IN_CASIN):
322 CASE_FLT_FN (BUILT_IN_CASINH):
323 CASE_FLT_FN (BUILT_IN_CATAN):
324 CASE_FLT_FN (BUILT_IN_CATANH):
325 CASE_FLT_FN (BUILT_IN_CBRT):
326 CASE_FLT_FN (BUILT_IN_CPROJ):
327 CASE_FLT_FN (BUILT_IN_CSIN):
328 CASE_FLT_FN (BUILT_IN_CSINH):
329 CASE_FLT_FN (BUILT_IN_CTAN):
330 CASE_FLT_FN (BUILT_IN_CTANH):
331 CASE_FLT_FN (BUILT_IN_ERF):
332 CASE_FLT_FN (BUILT_IN_LLROUND):
333 CASE_FLT_FN (BUILT_IN_LROUND):
334 CASE_FLT_FN (BUILT_IN_ROUND):
335 CASE_FLT_FN (BUILT_IN_SIN):
336 CASE_FLT_FN (BUILT_IN_SINH):
337 CASE_FLT_FN (BUILT_IN_TAN):
338 CASE_FLT_FN (BUILT_IN_TANH):
339 CASE_FLT_FN (BUILT_IN_TRUNC):
340 return true;
341
342 CASE_FLT_FN (BUILT_IN_LLRINT):
343 CASE_FLT_FN (BUILT_IN_LRINT):
344 CASE_FLT_FN (BUILT_IN_NEARBYINT):
345 CASE_FLT_FN (BUILT_IN_RINT):
346 return !flag_rounding_math;
347
348 default:
349 break;
350 }
351 return false;
352 }
353
354 /* Check whether we may negate an integer constant T without causing
355 overflow. */
356
357 bool
358 may_negate_without_overflow_p (const_tree t)
359 {
360 tree type;
361
362 gcc_assert (TREE_CODE (t) == INTEGER_CST);
363
364 type = TREE_TYPE (t);
365 if (TYPE_UNSIGNED (type))
366 return false;
367
368 return !wi::only_sign_bit_p (t);
369 }
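/* For example, in a 32-bit signed type the only value that cannot be
   negated is -2147483648 (only the sign bit set): its negation is not
   representable.  Every other value, including INT_MAX, negates
   without overflow, which is exactly what the wi::only_sign_bit_p
   check above captures.  */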
370
371 /* Determine whether an expression T can be cheaply negated using
372 the function negate_expr without introducing undefined overflow. */
373
374 static bool
375 negate_expr_p (tree t)
376 {
377 tree type;
378
379 if (t == 0)
380 return false;
381
382 type = TREE_TYPE (t);
383
384 STRIP_SIGN_NOPS (t);
385 switch (TREE_CODE (t))
386 {
387 case INTEGER_CST:
388 if (TYPE_OVERFLOW_WRAPS (type))
389 return true;
390
391 /* Check that -CST will not overflow type. */
392 return may_negate_without_overflow_p (t);
393 case BIT_NOT_EXPR:
394 return (INTEGRAL_TYPE_P (type)
395 && TYPE_OVERFLOW_WRAPS (type));
396
397 case FIXED_CST:
398 case NEGATE_EXPR:
399 return true;
400
401 case REAL_CST:
402 /* We want to canonicalize to positive real constants. Pretend
403 that only negative ones can be easily negated. */
404 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
405
406 case COMPLEX_CST:
407 return negate_expr_p (TREE_REALPART (t))
408 && negate_expr_p (TREE_IMAGPART (t));
409
410 case VECTOR_CST:
411 {
412 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
413 return true;
414
415 int count = TYPE_VECTOR_SUBPARTS (type), i;
416
417 for (i = 0; i < count; i++)
418 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
419 return false;
420
421 return true;
422 }
423
424 case COMPLEX_EXPR:
425 return negate_expr_p (TREE_OPERAND (t, 0))
426 && negate_expr_p (TREE_OPERAND (t, 1));
427
428 case CONJ_EXPR:
429 return negate_expr_p (TREE_OPERAND (t, 0));
430
431 case PLUS_EXPR:
432 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
433 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
434 return false;
435 /* -(A + B) -> (-B) - A. */
436 if (negate_expr_p (TREE_OPERAND (t, 1))
437 && reorder_operands_p (TREE_OPERAND (t, 0),
438 TREE_OPERAND (t, 1)))
439 return true;
440 /* -(A + B) -> (-A) - B. */
441 return negate_expr_p (TREE_OPERAND (t, 0));
442
443 case MINUS_EXPR:
444 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
445 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
446 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
447 && reorder_operands_p (TREE_OPERAND (t, 0),
448 TREE_OPERAND (t, 1));
449
450 case MULT_EXPR:
451 if (TYPE_UNSIGNED (TREE_TYPE (t)))
452 break;
453
454 /* Fall through. */
455
456 case RDIV_EXPR:
457 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
458 return negate_expr_p (TREE_OPERAND (t, 1))
459 || negate_expr_p (TREE_OPERAND (t, 0));
460 break;
461
462 case TRUNC_DIV_EXPR:
463 case ROUND_DIV_EXPR:
464 case EXACT_DIV_EXPR:
465 /* In general we can't negate A / B, because if A is INT_MIN and
466 B is 1, we may turn this into INT_MIN / -1 which is undefined
467 and actually traps on some architectures. But if overflow is
468 undefined, we can negate, because - (INT_MIN / 1) is an
469 overflow. */
470 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
471 {
472 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
473 break;
474 /* If overflow is undefined then we have to be careful because
475 we ask whether it's ok to associate the negate with the
476 division which is not ok for example for
477 -((a - b) / c) where (-(a - b)) / c may invoke undefined
478 overflow because of negating INT_MIN. So do not use
479 negate_expr_p here but open-code the two important cases. */
480 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
481 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
482 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
483 return true;
484 }
485 else if (negate_expr_p (TREE_OPERAND (t, 0)))
486 return true;
487 return negate_expr_p (TREE_OPERAND (t, 1));
488
489 case NOP_EXPR:
490 /* Negate -((double)float) as (double)(-float). */
491 if (TREE_CODE (type) == REAL_TYPE)
492 {
493 tree tem = strip_float_extensions (t);
494 if (tem != t)
495 return negate_expr_p (tem);
496 }
497 break;
498
499 case CALL_EXPR:
500 /* Negate -f(x) as f(-x). */
501 if (negate_mathfn_p (builtin_mathfn_code (t)))
502 return negate_expr_p (CALL_EXPR_ARG (t, 0));
503 break;
504
505 case RSHIFT_EXPR:
506 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
507 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
508 {
509 tree op1 = TREE_OPERAND (t, 1);
510 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
511 return true;
512 }
513 break;
514
515 default:
516 break;
517 }
518 return false;
519 }
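/* A concrete instance of the division caveat above: with 32-bit ints,
   INT_MIN / 1 is well defined, but rewriting - (INT_MIN / 1) as
   INT_MIN / -1 overflows and traps on targets whose divide instruction
   faults on overflow.  Likewise -((a - b) / c) must not become
   (-(a - b)) / c, since a - b may be INT_MIN even though the original
   expression is fine; hence the open-coded checks above instead of a
   plain negate_expr_p on operand 0.  */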
520
521 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
522 simplification is possible.
523 If negate_expr_p would return true for T, NULL_TREE will never be
524 returned. */
525
526 static tree
527 fold_negate_expr (location_t loc, tree t)
528 {
529 tree type = TREE_TYPE (t);
530 tree tem;
531
532 switch (TREE_CODE (t))
533 {
534 /* Convert - (~A) to A + 1. */
535 case BIT_NOT_EXPR:
536 if (INTEGRAL_TYPE_P (type))
537 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
538 build_one_cst (type));
539 break;
540
541 case INTEGER_CST:
542 tem = fold_negate_const (t, type);
543 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
544 || !TYPE_OVERFLOW_TRAPS (type))
545 return tem;
546 break;
547
548 case REAL_CST:
549 tem = fold_negate_const (t, type);
550 /* Two's complement FP formats, such as c4x, may overflow. */
551 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
552 return tem;
553 break;
554
555 case FIXED_CST:
556 tem = fold_negate_const (t, type);
557 return tem;
558
559 case COMPLEX_CST:
560 {
561 tree rpart = negate_expr (TREE_REALPART (t));
562 tree ipart = negate_expr (TREE_IMAGPART (t));
563
564 if ((TREE_CODE (rpart) == REAL_CST
565 && TREE_CODE (ipart) == REAL_CST)
566 || (TREE_CODE (rpart) == INTEGER_CST
567 && TREE_CODE (ipart) == INTEGER_CST))
568 return build_complex (type, rpart, ipart);
569 }
570 break;
571
572 case VECTOR_CST:
573 {
574 int count = TYPE_VECTOR_SUBPARTS (type), i;
575 tree *elts = XALLOCAVEC (tree, count);
576
577 for (i = 0; i < count; i++)
578 {
579 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
580 if (elts[i] == NULL_TREE)
581 return NULL_TREE;
582 }
583
584 return build_vector (type, elts);
585 }
586
587 case COMPLEX_EXPR:
588 if (negate_expr_p (t))
589 return fold_build2_loc (loc, COMPLEX_EXPR, type,
590 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
591 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
592 break;
593
594 case CONJ_EXPR:
595 if (negate_expr_p (t))
596 return fold_build1_loc (loc, CONJ_EXPR, type,
597 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
598 break;
599
600 case NEGATE_EXPR:
601 return TREE_OPERAND (t, 0);
602
603 case PLUS_EXPR:
604 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
605 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
606 {
607 /* -(A + B) -> (-B) - A. */
608 if (negate_expr_p (TREE_OPERAND (t, 1))
609 && reorder_operands_p (TREE_OPERAND (t, 0),
610 TREE_OPERAND (t, 1)))
611 {
612 tem = negate_expr (TREE_OPERAND (t, 1));
613 return fold_build2_loc (loc, MINUS_EXPR, type,
614 tem, TREE_OPERAND (t, 0));
615 }
616
617 /* -(A + B) -> (-A) - B. */
618 if (negate_expr_p (TREE_OPERAND (t, 0)))
619 {
620 tem = negate_expr (TREE_OPERAND (t, 0));
621 return fold_build2_loc (loc, MINUS_EXPR, type,
622 tem, TREE_OPERAND (t, 1));
623 }
624 }
625 break;
626
627 case MINUS_EXPR:
628 /* - (A - B) -> B - A */
629 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
630 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
631 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
632 return fold_build2_loc (loc, MINUS_EXPR, type,
633 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
634 break;
635
636 case MULT_EXPR:
637 if (TYPE_UNSIGNED (type))
638 break;
639
640 /* Fall through. */
641
642 case RDIV_EXPR:
643 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
644 {
645 tem = TREE_OPERAND (t, 1);
646 if (negate_expr_p (tem))
647 return fold_build2_loc (loc, TREE_CODE (t), type,
648 TREE_OPERAND (t, 0), negate_expr (tem));
649 tem = TREE_OPERAND (t, 0);
650 if (negate_expr_p (tem))
651 return fold_build2_loc (loc, TREE_CODE (t), type,
652 negate_expr (tem), TREE_OPERAND (t, 1));
653 }
654 break;
655
656 case TRUNC_DIV_EXPR:
657 case ROUND_DIV_EXPR:
658 case EXACT_DIV_EXPR:
659 /* In general we can't negate A / B, because if A is INT_MIN and
660 B is 1, we may turn this into INT_MIN / -1 which is undefined
661 and actually traps on some architectures. But if overflow is
662 undefined, we can negate, because - (INT_MIN / 1) is an
663 overflow. */
664 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
665 {
666 const char * const warnmsg = G_("assuming signed overflow does not "
667 "occur when negating a division");
668 tem = TREE_OPERAND (t, 1);
669 if (negate_expr_p (tem))
670 {
671 if (INTEGRAL_TYPE_P (type)
672 && (TREE_CODE (tem) != INTEGER_CST
673 || integer_onep (tem)))
674 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
675 return fold_build2_loc (loc, TREE_CODE (t), type,
676 TREE_OPERAND (t, 0), negate_expr (tem));
677 }
678 /* If overflow is undefined then we have to be careful because
679 we ask whether it's ok to associate the negate with the
680 division which is not ok for example for
681 -((a - b) / c) where (-(a - b)) / c may invoke undefined
682 overflow because of negating INT_MIN. So do not use
683 negate_expr_p here but open-code the two important cases. */
684 tem = TREE_OPERAND (t, 0);
685 if ((INTEGRAL_TYPE_P (type)
686 && (TREE_CODE (tem) == NEGATE_EXPR
687 || (TREE_CODE (tem) == INTEGER_CST
688 && may_negate_without_overflow_p (tem))))
689 || !INTEGRAL_TYPE_P (type))
690 return fold_build2_loc (loc, TREE_CODE (t), type,
691 negate_expr (tem), TREE_OPERAND (t, 1));
692 }
693 break;
694
695 case NOP_EXPR:
696 /* Convert -((double)float) into (double)(-float). */
697 if (TREE_CODE (type) == REAL_TYPE)
698 {
699 tem = strip_float_extensions (t);
700 if (tem != t && negate_expr_p (tem))
701 return fold_convert_loc (loc, type, negate_expr (tem));
702 }
703 break;
704
705 case CALL_EXPR:
706 /* Negate -f(x) as f(-x). */
707 if (negate_mathfn_p (builtin_mathfn_code (t))
708 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
709 {
710 tree fndecl, arg;
711
712 fndecl = get_callee_fndecl (t);
713 arg = negate_expr (CALL_EXPR_ARG (t, 0));
714 return build_call_expr_loc (loc, fndecl, 1, arg);
715 }
716 break;
717
718 case RSHIFT_EXPR:
719 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
720 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
721 {
722 tree op1 = TREE_OPERAND (t, 1);
723 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
724 {
725 tree ntype = TYPE_UNSIGNED (type)
726 ? signed_type_for (type)
727 : unsigned_type_for (type);
728 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
729 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
730 return fold_convert_loc (loc, type, temp);
731 }
732 }
733 break;
734
735 default:
736 break;
737 }
738
739 return NULL_TREE;
740 }
741
742 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
743 negated in a simpler way. Also allow T to be NULL_TREE, in which case
744 NULL_TREE is returned. */
745
746 static tree
747 negate_expr (tree t)
748 {
749 tree type, tem;
750 location_t loc;
751
752 if (t == NULL_TREE)
753 return NULL_TREE;
754
755 loc = EXPR_LOCATION (t);
756 type = TREE_TYPE (t);
757 STRIP_SIGN_NOPS (t);
758
759 tem = fold_negate_expr (loc, t);
760 if (!tem)
761 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
762 return fold_convert_loc (loc, type, tem);
763 }
764 \f
765 /* Split a tree IN into constant, literal, and variable parts that could be
766 combined with CODE to make IN. "constant" means an expression with
767 TREE_CONSTANT but that isn't an actual constant. CODE must be a
768 commutative arithmetic operation. Store the constant part into *CONP,
769 the literal in *LITP and return the variable part. If a part isn't
770 present, set it to null. If the tree does not decompose in this way,
771 return the entire tree as the variable part and the other parts as null.
772
773 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
774 case, we negate an operand that was subtracted, except when it is a
775 literal, for which we use *MINUS_LITP instead.
776
777 If NEGATE_P is true, we are negating all of IN, again except a literal
778 for which we use *MINUS_LITP instead.
779
780 If IN is itself a literal or constant, return it as appropriate.
781
782 Note that we do not guarantee that any of the three values will be the
783 same type as IN, but they will have the same signedness and mode. */
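/* A worked example: splitting IN = a - 5 with CODE == PLUS_EXPR and
   NEGATE_P == 0 finds the literal 5 on the subtracted side, giving
   *LITP = 0, *MINUS_LITP = 5, *CONP = 0, and `a' as the returned
   variable part.  Splitting IN = b + c where `c' is TREE_CONSTANT but
   not a literal instead sets *CONP = c and returns `b'.  */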
784
785 static tree
786 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
787 tree *minus_litp, int negate_p)
788 {
789 tree var = 0;
790
791 *conp = 0;
792 *litp = 0;
793 *minus_litp = 0;
794
795 /* Strip any conversions that don't change the machine mode or signedness. */
796 STRIP_SIGN_NOPS (in);
797
798 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
799 || TREE_CODE (in) == FIXED_CST)
800 *litp = in;
801 else if (TREE_CODE (in) == code
802 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
803 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
804 /* We can associate addition and subtraction together (even
805 though the C standard doesn't say so) for integers because
806 the value is not affected. For reals, the value might be
807 affected, so we can't. */
808 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
809 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
810 {
811 tree op0 = TREE_OPERAND (in, 0);
812 tree op1 = TREE_OPERAND (in, 1);
813 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
814 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
815
816 /* First see if either of the operands is a literal, then a constant. */
817 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
818 || TREE_CODE (op0) == FIXED_CST)
819 *litp = op0, op0 = 0;
820 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
821 || TREE_CODE (op1) == FIXED_CST)
822 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
823
824 if (op0 != 0 && TREE_CONSTANT (op0))
825 *conp = op0, op0 = 0;
826 else if (op1 != 0 && TREE_CONSTANT (op1))
827 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
828
829 /* If we haven't dealt with either operand, this is not a case we can
830 decompose. Otherwise, VAR is either of the ones remaining, if any. */
831 if (op0 != 0 && op1 != 0)
832 var = in;
833 else if (op0 != 0)
834 var = op0;
835 else
836 var = op1, neg_var_p = neg1_p;
837
838 /* Now do any needed negations. */
839 if (neg_litp_p)
840 *minus_litp = *litp, *litp = 0;
841 if (neg_conp_p)
842 *conp = negate_expr (*conp);
843 if (neg_var_p)
844 var = negate_expr (var);
845 }
846 else if (TREE_CODE (in) == BIT_NOT_EXPR
847 && code == PLUS_EXPR)
848 {
849 /* -X - 1 is folded to ~X; undo that here. */
850 *minus_litp = build_one_cst (TREE_TYPE (in));
851 var = negate_expr (TREE_OPERAND (in, 0));
852 }
853 else if (TREE_CONSTANT (in))
854 *conp = in;
855 else
856 var = in;
857
858 if (negate_p)
859 {
860 if (*litp)
861 *minus_litp = *litp, *litp = 0;
862 else if (*minus_litp)
863 *litp = *minus_litp, *minus_litp = 0;
864 *conp = negate_expr (*conp);
865 var = negate_expr (var);
866 }
867
868 return var;
869 }
870
871 /* Re-associate trees split by the above function. T1 and T2 are
872 either expressions to associate or null. Return the new
873 expression, if any. LOC is the location of the new expression. If
874 we build an operation, do it in TYPE and with CODE. */
875
876 static tree
877 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
878 {
879 if (t1 == 0)
880 return t2;
881 else if (t2 == 0)
882 return t1;
883
884 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
885 try to fold this since we will have infinite recursion. But do
886 deal with any NEGATE_EXPRs. */
887 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
888 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
889 {
890 if (code == PLUS_EXPR)
891 {
892 if (TREE_CODE (t1) == NEGATE_EXPR)
893 return build2_loc (loc, MINUS_EXPR, type,
894 fold_convert_loc (loc, type, t2),
895 fold_convert_loc (loc, type,
896 TREE_OPERAND (t1, 0)));
897 else if (TREE_CODE (t2) == NEGATE_EXPR)
898 return build2_loc (loc, MINUS_EXPR, type,
899 fold_convert_loc (loc, type, t1),
900 fold_convert_loc (loc, type,
901 TREE_OPERAND (t2, 0)));
902 else if (integer_zerop (t2))
903 return fold_convert_loc (loc, type, t1);
904 }
905 else if (code == MINUS_EXPR)
906 {
907 if (integer_zerop (t2))
908 return fold_convert_loc (loc, type, t1);
909 }
910
911 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
912 fold_convert_loc (loc, type, t2));
913 }
914
915 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
916 fold_convert_loc (loc, type, t2));
917 }
918 \f
919 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
920 for use in int_const_binop, size_binop and size_diffop. */
921
922 static bool
923 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
924 {
925 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
926 return false;
927 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
928 return false;
929
930 switch (code)
931 {
932 case LSHIFT_EXPR:
933 case RSHIFT_EXPR:
934 case LROTATE_EXPR:
935 case RROTATE_EXPR:
936 return true;
937
938 default:
939 break;
940 }
941
942 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
943 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
944 && TYPE_MODE (type1) == TYPE_MODE (type2);
945 }
946
947
948 /* Combine two integer constants ARG1 and ARG2 under operation CODE
949 to produce a new constant. Return NULL_TREE if we don't know how
950 to evaluate CODE at compile-time. */
951
952 static tree
953 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
954 int overflowable)
955 {
956 wide_int res;
957 tree t;
958 tree type = TREE_TYPE (arg1);
959 signop sign = TYPE_SIGN (type);
960 bool overflow = false;
961
962 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
963 TYPE_SIGN (TREE_TYPE (parg2)));
964
965 switch (code)
966 {
967 case BIT_IOR_EXPR:
968 res = wi::bit_or (arg1, arg2);
969 break;
970
971 case BIT_XOR_EXPR:
972 res = wi::bit_xor (arg1, arg2);
973 break;
974
975 case BIT_AND_EXPR:
976 res = wi::bit_and (arg1, arg2);
977 break;
978
979 case RSHIFT_EXPR:
980 case LSHIFT_EXPR:
981 if (wi::neg_p (arg2))
982 {
983 arg2 = -arg2;
984 if (code == RSHIFT_EXPR)
985 code = LSHIFT_EXPR;
986 else
987 code = RSHIFT_EXPR;
988 }
989
990 if (code == RSHIFT_EXPR)
991 /* It's unclear from the C standard whether shifts can overflow.
992 The following code ignores overflow; perhaps a C standard
993 interpretation ruling is needed. */
994 res = wi::rshift (arg1, arg2, sign);
995 else
996 res = wi::lshift (arg1, arg2);
997 break;
998
999 case RROTATE_EXPR:
1000 case LROTATE_EXPR:
1001 if (wi::neg_p (arg2))
1002 {
1003 arg2 = -arg2;
1004 if (code == RROTATE_EXPR)
1005 code = LROTATE_EXPR;
1006 else
1007 code = RROTATE_EXPR;
1008 }
1009
1010 if (code == RROTATE_EXPR)
1011 res = wi::rrotate (arg1, arg2);
1012 else
1013 res = wi::lrotate (arg1, arg2);
1014 break;
1015
1016 case PLUS_EXPR:
1017 res = wi::add (arg1, arg2, sign, &overflow);
1018 break;
1019
1020 case MINUS_EXPR:
1021 res = wi::sub (arg1, arg2, sign, &overflow);
1022 break;
1023
1024 case MULT_EXPR:
1025 res = wi::mul (arg1, arg2, sign, &overflow);
1026 break;
1027
1028 case MULT_HIGHPART_EXPR:
1029 res = wi::mul_high (arg1, arg2, sign);
1030 break;
1031
1032 case TRUNC_DIV_EXPR:
1033 case EXACT_DIV_EXPR:
1034 if (arg2 == 0)
1035 return NULL_TREE;
1036 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1037 break;
1038
1039 case FLOOR_DIV_EXPR:
1040 if (arg2 == 0)
1041 return NULL_TREE;
1042 res = wi::div_floor (arg1, arg2, sign, &overflow);
1043 break;
1044
1045 case CEIL_DIV_EXPR:
1046 if (arg2 == 0)
1047 return NULL_TREE;
1048 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1049 break;
1050
1051 case ROUND_DIV_EXPR:
1052 if (arg2 == 0)
1053 return NULL_TREE;
1054 res = wi::div_round (arg1, arg2, sign, &overflow);
1055 break;
1056
1057 case TRUNC_MOD_EXPR:
1058 if (arg2 == 0)
1059 return NULL_TREE;
1060 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1061 break;
1062
1063 case FLOOR_MOD_EXPR:
1064 if (arg2 == 0)
1065 return NULL_TREE;
1066 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1067 break;
1068
1069 case CEIL_MOD_EXPR:
1070 if (arg2 == 0)
1071 return NULL_TREE;
1072 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1073 break;
1074
1075 case ROUND_MOD_EXPR:
1076 if (arg2 == 0)
1077 return NULL_TREE;
1078 res = wi::mod_round (arg1, arg2, sign, &overflow);
1079 break;
1080
1081 case MIN_EXPR:
1082 res = wi::min (arg1, arg2, sign);
1083 break;
1084
1085 case MAX_EXPR:
1086 res = wi::max (arg1, arg2, sign);
1087 break;
1088
1089 default:
1090 return NULL_TREE;
1091 }
1092
1093 t = force_fit_type (type, res, overflowable,
1094 (((sign == SIGNED || overflowable == -1)
1095 && overflow)
1096 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1097
1098 return t;
1099 }
1100
1101 tree
1102 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1103 {
1104 return int_const_binop_1 (code, arg1, arg2, 1);
1105 }
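/* An illustrative call (a sketch, not code from this file): folding
   the constant sum 2 + 3 in integer_type_node.

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree s = int_const_binop (PLUS_EXPR, a, b);

   S is the INTEGER_CST 5.  A signed result that wrapped comes back
   with TREE_OVERFLOW set rather than being rejected; NULL_TREE is
   reserved for codes int_const_binop_1 cannot evaluate.  */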
1106
1107 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1108 constant. We assume ARG1 and ARG2 have the same data type, or at least
1109 are the same kind of constant and the same machine mode. Return zero if
1110 combining the constants is not allowed in the current operating mode. */
1111
1112 static tree
1113 const_binop (enum tree_code code, tree arg1, tree arg2)
1114 {
1115 /* Sanity check for the recursive cases. */
1116 if (!arg1 || !arg2)
1117 return NULL_TREE;
1118
1119 STRIP_NOPS (arg1);
1120 STRIP_NOPS (arg2);
1121
1122 if (TREE_CODE (arg1) == INTEGER_CST)
1123 return int_const_binop (code, arg1, arg2);
1124
1125 if (TREE_CODE (arg1) == REAL_CST)
1126 {
1127 enum machine_mode mode;
1128 REAL_VALUE_TYPE d1;
1129 REAL_VALUE_TYPE d2;
1130 REAL_VALUE_TYPE value;
1131 REAL_VALUE_TYPE result;
1132 bool inexact;
1133 tree t, type;
1134
1135 /* The following codes are handled by real_arithmetic. */
1136 switch (code)
1137 {
1138 case PLUS_EXPR:
1139 case MINUS_EXPR:
1140 case MULT_EXPR:
1141 case RDIV_EXPR:
1142 case MIN_EXPR:
1143 case MAX_EXPR:
1144 break;
1145
1146 default:
1147 return NULL_TREE;
1148 }
1149
1150 d1 = TREE_REAL_CST (arg1);
1151 d2 = TREE_REAL_CST (arg2);
1152
1153 type = TREE_TYPE (arg1);
1154 mode = TYPE_MODE (type);
1155
1156 /* Don't perform the operation if we honor signaling NaNs and
1157 either operand is a NaN. */
1158 if (HONOR_SNANS (mode)
1159 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1160 return NULL_TREE;
1161
1162 /* Don't perform the operation if it would raise a division
1163 by zero exception. */
1164 if (code == RDIV_EXPR
1165 && REAL_VALUES_EQUAL (d2, dconst0)
1166 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1167 return NULL_TREE;
1168
1169 /* If either operand is a NaN, just return it. Otherwise, set up
1170 for floating-point trap; we return an overflow. */
1171 if (REAL_VALUE_ISNAN (d1))
1172 return arg1;
1173 else if (REAL_VALUE_ISNAN (d2))
1174 return arg2;
1175
1176 inexact = real_arithmetic (&value, code, &d1, &d2);
1177 real_convert (&result, mode, &value);
1178
1179 /* Don't constant fold this floating point operation if
1180 the result has overflowed and flag_trapping_math is set. */
1181 if (flag_trapping_math
1182 && MODE_HAS_INFINITIES (mode)
1183 && REAL_VALUE_ISINF (result)
1184 && !REAL_VALUE_ISINF (d1)
1185 && !REAL_VALUE_ISINF (d2))
1186 return NULL_TREE;
1187
1188 /* Don't constant fold this floating point operation if the
1189 result may depend upon the run-time rounding mode and
1190 flag_rounding_math is set, or if GCC's software emulation
1191 is unable to accurately represent the result. */
1192 if ((flag_rounding_math
1193 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1194 && (inexact || !real_identical (&result, &value)))
1195 return NULL_TREE;
1196
1197 t = build_real (type, result);
1198
1199 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1200 return t;
1201 }
1202
1203 if (TREE_CODE (arg1) == FIXED_CST)
1204 {
1205 FIXED_VALUE_TYPE f1;
1206 FIXED_VALUE_TYPE f2;
1207 FIXED_VALUE_TYPE result;
1208 tree t, type;
1209 int sat_p;
1210 bool overflow_p;
1211
1212 /* The following codes are handled by fixed_arithmetic. */
1213 switch (code)
1214 {
1215 case PLUS_EXPR:
1216 case MINUS_EXPR:
1217 case MULT_EXPR:
1218 case TRUNC_DIV_EXPR:
1219 f2 = TREE_FIXED_CST (arg2);
1220 break;
1221
1222 case LSHIFT_EXPR:
1223 case RSHIFT_EXPR:
1224 {
1225 wide_int w2 = arg2;
1226 f2.data.high = w2.elt (1);
1227 f2.data.low = w2.elt (0);
1228 f2.mode = SImode;
1229 }
1230 break;
1231
1232 default:
1233 return NULL_TREE;
1234 }
1235
1236 f1 = TREE_FIXED_CST (arg1);
1237 type = TREE_TYPE (arg1);
1238 sat_p = TYPE_SATURATING (type);
1239 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1240 t = build_fixed (type, result);
1241 /* Propagate overflow flags. */
1242 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1243 TREE_OVERFLOW (t) = 1;
1244 return t;
1245 }
1246
1247 if (TREE_CODE (arg1) == COMPLEX_CST)
1248 {
1249 tree type = TREE_TYPE (arg1);
1250 tree r1 = TREE_REALPART (arg1);
1251 tree i1 = TREE_IMAGPART (arg1);
1252 tree r2 = TREE_REALPART (arg2);
1253 tree i2 = TREE_IMAGPART (arg2);
1254 tree real, imag;
1255
1256 switch (code)
1257 {
1258 case PLUS_EXPR:
1259 case MINUS_EXPR:
1260 real = const_binop (code, r1, r2);
1261 imag = const_binop (code, i1, i2);
1262 break;
1263
1264 case MULT_EXPR:
1265 if (COMPLEX_FLOAT_TYPE_P (type))
1266 return do_mpc_arg2 (arg1, arg2, type,
1267 /* do_nonfinite= */ folding_initializer,
1268 mpc_mul);
1269
1270 real = const_binop (MINUS_EXPR,
1271 const_binop (MULT_EXPR, r1, r2),
1272 const_binop (MULT_EXPR, i1, i2));
1273 imag = const_binop (PLUS_EXPR,
1274 const_binop (MULT_EXPR, r1, i2),
1275 const_binop (MULT_EXPR, i1, r2));
1276 break;
1277
1278 case RDIV_EXPR:
1279 if (COMPLEX_FLOAT_TYPE_P (type))
1280 return do_mpc_arg2 (arg1, arg2, type,
1281 /* do_nonfinite= */ folding_initializer,
1282 mpc_div);
1283 /* Fallthru ... */
1284 case TRUNC_DIV_EXPR:
1285 case CEIL_DIV_EXPR:
1286 case FLOOR_DIV_EXPR:
1287 case ROUND_DIV_EXPR:
1288 if (flag_complex_method == 0)
1289 {
1290 /* Keep this algorithm in sync with
1291 tree-complex.c:expand_complex_div_straight().
1292
1293 Expand complex division to scalars, straightforward algorithm.
1294 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1295 t = br*br + bi*bi
1296 */
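	      /* For example, (3 + 2i) / (1 + 1i) gives t = 1*1 + 1*1 = 2,
	         real part (3*1 + 2*1) / 2 = 2.5 and imaginary part
	         (2*1 - 3*1) / 2 = -0.5, i.e. the quotient 2.5 - 0.5i.  */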
1297 tree magsquared
1298 = const_binop (PLUS_EXPR,
1299 const_binop (MULT_EXPR, r2, r2),
1300 const_binop (MULT_EXPR, i2, i2));
1301 tree t1
1302 = const_binop (PLUS_EXPR,
1303 const_binop (MULT_EXPR, r1, r2),
1304 const_binop (MULT_EXPR, i1, i2));
1305 tree t2
1306 = const_binop (MINUS_EXPR,
1307 const_binop (MULT_EXPR, i1, r2),
1308 const_binop (MULT_EXPR, r1, i2));
1309
1310 real = const_binop (code, t1, magsquared);
1311 imag = const_binop (code, t2, magsquared);
1312 }
1313 else
1314 {
1315 /* Keep this algorithm in sync with
1316 tree-complex.c:expand_complex_div_wide().
1317
1318 Expand complex division to scalars, modified algorithm to minimize
1319 overflow with wide input ranges. */
1320 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1321 fold_abs_const (r2, TREE_TYPE (type)),
1322 fold_abs_const (i2, TREE_TYPE (type)));
1323
1324 if (integer_nonzerop (compare))
1325 {
1326 /* In the TRUE branch, we compute
1327 ratio = br/bi;
1328 div = (br * ratio) + bi;
1329 tr = (ar * ratio) + ai;
1330 ti = (ai * ratio) - ar;
1331 tr = tr / div;
1332 ti = ti / div; */
1333 tree ratio = const_binop (code, r2, i2);
1334 tree div = const_binop (PLUS_EXPR, i2,
1335 const_binop (MULT_EXPR, r2, ratio));
1336 real = const_binop (MULT_EXPR, r1, ratio);
1337 real = const_binop (PLUS_EXPR, real, i1);
1338 real = const_binop (code, real, div);
1339
1340 imag = const_binop (MULT_EXPR, i1, ratio);
1341 imag = const_binop (MINUS_EXPR, imag, r1);
1342 imag = const_binop (code, imag, div);
1343 }
1344 else
1345 {
1346 /* In the FALSE branch, we compute
1347 ratio = bi/br;
1348 div = (bi * ratio) + br;
1349 tr = (ai * ratio) + ar;
1350 ti = ai - (ar * ratio);
1351 tr = tr / div;
1352 ti = ti / div; */
1353 tree ratio = const_binop (code, i2, r2);
1354 tree div = const_binop (PLUS_EXPR, r2,
1355 const_binop (MULT_EXPR, i2, ratio));
1356
1357 real = const_binop (MULT_EXPR, i1, ratio);
1358 real = const_binop (PLUS_EXPR, real, r1);
1359 real = const_binop (code, real, div);
1360
1361 imag = const_binop (MULT_EXPR, r1, ratio);
1362 imag = const_binop (MINUS_EXPR, i1, imag);
1363 imag = const_binop (code, imag, div);
1364 }
1365 }
1366 break;
1367
1368 default:
1369 return NULL_TREE;
1370 }
1371
1372 if (real && imag)
1373 return build_complex (type, real, imag);
1374 }
1375
1376 if (TREE_CODE (arg1) == VECTOR_CST
1377 && TREE_CODE (arg2) == VECTOR_CST)
1378 {
1379 tree type = TREE_TYPE (arg1);
1380 int count = TYPE_VECTOR_SUBPARTS (type), i;
1381 tree *elts = XALLOCAVEC (tree, count);
1382
1383 for (i = 0; i < count; i++)
1384 {
1385 tree elem1 = VECTOR_CST_ELT (arg1, i);
1386 tree elem2 = VECTOR_CST_ELT (arg2, i);
1387
1388 elts[i] = const_binop (code, elem1, elem2);
1389
1390 /* It is possible that const_binop cannot handle the given
1391 code and returns NULL_TREE. */
1392 if (elts[i] == NULL_TREE)
1393 return NULL_TREE;
1394 }
1395
1396 return build_vector (type, elts);
1397 }
1398
1399 /* Shifts allow a scalar offset for a vector. */
1400 if (TREE_CODE (arg1) == VECTOR_CST
1401 && TREE_CODE (arg2) == INTEGER_CST)
1402 {
1403 tree type = TREE_TYPE (arg1);
1404 int count = TYPE_VECTOR_SUBPARTS (type), i;
1405 tree *elts = XALLOCAVEC (tree, count);
1406
1407 if (code == VEC_LSHIFT_EXPR
1408 || code == VEC_RSHIFT_EXPR)
1409 {
1410 if (!tree_fits_uhwi_p (arg2))
1411 return NULL_TREE;
1412
1413 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1414 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1415 unsigned HOST_WIDE_INT innerc
1416 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1417 if (shiftc >= outerc || (shiftc % innerc) != 0)
1418 return NULL_TREE;
1419 int offset = shiftc / innerc;
1420 /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
1421 For reductions, the compiler always emits VEC_RSHIFT_EXPR;
1422 for !BYTES_BIG_ENDIAN it picks the first vector element, but
1423 for BYTES_BIG_ENDIAN the last element of the vector. */
1424 if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
1425 offset = -offset;
1426 tree zero = build_zero_cst (TREE_TYPE (type));
1427 for (i = 0; i < count; i++)
1428 {
1429 if (i + offset < 0 || i + offset >= count)
1430 elts[i] = zero;
1431 else
1432 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1433 }
1434 }
1435 else
1436 for (i = 0; i < count; i++)
1437 {
1438 tree elem1 = VECTOR_CST_ELT (arg1, i);
1439
1440 elts[i] = const_binop (code, elem1, arg2);
1441
1442 /* It is possible that const_binop cannot handle the given
1443 code and returns NULL_TREE. */
1444 if (elts[i] == NULL_TREE)
1445 return NULL_TREE;
1446 }
1447
1448 return build_vector (type, elts);
1449 }
1450 return NULL_TREE;
1451 }
1452
1453 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1454 indicates which particular sizetype to create. */
1455
1456 tree
1457 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1458 {
1459 return build_int_cst (sizetype_tab[(int) kind], number);
1460 }
1461 \f
1462 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1463 is a tree code. The type of the result is taken from the operands.
1464 Both must be equivalent integer types, per int_binop_types_match_p.
1465 If the operands are constant, so is the result. */
1466
1467 tree
1468 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1469 {
1470 tree type = TREE_TYPE (arg0);
1471
1472 if (arg0 == error_mark_node || arg1 == error_mark_node)
1473 return error_mark_node;
1474
1475 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1476 TREE_TYPE (arg1)));
1477
1478 /* Handle the special case of two integer constants faster. */
1479 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1480 {
1481 /* And some specific cases even faster than that. */
1482 if (code == PLUS_EXPR)
1483 {
1484 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1485 return arg1;
1486 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1487 return arg0;
1488 }
1489 else if (code == MINUS_EXPR)
1490 {
1491 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1492 return arg0;
1493 }
1494 else if (code == MULT_EXPR)
1495 {
1496 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1497 return arg1;
1498 }
1499
1500 /* Handle general case of two integer constants. For sizetype
1501 constant calculations we always want to know about overflow,
1502 even in the unsigned case. */
1503 return int_const_binop_1 (code, arg0, arg1, -1);
1504 }
1505
1506 return fold_build2_loc (loc, code, type, arg0, arg1);
1507 }
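/* A typical use (a sketch, assuming the usual size_binop wrapper macro
   that supplies UNKNOWN_LOCATION):

     tree sz = size_binop (PLUS_EXPR, TYPE_SIZE_UNIT (t1),
                           TYPE_SIZE_UNIT (t2));

   Both operands are sizetype, so constant inputs fold immediately via
   int_const_binop_1 with overflow tracking; otherwise a PLUS_EXPR is
   built and folded.  */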
1508
1509 /* Given two values, either both of sizetype or both of bitsizetype,
1510 compute the difference between the two values. Return the value
1511 in signed type corresponding to the type of the operands. */
1512
1513 tree
1514 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1515 {
1516 tree type = TREE_TYPE (arg0);
1517 tree ctype;
1518
1519 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1520 TREE_TYPE (arg1)));
1521
1522 /* If the type is already signed, just do the simple thing. */
1523 if (!TYPE_UNSIGNED (type))
1524 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1525
1526 if (type == sizetype)
1527 ctype = ssizetype;
1528 else if (type == bitsizetype)
1529 ctype = sbitsizetype;
1530 else
1531 ctype = signed_type_for (type);
1532
1533 /* If either operand is not a constant, do the conversions to the signed
1534 type and subtract. The hardware will do the right thing with any
1535 overflow in the subtraction. */
1536 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1537 return size_binop_loc (loc, MINUS_EXPR,
1538 fold_convert_loc (loc, ctype, arg0),
1539 fold_convert_loc (loc, ctype, arg1));
1540
1541 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1542 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1543 overflow) and negate (which can't either). Special-case a result
1544 of zero while we're here. */
1545 if (tree_int_cst_equal (arg0, arg1))
1546 return build_int_cst (ctype, 0);
1547 else if (tree_int_cst_lt (arg1, arg0))
1548 return fold_convert_loc (loc, ctype,
1549 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1550 else
1551 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1552 fold_convert_loc (loc, ctype,
1553 size_binop_loc (loc,
1554 MINUS_EXPR,
1555 arg1, arg0)));
1556 }
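/* For example, with sizetype constants ARG0 = 4 and ARG1 = 12 the
   result is the ssizetype constant -8: since ARG1 is larger, the
   subtraction is done the other way around (12 - 4 cannot overflow)
   and the difference is negated after conversion to ssizetype.  */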
1557 \f
1558 /* A subroutine of fold_convert_const handling conversions of an
1559 INTEGER_CST to another integer type. */
1560
1561 static tree
1562 fold_convert_const_int_from_int (tree type, const_tree arg1)
1563 {
1564 /* Given an integer constant, make a new constant with the new type,
1565 appropriately sign-extended or truncated. Use widest_int
1566 so that any extension is done according to ARG1's type. */
1567 return force_fit_type (type, wi::to_widest (arg1),
1568 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1569 TREE_OVERFLOW (arg1));
1570 }
1571
1572 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1573 to an integer type. */
1574
1575 static tree
1576 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1577 {
1578 bool overflow = false;
1579 tree t;
1580
1581 /* The following code implements the floating point to integer
1582 conversion rules required by the Java Language Specification:
1583 that IEEE NaNs are mapped to zero and values that overflow
1584 the target precision saturate, i.e. values greater than
1585 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1586 are mapped to INT_MIN. These semantics are allowed by the
1587 C and C++ standards that simply state that the behavior of
1588 FP-to-integer conversion is unspecified upon overflow. */
1589
1590 wide_int val;
1591 REAL_VALUE_TYPE r;
1592 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1593
1594 switch (code)
1595 {
1596 case FIX_TRUNC_EXPR:
1597 real_trunc (&r, VOIDmode, &x);
1598 break;
1599
1600 default:
1601 gcc_unreachable ();
1602 }
1603
1604 /* If R is NaN, return zero and show we have an overflow. */
1605 if (REAL_VALUE_ISNAN (r))
1606 {
1607 overflow = true;
1608 val = wi::zero (TYPE_PRECISION (type));
1609 }
1610
1611 /* See if R is less than the lower bound or greater than the
1612 upper bound. */
1613
1614 if (! overflow)
1615 {
1616 tree lt = TYPE_MIN_VALUE (type);
1617 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1618 if (REAL_VALUES_LESS (r, l))
1619 {
1620 overflow = true;
1621 val = lt;
1622 }
1623 }
1624
1625 if (! overflow)
1626 {
1627 tree ut = TYPE_MAX_VALUE (type);
1628 if (ut)
1629 {
1630 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1631 if (REAL_VALUES_LESS (u, r))
1632 {
1633 overflow = true;
1634 val = ut;
1635 }
1636 }
1637 }
1638
1639 if (! overflow)
1640 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1641
1642 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1643 return t;
1644 }
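/* Worked examples of the saturating semantics above, assuming a 32-bit
   signed target type: (int) 3.9 truncates to 3; (int) 1.0e30 overflows
   and yields INT_MAX (2147483647) with TREE_OVERFLOW set; (int) -1.0e30
   yields INT_MIN; a NaN operand yields 0, also with TREE_OVERFLOW set.  */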
1645
1646 /* A subroutine of fold_convert_const handling conversions of a
1647 FIXED_CST to an integer type. */
1648
1649 static tree
1650 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1651 {
1652 tree t;
1653 double_int temp, temp_trunc;
1654 unsigned int mode;
1655
1656 /* Right-shift FIXED_CST into temp by fbit bits. */
1657 temp = TREE_FIXED_CST (arg1).data;
1658 mode = TREE_FIXED_CST (arg1).mode;
1659 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1660 {
1661 temp = temp.rshift (GET_MODE_FBIT (mode),
1662 HOST_BITS_PER_DOUBLE_INT,
1663 SIGNED_FIXED_POINT_MODE_P (mode));
1664
1665 /* Left shift temp to temp_trunc by fbit. */
1666 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1667 HOST_BITS_PER_DOUBLE_INT,
1668 SIGNED_FIXED_POINT_MODE_P (mode));
1669 }
1670 else
1671 {
1672 temp = double_int_zero;
1673 temp_trunc = double_int_zero;
1674 }
1675
1676 /* If FIXED_CST is negative, we need to round the value toward 0:
1677 if the fractional bits are nonzero, add 1 to temp. */
1678 if (SIGNED_FIXED_POINT_MODE_P (mode)
1679 && temp_trunc.is_negative ()
1680 && TREE_FIXED_CST (arg1).data != temp_trunc)
1681 temp += double_int_one;
1682
1683 /* Given a fixed-point constant, make a new constant with the new type,
1684 appropriately sign-extended or truncated. */
1685 t = force_fit_type (type, temp, -1,
1686 (temp.is_negative ()
1687 && (TYPE_UNSIGNED (type)
1688 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1689 | TREE_OVERFLOW (arg1));
1690
1691 return t;
1692 }
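/* A worked example of the round-toward-zero fixup above: a signed
   fixed-point value -2.5 with one fractional bit is stored as the
   integer -5.  The arithmetic right shift by fbit gives temp = -3
   (shifting rounds toward negative infinity); shifting back gives
   temp_trunc = -6, which differs from the original -5, so 1 is added
   and the correctly truncated result is -2.  */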
1693
1694 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1695 to another floating point type. */
1696
1697 static tree
1698 fold_convert_const_real_from_real (tree type, const_tree arg1)
1699 {
1700 REAL_VALUE_TYPE value;
1701 tree t;
1702
1703 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1704 t = build_real (type, value);
1705
1706 /* If converting an infinity or NAN to a representation that doesn't
1707 have one, set the overflow bit so that we can produce some kind of
1708 error message at the appropriate point if necessary. It's not the
1709 most user-friendly message, but it's better than nothing. */
1710 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1711 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1712 TREE_OVERFLOW (t) = 1;
1713 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1714 && !MODE_HAS_NANS (TYPE_MODE (type)))
1715 TREE_OVERFLOW (t) = 1;
1716 /* Regular overflow, conversion produced an infinity in a mode that
1717 can't represent them. */
1718 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1719 && REAL_VALUE_ISINF (value)
1720 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1721 TREE_OVERFLOW (t) = 1;
1722 else
1723 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1724 return t;
1725 }
1726
1727 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1728 to a floating point type. */
1729
1730 static tree
1731 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1732 {
1733 REAL_VALUE_TYPE value;
1734 tree t;
1735
1736 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1737 t = build_real (type, value);
1738
1739 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1740 return t;
1741 }
1742
1743 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1744 to another fixed-point type. */
1745
1746 static tree
1747 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1748 {
1749 FIXED_VALUE_TYPE value;
1750 tree t;
1751 bool overflow_p;
1752
1753 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1754 TYPE_SATURATING (type));
1755 t = build_fixed (type, value);
1756
1757 /* Propagate overflow flags. */
1758 if (overflow_p | TREE_OVERFLOW (arg1))
1759 TREE_OVERFLOW (t) = 1;
1760 return t;
1761 }
1762
1763 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1764 to a fixed-point type. */
1765
1766 static tree
1767 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1768 {
1769 FIXED_VALUE_TYPE value;
1770 tree t;
1771 bool overflow_p;
1772 double_int di;
1773
1774 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
1775
1776 di.low = TREE_INT_CST_ELT (arg1, 0);
1777 if (TREE_INT_CST_NUNITS (arg1) == 1)
1778 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
1779 else
1780 di.high = TREE_INT_CST_ELT (arg1, 1);
1781
1782 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
1783 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1784 TYPE_SATURATING (type));
1785 t = build_fixed (type, value);
1786
1787 /* Propagate overflow flags. */
1788 if (overflow_p | TREE_OVERFLOW (arg1))
1789 TREE_OVERFLOW (t) = 1;
1790 return t;
1791 }
1792
1793 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1794 to a fixed-point type. */
1795
1796 static tree
1797 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1798 {
1799 FIXED_VALUE_TYPE value;
1800 tree t;
1801 bool overflow_p;
1802
1803 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1804 &TREE_REAL_CST (arg1),
1805 TYPE_SATURATING (type));
1806 t = build_fixed (type, value);
1807
1808 /* Propagate overflow flags. */
1809 if (overflow_p | TREE_OVERFLOW (arg1))
1810 TREE_OVERFLOW (t) = 1;
1811 return t;
1812 }
1813
1814 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1815 type TYPE. If no simplification can be done, return NULL_TREE. */
1816
1817 static tree
1818 fold_convert_const (enum tree_code code, tree type, tree arg1)
1819 {
1820 if (TREE_TYPE (arg1) == type)
1821 return arg1;
1822
1823 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1824 || TREE_CODE (type) == OFFSET_TYPE)
1825 {
1826 if (TREE_CODE (arg1) == INTEGER_CST)
1827 return fold_convert_const_int_from_int (type, arg1);
1828 else if (TREE_CODE (arg1) == REAL_CST)
1829 return fold_convert_const_int_from_real (code, type, arg1);
1830 else if (TREE_CODE (arg1) == FIXED_CST)
1831 return fold_convert_const_int_from_fixed (type, arg1);
1832 }
1833 else if (TREE_CODE (type) == REAL_TYPE)
1834 {
1835 if (TREE_CODE (arg1) == INTEGER_CST)
1836 return build_real_from_int_cst (type, arg1);
1837 else if (TREE_CODE (arg1) == REAL_CST)
1838 return fold_convert_const_real_from_real (type, arg1);
1839 else if (TREE_CODE (arg1) == FIXED_CST)
1840 return fold_convert_const_real_from_fixed (type, arg1);
1841 }
1842 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1843 {
1844 if (TREE_CODE (arg1) == FIXED_CST)
1845 return fold_convert_const_fixed_from_fixed (type, arg1);
1846 else if (TREE_CODE (arg1) == INTEGER_CST)
1847 return fold_convert_const_fixed_from_int (type, arg1);
1848 else if (TREE_CODE (arg1) == REAL_CST)
1849 return fold_convert_const_fixed_from_real (type, arg1);
1850 }
1851 return NULL_TREE;
1852 }
1853
1854 /* Construct a vector of zero elements of vector type TYPE. */
1855
1856 static tree
1857 build_zero_vector (tree type)
1858 {
1859 tree t;
1860
1861 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1862 return build_vector_from_val (type, t);
1863 }
1864
1865 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1866
1867 bool
1868 fold_convertible_p (const_tree type, const_tree arg)
1869 {
1870 tree orig = TREE_TYPE (arg);
1871
1872 if (type == orig)
1873 return true;
1874
1875 if (TREE_CODE (arg) == ERROR_MARK
1876 || TREE_CODE (type) == ERROR_MARK
1877 || TREE_CODE (orig) == ERROR_MARK)
1878 return false;
1879
1880 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1881 return true;
1882
1883 switch (TREE_CODE (type))
1884 {
1885 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1886 case POINTER_TYPE: case REFERENCE_TYPE:
1887 case OFFSET_TYPE:
1888 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1889 || TREE_CODE (orig) == OFFSET_TYPE)
1890 return true;
1891 return (TREE_CODE (orig) == VECTOR_TYPE
1892 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1893
1894 case REAL_TYPE:
1895 case FIXED_POINT_TYPE:
1896 case COMPLEX_TYPE:
1897 case VECTOR_TYPE:
1898 case VOID_TYPE:
1899 return TREE_CODE (type) == TREE_CODE (orig);
1900
1901 default:
1902 return false;
1903 }
1904 }
1905
1906 /* Convert expression ARG to type TYPE. Used by the middle-end for
1907 simple conversions in preference to calling the front-end's convert. */
1908
1909 tree
1910 fold_convert_loc (location_t loc, tree type, tree arg)
1911 {
1912 tree orig = TREE_TYPE (arg);
1913 tree tem;
1914
1915 if (type == orig)
1916 return arg;
1917
1918 if (TREE_CODE (arg) == ERROR_MARK
1919 || TREE_CODE (type) == ERROR_MARK
1920 || TREE_CODE (orig) == ERROR_MARK)
1921 return error_mark_node;
1922
1923 switch (TREE_CODE (type))
1924 {
1925 case POINTER_TYPE:
1926 case REFERENCE_TYPE:
1927 /* Handle conversions between pointers to different address spaces. */
1928 if (POINTER_TYPE_P (orig)
1929 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1930 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1931 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1932 /* fall through */
1933
1934 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1935 case OFFSET_TYPE:
1936 if (TREE_CODE (arg) == INTEGER_CST)
1937 {
1938 tem = fold_convert_const (NOP_EXPR, type, arg);
1939 if (tem != NULL_TREE)
1940 return tem;
1941 }
1942 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1943 || TREE_CODE (orig) == OFFSET_TYPE)
1944 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1945 if (TREE_CODE (orig) == COMPLEX_TYPE)
1946 return fold_convert_loc (loc, type,
1947 fold_build1_loc (loc, REALPART_EXPR,
1948 TREE_TYPE (orig), arg));
1949 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1950 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1951 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1952
1953 case REAL_TYPE:
1954 if (TREE_CODE (arg) == INTEGER_CST)
1955 {
1956 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1957 if (tem != NULL_TREE)
1958 return tem;
1959 }
1960 else if (TREE_CODE (arg) == REAL_CST)
1961 {
1962 tem = fold_convert_const (NOP_EXPR, type, arg);
1963 if (tem != NULL_TREE)
1964 return tem;
1965 }
1966 else if (TREE_CODE (arg) == FIXED_CST)
1967 {
1968 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1969 if (tem != NULL_TREE)
1970 return tem;
1971 }
1972
1973 switch (TREE_CODE (orig))
1974 {
1975 case INTEGER_TYPE:
1976 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1977 case POINTER_TYPE: case REFERENCE_TYPE:
1978 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1979
1980 case REAL_TYPE:
1981 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1982
1983 case FIXED_POINT_TYPE:
1984 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1985
1986 case COMPLEX_TYPE:
1987 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1988 return fold_convert_loc (loc, type, tem);
1989
1990 default:
1991 gcc_unreachable ();
1992 }
1993
1994 case FIXED_POINT_TYPE:
1995 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1996 || TREE_CODE (arg) == REAL_CST)
1997 {
1998 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1999 if (tem != NULL_TREE)
2000 goto fold_convert_exit;
2001 }
2002
2003 switch (TREE_CODE (orig))
2004 {
2005 case FIXED_POINT_TYPE:
2006 case INTEGER_TYPE:
2007 case ENUMERAL_TYPE:
2008 case BOOLEAN_TYPE:
2009 case REAL_TYPE:
2010 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2011
2012 case COMPLEX_TYPE:
2013 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2014 return fold_convert_loc (loc, type, tem);
2015
2016 default:
2017 gcc_unreachable ();
2018 }
2019
2020 case COMPLEX_TYPE:
2021 switch (TREE_CODE (orig))
2022 {
2023 case INTEGER_TYPE:
2024 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2025 case POINTER_TYPE: case REFERENCE_TYPE:
2026 case REAL_TYPE:
2027 case FIXED_POINT_TYPE:
2028 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2029 fold_convert_loc (loc, TREE_TYPE (type), arg),
2030 fold_convert_loc (loc, TREE_TYPE (type),
2031 integer_zero_node));
2032 case COMPLEX_TYPE:
2033 {
2034 tree rpart, ipart;
2035
2036 if (TREE_CODE (arg) == COMPLEX_EXPR)
2037 {
2038 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2039 TREE_OPERAND (arg, 0));
2040 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2041 TREE_OPERAND (arg, 1));
2042 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2043 }
2044
2045 arg = save_expr (arg);
2046 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2047 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2048 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2049 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2050 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2051 }
2052
2053 default:
2054 gcc_unreachable ();
2055 }
2056
2057 case VECTOR_TYPE:
2058 if (integer_zerop (arg))
2059 return build_zero_vector (type);
2060 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2061 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2062 || TREE_CODE (orig) == VECTOR_TYPE);
2063 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2064
2065 case VOID_TYPE:
2066 tem = fold_ignored_result (arg);
2067 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2068
2069 default:
2070 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2071 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2072 gcc_unreachable ();
2073 }
2074 fold_convert_exit:
2075 protected_set_expr_location_unshare (tem, loc);
2076 return tem;
2077 }
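
/* For instance (a sketch; LOC and the operand are hypothetical),
   converting a COMPLEX_TYPE value C to double_type_node goes through
   the COMPLEX_TYPE arm of the REAL_TYPE case above:

     tree r = fold_convert_loc (loc, double_type_node, c);

   builds REALPART_EXPR <c> and converts that to double; the
   imaginary part is simply dropped, matching the language rules for
   complex-to-real conversion. */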
2078 \f
2079 /* Return false if expr can be assumed not to be an lvalue, true
2080 otherwise. */
2081
2082 static bool
2083 maybe_lvalue_p (const_tree x)
2084 {
2085 /* We only need to wrap lvalue tree codes. */
2086 switch (TREE_CODE (x))
2087 {
2088 case VAR_DECL:
2089 case PARM_DECL:
2090 case RESULT_DECL:
2091 case LABEL_DECL:
2092 case FUNCTION_DECL:
2093 case SSA_NAME:
2094
2095 case COMPONENT_REF:
2096 case MEM_REF:
2097 case INDIRECT_REF:
2098 case ARRAY_REF:
2099 case ARRAY_RANGE_REF:
2100 case BIT_FIELD_REF:
2101 case OBJ_TYPE_REF:
2102
2103 case REALPART_EXPR:
2104 case IMAGPART_EXPR:
2105 case PREINCREMENT_EXPR:
2106 case PREDECREMENT_EXPR:
2107 case SAVE_EXPR:
2108 case TRY_CATCH_EXPR:
2109 case WITH_CLEANUP_EXPR:
2110 case COMPOUND_EXPR:
2111 case MODIFY_EXPR:
2112 case TARGET_EXPR:
2113 case COND_EXPR:
2114 case BIND_EXPR:
2115 break;
2116
2117 default:
2118 /* Assume the worst for front-end tree codes. */
2119 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2120 break;
2121 return false;
2122 }
2123
2124 return true;
2125 }
2126
2127 /* Return an expr equal to X but certainly not valid as an lvalue. */
2128
2129 tree
2130 non_lvalue_loc (location_t loc, tree x)
2131 {
2132 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2133 us. */
2134 if (in_gimple_form)
2135 return x;
2136
2137 if (! maybe_lvalue_p (x))
2138 return x;
2139 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2140 }
2141
2142 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2143 Zero means allow extended lvalues. */
2144
2145 int pedantic_lvalues;
2146
2147 /* When pedantic, return an expr equal to X but certainly not valid as a
2148 pedantic lvalue. Otherwise, return X. */
2149
2150 static tree
2151 pedantic_non_lvalue_loc (location_t loc, tree x)
2152 {
2153 if (pedantic_lvalues)
2154 return non_lvalue_loc (loc, x);
2155
2156 return protected_set_expr_location_unshare (x, loc);
2157 }
2158 \f
2159 /* Given a tree comparison code, return the code that is the logical inverse.
2160 It is generally not safe to do this for floating-point comparisons, except
2161 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2162 ERROR_MARK in this case. */
2163
2164 enum tree_code
2165 invert_tree_comparison (enum tree_code code, bool honor_nans)
2166 {
2167 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2168 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2169 return ERROR_MARK;
2170
2171 switch (code)
2172 {
2173 case EQ_EXPR:
2174 return NE_EXPR;
2175 case NE_EXPR:
2176 return EQ_EXPR;
2177 case GT_EXPR:
2178 return honor_nans ? UNLE_EXPR : LE_EXPR;
2179 case GE_EXPR:
2180 return honor_nans ? UNLT_EXPR : LT_EXPR;
2181 case LT_EXPR:
2182 return honor_nans ? UNGE_EXPR : GE_EXPR;
2183 case LE_EXPR:
2184 return honor_nans ? UNGT_EXPR : GT_EXPR;
2185 case LTGT_EXPR:
2186 return UNEQ_EXPR;
2187 case UNEQ_EXPR:
2188 return LTGT_EXPR;
2189 case UNGT_EXPR:
2190 return LE_EXPR;
2191 case UNGE_EXPR:
2192 return LT_EXPR;
2193 case UNLT_EXPR:
2194 return GE_EXPR;
2195 case UNLE_EXPR:
2196 return GT_EXPR;
2197 case ORDERED_EXPR:
2198 return UNORDERED_EXPR;
2199 case UNORDERED_EXPR:
2200 return ORDERED_EXPR;
2201 default:
2202 gcc_unreachable ();
2203 }
2204 }
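
/* For example, with HONOR_NANS true and flag_trapping_math clear,
   inverting LT_EXPR yields UNGE_EXPR, which is true exactly when
   "<" is false (including when either operand is a NaN). With both
   HONOR_NANS and flag_trapping_math set, the function instead
   returns ERROR_MARK, so a caller typically guards it (a sketch):

     enum tree_code inv = invert_tree_comparison (code, honor_nans);
     if (inv == ERROR_MARK)
       return NULL_TREE;
*/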
2205
2206 /* Similar, but return the comparison that results if the operands are
2207 swapped. This is safe for floating-point. */
2208
2209 enum tree_code
2210 swap_tree_comparison (enum tree_code code)
2211 {
2212 switch (code)
2213 {
2214 case EQ_EXPR:
2215 case NE_EXPR:
2216 case ORDERED_EXPR:
2217 case UNORDERED_EXPR:
2218 case LTGT_EXPR:
2219 case UNEQ_EXPR:
2220 return code;
2221 case GT_EXPR:
2222 return LT_EXPR;
2223 case GE_EXPR:
2224 return LE_EXPR;
2225 case LT_EXPR:
2226 return GT_EXPR;
2227 case LE_EXPR:
2228 return GE_EXPR;
2229 case UNGT_EXPR:
2230 return UNLT_EXPR;
2231 case UNGE_EXPR:
2232 return UNLE_EXPR;
2233 case UNLT_EXPR:
2234 return UNGT_EXPR;
2235 case UNLE_EXPR:
2236 return UNGE_EXPR;
2237 default:
2238 gcc_unreachable ();
2239 }
2240 }
2241
2242
2243 /* Convert a comparison tree code from an enum tree_code representation
2244 into a compcode bit-based encoding. This function is the inverse of
2245 compcode_to_comparison. */
2246
2247 static enum comparison_code
2248 comparison_to_compcode (enum tree_code code)
2249 {
2250 switch (code)
2251 {
2252 case LT_EXPR:
2253 return COMPCODE_LT;
2254 case EQ_EXPR:
2255 return COMPCODE_EQ;
2256 case LE_EXPR:
2257 return COMPCODE_LE;
2258 case GT_EXPR:
2259 return COMPCODE_GT;
2260 case NE_EXPR:
2261 return COMPCODE_NE;
2262 case GE_EXPR:
2263 return COMPCODE_GE;
2264 case ORDERED_EXPR:
2265 return COMPCODE_ORD;
2266 case UNORDERED_EXPR:
2267 return COMPCODE_UNORD;
2268 case UNLT_EXPR:
2269 return COMPCODE_UNLT;
2270 case UNEQ_EXPR:
2271 return COMPCODE_UNEQ;
2272 case UNLE_EXPR:
2273 return COMPCODE_UNLE;
2274 case UNGT_EXPR:
2275 return COMPCODE_UNGT;
2276 case LTGT_EXPR:
2277 return COMPCODE_LTGT;
2278 case UNGE_EXPR:
2279 return COMPCODE_UNGE;
2280 default:
2281 gcc_unreachable ();
2282 }
2283 }
2284
2285 /* Convert a compcode bit-based encoding of a comparison operator back
2286 to GCC's enum tree_code representation. This function is the
2287 inverse of comparison_to_compcode. */
2288
2289 static enum tree_code
2290 compcode_to_comparison (enum comparison_code code)
2291 {
2292 switch (code)
2293 {
2294 case COMPCODE_LT:
2295 return LT_EXPR;
2296 case COMPCODE_EQ:
2297 return EQ_EXPR;
2298 case COMPCODE_LE:
2299 return LE_EXPR;
2300 case COMPCODE_GT:
2301 return GT_EXPR;
2302 case COMPCODE_NE:
2303 return NE_EXPR;
2304 case COMPCODE_GE:
2305 return GE_EXPR;
2306 case COMPCODE_ORD:
2307 return ORDERED_EXPR;
2308 case COMPCODE_UNORD:
2309 return UNORDERED_EXPR;
2310 case COMPCODE_UNLT:
2311 return UNLT_EXPR;
2312 case COMPCODE_UNEQ:
2313 return UNEQ_EXPR;
2314 case COMPCODE_UNLE:
2315 return UNLE_EXPR;
2316 case COMPCODE_UNGT:
2317 return UNGT_EXPR;
2318 case COMPCODE_LTGT:
2319 return LTGT_EXPR;
2320 case COMPCODE_UNGE:
2321 return UNGE_EXPR;
2322 default:
2323 gcc_unreachable ();
2324 }
2325 }
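
/* In this encoding bit 0 stands for "<", bit 1 for "==", bit 2 for
   ">" and bit 3 for "unordered", so combining comparisons reduces to
   bit arithmetic on compcodes. For example:

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)              3 == 1 | 2
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
                                                         13 == 1 | 4 | 8
*/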
2326
2327 /* Return a tree for the comparison which is the combination of
2328 doing the AND or OR (depending on CODE) of the two operations LCODE
2329 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2330 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2331 if this makes the transformation invalid. */
2332
2333 tree
2334 combine_comparisons (location_t loc,
2335 enum tree_code code, enum tree_code lcode,
2336 enum tree_code rcode, tree truth_type,
2337 tree ll_arg, tree lr_arg)
2338 {
2339 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2340 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2341 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2342 int compcode;
2343
2344 switch (code)
2345 {
2346 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2347 compcode = lcompcode & rcompcode;
2348 break;
2349
2350 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2351 compcode = lcompcode | rcompcode;
2352 break;
2353
2354 default:
2355 return NULL_TREE;
2356 }
2357
2358 if (!honor_nans)
2359 {
2360 /* Eliminate unordered comparisons, as well as LTGT and ORD
2361 which are not used unless the mode has NaNs. */
2362 compcode &= ~COMPCODE_UNORD;
2363 if (compcode == COMPCODE_LTGT)
2364 compcode = COMPCODE_NE;
2365 else if (compcode == COMPCODE_ORD)
2366 compcode = COMPCODE_TRUE;
2367 }
2368 else if (flag_trapping_math)
2369 {
2370 /* Check that the original operation and the optimized ones will trap
2371 under the same condition. */
2372 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2373 && (lcompcode != COMPCODE_EQ)
2374 && (lcompcode != COMPCODE_ORD);
2375 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2376 && (rcompcode != COMPCODE_EQ)
2377 && (rcompcode != COMPCODE_ORD);
2378 bool trap = (compcode & COMPCODE_UNORD) == 0
2379 && (compcode != COMPCODE_EQ)
2380 && (compcode != COMPCODE_ORD);
2381
2382 /* In a short-circuited boolean expression the LHS might be
2383 such that the RHS, if evaluated, will never trap. For
2384 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2385 if neither x nor y is NaN. (This is a mixed blessing: for
2386 example, the expression above will never trap, hence
2387 optimizing it to x < y would be invalid). */
2388 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2389 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2390 rtrap = false;
2391
2392 /* If the comparison was short-circuited, and only the RHS
2393 trapped, we may now generate a spurious trap. */
2394 if (rtrap && !ltrap
2395 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2396 return NULL_TREE;
2397
2398 /* If we changed the conditions that cause a trap, we lose. */
2399 if ((ltrap || rtrap) != trap)
2400 return NULL_TREE;
2401 }
2402
2403 if (compcode == COMPCODE_TRUE)
2404 return constant_boolean_node (true, truth_type);
2405 else if (compcode == COMPCODE_FALSE)
2406 return constant_boolean_node (false, truth_type);
2407 else
2408 {
2409 enum tree_code tcode;
2410
2411 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2412 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2413 }
2414 }
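
/* Worked example (a sketch; X and Y are arbitrary integer operands,
   so NaNs are not an issue):

     combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                          boolean_type_node, x, y)

   computes COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and returns the
   tree for x <= y. The TRUTH_ANDIF_EXPR variant of the same call
   computes COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE and returns
   boolean false. */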
2415 \f
2416 /* Return nonzero if two operands (typically of the same tree node)
2417 are necessarily equal. If either argument has side-effects this
2418 function returns zero. FLAGS modifies behavior as follows:
2419
2420 If OEP_ONLY_CONST is set, only return nonzero for constants.
2421 This function tests whether the operands are indistinguishable;
2422 it does not test whether they are equal using C's == operation.
2423 The distinction is important for IEEE floating point, because
2424 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2425 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2426
2427 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2428 even though it may hold multiple values during a function.
2429 This is because a GCC tree node guarantees that nothing else is
2430 executed between the evaluation of its "operands" (which may often
2431 be evaluated in arbitrary order). Hence if the operands themselves
2432 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2433 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2434 unset means assuming isochronic (or instantaneous) tree equivalence.
2435 Unless comparing arbitrary expression trees, such as from different
2436 statements, this flag can usually be left unset.
2437
2438 If OEP_PURE_SAME is set, then pure functions with identical arguments
2439 are considered the same. It is used when the caller has other ways
2440 to ensure that global memory is unchanged in between. */
2441
2442 int
2443 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2444 {
2445 /* If either is ERROR_MARK, they aren't equal. */
2446 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2447 || TREE_TYPE (arg0) == error_mark_node
2448 || TREE_TYPE (arg1) == error_mark_node)
2449 return 0;
2450
2451 /* Similar, if either does not have a type (like a released SSA name),
2452 they aren't equal. */
2453 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2454 return 0;
2455
2456 /* Check equality of integer constants before bailing out due to
2457 precision differences. */
2458 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2459 return tree_int_cst_equal (arg0, arg1);
2460
2461 /* If both types don't have the same signedness, then we can't consider
2462 them equal. We must check this before the STRIP_NOPS calls
2463 because they may change the signedness of the arguments. As pointers
2464 strictly don't have a signedness, require either two pointers or
2465 two non-pointers as well. */
2466 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2467 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2468 return 0;
2469
2470 /* We cannot consider pointers to different address spaces equal. */
2471 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2472 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2473 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2474 return 0;
2475
2476 /* If both types don't have the same precision, then it is not safe
2477 to strip NOPs. */
2478 if (element_precision (TREE_TYPE (arg0))
2479 != element_precision (TREE_TYPE (arg1)))
2480 return 0;
2481
2482 STRIP_NOPS (arg0);
2483 STRIP_NOPS (arg1);
2484
2485 /* In case both args are comparisons but with different comparison
2486 code, try to swap the comparison operands of one arg to produce
2487 a match and compare that variant. */
2488 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2489 && COMPARISON_CLASS_P (arg0)
2490 && COMPARISON_CLASS_P (arg1))
2491 {
2492 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2493
2494 if (TREE_CODE (arg0) == swap_code)
2495 return operand_equal_p (TREE_OPERAND (arg0, 0),
2496 TREE_OPERAND (arg1, 1), flags)
2497 && operand_equal_p (TREE_OPERAND (arg0, 1),
2498 TREE_OPERAND (arg1, 0), flags);
2499 }
2500
2501 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2502 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2503 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2504 return 0;
2505
2506 /* This is needed for conversions and for COMPONENT_REF.
2507 Might as well play it safe and always test this. */
2508 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2509 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2510 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2511 return 0;
2512
2513 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2514 We don't care about side effects in that case because the SAVE_EXPR
2515 takes care of that for us. In all other cases, two expressions are
2516 equal if they have no side effects. If we have two identical
2517 expressions with side effects that should be treated the same due
2518 to the only side effects being identical SAVE_EXPR's, that will
2519 be detected in the recursive calls below.
2520 If we are taking an invariant address of two identical objects
2521 they are necessarily equal as well. */
2522 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2523 && (TREE_CODE (arg0) == SAVE_EXPR
2524 || (flags & OEP_CONSTANT_ADDRESS_OF)
2525 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2526 return 1;
2527
2528 /* Next handle constant cases, those for which we can return 1 even
2529 if ONLY_CONST is set. */
2530 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2531 switch (TREE_CODE (arg0))
2532 {
2533 case INTEGER_CST:
2534 return tree_int_cst_equal (arg0, arg1);
2535
2536 case FIXED_CST:
2537 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2538 TREE_FIXED_CST (arg1));
2539
2540 case REAL_CST:
2541 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2542 TREE_REAL_CST (arg1)))
2543 return 1;
2544
2545
2546 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2547 {
2548 /* If we do not distinguish between signed and unsigned zero,
2549 consider them equal. */
2550 if (real_zerop (arg0) && real_zerop (arg1))
2551 return 1;
2552 }
2553 return 0;
2554
2555 case VECTOR_CST:
2556 {
2557 unsigned i;
2558
2559 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2560 return 0;
2561
2562 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2563 {
2564 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2565 VECTOR_CST_ELT (arg1, i), flags))
2566 return 0;
2567 }
2568 return 1;
2569 }
2570
2571 case COMPLEX_CST:
2572 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2573 flags)
2574 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2575 flags));
2576
2577 case STRING_CST:
2578 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2579 && ! memcmp (TREE_STRING_POINTER (arg0),
2580 TREE_STRING_POINTER (arg1),
2581 TREE_STRING_LENGTH (arg0)));
2582
2583 case ADDR_EXPR:
2584 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2585 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2586 ? OEP_CONSTANT_ADDRESS_OF : 0);
2587 default:
2588 break;
2589 }
2590
2591 if (flags & OEP_ONLY_CONST)
2592 return 0;
2593
2594 /* Define macros to test an operand from arg0 and arg1 for equality and a
2595 variant that allows null and views null as being different from any
2596 non-null value. In the latter case, if either is null, then both
2597 must be; otherwise, do the normal comparison. */
2598 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2599 TREE_OPERAND (arg1, N), flags)
2600
2601 #define OP_SAME_WITH_NULL(N) \
2602 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2603 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2604
2605 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2606 {
2607 case tcc_unary:
2608 /* Two conversions are equal only if signedness and modes match. */
2609 switch (TREE_CODE (arg0))
2610 {
2611 CASE_CONVERT:
2612 case FIX_TRUNC_EXPR:
2613 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2614 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2615 return 0;
2616 break;
2617 default:
2618 break;
2619 }
2620
2621 return OP_SAME (0);
2622
2623
2624 case tcc_comparison:
2625 case tcc_binary:
2626 if (OP_SAME (0) && OP_SAME (1))
2627 return 1;
2628
2629 /* For commutative ops, allow the other order. */
2630 return (commutative_tree_code (TREE_CODE (arg0))
2631 && operand_equal_p (TREE_OPERAND (arg0, 0),
2632 TREE_OPERAND (arg1, 1), flags)
2633 && operand_equal_p (TREE_OPERAND (arg0, 1),
2634 TREE_OPERAND (arg1, 0), flags));
2635
2636 case tcc_reference:
2637 /* If either of the pointer (or reference) expressions we are
2638 dereferencing contain a side effect, these cannot be equal,
2639 but their addresses can be. */
2640 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2641 && (TREE_SIDE_EFFECTS (arg0)
2642 || TREE_SIDE_EFFECTS (arg1)))
2643 return 0;
2644
2645 switch (TREE_CODE (arg0))
2646 {
2647 case INDIRECT_REF:
2648 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2649 return OP_SAME (0);
2650
2651 case REALPART_EXPR:
2652 case IMAGPART_EXPR:
2653 return OP_SAME (0);
2654
2655 case TARGET_MEM_REF:
2656 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2657 /* Require equal extra operands and then fall through to MEM_REF
2658 handling of the two common operands. */
2659 if (!OP_SAME_WITH_NULL (2)
2660 || !OP_SAME_WITH_NULL (3)
2661 || !OP_SAME_WITH_NULL (4))
2662 return 0;
2663 /* Fallthru. */
2664 case MEM_REF:
2665 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2666 /* Require equal access sizes, and similar pointer types.
2667 We can have incomplete types for array references of
2668 variable-sized arrays from the Fortran frontend
2669 though. Also verify the types are compatible. */
2670 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2671 || (TYPE_SIZE (TREE_TYPE (arg0))
2672 && TYPE_SIZE (TREE_TYPE (arg1))
2673 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2674 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2675 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2676 && alias_ptr_types_compatible_p
2677 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2678 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2679 && OP_SAME (0) && OP_SAME (1));
2680
2681 case ARRAY_REF:
2682 case ARRAY_RANGE_REF:
2683 /* Operands 2 and 3 may be null.
2684 Compare the array index by value first if it is constant, as we
2685 may have different types but the same value here. */
2686 if (!OP_SAME (0))
2687 return 0;
2688 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2689 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2690 TREE_OPERAND (arg1, 1))
2691 || OP_SAME (1))
2692 && OP_SAME_WITH_NULL (2)
2693 && OP_SAME_WITH_NULL (3));
2694
2695 case COMPONENT_REF:
2696 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2697 may be NULL when we're called to compare MEM_EXPRs. */
2698 if (!OP_SAME_WITH_NULL (0)
2699 || !OP_SAME (1))
2700 return 0;
2701 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2702 return OP_SAME_WITH_NULL (2);
2703
2704 case BIT_FIELD_REF:
2705 if (!OP_SAME (0))
2706 return 0;
2707 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2708 return OP_SAME (1) && OP_SAME (2);
2709
2710 default:
2711 return 0;
2712 }
2713
2714 case tcc_expression:
2715 switch (TREE_CODE (arg0))
2716 {
2717 case ADDR_EXPR:
2718 case TRUTH_NOT_EXPR:
2719 return OP_SAME (0);
2720
2721 case TRUTH_ANDIF_EXPR:
2722 case TRUTH_ORIF_EXPR:
2723 return OP_SAME (0) && OP_SAME (1);
2724
2725 case FMA_EXPR:
2726 case WIDEN_MULT_PLUS_EXPR:
2727 case WIDEN_MULT_MINUS_EXPR:
2728 if (!OP_SAME (2))
2729 return 0;
2730 /* The multiplication operands are commutative. */
2731 /* FALLTHRU */
2732
2733 case TRUTH_AND_EXPR:
2734 case TRUTH_OR_EXPR:
2735 case TRUTH_XOR_EXPR:
2736 if (OP_SAME (0) && OP_SAME (1))
2737 return 1;
2738
2739 /* Otherwise take into account this is a commutative operation. */
2740 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2741 TREE_OPERAND (arg1, 1), flags)
2742 && operand_equal_p (TREE_OPERAND (arg0, 1),
2743 TREE_OPERAND (arg1, 0), flags));
2744
2745 case COND_EXPR:
2746 case VEC_COND_EXPR:
2747 case DOT_PROD_EXPR:
2748 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2749
2750 default:
2751 return 0;
2752 }
2753
2754 case tcc_vl_exp:
2755 switch (TREE_CODE (arg0))
2756 {
2757 case CALL_EXPR:
2758 /* If the CALL_EXPRs call different functions, then they
2759 clearly cannot be equal. */
2760 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2761 flags))
2762 return 0;
2763
2764 {
2765 unsigned int cef = call_expr_flags (arg0);
2766 if (flags & OEP_PURE_SAME)
2767 cef &= ECF_CONST | ECF_PURE;
2768 else
2769 cef &= ECF_CONST;
2770 if (!cef)
2771 return 0;
2772 }
2773
2774 /* Now see if all the arguments are the same. */
2775 {
2776 const_call_expr_arg_iterator iter0, iter1;
2777 const_tree a0, a1;
2778 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2779 a1 = first_const_call_expr_arg (arg1, &iter1);
2780 a0 && a1;
2781 a0 = next_const_call_expr_arg (&iter0),
2782 a1 = next_const_call_expr_arg (&iter1))
2783 if (! operand_equal_p (a0, a1, flags))
2784 return 0;
2785
2786 /* If we get here and both argument lists are exhausted
2787 then the CALL_EXPRs are equal. */
2788 return ! (a0 || a1);
2789 }
2790 default:
2791 return 0;
2792 }
2793
2794 case tcc_declaration:
2795 /* Consider __builtin_sqrt equal to sqrt. */
2796 return (TREE_CODE (arg0) == FUNCTION_DECL
2797 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2798 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2799 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2800
2801 default:
2802 return 0;
2803 }
2804
2805 #undef OP_SAME
2806 #undef OP_SAME_WITH_NULL
2807 }
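
/* Examples (sketches; A and B are side-effect-free trees of one
   integer type and F is some function):

     operand_equal_p (a + b, b + a, 0)   returns 1, because PLUS_EXPR
                                         is commutative;
     operand_equal_p (f (), f (), 0)     returns 0 unless the call is
                                         ECF_CONST (or ECF_PURE with
                                         OEP_PURE_SAME), since the two
                                         calls may produce different
                                         values. */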
2808 \f
2809 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2810 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2811
2812 When in doubt, return 0. */
2813
2814 static int
2815 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2816 {
2817 int unsignedp1, unsignedpo;
2818 tree primarg0, primarg1, primother;
2819 unsigned int correct_width;
2820
2821 if (operand_equal_p (arg0, arg1, 0))
2822 return 1;
2823
2824 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2825 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2826 return 0;
2827
2828 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2829 and see if the inner values are the same. This removes any
2830 signedness comparison, which doesn't matter here. */
2831 primarg0 = arg0, primarg1 = arg1;
2832 STRIP_NOPS (primarg0);
2833 STRIP_NOPS (primarg1);
2834 if (operand_equal_p (primarg0, primarg1, 0))
2835 return 1;
2836
2837 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2838 actual comparison operand, ARG0.
2839
2840 First throw away any conversions to wider types
2841 already present in the operands. */
2842
2843 primarg1 = get_narrower (arg1, &unsignedp1);
2844 primother = get_narrower (other, &unsignedpo);
2845
2846 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2847 if (unsignedp1 == unsignedpo
2848 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2849 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2850 {
2851 tree type = TREE_TYPE (arg0);
2852
2853 /* Make sure shorter operand is extended the right way
2854 to match the longer operand. */
2855 primarg1 = fold_convert (signed_or_unsigned_type_for
2856 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2857
2858 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2859 return 1;
2860 }
2861
2862 return 0;
2863 }
2864 \f
2865 /* See if ARG is an expression that is either a comparison or is performing
2866 arithmetic on comparisons. The comparisons must only be comparing
2867 two different values, which will be stored in *CVAL1 and *CVAL2; if
2868 they are nonzero it means that some operands have already been found.
2869 No variables may be used anywhere else in the expression except in the
2870 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2871 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2872
2873 If this is true, return 1. Otherwise, return zero. */
2874
2875 static int
2876 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2877 {
2878 enum tree_code code = TREE_CODE (arg);
2879 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2880
2881 /* We can handle some of the tcc_expression cases here. */
2882 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2883 tclass = tcc_unary;
2884 else if (tclass == tcc_expression
2885 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2886 || code == COMPOUND_EXPR))
2887 tclass = tcc_binary;
2888
2889 else if (tclass == tcc_expression && code == SAVE_EXPR
2890 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2891 {
2892 /* If we've already found a CVAL1 or CVAL2, this expression is
2893 too complex to handle. */
2894 if (*cval1 || *cval2)
2895 return 0;
2896
2897 tclass = tcc_unary;
2898 *save_p = 1;
2899 }
2900
2901 switch (tclass)
2902 {
2903 case tcc_unary:
2904 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2905
2906 case tcc_binary:
2907 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2908 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2909 cval1, cval2, save_p));
2910
2911 case tcc_constant:
2912 return 1;
2913
2914 case tcc_expression:
2915 if (code == COND_EXPR)
2916 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2917 cval1, cval2, save_p)
2918 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2919 cval1, cval2, save_p)
2920 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2921 cval1, cval2, save_p));
2922 return 0;
2923
2924 case tcc_comparison:
2925 /* First see if we can handle the first operand, then the second. For
2926 the second operand, we know *CVAL1 can't be zero. It must be that
2927 one side of the comparison is each of the values; test for the
2928 case where this isn't true by failing if the two operands
2929 are the same. */
2930
2931 if (operand_equal_p (TREE_OPERAND (arg, 0),
2932 TREE_OPERAND (arg, 1), 0))
2933 return 0;
2934
2935 if (*cval1 == 0)
2936 *cval1 = TREE_OPERAND (arg, 0);
2937 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2938 ;
2939 else if (*cval2 == 0)
2940 *cval2 = TREE_OPERAND (arg, 0);
2941 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2942 ;
2943 else
2944 return 0;
2945
2946 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2947 ;
2948 else if (*cval2 == 0)
2949 *cval2 = TREE_OPERAND (arg, 1);
2950 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2951 ;
2952 else
2953 return 0;
2954
2955 return 1;
2956
2957 default:
2958 return 0;
2959 }
2960 }
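
/* For example, for ARG = (x < y) || (x == y) this returns 1 with
   *CVAL1 == x and *CVAL2 == y. For (x < y) || (y < z) it returns 0,
   because three distinct values appear in the comparisons. */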
2961 \f
2962 /* ARG is a tree that is known to contain just arithmetic operations and
2963 comparisons. Evaluate the operations in the tree substituting NEW0 for
2964 any occurrence of OLD0 as an operand of a comparison and likewise for
2965 NEW1 and OLD1. */
2966
2967 static tree
2968 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2969 tree old1, tree new1)
2970 {
2971 tree type = TREE_TYPE (arg);
2972 enum tree_code code = TREE_CODE (arg);
2973 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2974
2975 /* We can handle some of the tcc_expression cases here. */
2976 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2977 tclass = tcc_unary;
2978 else if (tclass == tcc_expression
2979 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2980 tclass = tcc_binary;
2981
2982 switch (tclass)
2983 {
2984 case tcc_unary:
2985 return fold_build1_loc (loc, code, type,
2986 eval_subst (loc, TREE_OPERAND (arg, 0),
2987 old0, new0, old1, new1));
2988
2989 case tcc_binary:
2990 return fold_build2_loc (loc, code, type,
2991 eval_subst (loc, TREE_OPERAND (arg, 0),
2992 old0, new0, old1, new1),
2993 eval_subst (loc, TREE_OPERAND (arg, 1),
2994 old0, new0, old1, new1));
2995
2996 case tcc_expression:
2997 switch (code)
2998 {
2999 case SAVE_EXPR:
3000 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3001 old1, new1);
3002
3003 case COMPOUND_EXPR:
3004 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3005 old1, new1);
3006
3007 case COND_EXPR:
3008 return fold_build3_loc (loc, code, type,
3009 eval_subst (loc, TREE_OPERAND (arg, 0),
3010 old0, new0, old1, new1),
3011 eval_subst (loc, TREE_OPERAND (arg, 1),
3012 old0, new0, old1, new1),
3013 eval_subst (loc, TREE_OPERAND (arg, 2),
3014 old0, new0, old1, new1));
3015 default:
3016 break;
3017 }
3018 /* Fall through - ??? */
3019
3020 case tcc_comparison:
3021 {
3022 tree arg0 = TREE_OPERAND (arg, 0);
3023 tree arg1 = TREE_OPERAND (arg, 1);
3024
3025 /* We need to check both for exact equality and tree equality. The
3026 former will be true if the operand has a side-effect. In that
3027 case, we know the operand occurred exactly once. */
3028
3029 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3030 arg0 = new0;
3031 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3032 arg0 = new1;
3033
3034 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3035 arg1 = new0;
3036 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3037 arg1 = new1;
3038
3039 return fold_build2_loc (loc, code, type, arg0, arg1);
3040 }
3041
3042 default:
3043 return arg;
3044 }
3045 }
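
/* For instance (a sketch; the names are hypothetical), with
   ARG = (x < y) || (x == y) the call

     eval_subst (loc, arg, x, a, y, b)

   rebuilds the expression as (a < b) || (a == b), folding each piece
   as it goes. */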
3046 \f
3047 /* Return a tree for the case when the result of an expression is RESULT
3048 converted to TYPE and OMITTED was previously an operand of the expression
3049 but is now not needed (e.g., we folded OMITTED * 0).
3050
3051 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3052 the conversion of RESULT to TYPE. */
3053
3054 tree
3055 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3056 {
3057 tree t = fold_convert_loc (loc, type, result);
3058
3059 /* If the resulting operand is an empty statement, just return the omitted
3060 statement cast to void. */
3061 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3062 return build1_loc (loc, NOP_EXPR, void_type_node,
3063 fold_ignored_result (omitted));
3064
3065 if (TREE_SIDE_EFFECTS (omitted))
3066 return build2_loc (loc, COMPOUND_EXPR, type,
3067 fold_ignored_result (omitted), t);
3068
3069 return non_lvalue_loc (loc, t);
3070 }
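
/* For example, when folding X * 0 where X has side effects, a caller
   cannot simply return zero; instead it uses (a sketch)

     omit_one_operand_loc (loc, type, integer_zero_node, x);

   which produces the COMPOUND_EXPR (x, 0), so X is still evaluated
   for its side effects. */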
3071
3072 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3073
3074 static tree
3075 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3076 tree omitted)
3077 {
3078 tree t = fold_convert_loc (loc, type, result);
3079
3080 /* If the resulting operand is an empty statement, just return the omitted
3081 statement casted to void. */
3082 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3083 return build1_loc (loc, NOP_EXPR, void_type_node,
3084 fold_ignored_result (omitted));
3085
3086 if (TREE_SIDE_EFFECTS (omitted))
3087 return build2_loc (loc, COMPOUND_EXPR, type,
3088 fold_ignored_result (omitted), t);
3089
3090 return pedantic_non_lvalue_loc (loc, t);
3091 }
3092
3093 /* Return a tree for the case when the result of an expression is RESULT
3094 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3095 of the expression but are now not needed.
3096
3097 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3098 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3099 evaluated before OMITTED2. Otherwise, if neither has side effects,
3100 just do the conversion of RESULT to TYPE. */
3101
3102 tree
3103 omit_two_operands_loc (location_t loc, tree type, tree result,
3104 tree omitted1, tree omitted2)
3105 {
3106 tree t = fold_convert_loc (loc, type, result);
3107
3108 if (TREE_SIDE_EFFECTS (omitted2))
3109 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3110 if (TREE_SIDE_EFFECTS (omitted1))
3111 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3112
3113 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3114 }
3115
3116 \f
3117 /* Return a simplified tree node for the truth-negation of ARG. This
3118 never alters ARG itself. We assume that ARG is an operation that
3119 returns a truth value (0 or 1).
3120
3121 FIXME: one would think we would fold the result, but it causes
3122 problems with the dominator optimizer. */
3123
3124 static tree
3125 fold_truth_not_expr (location_t loc, tree arg)
3126 {
3127 tree type = TREE_TYPE (arg);
3128 enum tree_code code = TREE_CODE (arg);
3129 location_t loc1, loc2;
3130
3131 /* If this is a comparison, we can simply invert it, except for
3132 floating-point non-equality comparisons, in which case we just
3133 enclose a TRUTH_NOT_EXPR around what we have. */
3134
3135 if (TREE_CODE_CLASS (code) == tcc_comparison)
3136 {
3137 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3138 if (FLOAT_TYPE_P (op_type)
3139 && flag_trapping_math
3140 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3141 && code != NE_EXPR && code != EQ_EXPR)
3142 return NULL_TREE;
3143
3144 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3145 if (code == ERROR_MARK)
3146 return NULL_TREE;
3147
3148 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3149 TREE_OPERAND (arg, 1));
3150 }
3151
3152 switch (code)
3153 {
3154 case INTEGER_CST:
3155 return constant_boolean_node (integer_zerop (arg), type);
3156
3157 case TRUTH_AND_EXPR:
3158 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3159 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3160 return build2_loc (loc, TRUTH_OR_EXPR, type,
3161 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3162 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3163
3164 case TRUTH_OR_EXPR:
3165 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3166 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3167 return build2_loc (loc, TRUTH_AND_EXPR, type,
3168 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3169 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3170
3171 case TRUTH_XOR_EXPR:
3172 /* Here we can invert either operand. We invert the first operand
3173 unless the second operand is a TRUTH_NOT_EXPR in which case our
3174 result is the XOR of the first operand with the inside of the
3175 negation of the second operand. */
3176
3177 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3178 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3179 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3180 else
3181 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3182 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3183 TREE_OPERAND (arg, 1));
3184
3185 case TRUTH_ANDIF_EXPR:
3186 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3187 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3188 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3189 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3190 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3191
3192 case TRUTH_ORIF_EXPR:
3193 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3194 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3195 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3196 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3197 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3198
3199 case TRUTH_NOT_EXPR:
3200 return TREE_OPERAND (arg, 0);
3201
3202 case COND_EXPR:
3203 {
3204 tree arg1 = TREE_OPERAND (arg, 1);
3205 tree arg2 = TREE_OPERAND (arg, 2);
3206
3207 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3208 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3209
3210 /* A COND_EXPR may have a throw as one operand, which
3211 then has void type. Just leave void operands
3212 as they are. */
3213 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3214 VOID_TYPE_P (TREE_TYPE (arg1))
3215 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3216 VOID_TYPE_P (TREE_TYPE (arg2))
3217 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3218 }
3219
3220 case COMPOUND_EXPR:
3221 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3222 return build2_loc (loc, COMPOUND_EXPR, type,
3223 TREE_OPERAND (arg, 0),
3224 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3225
3226 case NON_LVALUE_EXPR:
3227 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3228 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3229
3230 CASE_CONVERT:
3231 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3232 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3233
3234 /* ... fall through ... */
3235
3236 case FLOAT_EXPR:
3237 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3238 return build1_loc (loc, TREE_CODE (arg), type,
3239 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3240
3241 case BIT_AND_EXPR:
3242 if (!integer_onep (TREE_OPERAND (arg, 1)))
3243 return NULL_TREE;
3244 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3245
3246 case SAVE_EXPR:
3247 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3248
3249 case CLEANUP_POINT_EXPR:
3250 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3251 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3252 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3253
3254 default:
3255 return NULL_TREE;
3256 }
3257 }
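
/* Two illustrative cases: negating A && B yields !A || !B by
   De Morgan's law, while negating the float comparison A < B with
   both NaNs honored and flag_trapping_math set returns NULL_TREE,
   because the inverse UNGE_EXPR would no longer trap on unordered
   operands. */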
3258
3259 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3260 assume that ARG is an operation that returns a truth value (0 or 1
3261 for scalars, 0 or -1 for vectors). Return the folded expression if
3262 folding is successful. Otherwise, return NULL_TREE. */
3263
3264 static tree
3265 fold_invert_truthvalue (location_t loc, tree arg)
3266 {
3267 tree type = TREE_TYPE (arg);
3268 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3269 ? BIT_NOT_EXPR
3270 : TRUTH_NOT_EXPR,
3271 type, arg);
3272 }
3273
3274 /* Return a simplified tree node for the truth-negation of ARG. This
3275 never alters ARG itself. We assume that ARG is an operation that
3276 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3277
3278 tree
3279 invert_truthvalue_loc (location_t loc, tree arg)
3280 {
3281 if (TREE_CODE (arg) == ERROR_MARK)
3282 return arg;
3283
3284 tree type = TREE_TYPE (arg);
3285 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3286 ? BIT_NOT_EXPR
3287 : TRUTH_NOT_EXPR,
3288 type, arg);
3289 }
3290
3291 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3292 operands are another bit-wise operation with a common input. If so,
3293 distribute the bit operations to save an operation and possibly two if
3294 constants are involved. For example, convert
3295 (A | B) & (A | C) into A | (B & C)
3296 Further simplification will occur if B and C are constants.
3297
3298 If this optimization cannot be done, 0 will be returned. */
3299
3300 static tree
3301 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3302 tree arg0, tree arg1)
3303 {
3304 tree common;
3305 tree left, right;
3306
3307 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3308 || TREE_CODE (arg0) == code
3309 || (TREE_CODE (arg0) != BIT_AND_EXPR
3310 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3311 return 0;
3312
3313 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3314 {
3315 common = TREE_OPERAND (arg0, 0);
3316 left = TREE_OPERAND (arg0, 1);
3317 right = TREE_OPERAND (arg1, 1);
3318 }
3319 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3320 {
3321 common = TREE_OPERAND (arg0, 0);
3322 left = TREE_OPERAND (arg0, 1);
3323 right = TREE_OPERAND (arg1, 0);
3324 }
3325 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3326 {
3327 common = TREE_OPERAND (arg0, 1);
3328 left = TREE_OPERAND (arg0, 0);
3329 right = TREE_OPERAND (arg1, 1);
3330 }
3331 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3332 {
3333 common = TREE_OPERAND (arg0, 1);
3334 left = TREE_OPERAND (arg0, 0);
3335 right = TREE_OPERAND (arg1, 0);
3336 }
3337 else
3338 return 0;
3339
3340 common = fold_convert_loc (loc, type, common);
3341 left = fold_convert_loc (loc, type, left);
3342 right = fold_convert_loc (loc, type, right);
3343 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3344 fold_build2_loc (loc, code, type, left, right));
3345 }
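
/* Worked example with constants (a sketch): for (x | 3) & (x | 5)
   the common input is X with LEFT == 3 and RIGHT == 5, so the result
   is x | (3 & 5), which the recursive fold reduces to x | 1. */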
3346
3347 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3348 with code CODE. This optimization is unsafe for floating point:
it may change rounding and exception behavior, so callers only apply
it when unsafe math optimizations are allowed. */
3349 static tree
3350 distribute_real_division (location_t loc, enum tree_code code, tree type,
3351 tree arg0, tree arg1)
3352 {
3353 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3354 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3355
3356 /* (A / C) +- (B / C) -> (A +- B) / C. */
3357 if (mul0 == mul1
3358 && operand_equal_p (TREE_OPERAND (arg0, 1),
3359 TREE_OPERAND (arg1, 1), 0))
3360 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3361 fold_build2_loc (loc, code, type,
3362 TREE_OPERAND (arg0, 0),
3363 TREE_OPERAND (arg1, 0)),
3364 TREE_OPERAND (arg0, 1));
3365
3366 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3367 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3368 TREE_OPERAND (arg1, 0), 0)
3369 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3370 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3371 {
3372 REAL_VALUE_TYPE r0, r1;
3373 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3374 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3375 if (!mul0)
3376 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3377 if (!mul1)
3378 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3379 real_arithmetic (&r0, code, &r0, &r1);
3380 return fold_build2_loc (loc, MULT_EXPR, type,
3381 TREE_OPERAND (arg0, 0),
3382 build_real (type, r0));
3383 }
3384
3385 return NULL_TREE;
3386 }
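
/* Worked examples (sketches): a/3.0 + b/3.0 becomes (a + b) / 3.0
   via the first transformation, and a/2.0 + a/4.0 becomes a * 0.75
   via the second, since 1/2.0 + 1/4.0 is evaluated at compile
   time. */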
3387 \f
3388 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3389 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3390
3391 static tree
3392 make_bit_field_ref (location_t loc, tree inner, tree type,
3393 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3394 {
3395 tree result, bftype;
3396
3397 if (bitpos == 0)
3398 {
3399 tree size = TYPE_SIZE (TREE_TYPE (inner));
3400 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3401 || POINTER_TYPE_P (TREE_TYPE (inner)))
3402 && tree_fits_shwi_p (size)
3403 && tree_to_shwi (size) == bitsize)
3404 return fold_convert_loc (loc, type, inner);
3405 }
3406
3407 bftype = type;
3408 if (TYPE_PRECISION (bftype) != bitsize
3409 || TYPE_UNSIGNED (bftype) == !unsignedp)
3410 bftype = build_nonstandard_integer_type (bitsize, 0);
3411
3412 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3413 size_int (bitsize), bitsize_int (bitpos));
3414
3415 if (bftype != type)
3416 result = fold_convert_loc (loc, type, result);
3417
3418 return result;
3419 }
3420
3421 /* Optimize a bit-field compare.
3422
3423 There are two cases: First is a compare against a constant and the
3424 second is a comparison of two items where the fields are at the same
3425 bit position relative to the start of a chunk (byte, halfword, word)
3426 large enough to contain it. In these cases we can avoid the shift
3427 implicit in bitfield extractions.
3428
3429 For constants, we emit a compare of the shifted constant with the
3430 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3431 compared. For two fields at the same position, we do the ANDs with the
3432 similar mask and compare the result of the ANDs.
3433
3434 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3435 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3436 are the left and right operands of the comparison, respectively.
3437
3438 If the optimization described above can be done, we return the resulting
3439 tree. Otherwise we return zero. */
3440
3441 static tree
3442 optimize_bit_field_compare (location_t loc, enum tree_code code,
3443 tree compare_type, tree lhs, tree rhs)
3444 {
3445 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3446 tree type = TREE_TYPE (lhs);
3447 tree signed_type, unsigned_type;
3448 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3449 enum machine_mode lmode, rmode, nmode;
3450 int lunsignedp, runsignedp;
3451 int lvolatilep = 0, rvolatilep = 0;
3452 tree linner, rinner = NULL_TREE;
3453 tree mask;
3454 tree offset;
3455
3456 /* Get all the information about the extractions being done. If the bit size
3457 is the same as the size of the underlying object, we aren't doing an
3458 extraction at all and so can do nothing. We also don't want to
3459 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3460 then will no longer be able to replace it. */
3461 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3462 &lunsignedp, &lvolatilep, false);
3463 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3464 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3465 return 0;
3466
3467 if (!const_p)
3468 {
3469 /* If this is not a constant, we can only do something if bit positions,
3470 sizes, and signedness are the same. */
3471 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3472 &runsignedp, &rvolatilep, false);
3473
3474 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3475 || lunsignedp != runsignedp || offset != 0
3476 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3477 return 0;
3478 }
3479
3480 /* See if we can find a mode to refer to this field. We should be able to,
3481 but fail if we can't. */
3482 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3483 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3484 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3485 TYPE_ALIGN (TREE_TYPE (rinner))),
3486 word_mode, false);
3487 if (nmode == VOIDmode)
3488 return 0;
3489
3490 /* Set signed and unsigned types of the precision of this mode for the
3491 shifts below. */
3492 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3493 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3494
3495 /* Compute the bit position and size for the new reference and our offset
3496 within it. If the new reference is the same size as the original, we
3497 won't optimize anything, so return zero. */
3498 nbitsize = GET_MODE_BITSIZE (nmode);
3499 nbitpos = lbitpos & ~ (nbitsize - 1);
3500 lbitpos -= nbitpos;
3501 if (nbitsize == lbitsize)
3502 return 0;
3503
3504 if (BYTES_BIG_ENDIAN)
3505 lbitpos = nbitsize - lbitsize - lbitpos;
3506
3507 /* Make the mask to be used against the extracted field. */
3508 mask = build_int_cst_type (unsigned_type, -1);
3509 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3510 mask = const_binop (RSHIFT_EXPR, mask,
3511 size_int (nbitsize - lbitsize - lbitpos));
3512
3513 if (! const_p)
3514 /* If not comparing with constant, just rework the comparison
3515 and return. */
3516 return fold_build2_loc (loc, code, compare_type,
3517 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3518 make_bit_field_ref (loc, linner,
3519 unsigned_type,
3520 nbitsize, nbitpos,
3521 1),
3522 mask),
3523 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3524 make_bit_field_ref (loc, rinner,
3525 unsigned_type,
3526 nbitsize, nbitpos,
3527 1),
3528 mask));
3529
3530 /* Otherwise, we are handling the constant case. See if the constant is too
3531 big for the field. Warn and return a tree for 0 (false) if so. We do
3532 this not only for its own sake, but to avoid having to test for this
3533 error case below. If we didn't, we might generate wrong code.
3534
3535 For unsigned fields, the constant shifted right by the field length should
3536 be all zero. For signed fields, the high-order bits should agree with
3537 the sign bit. */
3538
3539 if (lunsignedp)
3540 {
3541 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3542 fold_convert_loc (loc,
3543 unsigned_type, rhs),
3544 size_int (lbitsize))))
3545 {
3546 warning (0, "comparison is always %d due to width of bit-field",
3547 code == NE_EXPR);
3548 return constant_boolean_node (code == NE_EXPR, compare_type);
3549 }
3550 }
3551 else
3552 {
3553 tree tem = const_binop (RSHIFT_EXPR,
3554 fold_convert_loc (loc, signed_type, rhs),
3555 size_int (lbitsize - 1));
3556 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3557 {
3558 warning (0, "comparison is always %d due to width of bit-field",
3559 code == NE_EXPR);
3560 return constant_boolean_node (code == NE_EXPR, compare_type);
3561 }
3562 }
3563
3564 /* Single-bit compares should always be against zero. */
3565 if (lbitsize == 1 && ! integer_zerop (rhs))
3566 {
3567 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3568 rhs = build_int_cst (type, 0);
3569 }
3570
3571 /* Make a new bitfield reference, shift the constant over the
3572 appropriate number of bits and mask it with the computed mask
3573 (in case this was a signed field). If we changed it, make a new one. */
3574 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3575
3576 rhs = const_binop (BIT_AND_EXPR,
3577 const_binop (LSHIFT_EXPR,
3578 fold_convert_loc (loc, unsigned_type, rhs),
3579 size_int (lbitpos)),
3580 mask);
3581
3582 lhs = build2_loc (loc, code, compare_type,
3583 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3584 return lhs;
3585 }
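
/* Worked example (a sketch; details vary with target endianness and
   alignment): given

     struct s { unsigned int f : 3; unsigned int g : 5; } x;

   the comparison x.g == 2 can, on a little-endian target where the
   byte containing G is addressable, be rewritten as roughly

     (w & 0xf8) == (2 << 3)

   where W is the containing unit fetched by make_bit_field_ref, 0xf8
   is the computed mask and 2 << 3 is RHS shifted to the field's bit
   position, avoiding any shift of the extracted field itself. */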
3586 \f
3587 /* Subroutine for fold_truth_andor_1: decode a field reference.
3588
3589 If EXP is a comparison reference, we return the innermost reference.
3590
3591 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3592 set to the starting bit number.
3593
3594 If the innermost field can be completely contained in a mode-sized
3595 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3596
3597    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3598 otherwise it is not changed.
3599
3600 *PUNSIGNEDP is set to the signedness of the field.
3601
3602 *PMASK is set to the mask used. This is either contained in a
3603 BIT_AND_EXPR or derived from the width of the field.
3604
3605 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3606
3607 Return 0 if this is not a component reference or is one that we can't
3608 do anything with. */
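/* For illustration only (hypothetical example, not taken from the GCC
   sources): given EXP of the form ((unsigned char) x.b) & 3, where B is a
   bitfield of X, this returns X, sets *PBITSIZE and *PBITPOS from the
   field, sets *PAND_MASK to 3, and sets *PMASK to 3 restricted to the
   field's width.  */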
3609
3610 static tree
3611 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3612 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3613 int *punsignedp, int *pvolatilep,
3614 tree *pmask, tree *pand_mask)
3615 {
3616 tree outer_type = 0;
3617 tree and_mask = 0;
3618 tree mask, inner, offset;
3619 tree unsigned_type;
3620 unsigned int precision;
3621
3622 /* All the optimizations using this function assume integer fields.
3623 There are problems with FP fields since the type_for_size call
3624 below can fail for, e.g., XFmode. */
3625 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3626 return 0;
3627
3628 /* We are interested in the bare arrangement of bits, so strip everything
3629 that doesn't affect the machine mode. However, record the type of the
3630 outermost expression if it may matter below. */
3631 if (CONVERT_EXPR_P (exp)
3632 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3633 outer_type = TREE_TYPE (exp);
3634 STRIP_NOPS (exp);
3635
3636 if (TREE_CODE (exp) == BIT_AND_EXPR)
3637 {
3638 and_mask = TREE_OPERAND (exp, 1);
3639 exp = TREE_OPERAND (exp, 0);
3640 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3641 if (TREE_CODE (and_mask) != INTEGER_CST)
3642 return 0;
3643 }
3644
3645 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3646 punsignedp, pvolatilep, false);
3647 if ((inner == exp && and_mask == 0)
3648 || *pbitsize < 0 || offset != 0
3649 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3650 return 0;
3651
3652 /* If the number of bits in the reference is the same as the bitsize of
3653 the outer type, then the outer type gives the signedness. Otherwise
3654 (in case of a small bitfield) the signedness is unchanged. */
3655 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3656 *punsignedp = TYPE_UNSIGNED (outer_type);
3657
3658 /* Compute the mask to access the bitfield. */
3659 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3660 precision = TYPE_PRECISION (unsigned_type);
3661
3662 mask = build_int_cst_type (unsigned_type, -1);
3663
3664 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3665 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3666
3667 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3668 if (and_mask != 0)
3669 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3670 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3671
3672 *pmask = mask;
3673 *pand_mask = and_mask;
3674 return inner;
3675 }
3676
3677 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3678    bit positions and the type of MASK is signed.  */
3679
3680 static int
3681 all_ones_mask_p (const_tree mask, unsigned int size)
3682 {
3683 tree type = TREE_TYPE (mask);
3684 unsigned int precision = TYPE_PRECISION (type);
3685
3686 /* If this function returns true when the type of the mask is
3687 UNSIGNED, then there will be errors. In particular see
3688 gcc.c-torture/execute/990326-1.c. There does not appear to be
3689      any documentation paper trail as to why this is so.  But the
3690      pre-wide-int code worked with that restriction and it has been
3691      preserved here.  */
3692 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3693 return false;
3694
3695 return wi::mask (size, false, precision) == mask;
3696 }
3697
3698 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3699 represents the sign bit of EXP's type. If EXP represents a sign
3700 or zero extension, also test VAL against the unextended type.
3701 The return value is the (sub)expression whose sign bit is VAL,
3702 or NULL_TREE otherwise. */
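/* For example (illustrative): for a 32-bit int EXP, sign_bit_p returns
   EXP only when VAL is 0x80000000, i.e. only the sign bit is set.  */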
3703
3704 static tree
3705 sign_bit_p (tree exp, const_tree val)
3706 {
3707 int width;
3708 tree t;
3709
3710 /* Tree EXP must have an integral type. */
3711 t = TREE_TYPE (exp);
3712 if (! INTEGRAL_TYPE_P (t))
3713 return NULL_TREE;
3714
3715 /* Tree VAL must be an integer constant. */
3716 if (TREE_CODE (val) != INTEGER_CST
3717 || TREE_OVERFLOW (val))
3718 return NULL_TREE;
3719
3720 width = TYPE_PRECISION (t);
3721 if (wi::only_sign_bit_p (val, width))
3722 return exp;
3723
3724 /* Handle extension from a narrower type. */
3725 if (TREE_CODE (exp) == NOP_EXPR
3726 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3727 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3728
3729 return NULL_TREE;
3730 }
3731
3732 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3733 to be evaluated unconditionally. */
3734
3735 static int
3736 simple_operand_p (const_tree exp)
3737 {
3738 /* Strip any conversions that don't change the machine mode. */
3739 STRIP_NOPS (exp);
3740
3741 return (CONSTANT_CLASS_P (exp)
3742 || TREE_CODE (exp) == SSA_NAME
3743 || (DECL_P (exp)
3744 && ! TREE_ADDRESSABLE (exp)
3745 && ! TREE_THIS_VOLATILE (exp)
3746 && ! DECL_NONLOCAL (exp)
3747 /* Don't regard global variables as simple. They may be
3748 allocated in ways unknown to the compiler (shared memory,
3749 #pragma weak, etc). */
3750 && ! TREE_PUBLIC (exp)
3751 && ! DECL_EXTERNAL (exp)
3752 /* Weakrefs are not safe to be read, since they can be NULL.
3753 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3754 have DECL_WEAK flag set. */
3755 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3756 /* Loading a static variable is unduly expensive, but global
3757 registers aren't expensive. */
3758 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3759 }
3760
3761 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3762 to be evaluated unconditionally.
3763    In addition to simple_operand_p, we assume that comparisons, conversions,
3764    and logic-not operations are simple if their operands are simple, too.  */
3765
3766 static bool
3767 simple_operand_p_2 (tree exp)
3768 {
3769 enum tree_code code;
3770
3771 if (TREE_SIDE_EFFECTS (exp)
3772 || tree_could_trap_p (exp))
3773 return false;
3774
3775 while (CONVERT_EXPR_P (exp))
3776 exp = TREE_OPERAND (exp, 0);
3777
3778 code = TREE_CODE (exp);
3779
3780 if (TREE_CODE_CLASS (code) == tcc_comparison)
3781 return (simple_operand_p (TREE_OPERAND (exp, 0))
3782 && simple_operand_p (TREE_OPERAND (exp, 1)));
3783
3784 if (code == TRUTH_NOT_EXPR)
3785 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3786
3787 return simple_operand_p (exp);
3788 }
3789
3790 \f
3791 /* The following functions are subroutines to fold_range_test and allow it to
3792 try to change a logical combination of comparisons into a range test.
3793
3794 For example, both
3795 X == 2 || X == 3 || X == 4 || X == 5
3796 and
3797 X >= 2 && X <= 5
3798 are converted to
3799 (unsigned) (X - 2) <= 3
3800
3801 We describe each set of comparisons as being either inside or outside
3802 a range, using a variable named like IN_P, and then describe the
3803 range with a lower and upper bound. If one of the bounds is omitted,
3804 it represents either the highest or lowest value of the type.
3805
3806 In the comments below, we represent a range by two numbers in brackets
3807 preceded by a "+" to designate being inside that range, or a "-" to
3808 designate being outside that range, so the condition can be inverted by
3809 flipping the prefix. An omitted bound is represented by a "-". For
3810 example, "- [-, 10]" means being outside the range starting at the lowest
3811 possible value and ending at 10, in other words, being greater than 10.
3812 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3813 always false.
3814
3815 We set up things so that the missing bounds are handled in a consistent
3816 manner so neither a missing bound nor "true" and "false" need to be
3817 handled using a special case. */
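/* For example (illustrative): X >= 2 && X <= 5 is described by "+ [2, 5]",
   and X < 2 || X > 5 by "- [2, 5]"; inverting a test only flips the
   prefix.  */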
3818
3819 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3820 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3821 and UPPER1_P are nonzero if the respective argument is an upper bound
3822 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3823 must be specified for a comparison. ARG1 will be converted to ARG0's
3824 type if both are specified. */
3825
3826 static tree
3827 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3828 tree arg1, int upper1_p)
3829 {
3830 tree tem;
3831 int result;
3832 int sgn0, sgn1;
3833
3834 /* If neither arg represents infinity, do the normal operation.
3835 Else, if not a comparison, return infinity. Else handle the special
3836 comparison rules. Note that most of the cases below won't occur, but
3837 are handled for consistency. */
3838
3839 if (arg0 != 0 && arg1 != 0)
3840 {
3841 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3842 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3843 STRIP_NOPS (tem);
3844 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3845 }
3846
3847 if (TREE_CODE_CLASS (code) != tcc_comparison)
3848 return 0;
3849
3850 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3851 for neither. In real maths, we cannot assume open ended ranges are
3852 the same. But, this is computer arithmetic, where numbers are finite.
3853      We can therefore replace any unbounded bound with the value Z,
3854      Z being greater than any representable number.  This permits
3855 us to treat unbounded ranges as equal. */
3856 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3857 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3858 switch (code)
3859 {
3860 case EQ_EXPR:
3861 result = sgn0 == sgn1;
3862 break;
3863 case NE_EXPR:
3864 result = sgn0 != sgn1;
3865 break;
3866 case LT_EXPR:
3867 result = sgn0 < sgn1;
3868 break;
3869 case LE_EXPR:
3870 result = sgn0 <= sgn1;
3871 break;
3872 case GT_EXPR:
3873 result = sgn0 > sgn1;
3874 break;
3875 case GE_EXPR:
3876 result = sgn0 >= sgn1;
3877 break;
3878 default:
3879 gcc_unreachable ();
3880 }
3881
3882 return constant_boolean_node (result, type);
3883 }
3884 \f
3885 /* Helper routine for make_range. Perform one step for it, return
3886 new expression if the loop should continue or NULL_TREE if it should
3887 stop. */
3888
3889 tree
3890 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3891 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3892 bool *strict_overflow_p)
3893 {
3894 tree arg0_type = TREE_TYPE (arg0);
3895 tree n_low, n_high, low = *p_low, high = *p_high;
3896 int in_p = *p_in_p, n_in_p;
3897
3898 switch (code)
3899 {
3900 case TRUTH_NOT_EXPR:
3901 /* We can only do something if the range is testing for zero. */
3902 if (low == NULL_TREE || high == NULL_TREE
3903 || ! integer_zerop (low) || ! integer_zerop (high))
3904 return NULL_TREE;
3905 *p_in_p = ! in_p;
3906 return arg0;
3907
3908 case EQ_EXPR: case NE_EXPR:
3909 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3910 /* We can only do something if the range is testing for zero
3911 and if the second operand is an integer constant. Note that
3912 saying something is "in" the range we make is done by
3913 	 complementing IN_P, since it is initially set for the case of
3914 	 being not equal to zero; "out" is leaving it alone.  */
3915 if (low == NULL_TREE || high == NULL_TREE
3916 || ! integer_zerop (low) || ! integer_zerop (high)
3917 || TREE_CODE (arg1) != INTEGER_CST)
3918 return NULL_TREE;
3919
3920 switch (code)
3921 {
3922 case NE_EXPR: /* - [c, c] */
3923 low = high = arg1;
3924 break;
3925 case EQ_EXPR: /* + [c, c] */
3926 in_p = ! in_p, low = high = arg1;
3927 break;
3928 case GT_EXPR: /* - [-, c] */
3929 low = 0, high = arg1;
3930 break;
3931 case GE_EXPR: /* + [c, -] */
3932 in_p = ! in_p, low = arg1, high = 0;
3933 break;
3934 case LT_EXPR: /* - [c, -] */
3935 low = arg1, high = 0;
3936 break;
3937 case LE_EXPR: /* + [-, c] */
3938 in_p = ! in_p, low = 0, high = arg1;
3939 break;
3940 default:
3941 gcc_unreachable ();
3942 }
3943
3944 /* If this is an unsigned comparison, we also know that EXP is
3945 greater than or equal to zero. We base the range tests we make
3946 on that fact, so we record it here so we can parse existing
3947 range tests. We test arg0_type since often the return type
3948 of, e.g. EQ_EXPR, is boolean. */
3949 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3950 {
3951 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3952 in_p, low, high, 1,
3953 build_int_cst (arg0_type, 0),
3954 NULL_TREE))
3955 return NULL_TREE;
3956
3957 in_p = n_in_p, low = n_low, high = n_high;
3958
3959 /* If the high bound is missing, but we have a nonzero low
3960 bound, reverse the range so it goes from zero to the low bound
3961 minus 1. */
3962 if (high == 0 && low && ! integer_zerop (low))
3963 {
3964 in_p = ! in_p;
3965 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3966 build_int_cst (TREE_TYPE (low), 1), 0);
3967 low = build_int_cst (arg0_type, 0);
3968 }
3969 }
3970
3971 *p_low = low;
3972 *p_high = high;
3973 *p_in_p = in_p;
3974 return arg0;
3975
3976 case NEGATE_EXPR:
3977 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3978 	 low and high are non-NULL; the normalize step below will then
	 do the right thing.  */
3979 if (!TYPE_UNSIGNED (arg0_type)
3980 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3981 {
3982 if (low == NULL_TREE)
3983 low = TYPE_MIN_VALUE (arg0_type);
3984 if (high == NULL_TREE)
3985 high = TYPE_MAX_VALUE (arg0_type);
3986 }
3987
3988 /* (-x) IN [a,b] -> x in [-b, -a] */
3989 n_low = range_binop (MINUS_EXPR, exp_type,
3990 build_int_cst (exp_type, 0),
3991 0, high, 1);
3992 n_high = range_binop (MINUS_EXPR, exp_type,
3993 build_int_cst (exp_type, 0),
3994 0, low, 0);
3995 if (n_high != 0 && TREE_OVERFLOW (n_high))
3996 return NULL_TREE;
3997 goto normalize;
3998
3999 case BIT_NOT_EXPR:
4000 /* ~ X -> -X - 1 */
4001 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4002 build_int_cst (exp_type, 1));
4003
4004 case PLUS_EXPR:
4005 case MINUS_EXPR:
4006 if (TREE_CODE (arg1) != INTEGER_CST)
4007 return NULL_TREE;
4008
4009 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4010 move a constant to the other side. */
4011 if (!TYPE_UNSIGNED (arg0_type)
4012 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4013 return NULL_TREE;
4014
4015 /* If EXP is signed, any overflow in the computation is undefined,
4016 so we don't worry about it so long as our computations on
4017 the bounds don't overflow. For unsigned, overflow is defined
4018 and this is exactly the right thing. */
4019 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4020 arg0_type, low, 0, arg1, 0);
4021 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4022 arg0_type, high, 1, arg1, 0);
4023 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4024 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4025 return NULL_TREE;
4026
4027 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4028 *strict_overflow_p = true;
4029
4030 normalize:
4031 /* Check for an unsigned range which has wrapped around the maximum
4032 value thus making n_high < n_low, and normalize it. */
4033 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4034 {
4035 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4036 build_int_cst (TREE_TYPE (n_high), 1), 0);
4037 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4038 build_int_cst (TREE_TYPE (n_low), 1), 0);
4039
4040 /* If the range is of the form +/- [ x+1, x ], we won't
4041 be able to normalize it. But then, it represents the
4042 whole range or the empty set, so make it
4043 +/- [ -, - ]. */
4044 if (tree_int_cst_equal (n_low, low)
4045 && tree_int_cst_equal (n_high, high))
4046 low = high = 0;
4047 else
4048 in_p = ! in_p;
4049 }
4050 else
4051 low = n_low, high = n_high;
4052
4053 *p_low = low;
4054 *p_high = high;
4055 *p_in_p = in_p;
4056 return arg0;
4057
4058 CASE_CONVERT:
4059 case NON_LVALUE_EXPR:
4060 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4061 return NULL_TREE;
4062
4063 if (! INTEGRAL_TYPE_P (arg0_type)
4064 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4065 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4066 return NULL_TREE;
4067
4068 n_low = low, n_high = high;
4069
4070 if (n_low != 0)
4071 n_low = fold_convert_loc (loc, arg0_type, n_low);
4072
4073 if (n_high != 0)
4074 n_high = fold_convert_loc (loc, arg0_type, n_high);
4075
4076       /* If we're converting arg0 from an unsigned type to exp's
4077 	 signed type, we will be doing the comparison as unsigned.
4078 The tests above have already verified that LOW and HIGH
4079 are both positive.
4080
4081 So we have to ensure that we will handle large unsigned
4082 values the same way that the current signed bounds treat
4083 negative values. */
4084
4085 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4086 {
4087 tree high_positive;
4088 tree equiv_type;
4089 /* For fixed-point modes, we need to pass the saturating flag
4090 as the 2nd parameter. */
4091 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4092 equiv_type
4093 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4094 TYPE_SATURATING (arg0_type));
4095 else
4096 equiv_type
4097 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4098
4099 /* A range without an upper bound is, naturally, unbounded.
4100 Since convert would have cropped a very large value, use
4101 the max value for the destination type. */
4102 high_positive
4103 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4104 : TYPE_MAX_VALUE (arg0_type);
4105
4106 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4107 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4108 fold_convert_loc (loc, arg0_type,
4109 high_positive),
4110 build_int_cst (arg0_type, 1));
4111
4112 /* If the low bound is specified, "and" the range with the
4113 range for which the original unsigned value will be
4114 positive. */
4115 if (low != 0)
4116 {
4117 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4118 1, fold_convert_loc (loc, arg0_type,
4119 integer_zero_node),
4120 high_positive))
4121 return NULL_TREE;
4122
4123 in_p = (n_in_p == in_p);
4124 }
4125 else
4126 {
4127 /* Otherwise, "or" the range with the range of the input
4128 that will be interpreted as negative. */
4129 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4130 1, fold_convert_loc (loc, arg0_type,
4131 integer_zero_node),
4132 high_positive))
4133 return NULL_TREE;
4134
4135 in_p = (in_p != n_in_p);
4136 }
4137 }
4138
4139 *p_low = n_low;
4140 *p_high = n_high;
4141 *p_in_p = in_p;
4142 return arg0;
4143
4144 default:
4145 return NULL_TREE;
4146 }
4147 }
4148
4149 /* Given EXP, a logical expression, set the range it is testing into
4150 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4151 actually being tested. *PLOW and *PHIGH will be made of the same
4152 type as the returned expression. If EXP is not a comparison, we
4153 will most likely not be returning a useful value and range. Set
4154 *STRICT_OVERFLOW_P to true if the return value is only valid
4155 because signed overflow is undefined; otherwise, do not change
4156 *STRICT_OVERFLOW_P. */
4157
4158 tree
4159 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4160 bool *strict_overflow_p)
4161 {
4162 enum tree_code code;
4163 tree arg0, arg1 = NULL_TREE;
4164 tree exp_type, nexp;
4165 int in_p;
4166 tree low, high;
4167 location_t loc = EXPR_LOCATION (exp);
4168
4169 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4170 and see if we can refine the range. Some of the cases below may not
4171      happen, but it doesn't seem worth worrying about this.  We keep
4172      iterating as long as make_range_step refines the range; once it
4173      returns NULL_TREE we stop.  */
4174
4175 in_p = 0;
4176 low = high = build_int_cst (TREE_TYPE (exp), 0);
4177
4178 while (1)
4179 {
4180 code = TREE_CODE (exp);
4181 exp_type = TREE_TYPE (exp);
4182 arg0 = NULL_TREE;
4183
4184 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4185 {
4186 if (TREE_OPERAND_LENGTH (exp) > 0)
4187 arg0 = TREE_OPERAND (exp, 0);
4188 if (TREE_CODE_CLASS (code) == tcc_binary
4189 || TREE_CODE_CLASS (code) == tcc_comparison
4190 || (TREE_CODE_CLASS (code) == tcc_expression
4191 && TREE_OPERAND_LENGTH (exp) > 1))
4192 arg1 = TREE_OPERAND (exp, 1);
4193 }
4194 if (arg0 == NULL_TREE)
4195 break;
4196
4197 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4198 &high, &in_p, strict_overflow_p);
4199 if (nexp == NULL_TREE)
4200 break;
4201 exp = nexp;
4202 }
4203
4204 /* If EXP is a constant, we can evaluate whether this is true or false. */
4205 if (TREE_CODE (exp) == INTEGER_CST)
4206 {
4207 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4208 exp, 0, low, 0))
4209 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4210 exp, 1, high, 1)));
4211 low = high = 0;
4212 exp = 0;
4213 }
4214
4215 *pin_p = in_p, *plow = low, *phigh = high;
4216 return exp;
4217 }
4218 \f
4219 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4220 type, TYPE, return an expression to test if EXP is in (or out of, depending
4221 on IN_P) the range. Return 0 if the test couldn't be created. */
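/* For example (illustrative): with IN_P nonzero, LOW = 2 and HIGH = 5 on
   an integer EXP, the check built is (unsigned) (EXP - 2) <= 3, as in the
   range-test example further above.  */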
4222
4223 tree
4224 build_range_check (location_t loc, tree type, tree exp, int in_p,
4225 tree low, tree high)
4226 {
4227 tree etype = TREE_TYPE (exp), value;
4228
4229 #ifdef HAVE_canonicalize_funcptr_for_compare
4230 /* Disable this optimization for function pointer expressions
4231 on targets that require function pointer canonicalization. */
4232 if (HAVE_canonicalize_funcptr_for_compare
4233 && TREE_CODE (etype) == POINTER_TYPE
4234 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4235 return NULL_TREE;
4236 #endif
4237
4238 if (! in_p)
4239 {
4240 value = build_range_check (loc, type, exp, 1, low, high);
4241 if (value != 0)
4242 return invert_truthvalue_loc (loc, value);
4243
4244 return 0;
4245 }
4246
4247 if (low == 0 && high == 0)
4248 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4249
4250 if (low == 0)
4251 return fold_build2_loc (loc, LE_EXPR, type, exp,
4252 fold_convert_loc (loc, etype, high));
4253
4254 if (high == 0)
4255 return fold_build2_loc (loc, GE_EXPR, type, exp,
4256 fold_convert_loc (loc, etype, low));
4257
4258 if (operand_equal_p (low, high, 0))
4259 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4260 fold_convert_loc (loc, etype, low));
4261
4262 if (integer_zerop (low))
4263 {
4264 if (! TYPE_UNSIGNED (etype))
4265 {
4266 etype = unsigned_type_for (etype);
4267 high = fold_convert_loc (loc, etype, high);
4268 exp = fold_convert_loc (loc, etype, exp);
4269 }
4270 return build_range_check (loc, type, exp, 1, 0, high);
4271 }
4272
4273 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4274 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4275 {
4276 int prec = TYPE_PRECISION (etype);
4277
4278 if (wi::mask (prec - 1, false, prec) == high)
4279 {
4280 if (TYPE_UNSIGNED (etype))
4281 {
4282 tree signed_etype = signed_type_for (etype);
4283 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4284 etype
4285 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4286 else
4287 etype = signed_etype;
4288 exp = fold_convert_loc (loc, etype, exp);
4289 }
4290 return fold_build2_loc (loc, GT_EXPR, type, exp,
4291 build_int_cst (etype, 0));
4292 }
4293 }
4294
4295 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4296      This requires wrap-around arithmetic for the type of the expression.
4297      First make sure that arithmetic in this type is valid, then make sure
4298 that it wraps around. */
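  /* Illustrative example: for unsigned char C, (C >= 5) && (C <= 10)
     becomes (unsigned char) (C - 5) <= 5; when C < 5 the subtraction
     wraps to a large value and the <= test correctly fails.  */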
4299 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4300 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4301 TYPE_UNSIGNED (etype));
4302
4303 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4304 {
4305 tree utype, minv, maxv;
4306
4307 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4308 for the type in question, as we rely on this here. */
4309 utype = unsigned_type_for (etype);
4310 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4311 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4312 build_int_cst (TREE_TYPE (maxv), 1), 1);
4313 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4314
4315 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4316 minv, 1, maxv, 1)))
4317 etype = utype;
4318 else
4319 return 0;
4320 }
4321
4322 high = fold_convert_loc (loc, etype, high);
4323 low = fold_convert_loc (loc, etype, low);
4324 exp = fold_convert_loc (loc, etype, exp);
4325
4326 value = const_binop (MINUS_EXPR, high, low);
4327
4328
4329 if (POINTER_TYPE_P (etype))
4330 {
4331 if (value != 0 && !TREE_OVERFLOW (value))
4332 {
4333 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4334 return build_range_check (loc, type,
4335 fold_build_pointer_plus_loc (loc, exp, low),
4336 1, build_int_cst (etype, 0), value);
4337 }
4338 return 0;
4339 }
4340
4341 if (value != 0 && !TREE_OVERFLOW (value))
4342 return build_range_check (loc, type,
4343 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4344 1, build_int_cst (etype, 0), value);
4345
4346 return 0;
4347 }
4348 \f
4349 /* Return the predecessor of VAL in its type, handling the infinite case. */
4350
4351 static tree
4352 range_predecessor (tree val)
4353 {
4354 tree type = TREE_TYPE (val);
4355
4356 if (INTEGRAL_TYPE_P (type)
4357 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4358 return 0;
4359 else
4360 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4361 build_int_cst (TREE_TYPE (val), 1), 0);
4362 }
4363
4364 /* Return the successor of VAL in its type, handling the infinite case. */
4365
4366 static tree
4367 range_successor (tree val)
4368 {
4369 tree type = TREE_TYPE (val);
4370
4371 if (INTEGRAL_TYPE_P (type)
4372 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4373 return 0;
4374 else
4375 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4376 build_int_cst (TREE_TYPE (val), 1), 0);
4377 }
4378
4379 /* Given two ranges, see if we can merge them into one. Return 1 if we
4380 can, 0 if we can't. Set the output range into the specified parameters. */
4381
4382 bool
4383 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4384 tree high0, int in1_p, tree low1, tree high1)
4385 {
4386 int no_overlap;
4387 int subset;
4388 int temp;
4389 tree tem;
4390 int in_p;
4391 tree low, high;
4392 int lowequal = ((low0 == 0 && low1 == 0)
4393 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4394 low0, 0, low1, 0)));
4395 int highequal = ((high0 == 0 && high1 == 0)
4396 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4397 high0, 1, high1, 1)));
4398
4399 /* Make range 0 be the range that starts first, or ends last if they
4400 start at the same value. Swap them if it isn't. */
4401 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4402 low0, 0, low1, 0))
4403 || (lowequal
4404 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4405 high1, 1, high0, 1))))
4406 {
4407 temp = in0_p, in0_p = in1_p, in1_p = temp;
4408 tem = low0, low0 = low1, low1 = tem;
4409 tem = high0, high0 = high1, high1 = tem;
4410 }
4411
4412 /* Now flag two cases, whether the ranges are disjoint or whether the
4413 second range is totally subsumed in the first. Note that the tests
4414 below are simplified by the ones above. */
4415 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4416 high0, 1, low1, 0));
4417 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4418 high1, 1, high0, 1));
4419
4420 /* We now have four cases, depending on whether we are including or
4421 excluding the two ranges. */
4422 if (in0_p && in1_p)
4423 {
4424 /* If they don't overlap, the result is false. If the second range
4425 is a subset it is the result. Otherwise, the range is from the start
4426 of the second to the end of the first. */
4427 if (no_overlap)
4428 in_p = 0, low = high = 0;
4429 else if (subset)
4430 in_p = 1, low = low1, high = high1;
4431 else
4432 in_p = 1, low = low1, high = high0;
4433 }
4434
4435 else if (in0_p && ! in1_p)
4436 {
4437 /* If they don't overlap, the result is the first range. If they are
4438 equal, the result is false. If the second range is a subset of the
4439 first, and the ranges begin at the same place, we go from just after
4440 the end of the second range to the end of the first. If the second
4441 range is not a subset of the first, or if it is a subset and both
4442 ranges end at the same place, the range starts at the start of the
4443 first range and ends just before the second range.
4444 Otherwise, we can't describe this as a single range. */
4445 if (no_overlap)
4446 in_p = 1, low = low0, high = high0;
4447 else if (lowequal && highequal)
4448 in_p = 0, low = high = 0;
4449 else if (subset && lowequal)
4450 {
4451 low = range_successor (high1);
4452 high = high0;
4453 in_p = 1;
4454 if (low == 0)
4455 {
4456 /* We are in the weird situation where high0 > high1 but
4457 high1 has no successor. Punt. */
4458 return 0;
4459 }
4460 }
4461 else if (! subset || highequal)
4462 {
4463 low = low0;
4464 high = range_predecessor (low1);
4465 in_p = 1;
4466 if (high == 0)
4467 {
4468 /* low0 < low1 but low1 has no predecessor. Punt. */
4469 return 0;
4470 }
4471 }
4472 else
4473 return 0;
4474 }
4475
4476 else if (! in0_p && in1_p)
4477 {
4478 /* If they don't overlap, the result is the second range. If the second
4479 is a subset of the first, the result is false. Otherwise,
4480 the range starts just after the first range and ends at the
4481 end of the second. */
4482 if (no_overlap)
4483 in_p = 1, low = low1, high = high1;
4484 else if (subset || highequal)
4485 in_p = 0, low = high = 0;
4486 else
4487 {
4488 low = range_successor (high0);
4489 high = high1;
4490 in_p = 1;
4491 if (low == 0)
4492 {
4493 /* high1 > high0 but high0 has no successor. Punt. */
4494 return 0;
4495 }
4496 }
4497 }
4498
4499 else
4500 {
4501 /* The case where we are excluding both ranges. Here the complex case
4502 is if they don't overlap. In that case, the only time we have a
4503 range is if they are adjacent. If the second is a subset of the
4504 first, the result is the first. Otherwise, the range to exclude
4505 starts at the beginning of the first range and ends at the end of the
4506 second. */
4507 if (no_overlap)
4508 {
4509 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4510 range_successor (high0),
4511 1, low1, 0)))
4512 in_p = 0, low = low0, high = high1;
4513 else
4514 {
4515 /* Canonicalize - [min, x] into - [-, x]. */
4516 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4517 switch (TREE_CODE (TREE_TYPE (low0)))
4518 {
4519 case ENUMERAL_TYPE:
4520 if (TYPE_PRECISION (TREE_TYPE (low0))
4521 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4522 break;
4523 /* FALLTHROUGH */
4524 case INTEGER_TYPE:
4525 if (tree_int_cst_equal (low0,
4526 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4527 low0 = 0;
4528 break;
4529 case POINTER_TYPE:
4530 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4531 && integer_zerop (low0))
4532 low0 = 0;
4533 break;
4534 default:
4535 break;
4536 }
4537
4538 /* Canonicalize - [x, max] into - [x, -]. */
4539 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4540 switch (TREE_CODE (TREE_TYPE (high1)))
4541 {
4542 case ENUMERAL_TYPE:
4543 if (TYPE_PRECISION (TREE_TYPE (high1))
4544 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4545 break;
4546 /* FALLTHROUGH */
4547 case INTEGER_TYPE:
4548 if (tree_int_cst_equal (high1,
4549 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4550 high1 = 0;
4551 break;
4552 case POINTER_TYPE:
4553 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4554 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4555 high1, 1,
4556 build_int_cst (TREE_TYPE (high1), 1),
4557 1)))
4558 high1 = 0;
4559 break;
4560 default:
4561 break;
4562 }
4563
4564 /* The ranges might be also adjacent between the maximum and
4565 minimum values of the given type. For
4566 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4567 return + [x + 1, y - 1]. */
4568 if (low0 == 0 && high1 == 0)
4569 {
4570 low = range_successor (high0);
4571 high = range_predecessor (low1);
4572 if (low == 0 || high == 0)
4573 return 0;
4574
4575 in_p = 1;
4576 }
4577 else
4578 return 0;
4579 }
4580 }
4581 else if (subset)
4582 in_p = 0, low = low0, high = high0;
4583 else
4584 in_p = 0, low = low0, high = high1;
4585 }
4586
4587 *pin_p = in_p, *plow = low, *phigh = high;
4588 return 1;
4589 }
4590 \f
4591
4592 /* Subroutine of fold, looking inside expressions of the form
4593 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4594 of the COND_EXPR. This function is being used also to optimize
4595 A op B ? C : A, by reversing the comparison first.
4596
4597 Return a folded expression whose code is not a COND_EXPR
4598 anymore, or NULL_TREE if no folding opportunity is found. */
4599
4600 static tree
4601 fold_cond_expr_with_comparison (location_t loc, tree type,
4602 tree arg0, tree arg1, tree arg2)
4603 {
4604 enum tree_code comp_code = TREE_CODE (arg0);
4605 tree arg00 = TREE_OPERAND (arg0, 0);
4606 tree arg01 = TREE_OPERAND (arg0, 1);
4607 tree arg1_type = TREE_TYPE (arg1);
4608 tree tem;
4609
4610 STRIP_NOPS (arg1);
4611 STRIP_NOPS (arg2);
4612
4613 /* If we have A op 0 ? A : -A, consider applying the following
4614 transformations:
4615
4616 A == 0? A : -A same as -A
4617 A != 0? A : -A same as A
4618 A >= 0? A : -A same as abs (A)
4619 A > 0? A : -A same as abs (A)
4620 A <= 0? A : -A same as -abs (A)
4621 A < 0? A : -A same as -abs (A)
4622
4623 None of these transformations work for modes with signed
4624 zeros. If A is +/-0, the first two transformations will
4625 change the sign of the result (from +0 to -0, or vice
4626 versa). The last four will fix the sign of the result,
4627 even though the original expressions could be positive or
4628 negative, depending on the sign of A.
4629
4630 Note that all these transformations are correct if A is
4631 NaN, since the two alternatives (A and -A) are also NaNs. */
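  /* For example (illustrative): x > 0.0 ? x : -x folds to ABS_EXPR <x>
     here when signed zeros need not be honored.  */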
4632 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4633 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4634 ? real_zerop (arg01)
4635 : integer_zerop (arg01))
4636 && ((TREE_CODE (arg2) == NEGATE_EXPR
4637 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4638 /* In the case that A is of the form X-Y, '-A' (arg2) may
4639 	     have already been folded to Y-X; check for that.  */
4640 || (TREE_CODE (arg1) == MINUS_EXPR
4641 && TREE_CODE (arg2) == MINUS_EXPR
4642 && operand_equal_p (TREE_OPERAND (arg1, 0),
4643 TREE_OPERAND (arg2, 1), 0)
4644 && operand_equal_p (TREE_OPERAND (arg1, 1),
4645 TREE_OPERAND (arg2, 0), 0))))
4646 switch (comp_code)
4647 {
4648 case EQ_EXPR:
4649 case UNEQ_EXPR:
4650 tem = fold_convert_loc (loc, arg1_type, arg1);
4651 return pedantic_non_lvalue_loc (loc,
4652 fold_convert_loc (loc, type,
4653 negate_expr (tem)));
4654 case NE_EXPR:
4655 case LTGT_EXPR:
4656 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4657 case UNGE_EXPR:
4658 case UNGT_EXPR:
4659 if (flag_trapping_math)
4660 break;
4661 /* Fall through. */
4662 case GE_EXPR:
4663 case GT_EXPR:
4664 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4665 arg1 = fold_convert_loc (loc, signed_type_for
4666 (TREE_TYPE (arg1)), arg1);
4667 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4668 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4669 case UNLE_EXPR:
4670 case UNLT_EXPR:
4671 if (flag_trapping_math)
4672 break;
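      /* Fall through.  */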
4673 case LE_EXPR:
4674 case LT_EXPR:
4675 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4676 arg1 = fold_convert_loc (loc, signed_type_for
4677 (TREE_TYPE (arg1)), arg1);
4678 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4679 return negate_expr (fold_convert_loc (loc, type, tem));
4680 default:
4681 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4682 break;
4683 }
4684
4685 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4686 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4687 both transformations are correct when A is NaN: A != 0
4688 is then true, and A == 0 is false. */
4689
4690 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4691 && integer_zerop (arg01) && integer_zerop (arg2))
4692 {
4693 if (comp_code == NE_EXPR)
4694 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4695 else if (comp_code == EQ_EXPR)
4696 return build_zero_cst (type);
4697 }
4698
4699 /* Try some transformations of A op B ? A : B.
4700
4701 A == B? A : B same as B
4702 A != B? A : B same as A
4703 A >= B? A : B same as max (A, B)
4704 A > B? A : B same as max (B, A)
4705 A <= B? A : B same as min (A, B)
4706 A < B? A : B same as min (B, A)
4707
4708 As above, these transformations don't work in the presence
4709 of signed zeros. For example, if A and B are zeros of
4710 opposite sign, the first two transformations will change
4711 the sign of the result. In the last four, the original
4712 expressions give different results for (A=+0, B=-0) and
4713 (A=-0, B=+0), but the transformed expressions do not.
4714
4715 The first two transformations are correct if either A or B
4716 is a NaN. In the first transformation, the condition will
4717 be false, and B will indeed be chosen. In the case of the
4718 second transformation, the condition A != B will be true,
4719 and A will be chosen.
4720
4721 The conversions to max() and min() are not correct if B is
4722 a number and A is not. The conditions in the original
4723 expressions will be false, so all four give B. The min()
4724 and max() versions would give a NaN instead. */
4725 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4726 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4727 /* Avoid these transformations if the COND_EXPR may be used
4728 as an lvalue in the C++ front-end. PR c++/19199. */
4729 && (in_gimple_form
4730 || VECTOR_TYPE_P (type)
4731 || (strcmp (lang_hooks.name, "GNU C++") != 0
4732 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4733 || ! maybe_lvalue_p (arg1)
4734 || ! maybe_lvalue_p (arg2)))
4735 {
4736 tree comp_op0 = arg00;
4737 tree comp_op1 = arg01;
4738 tree comp_type = TREE_TYPE (comp_op0);
4739
4740 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4741 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4742 {
4743 comp_type = type;
4744 comp_op0 = arg1;
4745 comp_op1 = arg2;
4746 }
4747
4748 switch (comp_code)
4749 {
4750 case EQ_EXPR:
4751 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4752 case NE_EXPR:
4753 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4754 case LE_EXPR:
4755 case LT_EXPR:
4756 case UNLE_EXPR:
4757 case UNLT_EXPR:
4758 /* In C++ a ?: expression can be an lvalue, so put the
4759 operand which will be used if they are equal first
4760 so that we can convert this back to the
4761 corresponding COND_EXPR. */
4762 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4763 {
4764 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4765 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4766 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4767 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4768 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4769 comp_op1, comp_op0);
4770 return pedantic_non_lvalue_loc (loc,
4771 fold_convert_loc (loc, type, tem));
4772 }
4773 break;
4774 case GE_EXPR:
4775 case GT_EXPR:
4776 case UNGE_EXPR:
4777 case UNGT_EXPR:
4778 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4779 {
4780 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4781 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4782 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4783 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4784 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4785 comp_op1, comp_op0);
4786 return pedantic_non_lvalue_loc (loc,
4787 fold_convert_loc (loc, type, tem));
4788 }
4789 break;
4790 case UNEQ_EXPR:
4791 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4792 return pedantic_non_lvalue_loc (loc,
4793 fold_convert_loc (loc, type, arg2));
4794 break;
4795 case LTGT_EXPR:
4796 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4797 return pedantic_non_lvalue_loc (loc,
4798 fold_convert_loc (loc, type, arg1));
4799 break;
4800 default:
4801 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4802 break;
4803 }
4804 }
4805
4806 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4807 we might still be able to simplify this. For example,
4808 if C1 is one less or one more than C2, this might have started
4809 out as a MIN or MAX and been transformed by this function.
4810 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4811
4812 if (INTEGRAL_TYPE_P (type)
4813 && TREE_CODE (arg01) == INTEGER_CST
4814 && TREE_CODE (arg2) == INTEGER_CST)
4815 switch (comp_code)
4816 {
4817 case EQ_EXPR:
4818 if (TREE_CODE (arg1) == INTEGER_CST)
4819 break;
4820 /* We can replace A with C1 in this case. */
4821 arg1 = fold_convert_loc (loc, type, arg01);
4822 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4823
4824 case LT_EXPR:
4825 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4826 MIN_EXPR, to preserve the signedness of the comparison. */
4827 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4828 OEP_ONLY_CONST)
4829 && operand_equal_p (arg01,
4830 const_binop (PLUS_EXPR, arg2,
4831 build_int_cst (type, 1)),
4832 OEP_ONLY_CONST))
4833 {
4834 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4835 fold_convert_loc (loc, TREE_TYPE (arg00),
4836 arg2));
4837 return pedantic_non_lvalue_loc (loc,
4838 fold_convert_loc (loc, type, tem));
4839 }
4840 break;
4841
4842 case LE_EXPR:
4843 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4844 as above. */
4845 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4846 OEP_ONLY_CONST)
4847 && operand_equal_p (arg01,
4848 const_binop (MINUS_EXPR, arg2,
4849 build_int_cst (type, 1)),
4850 OEP_ONLY_CONST))
4851 {
4852 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4853 fold_convert_loc (loc, TREE_TYPE (arg00),
4854 arg2));
4855 return pedantic_non_lvalue_loc (loc,
4856 fold_convert_loc (loc, type, tem));
4857 }
4858 break;
4859
4860 case GT_EXPR:
4861 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4862 MAX_EXPR, to preserve the signedness of the comparison. */
4863 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4864 OEP_ONLY_CONST)
4865 && operand_equal_p (arg01,
4866 const_binop (MINUS_EXPR, arg2,
4867 build_int_cst (type, 1)),
4868 OEP_ONLY_CONST))
4869 {
4870 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4871 fold_convert_loc (loc, TREE_TYPE (arg00),
4872 arg2));
4873 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4874 }
4875 break;
4876
4877 case GE_EXPR:
4878 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4879 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4880 OEP_ONLY_CONST)
4881 && operand_equal_p (arg01,
4882 const_binop (PLUS_EXPR, arg2,
4883 build_int_cst (type, 1)),
4884 OEP_ONLY_CONST))
4885 {
4886 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4887 fold_convert_loc (loc, TREE_TYPE (arg00),
4888 arg2));
4889 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4890 }
4891 break;
4892 case NE_EXPR:
4893 break;
4894 default:
4895 gcc_unreachable ();
4896 }
4897
4898 return NULL_TREE;
4899 }
4900
4901
4902 \f
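/* If the target does not define LOGICAL_OP_NON_SHORT_CIRCUIT, assume that
   replacing a short-circuit logical operation by a non-short-circuit one
   pays off once the branch cost is at least 2.  */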
4903 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4904 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4905 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4906 false) >= 2)
4907 #endif
4908
4909 /* EXP is some logical combination of boolean tests. See if we can
4910 merge it into some range test. Return the new tree if so. */
4911
4912 static tree
4913 fold_range_test (location_t loc, enum tree_code code, tree type,
4914 tree op0, tree op1)
4915 {
4916 int or_op = (code == TRUTH_ORIF_EXPR
4917 || code == TRUTH_OR_EXPR);
4918 int in0_p, in1_p, in_p;
4919 tree low0, low1, low, high0, high1, high;
4920 bool strict_overflow_p = false;
4921 tree tem, lhs, rhs;
4922 const char * const warnmsg = G_("assuming signed overflow does not occur "
4923 "when simplifying range test");
4924
4925 if (!INTEGRAL_TYPE_P (type))
4926 return 0;
4927
4928 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4929 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4930
4931 /* If this is an OR operation, invert both sides; we will invert
4932 again at the end. */
4933 if (or_op)
4934 in0_p = ! in0_p, in1_p = ! in1_p;
4935
4936 /* If both expressions are the same, if we can merge the ranges, and we
4937      can build the range test, return it or its inversion.  If one of the
4938 ranges is always true or always false, consider it to be the same
4939 expression as the other. */
4940 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4941 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4942 in1_p, low1, high1)
4943 && 0 != (tem = (build_range_check (loc, type,
4944 lhs != 0 ? lhs
4945 : rhs != 0 ? rhs : integer_zero_node,
4946 in_p, low, high))))
4947 {
4948 if (strict_overflow_p)
4949 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4950 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4951 }
4952
4953   /* On machines where branches are expensive, if this is a
4954 short-circuited branch and the underlying object on both sides
4955 is the same, make a non-short-circuit operation. */
4956 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4957 && lhs != 0 && rhs != 0
4958 && (code == TRUTH_ANDIF_EXPR
4959 || code == TRUTH_ORIF_EXPR)
4960 && operand_equal_p (lhs, rhs, 0))
4961 {
4962 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4963 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4964 which cases we can't do this. */
4965 if (simple_operand_p (lhs))
4966 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4967 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4968 type, op0, op1);
4969
4970 else if (!lang_hooks.decls.global_bindings_p ()
4971 && !CONTAINS_PLACEHOLDER_P (lhs))
4972 {
4973 tree common = save_expr (lhs);
4974
4975 if (0 != (lhs = build_range_check (loc, type, common,
4976 or_op ? ! in0_p : in0_p,
4977 low0, high0))
4978 && (0 != (rhs = build_range_check (loc, type, common,
4979 or_op ? ! in1_p : in1_p,
4980 low1, high1))))
4981 {
4982 if (strict_overflow_p)
4983 fold_overflow_warning (warnmsg,
4984 WARN_STRICT_OVERFLOW_COMPARISON);
4985 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4986 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4987 type, lhs, rhs);
4988 }
4989 }
4990 }
4991
4992 return 0;
4993 }
4994 \f
4995 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4996 bit value. Arrange things so the extra bits will be set to zero if and
4997    only if C is sign-extended to its full width.  If MASK is nonzero,
4998 it is an INTEGER_CST that should be AND'ed with the extra bits. */
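/* For illustration (hypothetical values, assuming a 32-bit mode): with
   P = 4 and C = 0x0000000A, the code below computes TEMP = 0xFFFFFFF0 and
   returns C ^ TEMP = 0xFFFFFFFA, i.e. the 4-bit value sign-extended; a C
   that was already sign-extended would come back with the extra bits
   cleared instead.  */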
4999
5000 static tree
5001 unextend (tree c, int p, int unsignedp, tree mask)
5002 {
5003 tree type = TREE_TYPE (c);
5004 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5005 tree temp;
5006
5007 if (p == modesize || unsignedp)
5008 return c;
5009
5010 /* We work by getting just the sign bit into the low-order bit, then
5011 into the high-order bit, then sign-extend. We then XOR that value
5012 with C. */
5013 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5014
5015 /* We must use a signed type in order to get an arithmetic right shift.
5016 However, we must also avoid introducing accidental overflows, so that
5017 a subsequent call to integer_zerop will work. Hence we must
5018 do the type conversion here. At this point, the constant is either
5019 zero or one, and the conversion to a signed type can never overflow.
5020 We could get an overflow if this conversion is done anywhere else. */
5021 if (TYPE_UNSIGNED (type))
5022 temp = fold_convert (signed_type_for (type), temp);
5023
5024 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5025 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5026 if (mask != 0)
5027 temp = const_binop (BIT_AND_EXPR, temp,
5028 fold_convert (TREE_TYPE (c), mask));
5029 /* If necessary, convert the type back to match the type of C. */
5030 if (TYPE_UNSIGNED (type))
5031 temp = fold_convert (type, temp);
5032
5033 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5034 }
5035 \f
5036 /* For an expression that has the form
5037 (A && B) || ~B
5038 or
5039 (A || B) && ~B,
5040 we can drop one of the inner expressions and simplify to
5041 A || ~B
5042 or
5043 A && ~B
5044 LOC is the location of the resulting expression. OP is the inner
5045 logical operation; the left-hand side in the examples above, while CMPOP
5046 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5047 removing a condition that guards another, as in
5048 (A != NULL && A->...) || A == NULL
5049 which we must not transform. If RHS_ONLY is true, only eliminate the
5050 right-most operand of the inner logical operation. */
5051
5052 static tree
5053 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5054 bool rhs_only)
5055 {
5056 tree type = TREE_TYPE (cmpop);
5057 enum tree_code code = TREE_CODE (cmpop);
5058 enum tree_code truthop_code = TREE_CODE (op);
5059 tree lhs = TREE_OPERAND (op, 0);
5060 tree rhs = TREE_OPERAND (op, 1);
5061 tree orig_lhs = lhs, orig_rhs = rhs;
5062 enum tree_code rhs_code = TREE_CODE (rhs);
5063 enum tree_code lhs_code = TREE_CODE (lhs);
5064 enum tree_code inv_code;
5065
5066 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5067 return NULL_TREE;
5068
5069 if (TREE_CODE_CLASS (code) != tcc_comparison)
5070 return NULL_TREE;
5071
5072 if (rhs_code == truthop_code)
5073 {
5074 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5075 if (newrhs != NULL_TREE)
5076 {
5077 rhs = newrhs;
5078 rhs_code = TREE_CODE (rhs);
5079 }
5080 }
5081 if (lhs_code == truthop_code && !rhs_only)
5082 {
5083 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5084 if (newlhs != NULL_TREE)
5085 {
5086 lhs = newlhs;
5087 lhs_code = TREE_CODE (lhs);
5088 }
5089 }
5090
5091 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5092 if (inv_code == rhs_code
5093 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5094 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5095 return lhs;
5096 if (!rhs_only && inv_code == lhs_code
5097 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5098 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5099 return rhs;
5100 if (rhs != orig_rhs || lhs != orig_lhs)
5101 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5102 lhs, rhs);
5103 return NULL_TREE;
5104 }
5105
5106 /* Find ways of folding logical expressions of LHS and RHS:
5107 Try to merge two comparisons to the same innermost item.
5108 Look for range tests like "ch >= '0' && ch <= '9'".
5109 Look for combinations of simple terms on machines with expensive branches
5110 and evaluate the RHS unconditionally.
5111
5112 For example, if we have p->a == 2 && p->b == 4 and we can make an
5113 object large enough to span both A and B, we can do this with a comparison
5114 against the object ANDed with the a mask.
5115
5116 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5117 operations to do this with one comparison.
5118
5119    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5120    function and the one above.
5121
5122 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5123 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5124
5125    TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5126 two operands.
5127
5128 We return the simplified tree or 0 if no optimization is possible. */
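/* For illustration (hypothetical layout): given
     struct { unsigned a : 8; unsigned b : 8; } *p;
   on a little-endian target, p->a == 2 && p->b == 4 can be merged into a
   single 16-bit load masked and compared against 0x0402.  */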
5129
5130 static tree
5131 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5132 tree lhs, tree rhs)
5133 {
5134 /* If this is the "or" of two comparisons, we can do something if
5135 the comparisons are NE_EXPR. If this is the "and", we can do something
5136 if the comparisons are EQ_EXPR. I.e.,
5137 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5138
5139 WANTED_CODE is this operation code. For single bit fields, we can
5140 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5141 comparison for one-bit fields. */
5142
5143 enum tree_code wanted_code;
5144 enum tree_code lcode, rcode;
5145 tree ll_arg, lr_arg, rl_arg, rr_arg;
5146 tree ll_inner, lr_inner, rl_inner, rr_inner;
5147 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5148 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5149 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5150 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5151 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5152 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5153 enum machine_mode lnmode, rnmode;
5154 tree ll_mask, lr_mask, rl_mask, rr_mask;
5155 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5156 tree l_const, r_const;
5157 tree lntype, rntype, result;
5158 HOST_WIDE_INT first_bit, end_bit;
5159 int volatilep;
5160
5161 /* Start by getting the comparison codes. Fail if anything is volatile.
5162 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5163 it were surrounded with a NE_EXPR. */
5164
5165 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5166 return 0;
5167
5168 lcode = TREE_CODE (lhs);
5169 rcode = TREE_CODE (rhs);
5170
5171 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5172 {
5173 lhs = build2 (NE_EXPR, truth_type, lhs,
5174 build_int_cst (TREE_TYPE (lhs), 0));
5175 lcode = NE_EXPR;
5176 }
5177
5178 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5179 {
5180 rhs = build2 (NE_EXPR, truth_type, rhs,
5181 build_int_cst (TREE_TYPE (rhs), 0));
5182 rcode = NE_EXPR;
5183 }
5184
5185 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5186 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5187 return 0;
5188
5189 ll_arg = TREE_OPERAND (lhs, 0);
5190 lr_arg = TREE_OPERAND (lhs, 1);
5191 rl_arg = TREE_OPERAND (rhs, 0);
5192 rr_arg = TREE_OPERAND (rhs, 1);
5193
5194 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5195 if (simple_operand_p (ll_arg)
5196 && simple_operand_p (lr_arg))
5197 {
5198 if (operand_equal_p (ll_arg, rl_arg, 0)
5199 && operand_equal_p (lr_arg, rr_arg, 0))
5200 {
5201 result = combine_comparisons (loc, code, lcode, rcode,
5202 truth_type, ll_arg, lr_arg);
5203 if (result)
5204 return result;
5205 }
5206 else if (operand_equal_p (ll_arg, rr_arg, 0)
5207 && operand_equal_p (lr_arg, rl_arg, 0))
5208 {
5209 result = combine_comparisons (loc, code, lcode,
5210 swap_tree_comparison (rcode),
5211 truth_type, ll_arg, lr_arg);
5212 if (result)
5213 return result;
5214 }
5215 }
5216
5217 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5218 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5219
5220 /* If the RHS can be evaluated unconditionally and its operands are
5221 simple, it wins to evaluate the RHS unconditionally on machines
5222 with expensive branches. In this case, this isn't a comparison
5223 that can be merged. */
5224
5225 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5226 false) >= 2
5227 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5228 && simple_operand_p (rl_arg)
5229 && simple_operand_p (rr_arg))
5230 {
5231 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5232 if (code == TRUTH_OR_EXPR
5233 && lcode == NE_EXPR && integer_zerop (lr_arg)
5234 && rcode == NE_EXPR && integer_zerop (rr_arg)
5235 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5236 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5237 return build2_loc (loc, NE_EXPR, truth_type,
5238 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5239 ll_arg, rl_arg),
5240 build_int_cst (TREE_TYPE (ll_arg), 0));
5241
5242 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5243 if (code == TRUTH_AND_EXPR
5244 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5245 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5246 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5247 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5248 return build2_loc (loc, EQ_EXPR, truth_type,
5249 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5250 ll_arg, rl_arg),
5251 build_int_cst (TREE_TYPE (ll_arg), 0));
5252 }
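  /* For illustration (a sketch of the effect, not additional code): with
     BRANCH_COST >= 2 a source condition such as

	if (x != 0 || y != 0) ...

     is treated as if it had been written

	if ((x | y) != 0) ...

     trading the second conditional branch for a single bitwise OR.  */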
5253
5254 /* See if the comparisons can be merged. Then get all the parameters for
5255 each side. */
5256
5257 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5258 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5259 return 0;
5260
5261 volatilep = 0;
5262 ll_inner = decode_field_reference (loc, ll_arg,
5263 &ll_bitsize, &ll_bitpos, &ll_mode,
5264 &ll_unsignedp, &volatilep, &ll_mask,
5265 &ll_and_mask);
5266 lr_inner = decode_field_reference (loc, lr_arg,
5267 &lr_bitsize, &lr_bitpos, &lr_mode,
5268 &lr_unsignedp, &volatilep, &lr_mask,
5269 &lr_and_mask);
5270 rl_inner = decode_field_reference (loc, rl_arg,
5271 &rl_bitsize, &rl_bitpos, &rl_mode,
5272 &rl_unsignedp, &volatilep, &rl_mask,
5273 &rl_and_mask);
5274 rr_inner = decode_field_reference (loc, rr_arg,
5275 &rr_bitsize, &rr_bitpos, &rr_mode,
5276 &rr_unsignedp, &volatilep, &rr_mask,
5277 &rr_and_mask);
5278
5279 /* The inner operation on the lhs of each comparison must be the
5280 same if we are to be able to do anything. Then see if we have
5281 constants. If not, the same must be true for
5282 the rhs's. */
5283 if (volatilep || ll_inner == 0 || rl_inner == 0
5284 || ! operand_equal_p (ll_inner, rl_inner, 0))
5285 return 0;
5286
5287 if (TREE_CODE (lr_arg) == INTEGER_CST
5288 && TREE_CODE (rr_arg) == INTEGER_CST)
5289 l_const = lr_arg, r_const = rr_arg;
5290 else if (lr_inner == 0 || rr_inner == 0
5291 || ! operand_equal_p (lr_inner, rr_inner, 0))
5292 return 0;
5293 else
5294 l_const = r_const = 0;
5295
5296 /* If either comparison code is not correct for our logical operation,
5297 fail. However, we can convert a one-bit comparison against zero into
5298 the opposite comparison against that bit being set in the field. */
5299
5300 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5301 if (lcode != wanted_code)
5302 {
5303 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5304 {
5305 /* Make the left operand unsigned, since we are only interested
5306 in the value of one bit. Otherwise we are doing the wrong
5307 thing below. */
5308 ll_unsignedp = 1;
5309 l_const = ll_mask;
5310 }
5311 else
5312 return 0;
5313 }
5314
5315 /* This is analogous to the code for l_const above. */
5316 if (rcode != wanted_code)
5317 {
5318 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5319 {
5320 rl_unsignedp = 1;
5321 r_const = rl_mask;
5322 }
5323 else
5324 return 0;
5325 }
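  /* For example (illustrative): when merging with TRUTH_AND_EXPR the
     wanted code is EQ_EXPR, so a subterm such as (x & 4) != 0 is recast
     here as the equivalent one-bit test (x & 4) == 4 before merging.  */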
5326
5327 /* See if we can find a mode that contains both fields being compared on
5328 the left. If we can't, fail. Otherwise, update all constants and masks
5329 to be relative to a field of that size. */
5330 first_bit = MIN (ll_bitpos, rl_bitpos);
5331 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5332 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5333 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5334 volatilep);
5335 if (lnmode == VOIDmode)
5336 return 0;
5337
5338 lnbitsize = GET_MODE_BITSIZE (lnmode);
5339 lnbitpos = first_bit & ~ (lnbitsize - 1);
5340 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5341 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5342
5343 if (BYTES_BIG_ENDIAN)
5344 {
5345 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5346 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5347 }
5348
5349 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5350 size_int (xll_bitpos));
5351 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5352 size_int (xrl_bitpos));
5353
5354 if (l_const)
5355 {
5356 l_const = fold_convert_loc (loc, lntype, l_const);
5357 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5358 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5359 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5360 fold_build1_loc (loc, BIT_NOT_EXPR,
5361 lntype, ll_mask))))
5362 {
5363 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5364
5365 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5366 }
5367 }
5368 if (r_const)
5369 {
5370 r_const = fold_convert_loc (loc, lntype, r_const);
5371 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5372 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5373 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5374 fold_build1_loc (loc, BIT_NOT_EXPR,
5375 lntype, rl_mask))))
5376 {
5377 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5378
5379 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5380 }
5381 }
5382
5383 /* If the right sides are not constant, do the same for them. Also,
5384 disallow this optimization if a size or signedness mismatch occurs
5385 between the left and right sides. */
5386 if (l_const == 0)
5387 {
5388 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5389 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5390 /* Make sure the two fields on the right
5391 correspond to the left without being swapped. */
5392 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5393 return 0;
5394
5395 first_bit = MIN (lr_bitpos, rr_bitpos);
5396 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5397 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5398 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5399 volatilep);
5400 if (rnmode == VOIDmode)
5401 return 0;
5402
5403 rnbitsize = GET_MODE_BITSIZE (rnmode);
5404 rnbitpos = first_bit & ~ (rnbitsize - 1);
5405 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5406 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5407
5408 if (BYTES_BIG_ENDIAN)
5409 {
5410 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5411 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5412 }
5413
5414 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5415 rntype, lr_mask),
5416 size_int (xlr_bitpos));
5417 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5418 rntype, rr_mask),
5419 size_int (xrr_bitpos));
5420
5421 /* Make a mask that corresponds to both fields being compared.
5422 Do this for both items being compared. If the operands are the
5423 same size and the bits being compared are in the same position
5424 then we can do this by masking both and comparing the masked
5425 results. */
5426 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5427 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5428 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5429 {
5430 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5431 ll_unsignedp || rl_unsignedp);
5432 if (! all_ones_mask_p (ll_mask, lnbitsize))
5433 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5434
5435 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5436 lr_unsignedp || rr_unsignedp);
5437 if (! all_ones_mask_p (lr_mask, rnbitsize))
5438 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5439
5440 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5441 }
5442
5443 /* There is still another way we can do something: If both pairs of
5444 fields being compared are adjacent, we may be able to make a wider
5445 field containing them both.
5446
5447 Note that we still must mask the lhs/rhs expressions. Furthermore,
5448 the mask must be shifted to account for the shift done by
5449 make_bit_field_ref. */
5450 if ((ll_bitsize + ll_bitpos == rl_bitpos
5451 && lr_bitsize + lr_bitpos == rr_bitpos)
5452 || (ll_bitpos == rl_bitpos + rl_bitsize
5453 && lr_bitpos == rr_bitpos + rr_bitsize))
5454 {
5455 tree type;
5456
5457 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5458 ll_bitsize + rl_bitsize,
5459 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5460 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5461 lr_bitsize + rr_bitsize,
5462 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5463
5464 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5465 size_int (MIN (xll_bitpos, xrl_bitpos)));
5466 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5467 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5468
5469 /* Convert to the smaller type before masking out unwanted bits. */
5470 type = lntype;
5471 if (lntype != rntype)
5472 {
5473 if (lnbitsize > rnbitsize)
5474 {
5475 lhs = fold_convert_loc (loc, rntype, lhs);
5476 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5477 type = rntype;
5478 }
5479 else if (lnbitsize < rnbitsize)
5480 {
5481 rhs = fold_convert_loc (loc, lntype, rhs);
5482 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5483 type = lntype;
5484 }
5485 }
5486
5487 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5488 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5489
5490 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5491 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5492
5493 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5494 }
5495
5496 return 0;
5497 }
5498
5499 /* Handle the case of comparisons with constants. If there is something in
5500 common between the masks, those bits of the constants must be the same.
5501 If not, the condition is always false. Test for this to avoid generating
5502 incorrect code below. */
5503 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5504 if (! integer_zerop (result)
5505 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5506 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5507 {
5508 if (wanted_code == NE_EXPR)
5509 {
5510 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5511 return constant_boolean_node (true, truth_type);
5512 }
5513 else
5514 {
5515 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5516 return constant_boolean_node (false, truth_type);
5517 }
5518 }
5519
5520 /* Construct the expression we will return. First get the component
5521 reference we will make. Unless the mask is all ones the width of
5522 that field, perform the mask operation. Then compare with the
5523 merged constant. */
5524 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5525 ll_unsignedp || rl_unsignedp);
5526
5527 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5528 if (! all_ones_mask_p (ll_mask, lnbitsize))
5529 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5530
5531 return build2_loc (loc, wanted_code, truth_type, result,
5532 const_binop (BIT_IOR_EXPR, l_const, r_const));
5533 }
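
/* Illustrative sketch (an exposition aid, not part of the folder): the
   source-level effect of the bit-field merging above, assuming two
   adjacent fields that land in one word, a in bits 0-3 and b in bits
   4-7.  A test such as s.a == 1 && s.b == 2 becomes one masked compare
   of the containing word against the merged constant.  */
static int
truth_andor_merge_sketch (unsigned word)
{
  unsigned a_mask = 0x0f, b_mask = 0xf0;
  unsigned merged_mask = a_mask | b_mask;	/* 0xff */
  unsigned merged_const = 1 | (2 << 4);		/* 0x21 */
  /* (word & 0x0f) == 1 && (word & 0xf0) == 0x20 merges into:  */
  return (word & merged_mask) == merged_const;
}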
5534 \f
5535 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5536 constant. */
5537
5538 static tree
5539 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5540 tree op0, tree op1)
5541 {
5542 tree arg0 = op0;
5543 enum tree_code op_code;
5544 tree comp_const;
5545 tree minmax_const;
5546 int consts_equal, consts_lt;
5547 tree inner;
5548
5549 STRIP_SIGN_NOPS (arg0);
5550
5551 op_code = TREE_CODE (arg0);
5552 minmax_const = TREE_OPERAND (arg0, 1);
5553 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5554 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5555 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5556 inner = TREE_OPERAND (arg0, 0);
5557
5558 /* If something does not permit us to optimize, punt by returning NULL_TREE. */
5559 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5560 || TREE_CODE (comp_const) != INTEGER_CST
5561 || TREE_OVERFLOW (comp_const)
5562 || TREE_CODE (minmax_const) != INTEGER_CST
5563 || TREE_OVERFLOW (minmax_const))
5564 return NULL_TREE;
5565
5566 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5567 and GT_EXPR, doing the rest with recursive calls using logical
5568 simplifications. */
5569 switch (code)
5570 {
5571 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5572 {
5573 tree tem
5574 = optimize_minmax_comparison (loc,
5575 invert_tree_comparison (code, false),
5576 type, op0, op1);
5577 if (tem)
5578 return invert_truthvalue_loc (loc, tem);
5579 return NULL_TREE;
5580 }
5581
5582 case GE_EXPR:
5583 return
5584 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5585 optimize_minmax_comparison
5586 (loc, EQ_EXPR, type, arg0, comp_const),
5587 optimize_minmax_comparison
5588 (loc, GT_EXPR, type, arg0, comp_const));
5589
5590 case EQ_EXPR:
5591 if (op_code == MAX_EXPR && consts_equal)
5592 /* MAX (X, 0) == 0 -> X <= 0 */
5593 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5594
5595 else if (op_code == MAX_EXPR && consts_lt)
5596 /* MAX (X, 0) == 5 -> X == 5 */
5597 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5598
5599 else if (op_code == MAX_EXPR)
5600 /* MAX (X, 0) == -1 -> false */
5601 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5602
5603 else if (consts_equal)
5604 /* MIN (X, 0) == 0 -> X >= 0 */
5605 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5606
5607 else if (consts_lt)
5608 /* MIN (X, 0) == 5 -> false */
5609 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5610
5611 else
5612 /* MIN (X, 0) == -1 -> X == -1 */
5613 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5614
5615 case GT_EXPR:
5616 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5617 /* MAX (X, 0) > 0 -> X > 0
5618 MAX (X, 0) > 5 -> X > 5 */
5619 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5620
5621 else if (op_code == MAX_EXPR)
5622 /* MAX (X, 0) > -1 -> true */
5623 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5624
5625 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5626 /* MIN (X, 0) > 0 -> false
5627 MIN (X, 0) > 5 -> false */
5628 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5629
5630 else
5631 /* MIN (X, 0) > -1 -> X > -1 */
5632 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5633
5634 default:
5635 return NULL_TREE;
5636 }
5637 }
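
/* A quick sanity sketch (illustrative only, not original code): the
   MIN/MAX identities used above, checked directly on ints.  */
static int
minmax_comparison_sketch (int x)
{
  int ok = 1;
  int max0 = x > 0 ? x : 0;		/* MAX (x, 0) */
  int min0 = x < 0 ? x : 0;		/* MIN (x, 0) */
  ok &= (max0 == 0) == (x <= 0);	/* MAX (X, 0) == 0 -> X <= 0 */
  ok &= max0 > -1;			/* MAX (X, 0) > -1 -> true */
  ok &= (min0 == 0) == (x >= 0);	/* MIN (X, 0) == 0 -> X >= 0 */
  ok &= !(min0 > 5);			/* MIN (X, 0) > 5 -> false */
  return ok;				/* Always 1.  */
}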
5638 \f
5639 /* T is an integer expression that is being multiplied or divided, or
5640 from which a modulus is taken (CODE says which, and what kind of
5641 divide or modulus), by a constant C. See if we can eliminate that operation by folding it with
5642 other operations already in T. WIDE_TYPE, if non-null, is a type that
5643 should be used for the computation if wider than our type.
5644
5645 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5646 (X * 2) + (Y * 4). We must, however, be assured that either the original
5647 expression would not overflow or that overflow is undefined for the type
5648 in the language in question.
5649
5650 If we return a non-null expression, it is an equivalent form of the
5651 original computation, but need not be in the original type.
5652
5653 We set *STRICT_OVERFLOW_P to true if the return value depends on
5654 signed overflow being undefined. Otherwise we do not change
5655 *STRICT_OVERFLOW_P. */
5656
5657 static tree
5658 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5659 bool *strict_overflow_p)
5660 {
5661 /* To avoid exponential search depth, refuse to allow recursion past
5662 three levels. Beyond that (1) it's highly unlikely that we'll find
5663 something interesting and (2) we've probably processed it before
5664 when we built the inner expression. */
5665
5666 static int depth;
5667 tree ret;
5668
5669 if (depth > 3)
5670 return NULL;
5671
5672 depth++;
5673 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5674 depth--;
5675
5676 return ret;
5677 }
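
/* For example (exposition only): extract_muldiv_1 recurses into each
   operand of a PLUS_EXPR, so with signed overflow treated as undefined
   a tree such as ((a * 8 + b * 16) + c * 24) / 4 can be simplified
   operand by operand to a * 2 + b * 4 + c * 6; the static depth counter
   above caps that recursion so the cost of the search stays bounded.  */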
5678
5679 static tree
5680 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5681 bool *strict_overflow_p)
5682 {
5683 tree type = TREE_TYPE (t);
5684 enum tree_code tcode = TREE_CODE (t);
5685 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5686 > GET_MODE_SIZE (TYPE_MODE (type)))
5687 ? wide_type : type);
5688 tree t1, t2;
5689 int same_p = tcode == code;
5690 tree op0 = NULL_TREE, op1 = NULL_TREE;
5691 bool sub_strict_overflow_p;
5692
5693 /* Don't deal with constants of zero here; they confuse the code below. */
5694 if (integer_zerop (c))
5695 return NULL_TREE;
5696
5697 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5698 op0 = TREE_OPERAND (t, 0);
5699
5700 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5701 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5702
5703 /* Note that we need not handle conditional operations here since fold
5704 already handles those cases. So just do arithmetic here. */
5705 switch (tcode)
5706 {
5707 case INTEGER_CST:
5708 /* For a constant, we can always simplify if we are a multiply
5709 or (for divide and modulus) if it is a multiple of our constant. */
5710 if (code == MULT_EXPR
5711 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5712 return const_binop (code, fold_convert (ctype, t),
5713 fold_convert (ctype, c));
5714 break;
5715
5716 CASE_CONVERT: case NON_LVALUE_EXPR:
5717 /* If op0 is an expression ... */
5718 if ((COMPARISON_CLASS_P (op0)
5719 || UNARY_CLASS_P (op0)
5720 || BINARY_CLASS_P (op0)
5721 || VL_EXP_CLASS_P (op0)
5722 || EXPRESSION_CLASS_P (op0))
5723 /* ... and has wrapping overflow, and its type is smaller
5724 than ctype, then we cannot pass through as widening. */
5725 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5726 && (TYPE_PRECISION (ctype)
5727 > TYPE_PRECISION (TREE_TYPE (op0))))
5728 /* ... or this is a truncation (t is narrower than op0),
5729 then we cannot pass through this narrowing. */
5730 || (TYPE_PRECISION (type)
5731 < TYPE_PRECISION (TREE_TYPE (op0)))
5732 /* ... or signedness changes for division or modulus,
5733 then we cannot pass through this conversion. */
5734 || (code != MULT_EXPR
5735 && (TYPE_UNSIGNED (ctype)
5736 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5737 /* ... or has undefined overflow while the converted to
5738 type has not, we cannot do the operation in the inner type
5739 as that would introduce undefined overflow. */
5740 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5741 && !TYPE_OVERFLOW_UNDEFINED (type))))
5742 break;
5743
5744 /* Pass the constant down and see if we can make a simplification. If
5745 we can, replace this expression with the inner simplification for
5746 possible later conversion to our or some other type. */
5747 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5748 && TREE_CODE (t2) == INTEGER_CST
5749 && !TREE_OVERFLOW (t2)
5750 && (0 != (t1 = extract_muldiv (op0, t2, code,
5751 code == MULT_EXPR
5752 ? ctype : NULL_TREE,
5753 strict_overflow_p))))
5754 return t1;
5755 break;
5756
5757 case ABS_EXPR:
5758 /* If widening the type changes it from signed to unsigned, then we
5759 must avoid building ABS_EXPR itself as unsigned. */
5760 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5761 {
5762 tree cstype = (*signed_type_for) (ctype);
5763 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5764 != 0)
5765 {
5766 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5767 return fold_convert (ctype, t1);
5768 }
5769 break;
5770 }
5771 /* If the constant is negative, we cannot simplify this. */
5772 if (tree_int_cst_sgn (c) == -1)
5773 break;
5774 /* FALLTHROUGH */
5775 case NEGATE_EXPR:
5776 /* For division and modulus, type can't be unsigned, as e.g.
5777 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5778 For signed types, even with wrapping overflow, this is fine. */
5779 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5780 break;
5781 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5782 != 0)
5783 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5784 break;
5785
5786 case MIN_EXPR: case MAX_EXPR:
5787 /* If widening the type changes the signedness, then we can't perform
5788 this optimization as that changes the result. */
5789 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5790 break;
5791
5792 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5793 sub_strict_overflow_p = false;
5794 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5795 &sub_strict_overflow_p)) != 0
5796 && (t2 = extract_muldiv (op1, c, code, wide_type,
5797 &sub_strict_overflow_p)) != 0)
5798 {
5799 if (tree_int_cst_sgn (c) < 0)
5800 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5801 if (sub_strict_overflow_p)
5802 *strict_overflow_p = true;
5803 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5804 fold_convert (ctype, t2));
5805 }
5806 break;
5807
5808 case LSHIFT_EXPR: case RSHIFT_EXPR:
5809 /* If the second operand is constant, this is a multiplication
5810 or floor division, by a power of two, so we can treat it that
5811 way unless the multiplier or divisor overflows. Signed
5812 left-shift overflow is implementation-defined rather than
5813 undefined in C90, so do not convert signed left shift into
5814 multiplication. */
5815 if (TREE_CODE (op1) == INTEGER_CST
5816 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5817 /* const_binop may not detect overflow correctly,
5818 so check for it explicitly here. */
5819 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5820 && 0 != (t1 = fold_convert (ctype,
5821 const_binop (LSHIFT_EXPR,
5822 size_one_node,
5823 op1)))
5824 && !TREE_OVERFLOW (t1))
5825 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5826 ? MULT_EXPR : FLOOR_DIV_EXPR,
5827 ctype,
5828 fold_convert (ctype, op0),
5829 t1),
5830 c, code, wide_type, strict_overflow_p);
5831 break;
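    /* For example (illustrative): with unsigned x, (x << 3) * 4 is first
       viewed as (x * 8) * 4, after which the MULT_EXPR case below
       associates the constants into x * 32.  */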
5832
5833 case PLUS_EXPR: case MINUS_EXPR:
5834 /* See if we can eliminate the operation on both sides. If we can, we
5835 can return a new PLUS or MINUS. If we can't, the only remaining
5836 cases where we can do anything are if the second operand is a
5837 constant. */
5838 sub_strict_overflow_p = false;
5839 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5840 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5841 if (t1 != 0 && t2 != 0
5842 && (code == MULT_EXPR
5843 /* If not multiplication, we can only do this if both operands
5844 are divisible by c. */
5845 || (multiple_of_p (ctype, op0, c)
5846 && multiple_of_p (ctype, op1, c))))
5847 {
5848 if (sub_strict_overflow_p)
5849 *strict_overflow_p = true;
5850 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5851 fold_convert (ctype, t2));
5852 }
5853
5854 /* If this was a subtraction, negate OP1 and set it to be an addition.
5855 This simplifies the logic below. */
5856 if (tcode == MINUS_EXPR)
5857 {
5858 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5859 /* If OP1 was not easily negatable, the constant may be OP0. */
5860 if (TREE_CODE (op0) == INTEGER_CST)
5861 {
5862 tree tem = op0;
5863 op0 = op1;
5864 op1 = tem;
5865 tem = t1;
5866 t1 = t2;
5867 t2 = tem;
5868 }
5869 }
5870
5871 if (TREE_CODE (op1) != INTEGER_CST)
5872 break;
5873
5874 /* If either OP1 or C are negative, this optimization is not safe for
5875 some of the division and remainder types while for others we need
5876 to change the code. */
5877 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5878 {
5879 if (code == CEIL_DIV_EXPR)
5880 code = FLOOR_DIV_EXPR;
5881 else if (code == FLOOR_DIV_EXPR)
5882 code = CEIL_DIV_EXPR;
5883 else if (code != MULT_EXPR
5884 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5885 break;
5886 }
5887
5888 /* If it's a multiply or a division/modulus operation of a multiple
5889 of our constant, do the operation and verify it doesn't overflow. */
5890 if (code == MULT_EXPR
5891 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5892 {
5893 op1 = const_binop (code, fold_convert (ctype, op1),
5894 fold_convert (ctype, c));
5895 /* We allow the constant to overflow with wrapping semantics. */
5896 if (op1 == 0
5897 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5898 break;
5899 }
5900 else
5901 break;
5902
5903 /* If we have an unsigned type, we cannot widen the operation since it
5904 will change the result if the original computation overflowed. */
5905 if (TYPE_UNSIGNED (ctype) && ctype != type)
5906 break;
5907
5908 /* If we were able to eliminate our operation from the first side,
5909 apply our operation to the second side and reform the PLUS. */
5910 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5911 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5912
5913 /* The last case is if we are a multiply. In that case, we can
5914 apply the distributive law to commute the multiply and addition
5915 if the multiplication of the constants doesn't overflow
5916 and overflow is defined. With undefined overflow
5917 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5918 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5919 return fold_build2 (tcode, ctype,
5920 fold_build2 (code, ctype,
5921 fold_convert (ctype, op0),
5922 fold_convert (ctype, c)),
5923 op1);
5924
5925 break;
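    /* For example (illustrative): in the case above, with signed x and y
       and signed overflow treated as undefined, (x * 4 + y * 8) / 4
       simplifies to x + y * 2, since both addends are divisible by the
       constant.  */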
5926
5927 case MULT_EXPR:
5928 /* We have a special case here if we are doing something like
5929 (C * 8) % 4 since we know that's zero. */
5930 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5931 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5932 /* If the multiplication can overflow we cannot optimize this. */
5933 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5934 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5935 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5936 {
5937 *strict_overflow_p = true;
5938 return omit_one_operand (type, integer_zero_node, op0);
5939 }
5940
5941 /* ... fall through ... */
5942
5943 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5944 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5945 /* If we can extract our operation from the LHS, do so and return a
5946 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5947 do something only if the second operand is a constant. */
5948 if (same_p
5949 && (t1 = extract_muldiv (op0, c, code, wide_type,
5950 strict_overflow_p)) != 0)
5951 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5952 fold_convert (ctype, op1));
5953 else if (tcode == MULT_EXPR && code == MULT_EXPR
5954 && (t1 = extract_muldiv (op1, c, code, wide_type,
5955 strict_overflow_p)) != 0)
5956 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5957 fold_convert (ctype, t1));
5958 else if (TREE_CODE (op1) != INTEGER_CST)
5959 return 0;
5960
5961 /* If these are the same operation types, we can associate them
5962 assuming no overflow. */
5963 if (tcode == code)
5964 {
5965 bool overflow_p = false;
5966 bool overflow_mul_p;
5967 signop sign = TYPE_SIGN (ctype);
5968 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5969 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5970 if (overflow_mul_p
5971 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5972 overflow_p = true;
5973 if (!overflow_p)
5974 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5975 wide_int_to_tree (ctype, mul));
5976 }
5977
5978 /* If these operations "cancel" each other, we have the main
5979 optimizations of this pass, which occur when either constant is a
5980 multiple of the other, in which case we replace this with an
5981 operation of either CODE or TCODE.
5982
5983 If we have an unsigned type, we cannot do this since it will change
5984 the result if the original computation overflowed. */
5985 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5986 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5987 || (tcode == MULT_EXPR
5988 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5989 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5990 && code != MULT_EXPR)))
5991 {
5992 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5993 {
5994 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5995 *strict_overflow_p = true;
5996 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5997 fold_convert (ctype,
5998 const_binop (TRUNC_DIV_EXPR,
5999 op1, c)));
6000 }
6001 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6002 {
6003 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6004 *strict_overflow_p = true;
6005 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6006 fold_convert (ctype,
6007 const_binop (TRUNC_DIV_EXPR,
6008 c, op1)));
6009 }
6010 }
6011 break;
6012
6013 default:
6014 break;
6015 }
6016
6017 return 0;
6018 }
6019 \f
6020 /* Return a node which has the indicated constant VALUE (either 0 or
6021 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6022 and is of the indicated TYPE. */
6023
6024 tree
6025 constant_boolean_node (bool value, tree type)
6026 {
6027 if (type == integer_type_node)
6028 return value ? integer_one_node : integer_zero_node;
6029 else if (type == boolean_type_node)
6030 return value ? boolean_true_node : boolean_false_node;
6031 else if (TREE_CODE (type) == VECTOR_TYPE)
6032 return build_vector_from_val (type,
6033 build_int_cst (TREE_TYPE (type),
6034 value ? -1 : 0));
6035 else
6036 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6037 }
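
/* For instance (exposition only): constant_boolean_node (true,
   boolean_type_node) yields boolean_true_node, while for a vector
   comparison type it yields the all-ones vector { -1, -1, ... } that
   vector compares produce.  */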
6038
6039
6040 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6041 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6042 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6043 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6044 COND is the first argument to CODE; otherwise (as in the example
6045 given here), it is the second argument. TYPE is the type of the
6046 original expression. Return NULL_TREE if no simplification is
6047 possible. */
6048
6049 static tree
6050 fold_binary_op_with_conditional_arg (location_t loc,
6051 enum tree_code code,
6052 tree type, tree op0, tree op1,
6053 tree cond, tree arg, int cond_first_p)
6054 {
6055 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6056 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6057 tree test, true_value, false_value;
6058 tree lhs = NULL_TREE;
6059 tree rhs = NULL_TREE;
6060 enum tree_code cond_code = COND_EXPR;
6061
6062 if (TREE_CODE (cond) == COND_EXPR
6063 || TREE_CODE (cond) == VEC_COND_EXPR)
6064 {
6065 test = TREE_OPERAND (cond, 0);
6066 true_value = TREE_OPERAND (cond, 1);
6067 false_value = TREE_OPERAND (cond, 2);
6068 /* If this operand throws an exception, then it does not make
6069 sense to try to perform a logical or arithmetic operation
6070 involving it. */
6071 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6072 lhs = true_value;
6073 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6074 rhs = false_value;
6075 }
6076 else
6077 {
6078 tree testtype = TREE_TYPE (cond);
6079 test = cond;
6080 true_value = constant_boolean_node (true, testtype);
6081 false_value = constant_boolean_node (false, testtype);
6082 }
6083
6084 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6085 cond_code = VEC_COND_EXPR;
6086
6087 /* This transformation is only worthwhile if we don't have to wrap ARG
6088 in a SAVE_EXPR and the operation can be simplified without recursing
6089 on at least one of the branches once it's pushed inside the COND_EXPR. */
6090 if (!TREE_CONSTANT (arg)
6091 && (TREE_SIDE_EFFECTS (arg)
6092 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6093 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6094 return NULL_TREE;
6095
6096 arg = fold_convert_loc (loc, arg_type, arg);
6097 if (lhs == 0)
6098 {
6099 true_value = fold_convert_loc (loc, cond_type, true_value);
6100 if (cond_first_p)
6101 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6102 else
6103 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6104 }
6105 if (rhs == 0)
6106 {
6107 false_value = fold_convert_loc (loc, cond_type, false_value);
6108 if (cond_first_p)
6109 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6110 else
6111 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6112 }
6113
6114 /* Check that we have simplified at least one of the branches. */
6115 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6116 return NULL_TREE;
6117
6118 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6119 }
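
/* Illustrative sketch (not original code): the shape of the rewrite
   above when ARG is constant, so at least one arm folds.  */
static int
cond_arg_rewrite_sketch (int b, int x)
{
  /* 5 + (b ? x : 3) is rewritten as b ? (5 + x) : 8; the false arm
     folded to a constant, which is what makes the rewrite a win.  */
  return b ? (5 + x) : 8;
}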
6120
6121 \f
6122 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6123
6124 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6125 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6126 ADDEND is the same as X.
6127
6128 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6129 and finite. The problematic cases are when X is zero, and its mode
6130 has signed zeros. In the case of rounding towards -infinity,
6131 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6132 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6133
6134 bool
6135 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6136 {
6137 if (!real_zerop (addend))
6138 return false;
6139
6140 /* Don't allow the fold with -fsignaling-nans. */
6141 if (HONOR_SNANS (TYPE_MODE (type)))
6142 return false;
6143
6144 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6145 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6146 return true;
6147
6148 /* In a vector or complex, we would need to check the sign of all zeros. */
6149 if (TREE_CODE (addend) != REAL_CST)
6150 return false;
6151
6152 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6153 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6154 negate = !negate;
6155
6156 /* The mode has signed zeros, and we have to honor their sign.
6157 In this situation, there is only one case we can return true for.
6158 X - 0 is the same as X unless rounding towards -infinity is
6159 supported. */
6160 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6161 }
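
/* Concretely (exposition only): x - 0.0 may fold to x even when signed
   zeros are honored, provided sign-dependent rounding is not, while
   x + 0.0 may not, since (-0.0) + 0.0 is +0.0.  x + (-0.0) is handled
   as x - 0.0 by the negation flip above.  */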
6162
6163 /* Subroutine of fold() that checks comparisons of built-in math
6164 functions against real constants.
6165
6166 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6167 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6168 is the type of the result and ARG0 and ARG1 are the operands of the
6169 comparison. ARG1 must be a TREE_REAL_CST.
6170
6171 The function returns the constant folded tree if a simplification
6172 can be made, and NULL_TREE otherwise. */
6173
6174 static tree
6175 fold_mathfn_compare (location_t loc,
6176 enum built_in_function fcode, enum tree_code code,
6177 tree type, tree arg0, tree arg1)
6178 {
6179 REAL_VALUE_TYPE c;
6180
6181 if (BUILTIN_SQRT_P (fcode))
6182 {
6183 tree arg = CALL_EXPR_ARG (arg0, 0);
6184 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6185
6186 c = TREE_REAL_CST (arg1);
6187 if (REAL_VALUE_NEGATIVE (c))
6188 {
6189 /* sqrt(x) < y is always false, if y is negative. */
6190 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6191 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6192
6193 /* sqrt(x) > y is always true, if y is negative and we
6194 don't care about NaNs, i.e. negative values of x. */
6195 if (code == NE_EXPR || !HONOR_NANS (mode))
6196 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6197
6198 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6199 return fold_build2_loc (loc, GE_EXPR, type, arg,
6200 build_real (TREE_TYPE (arg), dconst0));
6201 }
6202 else if (code == GT_EXPR || code == GE_EXPR)
6203 {
6204 REAL_VALUE_TYPE c2;
6205
6206 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6207 real_convert (&c2, mode, &c2);
6208
6209 if (REAL_VALUE_ISINF (c2))
6210 {
6211 /* sqrt(x) > y is x == +Inf, when y is very large. */
6212 if (HONOR_INFINITIES (mode))
6213 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6214 build_real (TREE_TYPE (arg), c2));
6215
6216 /* sqrt(x) > y is always false, when y is very large
6217 and we don't care about infinities. */
6218 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6219 }
6220
6221 /* sqrt(x) > c is the same as x > c*c. */
6222 return fold_build2_loc (loc, code, type, arg,
6223 build_real (TREE_TYPE (arg), c2));
6224 }
6225 else if (code == LT_EXPR || code == LE_EXPR)
6226 {
6227 REAL_VALUE_TYPE c2;
6228
6229 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6230 real_convert (&c2, mode, &c2);
6231
6232 if (REAL_VALUE_ISINF (c2))
6233 {
6234 /* sqrt(x) < y is always true, when y is a very large
6235 value and we don't care about NaNs or Infinities. */
6236 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6237 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6238
6239 /* sqrt(x) < y is x != +Inf when y is very large and we
6240 don't care about NaNs. */
6241 if (! HONOR_NANS (mode))
6242 return fold_build2_loc (loc, NE_EXPR, type, arg,
6243 build_real (TREE_TYPE (arg), c2));
6244
6245 /* sqrt(x) < y is x >= 0 when y is very large and we
6246 don't care about Infinities. */
6247 if (! HONOR_INFINITIES (mode))
6248 return fold_build2_loc (loc, GE_EXPR, type, arg,
6249 build_real (TREE_TYPE (arg), dconst0));
6250
6251 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6252 arg = save_expr (arg);
6253 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6254 fold_build2_loc (loc, GE_EXPR, type, arg,
6255 build_real (TREE_TYPE (arg),
6256 dconst0)),
6257 fold_build2_loc (loc, NE_EXPR, type, arg,
6258 build_real (TREE_TYPE (arg),
6259 c2)));
6260 }
6261
6262 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6263 if (! HONOR_NANS (mode))
6264 return fold_build2_loc (loc, code, type, arg,
6265 build_real (TREE_TYPE (arg), c2));
6266
6267 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6268 arg = save_expr (arg);
6269 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6270 fold_build2_loc (loc, GE_EXPR, type, arg,
6271 build_real (TREE_TYPE (arg),
6272 dconst0)),
6273 fold_build2_loc (loc, code, type, arg,
6274 build_real (TREE_TYPE (arg),
6275 c2)));
6276 }
6277 }
6278
6279 return NULL_TREE;
6280 }
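
/* For example (illustrative): ignoring NaNs, sqrt (x) > 2.0 folds to
   x > 4.0 and sqrt (x) < 2.0 to x < 4.0; for negative y, sqrt (x) <= y
   is constant false, and sqrt (x) > y reduces to x >= 0.0 when NaNs
   are honored.  */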
6281
6282 /* Subroutine of fold() that optimizes comparisons against Infinities,
6283 either +Inf or -Inf.
6284
6285 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6286 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6287 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6288
6289 The function returns the constant folded tree if a simplification
6290 can be made, and NULL_TREE otherwise. */
6291
6292 static tree
6293 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6294 tree arg0, tree arg1)
6295 {
6296 enum machine_mode mode;
6297 REAL_VALUE_TYPE max;
6298 tree temp;
6299 bool neg;
6300
6301 mode = TYPE_MODE (TREE_TYPE (arg0));
6302
6303 /* For negative infinity swap the sense of the comparison. */
6304 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6305 if (neg)
6306 code = swap_tree_comparison (code);
6307
6308 switch (code)
6309 {
6310 case GT_EXPR:
6311 /* x > +Inf is always false, if we ignore sNaNs. */
6312 if (HONOR_SNANS (mode))
6313 return NULL_TREE;
6314 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6315
6316 case LE_EXPR:
6317 /* x <= +Inf is always true, if we don't care about NaNs. */
6318 if (! HONOR_NANS (mode))
6319 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6320
6321 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6322 arg0 = save_expr (arg0);
6323 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6324
6325 case EQ_EXPR:
6326 case GE_EXPR:
6327 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6328 real_maxval (&max, neg, mode);
6329 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6331
6332 case LT_EXPR:
6333 /* x < +Inf is always equal to x <= DBL_MAX. */
6334 real_maxval (&max, neg, mode);
6335 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6336 arg0, build_real (TREE_TYPE (arg0), max));
6337
6338 case NE_EXPR:
6339 /* x != +Inf is always equal to !(x > DBL_MAX). */
6340 real_maxval (&max, neg, mode);
6341 if (! HONOR_NANS (mode))
6342 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6343 arg0, build_real (TREE_TYPE (arg0), max));
6344
6345 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6346 arg0, build_real (TREE_TYPE (arg0), max));
6347 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6348
6349 default:
6350 break;
6351 }
6352
6353 return NULL_TREE;
6354 }
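
/* For example (illustrative): for doubles, x < HUGE_VAL becomes
   x <= DBL_MAX and x >= HUGE_VAL becomes x > DBL_MAX; comparisons
   against -HUGE_VAL are handled by swapping the sense first.  */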
6355
6356 /* Subroutine of fold() that optimizes comparisons of a division by
6357 a nonzero integer constant against an integer constant, i.e.
6358 X/C1 op C2.
6359
6360 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6361 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6362 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6363
6364 The function returns the constant folded tree if a simplification
6365 can be made, and NULL_TREE otherwise. */
6366
6367 static tree
6368 fold_div_compare (location_t loc,
6369 enum tree_code code, tree type, tree arg0, tree arg1)
6370 {
6371 tree prod, tmp, hi, lo;
6372 tree arg00 = TREE_OPERAND (arg0, 0);
6373 tree arg01 = TREE_OPERAND (arg0, 1);
6374 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6375 bool neg_overflow = false;
6376 bool overflow;
6377
6378 /* We have to do this the hard way to detect unsigned overflow.
6379 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6380 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6381 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6382 neg_overflow = false;
6383
6384 if (sign == UNSIGNED)
6385 {
6386 tmp = int_const_binop (MINUS_EXPR, arg01,
6387 build_int_cst (TREE_TYPE (arg01), 1));
6388 lo = prod;
6389
6390 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6391 val = wi::add (prod, tmp, sign, &overflow);
6392 hi = force_fit_type (TREE_TYPE (arg00), val,
6393 -1, overflow | TREE_OVERFLOW (prod));
6394 }
6395 else if (tree_int_cst_sgn (arg01) >= 0)
6396 {
6397 tmp = int_const_binop (MINUS_EXPR, arg01,
6398 build_int_cst (TREE_TYPE (arg01), 1));
6399 switch (tree_int_cst_sgn (arg1))
6400 {
6401 case -1:
6402 neg_overflow = true;
6403 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6404 hi = prod;
6405 break;
6406
6407 case 0:
6408 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6409 hi = tmp;
6410 break;
6411
6412 case 1:
6413 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6414 lo = prod;
6415 break;
6416
6417 default:
6418 gcc_unreachable ();
6419 }
6420 }
6421 else
6422 {
6423 /* A negative divisor reverses the relational operators. */
6424 code = swap_tree_comparison (code);
6425
6426 tmp = int_const_binop (PLUS_EXPR, arg01,
6427 build_int_cst (TREE_TYPE (arg01), 1));
6428 switch (tree_int_cst_sgn (arg1))
6429 {
6430 case -1:
6431 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6432 lo = prod;
6433 break;
6434
6435 case 0:
6436 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6437 lo = tmp;
6438 break;
6439
6440 case 1:
6441 neg_overflow = true;
6442 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6443 hi = prod;
6444 break;
6445
6446 default:
6447 gcc_unreachable ();
6448 }
6449 }
6450
6451 switch (code)
6452 {
6453 case EQ_EXPR:
6454 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6455 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6456 if (TREE_OVERFLOW (hi))
6457 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6458 if (TREE_OVERFLOW (lo))
6459 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6460 return build_range_check (loc, type, arg00, 1, lo, hi);
6461
6462 case NE_EXPR:
6463 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6464 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6465 if (TREE_OVERFLOW (hi))
6466 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6467 if (TREE_OVERFLOW (lo))
6468 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6469 return build_range_check (loc, type, arg00, 0, lo, hi);
6470
6471 case LT_EXPR:
6472 if (TREE_OVERFLOW (lo))
6473 {
6474 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6475 return omit_one_operand_loc (loc, type, tmp, arg00);
6476 }
6477 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6478
6479 case LE_EXPR:
6480 if (TREE_OVERFLOW (hi))
6481 {
6482 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6483 return omit_one_operand_loc (loc, type, tmp, arg00);
6484 }
6485 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6486
6487 case GT_EXPR:
6488 if (TREE_OVERFLOW (hi))
6489 {
6490 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6491 return omit_one_operand_loc (loc, type, tmp, arg00);
6492 }
6493 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6494
6495 case GE_EXPR:
6496 if (TREE_OVERFLOW (lo))
6497 {
6498 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6499 return omit_one_operand_loc (loc, type, tmp, arg00);
6500 }
6501 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6502
6503 default:
6504 break;
6505 }
6506
6507 return NULL_TREE;
6508 }
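
/* Illustrative sketch (not original code): the range form computed
   above, for a concrete signed case.  The division drops out of
   x / 4 == 2 because it holds exactly when x is in [8, 11].  */
static int
div_compare_sketch (int x)
{
  return (x / 4 == 2) == (x >= 8 && x <= 11);	/* Always 1.  */
}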
6509
6510
6511 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6512 equality/inequality test, then return a simplified form of the test
6513 using a sign testing. Otherwise return NULL. TYPE is the desired
6514 result type. */
6515
6516 static tree
6517 fold_single_bit_test_into_sign_test (location_t loc,
6518 enum tree_code code, tree arg0, tree arg1,
6519 tree result_type)
6520 {
6521 /* If this is testing a single bit, we can optimize the test. */
6522 if ((code == NE_EXPR || code == EQ_EXPR)
6523 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6524 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6525 {
6526 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6527 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6528 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6529
6530 if (arg00 != NULL_TREE
6531 /* This is only a win if casting to a signed type is cheap,
6532 i.e. when arg00's type is not a partial mode. */
6533 && TYPE_PRECISION (TREE_TYPE (arg00))
6534 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6535 {
6536 tree stype = signed_type_for (TREE_TYPE (arg00));
6537 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6538 result_type,
6539 fold_convert_loc (loc, stype, arg00),
6540 build_int_cst (stype, 0));
6541 }
6542 }
6543
6544 return NULL_TREE;
6545 }
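
/* For example (illustrative): with a 32-bit type, (x & 0x80000000) != 0
   tests the sign bit, so it becomes (int) x < 0, and the == 0 form
   becomes (int) x >= 0.  */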
6546
6547 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6548 equality/inequality test, then return a simplified form of
6549 the test using shifts and logical operations. Otherwise return
6550 NULL. TYPE is the desired result type. */
6551
6552 tree
6553 fold_single_bit_test (location_t loc, enum tree_code code,
6554 tree arg0, tree arg1, tree result_type)
6555 {
6556 /* If this is testing a single bit, we can optimize the test. */
6557 if ((code == NE_EXPR || code == EQ_EXPR)
6558 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6559 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6560 {
6561 tree inner = TREE_OPERAND (arg0, 0);
6562 tree type = TREE_TYPE (arg0);
6563 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6564 enum machine_mode operand_mode = TYPE_MODE (type);
6565 int ops_unsigned;
6566 tree signed_type, unsigned_type, intermediate_type;
6567 tree tem, one;
6568
6569 /* First, see if we can fold the single bit test into a sign-bit
6570 test. */
6571 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6572 result_type);
6573 if (tem)
6574 return tem;
6575
6576 /* Otherwise we have (A & C) != 0 where C is a single bit,
6577 convert that into ((A >> C2) & 1), where C2 = log2(C).
6578 Similarly for (A & C) == 0. */
6579
6580 /* If INNER is a right shift of a constant and it plus BITNUM does
6581 not overflow, adjust BITNUM and INNER. */
6582 if (TREE_CODE (inner) == RSHIFT_EXPR
6583 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6584 && bitnum < TYPE_PRECISION (type)
6585 && wi::ltu_p (TREE_OPERAND (inner, 1),
6586 TYPE_PRECISION (type) - bitnum))
6587 {
6588 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6589 inner = TREE_OPERAND (inner, 0);
6590 }
6591
6592 /* If we are going to be able to omit the AND below, we must do our
6593 operations as unsigned. If we must use the AND, we have a choice.
6594 Normally unsigned is faster, but for some machines signed is. */
6595 #ifdef LOAD_EXTEND_OP
6596 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6597 && !flag_syntax_only) ? 0 : 1;
6598 #else
6599 ops_unsigned = 1;
6600 #endif
6601
6602 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6603 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6604 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6605 inner = fold_convert_loc (loc, intermediate_type, inner);
6606
6607 if (bitnum != 0)
6608 inner = build2 (RSHIFT_EXPR, intermediate_type,
6609 inner, size_int (bitnum));
6610
6611 one = build_int_cst (intermediate_type, 1);
6612
6613 if (code == EQ_EXPR)
6614 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6615
6616 /* Put the AND last so it can combine with more things. */
6617 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6618
6619 /* Make sure to return the proper type. */
6620 inner = fold_convert_loc (loc, result_type, inner);
6621
6622 return inner;
6623 }
6624 return NULL_TREE;
6625 }
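
/* Illustrative sketch (not original code): the shift form produced for
   a bit other than the sign bit, assuming 32-bit unsigned int.  */
static unsigned
single_bit_test_sketch (unsigned x)
{
  /* (x & 8) != 0 is rewritten as (x >> 3) & 1; the == 0 form gets an
     extra XOR with 1, and the AND is kept last so it can combine.  */
  return ((x & 8) != 0) == ((x >> 3) & 1);	/* Always 1.  */
}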
6626
6627 /* Check whether we are allowed to reorder operands arg0 and arg1,
6628 such that the evaluation of arg1 occurs before arg0. */
6629
6630 static bool
6631 reorder_operands_p (const_tree arg0, const_tree arg1)
6632 {
6633 if (! flag_evaluation_order)
6634 return true;
6635 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6636 return true;
6637 return ! TREE_SIDE_EFFECTS (arg0)
6638 && ! TREE_SIDE_EFFECTS (arg1);
6639 }
6640
6641 /* Test whether it is preferable to swap two operands, ARG0 and
6642 ARG1, for example because ARG0 is an integer constant and ARG1
6643 isn't. If REORDER is true, only recommend swapping if we can
6644 evaluate the operands in reverse order. */
6645
6646 bool
6647 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6648 {
6649 STRIP_SIGN_NOPS (arg0);
6650 STRIP_SIGN_NOPS (arg1);
6651
6652 if (TREE_CODE (arg1) == INTEGER_CST)
6653 return 0;
6654 if (TREE_CODE (arg0) == INTEGER_CST)
6655 return 1;
6656
6657 if (TREE_CODE (arg1) == REAL_CST)
6658 return 0;
6659 if (TREE_CODE (arg0) == REAL_CST)
6660 return 1;
6661
6662 if (TREE_CODE (arg1) == FIXED_CST)
6663 return 0;
6664 if (TREE_CODE (arg0) == FIXED_CST)
6665 return 1;
6666
6667 if (TREE_CODE (arg1) == COMPLEX_CST)
6668 return 0;
6669 if (TREE_CODE (arg0) == COMPLEX_CST)
6670 return 1;
6671
6672 if (TREE_CONSTANT (arg1))
6673 return 0;
6674 if (TREE_CONSTANT (arg0))
6675 return 1;
6676
6677 if (optimize_function_for_size_p (cfun))
6678 return 0;
6679
6680 if (reorder && flag_evaluation_order
6681 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6682 return 0;
6683
6684 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6685 for commutative and comparison operators. Ensuring a canonical
6686 form allows the optimizers to find additional redundancies without
6687 having to explicitly check for both orderings. */
6688 if (TREE_CODE (arg0) == SSA_NAME
6689 && TREE_CODE (arg1) == SSA_NAME
6690 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6691 return 1;
6692
6693 /* Put SSA_NAMEs last. */
6694 if (TREE_CODE (arg1) == SSA_NAME)
6695 return 0;
6696 if (TREE_CODE (arg0) == SSA_NAME)
6697 return 1;
6698
6699 /* Put variables last. */
6700 if (DECL_P (arg1))
6701 return 0;
6702 if (DECL_P (arg0))
6703 return 1;
6704
6705 return 0;
6706 }
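
/* For example (exposition only): this predicate asks to rewrite
   5 + x as x + 5, putting the constant second, and orders a pair of
   SSA_NAMEs by version number so that x_7 == x_3 and x_3 == x_7 reach
   one canonical form.  */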
6707
6708 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6709 ARG0 is extended to a wider type. */
6710
6711 static tree
6712 fold_widened_comparison (location_t loc, enum tree_code code,
6713 tree type, tree arg0, tree arg1)
6714 {
6715 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6716 tree arg1_unw;
6717 tree shorter_type, outer_type;
6718 tree min, max;
6719 bool above, below;
6720
6721 if (arg0_unw == arg0)
6722 return NULL_TREE;
6723 shorter_type = TREE_TYPE (arg0_unw);
6724
6725 #ifdef HAVE_canonicalize_funcptr_for_compare
6726 /* Disable this optimization if we're casting a function pointer
6727 type on targets that require function pointer canonicalization. */
6728 if (HAVE_canonicalize_funcptr_for_compare
6729 && TREE_CODE (shorter_type) == POINTER_TYPE
6730 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6731 return NULL_TREE;
6732 #endif
6733
6734 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6735 return NULL_TREE;
6736
6737 arg1_unw = get_unwidened (arg1, NULL_TREE);
6738
6739 /* If possible, express the comparison in the shorter mode. */
6740 if ((code == EQ_EXPR || code == NE_EXPR
6741 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6742 && (TREE_TYPE (arg1_unw) == shorter_type
6743 || ((TYPE_PRECISION (shorter_type)
6744 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6745 && (TYPE_UNSIGNED (shorter_type)
6746 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6747 || (TREE_CODE (arg1_unw) == INTEGER_CST
6748 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6749 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6750 && int_fits_type_p (arg1_unw, shorter_type))))
6751 return fold_build2_loc (loc, code, type, arg0_unw,
6752 fold_convert_loc (loc, shorter_type, arg1_unw));
6753
6754 if (TREE_CODE (arg1_unw) != INTEGER_CST
6755 || TREE_CODE (shorter_type) != INTEGER_TYPE
6756 || !int_fits_type_p (arg1_unw, TREE_TYPE (arg0)))
6757 return NULL_TREE;
6758
6759 /* If we are comparing with an integer that does not fit into the range
6760 of the shorter type, the result is known. */
6761 outer_type = TREE_TYPE (arg1_unw);
6762 min = lower_bound_in_type (outer_type, shorter_type);
6763 max = upper_bound_in_type (outer_type, shorter_type);
6764
6765 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6766 max, arg1_unw));
6767 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6768 arg1_unw, min));
6769
6770 switch (code)
6771 {
6772 case EQ_EXPR:
6773 if (above || below)
6774 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6775 break;
6776
6777 case NE_EXPR:
6778 if (above || below)
6779 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6780 break;
6781
6782 case LT_EXPR:
6783 case LE_EXPR:
6784 if (above)
6785 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6786 else if (below)
6787 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6788
6789 case GT_EXPR:
6790 case GE_EXPR:
6791 if (above)
6792 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6793 else if (below)
6794 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6795
6796 default:
6797 break;
6798 }
6799
6800 return NULL_TREE;
6801 }
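
/* For example (illustrative): with 16-bit short s, (int) s == 70000
   folds to constant false, since 70000 cannot be the value of a short,
   while (int) s < 70000 folds to constant true.  */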
6802
6803 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6804 ARG0 just the signedness is changed. */
6805
6806 static tree
6807 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6808 tree arg0, tree arg1)
6809 {
6810 tree arg0_inner;
6811 tree inner_type, outer_type;
6812
6813 if (!CONVERT_EXPR_P (arg0))
6814 return NULL_TREE;
6815
6816 outer_type = TREE_TYPE (arg0);
6817 arg0_inner = TREE_OPERAND (arg0, 0);
6818 inner_type = TREE_TYPE (arg0_inner);
6819
6820 #ifdef HAVE_canonicalize_funcptr_for_compare
6821 /* Disable this optimization if we're casting a function pointer
6822 type on targets that require function pointer canonicalization. */
6823 if (HAVE_canonicalize_funcptr_for_compare
6824 && TREE_CODE (inner_type) == POINTER_TYPE
6825 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6826 return NULL_TREE;
6827 #endif
6828
6829 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6830 return NULL_TREE;
6831
6832 if (TREE_CODE (arg1) != INTEGER_CST
6833 && !(CONVERT_EXPR_P (arg1)
6834 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6835 return NULL_TREE;
6836
6837 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6838 && code != NE_EXPR
6839 && code != EQ_EXPR)
6840 return NULL_TREE;
6841
6842 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6843 return NULL_TREE;
6844
6845 if (TREE_CODE (arg1) == INTEGER_CST)
6846 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6847 TREE_OVERFLOW (arg1));
6848 else
6849 arg1 = fold_convert_loc (loc, inner_type, arg1);
6850
6851 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6852 }
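
/* For example (illustrative): for int x, (unsigned) x == 5u is
   rewritten as x == 5; only equality and inequality survive the
   signedness change, so (unsigned) x < 5u is left alone.  */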
6853
6854 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6855 the step of the array. Reconstructs s and delta in the case of s *
6856 delta being an integer constant (and thus already folded). ADDR is
6857 the address. OP1 is the multiplicative expression. If the
6858 function succeeds, the new address expression is returned.
6859 Otherwise NULL_TREE is returned. LOC is the location of the
6860 resulting expression. */
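
/* For example (illustrative): with int a[10] and 4-byte int,
   &a[2] p+ 4 * i becomes &a[2 + i], and the already-folded constant
   offset &a[2] p+ 8 becomes &a[4].  */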
6861
6862 static tree
6863 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6864 {
6865 tree s, delta, step;
6866 tree ref = TREE_OPERAND (addr, 0), pref;
6867 tree ret, pos;
6868 tree itype;
6869 bool mdim = false;
6870
6871 /* Strip the nops that might be added when converting op1 to sizetype. */
6872 STRIP_NOPS (op1);
6873
6874 /* Canonicalize op1 into a possibly non-constant delta
6875 and an INTEGER_CST s. */
6876 if (TREE_CODE (op1) == MULT_EXPR)
6877 {
6878 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6879
6880 STRIP_NOPS (arg0);
6881 STRIP_NOPS (arg1);
6882
6883 if (TREE_CODE (arg0) == INTEGER_CST)
6884 {
6885 s = arg0;
6886 delta = arg1;
6887 }
6888 else if (TREE_CODE (arg1) == INTEGER_CST)
6889 {
6890 s = arg1;
6891 delta = arg0;
6892 }
6893 else
6894 return NULL_TREE;
6895 }
6896 else if (TREE_CODE (op1) == INTEGER_CST)
6897 {
6898 delta = op1;
6899 s = NULL_TREE;
6900 }
6901 else
6902 {
6903 /* Treat op1 as delta * 1. */
6904 delta = op1;
6905 s = integer_one_node;
6906 }
6907
6908 /* Handle &x.array the same as we would handle &x.array[0]. */
6909 if (TREE_CODE (ref) == COMPONENT_REF
6910 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6911 {
6912 tree domain;
6913
6914 /* Remember if this was a multi-dimensional array. */
6915 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6916 mdim = true;
6917
6918 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6919 if (! domain)
6920 goto cont;
6921 itype = TREE_TYPE (domain);
6922
6923 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6924 if (TREE_CODE (step) != INTEGER_CST)
6925 goto cont;
6926
6927 if (s)
6928 {
6929 if (! tree_int_cst_equal (step, s))
6930 goto cont;
6931 }
6932 else
6933 {
6934 /* Check whether delta is a multiple of step. */
6935 tree tmp = div_if_zero_remainder (op1, step);
6936 if (! tmp)
6937 goto cont;
6938 delta = tmp;
6939 }
6940
6941 /* Only fold here if we can verify we do not overflow one
6942 dimension of a multi-dimensional array. */
6943 if (mdim)
6944 {
6945 tree tmp;
6946
6947 if (!TYPE_MIN_VALUE (domain)
6948 || !TYPE_MAX_VALUE (domain)
6949 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6950 goto cont;
6951
6952 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6953 fold_convert_loc (loc, itype,
6954 TYPE_MIN_VALUE (domain)),
6955 fold_convert_loc (loc, itype, delta));
6956 if (TREE_CODE (tmp) != INTEGER_CST
6957 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6958 goto cont;
6959 }
6960
6961 /* We found a suitable component reference. */
6962
6963 pref = TREE_OPERAND (addr, 0);
6964 ret = copy_node (pref);
6965 SET_EXPR_LOCATION (ret, loc);
6966
6967 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6968 fold_build2_loc
6969 (loc, PLUS_EXPR, itype,
6970 fold_convert_loc (loc, itype,
6971 TYPE_MIN_VALUE
6972 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6973 fold_convert_loc (loc, itype, delta)),
6974 NULL_TREE, NULL_TREE);
6975 return build_fold_addr_expr_loc (loc, ret);
6976 }
6977
6978 cont:
6979
6980 for (;; ref = TREE_OPERAND (ref, 0))
6981 {
6982 if (TREE_CODE (ref) == ARRAY_REF)
6983 {
6984 tree domain;
6985
6986 /* Remember if this was a multi-dimensional array. */
6987 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6988 mdim = true;
6989
6990 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6991 if (! domain)
6992 continue;
6993 itype = TREE_TYPE (domain);
6994
6995 step = array_ref_element_size (ref);
6996 if (TREE_CODE (step) != INTEGER_CST)
6997 continue;
6998
6999 if (s)
7000 {
7001 if (! tree_int_cst_equal (step, s))
7002 continue;
7003 }
7004 else
7005 {
7006 /* Check whether delta is a multiple of step. */
7007 tree tmp = div_if_zero_remainder (op1, step);
7008 if (! tmp)
7009 continue;
7010 delta = tmp;
7011 }
7012
7013 /* Only fold here if we can verify we do not overflow one
7014 dimension of a multi-dimensional array. */
7015 if (mdim)
7016 {
7017 tree tmp;
7018
7019 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7020 || !TYPE_MAX_VALUE (domain)
7021 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7022 continue;
7023
7024 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7025 fold_convert_loc (loc, itype,
7026 TREE_OPERAND (ref, 1)),
7027 fold_convert_loc (loc, itype, delta));
7028 if (!tmp
7029 || TREE_CODE (tmp) != INTEGER_CST
7030 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7031 continue;
7032 }
7033
7034 break;
7035 }
7036 else
7037 mdim = false;
7038
7039 if (!handled_component_p (ref))
7040 return NULL_TREE;
7041 }
7042
7043 /* We found a suitable array reference. So copy everything up to it,
7044 and replace the index. */
7045
7046 pref = TREE_OPERAND (addr, 0);
7047 ret = copy_node (pref);
7048 SET_EXPR_LOCATION (ret, loc);
7049 pos = ret;
7050
7051 while (pref != ref)
7052 {
7053 pref = TREE_OPERAND (pref, 0);
7054 TREE_OPERAND (pos, 0) = copy_node (pref);
7055 pos = TREE_OPERAND (pos, 0);
7056 }
7057
7058 TREE_OPERAND (pos, 1)
7059 = fold_build2_loc (loc, PLUS_EXPR, itype,
7060 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7061 fold_convert_loc (loc, itype, delta));
7062 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7063 }
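/* A minimal sketch of try_move_mult_to_index, assuming 4-byte ints
   (hypothetical example): for int a[10], the address computation

     &a[1] p+ d * 4

   matches the array step s == 4, so it is rewritten as &a[1 + d],
   moving the offset arithmetic into the ARRAY_REF index.  */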
7064
7065
7066 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7067 means A >= Y && A != MAX, but in this case we know that
7068 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7069
7070 static tree
7071 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7072 {
7073 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7074
7075 if (TREE_CODE (bound) == LT_EXPR)
7076 a = TREE_OPERAND (bound, 0);
7077 else if (TREE_CODE (bound) == GT_EXPR)
7078 a = TREE_OPERAND (bound, 1);
7079 else
7080 return NULL_TREE;
7081
7082 typea = TREE_TYPE (a);
7083 if (!INTEGRAL_TYPE_P (typea)
7084 && !POINTER_TYPE_P (typea))
7085 return NULL_TREE;
7086
7087 if (TREE_CODE (ineq) == LT_EXPR)
7088 {
7089 a1 = TREE_OPERAND (ineq, 1);
7090 y = TREE_OPERAND (ineq, 0);
7091 }
7092 else if (TREE_CODE (ineq) == GT_EXPR)
7093 {
7094 a1 = TREE_OPERAND (ineq, 0);
7095 y = TREE_OPERAND (ineq, 1);
7096 }
7097 else
7098 return NULL_TREE;
7099
7100 if (TREE_TYPE (a1) != typea)
7101 return NULL_TREE;
7102
7103 if (POINTER_TYPE_P (typea))
7104 {
7105 /* Convert the pointers to a signed integer type before taking the difference. */
7106 tree ta = fold_convert_loc (loc, ssizetype, a);
7107 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7108 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7109 }
7110 else
7111 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7112
7113 if (!diff || !integer_onep (diff))
7114 return NULL_TREE;
7115
7116 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7117 }
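/* A minimal sketch of fold_to_nonsharp_ineq_using_bound (hypothetical
   example):

     int f (int a, int x, int y) { return a < x && a + 1 > y; }

   BOUND is a < x and INEQ is a + 1 > y; the difference (a + 1) - a
   folds to 1, so INEQ is replaced by the non-sharp form a >= y.  */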
7118
7119 /* Fold a sum or difference of at least one multiplication.
7120 Returns the folded tree or NULL if no simplification could be made. */
7121
7122 static tree
7123 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7124 tree arg0, tree arg1)
7125 {
7126 tree arg00, arg01, arg10, arg11;
7127 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7128
7129 /* (A * C) +- (B * C) -> (A+-B) * C.
7130 (A * C) +- A -> A * (C+-1).
7131 We are most concerned about the case where C is a constant,
7132 but other combinations show up during loop reduction. Since
7133 it is not difficult, try all four possibilities. */
7134
7135 if (TREE_CODE (arg0) == MULT_EXPR)
7136 {
7137 arg00 = TREE_OPERAND (arg0, 0);
7138 arg01 = TREE_OPERAND (arg0, 1);
7139 }
7140 else if (TREE_CODE (arg0) == INTEGER_CST)
7141 {
7142 arg00 = build_one_cst (type);
7143 arg01 = arg0;
7144 }
7145 else
7146 {
7147 /* We cannot generate constant 1 for fract. */
7148 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7149 return NULL_TREE;
7150 arg00 = arg0;
7151 arg01 = build_one_cst (type);
7152 }
7153 if (TREE_CODE (arg1) == MULT_EXPR)
7154 {
7155 arg10 = TREE_OPERAND (arg1, 0);
7156 arg11 = TREE_OPERAND (arg1, 1);
7157 }
7158 else if (TREE_CODE (arg1) == INTEGER_CST)
7159 {
7160 arg10 = build_one_cst (type);
7161 /* Since we canonicalize A - 2 to A + -2, get rid of that sign for
7162 the purpose of this canonicalization. */
7163 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7164 && negate_expr_p (arg1)
7165 && code == PLUS_EXPR)
7166 {
7167 arg11 = negate_expr (arg1);
7168 code = MINUS_EXPR;
7169 }
7170 else
7171 arg11 = arg1;
7172 }
7173 else
7174 {
7175 /* We cannot generate constant 1 for fract. */
7176 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7177 return NULL_TREE;
7178 arg10 = arg1;
7179 arg11 = build_one_cst (type);
7180 }
7181 same = NULL_TREE;
7182
7183 if (operand_equal_p (arg01, arg11, 0))
7184 same = arg01, alt0 = arg00, alt1 = arg10;
7185 else if (operand_equal_p (arg00, arg10, 0))
7186 same = arg00, alt0 = arg01, alt1 = arg11;
7187 else if (operand_equal_p (arg00, arg11, 0))
7188 same = arg00, alt0 = arg01, alt1 = arg10;
7189 else if (operand_equal_p (arg01, arg10, 0))
7190 same = arg01, alt0 = arg00, alt1 = arg11;
7191
7192 /* No identical multiplicands; see if we can find a common
7193 power-of-two factor in non-power-of-two multiplies. This
7194 can help in multi-dimensional array access. */
7195 else if (tree_fits_shwi_p (arg01)
7196 && tree_fits_shwi_p (arg11))
7197 {
7198 HOST_WIDE_INT int01, int11, tmp;
7199 bool swap = false;
7200 tree maybe_same;
7201 int01 = tree_to_shwi (arg01);
7202 int11 = tree_to_shwi (arg11);
7203
7204 /* Move min of absolute values to int11. */
7205 if (absu_hwi (int01) < absu_hwi (int11))
7206 {
7207 tmp = int01, int01 = int11, int11 = tmp;
7208 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7209 maybe_same = arg01;
7210 swap = true;
7211 }
7212 else
7213 maybe_same = arg11;
7214
7215 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7216 /* The remainder should not be a constant, otherwise we
7217 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7218 increase the number of multiplications necessary. */
7219 && TREE_CODE (arg10) != INTEGER_CST)
7220 {
7221 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7222 build_int_cst (TREE_TYPE (arg00),
7223 int01 / int11));
7224 alt1 = arg10;
7225 same = maybe_same;
7226 if (swap)
7227 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7228 }
7229 }
7230
7231 if (same)
7232 return fold_build2_loc (loc, MULT_EXPR, type,
7233 fold_build2_loc (loc, code, type,
7234 fold_convert_loc (loc, type, alt0),
7235 fold_convert_loc (loc, type, alt1)),
7236 fold_convert_loc (loc, type, same));
7237
7238 return NULL_TREE;
7239 }
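/* A minimal sketch of the common power-of-two path above (hypothetical
   example): in i * 12 + j * 4 the constants share the power-of-two
   factor 4, so the sum is rewritten as (i * 3 + j) * 4. This is the
   shape that a multi-dimensional access such as a[i][j] with 4-byte
   elements reduces to.  */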
7240
7241 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7242 specified by EXPR into the buffer PTR of length LEN bytes.
7243 Return the number of bytes placed in the buffer, or zero
7244 upon failure. */
7245
7246 static int
7247 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7248 {
7249 tree type = TREE_TYPE (expr);
7250 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7251 int byte, offset, word, words;
7252 unsigned char value;
7253
7254 if (total_bytes > len)
7255 return 0;
7256 words = total_bytes / UNITS_PER_WORD;
7257
7258 for (byte = 0; byte < total_bytes; byte++)
7259 {
7260 int bitpos = byte * BITS_PER_UNIT;
7261 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7262 number of bytes. */
7263 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7264
7265 if (total_bytes > UNITS_PER_WORD)
7266 {
7267 word = byte / UNITS_PER_WORD;
7268 if (WORDS_BIG_ENDIAN)
7269 word = (words - 1) - word;
7270 offset = word * UNITS_PER_WORD;
7271 if (BYTES_BIG_ENDIAN)
7272 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7273 else
7274 offset += byte % UNITS_PER_WORD;
7275 }
7276 else
7277 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7278 ptr[offset] = value;
7279 }
7280 return total_bytes;
7281 }
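/* A minimal usage sketch of native_encode_int, assuming an 8-bit-byte
   little-endian target and a 32-bit INTEGER_CST cst (hypothetical
   values):

     unsigned char buf[16];
     int n = native_encode_int (cst, buf, sizeof buf);

   yields n == 4, and for the value 0x11223344 the buffer holds
   { 0x44, 0x33, 0x22, 0x11 }; a big-endian target reverses the
   byte order.  */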
7282
7283
7284 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7285 specified by EXPR into the buffer PTR of length LEN bytes.
7286 Return the number of bytes placed in the buffer, or zero
7287 upon failure. */
7288
7289 static int
7290 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7291 {
7292 tree type = TREE_TYPE (expr);
7293 enum machine_mode mode = TYPE_MODE (type);
7294 int total_bytes = GET_MODE_SIZE (mode);
7295 FIXED_VALUE_TYPE value;
7296 tree i_value, i_type;
7297
7298 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7299 return 0;
7300
7301 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7302
7303 if (NULL_TREE == i_type
7304 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7305 return 0;
7306
7307 value = TREE_FIXED_CST (expr);
7308 i_value = double_int_to_tree (i_type, value.data);
7309
7310 return native_encode_int (i_value, ptr, len);
7311 }
7312
7313
7314 /* Subroutine of native_encode_expr. Encode the REAL_CST
7315 specified by EXPR into the buffer PTR of length LEN bytes.
7316 Return the number of bytes placed in the buffer, or zero
7317 upon failure. */
7318
7319 static int
7320 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7321 {
7322 tree type = TREE_TYPE (expr);
7323 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7324 int byte, offset, word, words, bitpos;
7325 unsigned char value;
7326
7327 /* real_to_target always stores 32 bits in each long, no matter the
7328 size of the host's long. We handle floating point representations
7329 with up to 192 bits. */
7330 long tmp[6];
7331
7332 if (total_bytes > len)
7333 return 0;
7334 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7335
7336 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7337
7338 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7339 bitpos += BITS_PER_UNIT)
7340 {
7341 byte = (bitpos / BITS_PER_UNIT) & 3;
7342 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7343
7344 if (UNITS_PER_WORD < 4)
7345 {
7346 word = byte / UNITS_PER_WORD;
7347 if (WORDS_BIG_ENDIAN)
7348 word = (words - 1) - word;
7349 offset = word * UNITS_PER_WORD;
7350 if (BYTES_BIG_ENDIAN)
7351 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7352 else
7353 offset += byte % UNITS_PER_WORD;
7354 }
7355 else
7356 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7357 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7358 }
7359 return total_bytes;
7360 }
7361
7362 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7363 specified by EXPR into the buffer PTR of length LEN bytes.
7364 Return the number of bytes placed in the buffer, or zero
7365 upon failure. */
7366
7367 static int
7368 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7369 {
7370 int rsize, isize;
7371 tree part;
7372
7373 part = TREE_REALPART (expr);
7374 rsize = native_encode_expr (part, ptr, len);
7375 if (rsize == 0)
7376 return 0;
7377 part = TREE_IMAGPART (expr);
7378 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7379 if (isize != rsize)
7380 return 0;
7381 return rsize + isize;
7382 }
7383
7384
7385 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7386 specified by EXPR into the buffer PTR of length LEN bytes.
7387 Return the number of bytes placed in the buffer, or zero
7388 upon failure. */
7389
7390 static int
7391 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7392 {
7393 unsigned i, count;
7394 int size, offset;
7395 tree itype, elem;
7396
7397 offset = 0;
7398 count = VECTOR_CST_NELTS (expr);
7399 itype = TREE_TYPE (TREE_TYPE (expr));
7400 size = GET_MODE_SIZE (TYPE_MODE (itype));
7401 for (i = 0; i < count; i++)
7402 {
7403 elem = VECTOR_CST_ELT (expr, i);
7404 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7405 return 0;
7406 offset += size;
7407 }
7408 return offset;
7409 }
7410
7411
7412 /* Subroutine of native_encode_expr. Encode the STRING_CST
7413 specified by EXPR into the buffer PTR of length LEN bytes.
7414 Return the number of bytes placed in the buffer, or zero
7415 upon failure. */
7416
7417 static int
7418 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7419 {
7420 tree type = TREE_TYPE (expr);
7421 HOST_WIDE_INT total_bytes;
7422
7423 if (TREE_CODE (type) != ARRAY_TYPE
7424 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7425 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7426 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7427 return 0;
7428 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7429 if (total_bytes > len)
7430 return 0;
7431 if (TREE_STRING_LENGTH (expr) < total_bytes)
7432 {
7433 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7434 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7435 total_bytes - TREE_STRING_LENGTH (expr));
7436 }
7437 else
7438 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7439 return total_bytes;
7440 }
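/* A minimal sketch of the padding behavior above (hypothetical
   example): for the STRING_CST "abc" with array type char[8],
   TREE_STRING_LENGTH is 4 ("abc" plus the terminating NUL), so 4
   bytes are copied, the remaining 4 are zero-filled, and 8 is
   returned.  */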
7441
7442
7443 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7444 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7445 buffer PTR of length LEN bytes. Return the number of bytes
7446 placed in the buffer, or zero upon failure. */
7447
7448 int
7449 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7450 {
7451 switch (TREE_CODE (expr))
7452 {
7453 case INTEGER_CST:
7454 return native_encode_int (expr, ptr, len);
7455
7456 case REAL_CST:
7457 return native_encode_real (expr, ptr, len);
7458
7459 case FIXED_CST:
7460 return native_encode_fixed (expr, ptr, len);
7461
7462 case COMPLEX_CST:
7463 return native_encode_complex (expr, ptr, len);
7464
7465 case VECTOR_CST:
7466 return native_encode_vector (expr, ptr, len);
7467
7468 case STRING_CST:
7469 return native_encode_string (expr, ptr, len);
7470
7471 default:
7472 return 0;
7473 }
7474 }
7475
7476
7477 /* Subroutine of native_interpret_expr. Interpret the contents of
7478 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7479 If the buffer cannot be interpreted, return NULL_TREE. */
7480
7481 static tree
7482 native_interpret_int (tree type, const unsigned char *ptr, int len)
7483 {
7484 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7485
7486 if (total_bytes > len
7487 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7488 return NULL_TREE;
7489
7490 wide_int result = wi::from_buffer (ptr, total_bytes);
7491
7492 return wide_int_to_tree (type, result);
7493 }
7494
7495
7496 /* Subroutine of native_interpret_expr. Interpret the contents of
7497 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7498 If the buffer cannot be interpreted, return NULL_TREE. */
7499
7500 static tree
7501 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7502 {
7503 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7504 double_int result;
7505 FIXED_VALUE_TYPE fixed_value;
7506
7507 if (total_bytes > len
7508 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7509 return NULL_TREE;
7510
7511 result = double_int::from_buffer (ptr, total_bytes);
7512 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7513
7514 return build_fixed (type, fixed_value);
7515 }
7516
7517
7518 /* Subroutine of native_interpret_expr. Interpret the contents of
7519 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7520 If the buffer cannot be interpreted, return NULL_TREE. */
7521
7522 static tree
7523 native_interpret_real (tree type, const unsigned char *ptr, int len)
7524 {
7525 enum machine_mode mode = TYPE_MODE (type);
7526 int total_bytes = GET_MODE_SIZE (mode);
7527 int byte, offset, word, words, bitpos;
7528 unsigned char value;
7529 /* real_from_target always reads 32 bits from each long, no matter
7530 the size of the host's long. We handle floating point
7531 representations with up to 192 bits. */
7532 REAL_VALUE_TYPE r;
7533 long tmp[6];
7534
7535 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7536 if (total_bytes > len || total_bytes > 24)
7537 return NULL_TREE;
7538 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7539
7540 memset (tmp, 0, sizeof (tmp));
7541 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7542 bitpos += BITS_PER_UNIT)
7543 {
7544 byte = (bitpos / BITS_PER_UNIT) & 3;
7545 if (UNITS_PER_WORD < 4)
7546 {
7547 word = byte / UNITS_PER_WORD;
7548 if (WORDS_BIG_ENDIAN)
7549 word = (words - 1) - word;
7550 offset = word * UNITS_PER_WORD;
7551 if (BYTES_BIG_ENDIAN)
7552 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7553 else
7554 offset += byte % UNITS_PER_WORD;
7555 }
7556 else
7557 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7558 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7559
7560 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7561 }
7562
7563 real_from_target (&r, tmp, mode);
7564 return build_real (type, r);
7565 }
7566
7567
7568 /* Subroutine of native_interpret_expr. Interpret the contents of
7569 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7570 If the buffer cannot be interpreted, return NULL_TREE. */
7571
7572 static tree
7573 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7574 {
7575 tree etype, rpart, ipart;
7576 int size;
7577
7578 etype = TREE_TYPE (type);
7579 size = GET_MODE_SIZE (TYPE_MODE (etype));
7580 if (size * 2 > len)
7581 return NULL_TREE;
7582 rpart = native_interpret_expr (etype, ptr, size);
7583 if (!rpart)
7584 return NULL_TREE;
7585 ipart = native_interpret_expr (etype, ptr+size, size);
7586 if (!ipart)
7587 return NULL_TREE;
7588 return build_complex (type, rpart, ipart);
7589 }
7590
7591
7592 /* Subroutine of native_interpret_expr. Interpret the contents of
7593 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7594 If the buffer cannot be interpreted, return NULL_TREE. */
7595
7596 static tree
7597 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7598 {
7599 tree etype, elem;
7600 int i, size, count;
7601 tree *elements;
7602
7603 etype = TREE_TYPE (type);
7604 size = GET_MODE_SIZE (TYPE_MODE (etype));
7605 count = TYPE_VECTOR_SUBPARTS (type);
7606 if (size * count > len)
7607 return NULL_TREE;
7608
7609 elements = XALLOCAVEC (tree, count);
7610 for (i = count - 1; i >= 0; i--)
7611 {
7612 elem = native_interpret_expr (etype, ptr+(i*size), size);
7613 if (!elem)
7614 return NULL_TREE;
7615 elements[i] = elem;
7616 }
7617 return build_vector (type, elements);
7618 }
7619
7620
7621 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7622 the buffer PTR of length LEN as a constant of type TYPE. For
7623 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7624 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7625 return NULL_TREE. */
7626
7627 tree
7628 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7629 {
7630 switch (TREE_CODE (type))
7631 {
7632 case INTEGER_TYPE:
7633 case ENUMERAL_TYPE:
7634 case BOOLEAN_TYPE:
7635 case POINTER_TYPE:
7636 case REFERENCE_TYPE:
7637 return native_interpret_int (type, ptr, len);
7638
7639 case REAL_TYPE:
7640 return native_interpret_real (type, ptr, len);
7641
7642 case FIXED_POINT_TYPE:
7643 return native_interpret_fixed (type, ptr, len);
7644
7645 case COMPLEX_TYPE:
7646 return native_interpret_complex (type, ptr, len);
7647
7648 case VECTOR_TYPE:
7649 return native_interpret_vector (type, ptr, len);
7650
7651 default:
7652 return NULL_TREE;
7653 }
7654 }
7655
7656 /* Returns true if we can interpret the contents of a native encoding
7657 as TYPE. */
7658
7659 static bool
7660 can_native_interpret_type_p (tree type)
7661 {
7662 switch (TREE_CODE (type))
7663 {
7664 case INTEGER_TYPE:
7665 case ENUMERAL_TYPE:
7666 case BOOLEAN_TYPE:
7667 case POINTER_TYPE:
7668 case REFERENCE_TYPE:
7669 case FIXED_POINT_TYPE:
7670 case REAL_TYPE:
7671 case COMPLEX_TYPE:
7672 case VECTOR_TYPE:
7673 return true;
7674 default:
7675 return false;
7676 }
7677 }
7678
7679 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7680 TYPE at compile-time. If we're unable to perform the conversion
7681 return NULL_TREE. */
7682
7683 static tree
7684 fold_view_convert_expr (tree type, tree expr)
7685 {
7686 /* We support up to 512-bit values (for V8DFmode). */
7687 unsigned char buffer[64];
7688 int len;
7689
7690 /* Check that the host and target are sane. */
7691 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7692 return NULL_TREE;
7693
7694 len = native_encode_expr (expr, buffer, sizeof (buffer));
7695 if (len == 0)
7696 return NULL_TREE;
7697
7698 return native_interpret_expr (type, buffer, len);
7699 }
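/* A minimal sketch of fold_view_convert_expr, assuming IEEE single
   precision and a matching 32-bit int (hypothetical example):

     VIEW_CONVERT_EXPR <int> (1.0f)

   encodes 1.0f into the buffer via native_encode_expr and reads it
   back with native_interpret_expr, producing the INTEGER_CST
   0x3f800000.  */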
7700
7701 /* Build an expression for the address of T. Folds away INDIRECT_REF
7702 to avoid confusing the gimplify process. */
7703
7704 tree
7705 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7706 {
7707 /* The size of the object is not relevant when talking about its address. */
7708 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7709 t = TREE_OPERAND (t, 0);
7710
7711 if (TREE_CODE (t) == INDIRECT_REF)
7712 {
7713 t = TREE_OPERAND (t, 0);
7714
7715 if (TREE_TYPE (t) != ptrtype)
7716 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7717 }
7718 else if (TREE_CODE (t) == MEM_REF
7719 && integer_zerop (TREE_OPERAND (t, 1)))
7720 return TREE_OPERAND (t, 0);
7721 else if (TREE_CODE (t) == MEM_REF
7722 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7723 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7724 TREE_OPERAND (t, 0),
7725 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7726 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7727 {
7728 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7729
7730 if (TREE_TYPE (t) != ptrtype)
7731 t = fold_convert_loc (loc, ptrtype, t);
7732 }
7733 else
7734 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7735
7736 return t;
7737 }
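/* For illustration (hypothetical examples): &*p folds back to p,
   possibly through a NOP_EXPR to adjust the pointer type, and
   &MEM_REF[p, 0] likewise folds to p, so no ADDR_EXPR is built in
   either case.  */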
7738
7739 /* Build an expression for the address of T. */
7740
7741 tree
7742 build_fold_addr_expr_loc (location_t loc, tree t)
7743 {
7744 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7745
7746 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7747 }
7748
7749 static bool vec_cst_ctor_to_array (tree, tree *);
7750
7751 /* Fold a unary expression of code CODE and type TYPE with operand
7752 OP0. Return the folded expression if folding is successful.
7753 Otherwise, return NULL_TREE. */
7754
7755 tree
7756 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7757 {
7758 tree tem;
7759 tree arg0;
7760 enum tree_code_class kind = TREE_CODE_CLASS (code);
7761
7762 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7763 && TREE_CODE_LENGTH (code) == 1);
7764
7765 arg0 = op0;
7766 if (arg0)
7767 {
7768 if (CONVERT_EXPR_CODE_P (code)
7769 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7770 {
7771 /* Don't use STRIP_NOPS, because signedness of argument type
7772 matters. */
7773 STRIP_SIGN_NOPS (arg0);
7774 }
7775 else
7776 {
7777 /* Strip any conversions that don't change the mode. This
7778 is safe for every expression, except for a comparison
7779 expression because its signedness is derived from its
7780 operands.
7781
7782 Note that this is done as an internal manipulation within
7783 the constant folder, in order to find the simplest
7784 representation of the arguments so that their form can be
7785 studied. In any case, the appropriate type conversions
7786 should be put back in the tree that will get out of the
7787 constant folder. */
7788 STRIP_NOPS (arg0);
7789 }
7790 }
7791
7792 if (TREE_CODE_CLASS (code) == tcc_unary)
7793 {
7794 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7795 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7796 fold_build1_loc (loc, code, type,
7797 fold_convert_loc (loc, TREE_TYPE (op0),
7798 TREE_OPERAND (arg0, 1))));
7799 else if (TREE_CODE (arg0) == COND_EXPR)
7800 {
7801 tree arg01 = TREE_OPERAND (arg0, 1);
7802 tree arg02 = TREE_OPERAND (arg0, 2);
7803 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7804 arg01 = fold_build1_loc (loc, code, type,
7805 fold_convert_loc (loc,
7806 TREE_TYPE (op0), arg01));
7807 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7808 arg02 = fold_build1_loc (loc, code, type,
7809 fold_convert_loc (loc,
7810 TREE_TYPE (op0), arg02));
7811 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7812 arg01, arg02);
7813
7814 /* If this was a conversion, and all we did was to move it
7815 inside the COND_EXPR, bring it back out. But leave it if
7816 it is a conversion from integer to integer and the
7817 result precision is no wider than a word since such a
7818 conversion is cheap and may be optimized away by combine,
7819 while it couldn't if it were outside the COND_EXPR. Then return
7820 so we don't get into an infinite recursion loop taking the
7821 conversion out and then back in. */
7822
7823 if ((CONVERT_EXPR_CODE_P (code)
7824 || code == NON_LVALUE_EXPR)
7825 && TREE_CODE (tem) == COND_EXPR
7826 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7827 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7828 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7829 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7830 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7831 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7832 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7833 && (INTEGRAL_TYPE_P
7834 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7835 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7836 || flag_syntax_only))
7837 tem = build1_loc (loc, code, type,
7838 build3 (COND_EXPR,
7839 TREE_TYPE (TREE_OPERAND
7840 (TREE_OPERAND (tem, 1), 0)),
7841 TREE_OPERAND (tem, 0),
7842 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7843 TREE_OPERAND (TREE_OPERAND (tem, 2),
7844 0)));
7845 return tem;
7846 }
7847 }
7848
7849 switch (code)
7850 {
7851 case PAREN_EXPR:
7852 /* Re-association barriers around constants and other re-association
7853 barriers can be removed. */
7854 if (CONSTANT_CLASS_P (op0)
7855 || TREE_CODE (op0) == PAREN_EXPR)
7856 return fold_convert_loc (loc, type, op0);
7857 return NULL_TREE;
7858
7859 CASE_CONVERT:
7860 case FLOAT_EXPR:
7861 case FIX_TRUNC_EXPR:
7862 if (TREE_TYPE (op0) == type)
7863 return op0;
7864
7865 if (COMPARISON_CLASS_P (op0))
7866 {
7867 /* If we have (type) (a CMP b) and type is an integral type, return
7868 new expression involving the new type. Canonicalize
7869 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7870 non-integral type.
7871 Do not fold the result, as that would not simplify further;
7872 folding it again would only recurse. */
7873 if (TREE_CODE (type) == BOOLEAN_TYPE)
7874 return build2_loc (loc, TREE_CODE (op0), type,
7875 TREE_OPERAND (op0, 0),
7876 TREE_OPERAND (op0, 1));
7877 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7878 && TREE_CODE (type) != VECTOR_TYPE)
7879 return build3_loc (loc, COND_EXPR, type, op0,
7880 constant_boolean_node (true, type),
7881 constant_boolean_node (false, type));
7882 }
7883
7884 /* Handle cases of two conversions in a row. */
7885 if (CONVERT_EXPR_P (op0))
7886 {
7887 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7888 tree inter_type = TREE_TYPE (op0);
7889 int inside_int = INTEGRAL_TYPE_P (inside_type);
7890 int inside_ptr = POINTER_TYPE_P (inside_type);
7891 int inside_float = FLOAT_TYPE_P (inside_type);
7892 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7893 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7894 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7895 int inter_int = INTEGRAL_TYPE_P (inter_type);
7896 int inter_ptr = POINTER_TYPE_P (inter_type);
7897 int inter_float = FLOAT_TYPE_P (inter_type);
7898 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7899 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7900 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7901 int final_int = INTEGRAL_TYPE_P (type);
7902 int final_ptr = POINTER_TYPE_P (type);
7903 int final_float = FLOAT_TYPE_P (type);
7904 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7905 unsigned int final_prec = TYPE_PRECISION (type);
7906 int final_unsignedp = TYPE_UNSIGNED (type);
7907
7908 /* In addition to the cases of two conversions in a row
7909 handled below, if we are converting something to its own
7910 type via an object of identical or wider precision, neither
7911 conversion is needed. */
7912 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7913 && (((inter_int || inter_ptr) && final_int)
7914 || (inter_float && final_float))
7915 && inter_prec >= final_prec)
7916 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7917
7918 /* Likewise, if the intermediate and initial types are either both
7919 float or both integer, we don't need the middle conversion if the
7920 former is wider than the latter and doesn't change the signedness
7921 (for integers). Avoid this if the final type is a pointer since
7922 then we sometimes need the middle conversion. Likewise if the
7923 final type has a precision not equal to the size of its mode. */
7924 if (((inter_int && inside_int)
7925 || (inter_float && inside_float)
7926 || (inter_vec && inside_vec))
7927 && inter_prec >= inside_prec
7928 && (inter_float || inter_vec
7929 || inter_unsignedp == inside_unsignedp)
7930 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7931 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7932 && ! final_ptr
7933 && (! final_vec || inter_prec == inside_prec))
7934 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7935
7936 /* If we have a sign-extension of a zero-extended value, we can
7937 replace that by a single zero-extension. Likewise if the
7938 final conversion does not change precision we can drop the
7939 intermediate conversion. */
7940 if (inside_int && inter_int && final_int
7941 && ((inside_prec < inter_prec && inter_prec < final_prec
7942 && inside_unsignedp && !inter_unsignedp)
7943 || final_prec == inter_prec))
7944 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7945
7946 /* Two conversions in a row are not needed unless:
7947 - some conversion is floating-point (overstrict for now), or
7948 - some conversion is a vector (overstrict for now), or
7949 - the intermediate type is narrower than both initial and
7950 final, or
7951 - the intermediate type and innermost type differ in signedness,
7952 and the outermost type is wider than the intermediate, or
7953 - the initial type is a pointer type and the precisions of the
7954 intermediate and final types differ, or
7955 - the final type is a pointer type and the precisions of the
7956 initial and intermediate types differ. */
7957 if (! inside_float && ! inter_float && ! final_float
7958 && ! inside_vec && ! inter_vec && ! final_vec
7959 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7960 && ! (inside_int && inter_int
7961 && inter_unsignedp != inside_unsignedp
7962 && inter_prec < final_prec)
7963 && ((inter_unsignedp && inter_prec > inside_prec)
7964 == (final_unsignedp && final_prec > inter_prec))
7965 && ! (inside_ptr && inter_prec != final_prec)
7966 && ! (final_ptr && inside_prec != inter_prec)
7967 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7968 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7969 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7970 }
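/* A minimal sketch of the first rule above, assuming a 32-bit int
   and a 64-bit long (hypothetical example): for int i, the double
   conversion (int) (long) i converts i to its own type via a wider
   intermediate, so both casts fold away and the result is i
   itself.  */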
7971
7972 /* Handle (T *)&A.B.C for A being of type T and B and C
7973 living at offset zero. This occurs frequently in
7974 C++ upcasting and then accessing the base. */
7975 if (TREE_CODE (op0) == ADDR_EXPR
7976 && POINTER_TYPE_P (type)
7977 && handled_component_p (TREE_OPERAND (op0, 0)))
7978 {
7979 HOST_WIDE_INT bitsize, bitpos;
7980 tree offset;
7981 enum machine_mode mode;
7982 int unsignedp, volatilep;
7983 tree base = TREE_OPERAND (op0, 0);
7984 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7985 &mode, &unsignedp, &volatilep, false);
7986 /* If the reference was to a (constant) zero offset, we can use
7987 the address of the base if it has the same base type
7988 as the result type and the pointer type is unqualified. */
7989 if (! offset && bitpos == 0
7990 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7991 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7992 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7993 return fold_convert_loc (loc, type,
7994 build_fold_addr_expr_loc (loc, base));
7995 }
7996
7997 if (TREE_CODE (op0) == MODIFY_EXPR
7998 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7999 /* Detect assigning a bitfield. */
8000 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8001 && DECL_BIT_FIELD
8002 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8003 {
8004 /* Don't leave an assignment inside a conversion
8005 unless assigning a bitfield. */
8006 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8007 /* First do the assignment, then return converted constant. */
8008 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8009 TREE_NO_WARNING (tem) = 1;
8010 TREE_USED (tem) = 1;
8011 return tem;
8012 }
8013
8014 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8015 constants (if x has signed type, the sign bit cannot be set
8016 in c). This folds extension into the BIT_AND_EXPR.
8017 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8018 very likely don't have maximal range for their precision and this
8019 transformation effectively doesn't preserve non-maximal ranges. */
8020 if (TREE_CODE (type) == INTEGER_TYPE
8021 && TREE_CODE (op0) == BIT_AND_EXPR
8022 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8023 {
8024 tree and_expr = op0;
8025 tree and0 = TREE_OPERAND (and_expr, 0);
8026 tree and1 = TREE_OPERAND (and_expr, 1);
8027 int change = 0;
8028
8029 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8030 || (TYPE_PRECISION (type)
8031 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8032 change = 1;
8033 else if (TYPE_PRECISION (TREE_TYPE (and1))
8034 <= HOST_BITS_PER_WIDE_INT
8035 && tree_fits_uhwi_p (and1))
8036 {
8037 unsigned HOST_WIDE_INT cst;
8038
8039 cst = tree_to_uhwi (and1);
8040 cst &= HOST_WIDE_INT_M1U
8041 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8042 change = (cst == 0);
8043 #ifdef LOAD_EXTEND_OP
8044 if (change
8045 && !flag_syntax_only
8046 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8047 == ZERO_EXTEND))
8048 {
8049 tree uns = unsigned_type_for (TREE_TYPE (and0));
8050 and0 = fold_convert_loc (loc, uns, and0);
8051 and1 = fold_convert_loc (loc, uns, and1);
8052 }
8053 #endif
8054 }
8055 if (change)
8056 {
8057 tem = force_fit_type (type, wi::to_widest (and1), 0,
8058 TREE_OVERFLOW (and1));
8059 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8060 fold_convert_loc (loc, type, and0), tem);
8061 }
8062 }
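/* A minimal sketch of the BIT_AND fold above, assuming the
   BIT_AND_EXPR has unsigned char type (hypothetical example): the
   widening (unsigned int) (x & 0x7f) becomes
   (unsigned int) x & 0x7fU, i.e. the extension moves inside the
   mask.  */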
8063
8064 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8065 when one of the new casts will fold away. Conservatively we assume
8066 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8067 if (POINTER_TYPE_P (type)
8068 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8069 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8070 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8071 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8072 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8073 {
8074 tree arg00 = TREE_OPERAND (arg0, 0);
8075 tree arg01 = TREE_OPERAND (arg0, 1);
8076
8077 return fold_build_pointer_plus_loc
8078 (loc, fold_convert_loc (loc, type, arg00), arg01);
8079 }
8080
8081 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8082 of the same precision, and X is an integer type not narrower than
8083 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8084 if (INTEGRAL_TYPE_P (type)
8085 && TREE_CODE (op0) == BIT_NOT_EXPR
8086 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8087 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8088 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8089 {
8090 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8091 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8092 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8093 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8094 fold_convert_loc (loc, type, tem));
8095 }
8096
8097 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8098 type of X and Y (integer types only). */
8099 if (INTEGRAL_TYPE_P (type)
8100 && TREE_CODE (op0) == MULT_EXPR
8101 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8102 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8103 {
8104 /* Be careful not to introduce new overflows. */
8105 tree mult_type;
8106 if (TYPE_OVERFLOW_WRAPS (type))
8107 mult_type = type;
8108 else
8109 mult_type = unsigned_type_for (type);
8110
8111 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8112 {
8113 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8114 fold_convert_loc (loc, mult_type,
8115 TREE_OPERAND (op0, 0)),
8116 fold_convert_loc (loc, mult_type,
8117 TREE_OPERAND (op0, 1)));
8118 return fold_convert_loc (loc, type, tem);
8119 }
8120 }
8121
8122 tem = fold_convert_const (code, type, op0);
8123 return tem ? tem : NULL_TREE;
8124
8125 case ADDR_SPACE_CONVERT_EXPR:
8126 if (integer_zerop (arg0))
8127 return fold_convert_const (code, type, arg0);
8128 return NULL_TREE;
8129
8130 case FIXED_CONVERT_EXPR:
8131 tem = fold_convert_const (code, type, arg0);
8132 return tem ? tem : NULL_TREE;
8133
8134 case VIEW_CONVERT_EXPR:
8135 if (TREE_TYPE (op0) == type)
8136 return op0;
8137 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8138 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8139 type, TREE_OPERAND (op0, 0));
8140 if (TREE_CODE (op0) == MEM_REF)
8141 return fold_build2_loc (loc, MEM_REF, type,
8142 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8143
8144 /* For integral conversions with the same precision or pointer
8145 conversions use a NOP_EXPR instead. */
8146 if ((INTEGRAL_TYPE_P (type)
8147 || POINTER_TYPE_P (type))
8148 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8149 || POINTER_TYPE_P (TREE_TYPE (op0)))
8150 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8151 return fold_convert_loc (loc, type, op0);
8152
8153 /* Strip inner integral conversions that do not change the precision. */
8154 if (CONVERT_EXPR_P (op0)
8155 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8156 || POINTER_TYPE_P (TREE_TYPE (op0)))
8157 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8158 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8159 && (TYPE_PRECISION (TREE_TYPE (op0))
8160 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8161 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8162 type, TREE_OPERAND (op0, 0));
8163
8164 return fold_view_convert_expr (type, op0);
8165
8166 case NEGATE_EXPR:
8167 tem = fold_negate_expr (loc, arg0);
8168 if (tem)
8169 return fold_convert_loc (loc, type, tem);
8170 return NULL_TREE;
8171
8172 case ABS_EXPR:
8173 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8174 return fold_abs_const (arg0, type);
8175 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8176 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8177 /* Convert fabs((double)float) into (double)fabsf(float). */
8178 else if (TREE_CODE (arg0) == NOP_EXPR
8179 && TREE_CODE (type) == REAL_TYPE)
8180 {
8181 tree targ0 = strip_float_extensions (arg0);
8182 if (targ0 != arg0)
8183 return fold_convert_loc (loc, type,
8184 fold_build1_loc (loc, ABS_EXPR,
8185 TREE_TYPE (targ0),
8186 targ0));
8187 }
8188 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8189 else if (TREE_CODE (arg0) == ABS_EXPR)
8190 return arg0;
8191 else if (tree_expr_nonnegative_p (arg0))
8192 return arg0;
8193
8194 /* Strip sign ops from argument. */
8195 if (TREE_CODE (type) == REAL_TYPE)
8196 {
8197 tem = fold_strip_sign_ops (arg0);
8198 if (tem)
8199 return fold_build1_loc (loc, ABS_EXPR, type,
8200 fold_convert_loc (loc, type, tem));
8201 }
8202 return NULL_TREE;
8203
8204 case CONJ_EXPR:
8205 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8206 return fold_convert_loc (loc, type, arg0);
8207 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8208 {
8209 tree itype = TREE_TYPE (type);
8210 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8211 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8212 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8213 negate_expr (ipart));
8214 }
8215 if (TREE_CODE (arg0) == COMPLEX_CST)
8216 {
8217 tree itype = TREE_TYPE (type);
8218 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8219 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8220 return build_complex (type, rpart, negate_expr (ipart));
8221 }
8222 if (TREE_CODE (arg0) == CONJ_EXPR)
8223 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8224 return NULL_TREE;
8225
8226 case BIT_NOT_EXPR:
8227 if (TREE_CODE (arg0) == INTEGER_CST)
8228 return fold_not_const (arg0, type);
8229 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8230 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8231 /* Convert ~ (-A) to A - 1. */
8232 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8233 return fold_build2_loc (loc, MINUS_EXPR, type,
8234 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8235 build_int_cst (type, 1));
8236 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8237 else if (INTEGRAL_TYPE_P (type)
8238 && ((TREE_CODE (arg0) == MINUS_EXPR
8239 && integer_onep (TREE_OPERAND (arg0, 1)))
8240 || (TREE_CODE (arg0) == PLUS_EXPR
8241 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8242 return fold_build1_loc (loc, NEGATE_EXPR, type,
8243 fold_convert_loc (loc, type,
8244 TREE_OPERAND (arg0, 0)));
8245 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8246 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8247 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8248 fold_convert_loc (loc, type,
8249 TREE_OPERAND (arg0, 0)))))
8250 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8251 fold_convert_loc (loc, type,
8252 TREE_OPERAND (arg0, 1)));
8253 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8254 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8255 fold_convert_loc (loc, type,
8256 TREE_OPERAND (arg0, 1)))))
8257 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8258 fold_convert_loc (loc, type,
8259 TREE_OPERAND (arg0, 0)), tem);
8260 /* Perform BIT_NOT_EXPR on each element individually. */
8261 else if (TREE_CODE (arg0) == VECTOR_CST)
8262 {
8263 tree *elements;
8264 tree elem;
8265 unsigned count = VECTOR_CST_NELTS (arg0), i;
8266
8267 elements = XALLOCAVEC (tree, count);
8268 for (i = 0; i < count; i++)
8269 {
8270 elem = VECTOR_CST_ELT (arg0, i);
8271 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8272 if (elem == NULL_TREE)
8273 break;
8274 elements[i] = elem;
8275 }
8276 if (i == count)
8277 return build_vector (type, elements);
8278 }
8279 else if (COMPARISON_CLASS_P (arg0)
8280 && (VECTOR_TYPE_P (type)
8281 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8282 {
8283 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8284 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8285 HONOR_NANS (TYPE_MODE (op_type)));
8286 if (subcode != ERROR_MARK)
8287 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8288 TREE_OPERAND (arg0, 1));
8289 }
8290
8291
8292 return NULL_TREE;
8293
8294 case TRUTH_NOT_EXPR:
8295 /* Note that the operand of this must be an int
8296 and its values must be 0 or 1.
8297 ("true" is a fixed value perhaps depending on the language,
8298 but we don't handle values other than 1 correctly yet.) */
8299 tem = fold_truth_not_expr (loc, arg0);
8300 if (!tem)
8301 return NULL_TREE;
8302 return fold_convert_loc (loc, type, tem);
8303
8304 case REALPART_EXPR:
8305 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8306 return fold_convert_loc (loc, type, arg0);
8307 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8308 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8309 TREE_OPERAND (arg0, 1));
8310 if (TREE_CODE (arg0) == COMPLEX_CST)
8311 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8312 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8313 {
8314 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8315 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8316 fold_build1_loc (loc, REALPART_EXPR, itype,
8317 TREE_OPERAND (arg0, 0)),
8318 fold_build1_loc (loc, REALPART_EXPR, itype,
8319 TREE_OPERAND (arg0, 1)));
8320 return fold_convert_loc (loc, type, tem);
8321 }
8322 if (TREE_CODE (arg0) == CONJ_EXPR)
8323 {
8324 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8325 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8326 TREE_OPERAND (arg0, 0));
8327 return fold_convert_loc (loc, type, tem);
8328 }
8329 if (TREE_CODE (arg0) == CALL_EXPR)
8330 {
8331 tree fn = get_callee_fndecl (arg0);
8332 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8333 switch (DECL_FUNCTION_CODE (fn))
8334 {
8335 CASE_FLT_FN (BUILT_IN_CEXPI):
8336 fn = mathfn_built_in (type, BUILT_IN_COS);
8337 if (fn)
8338 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8339 break;
8340
8341 default:
8342 break;
8343 }
8344 }
8345 return NULL_TREE;
8346
8347 case IMAGPART_EXPR:
8348 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8349 return build_zero_cst (type);
8350 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8351 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8352 TREE_OPERAND (arg0, 0));
8353 if (TREE_CODE (arg0) == COMPLEX_CST)
8354 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8355 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8356 {
8357 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8358 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8359 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8360 TREE_OPERAND (arg0, 0)),
8361 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8362 TREE_OPERAND (arg0, 1)));
8363 return fold_convert_loc (loc, type, tem);
8364 }
8365 if (TREE_CODE (arg0) == CONJ_EXPR)
8366 {
8367 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8368 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8369 return fold_convert_loc (loc, type, negate_expr (tem));
8370 }
8371 if (TREE_CODE (arg0) == CALL_EXPR)
8372 {
8373 tree fn = get_callee_fndecl (arg0);
8374 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8375 switch (DECL_FUNCTION_CODE (fn))
8376 {
8377 CASE_FLT_FN (BUILT_IN_CEXPI):
8378 fn = mathfn_built_in (type, BUILT_IN_SIN);
8379 if (fn)
8380 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8381 break;
8382
8383 default:
8384 break;
8385 }
8386 }
8387 return NULL_TREE;
8388
8389 case INDIRECT_REF:
8390 /* Fold *&X to X if X is an lvalue. */
8391 if (TREE_CODE (op0) == ADDR_EXPR)
8392 {
8393 tree op00 = TREE_OPERAND (op0, 0);
8394 if ((TREE_CODE (op00) == VAR_DECL
8395 || TREE_CODE (op00) == PARM_DECL
8396 || TREE_CODE (op00) == RESULT_DECL)
8397 && !TREE_READONLY (op00))
8398 return op00;
8399 }
8400 return NULL_TREE;
8401
8402 case VEC_UNPACK_LO_EXPR:
8403 case VEC_UNPACK_HI_EXPR:
8404 case VEC_UNPACK_FLOAT_LO_EXPR:
8405 case VEC_UNPACK_FLOAT_HI_EXPR:
8406 {
8407 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8408 tree *elts;
8409 enum tree_code subcode;
8410
8411 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8412 if (TREE_CODE (arg0) != VECTOR_CST)
8413 return NULL_TREE;
8414
8415 elts = XALLOCAVEC (tree, nelts * 2);
8416 if (!vec_cst_ctor_to_array (arg0, elts))
8417 return NULL_TREE;
8418
8419 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8420 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8421 elts += nelts;
8422
8423 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8424 subcode = NOP_EXPR;
8425 else
8426 subcode = FLOAT_EXPR;
8427
8428 for (i = 0; i < nelts; i++)
8429 {
8430 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8431 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8432 return NULL_TREE;
8433 }
8434
8435 return build_vector (type, elts);
8436 }
8437
8438 case REDUC_MIN_EXPR:
8439 case REDUC_MAX_EXPR:
8440 case REDUC_PLUS_EXPR:
8441 {
8442 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8443 tree *elts;
8444 enum tree_code subcode;
8445
8446 if (TREE_CODE (op0) != VECTOR_CST)
8447 return NULL_TREE;
8448
8449 elts = XALLOCAVEC (tree, nelts);
8450 if (!vec_cst_ctor_to_array (op0, elts))
8451 return NULL_TREE;
8452
8453 switch (code)
8454 {
8455 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8456 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8457 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8458 default: gcc_unreachable ();
8459 }
8460
8461 for (i = 1; i < nelts; i++)
8462 {
8463 elts[0] = const_binop (subcode, elts[0], elts[i]);
8464 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8465 return NULL_TREE;
8466 elts[i] = build_zero_cst (TREE_TYPE (type));
8467 }
8468
8469 return build_vector (type, elts);
8470 }
8471
8472 default:
8473 return NULL_TREE;
8474 } /* switch (code) */
8475 }
8476
8477
8478 /* If the operation was a conversion, do _not_ mark a resulting constant
8479 with TREE_OVERFLOW if the original constant was not. These conversions
8480 have implementation defined behavior and retaining the TREE_OVERFLOW
8481 flag here would confuse later passes such as VRP. */
8482 tree
8483 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8484 tree type, tree op0)
8485 {
8486 tree res = fold_unary_loc (loc, code, type, op0);
8487 if (res
8488 && TREE_CODE (res) == INTEGER_CST
8489 && TREE_CODE (op0) == INTEGER_CST
8490 && CONVERT_EXPR_CODE_P (code))
8491 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8492
8493 return res;
8494 }
8495
8496 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8497 operands OP0 and OP1. LOC is the location of the resulting expression.
8498 ARG0 and ARG1 are OP0 and OP1 with conversions stripped (STRIP_NOPS).
8499 Return the folded expression if folding is successful. Otherwise,
8500 return NULL_TREE. */
8501 static tree
8502 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8503 tree arg0, tree arg1, tree op0, tree op1)
8504 {
8505 tree tem;
8506
8507 /* We only do these simplifications if we are optimizing. */
8508 if (!optimize)
8509 return NULL_TREE;
8510
8511 /* Check for things like (A || B) && (A || C). We can convert this
8512 to A || (B && C). Note that either operator can be any of the four
8513 truth and/or operations and the transformation will still be
8514 valid. Also note that we only care about order for the
8515 ANDIF and ORIF operators. If B contains side effects, this
8516 might change the truth-value of A. */
8517 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8518 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8519 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8520 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8521 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8522 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8523 {
8524 tree a00 = TREE_OPERAND (arg0, 0);
8525 tree a01 = TREE_OPERAND (arg0, 1);
8526 tree a10 = TREE_OPERAND (arg1, 0);
8527 tree a11 = TREE_OPERAND (arg1, 1);
8528 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8529 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8530 && (code == TRUTH_AND_EXPR
8531 || code == TRUTH_OR_EXPR));
8532
8533 if (operand_equal_p (a00, a10, 0))
8534 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8535 fold_build2_loc (loc, code, type, a01, a11));
8536 else if (commutative && operand_equal_p (a00, a11, 0))
8537 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8538 fold_build2_loc (loc, code, type, a01, a10));
8539 else if (commutative && operand_equal_p (a01, a10, 0))
8540 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8541 fold_build2_loc (loc, code, type, a00, a11));
8542
8543 /* This case is tricky because we must either have commutative
8544 operators or else A10 must not have side-effects. */
8545
8546 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8547 && operand_equal_p (a01, a11, 0))
8548 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8549 fold_build2_loc (loc, code, type, a00, a10),
8550 a01);
8551 }
8552
8553 /* See if we can build a range comparison. */
8554 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8555 return tem;
8556
8557 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8558 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8559 {
8560 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8561 if (tem)
8562 return fold_build2_loc (loc, code, type, tem, arg1);
8563 }
8564
8565 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8566 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8567 {
8568 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8569 if (tem)
8570 return fold_build2_loc (loc, code, type, arg0, tem);
8571 }
8572
8573 /* Check for the possibility of merging component references. If our
8574 lhs is another similar operation, try to merge its rhs with our
8575 rhs. Then try to merge our lhs and rhs. */
8576 if (TREE_CODE (arg0) == code
8577 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8578 TREE_OPERAND (arg0, 1), arg1)))
8579 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8580
8581 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8582 return tem;
8583
8584 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8585 && (code == TRUTH_AND_EXPR
8586 || code == TRUTH_ANDIF_EXPR
8587 || code == TRUTH_OR_EXPR
8588 || code == TRUTH_ORIF_EXPR))
8589 {
8590 enum tree_code ncode, icode;
8591
8592 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8593 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8594 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8595
8596 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8597 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8598 We don't want to pack more than two leaves into a non-IF AND/OR
8599 expression.
8600 If the tree code of the left-hand operand isn't an AND/OR-IF code
8601 and isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8602 If the inner right-hand side of the left-hand operand has
8603 side-effects, or isn't simple, then we can't add to it,
8604 as otherwise we might destroy the if-sequence. */
8605 if (TREE_CODE (arg0) == icode
8606 && simple_operand_p_2 (arg1)
8607 /* Needed for sequence points to handle trappings, and
8608 side-effects. */
8609 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8610 {
8611 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8612 arg1);
8613 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8614 tem);
8615 }
8616 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8617 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8618 else if (TREE_CODE (arg1) == icode
8619 && simple_operand_p_2 (arg0)
8620 /* Needed for sequence points to handle trappings, and
8621 side-effects. */
8622 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8623 {
8624 tem = fold_build2_loc (loc, ncode, type,
8625 arg0, TREE_OPERAND (arg1, 0));
8626 return fold_build2_loc (loc, icode, type, tem,
8627 TREE_OPERAND (arg1, 1));
8628 }
8629 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8630 into (A OR B).
8631 For sequence point consistency, we need to check for trapping
8632 and side-effects. */
8633 else if (code == icode && simple_operand_p_2 (arg0)
8634 && simple_operand_p_2 (arg1))
8635 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8636 }
8637
8638 return NULL_TREE;
8639 }
8640
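/* Illustrative sketch, not part of the original source: at the C level
   the distribution at the top of this function rewrites

     (a || b) && (a || c)   -->   a || (b && c)

   when `b' has no side effects, and the LOGICAL_OP_NON_SHORT_CIRCUIT
   path repacks ((a && b) && c) as (a && (b AND c)), using a
   non-short-circuit inner AND, provided `b' and `c' are simple
   operands that cannot trap.  */
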
8641 /* Fold a binary expression of code CODE and type TYPE with operands
8642 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8643 Return the folded expression if folding is successful. Otherwise,
8644 return NULL_TREE. */
8645
8646 static tree
8647 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8648 {
8649 enum tree_code compl_code;
8650
8651 if (code == MIN_EXPR)
8652 compl_code = MAX_EXPR;
8653 else if (code == MAX_EXPR)
8654 compl_code = MIN_EXPR;
8655 else
8656 gcc_unreachable ();
8657
8658 /* MIN (MAX (a, b), b) == b. */
8659 if (TREE_CODE (op0) == compl_code
8660 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8661 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8662
8663 /* MIN (MAX (b, a), b) == b. */
8664 if (TREE_CODE (op0) == compl_code
8665 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8666 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8667 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8668
8669 /* MIN (a, MAX (a, b)) == a. */
8670 if (TREE_CODE (op1) == compl_code
8671 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8672 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8673 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8674
8675 /* MIN (a, MAX (b, a)) == a. */
8676 if (TREE_CODE (op1) == compl_code
8677 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8678 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8679 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8680
8681 return NULL_TREE;
8682 }
8683
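/* A minimal sketch of the first pattern above (hypothetical operands):
   given GENERIC of the form

     MIN_EXPR <MAX_EXPR <a, b>, b>

   the result is simply `b', with any side effects of `a' preserved
   through omit_one_operand_loc.  */
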
8684 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8685 by changing CODE to reduce the magnitude of constants involved in
8686 ARG0 of the comparison.
8687 Returns a canonicalized comparison tree if a simplification was
8688 possible, otherwise returns NULL_TREE.
8689 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8690 valid if signed overflow is undefined. */
8691
8692 static tree
8693 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8694 tree arg0, tree arg1,
8695 bool *strict_overflow_p)
8696 {
8697 enum tree_code code0 = TREE_CODE (arg0);
8698 tree t, cst0 = NULL_TREE;
8699 int sgn0;
8700 bool swap = false;
8701
8702 /* Match A +- CST code arg1 and CST code arg1. We can change the
8703 first form only if overflow is undefined. */
8704 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8705 /* In principle pointers also have undefined overflow behavior,
8706 but that causes problems elsewhere. */
8707 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8708 && (code0 == MINUS_EXPR
8709 || code0 == PLUS_EXPR)
8710 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8711 || code0 == INTEGER_CST))
8712 return NULL_TREE;
8713
8714 /* Identify the constant in arg0 and its sign. */
8715 if (code0 == INTEGER_CST)
8716 cst0 = arg0;
8717 else
8718 cst0 = TREE_OPERAND (arg0, 1);
8719 sgn0 = tree_int_cst_sgn (cst0);
8720
8721 /* Overflowed constants and zero will cause problems. */
8722 if (integer_zerop (cst0)
8723 || TREE_OVERFLOW (cst0))
8724 return NULL_TREE;
8725
8726 /* See if we can reduce the magnitude of the constant in
8727 arg0 by changing the comparison code. */
8728 if (code0 == INTEGER_CST)
8729 {
8730 /* CST <= arg1 -> CST-1 < arg1. */
8731 if (code == LE_EXPR && sgn0 == 1)
8732 code = LT_EXPR;
8733 /* -CST < arg1 -> -CST-1 <= arg1. */
8734 else if (code == LT_EXPR && sgn0 == -1)
8735 code = LE_EXPR;
8736 /* CST > arg1 -> CST-1 >= arg1. */
8737 else if (code == GT_EXPR && sgn0 == 1)
8738 code = GE_EXPR;
8739 /* -CST >= arg1 -> -CST-1 > arg1. */
8740 else if (code == GE_EXPR && sgn0 == -1)
8741 code = GT_EXPR;
8742 else
8743 return NULL_TREE;
8744 /* arg1 code' CST' might be more canonical. */
8745 swap = true;
8746 }
8747 else
8748 {
8749 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8750 if (code == LT_EXPR
8751 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8752 code = LE_EXPR;
8753 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8754 else if (code == GT_EXPR
8755 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8756 code = GE_EXPR;
8757 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8758 else if (code == LE_EXPR
8759 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8760 code = LT_EXPR;
8761 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8762 else if (code == GE_EXPR
8763 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8764 code = GT_EXPR;
8765 else
8766 return NULL_TREE;
8767 *strict_overflow_p = true;
8768 }
8769
8770 /* Now build the constant reduced in magnitude. But not if that
8771 would produce one outside of its type's range. */
8772 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8773 && ((sgn0 == 1
8774 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8775 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8776 || (sgn0 == -1
8777 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8778 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8779 /* We cannot swap the comparison here as that would cause us to
8780 endlessly recurse. */
8781 return NULL_TREE;
8782
8783 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8784 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8785 if (code0 != INTEGER_CST)
8786 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8787 t = fold_convert (TREE_TYPE (arg1), t);
8788
8789 /* If swapping might yield a more canonical form, do so. */
8790 if (swap)
8791 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8792 else
8793 return fold_build2_loc (loc, code, type, t, arg1);
8794 }
8795
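/* Worked example, assuming signed int operands with undefined overflow:
   the rules above rewrite

     a - 5 < b   -->   a - 4 <= b      (constant magnitude shrinks)
     5 <= b      -->   4 < b   -->   b > 4   (after the canonicalizing swap)

   and they deliberately give up when shrinking the constant would step
   outside its type's range, as checked against TYPE_MIN_VALUE and
   TYPE_MAX_VALUE above.  */
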
8796 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8797 overflow further. Try to decrease the magnitude of constants involved
8798 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8799 and put sole constants at the second argument position.
8800 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8801
8802 static tree
8803 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8804 tree arg0, tree arg1)
8805 {
8806 tree t;
8807 bool strict_overflow_p;
8808 const char * const warnmsg = G_("assuming signed overflow does not occur "
8809 "when reducing constant in comparison");
8810
8811 /* Try canonicalization by simplifying arg0. */
8812 strict_overflow_p = false;
8813 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8814 &strict_overflow_p);
8815 if (t)
8816 {
8817 if (strict_overflow_p)
8818 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8819 return t;
8820 }
8821
8822 /* Try canonicalization by simplifying arg1 using the swapped
8823 comparison. */
8824 code = swap_tree_comparison (code);
8825 strict_overflow_p = false;
8826 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8827 &strict_overflow_p);
8828 if (t && strict_overflow_p)
8829 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8830 return t;
8831 }
8832
8833 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8834 space. This is used to avoid issuing overflow warnings for
8835 expressions like &p->x which cannot wrap. */
8836
8837 static bool
8838 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8839 {
8840 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8841 return true;
8842
8843 if (bitpos < 0)
8844 return true;
8845
8846 wide_int wi_offset;
8847 int precision = TYPE_PRECISION (TREE_TYPE (base));
8848 if (offset == NULL_TREE)
8849 wi_offset = wi::zero (precision);
8850 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8851 return true;
8852 else
8853 wi_offset = offset;
8854
8855 bool overflow;
8856 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8857 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8858 if (overflow)
8859 return true;
8860
8861 if (!wi::fits_uhwi_p (total))
8862 return true;
8863
8864 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8865 if (size <= 0)
8866 return true;
8867
8868 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8869 array. */
8870 if (TREE_CODE (base) == ADDR_EXPR)
8871 {
8872 HOST_WIDE_INT base_size;
8873
8874 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8875 if (base_size > 0 && size < base_size)
8876 size = base_size;
8877 }
8878
8879 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8880 }
8881
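/* Numeric sketch with assumed values: for a pointer to a 16-byte
   object with offset 8 and bitpos 0, total is 8 and 8 > 16 is false,
   so the address is known not to wrap; any overflow in the offset
   arithmetic itself conservatively returns true.  */
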
8882 /* Return the HOST_WIDE_INT least significant bits of T, an
8883 INTEGER_CST of sizetype kind. This makes sure to properly
8884 sign-extend the constant. */
8885
8886 static HOST_WIDE_INT
8887 size_low_cst (const_tree t)
8888 {
8889 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8890 int prec = TYPE_PRECISION (TREE_TYPE (t));
8891 if (prec < HOST_BITS_PER_WIDE_INT)
8892 return sext_hwi (w, prec);
8893 return w;
8894 }
8895
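/* Example, assuming HOST_BITS_PER_WIDE_INT == 64: for a 32-bit sizetype
   constant whose low element is 0xffffffff, sext_hwi (0xffffffff, 32)
   yields (HOST_WIDE_INT) -1, the signed value the middle-end's pointer
   arithmetic expects for such offsets.  */
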
8896 /* Subroutine of fold_binary. This routine performs all of the
8897 transformations that are common to the equality/inequality
8898 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8899 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8900 fold_binary should go through fold_binary instead. Fold a comparison with
8901 tree code CODE and type TYPE with operands OP0 and OP1. Return
8902 the folded comparison or NULL_TREE. */
8903
8904 static tree
8905 fold_comparison (location_t loc, enum tree_code code, tree type,
8906 tree op0, tree op1)
8907 {
8908 tree arg0, arg1, tem;
8909
8910 arg0 = op0;
8911 arg1 = op1;
8912
8913 STRIP_SIGN_NOPS (arg0);
8914 STRIP_SIGN_NOPS (arg1);
8915
8916 tem = fold_relational_const (code, type, arg0, arg1);
8917 if (tem != NULL_TREE)
8918 return tem;
8919
8920 /* If one arg is a real or integer constant, put it last. */
8921 if (tree_swap_operands_p (arg0, arg1, true))
8922 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8923
8924 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8925 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8926 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8927 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8928 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8929 && (TREE_CODE (arg1) == INTEGER_CST
8930 && !TREE_OVERFLOW (arg1)))
8931 {
8932 tree const1 = TREE_OPERAND (arg0, 1);
8933 tree const2 = arg1;
8934 tree variable = TREE_OPERAND (arg0, 0);
8935 tree lhs;
8936 int lhs_add;
8937 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8938
8939 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8940 TREE_TYPE (arg1), const2, const1);
8941
8942 /* If the constant operation overflowed this can be
8943 simplified as a comparison against INT_MAX/INT_MIN. */
8944 if (TREE_CODE (lhs) == INTEGER_CST
8945 && TREE_OVERFLOW (lhs))
8946 {
8947 int const1_sgn = tree_int_cst_sgn (const1);
8948 enum tree_code code2 = code;
8949
8950 /* Get the sign of the constant on the lhs if the
8951 operation were VARIABLE + CONST1. */
8952 if (TREE_CODE (arg0) == MINUS_EXPR)
8953 const1_sgn = -const1_sgn;
8954
8955 /* The sign of the constant determines if we overflowed
8956 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8957 Canonicalize to the INT_MIN overflow by swapping the comparison
8958 if necessary. */
8959 if (const1_sgn == -1)
8960 code2 = swap_tree_comparison (code);
8961
8962 /* We now can look at the canonicalized case
8963 VARIABLE + 1 CODE2 INT_MIN
8964 and decide on the result. */
8965 if (code2 == LT_EXPR
8966 || code2 == LE_EXPR
8967 || code2 == EQ_EXPR)
8968 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8969 else if (code2 == NE_EXPR
8970 || code2 == GE_EXPR
8971 || code2 == GT_EXPR)
8972 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8973 }
8974
8975 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8976 && (TREE_CODE (lhs) != INTEGER_CST
8977 || !TREE_OVERFLOW (lhs)))
8978 {
8979 if (code != EQ_EXPR && code != NE_EXPR)
8980 fold_overflow_warning ("assuming signed overflow does not occur "
8981 "when changing X +- C1 cmp C2 to "
8982 "X cmp C1 +- C2",
8983 WARN_STRICT_OVERFLOW_COMPARISON);
8984 return fold_build2_loc (loc, code, type, variable, lhs);
8985 }
8986 }
8987
8988 /* For comparisons of pointers we can decompose it to a compile time
8989 comparison of the base objects and the offsets into the object.
8990 This requires at least one operand being an ADDR_EXPR or a
8991 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8992 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8993 && (TREE_CODE (arg0) == ADDR_EXPR
8994 || TREE_CODE (arg1) == ADDR_EXPR
8995 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8996 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8997 {
8998 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8999 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9000 enum machine_mode mode;
9001 int volatilep, unsignedp;
9002 bool indirect_base0 = false, indirect_base1 = false;
9003
9004 /* Get base and offset for the access. Strip ADDR_EXPR for
9005 get_inner_reference, but put it back by stripping INDIRECT_REF
9006 off the base object if possible. indirect_baseN will be true
9007 if baseN is not an address but refers to the object itself. */
9008 base0 = arg0;
9009 if (TREE_CODE (arg0) == ADDR_EXPR)
9010 {
9011 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9012 &bitsize, &bitpos0, &offset0, &mode,
9013 &unsignedp, &volatilep, false);
9014 if (TREE_CODE (base0) == INDIRECT_REF)
9015 base0 = TREE_OPERAND (base0, 0);
9016 else
9017 indirect_base0 = true;
9018 }
9019 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9020 {
9021 base0 = TREE_OPERAND (arg0, 0);
9022 STRIP_SIGN_NOPS (base0);
9023 if (TREE_CODE (base0) == ADDR_EXPR)
9024 {
9025 base0 = TREE_OPERAND (base0, 0);
9026 indirect_base0 = true;
9027 }
9028 offset0 = TREE_OPERAND (arg0, 1);
9029 if (tree_fits_shwi_p (offset0))
9030 {
9031 HOST_WIDE_INT off = size_low_cst (offset0);
9032 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9033 * BITS_PER_UNIT)
9034 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9035 {
9036 bitpos0 = off * BITS_PER_UNIT;
9037 offset0 = NULL_TREE;
9038 }
9039 }
9040 }
9041
9042 base1 = arg1;
9043 if (TREE_CODE (arg1) == ADDR_EXPR)
9044 {
9045 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9046 &bitsize, &bitpos1, &offset1, &mode,
9047 &unsignedp, &volatilep, false);
9048 if (TREE_CODE (base1) == INDIRECT_REF)
9049 base1 = TREE_OPERAND (base1, 0);
9050 else
9051 indirect_base1 = true;
9052 }
9053 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9054 {
9055 base1 = TREE_OPERAND (arg1, 0);
9056 STRIP_SIGN_NOPS (base1);
9057 if (TREE_CODE (base1) == ADDR_EXPR)
9058 {
9059 base1 = TREE_OPERAND (base1, 0);
9060 indirect_base1 = true;
9061 }
9062 offset1 = TREE_OPERAND (arg1, 1);
9063 if (tree_fits_shwi_p (offset1))
9064 {
9065 HOST_WIDE_INT off = size_low_cst (offset1);
9066 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9067 * BITS_PER_UNIT)
9068 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9069 {
9070 bitpos1 = off * BITS_PER_UNIT;
9071 offset1 = NULL_TREE;
9072 }
9073 }
9074 }
9075
9076 /* A local variable can never be pointed to by
9077 the default SSA name of an incoming parameter. */
9078 if ((TREE_CODE (arg0) == ADDR_EXPR
9079 && indirect_base0
9080 && TREE_CODE (base0) == VAR_DECL
9081 && auto_var_in_fn_p (base0, current_function_decl)
9082 && !indirect_base1
9083 && TREE_CODE (base1) == SSA_NAME
9084 && SSA_NAME_IS_DEFAULT_DEF (base1)
9085 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9086 || (TREE_CODE (arg1) == ADDR_EXPR
9087 && indirect_base1
9088 && TREE_CODE (base1) == VAR_DECL
9089 && auto_var_in_fn_p (base1, current_function_decl)
9090 && !indirect_base0
9091 && TREE_CODE (base0) == SSA_NAME
9092 && SSA_NAME_IS_DEFAULT_DEF (base0)
9093 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9094 {
9095 if (code == NE_EXPR)
9096 return constant_boolean_node (1, type);
9097 else if (code == EQ_EXPR)
9098 return constant_boolean_node (0, type);
9099 }
9100 /* If we have equivalent bases we might be able to simplify. */
9101 else if (indirect_base0 == indirect_base1
9102 && operand_equal_p (base0, base1, 0))
9103 {
9104 /* We can fold this expression to a constant if the non-constant
9105 offset parts are equal. */
9106 if ((offset0 == offset1
9107 || (offset0 && offset1
9108 && operand_equal_p (offset0, offset1, 0)))
9109 && (code == EQ_EXPR
9110 || code == NE_EXPR
9111 || (indirect_base0 && DECL_P (base0))
9112 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9113
9114 {
9115 if (code != EQ_EXPR
9116 && code != NE_EXPR
9117 && bitpos0 != bitpos1
9118 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9119 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9120 fold_overflow_warning (("assuming pointer wraparound does not "
9121 "occur when comparing P +- C1 with "
9122 "P +- C2"),
9123 WARN_STRICT_OVERFLOW_CONDITIONAL);
9124
9125 switch (code)
9126 {
9127 case EQ_EXPR:
9128 return constant_boolean_node (bitpos0 == bitpos1, type);
9129 case NE_EXPR:
9130 return constant_boolean_node (bitpos0 != bitpos1, type);
9131 case LT_EXPR:
9132 return constant_boolean_node (bitpos0 < bitpos1, type);
9133 case LE_EXPR:
9134 return constant_boolean_node (bitpos0 <= bitpos1, type);
9135 case GE_EXPR:
9136 return constant_boolean_node (bitpos0 >= bitpos1, type);
9137 case GT_EXPR:
9138 return constant_boolean_node (bitpos0 > bitpos1, type);
9139 default:;
9140 }
9141 }
9142 /* We can simplify the comparison to a comparison of the variable
9143 offset parts if the constant offset parts are equal.
9144 Be careful to use signed sizetype here because otherwise we
9145 mess with array offsets in the wrong way. This is possible
9146 because pointer arithmetic is restricted to remain within an
9147 object and overflow on pointer differences is undefined as of
9148 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9149 else if (bitpos0 == bitpos1
9150 && ((code == EQ_EXPR || code == NE_EXPR)
9151 || (indirect_base0 && DECL_P (base0))
9152 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9153 {
9154 /* By converting to signed sizetype we cover middle-end pointer
9155 arithmetic which operates on unsigned pointer types of size
9156 type size and ARRAY_REF offsets which are properly sign or
9157 zero extended from their type in case it is narrower than
9158 sizetype. */
9159 if (offset0 == NULL_TREE)
9160 offset0 = build_int_cst (ssizetype, 0);
9161 else
9162 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9163 if (offset1 == NULL_TREE)
9164 offset1 = build_int_cst (ssizetype, 0);
9165 else
9166 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9167
9168 if (code != EQ_EXPR
9169 && code != NE_EXPR
9170 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9171 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9172 fold_overflow_warning (("assuming pointer wraparound does not "
9173 "occur when comparing P +- C1 with "
9174 "P +- C2"),
9175 WARN_STRICT_OVERFLOW_COMPARISON);
9176
9177 return fold_build2_loc (loc, code, type, offset0, offset1);
9178 }
9179 }
9180 /* For non-equal bases we can simplify if they are addresses
9181 of local binding decls or constants. */
9182 else if (indirect_base0 && indirect_base1
9183 /* We know that !operand_equal_p (base0, base1, 0)
9184 because the if condition was false. But make
9185 sure two decls are not the same. */
9186 && base0 != base1
9187 && TREE_CODE (arg0) == ADDR_EXPR
9188 && TREE_CODE (arg1) == ADDR_EXPR
9189 && (((TREE_CODE (base0) == VAR_DECL
9190 || TREE_CODE (base0) == PARM_DECL)
9191 && (targetm.binds_local_p (base0)
9192 || CONSTANT_CLASS_P (base1)))
9193 || CONSTANT_CLASS_P (base0))
9194 && (((TREE_CODE (base1) == VAR_DECL
9195 || TREE_CODE (base1) == PARM_DECL)
9196 && (targetm.binds_local_p (base1)
9197 || CONSTANT_CLASS_P (base0)))
9198 || CONSTANT_CLASS_P (base1)))
9199 {
9200 if (code == EQ_EXPR)
9201 return omit_two_operands_loc (loc, type, boolean_false_node,
9202 arg0, arg1);
9203 else if (code == NE_EXPR)
9204 return omit_two_operands_loc (loc, type, boolean_true_node,
9205 arg0, arg1);
9206 }
9207 /* For equal offsets we can simplify to a comparison of the
9208 base addresses. */
9209 else if (bitpos0 == bitpos1
9210 && (indirect_base0
9211 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9212 && (indirect_base1
9213 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9214 && ((offset0 == offset1)
9215 || (offset0 && offset1
9216 && operand_equal_p (offset0, offset1, 0))))
9217 {
9218 if (indirect_base0)
9219 base0 = build_fold_addr_expr_loc (loc, base0);
9220 if (indirect_base1)
9221 base1 = build_fold_addr_expr_loc (loc, base1);
9222 return fold_build2_loc (loc, code, type, base0, base1);
9223 }
9224 }
9225
9226 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9227 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9228 the resulting offset is smaller in absolute value than the
9229 original one. */
9230 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9231 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9232 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9233 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9234 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9235 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9236 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9237 {
9238 tree const1 = TREE_OPERAND (arg0, 1);
9239 tree const2 = TREE_OPERAND (arg1, 1);
9240 tree variable1 = TREE_OPERAND (arg0, 0);
9241 tree variable2 = TREE_OPERAND (arg1, 0);
9242 tree cst;
9243 const char * const warnmsg = G_("assuming signed overflow does not "
9244 "occur when combining constants around "
9245 "a comparison");
9246
9247 /* Put the constant on the side where it doesn't overflow and is
9248 of lower absolute value than before. */
9249 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9250 ? MINUS_EXPR : PLUS_EXPR,
9251 const2, const1);
9252 if (!TREE_OVERFLOW (cst)
9253 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9254 {
9255 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9256 return fold_build2_loc (loc, code, type,
9257 variable1,
9258 fold_build2_loc (loc,
9259 TREE_CODE (arg1), TREE_TYPE (arg1),
9260 variable2, cst));
9261 }
9262
9263 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9264 ? MINUS_EXPR : PLUS_EXPR,
9265 const1, const2);
9266 if (!TREE_OVERFLOW (cst)
9267 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9268 {
9269 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9270 return fold_build2_loc (loc, code, type,
9271 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9272 variable1, cst),
9273 variable2);
9274 }
9275 }
9276
9277 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9278 signed arithmetic case. That form is created by the compiler
9279 often enough for folding it to be of value. One example is in
9280 computing loop trip counts after Operator Strength Reduction. */
9281 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9282 && TREE_CODE (arg0) == MULT_EXPR
9283 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9284 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9285 && integer_zerop (arg1))
9286 {
9287 tree const1 = TREE_OPERAND (arg0, 1);
9288 tree const2 = arg1; /* zero */
9289 tree variable1 = TREE_OPERAND (arg0, 0);
9290 enum tree_code cmp_code = code;
9291
9292 /* Handle unfolded multiplication by zero. */
9293 if (integer_zerop (const1))
9294 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9295
9296 fold_overflow_warning (("assuming signed overflow does not occur when "
9297 "eliminating multiplication in comparison "
9298 "with zero"),
9299 WARN_STRICT_OVERFLOW_COMPARISON);
9300
9301 /* If const1 is negative we swap the sense of the comparison. */
9302 if (tree_int_cst_sgn (const1) < 0)
9303 cmp_code = swap_tree_comparison (cmp_code);
9304
9305 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9306 }
9307
9308 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9309 if (tem)
9310 return tem;
9311
9312 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9313 {
9314 tree targ0 = strip_float_extensions (arg0);
9315 tree targ1 = strip_float_extensions (arg1);
9316 tree newtype = TREE_TYPE (targ0);
9317
9318 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9319 newtype = TREE_TYPE (targ1);
9320
9321 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9322 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9323 return fold_build2_loc (loc, code, type,
9324 fold_convert_loc (loc, newtype, targ0),
9325 fold_convert_loc (loc, newtype, targ1));
9326
9327 /* (-a) CMP (-b) -> b CMP a */
9328 if (TREE_CODE (arg0) == NEGATE_EXPR
9329 && TREE_CODE (arg1) == NEGATE_EXPR)
9330 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9331 TREE_OPERAND (arg0, 0));
9332
9333 if (TREE_CODE (arg1) == REAL_CST)
9334 {
9335 REAL_VALUE_TYPE cst;
9336 cst = TREE_REAL_CST (arg1);
9337
9338 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9339 if (TREE_CODE (arg0) == NEGATE_EXPR)
9340 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9341 TREE_OPERAND (arg0, 0),
9342 build_real (TREE_TYPE (arg1),
9343 real_value_negate (&cst)));
9344
9345 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9346 /* a CMP (-0) -> a CMP 0 */
9347 if (REAL_VALUE_MINUS_ZERO (cst))
9348 return fold_build2_loc (loc, code, type, arg0,
9349 build_real (TREE_TYPE (arg1), dconst0));
9350
9351 /* x != NaN is always true, other ops are always false. */
9352 if (REAL_VALUE_ISNAN (cst)
9353 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9354 {
9355 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9356 return omit_one_operand_loc (loc, type, tem, arg0);
9357 }
9358
9359 /* Fold comparisons against infinity. */
9360 if (REAL_VALUE_ISINF (cst)
9361 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9362 {
9363 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9364 if (tem != NULL_TREE)
9365 return tem;
9366 }
9367 }
9368
9369 /* If this is a comparison of a real constant with a PLUS_EXPR
9370 or a MINUS_EXPR of a real constant, we can convert it into a
9371 comparison with a revised real constant as long as no overflow
9372 occurs when unsafe_math_optimizations are enabled. */
9373 if (flag_unsafe_math_optimizations
9374 && TREE_CODE (arg1) == REAL_CST
9375 && (TREE_CODE (arg0) == PLUS_EXPR
9376 || TREE_CODE (arg0) == MINUS_EXPR)
9377 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9378 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9379 ? MINUS_EXPR : PLUS_EXPR,
9380 arg1, TREE_OPERAND (arg0, 1)))
9381 && !TREE_OVERFLOW (tem))
9382 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9383
9384 /* Likewise, we can simplify a comparison of a real constant with
9385 a MINUS_EXPR whose first operand is also a real constant, i.e.
9386 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9387 floating-point types only if -fassociative-math is set. */
9388 if (flag_associative_math
9389 && TREE_CODE (arg1) == REAL_CST
9390 && TREE_CODE (arg0) == MINUS_EXPR
9391 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9392 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9393 arg1))
9394 && !TREE_OVERFLOW (tem))
9395 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9396 TREE_OPERAND (arg0, 1), tem);
9397
9398 /* Fold comparisons against built-in math functions. */
9399 if (TREE_CODE (arg1) == REAL_CST
9400 && flag_unsafe_math_optimizations
9401 && ! flag_errno_math)
9402 {
9403 enum built_in_function fcode = builtin_mathfn_code (arg0);
9404
9405 if (fcode != END_BUILTINS)
9406 {
9407 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9408 if (tem != NULL_TREE)
9409 return tem;
9410 }
9411 }
9412 }
9413
9414 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9415 && CONVERT_EXPR_P (arg0))
9416 {
9417 /* If we are widening one operand of an integer comparison,
9418 see if the other operand is similarly being widened. Perhaps we
9419 can do the comparison in the narrower type. */
9420 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9421 if (tem)
9422 return tem;
9423
9424 /* Or if we are changing signedness. */
9425 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9426 if (tem)
9427 return tem;
9428 }
9429
9430 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9431 constant, we can simplify it. */
9432 if (TREE_CODE (arg1) == INTEGER_CST
9433 && (TREE_CODE (arg0) == MIN_EXPR
9434 || TREE_CODE (arg0) == MAX_EXPR)
9435 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9436 {
9437 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9438 if (tem)
9439 return tem;
9440 }
9441
9442 /* Simplify comparison of something with itself. (For IEEE
9443 floating-point, we can only do some of these simplifications.) */
9444 if (operand_equal_p (arg0, arg1, 0))
9445 {
9446 switch (code)
9447 {
9448 case EQ_EXPR:
9449 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9450 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9451 return constant_boolean_node (1, type);
9452 break;
9453
9454 case GE_EXPR:
9455 case LE_EXPR:
9456 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9457 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9458 return constant_boolean_node (1, type);
9459 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9460
9461 case NE_EXPR:
9462 /* For NE, we can only do this simplification if integer
9463 or we don't honor IEEE floating point NaNs. */
9464 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9465 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9466 break;
9467 /* ... fall through ... */
9468 case GT_EXPR:
9469 case LT_EXPR:
9470 return constant_boolean_node (0, type);
9471 default:
9472 gcc_unreachable ();
9473 }
9474 }
9475
9476 /* If we are comparing an expression that just has comparisons
9477 of two integer values, arithmetic expressions of those comparisons,
9478 and constants, we can simplify it. There are only three cases
9479 to check: the two values can either be equal, the first can be
9480 greater, or the second can be greater. Fold the expression for
9481 those three values. Since each value must be 0 or 1, we have
9482 eight possibilities, each of which corresponds to the constant 0
9483 or 1 or one of the six possible comparisons.
9484
9485 This handles common cases like (a > b) == 0 but also handles
9486 expressions like ((x > y) - (y > x)) > 0, which supposedly
9487 occur in macroized code. */
9488
9489 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9490 {
9491 tree cval1 = 0, cval2 = 0;
9492 int save_p = 0;
9493
9494 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9495 /* Don't handle degenerate cases here; they should already
9496 have been handled anyway. */
9497 && cval1 != 0 && cval2 != 0
9498 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9499 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9500 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9501 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9502 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9503 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9504 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9505 {
9506 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9507 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9508
9509 /* We can't just pass T to eval_subst in case cval1 or cval2
9510 was the same as ARG1. */
9511
9512 tree high_result
9513 = fold_build2_loc (loc, code, type,
9514 eval_subst (loc, arg0, cval1, maxval,
9515 cval2, minval),
9516 arg1);
9517 tree equal_result
9518 = fold_build2_loc (loc, code, type,
9519 eval_subst (loc, arg0, cval1, maxval,
9520 cval2, maxval),
9521 arg1);
9522 tree low_result
9523 = fold_build2_loc (loc, code, type,
9524 eval_subst (loc, arg0, cval1, minval,
9525 cval2, maxval),
9526 arg1);
9527
9528 /* All three of these results should be 0 or 1. Confirm they are.
9529 Then use those values to select the proper code to use. */
9530
9531 if (TREE_CODE (high_result) == INTEGER_CST
9532 && TREE_CODE (equal_result) == INTEGER_CST
9533 && TREE_CODE (low_result) == INTEGER_CST)
9534 {
9535 /* Make a 3-bit mask with the high-order bit being the
9536 value for `>', the next for '=', and the low for '<'. */
9537 switch ((integer_onep (high_result) * 4)
9538 + (integer_onep (equal_result) * 2)
9539 + integer_onep (low_result))
9540 {
9541 case 0:
9542 /* Always false. */
9543 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9544 case 1:
9545 code = LT_EXPR;
9546 break;
9547 case 2:
9548 code = EQ_EXPR;
9549 break;
9550 case 3:
9551 code = LE_EXPR;
9552 break;
9553 case 4:
9554 code = GT_EXPR;
9555 break;
9556 case 5:
9557 code = NE_EXPR;
9558 break;
9559 case 6:
9560 code = GE_EXPR;
9561 break;
9562 case 7:
9563 /* Always true. */
9564 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9565 }
9566
9567 if (save_p)
9568 {
9569 tem = save_expr (build2 (code, type, cval1, cval2));
9570 SET_EXPR_LOCATION (tem, loc);
9571 return tem;
9572 }
9573 return fold_build2_loc (loc, code, type, cval1, cval2);
9574 }
9575 }
9576 }
9577
9578 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9579 into a single range test. */
9580 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9581 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9582 && TREE_CODE (arg1) == INTEGER_CST
9583 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9584 && !integer_zerop (TREE_OPERAND (arg0, 1))
9585 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9586 && !TREE_OVERFLOW (arg1))
9587 {
9588 tem = fold_div_compare (loc, code, type, arg0, arg1);
9589 if (tem != NULL_TREE)
9590 return tem;
9591 }
9592
9593 /* Fold ~X op ~Y as Y op X. */
9594 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9595 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9596 {
9597 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9598 return fold_build2_loc (loc, code, type,
9599 fold_convert_loc (loc, cmp_type,
9600 TREE_OPERAND (arg1, 0)),
9601 TREE_OPERAND (arg0, 0));
9602 }
9603
9604 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9605 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9606 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9607 {
9608 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9609 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9610 TREE_OPERAND (arg0, 0),
9611 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9612 fold_convert_loc (loc, cmp_type, arg1)));
9613 }
9614
9615 return NULL_TREE;
9616 }
9617
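/* Worked example with hypothetical signed operands: the X +- C1 CMP C2
   transform near the top of fold_comparison turns `x + 3 < 10' into
   `x < 7', and the final ~X rules fold `~x < ~y' to `y < x'.  */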
9618
9619 /* Subroutine of fold_binary. Optimize complex multiplications of the
9620 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9621 argument EXPR represents the expression "z" of type TYPE. */
9622
9623 static tree
9624 fold_mult_zconjz (location_t loc, tree type, tree expr)
9625 {
9626 tree itype = TREE_TYPE (type);
9627 tree rpart, ipart, tem;
9628
9629 if (TREE_CODE (expr) == COMPLEX_EXPR)
9630 {
9631 rpart = TREE_OPERAND (expr, 0);
9632 ipart = TREE_OPERAND (expr, 1);
9633 }
9634 else if (TREE_CODE (expr) == COMPLEX_CST)
9635 {
9636 rpart = TREE_REALPART (expr);
9637 ipart = TREE_IMAGPART (expr);
9638 }
9639 else
9640 {
9641 expr = save_expr (expr);
9642 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9643 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9644 }
9645
9646 rpart = save_expr (rpart);
9647 ipart = save_expr (ipart);
9648 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9649 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9650 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9651 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9652 build_zero_cst (itype));
9653 }
9654
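/* The identity used above, for z = a + b*i:

     z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i

   which is why the folded result pairs the sum of squares with the
   zero constant as its imaginary part.  */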
9655
9656 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9657 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9658 guarantees that P and N have the same least significant log2(M) bits.
9659 N is not otherwise constrained. In particular, N is not normalized to
9660 0 <= N < M as is common. In general, the precise value of P is unknown.
9661 M is chosen as large as possible such that constant N can be determined.
9662
9663 Returns M and sets *RESIDUE to N.
9664
9665 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9666 account. This is not always possible due to PR 35705.
9667 */
9668
9669 static unsigned HOST_WIDE_INT
9670 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9671 bool allow_func_align)
9672 {
9673 enum tree_code code;
9674
9675 *residue = 0;
9676
9677 code = TREE_CODE (expr);
9678 if (code == ADDR_EXPR)
9679 {
9680 unsigned int bitalign;
9681 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9682 *residue /= BITS_PER_UNIT;
9683 return bitalign / BITS_PER_UNIT;
9684 }
9685 else if (code == POINTER_PLUS_EXPR)
9686 {
9687 tree op0, op1;
9688 unsigned HOST_WIDE_INT modulus;
9689 enum tree_code inner_code;
9690
9691 op0 = TREE_OPERAND (expr, 0);
9692 STRIP_NOPS (op0);
9693 modulus = get_pointer_modulus_and_residue (op0, residue,
9694 allow_func_align);
9695
9696 op1 = TREE_OPERAND (expr, 1);
9697 STRIP_NOPS (op1);
9698 inner_code = TREE_CODE (op1);
9699 if (inner_code == INTEGER_CST)
9700 {
9701 *residue += TREE_INT_CST_LOW (op1);
9702 return modulus;
9703 }
9704 else if (inner_code == MULT_EXPR)
9705 {
9706 op1 = TREE_OPERAND (op1, 1);
9707 if (TREE_CODE (op1) == INTEGER_CST)
9708 {
9709 unsigned HOST_WIDE_INT align;
9710
9711 /* Compute the greatest power-of-2 divisor of op1. */
9712 align = TREE_INT_CST_LOW (op1);
9713 align &= -align;
9714
9715 /* If align is non-zero and less than *modulus, replace
9716 *modulus with align. If align is 0, then either op1 is 0
9717 or the greatest power-of-2 divisor of op1 doesn't fit in an
9718 unsigned HOST_WIDE_INT. In either case, no additional
9719 constraint is imposed. */
9720 if (align)
9721 modulus = MIN (modulus, align);
9722
9723 return modulus;
9724 }
9725 }
9726 }
9727
9728 /* If we get here, we were unable to determine anything useful about the
9729 expression. */
9730 return 1;
9731 }
9732
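/* Worked example with assumed alignments: for `&v + i * 12' where `v'
   is known 16-byte aligned, the ADDR_EXPR case yields modulus 16 and
   residue 0; the MULT_EXPR leg computes align = 12 & -12 = 4, so the
   result is MIN (16, 4) = 4 -- the pointer is known 4-byte aligned.  */
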
9733 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9734 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9735
9736 static bool
9737 vec_cst_ctor_to_array (tree arg, tree *elts)
9738 {
9739 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9740
9741 if (TREE_CODE (arg) == VECTOR_CST)
9742 {
9743 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9744 elts[i] = VECTOR_CST_ELT (arg, i);
9745 }
9746 else if (TREE_CODE (arg) == CONSTRUCTOR)
9747 {
9748 constructor_elt *elt;
9749
9750 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9751 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9752 return false;
9753 else
9754 elts[i] = elt->value;
9755 }
9756 else
9757 return false;
9758 for (; i < nelts; i++)
9759 elts[i]
9760 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9761 return true;
9762 }
9763
9764 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9765 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9766 NULL_TREE otherwise. */
9767
9768 static tree
9769 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9770 {
9771 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9772 tree *elts;
9773 bool need_ctor = false;
9774
9775 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9776 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9777 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9778 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9779 return NULL_TREE;
9780
9781 elts = XALLOCAVEC (tree, nelts * 3);
9782 if (!vec_cst_ctor_to_array (arg0, elts)
9783 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9784 return NULL_TREE;
9785
9786 for (i = 0; i < nelts; i++)
9787 {
9788 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9789 need_ctor = true;
9790 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9791 }
9792
9793 if (need_ctor)
9794 {
9795 vec<constructor_elt, va_gc> *v;
9796 vec_alloc (v, nelts);
9797 for (i = 0; i < nelts; i++)
9798 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9799 return build_constructor (type, v);
9800 }
9801 else
9802 return build_vector (type, &elts[2 * nelts]);
9803 }
9804
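/* Sketch with hypothetical 4-element vectors: for arg0 = {0,1,2,3},
   arg1 = {4,5,6,7} and sel = {0,4,1,5}, the loop indexes the
   concatenated 8-element array and produces {0,4,1,5}; with all
   elements constant, build_vector is used instead of a CONSTRUCTOR.  */
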
9805 /* Try to fold a pointer difference of type TYPE between two address
9806 expressions of array references AREF0 and AREF1 using location LOC. Return a
9807 simplified expression for the difference or NULL_TREE. */
9808
9809 static tree
9810 fold_addr_of_array_ref_difference (location_t loc, tree type,
9811 tree aref0, tree aref1)
9812 {
9813 tree base0 = TREE_OPERAND (aref0, 0);
9814 tree base1 = TREE_OPERAND (aref1, 0);
9815 tree base_offset = build_int_cst (type, 0);
9816
9817 /* If the bases are array references as well, recurse. If the bases
9818 are pointer indirections compute the difference of the pointers.
9819 If the bases are equal, we are set. */
9820 if ((TREE_CODE (base0) == ARRAY_REF
9821 && TREE_CODE (base1) == ARRAY_REF
9822 && (base_offset
9823 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9824 || (INDIRECT_REF_P (base0)
9825 && INDIRECT_REF_P (base1)
9826 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9827 TREE_OPERAND (base0, 0),
9828 TREE_OPERAND (base1, 0))))
9829 || operand_equal_p (base0, base1, 0))
9830 {
9831 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9832 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9833 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9834 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9835 return fold_build2_loc (loc, PLUS_EXPR, type,
9836 base_offset,
9837 fold_build2_loc (loc, MULT_EXPR, type,
9838 diff, esz));
9839 }
9840 return NULL_TREE;
9841 }
9842
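/* Example, assuming `int a[10]': for the GENERIC difference of
   &a[i] and &a[j] the bases are equal, so the result is the byte
   offset (i - j) * 4, i.e. base_offset 0 plus the index difference
   scaled by the element size.  */
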
9843 /* If the real or vector real constant CST of type TYPE has an exact
9844 inverse, return it, else return NULL. */
9845
9846 static tree
9847 exact_inverse (tree type, tree cst)
9848 {
9849 REAL_VALUE_TYPE r;
9850 tree unit_type, *elts;
9851 enum machine_mode mode;
9852 unsigned vec_nelts, i;
9853
9854 switch (TREE_CODE (cst))
9855 {
9856 case REAL_CST:
9857 r = TREE_REAL_CST (cst);
9858
9859 if (exact_real_inverse (TYPE_MODE (type), &r))
9860 return build_real (type, r);
9861
9862 return NULL_TREE;
9863
9864 case VECTOR_CST:
9865 vec_nelts = VECTOR_CST_NELTS (cst);
9866 elts = XALLOCAVEC (tree, vec_nelts);
9867 unit_type = TREE_TYPE (type);
9868 mode = TYPE_MODE (unit_type);
9869
9870 for (i = 0; i < vec_nelts; i++)
9871 {
9872 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9873 if (!exact_real_inverse (mode, &r))
9874 return NULL_TREE;
9875 elts[i] = build_real (unit_type, r);
9876 }
9877
9878 return build_vector (type, elts);
9879
9880 default:
9881 return NULL_TREE;
9882 }
9883 }
9884
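/* Example: 4.0 has the exact binary inverse 0.25, so a division by 4.0
   can be rewritten as a multiplication by 0.25; 3.0 has no exact
   inverse (1/3 is not representable in binary), so exact_real_inverse
   fails and NULL_TREE is returned.  */
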
9885 /* Mask out the tz least significant bits of X of type TYPE where
9886 tz is the number of trailing zeroes in Y. */
9887 static wide_int
9888 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9889 {
9890 int tz = wi::ctz (y);
9891 if (tz > 0)
9892 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9893 return x;
9894 }
9895
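/* Example: for y = 8 (three trailing zeros) wi::ctz gives 3, and the
   negated mask clears the three low bits of x, e.g. x = 29 (11101b)
   becomes 24 (11000b).  */
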
9896 /* Return true when T is an address and is known to be nonzero.
9897 For floating point we further ensure that T is not denormal.
9898 Similar logic is present in nonzero_address in rtlanal.h.
9899
9900 If the return value is based on the assumption that signed overflow
9901 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9902 change *STRICT_OVERFLOW_P. */
9903
9904 static bool
9905 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9906 {
9907 tree type = TREE_TYPE (t);
9908 enum tree_code code;
9909
9910 /* Doing something useful for floating point would need more work. */
9911 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9912 return false;
9913
9914 code = TREE_CODE (t);
9915 switch (TREE_CODE_CLASS (code))
9916 {
9917 case tcc_unary:
9918 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9919 strict_overflow_p);
9920 case tcc_binary:
9921 case tcc_comparison:
9922 return tree_binary_nonzero_warnv_p (code, type,
9923 TREE_OPERAND (t, 0),
9924 TREE_OPERAND (t, 1),
9925 strict_overflow_p);
9926 case tcc_constant:
9927 case tcc_declaration:
9928 case tcc_reference:
9929 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9930
9931 default:
9932 break;
9933 }
9934
9935 switch (code)
9936 {
9937 case TRUTH_NOT_EXPR:
9938 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9939 strict_overflow_p);
9940
9941 case TRUTH_AND_EXPR:
9942 case TRUTH_OR_EXPR:
9943 case TRUTH_XOR_EXPR:
9944 return tree_binary_nonzero_warnv_p (code, type,
9945 TREE_OPERAND (t, 0),
9946 TREE_OPERAND (t, 1),
9947 strict_overflow_p);
9948
9949 case COND_EXPR:
9950 case CONSTRUCTOR:
9951 case OBJ_TYPE_REF:
9952 case ASSERT_EXPR:
9953 case ADDR_EXPR:
9954 case WITH_SIZE_EXPR:
9955 case SSA_NAME:
9956 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9957
9958 case COMPOUND_EXPR:
9959 case MODIFY_EXPR:
9960 case BIND_EXPR:
9961 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9962 strict_overflow_p);
9963
9964 case SAVE_EXPR:
9965 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9966 strict_overflow_p);
9967
9968 case CALL_EXPR:
9969 {
9970 tree fndecl = get_callee_fndecl (t);
9971 if (!fndecl) return false;
9972 if (flag_delete_null_pointer_checks && !flag_check_new
9973 && DECL_IS_OPERATOR_NEW (fndecl)
9974 && !TREE_NOTHROW (fndecl))
9975 return true;
9976 if (flag_delete_null_pointer_checks
9977 && lookup_attribute ("returns_nonnull",
9978 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9979 return true;
9980 return alloca_call_p (t);
9981 }
9982
9983 default:
9984 break;
9985 }
9986 return false;
9987 }
9988
9989 /* Return true when T is an address and is known to be nonzero.
9990 Handle warnings about undefined signed overflow. */
9991
9992 static bool
9993 tree_expr_nonzero_p (tree t)
9994 {
9995 bool ret, strict_overflow_p;
9996
9997 strict_overflow_p = false;
9998 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9999 if (strict_overflow_p)
10000 fold_overflow_warning (("assuming signed overflow does not occur when "
10001 "determining that expression is always "
10002 "non-zero"),
10003 WARN_STRICT_OVERFLOW_MISC);
10004 return ret;
10005 }
10006
10007 /* Fold a binary expression of code CODE and type TYPE with operands
10008 OP0 and OP1. LOC is the location of the resulting expression.
10009 Return the folded expression if folding is successful. Otherwise,
10010 return NULL_TREE. */
10011
10012 tree
10013 fold_binary_loc (location_t loc,
10014 enum tree_code code, tree type, tree op0, tree op1)
10015 {
10016 enum tree_code_class kind = TREE_CODE_CLASS (code);
10017 tree arg0, arg1, tem;
10018 tree t1 = NULL_TREE;
10019 bool strict_overflow_p;
10020 unsigned int prec;
10021
10022 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10023 && TREE_CODE_LENGTH (code) == 2
10024 && op0 != NULL_TREE
10025 && op1 != NULL_TREE);
10026
10027 arg0 = op0;
10028 arg1 = op1;
10029
10030 /* Strip any conversions that don't change the mode. This is
10031 safe for every expression, except for a comparison expression
10032 because its signedness is derived from its operands. So, in
10033 the latter case, only strip conversions that don't change the
10034 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10035 preserved.
10036
10037 Note that this is done as an internal manipulation within the
10038 constant folder, in order to find the simplest representation
10039 of the arguments so that their form can be studied. In any
10040 case, the appropriate type conversions should be put back in
10041 the tree that will get out of the constant folder. */
10042
10043 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10044 {
10045 STRIP_SIGN_NOPS (arg0);
10046 STRIP_SIGN_NOPS (arg1);
10047 }
10048 else
10049 {
10050 STRIP_NOPS (arg0);
10051 STRIP_NOPS (arg1);
10052 }
10053
10054 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10055 constant but we can't do arithmetic on them. */
10056 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10057 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10058 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10059 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10060 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10061 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10062 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10063 {
10064 if (kind == tcc_binary)
10065 {
10066 /* Make sure type and arg0 have the same saturating flag. */
10067 gcc_assert (TYPE_SATURATING (type)
10068 == TYPE_SATURATING (TREE_TYPE (arg0)));
10069 tem = const_binop (code, arg0, arg1);
10070 }
10071 else if (kind == tcc_comparison)
10072 tem = fold_relational_const (code, type, arg0, arg1);
10073 else
10074 tem = NULL_TREE;
10075
10076 if (tem != NULL_TREE)
10077 {
10078 if (TREE_TYPE (tem) != type)
10079 tem = fold_convert_loc (loc, type, tem);
10080 return tem;
10081 }
10082 }
10083
10084 /* If this is a commutative operation, and ARG0 is a constant, move it
10085 to ARG1 to reduce the number of tests below. */
10086 if (commutative_tree_code (code)
10087 && tree_swap_operands_p (arg0, arg1, true))
10088 return fold_build2_loc (loc, code, type, op1, op0);
10089
10090 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10091
10092 First check for cases where an arithmetic operation is applied to a
10093 compound, conditional, or comparison operation. Push the arithmetic
10094 operation inside the compound or conditional to see if any folding
10095 can then be done. Convert comparison to conditional for this purpose.
10096 This also optimizes non-constant cases that used to be done in
10097 expand_expr.
10098
10099 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10100 where one operand is a comparison and the other is a comparison, a
10101 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10102 code below would make the expression more complex. Change it to a
10103 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10104 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10105
10106 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10107 || code == EQ_EXPR || code == NE_EXPR)
10108 && TREE_CODE (type) != VECTOR_TYPE
10109 && ((truth_value_p (TREE_CODE (arg0))
10110 && (truth_value_p (TREE_CODE (arg1))
10111 || (TREE_CODE (arg1) == BIT_AND_EXPR
10112 && integer_onep (TREE_OPERAND (arg1, 1)))))
10113 || (truth_value_p (TREE_CODE (arg1))
10114 && (truth_value_p (TREE_CODE (arg0))
10115 || (TREE_CODE (arg0) == BIT_AND_EXPR
10116 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10117 {
10118 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10119 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10120 : TRUTH_XOR_EXPR,
10121 boolean_type_node,
10122 fold_convert_loc (loc, boolean_type_node, arg0),
10123 fold_convert_loc (loc, boolean_type_node, arg1));
10124
10125 if (code == EQ_EXPR)
10126 tem = invert_truthvalue_loc (loc, tem);
10127
10128 return fold_convert_loc (loc, type, tem);
10129 }
10130
10131 if (TREE_CODE_CLASS (code) == tcc_binary
10132 || TREE_CODE_CLASS (code) == tcc_comparison)
10133 {
10134 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10135 {
10136 tem = fold_build2_loc (loc, code, type,
10137 fold_convert_loc (loc, TREE_TYPE (op0),
10138 TREE_OPERAND (arg0, 1)), op1);
10139 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10140 tem);
10141 }
10142 if (TREE_CODE (arg1) == COMPOUND_EXPR
10143 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10144 {
10145 tem = fold_build2_loc (loc, code, type, op0,
10146 fold_convert_loc (loc, TREE_TYPE (op1),
10147 TREE_OPERAND (arg1, 1)));
10148 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10149 tem);
10150 }
10151
10152 if (TREE_CODE (arg0) == COND_EXPR
10153 || TREE_CODE (arg0) == VEC_COND_EXPR
10154 || COMPARISON_CLASS_P (arg0))
10155 {
10156 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10157 arg0, arg1,
10158 /*cond_first_p=*/1);
10159 if (tem != NULL_TREE)
10160 return tem;
10161 }
10162
10163 if (TREE_CODE (arg1) == COND_EXPR
10164 || TREE_CODE (arg1) == VEC_COND_EXPR
10165 || COMPARISON_CLASS_P (arg1))
10166 {
10167 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10168 arg1, arg0,
10169 /*cond_first_p=*/0);
10170 if (tem != NULL_TREE)
10171 return tem;
10172 }
10173 }
10174
10175 switch (code)
10176 {
10177 case MEM_REF:
10178 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10179 if (TREE_CODE (arg0) == ADDR_EXPR
10180 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10181 {
10182 tree iref = TREE_OPERAND (arg0, 0);
10183 return fold_build2 (MEM_REF, type,
10184 TREE_OPERAND (iref, 0),
10185 int_const_binop (PLUS_EXPR, arg1,
10186 TREE_OPERAND (iref, 1)));
10187 }
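
/* e.g., with hypothetical constant offsets, MEM[&MEM[p, 4], 8]
   becomes MEM[p, 12] here, merging the two offsets into one.  */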
10188
10189 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10190 if (TREE_CODE (arg0) == ADDR_EXPR
10191 && handled_component_p (TREE_OPERAND (arg0, 0)))
10192 {
10193 tree base;
10194 HOST_WIDE_INT coffset;
10195 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10196 &coffset);
10197 if (!base)
10198 return NULL_TREE;
10199 return fold_build2 (MEM_REF, type,
10200 build_fold_addr_expr (base),
10201 int_const_binop (PLUS_EXPR, arg1,
10202 size_int (coffset)));
10203 }
10204
10205 return NULL_TREE;
10206
10207 case POINTER_PLUS_EXPR:
10208 /* 0 +p index -> (type)index */
10209 if (integer_zerop (arg0))
10210 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10211
10212 /* PTR +p 0 -> PTR */
10213 if (integer_zerop (arg1))
10214 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10215
10216 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10217 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10218 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10219 return fold_convert_loc (loc, type,
10220 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10221 fold_convert_loc (loc, sizetype,
10222 arg1),
10223 fold_convert_loc (loc, sizetype,
10224 arg0)));
10225
10226 /* (PTR +p B) +p A -> PTR +p (B + A) */
10227 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10228 {
10229 tree inner;
10230 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10231 tree arg00 = TREE_OPERAND (arg0, 0);
10232 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10233 arg01, fold_convert_loc (loc, sizetype, arg1));
10234 return fold_convert_loc (loc, type,
10235 fold_build_pointer_plus_loc (loc,
10236 arg00, inner));
10237 }
10238
10239 /* PTR_CST +p CST -> CST1 */
10240 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10241 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10242 fold_convert_loc (loc, type, arg1));
10243
10244 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10245 of the array. The loop optimizer sometimes produces this type of
10246 expression. */
10247 if (TREE_CODE (arg0) == ADDR_EXPR)
10248 {
10249 tem = try_move_mult_to_index (loc, arg0,
10250 fold_convert_loc (loc,
10251 ssizetype, arg1));
10252 if (tem)
10253 return fold_convert_loc (loc, type, tem);
10254 }
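	/* Illustration (assuming a 4-byte int): for int a[10],
	   &a[1] p+ 12 can fold to &a[4], since the offset 12 is
	   3 times the array step of 4.  */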
10255
10256 return NULL_TREE;
10257
10258 case PLUS_EXPR:
10259 /* A + (-B) -> A - B */
10260 if (TREE_CODE (arg1) == NEGATE_EXPR
10261 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10262 return fold_build2_loc (loc, MINUS_EXPR, type,
10263 fold_convert_loc (loc, type, arg0),
10264 fold_convert_loc (loc, type,
10265 TREE_OPERAND (arg1, 0)));
10266 /* (-A) + B -> B - A */
10267 if (TREE_CODE (arg0) == NEGATE_EXPR
10268 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10269 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10270 return fold_build2_loc (loc, MINUS_EXPR, type,
10271 fold_convert_loc (loc, type, arg1),
10272 fold_convert_loc (loc, type,
10273 TREE_OPERAND (arg0, 0)));
10274
10275 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10276 {
10277 /* Convert ~A + 1 to -A. */
10278 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10279 && integer_onep (arg1))
10280 return fold_build1_loc (loc, NEGATE_EXPR, type,
10281 fold_convert_loc (loc, type,
10282 TREE_OPERAND (arg0, 0)));
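	  /* This is the two's-complement identity -A == ~A + 1,
	     e.g. ~5 + 1 == -6 + 1 == -5.  */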
10283
10284 /* ~X + X is -1. */
10285 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10286 && !TYPE_OVERFLOW_TRAPS (type))
10287 {
10288 tree tem = TREE_OPERAND (arg0, 0);
10289
10290 STRIP_NOPS (tem);
10291 if (operand_equal_p (tem, arg1, 0))
10292 {
10293 t1 = build_all_ones_cst (type);
10294 return omit_one_operand_loc (loc, type, t1, arg1);
10295 }
10296 }
10297
10298 /* X + ~X is -1. */
10299 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10300 && !TYPE_OVERFLOW_TRAPS (type))
10301 {
10302 tree tem = TREE_OPERAND (arg1, 0);
10303
10304 STRIP_NOPS (tem);
10305 if (operand_equal_p (arg0, tem, 0))
10306 {
10307 t1 = build_all_ones_cst (type);
10308 return omit_one_operand_loc (loc, type, t1, arg0);
10309 }
10310 }
10311
10312 /* X + (X / CST) * -CST is X % CST. */
10313 if (TREE_CODE (arg1) == MULT_EXPR
10314 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10315 && operand_equal_p (arg0,
10316 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10317 {
10318 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10319 tree cst1 = TREE_OPERAND (arg1, 1);
10320 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10321 cst1, cst0);
10322 if (sum && integer_zerop (sum))
10323 return fold_convert_loc (loc, type,
10324 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10325 TREE_TYPE (arg0), arg0,
10326 cst0));
10327 }
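	  /* Worked example: for X == 17 and CST == 5,
	     17 + (17/5)*-5 == 17 - 15 == 2 == 17 % 5.  */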
10328 }
10329
10330 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10331 one. Make sure the type is not saturating and has the signedness of
10332 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10333 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10334 if ((TREE_CODE (arg0) == MULT_EXPR
10335 || TREE_CODE (arg1) == MULT_EXPR)
10336 && !TYPE_SATURATING (type)
10337 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10338 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10339 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10340 {
10341 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10342 if (tem)
10343 return tem;
10344 }
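	/* E.g. x*3 + x*5 can become x*8, and a*c + b*c can become
	   (a + b)*c, subject to the type checks above.  */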
10345
10346 if (! FLOAT_TYPE_P (type))
10347 {
10348 if (integer_zerop (arg1))
10349 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10350
10351 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10352 with a constant, and the two constants have no bits in common,
10353 we should treat this as a BIT_IOR_EXPR since this may produce more
10354 simplifications. */
10355 if (TREE_CODE (arg0) == BIT_AND_EXPR
10356 && TREE_CODE (arg1) == BIT_AND_EXPR
10357 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10358 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10359 && integer_zerop (const_binop (BIT_AND_EXPR,
10360 TREE_OPERAND (arg0, 1),
10361 TREE_OPERAND (arg1, 1))))
10362 {
10363 code = BIT_IOR_EXPR;
10364 goto bit_ior;
10365 }
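	  /* E.g. (x & 0xF0) + (y & 0x0F) is handled as
	     (x & 0xF0) | (y & 0x0F): the masks share no bits, so no
	     carries can occur and the addition is a disjoint OR.  */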
10366
10367 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10368 (plus (plus (mult) (mult)) (foo)) so that we can
10369 take advantage of the factoring cases below. */
10370 if (TYPE_OVERFLOW_WRAPS (type)
10371 && (((TREE_CODE (arg0) == PLUS_EXPR
10372 || TREE_CODE (arg0) == MINUS_EXPR)
10373 && TREE_CODE (arg1) == MULT_EXPR)
10374 || ((TREE_CODE (arg1) == PLUS_EXPR
10375 || TREE_CODE (arg1) == MINUS_EXPR)
10376 && TREE_CODE (arg0) == MULT_EXPR)))
10377 {
10378 tree parg0, parg1, parg, marg;
10379 enum tree_code pcode;
10380
10381 if (TREE_CODE (arg1) == MULT_EXPR)
10382 parg = arg0, marg = arg1;
10383 else
10384 parg = arg1, marg = arg0;
10385 pcode = TREE_CODE (parg);
10386 parg0 = TREE_OPERAND (parg, 0);
10387 parg1 = TREE_OPERAND (parg, 1);
10388 STRIP_NOPS (parg0);
10389 STRIP_NOPS (parg1);
10390
10391 if (TREE_CODE (parg0) == MULT_EXPR
10392 && TREE_CODE (parg1) != MULT_EXPR)
10393 return fold_build2_loc (loc, pcode, type,
10394 fold_build2_loc (loc, PLUS_EXPR, type,
10395 fold_convert_loc (loc, type,
10396 parg0),
10397 fold_convert_loc (loc, type,
10398 marg)),
10399 fold_convert_loc (loc, type, parg1));
10400 if (TREE_CODE (parg0) != MULT_EXPR
10401 && TREE_CODE (parg1) == MULT_EXPR)
10402 return
10403 fold_build2_loc (loc, PLUS_EXPR, type,
10404 fold_convert_loc (loc, type, parg0),
10405 fold_build2_loc (loc, pcode, type,
10406 fold_convert_loc (loc, type, marg),
10407 fold_convert_loc (loc, type,
10408 parg1)));
10409 }
10410 }
10411 else
10412 {
10413 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10414 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10415 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10416
10417 /* Likewise if the operands are reversed. */
10418 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10419 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10420
10421 /* Convert X + -C into X - C. */
10422 if (TREE_CODE (arg1) == REAL_CST
10423 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10424 {
10425 tem = fold_negate_const (arg1, type);
10426 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10427 return fold_build2_loc (loc, MINUS_EXPR, type,
10428 fold_convert_loc (loc, type, arg0),
10429 fold_convert_loc (loc, type, tem));
10430 }
10431
10432 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10433 to __complex__ ( x, y ). This is not the same for SNaNs or
10434 if signed zeros are involved. */
10435 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10436 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10437 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10438 {
10439 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10440 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10441 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10442 bool arg0rz = false, arg0iz = false;
10443 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10444 || (arg0i && (arg0iz = real_zerop (arg0i))))
10445 {
10446 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10447 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10448 if (arg0rz && arg1i && real_zerop (arg1i))
10449 {
10450 tree rp = arg1r ? arg1r
10451 : build1 (REALPART_EXPR, rtype, arg1);
10452 tree ip = arg0i ? arg0i
10453 : build1 (IMAGPART_EXPR, rtype, arg0);
10454 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10455 }
10456 else if (arg0iz && arg1r && real_zerop (arg1r))
10457 {
10458 tree rp = arg0r ? arg0r
10459 : build1 (REALPART_EXPR, rtype, arg0);
10460 tree ip = arg1i ? arg1i
10461 : build1 (IMAGPART_EXPR, rtype, arg1);
10462 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10463 }
10464 }
10465 }
10466
10467 if (flag_unsafe_math_optimizations
10468 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10469 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10470 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10471 return tem;
10472
10473 /* Convert x+x into x*2.0. */
10474 if (operand_equal_p (arg0, arg1, 0)
10475 && SCALAR_FLOAT_TYPE_P (type))
10476 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10477 build_real (type, dconst2));
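	  /* Doubling is exact in binary floating point, so x + x and
	     x * 2.0 compute the same value; no -ffast-math flag is
	     needed for this transform.  */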
10478
10479 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10480 We associate floats only if the user has specified
10481 -fassociative-math. */
10482 if (flag_associative_math
10483 && TREE_CODE (arg1) == PLUS_EXPR
10484 && TREE_CODE (arg0) != MULT_EXPR)
10485 {
10486 tree tree10 = TREE_OPERAND (arg1, 0);
10487 tree tree11 = TREE_OPERAND (arg1, 1);
10488 if (TREE_CODE (tree11) == MULT_EXPR
10489 && TREE_CODE (tree10) == MULT_EXPR)
10490 {
10491 tree tree0;
10492 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10493 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10494 }
10495 }
10496 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10497 We associate floats only if the user has specified
10498 -fassociative-math. */
10499 if (flag_associative_math
10500 && TREE_CODE (arg0) == PLUS_EXPR
10501 && TREE_CODE (arg1) != MULT_EXPR)
10502 {
10503 tree tree00 = TREE_OPERAND (arg0, 0);
10504 tree tree01 = TREE_OPERAND (arg0, 1);
10505 if (TREE_CODE (tree01) == MULT_EXPR
10506 && TREE_CODE (tree00) == MULT_EXPR)
10507 {
10508 tree tree0;
10509 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10510 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10511 }
10512 }
10513 }
10514
10515 bit_rotate:
10516 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10517 is a rotate of A by C1 bits. */
10518 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10519 is a rotate of A by B bits. */
10520 {
10521 enum tree_code code0, code1;
10522 tree rtype;
10523 code0 = TREE_CODE (arg0);
10524 code1 = TREE_CODE (arg1);
10525 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10526 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10527 && operand_equal_p (TREE_OPERAND (arg0, 0),
10528 TREE_OPERAND (arg1, 0), 0)
10529 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10530 TYPE_UNSIGNED (rtype))
10531 /* Only create rotates in complete modes. Other cases are not
10532 expanded properly. */
10533 && (element_precision (rtype)
10534 == element_precision (TYPE_MODE (rtype))))
10535 {
10536 tree tree01, tree11;
10537 enum tree_code code01, code11;
10538
10539 tree01 = TREE_OPERAND (arg0, 1);
10540 tree11 = TREE_OPERAND (arg1, 1);
10541 STRIP_NOPS (tree01);
10542 STRIP_NOPS (tree11);
10543 code01 = TREE_CODE (tree01);
10544 code11 = TREE_CODE (tree11);
10545 if (code01 == INTEGER_CST
10546 && code11 == INTEGER_CST
10547 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10548 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10549 {
10550 tem = build2_loc (loc, LROTATE_EXPR,
10551 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10552 TREE_OPERAND (arg0, 0),
10553 code0 == LSHIFT_EXPR ? tree01 : tree11);
10554 return fold_convert_loc (loc, type, tem);
10555 }
10556 else if (code11 == MINUS_EXPR)
10557 {
10558 tree tree110, tree111;
10559 tree110 = TREE_OPERAND (tree11, 0);
10560 tree111 = TREE_OPERAND (tree11, 1);
10561 STRIP_NOPS (tree110);
10562 STRIP_NOPS (tree111);
10563 if (TREE_CODE (tree110) == INTEGER_CST
10564 && 0 == compare_tree_int (tree110,
10565 element_precision
10566 (TREE_TYPE (TREE_OPERAND
10567 (arg0, 0))))
10568 && operand_equal_p (tree01, tree111, 0))
10569 return
10570 fold_convert_loc (loc, type,
10571 build2 ((code0 == LSHIFT_EXPR
10572 ? LROTATE_EXPR
10573 : RROTATE_EXPR),
10574 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10575 TREE_OPERAND (arg0, 0), tree01));
10576 }
10577 else if (code01 == MINUS_EXPR)
10578 {
10579 tree tree010, tree011;
10580 tree010 = TREE_OPERAND (tree01, 0);
10581 tree011 = TREE_OPERAND (tree01, 1);
10582 STRIP_NOPS (tree010);
10583 STRIP_NOPS (tree011);
10584 if (TREE_CODE (tree010) == INTEGER_CST
10585 && 0 == compare_tree_int (tree010,
10586 element_precision
10587 (TREE_TYPE (TREE_OPERAND
10588 (arg0, 0))))
10589 && operand_equal_p (tree11, tree011, 0))
10590 return fold_convert_loc
10591 (loc, type,
10592 build2 ((code0 != LSHIFT_EXPR
10593 ? LROTATE_EXPR
10594 : RROTATE_EXPR),
10595 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10596 TREE_OPERAND (arg0, 0), tree11));
10597 }
10598 }
10599 }
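    /* Illustration (assuming a 32-bit unsigned int x):
       (x << 3) + (x >> 29) becomes x rotated left by 3, and
       (x << b) + (x >> (32 - b)) becomes x rotated left by b.  */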
10600
10601 associate:
10602 /* In most languages, we can't associate operations on floats through
10603 parentheses. Rather than remember where the parentheses were, we
10604 don't associate floats at all, unless the user has specified
10605 -fassociative-math.
10606 We also need to make sure the type is not saturating. */
10607
10608 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10609 && !TYPE_SATURATING (type))
10610 {
10611 tree var0, con0, lit0, minus_lit0;
10612 tree var1, con1, lit1, minus_lit1;
10613 tree atype = type;
10614 bool ok = true;
10615
10616 /* Split both trees into variables, constants, and literals. Then
10617 associate each group together, the constants with literals,
10618 then the result with variables. This increases the chances of
10619 literals being recombined later and of generating relocatable
10620 expressions for the sum of a constant and literal. */
10621 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10622 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10623 code == MINUS_EXPR);
10624
10625 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10626 if (code == MINUS_EXPR)
10627 code = PLUS_EXPR;
10628
10629 /* With undefined overflow prefer doing association in a type
10630 which wraps on overflow, if that is one of the operand types. */
10631 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10632 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10633 {
10634 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10635 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10636 atype = TREE_TYPE (arg0);
10637 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10638 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10639 atype = TREE_TYPE (arg1);
10640 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10641 }
10642
10643 /* With undefined overflow we can only associate constants with one
10644 variable, and constants whose association doesn't overflow. */
10645 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10646 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10647 {
10648 if (var0 && var1)
10649 {
10650 tree tmp0 = var0;
10651 tree tmp1 = var1;
10652
10653 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10654 tmp0 = TREE_OPERAND (tmp0, 0);
10655 if (CONVERT_EXPR_P (tmp0)
10656 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10657 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10658 <= TYPE_PRECISION (atype)))
10659 tmp0 = TREE_OPERAND (tmp0, 0);
10660 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10661 tmp1 = TREE_OPERAND (tmp1, 0);
10662 if (CONVERT_EXPR_P (tmp1)
10663 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10664 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10665 <= TYPE_PRECISION (atype)))
10666 tmp1 = TREE_OPERAND (tmp1, 0);
10667 /* The only case we can still associate with two variables
10668 is if they are the same, modulo negation and bit-pattern
10669 preserving conversions. */
10670 if (!operand_equal_p (tmp0, tmp1, 0))
10671 ok = false;
10672 }
10673 }
10674
10675 /* Only do something if we found more than two objects. Otherwise,
10676 nothing has changed and we risk infinite recursion. */
10677 if (ok
10678 && (2 < ((var0 != 0) + (var1 != 0)
10679 + (con0 != 0) + (con1 != 0)
10680 + (lit0 != 0) + (lit1 != 0)
10681 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10682 {
10683 bool any_overflows = false;
10684 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10685 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10686 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10687 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10688 var0 = associate_trees (loc, var0, var1, code, atype);
10689 con0 = associate_trees (loc, con0, con1, code, atype);
10690 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10691 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10692 code, atype);
10693
10694 /* Preserve the MINUS_EXPR if the negative part of the literal is
10695 greater than the positive part. Otherwise, the multiplicative
10696 folding code (i.e. extract_muldiv) may be fooled when
10697 unsigned constants are subtracted, as in the following
10698 example: ((X*2 + 4) - 8U)/2. */
10699 if (minus_lit0 && lit0)
10700 {
10701 if (TREE_CODE (lit0) == INTEGER_CST
10702 && TREE_CODE (minus_lit0) == INTEGER_CST
10703 && tree_int_cst_lt (lit0, minus_lit0))
10704 {
10705 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10706 MINUS_EXPR, atype);
10707 lit0 = 0;
10708 }
10709 else
10710 {
10711 lit0 = associate_trees (loc, lit0, minus_lit0,
10712 MINUS_EXPR, atype);
10713 minus_lit0 = 0;
10714 }
10715 }
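	    /* Roughly: in ((X*2 + 4) - 8U)/2 the literal parts are 4
	       and -8; folding them as 8 - 4 keeps the small constant
	       in X*2 - 4, whereas 4 - 8 in an unsigned type would
	       wrap to a huge value and defeat the division by 2.  */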
10716
10717 /* Don't introduce overflows through reassociation. */
10718 if (!any_overflows
10719 && ((lit0 && TREE_OVERFLOW (lit0))
10720 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10721 return NULL_TREE;
10722
10723 if (minus_lit0)
10724 {
10725 if (con0 == 0)
10726 return
10727 fold_convert_loc (loc, type,
10728 associate_trees (loc, var0, minus_lit0,
10729 MINUS_EXPR, atype));
10730 else
10731 {
10732 con0 = associate_trees (loc, con0, minus_lit0,
10733 MINUS_EXPR, atype);
10734 return
10735 fold_convert_loc (loc, type,
10736 associate_trees (loc, var0, con0,
10737 PLUS_EXPR, atype));
10738 }
10739 }
10740
10741 con0 = associate_trees (loc, con0, lit0, code, atype);
10742 return
10743 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10744 code, atype));
10745 }
10746 }
10747
10748 return NULL_TREE;
10749
10750 case MINUS_EXPR:
10751 /* Pointer simplifications for subtraction, simple reassociations. */
10752 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10753 {
10754 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10755 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10756 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10757 {
10758 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10759 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10760 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10761 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10762 return fold_build2_loc (loc, PLUS_EXPR, type,
10763 fold_build2_loc (loc, MINUS_EXPR, type,
10764 arg00, arg10),
10765 fold_build2_loc (loc, MINUS_EXPR, type,
10766 arg01, arg11));
10767 }
10768 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10769 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10770 {
10771 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10772 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10773 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10774 fold_convert_loc (loc, type, arg1));
10775 if (tmp)
10776 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10777 }
10778 }
10779 /* A - (-B) -> A + B */
10780 if (TREE_CODE (arg1) == NEGATE_EXPR)
10781 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10782 fold_convert_loc (loc, type,
10783 TREE_OPERAND (arg1, 0)));
10784 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10785 if (TREE_CODE (arg0) == NEGATE_EXPR
10786 && negate_expr_p (arg1)
10787 && reorder_operands_p (arg0, arg1))
10788 return fold_build2_loc (loc, MINUS_EXPR, type,
10789 fold_convert_loc (loc, type,
10790 negate_expr (arg1)),
10791 fold_convert_loc (loc, type,
10792 TREE_OPERAND (arg0, 0)));
10793 /* Convert -A - 1 to ~A. */
10794 if (TREE_CODE (type) != COMPLEX_TYPE
10795 && TREE_CODE (arg0) == NEGATE_EXPR
10796 && integer_onep (arg1)
10797 && !TYPE_OVERFLOW_TRAPS (type))
10798 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10799 fold_convert_loc (loc, type,
10800 TREE_OPERAND (arg0, 0)));
10801
10802 /* Convert -1 - A to ~A. */
10803 if (TREE_CODE (type) != COMPLEX_TYPE
10804 && integer_all_onesp (arg0))
10805 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10806
10807
10808 /* X - (X / Y) * Y is X % Y. */
10809 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10810 && TREE_CODE (arg1) == MULT_EXPR
10811 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10812 && operand_equal_p (arg0,
10813 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10814 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10815 TREE_OPERAND (arg1, 1), 0))
10816 return
10817 fold_convert_loc (loc, type,
10818 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10819 arg0, TREE_OPERAND (arg1, 1)));
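      /* Worked example: for X == 23 and Y == 7,
	 23 - (23/7)*7 == 23 - 21 == 2 == 23 % 7.  */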
10820
10821 if (! FLOAT_TYPE_P (type))
10822 {
10823 if (integer_zerop (arg0))
10824 return negate_expr (fold_convert_loc (loc, type, arg1));
10825 if (integer_zerop (arg1))
10826 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10827
10828 /* Fold A - (A & B) into ~B & A. */
10829 if (!TREE_SIDE_EFFECTS (arg0)
10830 && TREE_CODE (arg1) == BIT_AND_EXPR)
10831 {
10832 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10833 {
10834 tree arg10 = fold_convert_loc (loc, type,
10835 TREE_OPERAND (arg1, 0));
10836 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10837 fold_build1_loc (loc, BIT_NOT_EXPR,
10838 type, arg10),
10839 fold_convert_loc (loc, type, arg0));
10840 }
10841 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10842 {
10843 tree arg11 = fold_convert_loc (loc,
10844 type, TREE_OPERAND (arg1, 1));
10845 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10846 fold_build1_loc (loc, BIT_NOT_EXPR,
10847 type, arg11),
10848 fold_convert_loc (loc, type, arg0));
10849 }
10850 }
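	  /* Bitwise illustration: for A == 0b1101 and B == 0b0110,
	     A - (A & B) == 13 - 4 == 9 == 0b1001 == ~B & A; the bits
	     of A & B are a subset of A's bits, so the subtraction
	     just clears them.  */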
10851
10852 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10853 any power of 2 minus 1. */
10854 if (TREE_CODE (arg0) == BIT_AND_EXPR
10855 && TREE_CODE (arg1) == BIT_AND_EXPR
10856 && operand_equal_p (TREE_OPERAND (arg0, 0),
10857 TREE_OPERAND (arg1, 0), 0))
10858 {
10859 tree mask0 = TREE_OPERAND (arg0, 1);
10860 tree mask1 = TREE_OPERAND (arg1, 1);
10861 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10862
10863 if (operand_equal_p (tem, mask1, 0))
10864 {
10865 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10866 TREE_OPERAND (arg0, 0), mask1);
10867 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10868 }
10869 }
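	  /* Bitwise illustration: for A == 0b1011 and B == 0b0011
	     (a power of 2 minus 1), (A & ~B) - (A & B) == 8 - 3 == 5,
	     and (A ^ B) - B == 8 - 3 == 5 as well.  */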
10870 }
10871
10872 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10873 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10874 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10875
10876 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10877 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10878 (-ARG1 + ARG0) reduces to -ARG1. */
10879 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10880 return negate_expr (fold_convert_loc (loc, type, arg1));
10881
10882 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10883 __complex__ ( x, -y ). This is not the same for SNaNs or if
10884 signed zeros are involved. */
10885 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10886 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10887 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10888 {
10889 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10890 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10891 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10892 bool arg0rz = false, arg0iz = false;
10893 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10894 || (arg0i && (arg0iz = real_zerop (arg0i))))
10895 {
10896 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10897 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10898 if (arg0rz && arg1i && real_zerop (arg1i))
10899 {
10900 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10901 arg1r ? arg1r
10902 : build1 (REALPART_EXPR, rtype, arg1));
10903 tree ip = arg0i ? arg0i
10904 : build1 (IMAGPART_EXPR, rtype, arg0);
10905 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10906 }
10907 else if (arg0iz && arg1r && real_zerop (arg1r))
10908 {
10909 tree rp = arg0r ? arg0r
10910 : build1 (REALPART_EXPR, rtype, arg0);
10911 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10912 arg1i ? arg1i
10913 : build1 (IMAGPART_EXPR, rtype, arg1));
10914 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10915 }
10916 }
10917 }
10918
10919 /* Fold &x - &x. This can happen from &x.foo - &x.
10920 This is unsafe for certain floats even in non-IEEE formats.
10921 In IEEE, it is unsafe because it does wrong for NaNs.
10922 Also note that operand_equal_p is always false if an operand
10923 is volatile. */
10924
10925 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10926 && operand_equal_p (arg0, arg1, 0))
10927 return build_zero_cst (type);
10928
10929 /* A - B -> A + (-B) if B is easily negatable. */
10930 if (negate_expr_p (arg1)
10931 && ((FLOAT_TYPE_P (type)
10932 /* Avoid this transformation if B is a positive REAL_CST. */
10933 && (TREE_CODE (arg1) != REAL_CST
10934 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10935 || INTEGRAL_TYPE_P (type)))
10936 return fold_build2_loc (loc, PLUS_EXPR, type,
10937 fold_convert_loc (loc, type, arg0),
10938 fold_convert_loc (loc, type,
10939 negate_expr (arg1)));
10940
10941 /* Try folding difference of addresses. */
10942 {
10943 HOST_WIDE_INT diff;
10944
10945 if ((TREE_CODE (arg0) == ADDR_EXPR
10946 || TREE_CODE (arg1) == ADDR_EXPR)
10947 && ptr_difference_const (arg0, arg1, &diff))
10948 return build_int_cst_type (type, diff);
10949 }
10950
10951 /* Fold &a[i] - &a[j] to i-j. */
10952 if (TREE_CODE (arg0) == ADDR_EXPR
10953 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10954 && TREE_CODE (arg1) == ADDR_EXPR
10955 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10956 {
10957 tree tem = fold_addr_of_array_ref_difference (loc, type,
10958 TREE_OPERAND (arg0, 0),
10959 TREE_OPERAND (arg1, 0));
10960 if (tem)
10961 return tem;
10962 }
10963
10964 if (FLOAT_TYPE_P (type)
10965 && flag_unsafe_math_optimizations
10966 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10967 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10968 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10969 return tem;
10970
10971 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10972 one. Make sure the type is not saturating and has the signedness of
10973 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10974 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10975 if ((TREE_CODE (arg0) == MULT_EXPR
10976 || TREE_CODE (arg1) == MULT_EXPR)
10977 && !TYPE_SATURATING (type)
10978 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10979 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10980 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10981 {
10982 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10983 if (tem)
10984 return tem;
10985 }
10986
10987 goto associate;
10988
10989 case MULT_EXPR:
10990 /* (-A) * (-B) -> A * B */
10991 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10992 return fold_build2_loc (loc, MULT_EXPR, type,
10993 fold_convert_loc (loc, type,
10994 TREE_OPERAND (arg0, 0)),
10995 fold_convert_loc (loc, type,
10996 negate_expr (arg1)));
10997 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10998 return fold_build2_loc (loc, MULT_EXPR, type,
10999 fold_convert_loc (loc, type,
11000 negate_expr (arg0)),
11001 fold_convert_loc (loc, type,
11002 TREE_OPERAND (arg1, 0)));
11003
11004 if (! FLOAT_TYPE_P (type))
11005 {
11006 if (integer_zerop (arg1))
11007 return omit_one_operand_loc (loc, type, arg1, arg0);
11008 if (integer_onep (arg1))
11009 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11010 /* Transform x * -1 into -x. Make sure to do the negation
11011 on the original operand with conversions not stripped
11012 because we can only strip non-sign-changing conversions. */
11013 if (integer_minus_onep (arg1))
11014 return fold_convert_loc (loc, type, negate_expr (op0));
11015 /* Transform x * -C into -x * C if x is easily negatable. */
11016 if (TREE_CODE (arg1) == INTEGER_CST
11017 && tree_int_cst_sgn (arg1) == -1
11018 && negate_expr_p (arg0)
11019 && (tem = negate_expr (arg1)) != arg1
11020 && !TREE_OVERFLOW (tem))
11021 return fold_build2_loc (loc, MULT_EXPR, type,
11022 fold_convert_loc (loc, type,
11023 negate_expr (arg0)),
11024 tem);
11025
11026 /* (a * (1 << b)) is (a << b) */
11027 if (TREE_CODE (arg1) == LSHIFT_EXPR
11028 && integer_onep (TREE_OPERAND (arg1, 0)))
11029 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11030 TREE_OPERAND (arg1, 1));
11031 if (TREE_CODE (arg0) == LSHIFT_EXPR
11032 && integer_onep (TREE_OPERAND (arg0, 0)))
11033 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11034 TREE_OPERAND (arg0, 1));
11035
11036 /* (A + A) * C -> A * 2 * C */
11037 if (TREE_CODE (arg0) == PLUS_EXPR
11038 && TREE_CODE (arg1) == INTEGER_CST
11039 && operand_equal_p (TREE_OPERAND (arg0, 0),
11040 TREE_OPERAND (arg0, 1), 0))
11041 return fold_build2_loc (loc, MULT_EXPR, type,
11042 omit_one_operand_loc (loc, type,
11043 TREE_OPERAND (arg0, 0),
11044 TREE_OPERAND (arg0, 1)),
11045 fold_build2_loc (loc, MULT_EXPR, type,
11046 build_int_cst (type, 2), arg1));
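	  /* E.g. (x + x) * 5 folds to x * 10.  */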
11047
11048 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11049 sign-changing only. */
11050 if (TREE_CODE (arg1) == INTEGER_CST
11051 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11052 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11053 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
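	  /* E.g. (unsigned) (x /[ex] 4) * 4 folds to (unsigned) x;
	     /[ex] asserts the division is exact, so multiplying by
	     the same constant just undoes it.  */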
11054
11055 strict_overflow_p = false;
11056 if (TREE_CODE (arg1) == INTEGER_CST
11057 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11058 &strict_overflow_p)))
11059 {
11060 if (strict_overflow_p)
11061 fold_overflow_warning (("assuming signed overflow does not "
11062 "occur when simplifying "
11063 "multiplication"),
11064 WARN_STRICT_OVERFLOW_MISC);
11065 return fold_convert_loc (loc, type, tem);
11066 }
11067
11068 /* Optimize z * conj(z) for integer complex numbers. */
11069 if (TREE_CODE (arg0) == CONJ_EXPR
11070 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11071 return fold_mult_zconjz (loc, type, arg1);
11072 if (TREE_CODE (arg1) == CONJ_EXPR
11073 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11074 return fold_mult_zconjz (loc, type, arg0);
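	  /* E.g. for z == 3 + 4i, z * conj (z) folds to
	     COMPLEX_EXPR <25, 0>, since (3+4i)(3-4i) == 9 + 16.  */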
11075 }
11076 else
11077 {
11078 /* Maybe fold x * 0 to 0. The expressions aren't the same
11079 when x is NaN, since x * 0 is also NaN. Nor are they the
11080 same in modes with signed zeros, since multiplying a
11081 negative value by 0 gives -0, not +0. */
11082 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11083 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11084 && real_zerop (arg1))
11085 return omit_one_operand_loc (loc, type, arg1, arg0);
11086 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
11087 Likewise for complex arithmetic with signed zeros. */
11088 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11089 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11090 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11091 && real_onep (arg1))
11092 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11093
11094 /* Transform x * -1.0 into -x. */
11095 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11096 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11097 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11098 && real_minus_onep (arg1))
11099 return fold_convert_loc (loc, type, negate_expr (arg0));
11100
11101 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11102 the result for floating point types due to rounding, so it is applied
11103 only if -fassociative-math was specified. */
11104 if (flag_associative_math
11105 && TREE_CODE (arg0) == RDIV_EXPR
11106 && TREE_CODE (arg1) == REAL_CST
11107 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11108 {
11109 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11110 arg1);
11111 if (tem)
11112 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11113 TREE_OPERAND (arg0, 1));
11114 }
11115
11116 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11117 if (operand_equal_p (arg0, arg1, 0))
11118 {
11119 tree tem = fold_strip_sign_ops (arg0);
11120 if (tem != NULL_TREE)
11121 {
11122 tem = fold_convert_loc (loc, type, tem);
11123 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11124 }
11125 }
11126
11127 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11128 This is not the same for NaNs or if signed zeros are
11129 involved. */
11130 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11131 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11132 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11133 && TREE_CODE (arg1) == COMPLEX_CST
11134 && real_zerop (TREE_REALPART (arg1)))
11135 {
11136 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11137 if (real_onep (TREE_IMAGPART (arg1)))
11138 return
11139 fold_build2_loc (loc, COMPLEX_EXPR, type,
11140 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11141 rtype, arg0)),
11142 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11143 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11144 return
11145 fold_build2_loc (loc, COMPLEX_EXPR, type,
11146 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11147 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11148 rtype, arg0)));
11149 }
11150
11151 /* Optimize z * conj(z) for floating point complex numbers.
11152 Guarded by flag_unsafe_math_optimizations as non-finite
11153 imaginary components don't produce scalar results. */
11154 if (flag_unsafe_math_optimizations
11155 && TREE_CODE (arg0) == CONJ_EXPR
11156 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11157 return fold_mult_zconjz (loc, type, arg1);
11158 if (flag_unsafe_math_optimizations
11159 && TREE_CODE (arg1) == CONJ_EXPR
11160 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11161 return fold_mult_zconjz (loc, type, arg0);
11162
11163 if (flag_unsafe_math_optimizations)
11164 {
11165 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11166 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11167
11168 /* Optimizations of root(...)*root(...). */
11169 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11170 {
11171 tree rootfn, arg;
11172 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11173 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11174
11175 /* Optimize sqrt(x)*sqrt(x) as x. */
11176 if (BUILTIN_SQRT_P (fcode0)
11177 && operand_equal_p (arg00, arg10, 0)
11178 && ! HONOR_SNANS (TYPE_MODE (type)))
11179 return arg00;
11180
11181 /* Optimize root(x)*root(y) as root(x*y). */
11182 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11183 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11184 return build_call_expr_loc (loc, rootfn, 1, arg);
11185 }
11186
11187 /* Optimize expN(x)*expN(y) as expN(x+y). */
11188 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11189 {
11190 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11191 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11192 CALL_EXPR_ARG (arg0, 0),
11193 CALL_EXPR_ARG (arg1, 0));
11194 return build_call_expr_loc (loc, expfn, 1, arg);
11195 }
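	      /* E.g. exp (a) * exp (b) becomes exp (a + b).  This is
		 only done under -funsafe-math-optimizations, as the
		 rounding of a + b can differ from that of the
		 separate calls.  */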
11196
11197 /* Optimizations of pow(...)*pow(...). */
11198 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11199 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11200 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11201 {
11202 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11203 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11204 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11205 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11206
11207 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11208 if (operand_equal_p (arg01, arg11, 0))
11209 {
11210 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11211 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11212 arg00, arg10);
11213 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11214 }
11215
11216 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11217 if (operand_equal_p (arg00, arg10, 0))
11218 {
11219 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11220 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11221 arg01, arg11);
11222 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11223 }
11224 }
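	      /* E.g. pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0),
		 and pow (x, c) * pow (y, c) becomes pow (x * y, c).  */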
11225
11226 /* Optimize tan(x)*cos(x) as sin(x). */
11227 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11228 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11229 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11230 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11231 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11232 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11233 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11234 CALL_EXPR_ARG (arg1, 0), 0))
11235 {
11236 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11237
11238 if (sinfn != NULL_TREE)
11239 return build_call_expr_loc (loc, sinfn, 1,
11240 CALL_EXPR_ARG (arg0, 0));
11241 }
11242
11243 /* Optimize x*pow(x,c) as pow(x,c+1). */
11244 if (fcode1 == BUILT_IN_POW
11245 || fcode1 == BUILT_IN_POWF
11246 || fcode1 == BUILT_IN_POWL)
11247 {
11248 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11249 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11250 if (TREE_CODE (arg11) == REAL_CST
11251 && !TREE_OVERFLOW (arg11)
11252 && operand_equal_p (arg0, arg10, 0))
11253 {
11254 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11255 REAL_VALUE_TYPE c;
11256 tree arg;
11257
11258 c = TREE_REAL_CST (arg11);
11259 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11260 arg = build_real (type, c);
11261 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11262 }
11263 }
11264
11265 /* Optimize pow(x,c)*x as pow(x,c+1). */
11266 if (fcode0 == BUILT_IN_POW
11267 || fcode0 == BUILT_IN_POWF
11268 || fcode0 == BUILT_IN_POWL)
11269 {
11270 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11271 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11272 if (TREE_CODE (arg01) == REAL_CST
11273 && !TREE_OVERFLOW (arg01)
11274 && operand_equal_p (arg1, arg00, 0))
11275 {
11276 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11277 REAL_VALUE_TYPE c;
11278 tree arg;
11279
11280 c = TREE_REAL_CST (arg01);
11281 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11282 arg = build_real (type, c);
11283 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11284 }
11285 }
11286
11287 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11288 if (!in_gimple_form
11289 && optimize
11290 && operand_equal_p (arg0, arg1, 0))
11291 {
11292 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11293
11294 if (powfn)
11295 {
11296 tree arg = build_real (type, dconst2);
11297 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11298 }
11299 }
11300 }
11301 }
11302 goto associate;
11303
11304 case BIT_IOR_EXPR:
11305 bit_ior:
11306 if (integer_all_onesp (arg1))
11307 return omit_one_operand_loc (loc, type, arg1, arg0);
11308 if (integer_zerop (arg1))
11309 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11310 if (operand_equal_p (arg0, arg1, 0))
11311 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11312
11313 /* ~X | X is -1. */
11314 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11315 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11316 {
11317 t1 = build_zero_cst (type);
11318 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11319 return omit_one_operand_loc (loc, type, t1, arg1);
11320 }
11321
11322 /* X | ~X is -1. */
11323 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11324 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11325 {
11326 t1 = build_zero_cst (type);
11327 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11328 return omit_one_operand_loc (loc, type, t1, arg0);
11329 }
11330
11331 /* Canonicalize (X & C1) | C2. */
11332 if (TREE_CODE (arg0) == BIT_AND_EXPR
11333 && TREE_CODE (arg1) == INTEGER_CST
11334 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11335 {
11336 int width = TYPE_PRECISION (type), w;
11337 wide_int c1 = TREE_OPERAND (arg0, 1);
11338 wide_int c2 = arg1;
11339
11340 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11341 if ((c1 & c2) == c1)
11342 return omit_one_operand_loc (loc, type, arg1,
11343 TREE_OPERAND (arg0, 0));
11344
11345 wide_int msk = wi::mask (width, false,
11346 TYPE_PRECISION (TREE_TYPE (arg1)));
11347
11348 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11349 if (msk.and_not (c1 | c2) == 0)
11350 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11351 TREE_OPERAND (arg0, 0), arg1);
11352
11353 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11354 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11355 mode which allows further optimizations. */
11356 c1 &= msk;
11357 c2 &= msk;
11358 wide_int c3 = c1.and_not (c2);
11359 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11360 {
11361 wide_int mask = wi::mask (w, false,
11362 TYPE_PRECISION (type));
11363 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11364 {
11365 c3 = mask;
11366 break;
11367 }
11368 }
11369
11370 if (c3 != c1)
11371 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11372 fold_build2_loc (loc, BIT_AND_EXPR, type,
11373 TREE_OPERAND (arg0, 0),
11374 wide_int_to_tree (type,
11375 c3)),
11376 arg1);
11377 }
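      /* Worked example: (x & 0xF0) | 0x3C becomes (x & 0xC0) | 0x3C,
	 since 0xF0 & ~0x3C == 0xC0; and (x & 0x0C) | 0x3C becomes
	 simply 0x3C, since 0x0C is already covered by 0x3C.  */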
11378
11379 /* (X & Y) | Y is (X, Y). */
11380 if (TREE_CODE (arg0) == BIT_AND_EXPR
11381 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11382 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11383 /* (X & Y) | X is (Y, X). */
11384 if (TREE_CODE (arg0) == BIT_AND_EXPR
11385 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11386 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11387 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11388 /* X | (X & Y) is (Y, X). */
11389 if (TREE_CODE (arg1) == BIT_AND_EXPR
11390 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11391 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11392 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11393 /* X | (Y & X) is (Y, X). */
11394 if (TREE_CODE (arg1) == BIT_AND_EXPR
11395 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11396 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11397 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11398
11399 /* (X & ~Y) | (~X & Y) is X ^ Y */
11400 if (TREE_CODE (arg0) == BIT_AND_EXPR
11401 && TREE_CODE (arg1) == BIT_AND_EXPR)
11402 {
11403 tree a0, a1, l0, l1, n0, n1;
11404
11405 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11406 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11407
11408 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11409 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11410
11411 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11412 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11413
11414 if ((operand_equal_p (n0, a0, 0)
11415 && operand_equal_p (n1, a1, 0))
11416 || (operand_equal_p (n0, a1, 0)
11417 && operand_equal_p (n1, a0, 0)))
11418 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11419 }
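      /* Bitwise illustration: for X == 0b1100 and Y == 0b1010,
	 (X & ~Y) | (~X & Y) == 0b0100 | 0b0010 == 0b0110 == X ^ Y.  */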
11420
11421 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11422 if (t1 != NULL_TREE)
11423 return t1;
11424
11425 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11426
11427 This results in more efficient code for machines without a NAND
11428 instruction. Combine will canonicalize to the first form
11429 which will allow use of NAND instructions provided by the
11430 backend if they exist. */
11431 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11432 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11433 {
11434 return
11435 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11436 build2 (BIT_AND_EXPR, type,
11437 fold_convert_loc (loc, type,
11438 TREE_OPERAND (arg0, 0)),
11439 fold_convert_loc (loc, type,
11440 TREE_OPERAND (arg1, 0))));
11441 }
11442
11443 /* See if this can be simplified into a rotate first. If that
11444 is unsuccessful continue in the association code. */
11445 goto bit_rotate;
11446
11447 case BIT_XOR_EXPR:
11448 if (integer_zerop (arg1))
11449 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11450 if (integer_all_onesp (arg1))
11451 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11452 if (operand_equal_p (arg0, arg1, 0))
11453 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11454
11455 /* ~X ^ X is -1. */
11456 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11457 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11458 {
11459 t1 = build_zero_cst (type);
11460 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11461 return omit_one_operand_loc (loc, type, t1, arg1);
11462 }
11463
11464 /* X ^ ~X is -1. */
11465 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11466 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11467 {
11468 t1 = build_zero_cst (type);
11469 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11470 return omit_one_operand_loc (loc, type, t1, arg0);
11471 }
11472
11473 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11474 with a constant, and the two constants have no bits in common,
11475 we should treat this as a BIT_IOR_EXPR since this may produce more
11476 simplifications. */
11477 if (TREE_CODE (arg0) == BIT_AND_EXPR
11478 && TREE_CODE (arg1) == BIT_AND_EXPR
11479 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11480 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11481 && integer_zerop (const_binop (BIT_AND_EXPR,
11482 TREE_OPERAND (arg0, 1),
11483 TREE_OPERAND (arg1, 1))))
11484 {
11485 code = BIT_IOR_EXPR;
11486 goto bit_ior;
11487 }
11488
11489 /* (X | Y) ^ X -> Y & ~X */
11490 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11491 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11492 {
11493 tree t2 = TREE_OPERAND (arg0, 1);
11494 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11495 arg1);
11496 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11497 fold_convert_loc (loc, type, t2),
11498 fold_convert_loc (loc, type, t1));
11499 return t1;
11500 }
11501
11502 /* (Y | X) ^ X -> Y & ~X */
11503 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11504 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11505 {
11506 tree t2 = TREE_OPERAND (arg0, 0);
11507 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11508 arg1);
11509 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11510 fold_convert_loc (loc, type, t2),
11511 fold_convert_loc (loc, type, t1));
11512 return t1;
11513 }
11514
11515 /* X ^ (X | Y) -> Y & ~X */
11516 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11517 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11518 {
11519 tree t2 = TREE_OPERAND (arg1, 1);
11520 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11521 arg0);
11522 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11523 fold_convert_loc (loc, type, t2),
11524 fold_convert_loc (loc, type, t1));
11525 return t1;
11526 }
11527
11528 /* X ^ (Y | X) -> Y & ~X */
11529 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11530 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11531 {
11532 tree t2 = TREE_OPERAND (arg1, 0);
11533 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11534 arg0);
11535 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11536 fold_convert_loc (loc, type, t2),
11537 fold_convert_loc (loc, type, t1));
11538 return t1;
11539 }
11540
11541 /* Convert ~X ^ ~Y to X ^ Y. */
11542 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11543 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11544 return fold_build2_loc (loc, code, type,
11545 fold_convert_loc (loc, type,
11546 TREE_OPERAND (arg0, 0)),
11547 fold_convert_loc (loc, type,
11548 TREE_OPERAND (arg1, 0)));
11549
11550 /* Convert ~X ^ C to X ^ ~C. */
11551 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11552 && TREE_CODE (arg1) == INTEGER_CST)
11553 return fold_build2_loc (loc, code, type,
11554 fold_convert_loc (loc, type,
11555 TREE_OPERAND (arg0, 0)),
11556 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11557
11558 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11559 if (TREE_CODE (arg0) == BIT_AND_EXPR
11560 && integer_onep (TREE_OPERAND (arg0, 1))
11561 && integer_onep (arg1))
11562 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11563 build_zero_cst (TREE_TYPE (arg0)));
11564
11565 /* Fold (X & Y) ^ Y as ~X & Y. */
11566 if (TREE_CODE (arg0) == BIT_AND_EXPR
11567 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11568 {
11569 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11570 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11571 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11572 fold_convert_loc (loc, type, arg1));
11573 }
11574 /* Fold (X & Y) ^ X as ~Y & X. */
11575 if (TREE_CODE (arg0) == BIT_AND_EXPR
11576 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11577 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11578 {
11579 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11580 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11581 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11582 fold_convert_loc (loc, type, arg1));
11583 }
11584 /* Fold X ^ (X & Y) as X & ~Y. */
11585 if (TREE_CODE (arg1) == BIT_AND_EXPR
11586 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11587 {
11588 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11589 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11590 fold_convert_loc (loc, type, arg0),
11591 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11592 }
11593 /* Fold X ^ (Y & X) as ~Y & X. */
11594 if (TREE_CODE (arg1) == BIT_AND_EXPR
11595 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11596 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11597 {
11598 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11599 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11600 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11601 fold_convert_loc (loc, type, arg0));
11602 }
11603
11604 /* See if this can be simplified into a rotate first. If that
11605 is unsuccessful continue in the association code. */
11606 goto bit_rotate;
11607
11608 case BIT_AND_EXPR:
11609 if (integer_all_onesp (arg1))
11610 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11611 if (integer_zerop (arg1))
11612 return omit_one_operand_loc (loc, type, arg1, arg0);
11613 if (operand_equal_p (arg0, arg1, 0))
11614 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11615
11616 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11617 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11618 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11619 || (TREE_CODE (arg0) == EQ_EXPR
11620 && integer_zerop (TREE_OPERAND (arg0, 1))))
11621 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11622 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11623
11624 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11625 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11626 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11627 || (TREE_CODE (arg1) == EQ_EXPR
11628 && integer_zerop (TREE_OPERAND (arg1, 1))))
11629 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11630 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11631
11632 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11633 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11634 && TREE_CODE (arg1) == INTEGER_CST
11635 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11636 {
11637 tree tmp1 = fold_convert_loc (loc, type, arg1);
11638 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11639 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11640 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11641 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11642 return
11643 fold_convert_loc (loc, type,
11644 fold_build2_loc (loc, BIT_IOR_EXPR,
11645 type, tmp2, tmp3));
11646 }
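      /* Worked example: (x | 0x0F) & 0x3C distributes to
	 (x & 0x3C) | (0x0F & 0x3C), i.e. (x & 0x3C) | 0x0C.  */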
11647
11648 /* (X | Y) & Y is (X, Y). */
11649 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11650 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11651 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11652 /* (X | Y) & X is (Y, X). */
11653 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11654 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11655 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11656 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11657 /* X & (X | Y) is (Y, X). */
11658 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11659 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11660 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11661 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11662 /* X & (Y | X) is (Y, X). */
11663 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11664 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11665 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11666 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11667
11668 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11669 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11670 && integer_onep (TREE_OPERAND (arg0, 1))
11671 && integer_onep (arg1))
11672 {
11673 tree tem2;
11674 tem = TREE_OPERAND (arg0, 0);
11675 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11676 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11677 tem, tem2);
11678 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11679 build_zero_cst (TREE_TYPE (tem)));
11680 }
11681 /* Fold ~X & 1 as (X & 1) == 0. */
11682 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11683 && integer_onep (arg1))
11684 {
11685 tree tem2;
11686 tem = TREE_OPERAND (arg0, 0);
11687 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11688 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11689 tem, tem2);
11690 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11691 build_zero_cst (TREE_TYPE (tem)));
11692 }
11693 /* Fold !X & 1 as X == 0. */
11694 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11695 && integer_onep (arg1))
11696 {
11697 tem = TREE_OPERAND (arg0, 0);
11698 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11699 build_zero_cst (TREE_TYPE (tem)));
11700 }
11701
11702 /* Fold (X ^ Y) & Y as ~X & Y. */
11703 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11704 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11705 {
11706 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11707 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11708 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11709 fold_convert_loc (loc, type, arg1));
11710 }
11711 /* Fold (X ^ Y) & X as ~Y & X. */
11712 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11713 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11714 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11715 {
11716 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11717 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11718 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11719 fold_convert_loc (loc, type, arg1));
11720 }
11721 /* Fold X & (X ^ Y) as X & ~Y. */
11722 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11723 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11724 {
11725 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11726 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11727 fold_convert_loc (loc, type, arg0),
11728 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11729 }
11730 /* Fold X & (Y ^ X) as ~Y & X. */
11731 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11732 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11733 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11734 {
11735 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11736 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11737 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11738 fold_convert_loc (loc, type, arg0));
11739 }
11740
11741 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11742 multiple of 1 << CST. */
11743 if (TREE_CODE (arg1) == INTEGER_CST)
11744 {
11745 wide_int cst1 = arg1;
11746 wide_int ncst1 = -cst1;
11747 if ((cst1 & ncst1) == ncst1
11748 && multiple_of_p (type, arg0,
11749 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11750 return fold_convert_loc (loc, type, arg0);
11751 }
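      /* E.g. (x * 8) & -8 folds to x * 8: the mask -8 clears only
	 the low three bits, which are already zero in any multiple
	 of 8.  */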
11752
11753 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11754 bits from CST2. */
11755 if (TREE_CODE (arg1) == INTEGER_CST
11756 && TREE_CODE (arg0) == MULT_EXPR
11757 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11758 {
11759 wide_int warg1 = arg1;
11760 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11761
11762 if (masked == 0)
11763 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11764 arg0, arg1);
11765 else if (masked != warg1)
11766 {
11767 /* Avoid the transform if arg1 is a mask of some
11768 mode which allows further optimizations. */
11769 int pop = wi::popcount (warg1);
11770 if (!(pop >= BITS_PER_UNIT
11771 && exact_log2 (pop) != -1
11772 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11773 return fold_build2_loc (loc, code, type, op0,
11774 wide_int_to_tree (type, masked));
11775 }
11776 }
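/* Illustration: x * 4 always has its two low bits clear, so
   (x * 4) & 3 folds to 0 (while preserving any side effects of x), and
   (x * 4) & 5 folds to (x * 4) & 4, dropping the known-zero low bit
   from the mask.  */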
11777
11778 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11779 ((A & N) + B) & M -> (A + B) & M
11780 Similarly if (N & M) == 0,
11781 ((A | N) + B) & M -> (A + B) & M
11782 and for - instead of + (or unary - instead of +)
11783 and/or ^ instead of |.
11784 If B is constant and (B & M) == 0, fold into A & M. */
11785 if (TREE_CODE (arg1) == INTEGER_CST)
11786 {
11787 wide_int cst1 = arg1;
11788 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11789 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11790 && (TREE_CODE (arg0) == PLUS_EXPR
11791 || TREE_CODE (arg0) == MINUS_EXPR
11792 || TREE_CODE (arg0) == NEGATE_EXPR)
11793 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11794 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11795 {
11796 tree pmop[2];
11797 int which = 0;
11798 wide_int cst0;
11799
11800 /* Now we know that arg0 is (C + D) or (C - D) or
11801 -C and arg1 (M) is (1LL << cst) - 1.
11802 Store C into PMOP[0] and D into PMOP[1]. */
11803 pmop[0] = TREE_OPERAND (arg0, 0);
11804 pmop[1] = NULL;
11805 if (TREE_CODE (arg0) != NEGATE_EXPR)
11806 {
11807 pmop[1] = TREE_OPERAND (arg0, 1);
11808 which = 1;
11809 }
11810
11811 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11812 which = -1;
11813
11814 for (; which >= 0; which--)
11815 switch (TREE_CODE (pmop[which]))
11816 {
11817 case BIT_AND_EXPR:
11818 case BIT_IOR_EXPR:
11819 case BIT_XOR_EXPR:
11820 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11821 != INTEGER_CST)
11822 break;
11823 cst0 = TREE_OPERAND (pmop[which], 1);
11824 cst0 &= cst1;
11825 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11826 {
11827 if (cst0 != cst1)
11828 break;
11829 }
11830 else if (cst0 != 0)
11831 break;
11832 /* If C or D is of the form (A & N) where
11833 (N & M) == M, or of the form (A | N) or
11834 (A ^ N) where (N & M) == 0, replace it with A. */
11835 pmop[which] = TREE_OPERAND (pmop[which], 0);
11836 break;
11837 case INTEGER_CST:
11838 /* If C or D is a constant N where (N & M) == 0, it can be
11839 omitted (assumed 0). */
11840 if ((TREE_CODE (arg0) == PLUS_EXPR
11841 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11842 && (cst1 & pmop[which]) == 0)
11843 pmop[which] = NULL;
11844 break;
11845 default:
11846 break;
11847 }
11848
11849 /* Only build anything new if we optimized one or both arguments
11850 above. */
11851 if (pmop[0] != TREE_OPERAND (arg0, 0)
11852 || (TREE_CODE (arg0) != NEGATE_EXPR
11853 && pmop[1] != TREE_OPERAND (arg0, 1)))
11854 {
11855 tree utype = TREE_TYPE (arg0);
11856 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11857 {
11858 /* Perform the operations in a type that has defined
11859 overflow behavior. */
11860 utype = unsigned_type_for (TREE_TYPE (arg0));
11861 if (pmop[0] != NULL)
11862 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11863 if (pmop[1] != NULL)
11864 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11865 }
11866
11867 if (TREE_CODE (arg0) == NEGATE_EXPR)
11868 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11869 else if (TREE_CODE (arg0) == PLUS_EXPR)
11870 {
11871 if (pmop[0] != NULL && pmop[1] != NULL)
11872 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11873 pmop[0], pmop[1]);
11874 else if (pmop[0] != NULL)
11875 tem = pmop[0];
11876 else if (pmop[1] != NULL)
11877 tem = pmop[1];
11878 else
11879 return build_int_cst (type, 0);
11880 }
11881 else if (pmop[0] == NULL)
11882 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11883 else
11884 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11885 pmop[0], pmop[1]);
11886 /* TEM is now the new binary +, - or unary - replacement. */
11887 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11888 fold_convert_loc (loc, utype, arg1));
11889 return fold_convert_loc (loc, type, tem);
11890 }
11891 }
11892 }
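/* Illustration, with M == 7 == (1LL << 3) - 1:
     ((a & 15) + b) & 7  -->  (a + b) & 7   since (15 & 7) == 7,
     ((a | 8) + b) & 7   -->  (a + b) & 7   since (8 & 7) == 0,
     (a + 16) & 7        -->  a & 7         since (16 & 7) == 0.
   When signed overflow does not wrap, the inner arithmetic is first
   rewritten in the corresponding unsigned type.  */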
11893
11894 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11895 if (t1 != NULL_TREE)
11896 return t1;
11897 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11898 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11899 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11900 {
11901 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11902
11903 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11904 if (mask == -1)
11905 return
11906 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11907 }
11908
11909 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11910
11911 This results in more efficient code for machines without a NOR
11912 instruction. Combine will canonicalize to the first form
11913 which will allow use of NOR instructions provided by the
11914 backend if they exist. */
11915 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11916 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11917 {
11918 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11919 build2 (BIT_IOR_EXPR, type,
11920 fold_convert_loc (loc, type,
11921 TREE_OPERAND (arg0, 0)),
11922 fold_convert_loc (loc, type,
11923 TREE_OPERAND (arg1, 0))));
11924 }
11925
11926 /* If arg0 is derived from the address of an object or function, we may
11927 be able to fold this expression using the object or function's
11928 alignment. */
11929 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11930 {
11931 unsigned HOST_WIDE_INT modulus, residue;
11932 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11933
11934 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11935 integer_onep (arg1));
11936
11937 /* This works because modulus is a power of 2. If this weren't the
11938 case, we'd have to replace it by its greatest power-of-2
11939 divisor: modulus & -modulus. */
11940 if (low < modulus)
11941 return build_int_cst (type, residue & low);
11942 }
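/* Illustration: if arg0 is the address of an object known to be 8-byte
   aligned, the modulus is 8 and the residue 0, so ANDing that address
   with 7 folds to the constant 0.  */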
11943
11944 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11945 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11946 if the new mask might be further optimized. */
11947 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11948 || TREE_CODE (arg0) == RSHIFT_EXPR)
11949 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11950 && TREE_CODE (arg1) == INTEGER_CST
11951 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11952 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11953 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11954 < TYPE_PRECISION (TREE_TYPE (arg0))))
11955 {
11956 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11957 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11958 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11959 tree shift_type = TREE_TYPE (arg0);
11960
11961 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11962 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11963 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11964 && TYPE_PRECISION (TREE_TYPE (arg0))
11965 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11966 {
11967 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11968 tree arg00 = TREE_OPERAND (arg0, 0);
11969 /* See if more bits can be proven as zero because of
11970 zero extension. */
11971 if (TREE_CODE (arg00) == NOP_EXPR
11972 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11973 {
11974 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11975 if (TYPE_PRECISION (inner_type)
11976 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11977 && TYPE_PRECISION (inner_type) < prec)
11978 {
11979 prec = TYPE_PRECISION (inner_type);
11980 /* See if we can shorten the right shift. */
11981 if (shiftc < prec)
11982 shift_type = inner_type;
11983 }
11984 }
11985 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11986 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11987 zerobits <<= prec - shiftc;
11988 /* For an arithmetic shift, if the sign bit could be set, zerobits
11989 can actually contain sign bits, so no transformation is
11990 possible, unless MASK masks them all away. In that
11991 case the shift needs to be converted into a logical shift. */
11992 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11993 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11994 {
11995 if ((mask & zerobits) == 0)
11996 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11997 else
11998 zerobits = 0;
11999 }
12000 }
12001
12002 /* ((X << 16) & 0xff00) is (X, 0). */
12003 if ((mask & zerobits) == mask)
12004 return omit_one_operand_loc (loc, type,
12005 build_int_cst (type, 0), arg0);
12006
12007 newmask = mask | zerobits;
12008 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12009 {
12010 /* Only do the transformation if NEWMASK is some integer
12011 mode's mask. */
12012 for (prec = BITS_PER_UNIT;
12013 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12014 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12015 break;
12016 if (prec < HOST_BITS_PER_WIDE_INT
12017 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12018 {
12019 tree newmaskt;
12020
12021 if (shift_type != TREE_TYPE (arg0))
12022 {
12023 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12024 fold_convert_loc (loc, shift_type,
12025 TREE_OPERAND (arg0, 0)),
12026 TREE_OPERAND (arg0, 1));
12027 tem = fold_convert_loc (loc, type, tem);
12028 }
12029 else
12030 tem = op0;
12031 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12032 if (!tree_int_cst_equal (newmaskt, arg1))
12033 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12034 }
12035 }
12036 }
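/* Illustration: (x << 16) & 0xff00 folds to (x, 0), because the shift
   forces the low 16 bits to zero and the mask lies entirely within
   them.  Likewise (x << 4) & 0xfff3 becomes (x << 4) & 0xffff: OR-ing
   the known-zero low bits into the mask yields a 16-bit mode mask,
   which is often cheaper for the backend.  */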
12037
12038 goto associate;
12039
12040 case RDIV_EXPR:
12041 /* Don't touch a floating-point divide by zero unless the mode
12042 of the constant can represent infinity. */
12043 if (TREE_CODE (arg1) == REAL_CST
12044 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12045 && real_zerop (arg1))
12046 return NULL_TREE;
12047
12048 /* Optimize A / A to 1.0 if we don't care about
12049 NaNs or Infinities. Skip the transformation
12050 for non-real operands. */
12051 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12052 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12053 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12054 && operand_equal_p (arg0, arg1, 0))
12055 {
12056 tree r = build_real (TREE_TYPE (arg0), dconst1);
12057
12058 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12059 }
12060
12061 /* The complex version of the above A / A optimization. */
12062 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12063 && operand_equal_p (arg0, arg1, 0))
12064 {
12065 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12066 if (! HONOR_NANS (TYPE_MODE (elem_type))
12067 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12068 {
12069 tree r = build_real (elem_type, dconst1);
12070 /* omit_two_operands will call fold_convert for us. */
12071 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12072 }
12073 }
12074
12075 /* (-A) / (-B) -> A / B */
12076 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12077 return fold_build2_loc (loc, RDIV_EXPR, type,
12078 TREE_OPERAND (arg0, 0),
12079 negate_expr (arg1));
12080 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12081 return fold_build2_loc (loc, RDIV_EXPR, type,
12082 negate_expr (arg0),
12083 TREE_OPERAND (arg1, 0));
12084
12085 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12086 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12087 && real_onep (arg1))
12088 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12089
12090 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12091 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12092 && real_minus_onep (arg1))
12093 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12094 negate_expr (arg0)));
12095
12096 /* If ARG1 is a constant, we can convert this to a multiply by the
12097 reciprocal. This does not have the same rounding properties,
12098 so only do this if -freciprocal-math. We can actually
12099 always safely do it if ARG1 is a power of two, but it's hard to
12100 tell if it is or not in a portable manner. */
12101 if (optimize
12102 && (TREE_CODE (arg1) == REAL_CST
12103 || (TREE_CODE (arg1) == COMPLEX_CST
12104 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12105 || (TREE_CODE (arg1) == VECTOR_CST
12106 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12107 {
12108 if (flag_reciprocal_math
12109 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12110 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12111 /* Find the reciprocal if optimizing and the result is exact.
12112 TODO: Complex reciprocal not implemented. */
12113 if (TREE_CODE (arg1) != COMPLEX_CST)
12114 {
12115 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12116
12117 if (inverse)
12118 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12119 }
12120 }
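/* Illustration: with -freciprocal-math, x / 2.0 folds to x * 0.5.
   Even without that flag, the exact_inverse path applies when the
   reciprocal is exactly representable, e.g. for powers of two, so
   x / 4.0 can become x * 0.25.  */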
12121 /* Convert A/B/C to A/(B*C). */
12122 if (flag_reciprocal_math
12123 && TREE_CODE (arg0) == RDIV_EXPR)
12124 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12125 fold_build2_loc (loc, MULT_EXPR, type,
12126 TREE_OPERAND (arg0, 1), arg1));
12127
12128 /* Convert A/(B/C) to (A/B)*C. */
12129 if (flag_reciprocal_math
12130 && TREE_CODE (arg1) == RDIV_EXPR)
12131 return fold_build2_loc (loc, MULT_EXPR, type,
12132 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12133 TREE_OPERAND (arg1, 0)),
12134 TREE_OPERAND (arg1, 1));
12135
12136 /* Convert C1/(X*C2) into (C1/C2)/X. */
12137 if (flag_reciprocal_math
12138 && TREE_CODE (arg1) == MULT_EXPR
12139 && TREE_CODE (arg0) == REAL_CST
12140 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12141 {
12142 tree tem = const_binop (RDIV_EXPR, arg0,
12143 TREE_OPERAND (arg1, 1));
12144 if (tem)
12145 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12146 TREE_OPERAND (arg1, 0));
12147 }
12148
12149 if (flag_unsafe_math_optimizations)
12150 {
12151 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12152 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12153
12154 /* Optimize sin(x)/cos(x) as tan(x). */
12155 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12156 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12157 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12158 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12159 CALL_EXPR_ARG (arg1, 0), 0))
12160 {
12161 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12162
12163 if (tanfn != NULL_TREE)
12164 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12165 }
12166
12167 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12168 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12169 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12170 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12171 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12172 CALL_EXPR_ARG (arg1, 0), 0))
12173 {
12174 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12175
12176 if (tanfn != NULL_TREE)
12177 {
12178 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12179 CALL_EXPR_ARG (arg0, 0));
12180 return fold_build2_loc (loc, RDIV_EXPR, type,
12181 build_real (type, dconst1), tmp);
12182 }
12183 }
12184
12185 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12186 NaNs or Infinities. */
12187 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12188 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12189 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12190 {
12191 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12192 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12193
12194 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12195 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12196 && operand_equal_p (arg00, arg01, 0))
12197 {
12198 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12199
12200 if (cosfn != NULL_TREE)
12201 return build_call_expr_loc (loc, cosfn, 1, arg00);
12202 }
12203 }
12204
12205 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12206 NaNs or Infinities. */
12207 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12208 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12209 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12210 {
12211 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12212 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12213
12214 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12215 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12216 && operand_equal_p (arg00, arg01, 0))
12217 {
12218 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12219
12220 if (cosfn != NULL_TREE)
12221 {
12222 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12223 return fold_build2_loc (loc, RDIV_EXPR, type,
12224 build_real (type, dconst1),
12225 tmp);
12226 }
12227 }
12228 }
12229
12230 /* Optimize pow(x,c)/x as pow(x,c-1). */
12231 if (fcode0 == BUILT_IN_POW
12232 || fcode0 == BUILT_IN_POWF
12233 || fcode0 == BUILT_IN_POWL)
12234 {
12235 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12236 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12237 if (TREE_CODE (arg01) == REAL_CST
12238 && !TREE_OVERFLOW (arg01)
12239 && operand_equal_p (arg1, arg00, 0))
12240 {
12241 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12242 REAL_VALUE_TYPE c;
12243 tree arg;
12244
12245 c = TREE_REAL_CST (arg01);
12246 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12247 arg = build_real (type, c);
12248 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12249 }
12250 }
12251
12252 /* Optimize a/root(b/c) into a*root(c/b). */
12253 if (BUILTIN_ROOT_P (fcode1))
12254 {
12255 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12256
12257 if (TREE_CODE (rootarg) == RDIV_EXPR)
12258 {
12259 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12260 tree b = TREE_OPERAND (rootarg, 0);
12261 tree c = TREE_OPERAND (rootarg, 1);
12262
12263 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12264
12265 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12266 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12267 }
12268 }
12269
12270 /* Optimize x/expN(y) into x*expN(-y). */
12271 if (BUILTIN_EXPONENT_P (fcode1))
12272 {
12273 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12274 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12275 arg1 = build_call_expr_loc (loc,
12276 expfn, 1,
12277 fold_convert_loc (loc, type, arg));
12278 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12279 }
12280
12281 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12282 if (fcode1 == BUILT_IN_POW
12283 || fcode1 == BUILT_IN_POWF
12284 || fcode1 == BUILT_IN_POWL)
12285 {
12286 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12287 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12288 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12289 tree neg11 = fold_convert_loc (loc, type,
12290 negate_expr (arg11));
12291 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12292 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12293 }
12294 }
12295 return NULL_TREE;
12296
12297 case TRUNC_DIV_EXPR:
12298 /* Optimize (X & (-A)) / A where A is a power of 2,
12299 to X >> log2(A) */
12300 if (TREE_CODE (arg0) == BIT_AND_EXPR
12301 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12302 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12303 {
12304 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12305 arg1, TREE_OPERAND (arg0, 1));
12306 if (sum && integer_zerop (sum)) {
12307 tree pow2 = build_int_cst (integer_type_node,
12308 wi::exact_log2 (arg1));
12309 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12310 TREE_OPERAND (arg0, 0), pow2);
12311 }
12312 }
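/* Illustration: (x & -8) / 8 folds to x >> 3 for signed x, because the
   AND clears the three low bits and the truncating division then
   agrees with the arithmetic right shift.  */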
12313
12314 /* Fall through */
12315
12316 case FLOOR_DIV_EXPR:
12317 /* Simplify A / (B << N) where A and B are positive and B is
12318 a power of 2, to A >> (N + log2(B)). */
12319 strict_overflow_p = false;
12320 if (TREE_CODE (arg1) == LSHIFT_EXPR
12321 && (TYPE_UNSIGNED (type)
12322 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12323 {
12324 tree sval = TREE_OPERAND (arg1, 0);
12325 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12326 {
12327 tree sh_cnt = TREE_OPERAND (arg1, 1);
12328 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12329 wi::exact_log2 (sval));
12330
12331 if (strict_overflow_p)
12332 fold_overflow_warning (("assuming signed overflow does not "
12333 "occur when simplifying A / (B << N)"),
12334 WARN_STRICT_OVERFLOW_MISC);
12335
12336 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12337 sh_cnt, pow2);
12338 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12339 fold_convert_loc (loc, type, arg0), sh_cnt);
12340 }
12341 }
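/* Illustration: for unsigned a, a / (2u << n) folds to a >> (n + 1),
   replacing the division by a shift.  */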
12342
12343 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12344 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12345 if (INTEGRAL_TYPE_P (type)
12346 && TYPE_UNSIGNED (type)
12347 && code == FLOOR_DIV_EXPR)
12348 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12349
12350 /* Fall through */
12351
12352 case ROUND_DIV_EXPR:
12353 case CEIL_DIV_EXPR:
12354 case EXACT_DIV_EXPR:
12355 if (integer_onep (arg1))
12356 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12357 if (integer_zerop (arg1))
12358 return NULL_TREE;
12359 /* X / -1 is -X. */
12360 if (!TYPE_UNSIGNED (type)
12361 && TREE_CODE (arg1) == INTEGER_CST
12362 && wi::eq_p (arg1, -1))
12363 return fold_convert_loc (loc, type, negate_expr (arg0));
12364
12365 /* Convert -A / -B to A / B when the type is signed and overflow is
12366 undefined. */
12367 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12368 && TREE_CODE (arg0) == NEGATE_EXPR
12369 && negate_expr_p (arg1))
12370 {
12371 if (INTEGRAL_TYPE_P (type))
12372 fold_overflow_warning (("assuming signed overflow does not occur "
12373 "when distributing negation across "
12374 "division"),
12375 WARN_STRICT_OVERFLOW_MISC);
12376 return fold_build2_loc (loc, code, type,
12377 fold_convert_loc (loc, type,
12378 TREE_OPERAND (arg0, 0)),
12379 fold_convert_loc (loc, type,
12380 negate_expr (arg1)));
12381 }
12382 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12383 && TREE_CODE (arg1) == NEGATE_EXPR
12384 && negate_expr_p (arg0))
12385 {
12386 if (INTEGRAL_TYPE_P (type))
12387 fold_overflow_warning (("assuming signed overflow does not occur "
12388 "when distributing negation across "
12389 "division"),
12390 WARN_STRICT_OVERFLOW_MISC);
12391 return fold_build2_loc (loc, code, type,
12392 fold_convert_loc (loc, type,
12393 negate_expr (arg0)),
12394 fold_convert_loc (loc, type,
12395 TREE_OPERAND (arg1, 0)));
12396 }
12397
12398 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12399 operation, EXACT_DIV_EXPR.
12400
12401 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12402 At one time others generated faster code, but it's not clear whether
12403 they still do after the last round of changes to the DIV code in expmed.c. */
12404 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12405 && multiple_of_p (type, arg0, arg1))
12406 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12407
12408 strict_overflow_p = false;
12409 if (TREE_CODE (arg1) == INTEGER_CST
12410 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12411 &strict_overflow_p)))
12412 {
12413 if (strict_overflow_p)
12414 fold_overflow_warning (("assuming signed overflow does not occur "
12415 "when simplifying division"),
12416 WARN_STRICT_OVERFLOW_MISC);
12417 return fold_convert_loc (loc, type, tem);
12418 }
12419
12420 return NULL_TREE;
12421
12422 case CEIL_MOD_EXPR:
12423 case FLOOR_MOD_EXPR:
12424 case ROUND_MOD_EXPR:
12425 case TRUNC_MOD_EXPR:
12426 /* X % 1 is always zero, but be sure to preserve any side
12427 effects in X. */
12428 if (integer_onep (arg1))
12429 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12430
12431 /* For X % 0, return X % 0 unchanged so that we can get the
12432 proper warnings and errors. */
12433 if (integer_zerop (arg1))
12434 return NULL_TREE;
12435
12436 /* 0 % X is always zero, but be sure to preserve any side
12437 effects in X. Place this after checking for X == 0. */
12438 if (integer_zerop (arg0))
12439 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12440
12441 /* X % -1 is zero. */
12442 if (!TYPE_UNSIGNED (type)
12443 && TREE_CODE (arg1) == INTEGER_CST
12444 && wi::eq_p (arg1, -1))
12445 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12446
12447 /* X % -C is the same as X % C. */
12448 if (code == TRUNC_MOD_EXPR
12449 && TYPE_SIGN (type) == SIGNED
12450 && TREE_CODE (arg1) == INTEGER_CST
12451 && !TREE_OVERFLOW (arg1)
12452 && wi::neg_p (arg1)
12453 && !TYPE_OVERFLOW_TRAPS (type)
12454 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12455 && !sign_bit_p (arg1, arg1))
12456 return fold_build2_loc (loc, code, type,
12457 fold_convert_loc (loc, type, arg0),
12458 fold_convert_loc (loc, type,
12459 negate_expr (arg1)));
12460
12461 /* X % -Y is the same as X % Y. */
12462 if (code == TRUNC_MOD_EXPR
12463 && !TYPE_UNSIGNED (type)
12464 && TREE_CODE (arg1) == NEGATE_EXPR
12465 && !TYPE_OVERFLOW_TRAPS (type))
12466 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12467 fold_convert_loc (loc, type,
12468 TREE_OPERAND (arg1, 0)));
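/* Illustration: x % -8 folds to x % 8 and x % -y to x % y for signed
   non-trapping types: truncating modulo ignores the sign of the
   divisor, e.g. 7 % -3 == 7 % 3 == 1 and -7 % -3 == -7 % 3 == -1.  */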
12469
12470 strict_overflow_p = false;
12471 if (TREE_CODE (arg1) == INTEGER_CST
12472 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12473 &strict_overflow_p)))
12474 {
12475 if (strict_overflow_p)
12476 fold_overflow_warning (("assuming signed overflow does not occur "
12477 "when simplifying modulus"),
12478 WARN_STRICT_OVERFLOW_MISC);
12479 return fold_convert_loc (loc, type, tem);
12480 }
12481
12482 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12483 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12484 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12485 && (TYPE_UNSIGNED (type)
12486 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12487 {
12488 tree c = arg1;
12489 /* Also optimize A % (C << N) where C is a power of 2,
12490 to A & ((C << N) - 1). */
12491 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12492 c = TREE_OPERAND (arg1, 0);
12493
12494 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12495 {
12496 tree mask
12497 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12498 build_int_cst (TREE_TYPE (arg1), 1));
12499 if (strict_overflow_p)
12500 fold_overflow_warning (("assuming signed overflow does not "
12501 "occur when simplifying "
12502 "X % (power of two)"),
12503 WARN_STRICT_OVERFLOW_MISC);
12504 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12505 fold_convert_loc (loc, type, arg0),
12506 fold_convert_loc (loc, type, mask));
12507 }
12508 }
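/* Illustration: for unsigned x, x % 8 folds to x & 7, and
   x % (2u << n) folds to x & ((2u << n) - 1).  */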
12509
12510 return NULL_TREE;
12511
12512 case LROTATE_EXPR:
12513 case RROTATE_EXPR:
12514 if (integer_all_onesp (arg0))
12515 return omit_one_operand_loc (loc, type, arg0, arg1);
12516 goto shift;
12517
12518 case RSHIFT_EXPR:
12519 /* Optimize -1 >> x for arithmetic right shifts. */
12520 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12521 && tree_expr_nonnegative_p (arg1))
12522 return omit_one_operand_loc (loc, type, arg0, arg1);
12523 /* ... fall through ... */
12524
12525 case LSHIFT_EXPR:
12526 shift:
12527 if (integer_zerop (arg1))
12528 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12529 if (integer_zerop (arg0))
12530 return omit_one_operand_loc (loc, type, arg0, arg1);
12531
12532 /* Prefer vector1 << scalar to vector1 << vector2
12533 if vector2 is uniform. */
12534 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12535 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12536 return fold_build2_loc (loc, code, type, op0, tem);
12537
12538 /* Since a negative shift count is not well-defined,
12539 don't try to compute it in the compiler. */
12540 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12541 return NULL_TREE;
12542
12543 prec = element_precision (type);
12544
12545 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12546 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12547 && tree_to_uhwi (arg1) < prec
12548 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12549 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12550 {
12551 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12552 + tree_to_uhwi (arg1));
12553
12554 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12555 being well defined. */
12556 if (low >= prec)
12557 {
12558 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12559 low = low % prec;
12560 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12561 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12562 TREE_OPERAND (arg0, 0));
12563 else
12564 low = prec - 1;
12565 }
12566
12567 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12568 build_int_cst (TREE_TYPE (arg1), low));
12569 }
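/* Illustration: (x << 3) << 5 folds to x << 8 while 8 is still a valid
   shift count; if the combined count reaches the precision, an
   unsigned or left shift folds to 0 instead.  */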
12570
12571 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12572 into x & ((unsigned)-1 >> c) for unsigned types. */
12573 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12574 || (TYPE_UNSIGNED (type)
12575 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12576 && tree_fits_uhwi_p (arg1)
12577 && tree_to_uhwi (arg1) < prec
12578 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12579 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12580 {
12581 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12582 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12583 tree lshift;
12584 tree arg00;
12585
12586 if (low0 == low1)
12587 {
12588 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12589
12590 lshift = build_minus_one_cst (type);
12591 lshift = const_binop (code, lshift, arg1);
12592
12593 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12594 }
12595 }
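/* Illustration: for 32-bit unsigned x, (x >> 4) << 4 folds to
   x & (-1 << 4), i.e. x & 0xfffffff0, replacing two shifts with a
   single mask.  */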
12596
12597 /* Rewrite an LROTATE_EXPR by a constant into an
12598 RROTATE_EXPR by a new constant. */
12599 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12600 {
12601 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12602 tem = const_binop (MINUS_EXPR, tem, arg1);
12603 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12604 }
12605
12606 /* If we have a rotate of a bit operation with the rotate count and
12607 the second operand of the bit operation both constant,
12608 permute the two operations. */
12609 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12610 && (TREE_CODE (arg0) == BIT_AND_EXPR
12611 || TREE_CODE (arg0) == BIT_IOR_EXPR
12612 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12613 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12614 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12615 fold_build2_loc (loc, code, type,
12616 TREE_OPERAND (arg0, 0), arg1),
12617 fold_build2_loc (loc, code, type,
12618 TREE_OPERAND (arg0, 1), arg1));
12619
12620 /* Two consecutive rotates adding up to some integer
12621 multiple of the precision of the type can be ignored. */
12622 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12623 && TREE_CODE (arg0) == RROTATE_EXPR
12624 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12625 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12626 prec) == 0)
12627 return TREE_OPERAND (arg0, 0);
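/* Illustration: on a 32-bit type, ror (ror (x, 24), 8) folds to x,
   since 24 + 8 is a whole multiple of the precision.  */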
12628
12629 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12630 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12631 if the latter can be further optimized. */
12632 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12633 && TREE_CODE (arg0) == BIT_AND_EXPR
12634 && TREE_CODE (arg1) == INTEGER_CST
12635 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12636 {
12637 tree mask = fold_build2_loc (loc, code, type,
12638 fold_convert_loc (loc, type,
12639 TREE_OPERAND (arg0, 1)),
12640 arg1);
12641 tree shift = fold_build2_loc (loc, code, type,
12642 fold_convert_loc (loc, type,
12643 TREE_OPERAND (arg0, 0)),
12644 arg1);
12645 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12646 if (tem)
12647 return tem;
12648 }
12649
12650 return NULL_TREE;
12651
12652 case MIN_EXPR:
12653 if (operand_equal_p (arg0, arg1, 0))
12654 return omit_one_operand_loc (loc, type, arg0, arg1);
12655 if (INTEGRAL_TYPE_P (type)
12656 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12657 return omit_one_operand_loc (loc, type, arg1, arg0);
12658 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12659 if (tem)
12660 return tem;
12661 goto associate;
12662
12663 case MAX_EXPR:
12664 if (operand_equal_p (arg0, arg1, 0))
12665 return omit_one_operand_loc (loc, type, arg0, arg1);
12666 if (INTEGRAL_TYPE_P (type)
12667 && TYPE_MAX_VALUE (type)
12668 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12669 return omit_one_operand_loc (loc, type, arg1, arg0);
12670 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12671 if (tem)
12672 return tem;
12673 goto associate;
12674
12675 case TRUTH_ANDIF_EXPR:
12676 /* Note that the operands of this must be ints
12677 and their values must be 0 or 1.
12678 ("true" is a fixed value perhaps depending on the language.) */
12679 /* If first arg is constant zero, return it. */
12680 if (integer_zerop (arg0))
12681 return fold_convert_loc (loc, type, arg0);
12682 case TRUTH_AND_EXPR:
12683 /* If either arg is constant true, drop it. */
12684 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12685 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12686 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12687 /* Preserve sequence points. */
12688 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12689 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12690 /* If second arg is constant zero, result is zero, but first arg
12691 must be evaluated. */
12692 if (integer_zerop (arg1))
12693 return omit_one_operand_loc (loc, type, arg1, arg0);
12694 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12695 case will be handled here. */
12696 if (integer_zerop (arg0))
12697 return omit_one_operand_loc (loc, type, arg0, arg1);
12698
12699 /* !X && X is always false. */
12700 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12701 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12702 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12703 /* X && !X is always false. */
12704 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12705 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12706 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12707
12708 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12709 means A >= Y && A != MAX, but in this case we know that
12710 A < X <= MAX. */
12711
12712 if (!TREE_SIDE_EFFECTS (arg0)
12713 && !TREE_SIDE_EFFECTS (arg1))
12714 {
12715 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12716 if (tem && !operand_equal_p (tem, arg0, 0))
12717 return fold_build2_loc (loc, code, type, tem, arg1);
12718
12719 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12720 if (tem && !operand_equal_p (tem, arg1, 0))
12721 return fold_build2_loc (loc, code, type, arg0, tem);
12722 }
12723
12724 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12725 != NULL_TREE)
12726 return tem;
12727
12728 return NULL_TREE;
12729
12730 case TRUTH_ORIF_EXPR:
12731 /* Note that the operands of this must be ints
12732 and their values must be 0 or true.
12733 ("true" is a fixed value perhaps depending on the language.) */
12734 /* If first arg is constant true, return it. */
12735 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12736 return fold_convert_loc (loc, type, arg0);
12737 case TRUTH_OR_EXPR:
12738 /* If either arg is constant zero, drop it. */
12739 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12740 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12741 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12742 /* Preserve sequence points. */
12743 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12744 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12745 /* If second arg is constant true, result is true, but we must
12746 evaluate first arg. */
12747 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12748 return omit_one_operand_loc (loc, type, arg1, arg0);
12749 /* Likewise for first arg, but note this only occurs here for
12750 TRUTH_OR_EXPR. */
12751 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12752 return omit_one_operand_loc (loc, type, arg0, arg1);
12753
12754 /* !X || X is always true. */
12755 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12756 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12757 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12758 /* X || !X is always true. */
12759 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12760 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12761 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12762
12763 /* (X && !Y) || (!X && Y) is X ^ Y */
12764 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12765 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12766 {
12767 tree a0, a1, l0, l1, n0, n1;
12768
12769 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12770 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12771
12772 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12773 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12774
12775 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12776 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12777
12778 if ((operand_equal_p (n0, a0, 0)
12779 && operand_equal_p (n1, a1, 0))
12780 || (operand_equal_p (n0, a1, 0)
12781 && operand_equal_p (n1, a0, 0)))
12782 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12783 }
12784
12785 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12786 != NULL_TREE)
12787 return tem;
12788
12789 return NULL_TREE;
12790
12791 case TRUTH_XOR_EXPR:
12792 /* If the second arg is constant zero, drop it. */
12793 if (integer_zerop (arg1))
12794 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12795 /* If the second arg is constant true, this is a logical inversion. */
12796 if (integer_onep (arg1))
12797 {
12798 tem = invert_truthvalue_loc (loc, arg0);
12799 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12800 }
12801 /* Identical arguments cancel to zero. */
12802 if (operand_equal_p (arg0, arg1, 0))
12803 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12804
12805 /* !X ^ X is always true. */
12806 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12807 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12808 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12809
12810 /* X ^ !X is always true. */
12811 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12812 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12813 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12814
12815 return NULL_TREE;
12816
12817 case EQ_EXPR:
12818 case NE_EXPR:
12819 STRIP_NOPS (arg0);
12820 STRIP_NOPS (arg1);
12821
12822 tem = fold_comparison (loc, code, type, op0, op1);
12823 if (tem != NULL_TREE)
12824 return tem;
12825
12826 /* bool_var != 0 becomes bool_var. */
12827 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12828 && code == NE_EXPR)
12829 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12830
12831 /* bool_var == 1 becomes bool_var. */
12832 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12833 && code == EQ_EXPR)
12834 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12835
12836 /* bool_var != 1 becomes !bool_var. */
12837 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12838 && code == NE_EXPR)
12839 return fold_convert_loc (loc, type,
12840 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12841 TREE_TYPE (arg0), arg0));
12842
12843 /* bool_var == 0 becomes !bool_var. */
12844 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12845 && code == EQ_EXPR)
12846 return fold_convert_loc (loc, type,
12847 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12848 TREE_TYPE (arg0), arg0));
12849
12850 /* !exp != 0 becomes !exp */
12851 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12852 && code == NE_EXPR)
12853 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12854
12855 /* If this is an equality comparison of the address of two non-weak,
12856 unaliased symbols, neither of which is extern (since we do not
12857 have access to attributes for externs), then we know the result. */
12858 if (TREE_CODE (arg0) == ADDR_EXPR
12859 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12860 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12861 && ! lookup_attribute ("alias",
12862 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12863 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12864 && TREE_CODE (arg1) == ADDR_EXPR
12865 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12866 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12867 && ! lookup_attribute ("alias",
12868 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12869 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12870 {
12871 /* We know that we're looking at the address of two
12872 non-weak, unaliased, static _DECL nodes.
12873
12874 It is both wasteful and incorrect to call operand_equal_p
12875 to compare the two ADDR_EXPR nodes. It is wasteful in that
12876 all we need to do is test pointer equality for the arguments
12877 to the two ADDR_EXPR nodes. It is incorrect to use
12878 operand_equal_p as that function is NOT equivalent to a
12879 C equality test. It can in fact return false for two
12880 objects which would test as equal using the C equality
12881 operator. */
12882 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12883 return constant_boolean_node (equal
12884 ? code == EQ_EXPR : code != EQ_EXPR,
12885 type);
12886 }
12887
12888 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12889 a MINUS_EXPR of a constant, we can convert it into a comparison with
12890 a revised constant as long as no overflow occurs. */
12891 if (TREE_CODE (arg1) == INTEGER_CST
12892 && (TREE_CODE (arg0) == PLUS_EXPR
12893 || TREE_CODE (arg0) == MINUS_EXPR)
12894 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12895 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12896 ? MINUS_EXPR : PLUS_EXPR,
12897 fold_convert_loc (loc, TREE_TYPE (arg0),
12898 arg1),
12899 TREE_OPERAND (arg0, 1)))
12900 && !TREE_OVERFLOW (tem))
12901 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
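/* Illustration: x + 5 == 7 folds to x == 2, and x - 3 != 10 folds to
   x != 13, provided the adjusted constant does not overflow.  */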
12902
12903 /* Similarly for a NEGATE_EXPR. */
12904 if (TREE_CODE (arg0) == NEGATE_EXPR
12905 && TREE_CODE (arg1) == INTEGER_CST
12906 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12907 arg1)))
12908 && TREE_CODE (tem) == INTEGER_CST
12909 && !TREE_OVERFLOW (tem))
12910 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12911
12912 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12913 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12914 && TREE_CODE (arg1) == INTEGER_CST
12915 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12916 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12917 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12918 fold_convert_loc (loc,
12919 TREE_TYPE (arg0),
12920 arg1),
12921 TREE_OPERAND (arg0, 1)));
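/* Illustration: (x ^ 3) == 5 folds to x == 6, since 3 ^ 5 == 6.  */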
12922
12923 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12924 if ((TREE_CODE (arg0) == PLUS_EXPR
12925 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12926 || TREE_CODE (arg0) == MINUS_EXPR)
12927 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12928 0)),
12929 arg1, 0)
12930 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12931 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12932 {
12933 tree val = TREE_OPERAND (arg0, 1);
12934 return omit_two_operands_loc (loc, type,
12935 fold_build2_loc (loc, code, type,
12936 val,
12937 build_int_cst (TREE_TYPE (val),
12938 0)),
12939 TREE_OPERAND (arg0, 0), arg1);
12940 }
12941
12942 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12943 if (TREE_CODE (arg0) == MINUS_EXPR
12944 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12945 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12946 1)),
12947 arg1, 0)
12948 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12949 {
12950 return omit_two_operands_loc (loc, type,
12951 code == NE_EXPR
12952 ? boolean_true_node : boolean_false_node,
12953 TREE_OPERAND (arg0, 1), arg1);
12954 }
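/* Illustration: 7 - x == x would require 2 * x == 7, which is
   unsatisfiable even with wraparound because the left side is always
   even; so it folds to false (and 7 - x != x to true) while still
   evaluating x for side effects.  */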
12955
12956 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12957 for !=. Don't do this for ordered comparisons due to overflow. */
12958 if (TREE_CODE (arg0) == MINUS_EXPR
12959 && integer_zerop (arg1))
12960 return fold_build2_loc (loc, code, type,
12961 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12962
12963 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12964 if (TREE_CODE (arg0) == ABS_EXPR
12965 && (integer_zerop (arg1) || real_zerop (arg1)))
12966 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12967
12968 /* If this is an EQ or NE comparison with zero and ARG0 is
12969 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12970 two operations, but the latter can be done in one less insn
12971 on machines that have only two-operand insns or on which a
12972 constant cannot be the first operand. */
12973 if (TREE_CODE (arg0) == BIT_AND_EXPR
12974 && integer_zerop (arg1))
12975 {
12976 tree arg00 = TREE_OPERAND (arg0, 0);
12977 tree arg01 = TREE_OPERAND (arg0, 1);
12978 if (TREE_CODE (arg00) == LSHIFT_EXPR
12979 && integer_onep (TREE_OPERAND (arg00, 0)))
12980 {
12981 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12982 arg01, TREE_OPERAND (arg00, 1));
12983 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12984 build_int_cst (TREE_TYPE (arg0), 1));
12985 return fold_build2_loc (loc, code, type,
12986 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12987 arg1);
12988 }
12989 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12990 && integer_onep (TREE_OPERAND (arg01, 0)))
12991 {
12992 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12993 arg00, TREE_OPERAND (arg01, 1));
12994 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12995 build_int_cst (TREE_TYPE (arg0), 1));
12996 return fold_build2_loc (loc, code, type,
12997 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12998 arg1);
12999 }
13000 }
13001
13002 /* If this is an NE or EQ comparison of zero against the result of a
13003 signed MOD operation whose second operand is a power of 2, make
13004 the MOD operation unsigned since it is simpler and equivalent. */
13005 if (integer_zerop (arg1)
13006 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13007 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13008 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13009 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13010 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13011 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13012 {
13013 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13014 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13015 fold_convert_loc (loc, newtype,
13016 TREE_OPERAND (arg0, 0)),
13017 fold_convert_loc (loc, newtype,
13018 TREE_OPERAND (arg0, 1)));
13019
13020 return fold_build2_loc (loc, code, type, newmod,
13021 fold_convert_loc (loc, newtype, arg1));
13022 }
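/* Illustration: for int x, x % 4 == 0 folds to
   (unsigned) x % 4u == 0, which the modulus folding then reduces to
   ((unsigned) x & 3) == 0.  */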
13023
13024 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13025 C1 is a valid shift constant, and C2 is a power of two, i.e.
13026 a single bit. */
13027 if (TREE_CODE (arg0) == BIT_AND_EXPR
13028 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13029 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13030 == INTEGER_CST
13031 && integer_pow2p (TREE_OPERAND (arg0, 1))
13032 && integer_zerop (arg1))
13033 {
13034 tree itype = TREE_TYPE (arg0);
13035 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13036 prec = TYPE_PRECISION (itype);
13037
13038 /* Check for a valid shift count. */
13039 if (wi::ltu_p (arg001, prec))
13040 {
13041 tree arg01 = TREE_OPERAND (arg0, 1);
13042 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13043 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13044 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13045 can be rewritten as (X & (C2 << C1)) != 0. */
13046 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13047 {
13048 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13049 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13050 return fold_build2_loc (loc, code, type, tem,
13051 fold_convert_loc (loc, itype, arg1));
13052 }
13053 /* Otherwise, for signed (arithmetic) shifts,
13054 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13055 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13056 else if (!TYPE_UNSIGNED (itype))
13057 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13058 arg000, build_int_cst (itype, 0));
13059 /* Otherwise, for unsigned (logical) shifts,
13060 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13061 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13062 else
13063 return omit_one_operand_loc (loc, type,
13064 code == EQ_EXPR ? integer_one_node
13065 : integer_zero_node,
13066 arg000);
13067 }
13068 }
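/* Illustration: ((x >> 3) & 4) != 0 folds to (x & 32) != 0, since
   4 << 3 == 32 does not overflow; when the shifted-back mask would
   overflow, the test instead collapses to a sign test (x < 0 for
   signed x) or to a constant (for unsigned x).  */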
13069
13070 /* If we have (A & C) == C where C is a power of 2, convert this into
13071 (A & C) != 0. Similarly for NE_EXPR. */
13072 if (TREE_CODE (arg0) == BIT_AND_EXPR
13073 && integer_pow2p (TREE_OPERAND (arg0, 1))
13074 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13075 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13076 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13077 integer_zero_node));
13078
13079 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13080 bit, then fold the expression into A < 0 or A >= 0. */
13081 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13082 if (tem)
13083 return tem;
13084
13085 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13086 Similarly for NE_EXPR. */
13087 if (TREE_CODE (arg0) == BIT_AND_EXPR
13088 && TREE_CODE (arg1) == INTEGER_CST
13089 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13090 {
13091 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13092 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13093 TREE_OPERAND (arg0, 1));
13094 tree dandnotc
13095 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13096 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13097 notc);
13098 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13099 if (integer_nonzerop (dandnotc))
13100 return omit_one_operand_loc (loc, type, rslt, arg0);
13101 }
13102
13103 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13104 Similarly for NE_EXPR. */
13105 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13106 && TREE_CODE (arg1) == INTEGER_CST
13107 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13108 {
13109 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13110 tree candnotd
13111 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13112 TREE_OPERAND (arg0, 1),
13113 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13114 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13115 if (integer_nonzerop (candnotd))
13116 return omit_one_operand_loc (loc, type, rslt, arg0);
13117 }
13118
13119 /* If this is a comparison of a field, we may be able to simplify it. */
13120 if ((TREE_CODE (arg0) == COMPONENT_REF
13121 || TREE_CODE (arg0) == BIT_FIELD_REF)
13122 /* Handle the constant case even without -O
13123 to make sure the warnings are given. */
13124 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13125 {
13126 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13127 if (t1)
13128 return t1;
13129 }
13130
13131 /* Optimize comparisons of strlen vs zero to a compare of the
13132 first character of the string vs zero. To wit,
13133 strlen(ptr) == 0 => *ptr == 0
13134 strlen(ptr) != 0 => *ptr != 0
13135 Other cases should reduce to one of these two (or a constant)
13136 due to the return value of strlen being unsigned. */
13137 if (TREE_CODE (arg0) == CALL_EXPR
13138 && integer_zerop (arg1))
13139 {
13140 tree fndecl = get_callee_fndecl (arg0);
13141
13142 if (fndecl
13143 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13144 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13145 && call_expr_nargs (arg0) == 1
13146 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13147 {
13148 tree iref = build_fold_indirect_ref_loc (loc,
13149 CALL_EXPR_ARG (arg0, 0));
13150 return fold_build2_loc (loc, code, type, iref,
13151 build_int_cst (TREE_TYPE (iref), 0));
13152 }
13153 }
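/* Illustration: strlen (s) == 0 folds to *s == 0, avoiding the library
   call when only emptiness is tested.  */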
13154
13155 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13156 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13157 if (TREE_CODE (arg0) == RSHIFT_EXPR
13158 && integer_zerop (arg1)
13159 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13160 {
13161 tree arg00 = TREE_OPERAND (arg0, 0);
13162 tree arg01 = TREE_OPERAND (arg0, 1);
13163 tree itype = TREE_TYPE (arg00);
13164 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
13165 {
13166 if (TYPE_UNSIGNED (itype))
13167 {
13168 itype = signed_type_for (itype);
13169 arg00 = fold_convert_loc (loc, itype, arg00);
13170 }
13171 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13172 type, arg00, build_zero_cst (itype));
13173 }
13174 }
13175
13176 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13177 if (integer_zerop (arg1)
13178 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13179 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13180 TREE_OPERAND (arg0, 1));
13181
13182 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13183 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13184 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13185 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13186 build_zero_cst (TREE_TYPE (arg0)));
13187 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13188 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13189 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13190 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13191 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13192 build_zero_cst (TREE_TYPE (arg0)));
13193
13194 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13195 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13196 && TREE_CODE (arg1) == INTEGER_CST
13197 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13198 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13199 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13200 TREE_OPERAND (arg0, 1), arg1));
13201
13202 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13203 (X & C) == 0 when C is a single bit. */
13204 if (TREE_CODE (arg0) == BIT_AND_EXPR
13205 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13206 && integer_zerop (arg1)
13207 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13208 {
13209 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13210 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13211 TREE_OPERAND (arg0, 1));
13212 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13213 type, tem,
13214 fold_convert_loc (loc, TREE_TYPE (arg0),
13215 arg1));
13216 }
13217
13218 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13219 constant C is a power of two, i.e. a single bit. */
13220 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13221 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13222 && integer_zerop (arg1)
13223 && integer_pow2p (TREE_OPERAND (arg0, 1))
13224 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13225 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13226 {
13227 tree arg00 = TREE_OPERAND (arg0, 0);
13228 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13229 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13230 }
13231
13232 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13233 when C is a power of two, i.e. a single bit. */
13234 if (TREE_CODE (arg0) == BIT_AND_EXPR
13235 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13236 && integer_zerop (arg1)
13237 && integer_pow2p (TREE_OPERAND (arg0, 1))
13238 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13239 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13240 {
13241 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13242 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13243 arg000, TREE_OPERAND (arg0, 1));
13244 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13245 tem, build_int_cst (TREE_TYPE (tem), 0));
13246 }
13247
13248 if (integer_zerop (arg1)
13249 && tree_expr_nonzero_p (arg0))
13250 {
13251 tree res = constant_boolean_node (code == NE_EXPR, type);
13252 return omit_one_operand_loc (loc, type, res, arg0);
13253 }
13254
13255 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13256 if (TREE_CODE (arg0) == NEGATE_EXPR
13257 && TREE_CODE (arg1) == NEGATE_EXPR)
13258 return fold_build2_loc (loc, code, type,
13259 TREE_OPERAND (arg0, 0),
13260 fold_convert_loc (loc, TREE_TYPE (arg0),
13261 TREE_OPERAND (arg1, 0)));
13262
13263 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13264 if (TREE_CODE (arg0) == BIT_AND_EXPR
13265 && TREE_CODE (arg1) == BIT_AND_EXPR)
13266 {
13267 tree arg00 = TREE_OPERAND (arg0, 0);
13268 tree arg01 = TREE_OPERAND (arg0, 1);
13269 tree arg10 = TREE_OPERAND (arg1, 0);
13270 tree arg11 = TREE_OPERAND (arg1, 1);
13271 tree itype = TREE_TYPE (arg0);
13272
13273 if (operand_equal_p (arg01, arg11, 0))
13274 return fold_build2_loc (loc, code, type,
13275 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13276 fold_build2_loc (loc,
13277 BIT_XOR_EXPR, itype,
13278 arg00, arg10),
13279 arg01),
13280 build_zero_cst (itype));
13281
13282 if (operand_equal_p (arg01, arg10, 0))
13283 return fold_build2_loc (loc, code, type,
13284 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13285 fold_build2_loc (loc,
13286 BIT_XOR_EXPR, itype,
13287 arg00, arg11),
13288 arg01),
13289 build_zero_cst (itype));
13290
13291 if (operand_equal_p (arg00, arg11, 0))
13292 return fold_build2_loc (loc, code, type,
13293 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13294 fold_build2_loc (loc,
13295 BIT_XOR_EXPR, itype,
13296 arg01, arg10),
13297 arg00),
13298 build_zero_cst (itype));
13299
13300 if (operand_equal_p (arg00, arg10, 0))
13301 return fold_build2_loc (loc, code, type,
13302 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13303 fold_build2_loc (loc,
13304 BIT_XOR_EXPR, itype,
13305 arg01, arg11),
13306 arg00),
13307 build_zero_cst (itype));
13308 }
13309
13310 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13311 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13312 {
13313 tree arg00 = TREE_OPERAND (arg0, 0);
13314 tree arg01 = TREE_OPERAND (arg0, 1);
13315 tree arg10 = TREE_OPERAND (arg1, 0);
13316 tree arg11 = TREE_OPERAND (arg1, 1);
13317 tree itype = TREE_TYPE (arg0);
13318
13319 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13320 operand_equal_p guarantees no side-effects so we don't need
13321 to use omit_one_operand on Z. */
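/* E.g. "(x ^ z) == (y ^ z)" folds straight to "x == y" for
   hypothetical x, y and z.  */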
13322 if (operand_equal_p (arg01, arg11, 0))
13323 return fold_build2_loc (loc, code, type, arg00,
13324 fold_convert_loc (loc, TREE_TYPE (arg00),
13325 arg10));
13326 if (operand_equal_p (arg01, arg10, 0))
13327 return fold_build2_loc (loc, code, type, arg00,
13328 fold_convert_loc (loc, TREE_TYPE (arg00),
13329 arg11));
13330 if (operand_equal_p (arg00, arg11, 0))
13331 return fold_build2_loc (loc, code, type, arg01,
13332 fold_convert_loc (loc, TREE_TYPE (arg01),
13333 arg10));
13334 if (operand_equal_p (arg00, arg10, 0))
13335 return fold_build2_loc (loc, code, type, arg01,
13336 fold_convert_loc (loc, TREE_TYPE (arg01),
13337 arg11));
13338
13339 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
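/* E.g. "(x ^ 5) == (y ^ 3)" becomes "(x ^ (5 ^ 3)) == y", i.e.
   "(x ^ 6) == y", for hypothetical x and y.  */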
13340 if (TREE_CODE (arg01) == INTEGER_CST
13341 && TREE_CODE (arg11) == INTEGER_CST)
13342 {
13343 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13344 fold_convert_loc (loc, itype, arg11));
13345 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13346 return fold_build2_loc (loc, code, type, tem,
13347 fold_convert_loc (loc, itype, arg10));
13348 }
13349 }
13350
13351 /* Attempt to simplify equality/inequality comparisons of complex
13352 values. Only lower the comparison if the result is known or
13353 can be simplified to a single scalar comparison. */
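/* E.g. when the comparison of the real parts folds to a false constant,
   "a == b" for hypothetical complex a and b folds to false, with the
   imaginary parts kept only for their side effects.  */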
13354 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13355 || TREE_CODE (arg0) == COMPLEX_CST)
13356 && (TREE_CODE (arg1) == COMPLEX_EXPR
13357 || TREE_CODE (arg1) == COMPLEX_CST))
13358 {
13359 tree real0, imag0, real1, imag1;
13360 tree rcond, icond;
13361
13362 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13363 {
13364 real0 = TREE_OPERAND (arg0, 0);
13365 imag0 = TREE_OPERAND (arg0, 1);
13366 }
13367 else
13368 {
13369 real0 = TREE_REALPART (arg0);
13370 imag0 = TREE_IMAGPART (arg0);
13371 }
13372
13373 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13374 {
13375 real1 = TREE_OPERAND (arg1, 0);
13376 imag1 = TREE_OPERAND (arg1, 1);
13377 }
13378 else
13379 {
13380 real1 = TREE_REALPART (arg1);
13381 imag1 = TREE_IMAGPART (arg1);
13382 }
13383
13384 rcond = fold_binary_loc (loc, code, type, real0, real1);
13385 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13386 {
13387 if (integer_zerop (rcond))
13388 {
13389 if (code == EQ_EXPR)
13390 return omit_two_operands_loc (loc, type, boolean_false_node,
13391 imag0, imag1);
13392 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13393 }
13394 else
13395 {
13396 if (code == NE_EXPR)
13397 return omit_two_operands_loc (loc, type, boolean_true_node,
13398 imag0, imag1);
13399 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13400 }
13401 }
13402
13403 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13404 if (icond && TREE_CODE (icond) == INTEGER_CST)
13405 {
13406 if (integer_zerop (icond))
13407 {
13408 if (code == EQ_EXPR)
13409 return omit_two_operands_loc (loc, type, boolean_false_node,
13410 real0, real1);
13411 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13412 }
13413 else
13414 {
13415 if (code == NE_EXPR)
13416 return omit_two_operands_loc (loc, type, boolean_true_node,
13417 real0, real1);
13418 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13419 }
13420 }
13421 }
13422
13423 return NULL_TREE;
13424
13425 case LT_EXPR:
13426 case GT_EXPR:
13427 case LE_EXPR:
13428 case GE_EXPR:
13429 tem = fold_comparison (loc, code, type, op0, op1);
13430 if (tem != NULL_TREE)
13431 return tem;
13432
13433 /* Transform comparisons of the form X +- C CMP X. */
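/* E.g. for a hypothetical signed x with undefined overflow, "x + 1 > x"
   folds to true and "x + 1 <= x" folds to false, in each case possibly
   emitting a -Wstrict-overflow diagnostic.  */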
13434 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13435 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13436 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13437 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13438 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13439 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13440 {
13441 tree arg01 = TREE_OPERAND (arg0, 1);
13442 enum tree_code code0 = TREE_CODE (arg0);
13443 int is_positive;
13444
13445 if (TREE_CODE (arg01) == REAL_CST)
13446 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13447 else
13448 is_positive = tree_int_cst_sgn (arg01);
13449
13450 /* (X - c) > X becomes false. */
13451 if (code == GT_EXPR
13452 && ((code0 == MINUS_EXPR && is_positive >= 0)
13453 || (code0 == PLUS_EXPR && is_positive <= 0)))
13454 {
13455 if (TREE_CODE (arg01) == INTEGER_CST
13456 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13457 fold_overflow_warning (("assuming signed overflow does not "
13458 "occur when assuming that (X - c) > X "
13459 "is always false"),
13460 WARN_STRICT_OVERFLOW_ALL);
13461 return constant_boolean_node (0, type);
13462 }
13463
13464 /* Likewise (X + c) < X becomes false. */
13465 if (code == LT_EXPR
13466 && ((code0 == PLUS_EXPR && is_positive >= 0)
13467 || (code0 == MINUS_EXPR && is_positive <= 0)))
13468 {
13469 if (TREE_CODE (arg01) == INTEGER_CST
13470 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13471 fold_overflow_warning (("assuming signed overflow does not "
13472 "occur when assuming that "
13473 "(X + c) < X is always false"),
13474 WARN_STRICT_OVERFLOW_ALL);
13475 return constant_boolean_node (0, type);
13476 }
13477
13478 /* Convert (X - c) <= X to true. */
13479 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13480 && code == LE_EXPR
13481 && ((code0 == MINUS_EXPR && is_positive >= 0)
13482 || (code0 == PLUS_EXPR && is_positive <= 0)))
13483 {
13484 if (TREE_CODE (arg01) == INTEGER_CST
13485 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13486 fold_overflow_warning (("assuming signed overflow does not "
13487 "occur when assuming that "
13488 "(X - c) <= X is always true"),
13489 WARN_STRICT_OVERFLOW_ALL);
13490 return constant_boolean_node (1, type);
13491 }
13492
13493 /* Convert (X + c) >= X to true. */
13494 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13495 && code == GE_EXPR
13496 && ((code0 == PLUS_EXPR && is_positive >= 0)
13497 || (code0 == MINUS_EXPR && is_positive <= 0)))
13498 {
13499 if (TREE_CODE (arg01) == INTEGER_CST
13500 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13501 fold_overflow_warning (("assuming signed overflow does not "
13502 "occur when assuming that "
13503 "(X + c) >= X is always true"),
13504 WARN_STRICT_OVERFLOW_ALL);
13505 return constant_boolean_node (1, type);
13506 }
13507
13508 if (TREE_CODE (arg01) == INTEGER_CST)
13509 {
13510 /* Convert X + c > X and X - c < X to true for integers. */
13511 if (code == GT_EXPR
13512 && ((code0 == PLUS_EXPR && is_positive > 0)
13513 || (code0 == MINUS_EXPR && is_positive < 0)))
13514 {
13515 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13516 fold_overflow_warning (("assuming signed overflow does "
13517 "not occur when assuming that "
13518 "(X + c) > X is always true"),
13519 WARN_STRICT_OVERFLOW_ALL);
13520 return constant_boolean_node (1, type);
13521 }
13522
13523 if (code == LT_EXPR
13524 && ((code0 == MINUS_EXPR && is_positive > 0)
13525 || (code0 == PLUS_EXPR && is_positive < 0)))
13526 {
13527 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13528 fold_overflow_warning (("assuming signed overflow does "
13529 "not occur when assuming that "
13530 "(X - c) < X is always true"),
13531 WARN_STRICT_OVERFLOW_ALL);
13532 return constant_boolean_node (1, type);
13533 }
13534
13535 /* Convert X + c <= X and X - c >= X to false for integers. */
13536 if (code == LE_EXPR
13537 && ((code0 == PLUS_EXPR && is_positive > 0)
13538 || (code0 == MINUS_EXPR && is_positive < 0)))
13539 {
13540 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13541 fold_overflow_warning (("assuming signed overflow does "
13542 "not occur when assuming that "
13543 "(X + c) <= X is always false"),
13544 WARN_STRICT_OVERFLOW_ALL);
13545 return constant_boolean_node (0, type);
13546 }
13547
13548 if (code == GE_EXPR
13549 && ((code0 == MINUS_EXPR && is_positive > 0)
13550 || (code0 == PLUS_EXPR && is_positive < 0)))
13551 {
13552 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13553 fold_overflow_warning (("assuming signed overflow does "
13554 "not occur when assuming that "
13555 "(X - c) >= X is always false"),
13556 WARN_STRICT_OVERFLOW_ALL);
13557 return constant_boolean_node (0, type);
13558 }
13559 }
13560 }
13561
13562 /* Comparisons with the highest or lowest possible integer of
13563 the specified precision will have known values. */
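/* E.g. for a hypothetical unsigned char x, "x <= 255" folds to true,
   "x > 255" folds to false, and "x > 127" becomes the sign-bit test
   "(signed char) x < 0".  */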
13564 {
13565 tree arg1_type = TREE_TYPE (arg1);
13566 unsigned int prec = TYPE_PRECISION (arg1_type);
13567
13568 if (TREE_CODE (arg1) == INTEGER_CST
13569 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13570 {
13571 wide_int max = wi::max_value (arg1_type);
13572 wide_int signed_max = wi::max_value (prec, SIGNED);
13573 wide_int min = wi::min_value (arg1_type);
13574
13575 if (wi::eq_p (arg1, max))
13576 switch (code)
13577 {
13578 case GT_EXPR:
13579 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13580
13581 case GE_EXPR:
13582 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13583
13584 case LE_EXPR:
13585 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13586
13587 case LT_EXPR:
13588 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13589
13590 /* The GE_EXPR and LT_EXPR cases above are not normally
13591 reached because of previous transformations. */
13592
13593 default:
13594 break;
13595 }
13596 else if (wi::eq_p (arg1, max - 1))
13597 switch (code)
13598 {
13599 case GT_EXPR:
13600 arg1 = const_binop (PLUS_EXPR, arg1,
13601 build_int_cst (TREE_TYPE (arg1), 1));
13602 return fold_build2_loc (loc, EQ_EXPR, type,
13603 fold_convert_loc (loc,
13604 TREE_TYPE (arg1), arg0),
13605 arg1);
13606 case LE_EXPR:
13607 arg1 = const_binop (PLUS_EXPR, arg1,
13608 build_int_cst (TREE_TYPE (arg1), 1));
13609 return fold_build2_loc (loc, NE_EXPR, type,
13610 fold_convert_loc (loc, TREE_TYPE (arg1),
13611 arg0),
13612 arg1);
13613 default:
13614 break;
13615 }
13616 else if (wi::eq_p (arg1, min))
13617 switch (code)
13618 {
13619 case LT_EXPR:
13620 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13621
13622 case LE_EXPR:
13623 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13624
13625 case GE_EXPR:
13626 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13627
13628 case GT_EXPR:
13629 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13630
13631 default:
13632 break;
13633 }
13634 else if (wi::eq_p (arg1, min + 1))
13635 switch (code)
13636 {
13637 case GE_EXPR:
13638 arg1 = const_binop (MINUS_EXPR, arg1,
13639 build_int_cst (TREE_TYPE (arg1), 1));
13640 return fold_build2_loc (loc, NE_EXPR, type,
13641 fold_convert_loc (loc,
13642 TREE_TYPE (arg1), arg0),
13643 arg1);
13644 case LT_EXPR:
13645 arg1 = const_binop (MINUS_EXPR, arg1,
13646 build_int_cst (TREE_TYPE (arg1), 1));
13647 return fold_build2_loc (loc, EQ_EXPR, type,
13648 fold_convert_loc (loc, TREE_TYPE (arg1),
13649 arg0),
13650 arg1);
13651 default:
13652 break;
13653 }
13654
13655 else if (wi::eq_p (arg1, signed_max)
13656 && TYPE_UNSIGNED (arg1_type)
13657 /* We will flip the signedness of the comparison operator
13658 associated with the mode of arg1, so the sign bit is
13659 specified by this mode. Check that arg1 is the signed
13660 max associated with this sign bit. */
13661 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13662 /* signed_type does not work on pointer types. */
13663 && INTEGRAL_TYPE_P (arg1_type))
13664 {
13665 /* The following case also applies to X < signed_max+1
13666 	       and X >= signed_max+1 because of previous transformations.  */
13667 if (code == LE_EXPR || code == GT_EXPR)
13668 {
13669 tree st = signed_type_for (arg1_type);
13670 return fold_build2_loc (loc,
13671 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13672 type, fold_convert_loc (loc, st, arg0),
13673 build_int_cst (st, 0));
13674 }
13675 }
13676 }
13677 }
13678
13679 /* If we are comparing an ABS_EXPR with a constant, we can
13680 convert all the cases into explicit comparisons, but they may
13681 well not be faster than doing the ABS and one comparison.
13682 But ABS (X) <= C is a range comparison, which becomes a subtraction
13683 and a comparison, and is probably faster. */
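/* E.g. "ABS_EXPR <x> <= 10" becomes "x >= -10 && x <= 10" for a
   hypothetical x, a single range check.  */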
13684 if (code == LE_EXPR
13685 && TREE_CODE (arg1) == INTEGER_CST
13686 && TREE_CODE (arg0) == ABS_EXPR
13687 && ! TREE_SIDE_EFFECTS (arg0)
13688 && (0 != (tem = negate_expr (arg1)))
13689 && TREE_CODE (tem) == INTEGER_CST
13690 && !TREE_OVERFLOW (tem))
13691 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13692 build2 (GE_EXPR, type,
13693 TREE_OPERAND (arg0, 0), tem),
13694 build2 (LE_EXPR, type,
13695 TREE_OPERAND (arg0, 0), arg1));
13696
13697 /* Convert ABS_EXPR<x> >= 0 to true. */
13698 strict_overflow_p = false;
13699 if (code == GE_EXPR
13700 && (integer_zerop (arg1)
13701 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13702 && real_zerop (arg1)))
13703 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13704 {
13705 if (strict_overflow_p)
13706 fold_overflow_warning (("assuming signed overflow does not occur "
13707 "when simplifying comparison of "
13708 "absolute value and zero"),
13709 WARN_STRICT_OVERFLOW_CONDITIONAL);
13710 return omit_one_operand_loc (loc, type,
13711 constant_boolean_node (true, type),
13712 arg0);
13713 }
13714
13715 /* Convert ABS_EXPR<x> < 0 to false. */
13716 strict_overflow_p = false;
13717 if (code == LT_EXPR
13718 && (integer_zerop (arg1) || real_zerop (arg1))
13719 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13720 {
13721 if (strict_overflow_p)
13722 fold_overflow_warning (("assuming signed overflow does not occur "
13723 "when simplifying comparison of "
13724 "absolute value and zero"),
13725 WARN_STRICT_OVERFLOW_CONDITIONAL);
13726 return omit_one_operand_loc (loc, type,
13727 constant_boolean_node (false, type),
13728 arg0);
13729 }
13730
13731 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13732 and similarly for >= into !=. */
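/* E.g. for a hypothetical unsigned x, "x < (1 << y)" folds to
   "(x >> y) == 0" and "x >= (1 << y)" to "(x >> y) != 0".  */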
13733 if ((code == LT_EXPR || code == GE_EXPR)
13734 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13735 && TREE_CODE (arg1) == LSHIFT_EXPR
13736 && integer_onep (TREE_OPERAND (arg1, 0)))
13737 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13738 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13739 TREE_OPERAND (arg1, 1)),
13740 build_zero_cst (TREE_TYPE (arg0)));
13741
13742       /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
13743 	 otherwise Y might be >= # of bits in X's type and thus e.g.
13744 	 (unsigned char) (1 << Y) for Y == 15 might be 0.
13745 	 If the cast is widening, then 1 << Y should have unsigned type,
13746 	 otherwise if Y is the number of bits in the signed shift type
13747 	 minus 1, we can't optimize this.  E.g. (unsigned long long)
13748 	 (1 << Y) for Y == 31 might be 0xffffffff80000000.  */
13749 if ((code == LT_EXPR || code == GE_EXPR)
13750 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13751 && CONVERT_EXPR_P (arg1)
13752 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13753 && (TYPE_PRECISION (TREE_TYPE (arg1))
13754 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13755 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13756 || (TYPE_PRECISION (TREE_TYPE (arg1))
13757 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13758 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13759 {
13760 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13761 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13762 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13763 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13764 build_zero_cst (TREE_TYPE (arg0)));
13765 }
13766
13767 return NULL_TREE;
13768
13769 case UNORDERED_EXPR:
13770 case ORDERED_EXPR:
13771 case UNLT_EXPR:
13772 case UNLE_EXPR:
13773 case UNGT_EXPR:
13774 case UNGE_EXPR:
13775 case UNEQ_EXPR:
13776 case LTGT_EXPR:
13777 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13778 {
13779 t1 = fold_relational_const (code, type, arg0, arg1);
13780 if (t1 != NULL_TREE)
13781 return t1;
13782 }
13783
13784 /* If the first operand is NaN, the result is constant. */
13785 if (TREE_CODE (arg0) == REAL_CST
13786 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13787 && (code != LTGT_EXPR || ! flag_trapping_math))
13788 {
13789 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13790 ? integer_zero_node
13791 : integer_one_node;
13792 return omit_one_operand_loc (loc, type, t1, arg1);
13793 }
13794
13795 /* If the second operand is NaN, the result is constant. */
13796 if (TREE_CODE (arg1) == REAL_CST
13797 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13798 && (code != LTGT_EXPR || ! flag_trapping_math))
13799 {
13800 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13801 ? integer_zero_node
13802 : integer_one_node;
13803 return omit_one_operand_loc (loc, type, t1, arg0);
13804 }
13805
13806 /* Simplify unordered comparison of something with itself. */
13807 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13808 && operand_equal_p (arg0, arg1, 0))
13809 return constant_boolean_node (1, type);
13810
13811 if (code == LTGT_EXPR
13812 && !flag_trapping_math
13813 && operand_equal_p (arg0, arg1, 0))
13814 return constant_boolean_node (0, type);
13815
13816 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
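/* E.g. with hypothetical floats f and g, "(double) f > (double) g"
   folds to "f > g"; widening both operands cannot change the result.  */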
13817 {
13818 tree targ0 = strip_float_extensions (arg0);
13819 tree targ1 = strip_float_extensions (arg1);
13820 tree newtype = TREE_TYPE (targ0);
13821
13822 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13823 newtype = TREE_TYPE (targ1);
13824
13825 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13826 return fold_build2_loc (loc, code, type,
13827 fold_convert_loc (loc, newtype, targ0),
13828 fold_convert_loc (loc, newtype, targ1));
13829 }
13830
13831 return NULL_TREE;
13832
13833 case COMPOUND_EXPR:
13834 /* When pedantic, a compound expression can be neither an lvalue
13835 nor an integer constant expression. */
13836 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13837 return NULL_TREE;
13838       /* Don't let (0, 0) be a null pointer constant.  */
13839 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13840 : fold_convert_loc (loc, type, arg1);
13841 return pedantic_non_lvalue_loc (loc, tem);
13842
13843 case COMPLEX_EXPR:
13844 if ((TREE_CODE (arg0) == REAL_CST
13845 && TREE_CODE (arg1) == REAL_CST)
13846 || (TREE_CODE (arg0) == INTEGER_CST
13847 && TREE_CODE (arg1) == INTEGER_CST))
13848 return build_complex (type, arg0, arg1);
13849 if (TREE_CODE (arg0) == REALPART_EXPR
13850 && TREE_CODE (arg1) == IMAGPART_EXPR
13851 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13852 && operand_equal_p (TREE_OPERAND (arg0, 0),
13853 TREE_OPERAND (arg1, 0), 0))
13854 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13855 TREE_OPERAND (arg1, 0));
13856 return NULL_TREE;
13857
13858 case ASSERT_EXPR:
13859 /* An ASSERT_EXPR should never be passed to fold_binary. */
13860 gcc_unreachable ();
13861
13862 case VEC_PACK_TRUNC_EXPR:
13863 case VEC_PACK_FIX_TRUNC_EXPR:
13864 {
13865 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13866 tree *elts;
13867
13868 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13869 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13870 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13871 return NULL_TREE;
13872
13873 elts = XALLOCAVEC (tree, nelts);
13874 if (!vec_cst_ctor_to_array (arg0, elts)
13875 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13876 return NULL_TREE;
13877
13878 for (i = 0; i < nelts; i++)
13879 {
13880 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13881 ? NOP_EXPR : FIX_TRUNC_EXPR,
13882 TREE_TYPE (type), elts[i]);
13883 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13884 return NULL_TREE;
13885 }
13886
13887 return build_vector (type, elts);
13888 }
13889
13890 case VEC_WIDEN_MULT_LO_EXPR:
13891 case VEC_WIDEN_MULT_HI_EXPR:
13892 case VEC_WIDEN_MULT_EVEN_EXPR:
13893 case VEC_WIDEN_MULT_ODD_EXPR:
13894 {
13895 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13896 unsigned int out, ofs, scale;
13897 tree *elts;
13898
13899 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13900 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13901 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13902 return NULL_TREE;
13903
13904 elts = XALLOCAVEC (tree, nelts * 4);
13905 if (!vec_cst_ctor_to_array (arg0, elts)
13906 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13907 return NULL_TREE;
13908
13909 if (code == VEC_WIDEN_MULT_LO_EXPR)
13910 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13911 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13912 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13913 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13914 scale = 1, ofs = 0;
13915 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13916 scale = 1, ofs = 1;
13917
13918 for (out = 0; out < nelts; out++)
13919 {
13920 unsigned int in1 = (out << scale) + ofs;
13921 unsigned int in2 = in1 + nelts * 2;
13922 tree t1, t2;
13923
13924 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13925 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13926
13927 if (t1 == NULL_TREE || t2 == NULL_TREE)
13928 return NULL_TREE;
13929 elts[out] = const_binop (MULT_EXPR, t1, t2);
13930 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13931 return NULL_TREE;
13932 }
13933
13934 return build_vector (type, elts);
13935 }
13936
13937 default:
13938 return NULL_TREE;
13939 } /* switch (code) */
13940 }
13941
13942 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13943 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13944 of GOTO_EXPR. */
13945
13946 static tree
13947 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13948 {
13949 switch (TREE_CODE (*tp))
13950 {
13951 case LABEL_EXPR:
13952 return *tp;
13953
13954 case GOTO_EXPR:
13955 *walk_subtrees = 0;
13956
13957 /* ... fall through ... */
13958
13959 default:
13960 return NULL_TREE;
13961 }
13962 }
13963
13964 /* Return whether the sub-tree ST contains a label which is accessible from
13965 outside the sub-tree. */
13966
13967 static bool
13968 contains_label_p (tree st)
13969 {
13970 return
13971     (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13972 }
13973
13974 /* Fold a ternary expression of code CODE and type TYPE with operands
13975 OP0, OP1, and OP2. Return the folded expression if folding is
13976 successful. Otherwise, return NULL_TREE. */
13977
13978 tree
13979 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13980 tree op0, tree op1, tree op2)
13981 {
13982 tree tem;
13983 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13984 enum tree_code_class kind = TREE_CODE_CLASS (code);
13985
13986 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13987 && TREE_CODE_LENGTH (code) == 3);
13988
13989 /* Strip any conversions that don't change the mode. This is safe
13990 for every expression, except for a comparison expression because
13991 its signedness is derived from its operands. So, in the latter
13992 case, only strip conversions that don't change the signedness.
13993
13994 Note that this is done as an internal manipulation within the
13995 constant folder, in order to find the simplest representation of
13996      the arguments so that their form can be studied.  In any case,
13997 the appropriate type conversions should be put back in the tree
13998 that will get out of the constant folder. */
13999 if (op0)
14000 {
14001 arg0 = op0;
14002 STRIP_NOPS (arg0);
14003 }
14004
14005 if (op1)
14006 {
14007 arg1 = op1;
14008 STRIP_NOPS (arg1);
14009 }
14010
14011 if (op2)
14012 {
14013 arg2 = op2;
14014 STRIP_NOPS (arg2);
14015 }
14016
14017 switch (code)
14018 {
14019 case COMPONENT_REF:
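/* Fold a COMPONENT_REF of a CONSTRUCTOR to the matching initializer
   value, e.g. "((struct s) {1, 2}).b" to 2 for a hypothetical
   struct s with fields a and b.  */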
14020 if (TREE_CODE (arg0) == CONSTRUCTOR
14021 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14022 {
14023 unsigned HOST_WIDE_INT idx;
14024 tree field, value;
14025 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14026 if (field == arg1)
14027 return value;
14028 }
14029 return NULL_TREE;
14030
14031 case COND_EXPR:
14032 case VEC_COND_EXPR:
14033 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14034 so all simple results must be passed through pedantic_non_lvalue. */
14035 if (TREE_CODE (arg0) == INTEGER_CST)
14036 {
14037 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14038 tem = integer_zerop (arg0) ? op2 : op1;
14039 /* Only optimize constant conditions when the selected branch
14040 has the same type as the COND_EXPR. This avoids optimizing
14041 away "c ? x : throw", where the throw has a void type.
14042 	     Also avoid throwing away an operand that contains a label.  */
14043 if ((!TREE_SIDE_EFFECTS (unused_op)
14044 || !contains_label_p (unused_op))
14045 && (! VOID_TYPE_P (TREE_TYPE (tem))
14046 || VOID_TYPE_P (type)))
14047 return pedantic_non_lvalue_loc (loc, tem);
14048 return NULL_TREE;
14049 }
14050 else if (TREE_CODE (arg0) == VECTOR_CST)
14051 {
14052 if (integer_all_onesp (arg0))
14053 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14054 if (integer_zerop (arg0))
14055 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14056
14057 if ((TREE_CODE (arg1) == VECTOR_CST
14058 || TREE_CODE (arg1) == CONSTRUCTOR)
14059 && (TREE_CODE (arg2) == VECTOR_CST
14060 || TREE_CODE (arg2) == CONSTRUCTOR))
14061 {
14062 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14063 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14064 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14065 for (i = 0; i < nelts; i++)
14066 {
14067 tree val = VECTOR_CST_ELT (arg0, i);
14068 if (integer_all_onesp (val))
14069 sel[i] = i;
14070 else if (integer_zerop (val))
14071 sel[i] = nelts + i;
14072 else /* Currently unreachable. */
14073 return NULL_TREE;
14074 }
14075 tree t = fold_vec_perm (type, arg1, arg2, sel);
14076 if (t != NULL_TREE)
14077 return t;
14078 }
14079 }
14080
14081 if (operand_equal_p (arg1, op2, 0))
14082 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14083
14084 /* If we have A op B ? A : C, we may be able to convert this to a
14085 simpler expression, depending on the operation and the values
14086 of B and C. Signed zeros prevent all of these transformations,
14087 for reasons given above each one.
14088
14089 Also try swapping the arguments and inverting the conditional. */
14090 if (COMPARISON_CLASS_P (arg0)
14091 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14092 arg1, TREE_OPERAND (arg0, 1))
14093 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14094 {
14095 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14096 if (tem)
14097 return tem;
14098 }
14099
14100 if (COMPARISON_CLASS_P (arg0)
14101 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14102 op2,
14103 TREE_OPERAND (arg0, 1))
14104 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14105 {
14106 location_t loc0 = expr_location_or (arg0, loc);
14107 tem = fold_invert_truthvalue (loc0, arg0);
14108 if (tem && COMPARISON_CLASS_P (tem))
14109 {
14110 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14111 if (tem)
14112 return tem;
14113 }
14114 }
14115
14116 /* If the second operand is simpler than the third, swap them
14117 since that produces better jump optimization results. */
14118 if (truth_value_p (TREE_CODE (arg0))
14119 && tree_swap_operands_p (op1, op2, false))
14120 {
14121 location_t loc0 = expr_location_or (arg0, loc);
14122 /* See if this can be inverted. If it can't, possibly because
14123 it was a floating-point inequality comparison, don't do
14124 anything. */
14125 tem = fold_invert_truthvalue (loc0, arg0);
14126 if (tem)
14127 return fold_build3_loc (loc, code, type, tem, op2, op1);
14128 }
14129
14130 /* Convert A ? 1 : 0 to simply A. */
14131 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14132 : (integer_onep (op1)
14133 && !VECTOR_TYPE_P (type)))
14134 && integer_zerop (op2)
14135 /* If we try to convert OP0 to our type, the
14136 call to fold will try to move the conversion inside
14137 a COND, which will recurse. In that case, the COND_EXPR
14138 is probably the best choice, so leave it alone. */
14139 && type == TREE_TYPE (arg0))
14140 return pedantic_non_lvalue_loc (loc, arg0);
14141
14142 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14143 over COND_EXPR in cases such as floating point comparisons. */
14144 if (integer_zerop (op1)
14145 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14146 : (integer_onep (op2)
14147 && !VECTOR_TYPE_P (type)))
14148 && truth_value_p (TREE_CODE (arg0)))
14149 return pedantic_non_lvalue_loc (loc,
14150 fold_convert_loc (loc, type,
14151 invert_truthvalue_loc (loc,
14152 arg0)));
14153
14154 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14155 if (TREE_CODE (arg0) == LT_EXPR
14156 && integer_zerop (TREE_OPERAND (arg0, 1))
14157 && integer_zerop (op2)
14158 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14159 {
14160 /* sign_bit_p looks through both zero and sign extensions,
14161 but for this optimization only sign extensions are
14162 usable. */
14163 tree tem2 = TREE_OPERAND (arg0, 0);
14164 while (tem != tem2)
14165 {
14166 if (TREE_CODE (tem2) != NOP_EXPR
14167 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14168 {
14169 tem = NULL_TREE;
14170 break;
14171 }
14172 tem2 = TREE_OPERAND (tem2, 0);
14173 }
14174 /* sign_bit_p only checks ARG1 bits within A's precision.
14175 If <sign bit of A> has wider type than A, bits outside
14176 of A's precision in <sign bit of A> need to be checked.
14177 If they are all 0, this optimization needs to be done
14178 	     in unsigned A's type; if they are all 1, in signed A's type;
14179 otherwise this can't be done. */
14180 if (tem
14181 && TYPE_PRECISION (TREE_TYPE (tem))
14182 < TYPE_PRECISION (TREE_TYPE (arg1))
14183 && TYPE_PRECISION (TREE_TYPE (tem))
14184 < TYPE_PRECISION (type))
14185 {
14186 int inner_width, outer_width;
14187 tree tem_type;
14188
14189 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14190 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14191 if (outer_width > TYPE_PRECISION (type))
14192 outer_width = TYPE_PRECISION (type);
14193
14194 wide_int mask = wi::shifted_mask
14195 (inner_width, outer_width - inner_width, false,
14196 TYPE_PRECISION (TREE_TYPE (arg1)));
14197
14198 wide_int common = mask & arg1;
14199 if (common == mask)
14200 {
14201 tem_type = signed_type_for (TREE_TYPE (tem));
14202 tem = fold_convert_loc (loc, tem_type, tem);
14203 }
14204 else if (common == 0)
14205 {
14206 tem_type = unsigned_type_for (TREE_TYPE (tem));
14207 tem = fold_convert_loc (loc, tem_type, tem);
14208 }
14209 else
14210 tem = NULL;
14211 }
14212
14213 if (tem)
14214 return
14215 fold_convert_loc (loc, type,
14216 fold_build2_loc (loc, BIT_AND_EXPR,
14217 TREE_TYPE (tem), tem,
14218 fold_convert_loc (loc,
14219 TREE_TYPE (tem),
14220 arg1)));
14221 }
14222
14223 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14224 already handled above. */
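/* E.g. "(x >> 3) & 1 ? (1 << 3) : 0" folds to "x & 8" for a
   hypothetical x.  */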
14225 if (TREE_CODE (arg0) == BIT_AND_EXPR
14226 && integer_onep (TREE_OPERAND (arg0, 1))
14227 && integer_zerop (op2)
14228 && integer_pow2p (arg1))
14229 {
14230 tree tem = TREE_OPERAND (arg0, 0);
14231 STRIP_NOPS (tem);
14232 if (TREE_CODE (tem) == RSHIFT_EXPR
14233 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14234 	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
14235 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
14236 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14237 TREE_OPERAND (tem, 0), arg1);
14238 }
14239
14240 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14241 is probably obsolete because the first operand should be a
14242 truth value (that's why we have the two cases above), but let's
14243 leave it in until we can confirm this for all front-ends. */
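/* E.g. "(x & 4) != 0 ? 4 : 0" folds back to "x & 4" for a
   hypothetical x.  */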
14244 if (integer_zerop (op2)
14245 && TREE_CODE (arg0) == NE_EXPR
14246 && integer_zerop (TREE_OPERAND (arg0, 1))
14247 && integer_pow2p (arg1)
14248 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14249 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14250 arg1, OEP_ONLY_CONST))
14251 return pedantic_non_lvalue_loc (loc,
14252 fold_convert_loc (loc, type,
14253 TREE_OPERAND (arg0, 0)));
14254
14255 /* Disable the transformations below for vectors, since
14256 fold_binary_op_with_conditional_arg may undo them immediately,
14257 yielding an infinite loop. */
14258 if (code == VEC_COND_EXPR)
14259 return NULL_TREE;
14260
14261 /* Convert A ? B : 0 into A && B if A and B are truth values. */
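/* E.g. "a ? b : 0" with hypothetical truth values a and b folds to
   "a && b" (a BIT_AND_EXPR of the masks for VEC_COND_EXPR).  */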
14262 if (integer_zerop (op2)
14263 && truth_value_p (TREE_CODE (arg0))
14264 && truth_value_p (TREE_CODE (arg1))
14265 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14266 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14267 : TRUTH_ANDIF_EXPR,
14268 type, fold_convert_loc (loc, type, arg0), arg1);
14269
14270 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14271       if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14272 && truth_value_p (TREE_CODE (arg0))
14273 && truth_value_p (TREE_CODE (arg1))
14274 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14275 {
14276 location_t loc0 = expr_location_or (arg0, loc);
14277 /* Only perform transformation if ARG0 is easily inverted. */
14278 tem = fold_invert_truthvalue (loc0, arg0);
14279 if (tem)
14280 return fold_build2_loc (loc, code == VEC_COND_EXPR
14281 ? BIT_IOR_EXPR
14282 : TRUTH_ORIF_EXPR,
14283 type, fold_convert_loc (loc, type, tem),
14284 arg1);
14285 }
14286
14287 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14288 if (integer_zerop (arg1)
14289 && truth_value_p (TREE_CODE (arg0))
14290 && truth_value_p (TREE_CODE (op2))
14291 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14292 {
14293 location_t loc0 = expr_location_or (arg0, loc);
14294 /* Only perform transformation if ARG0 is easily inverted. */
14295 tem = fold_invert_truthvalue (loc0, arg0);
14296 if (tem)
14297 return fold_build2_loc (loc, code == VEC_COND_EXPR
14298 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14299 type, fold_convert_loc (loc, type, tem),
14300 op2);
14301 }
14302
14303 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14304       if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14305 && truth_value_p (TREE_CODE (arg0))
14306 && truth_value_p (TREE_CODE (op2))
14307 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14308 return fold_build2_loc (loc, code == VEC_COND_EXPR
14309 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14310 type, fold_convert_loc (loc, type, arg0), op2);
14311
14312 return NULL_TREE;
14313
14314 case CALL_EXPR:
14315 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14316 of fold_ternary on them. */
14317 gcc_unreachable ();
14318
14319 case BIT_FIELD_REF:
14320 if ((TREE_CODE (arg0) == VECTOR_CST
14321 || (TREE_CODE (arg0) == CONSTRUCTOR
14322 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14323 && (type == TREE_TYPE (TREE_TYPE (arg0))
14324 || (TREE_CODE (type) == VECTOR_TYPE
14325 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14326 {
14327 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14328 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14329 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14330 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14331
14332 if (n != 0
14333 && (idx % width) == 0
14334 && (n % width) == 0
14335 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14336 {
14337 idx = idx / width;
14338 n = n / width;
14339
14340 if (TREE_CODE (arg0) == VECTOR_CST)
14341 {
14342 if (n == 1)
14343 return VECTOR_CST_ELT (arg0, idx);
14344
14345 tree *vals = XALLOCAVEC (tree, n);
14346 for (unsigned i = 0; i < n; ++i)
14347 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14348 return build_vector (type, vals);
14349 }
14350
14351 /* Constructor elements can be subvectors. */
14352 unsigned HOST_WIDE_INT k = 1;
14353 if (CONSTRUCTOR_NELTS (arg0) != 0)
14354 {
14355 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14356 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14357 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14358 }
14359
14360 /* We keep an exact subset of the constructor elements. */
14361 if ((idx % k) == 0 && (n % k) == 0)
14362 {
14363 if (CONSTRUCTOR_NELTS (arg0) == 0)
14364 return build_constructor (type, NULL);
14365 idx /= k;
14366 n /= k;
14367 if (n == 1)
14368 {
14369 if (idx < CONSTRUCTOR_NELTS (arg0))
14370 return CONSTRUCTOR_ELT (arg0, idx)->value;
14371 return build_zero_cst (type);
14372 }
14373
14374 vec<constructor_elt, va_gc> *vals;
14375 vec_alloc (vals, n);
14376 for (unsigned i = 0;
14377 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14378 ++i)
14379 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14380 CONSTRUCTOR_ELT
14381 (arg0, idx + i)->value);
14382 return build_constructor (type, vals);
14383 }
14384 /* The bitfield references a single constructor element. */
14385 else if (idx + n <= (idx / k + 1) * k)
14386 {
14387 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14388 return build_zero_cst (type);
14389 else if (n == k)
14390 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14391 else
14392 return fold_build3_loc (loc, code, type,
14393 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14394 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14395 }
14396 }
14397 }
14398
14399 /* A bit-field-ref that referenced the full argument can be stripped. */
14400 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14401 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14402 && integer_zerop (op2))
14403 return fold_convert_loc (loc, type, arg0);
14404
14405 /* On constants we can use native encode/interpret to constant
14406 fold (nearly) all BIT_FIELD_REFs. */
14407 if (CONSTANT_CLASS_P (arg0)
14408 && can_native_interpret_type_p (type)
14409 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14410 /* This limitation should not be necessary, we just need to
14411 round this up to mode size. */
14412 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14413 /* Need bit-shifting of the buffer to relax the following. */
14414 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14415 {
14416 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14417 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14418 unsigned HOST_WIDE_INT clen;
14419 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14420 /* ??? We cannot tell native_encode_expr to start at
14421 some random byte only. So limit us to a reasonable amount
14422 of work. */
14423 if (clen <= 4096)
14424 {
14425 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14426 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14427 if (len > 0
14428 && len * BITS_PER_UNIT >= bitpos + bitsize)
14429 {
14430 tree v = native_interpret_expr (type,
14431 b + bitpos / BITS_PER_UNIT,
14432 bitsize / BITS_PER_UNIT);
14433 if (v)
14434 return v;
14435 }
14436 }
14437 }
14438
14439 return NULL_TREE;
14440
14441 case FMA_EXPR:
14442 /* For integers we can decompose the FMA if possible. */
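/* E.g. "FMA <2, 3, z>" folds to "6 + z", and "FMA <x, y, 0>" folds
   to "x * y", for hypothetical operands.  */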
14443 if (TREE_CODE (arg0) == INTEGER_CST
14444 && TREE_CODE (arg1) == INTEGER_CST)
14445 return fold_build2_loc (loc, PLUS_EXPR, type,
14446 const_binop (MULT_EXPR, arg0, arg1), arg2);
14447 if (integer_zerop (arg2))
14448 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14449
14450 return fold_fma (loc, type, arg0, arg1, arg2);
14451
14452 case VEC_PERM_EXPR:
14453 if (TREE_CODE (arg2) == VECTOR_CST)
14454 {
14455 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14456 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14457 bool need_mask_canon = false;
14458 bool all_in_vec0 = true;
14459 bool all_in_vec1 = true;
14460 bool maybe_identity = true;
14461 bool single_arg = (op0 == op1);
14462 bool changed = false;
14463
14464 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14465 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14466 for (i = 0; i < nelts; i++)
14467 {
14468 tree val = VECTOR_CST_ELT (arg2, i);
14469 if (TREE_CODE (val) != INTEGER_CST)
14470 return NULL_TREE;
14471
14472 /* Make sure that the perm value is in an acceptable
14473 range. */
14474 wide_int t = val;
14475 if (wi::gtu_p (t, mask))
14476 {
14477 need_mask_canon = true;
14478 sel[i] = t.to_uhwi () & mask;
14479 }
14480 else
14481 sel[i] = t.to_uhwi ();
14482
14483 if (sel[i] < nelts)
14484 all_in_vec1 = false;
14485 else
14486 all_in_vec0 = false;
14487
14488 if ((sel[i] & (nelts-1)) != i)
14489 maybe_identity = false;
14490 }
14491
14492 if (maybe_identity)
14493 {
14494 if (all_in_vec0)
14495 return op0;
14496 if (all_in_vec1)
14497 return op1;
14498 }
14499
14500 if (all_in_vec0)
14501 op1 = op0;
14502 else if (all_in_vec1)
14503 {
14504 op0 = op1;
14505 for (i = 0; i < nelts; i++)
14506 sel[i] -= nelts;
14507 need_mask_canon = true;
14508 }
14509
14510 if ((TREE_CODE (op0) == VECTOR_CST
14511 || TREE_CODE (op0) == CONSTRUCTOR)
14512 && (TREE_CODE (op1) == VECTOR_CST
14513 || TREE_CODE (op1) == CONSTRUCTOR))
14514 {
14515 tree t = fold_vec_perm (type, op0, op1, sel);
14516 if (t != NULL_TREE)
14517 return t;
14518 }
14519
14520 if (op0 == op1 && !single_arg)
14521 changed = true;
14522
14523 if (need_mask_canon && arg2 == op2)
14524 {
14525 tree *tsel = XALLOCAVEC (tree, nelts);
14526 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14527 for (i = 0; i < nelts; i++)
14528 tsel[i] = build_int_cst (eltype, sel[i]);
14529 op2 = build_vector (TREE_TYPE (arg2), tsel);
14530 changed = true;
14531 }
14532
14533 if (changed)
14534 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14535 }
14536 return NULL_TREE;
14537
14538 default:
14539 return NULL_TREE;
14540 } /* switch (code) */
14541 }
14542
14543 /* Perform constant folding and related simplification of EXPR.
14544 The related simplifications include x*1 => x, x*0 => 0, etc.,
14545 and application of the associative law.
14546 NOP_EXPR conversions may be removed freely (as long as we
14547 are careful not to change the type of the overall expression).
14548 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14549 but we can constant-fold them if they have constant operands. */
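/* A minimal usage sketch (illustrative only): passing the tree built by
   build2 (PLUS_EXPR, integer_type_node, integer_one_node,
   integer_one_node) through fold yields an INTEGER_CST with value 2.  */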
14550
14551 #ifdef ENABLE_FOLD_CHECKING
14552 # define fold(x) fold_1 (x)
14553 static tree fold_1 (tree);
14554 static
14555 #endif
14556 tree
14557 fold (tree expr)
14558 {
14559 const tree t = expr;
14560 enum tree_code code = TREE_CODE (t);
14561 enum tree_code_class kind = TREE_CODE_CLASS (code);
14562 tree tem;
14563 location_t loc = EXPR_LOCATION (expr);
14564
14565 /* Return right away if a constant. */
14566 if (kind == tcc_constant)
14567 return t;
14568
14569 /* CALL_EXPR-like objects with variable numbers of operands are
14570 treated specially. */
14571 if (kind == tcc_vl_exp)
14572 {
14573 if (code == CALL_EXPR)
14574 {
14575 tem = fold_call_expr (loc, expr, false);
14576 return tem ? tem : expr;
14577 }
14578 return expr;
14579 }
14580
14581 if (IS_EXPR_CODE_CLASS (kind))
14582 {
14583 tree type = TREE_TYPE (t);
14584 tree op0, op1, op2;
14585
14586 switch (TREE_CODE_LENGTH (code))
14587 {
14588 case 1:
14589 op0 = TREE_OPERAND (t, 0);
14590 tem = fold_unary_loc (loc, code, type, op0);
14591 return tem ? tem : expr;
14592 case 2:
14593 op0 = TREE_OPERAND (t, 0);
14594 op1 = TREE_OPERAND (t, 1);
14595 tem = fold_binary_loc (loc, code, type, op0, op1);
14596 return tem ? tem : expr;
14597 case 3:
14598 op0 = TREE_OPERAND (t, 0);
14599 op1 = TREE_OPERAND (t, 1);
14600 op2 = TREE_OPERAND (t, 2);
14601 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14602 return tem ? tem : expr;
14603 default:
14604 break;
14605 }
14606 }
14607
14608 switch (code)
14609 {
14610 case ARRAY_REF:
14611 {
14612 tree op0 = TREE_OPERAND (t, 0);
14613 tree op1 = TREE_OPERAND (t, 1);
14614
14615 if (TREE_CODE (op1) == INTEGER_CST
14616 && TREE_CODE (op0) == CONSTRUCTOR
14617 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14618 {
14619 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14620 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14621 unsigned HOST_WIDE_INT begin = 0;
14622
14623 /* Find a matching index by means of a binary search. */
14624 while (begin != end)
14625 {
14626 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14627 tree index = (*elts)[middle].index;
14628
14629 if (TREE_CODE (index) == INTEGER_CST
14630 && tree_int_cst_lt (index, op1))
14631 begin = middle + 1;
14632 else if (TREE_CODE (index) == INTEGER_CST
14633 && tree_int_cst_lt (op1, index))
14634 end = middle;
14635 else if (TREE_CODE (index) == RANGE_EXPR
14636 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14637 begin = middle + 1;
14638 else if (TREE_CODE (index) == RANGE_EXPR
14639 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14640 end = middle;
14641 else
14642 return (*elts)[middle].value;
14643 }
14644 }
14645
14646 return t;
14647 }
14648
14649 /* Return a VECTOR_CST if possible. */
14650 case CONSTRUCTOR:
14651 {
14652 tree type = TREE_TYPE (t);
14653 if (TREE_CODE (type) != VECTOR_TYPE)
14654 return t;
14655
14656 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14657 unsigned HOST_WIDE_INT idx, pos = 0;
14658 tree value;
14659
14660 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14661 {
14662 if (!CONSTANT_CLASS_P (value))
14663 return t;
14664 if (TREE_CODE (value) == VECTOR_CST)
14665 {
14666 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14667 vec[pos++] = VECTOR_CST_ELT (value, i);
14668 }
14669 else
14670 vec[pos++] = value;
14671 }
14672 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14673 vec[pos] = build_zero_cst (TREE_TYPE (type));
14674
14675 return build_vector (type, vec);
14676 }
14677
14678 case CONST_DECL:
14679 return fold (DECL_INITIAL (t));
14680
14681 default:
14682 return t;
14683 } /* switch (code) */
14684 }
14685
14686 #ifdef ENABLE_FOLD_CHECKING
14687 #undef fold
14688
14689 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14690 hash_table <pointer_hash <tree_node> >);
14691 static void fold_check_failed (const_tree, const_tree);
14692 void print_fold_checksum (const_tree);
14693
14694 /* When --enable-checking=fold, compute a digest of expr before
14695    and after the actual fold call, to check that fold did not
14696    accidentally change the original expr.  */
14697
14698 tree
14699 fold (tree expr)
14700 {
14701 tree ret;
14702 struct md5_ctx ctx;
14703 unsigned char checksum_before[16], checksum_after[16];
14704 hash_table <pointer_hash <tree_node> > ht;
14705
14706 ht.create (32);
14707 md5_init_ctx (&ctx);
14708 fold_checksum_tree (expr, &ctx, ht);
14709 md5_finish_ctx (&ctx, checksum_before);
14710 ht.empty ();
14711
14712 ret = fold_1 (expr);
14713
14714 md5_init_ctx (&ctx);
14715 fold_checksum_tree (expr, &ctx, ht);
14716 md5_finish_ctx (&ctx, checksum_after);
14717 ht.dispose ();
14718
14719 if (memcmp (checksum_before, checksum_after, 16))
14720 fold_check_failed (expr, ret);
14721
14722 return ret;
14723 }
14724
14725 void
14726 print_fold_checksum (const_tree expr)
14727 {
14728 struct md5_ctx ctx;
14729 unsigned char checksum[16], cnt;
14730 hash_table <pointer_hash <tree_node> > ht;
14731
14732 ht.create (32);
14733 md5_init_ctx (&ctx);
14734 fold_checksum_tree (expr, &ctx, ht);
14735 md5_finish_ctx (&ctx, checksum);
14736 ht.dispose ();
14737 for (cnt = 0; cnt < 16; ++cnt)
14738 fprintf (stderr, "%02x", checksum[cnt]);
14739 putc ('\n', stderr);
14740 }
14741
14742 static void
14743 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14744 {
14745 internal_error ("fold check: original tree changed by fold");
14746 }
14747
14748 static void
14749 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14750 hash_table <pointer_hash <tree_node> > ht)
14751 {
14752 tree_node **slot;
14753 enum tree_code code;
14754 union tree_node buf;
14755 int i, len;
14756
14757 recursive_label:
14758 if (expr == NULL)
14759 return;
14760 slot = ht.find_slot (expr, INSERT);
14761 if (*slot != NULL)
14762 return;
14763 *slot = CONST_CAST_TREE (expr);
14764 code = TREE_CODE (expr);
14765 if (TREE_CODE_CLASS (code) == tcc_declaration
14766 && DECL_ASSEMBLER_NAME_SET_P (expr))
14767 {
14768 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14769 memcpy ((char *) &buf, expr, tree_size (expr));
14770 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14771 expr = (tree) &buf;
14772 }
14773 else if (TREE_CODE_CLASS (code) == tcc_type
14774 && (TYPE_POINTER_TO (expr)
14775 || TYPE_REFERENCE_TO (expr)
14776 || TYPE_CACHED_VALUES_P (expr)
14777 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14778 || TYPE_NEXT_VARIANT (expr)))
14779 {
14780 /* Allow these fields to be modified. */
14781 tree tmp;
14782 memcpy ((char *) &buf, expr, tree_size (expr));
14783 expr = tmp = (tree) &buf;
14784 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14785 TYPE_POINTER_TO (tmp) = NULL;
14786 TYPE_REFERENCE_TO (tmp) = NULL;
14787 TYPE_NEXT_VARIANT (tmp) = NULL;
14788 if (TYPE_CACHED_VALUES_P (tmp))
14789 {
14790 TYPE_CACHED_VALUES_P (tmp) = 0;
14791 TYPE_CACHED_VALUES (tmp) = NULL;
14792 }
14793 }
14794 md5_process_bytes (expr, tree_size (expr), ctx);
14795 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14796 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14797 if (TREE_CODE_CLASS (code) != tcc_type
14798 && TREE_CODE_CLASS (code) != tcc_declaration
14799 && code != TREE_LIST
14800 && code != SSA_NAME
14801 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14802 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14803 switch (TREE_CODE_CLASS (code))
14804 {
14805 case tcc_constant:
14806 switch (code)
14807 {
14808 case STRING_CST:
14809 md5_process_bytes (TREE_STRING_POINTER (expr),
14810 TREE_STRING_LENGTH (expr), ctx);
14811 break;
14812 case COMPLEX_CST:
14813 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14814 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14815 break;
14816 case VECTOR_CST:
14817 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14818 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14819 break;
14820 default:
14821 break;
14822 }
14823 break;
14824 case tcc_exceptional:
14825 switch (code)
14826 {
14827 case TREE_LIST:
14828 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14829 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14830 expr = TREE_CHAIN (expr);
14831 goto recursive_label;
14832 break;
14833 case TREE_VEC:
14834 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14835 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14836 break;
14837 default:
14838 break;
14839 }
14840 break;
14841 case tcc_expression:
14842 case tcc_reference:
14843 case tcc_comparison:
14844 case tcc_unary:
14845 case tcc_binary:
14846 case tcc_statement:
14847 case tcc_vl_exp:
14848 len = TREE_OPERAND_LENGTH (expr);
14849 for (i = 0; i < len; ++i)
14850 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14851 break;
14852 case tcc_declaration:
14853 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14854 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14855 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14856 {
14857 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14858 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14859 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14860 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14861 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14862 }
14863 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14864 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14865
14866 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14867 {
14868 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14869 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14870 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14871 }
14872 break;
14873 case tcc_type:
14874 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14875 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14876 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14877 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14878 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14879 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14880 if (INTEGRAL_TYPE_P (expr)
14881 || SCALAR_FLOAT_TYPE_P (expr))
14882 {
14883 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14884 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14885 }
14886 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14887 if (TREE_CODE (expr) == RECORD_TYPE
14888 || TREE_CODE (expr) == UNION_TYPE
14889 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14890 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14891 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14892 break;
14893 default:
14894 break;
14895 }
14896 }
14897
14898 /* Helper function for outputting the checksum of a tree T. When
14899 debugging with gdb, you can "define mynext" to be "next" followed
14900 by "call debug_fold_checksum (op0)", then just trace down till the
14901 outputs differ. */
14902
14903 DEBUG_FUNCTION void
14904 debug_fold_checksum (const_tree t)
14905 {
14906 int i;
14907 unsigned char checksum[16];
14908 struct md5_ctx ctx;
14909 hash_table <pointer_hash <tree_node> > ht;
14910 ht.create (32);
14911
14912 md5_init_ctx (&ctx);
14913 fold_checksum_tree (t, &ctx, ht);
14914 md5_finish_ctx (&ctx, checksum);
14915 ht.empty ();
14916
14917 for (i = 0; i < 16; i++)
14918 fprintf (stderr, "%d ", checksum[i]);
14919
14920 fprintf (stderr, "\n");
14921 }
14922
14923 #endif
14924
14925 /* Fold a unary tree expression with code CODE of type TYPE with an
14926 operand OP0. LOC is the location of the resulting expression.
14927 Return a folded expression if successful. Otherwise, return a tree
14928 expression with code CODE of type TYPE with an operand OP0. */
14929
14930 tree
14931 fold_build1_stat_loc (location_t loc,
14932 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14933 {
14934 tree tem;
14935 #ifdef ENABLE_FOLD_CHECKING
14936 unsigned char checksum_before[16], checksum_after[16];
14937 struct md5_ctx ctx;
14938 hash_table <pointer_hash <tree_node> > ht;
14939
14940 ht.create (32);
14941 md5_init_ctx (&ctx);
14942 fold_checksum_tree (op0, &ctx, ht);
14943 md5_finish_ctx (&ctx, checksum_before);
14944 ht.empty ();
14945 #endif
14946
14947 tem = fold_unary_loc (loc, code, type, op0);
14948 if (!tem)
14949 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14950
14951 #ifdef ENABLE_FOLD_CHECKING
14952 md5_init_ctx (&ctx);
14953 fold_checksum_tree (op0, &ctx, ht);
14954 md5_finish_ctx (&ctx, checksum_after);
14955 ht.dispose ();
14956
14957 if (memcmp (checksum_before, checksum_after, 16))
14958 fold_check_failed (op0, tem);
14959 #endif
14960 return tem;
14961 }
14962
14963 /* Fold a binary tree expression with code CODE of type TYPE with
14964 operands OP0 and OP1. LOC is the location of the resulting
14965 expression. Return a folded expression if successful. Otherwise,
14966 return a tree expression with code CODE of type TYPE with operands
14967 OP0 and OP1. */
14968
14969 tree
14970 fold_build2_stat_loc (location_t loc,
14971 enum tree_code code, tree type, tree op0, tree op1
14972 MEM_STAT_DECL)
14973 {
14974 tree tem;
14975 #ifdef ENABLE_FOLD_CHECKING
14976 unsigned char checksum_before_op0[16],
14977 checksum_before_op1[16],
14978 checksum_after_op0[16],
14979 checksum_after_op1[16];
14980 struct md5_ctx ctx;
14981 hash_table <pointer_hash <tree_node> > ht;
14982
14983 ht.create (32);
14984 md5_init_ctx (&ctx);
14985 fold_checksum_tree (op0, &ctx, ht);
14986 md5_finish_ctx (&ctx, checksum_before_op0);
14987 ht.empty ();
14988
14989 md5_init_ctx (&ctx);
14990 fold_checksum_tree (op1, &ctx, ht);
14991 md5_finish_ctx (&ctx, checksum_before_op1);
14992 ht.empty ();
14993 #endif
14994
14995 tem = fold_binary_loc (loc, code, type, op0, op1);
14996 if (!tem)
14997 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14998
14999 #ifdef ENABLE_FOLD_CHECKING
15000 md5_init_ctx (&ctx);
15001 fold_checksum_tree (op0, &ctx, ht);
15002 md5_finish_ctx (&ctx, checksum_after_op0);
15003 ht.empty ();
15004
15005 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15006 fold_check_failed (op0, tem);
15007
15008 md5_init_ctx (&ctx);
15009 fold_checksum_tree (op1, &ctx, ht);
15010 md5_finish_ctx (&ctx, checksum_after_op1);
15011 ht.dispose ();
15012
15013 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15014 fold_check_failed (op1, tem);
15015 #endif
15016 return tem;
15017 }
15018
15019 /* Fold a ternary tree expression with code CODE of type TYPE with
15020 operands OP0, OP1, and OP2. Return a folded expression if
15021 successful. Otherwise, return a tree expression with code CODE of
15022 type TYPE with operands OP0, OP1, and OP2. */
15023
15024 tree
15025 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15026 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15027 {
15028 tree tem;
15029 #ifdef ENABLE_FOLD_CHECKING
15030 unsigned char checksum_before_op0[16],
15031 checksum_before_op1[16],
15032 checksum_before_op2[16],
15033 checksum_after_op0[16],
15034 checksum_after_op1[16],
15035 checksum_after_op2[16];
15036 struct md5_ctx ctx;
15037 hash_table <pointer_hash <tree_node> > ht;
15038
15039 ht.create (32);
15040 md5_init_ctx (&ctx);
15041 fold_checksum_tree (op0, &ctx, ht);
15042 md5_finish_ctx (&ctx, checksum_before_op0);
15043 ht.empty ();
15044
15045 md5_init_ctx (&ctx);
15046 fold_checksum_tree (op1, &ctx, ht);
15047 md5_finish_ctx (&ctx, checksum_before_op1);
15048 ht.empty ();
15049
15050 md5_init_ctx (&ctx);
15051 fold_checksum_tree (op2, &ctx, ht);
15052 md5_finish_ctx (&ctx, checksum_before_op2);
15053 ht.empty ();
15054 #endif
15055
15056 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15057 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15058 if (!tem)
15059 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15060
15061 #ifdef ENABLE_FOLD_CHECKING
15062 md5_init_ctx (&ctx);
15063 fold_checksum_tree (op0, &ctx, ht);
15064 md5_finish_ctx (&ctx, checksum_after_op0);
15065 ht.empty ();
15066
15067 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15068 fold_check_failed (op0, tem);
15069
15070 md5_init_ctx (&ctx);
15071 fold_checksum_tree (op1, &ctx, ht);
15072 md5_finish_ctx (&ctx, checksum_after_op1);
15073 ht.empty ();
15074
15075 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15076 fold_check_failed (op1, tem);
15077
15078 md5_init_ctx (&ctx);
15079 fold_checksum_tree (op2, &ctx, ht);
15080 md5_finish_ctx (&ctx, checksum_after_op2);
15081 ht.dispose ();
15082
15083 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15084 fold_check_failed (op2, tem);
15085 #endif
15086 return tem;
15087 }
15088
15089 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
15090    arguments in ARGARRAY, and a null static chain.
15091 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15092 of type TYPE from the given operands as constructed by build_call_array. */
15093
15094 tree
15095 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15096 int nargs, tree *argarray)
15097 {
15098 tree tem;
15099 #ifdef ENABLE_FOLD_CHECKING
15100 unsigned char checksum_before_fn[16],
15101 checksum_before_arglist[16],
15102 checksum_after_fn[16],
15103 checksum_after_arglist[16];
15104 struct md5_ctx ctx;
15105 hash_table <pointer_hash <tree_node> > ht;
15106 int i;
15107
15108 ht.create (32);
15109 md5_init_ctx (&ctx);
15110 fold_checksum_tree (fn, &ctx, ht);
15111 md5_finish_ctx (&ctx, checksum_before_fn);
15112 ht.empty ();
15113
15114 md5_init_ctx (&ctx);
15115 for (i = 0; i < nargs; i++)
15116 fold_checksum_tree (argarray[i], &ctx, ht);
15117 md5_finish_ctx (&ctx, checksum_before_arglist);
15118 ht.empty ();
15119 #endif
15120
15121 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15122
15123 #ifdef ENABLE_FOLD_CHECKING
15124 md5_init_ctx (&ctx);
15125 fold_checksum_tree (fn, &ctx, ht);
15126 md5_finish_ctx (&ctx, checksum_after_fn);
15127 ht.empty ();
15128
15129 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15130 fold_check_failed (fn, tem);
15131
15132 md5_init_ctx (&ctx);
15133 for (i = 0; i < nargs; i++)
15134 fold_checksum_tree (argarray[i], &ctx, ht);
15135 md5_finish_ctx (&ctx, checksum_after_arglist);
15136 ht.dispose ();
15137
15138 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15139 fold_check_failed (NULL_TREE, tem);
15140 #endif
15141 return tem;
15142 }
15143
15144 /* The functions below perform constant folding and simplification of
15145    initializer expressions.  They behave like "fold_buildN" but ignore
15146    potential run-time traps and exceptions that fold must preserve.  */
15147
15148 #define START_FOLD_INIT \
15149 int saved_signaling_nans = flag_signaling_nans;\
15150 int saved_trapping_math = flag_trapping_math;\
15151 int saved_rounding_math = flag_rounding_math;\
15152 int saved_trapv = flag_trapv;\
15153 int saved_folding_initializer = folding_initializer;\
15154 flag_signaling_nans = 0;\
15155 flag_trapping_math = 0;\
15156 flag_rounding_math = 0;\
15157 flag_trapv = 0;\
15158 folding_initializer = 1;
15159
15160 #define END_FOLD_INIT \
15161 flag_signaling_nans = saved_signaling_nans;\
15162 flag_trapping_math = saved_trapping_math;\
15163 flag_rounding_math = saved_rounding_math;\
15164 flag_trapv = saved_trapv;\
15165 folding_initializer = saved_folding_initializer;
15166
15167 tree
15168 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15169 tree type, tree op)
15170 {
15171 tree result;
15172 START_FOLD_INIT;
15173
15174 result = fold_build1_loc (loc, code, type, op);
15175
15176 END_FOLD_INIT;
15177 return result;
15178 }
15179
15180 tree
15181 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15182 tree type, tree op0, tree op1)
15183 {
15184 tree result;
15185 START_FOLD_INIT;
15186
15187 result = fold_build2_loc (loc, code, type, op0, op1);
15188
15189 END_FOLD_INIT;
15190 return result;
15191 }
15192
15193 tree
15194 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15195 int nargs, tree *argarray)
15196 {
15197 tree result;
15198 START_FOLD_INIT;
15199
15200 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15201
15202 END_FOLD_INIT;
15203 return result;
15204 }
15205
15206 #undef START_FOLD_INIT
15207 #undef END_FOLD_INIT
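
/* Illustrative example (hypothetical trees ONE and ZERO): with
   -ftrapping-math, const_binop refuses to fold a division by zero
   because it could raise an exception at run time.  A static
   initializer has no run time, so

     t = fold_build2_initializer_loc (loc, RDIV_EXPR, type, one, zero);

   clears flag_trapping_math via START_FOLD_INIT and can fold 1.0/0.0
   to an infinity constant where plain fold_build2_loc would not.  */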
15208
15209 /* Determine if the first argument is a multiple of the second argument.
15210    Return 0 if it is not, or if we cannot easily determine it to be.
15211
15212 An example of the sort of thing we care about (at this point; this routine
15213 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15214 fold cases do now) is discovering that
15215
15216 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15217
15218 is a multiple of
15219
15220 SAVE_EXPR (J * 8)
15221
15222 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15223
15224 This code also handles discovering that
15225
15226 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15227
15228 is a multiple of 8 so we don't have to worry about dealing with a
15229 possible remainder.
15230
15231 Note that we *look* inside a SAVE_EXPR only to determine how it was
15232 calculated; it is not safe for fold to do much of anything else with the
15233 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15234 at run time. For example, the latter example above *cannot* be implemented
15235 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15236 evaluation time of the original SAVE_EXPR is not necessarily the same at
15237 the time the new expression is evaluated. The only optimization of this
15238 sort that would be valid is changing
15239
15240 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15241
15242 divided by 8 to
15243
15244 SAVE_EXPR (I) * SAVE_EXPR (J)
15245
15246 (where the same SAVE_EXPR (J) is used in the original and the
15247 transformed version). */
15248
15249 int
15250 multiple_of_p (tree type, const_tree top, const_tree bottom)
15251 {
15252 if (operand_equal_p (top, bottom, 0))
15253 return 1;
15254
15255 if (TREE_CODE (type) != INTEGER_TYPE)
15256 return 0;
15257
15258 switch (TREE_CODE (top))
15259 {
15260 case BIT_AND_EXPR:
15261 /* Bitwise and provides a power of two multiple. If the mask is
15262 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15263 if (!integer_pow2p (bottom))
15264 return 0;
15265 /* FALLTHRU */
15266
15267 case MULT_EXPR:
15268 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15269 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15270
15271 case PLUS_EXPR:
15272 case MINUS_EXPR:
15273 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15274 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15275
15276 case LSHIFT_EXPR:
15277 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15278 {
15279 tree op1, t1;
15280
15281 op1 = TREE_OPERAND (top, 1);
15282 /* const_binop may not detect overflow correctly,
15283 so check for it explicitly here. */
15284 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15285 && 0 != (t1 = fold_convert (type,
15286 const_binop (LSHIFT_EXPR,
15287 size_one_node,
15288 op1)))
15289 && !TREE_OVERFLOW (t1))
15290 return multiple_of_p (type, t1, bottom);
15291 }
15292 return 0;
15293
15294 case NOP_EXPR:
15295 /* Can't handle conversions from non-integral or wider integral type. */
15296 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15297 || (TYPE_PRECISION (type)
15298 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15299 return 0;
15300
15301 /* .. fall through ... */
15302
15303 case SAVE_EXPR:
15304 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15305
15306 case COND_EXPR:
15307 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15308 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15309
15310 case INTEGER_CST:
15311 if (TREE_CODE (bottom) != INTEGER_CST
15312 || integer_zerop (bottom)
15313 || (TYPE_UNSIGNED (type)
15314 && (tree_int_cst_sgn (top) < 0
15315 || tree_int_cst_sgn (bottom) < 0)))
15316 return 0;
15317 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15318 top, bottom));
15319
15320 default:
15321 return 0;
15322 }
15323 }
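
/* Worked example (hypothetical trees I and J): for TOP = I * (J * 8)
   and BOTTOM = 8, the MULT_EXPR case asks whether either factor is a
   multiple of 8.  Recursing into J * 8, another MULT_EXPR, the
   INTEGER_CST case finds 8 % 8 == 0, so the whole product is reported
   to be a multiple of 8, as in the SAVE_EXPR discussion above.  */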
15324
15325 /* Return true if an expression of code CODE and type TYPE is known to be non-negative.  */
15326
15327 static bool
15328 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15329 {
15330 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15331 && truth_value_p (code))
15332 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15333        have a signed:1 type (where the values are -1 and 0).  */
15334 return true;
15335 return false;
15336 }
15337
15338 /* Return true if (CODE OP0) is known to be non-negative. If the return
15339 value is based on the assumption that signed overflow is undefined,
15340 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15341 *STRICT_OVERFLOW_P. */
15342
15343 bool
15344 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15345 bool *strict_overflow_p)
15346 {
15347 if (TYPE_UNSIGNED (type))
15348 return true;
15349
15350 switch (code)
15351 {
15352 case ABS_EXPR:
15353 /* We can't return 1 if flag_wrapv is set because
15354 ABS_EXPR<INT_MIN> = INT_MIN. */
15355 if (!INTEGRAL_TYPE_P (type))
15356 return true;
15357 if (TYPE_OVERFLOW_UNDEFINED (type))
15358 {
15359 *strict_overflow_p = true;
15360 return true;
15361 }
15362 break;
15363
15364 case NON_LVALUE_EXPR:
15365 case FLOAT_EXPR:
15366 case FIX_TRUNC_EXPR:
15367 return tree_expr_nonnegative_warnv_p (op0,
15368 strict_overflow_p);
15369
15370 case NOP_EXPR:
15371 {
15372 tree inner_type = TREE_TYPE (op0);
15373 tree outer_type = type;
15374
15375 if (TREE_CODE (outer_type) == REAL_TYPE)
15376 {
15377 if (TREE_CODE (inner_type) == REAL_TYPE)
15378 return tree_expr_nonnegative_warnv_p (op0,
15379 strict_overflow_p);
15380 if (INTEGRAL_TYPE_P (inner_type))
15381 {
15382 if (TYPE_UNSIGNED (inner_type))
15383 return true;
15384 return tree_expr_nonnegative_warnv_p (op0,
15385 strict_overflow_p);
15386 }
15387 }
15388 else if (INTEGRAL_TYPE_P (outer_type))
15389 {
15390 if (TREE_CODE (inner_type) == REAL_TYPE)
15391 return tree_expr_nonnegative_warnv_p (op0,
15392 strict_overflow_p);
15393 if (INTEGRAL_TYPE_P (inner_type))
15394 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15395 && TYPE_UNSIGNED (inner_type);
15396 }
15397 }
15398 break;
15399
15400 default:
15401 return tree_simple_nonnegative_warnv_p (code, type);
15402 }
15403
15404   /* We don't know the sign of the result, so be conservative and return false.  */
15405 return false;
15406 }
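
/* Illustrative example: for (int) US, where US has type unsigned
   short, the NOP_EXPR case sees an integral-to-integral widening from
   an unsigned 16-bit type to a signed 32-bit type; the widened value
   lies in [0, 65535], so the function returns true.  */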
15407
15408 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15409 value is based on the assumption that signed overflow is undefined,
15410 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15411 *STRICT_OVERFLOW_P. */
15412
15413 bool
15414 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15415 tree op1, bool *strict_overflow_p)
15416 {
15417 if (TYPE_UNSIGNED (type))
15418 return true;
15419
15420 switch (code)
15421 {
15422 case POINTER_PLUS_EXPR:
15423 case PLUS_EXPR:
15424 if (FLOAT_TYPE_P (type))
15425 return (tree_expr_nonnegative_warnv_p (op0,
15426 strict_overflow_p)
15427 && tree_expr_nonnegative_warnv_p (op1,
15428 strict_overflow_p));
15429
15430 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15431 both unsigned and at least 2 bits shorter than the result. */
15432 if (TREE_CODE (type) == INTEGER_TYPE
15433 && TREE_CODE (op0) == NOP_EXPR
15434 && TREE_CODE (op1) == NOP_EXPR)
15435 {
15436 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15437 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15438 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15439 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15440 {
15441 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15442 TYPE_PRECISION (inner2)) + 1;
15443 return prec < TYPE_PRECISION (type);
15444 }
15445 }
15446 break;
15447
15448 case MULT_EXPR:
15449 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15450 {
15451 	  /* x * x is always non-negative for floating point x, and for
15452 	     integers when signed overflow is undefined.  */
15453 if (operand_equal_p (op0, op1, 0)
15454 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15455 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15456 {
15457 if (TYPE_OVERFLOW_UNDEFINED (type))
15458 *strict_overflow_p = true;
15459 return true;
15460 }
15461 }
15462
15463       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15464 	 both unsigned and their combined width is less than that of the result.  */
15465 if (TREE_CODE (type) == INTEGER_TYPE
15466 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15467 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15468 {
15469 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15470 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15471 : TREE_TYPE (op0);
15472 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15473 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15474 : TREE_TYPE (op1);
15475
15476 bool unsigned0 = TYPE_UNSIGNED (inner0);
15477 bool unsigned1 = TYPE_UNSIGNED (inner1);
15478
15479 if (TREE_CODE (op0) == INTEGER_CST)
15480 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15481
15482 if (TREE_CODE (op1) == INTEGER_CST)
15483 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15484
15485 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15486 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15487 {
15488 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15489 ? tree_int_cst_min_precision (op0, UNSIGNED)
15490 : TYPE_PRECISION (inner0);
15491
15492 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15493 ? tree_int_cst_min_precision (op1, UNSIGNED)
15494 : TYPE_PRECISION (inner1);
15495
15496 return precision0 + precision1 < TYPE_PRECISION (type);
15497 }
15498 }
15499 return false;
15500
15501 case BIT_AND_EXPR:
15502 case MAX_EXPR:
15503 return (tree_expr_nonnegative_warnv_p (op0,
15504 strict_overflow_p)
15505 || tree_expr_nonnegative_warnv_p (op1,
15506 strict_overflow_p));
15507
15508 case BIT_IOR_EXPR:
15509 case BIT_XOR_EXPR:
15510 case MIN_EXPR:
15511 case RDIV_EXPR:
15512 case TRUNC_DIV_EXPR:
15513 case CEIL_DIV_EXPR:
15514 case FLOOR_DIV_EXPR:
15515 case ROUND_DIV_EXPR:
15516 return (tree_expr_nonnegative_warnv_p (op0,
15517 strict_overflow_p)
15518 && tree_expr_nonnegative_warnv_p (op1,
15519 strict_overflow_p));
15520
15521 case TRUNC_MOD_EXPR:
15522 case CEIL_MOD_EXPR:
15523 case FLOOR_MOD_EXPR:
15524 case ROUND_MOD_EXPR:
15525 return tree_expr_nonnegative_warnv_p (op0,
15526 strict_overflow_p);
15527 default:
15528 return tree_simple_nonnegative_warnv_p (code, type);
15529 }
15530
15531   /* We don't know the sign of the result, so be conservative and return false.  */
15532 return false;
15533 }
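
/* Illustrative example: (int) UC1 + (int) UC2 with unsigned char
   operands reaches the PLUS_EXPR case above; MAX (8, 8) + 1 == 9 is
   less than 32, the sum is at most 255 + 255 == 510, so the result is
   known non-negative without any overflow assumption.  */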
15534
15535 /* Return true if T is known to be non-negative. If the return
15536 value is based on the assumption that signed overflow is undefined,
15537 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15538 *STRICT_OVERFLOW_P. */
15539
15540 bool
15541 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15542 {
15543 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15544 return true;
15545
15546 switch (TREE_CODE (t))
15547 {
15548 case INTEGER_CST:
15549 return tree_int_cst_sgn (t) >= 0;
15550
15551 case REAL_CST:
15552 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15553
15554 case FIXED_CST:
15555 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15556
15557 case COND_EXPR:
15558 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15559 strict_overflow_p)
15560 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15561 strict_overflow_p));
15562 default:
15563 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15564 TREE_TYPE (t));
15565 }
15566 /* We don't know sign of `t', so be conservative and return false. */
15567 return false;
15568 }
15569
15570 /* Return true if T is known to be non-negative. If the return
15571 value is based on the assumption that signed overflow is undefined,
15572 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15573 *STRICT_OVERFLOW_P. */
15574
15575 bool
15576 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15577 tree arg0, tree arg1, bool *strict_overflow_p)
15578 {
15579 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15580 switch (DECL_FUNCTION_CODE (fndecl))
15581 {
15582 CASE_FLT_FN (BUILT_IN_ACOS):
15583 CASE_FLT_FN (BUILT_IN_ACOSH):
15584 CASE_FLT_FN (BUILT_IN_CABS):
15585 CASE_FLT_FN (BUILT_IN_COSH):
15586 CASE_FLT_FN (BUILT_IN_ERFC):
15587 CASE_FLT_FN (BUILT_IN_EXP):
15588 CASE_FLT_FN (BUILT_IN_EXP10):
15589 CASE_FLT_FN (BUILT_IN_EXP2):
15590 CASE_FLT_FN (BUILT_IN_FABS):
15591 CASE_FLT_FN (BUILT_IN_FDIM):
15592 CASE_FLT_FN (BUILT_IN_HYPOT):
15593 CASE_FLT_FN (BUILT_IN_POW10):
15594 CASE_INT_FN (BUILT_IN_FFS):
15595 CASE_INT_FN (BUILT_IN_PARITY):
15596 CASE_INT_FN (BUILT_IN_POPCOUNT):
15597 CASE_INT_FN (BUILT_IN_CLZ):
15598 CASE_INT_FN (BUILT_IN_CLRSB):
15599 case BUILT_IN_BSWAP32:
15600 case BUILT_IN_BSWAP64:
15601 /* Always true. */
15602 return true;
15603
15604 CASE_FLT_FN (BUILT_IN_SQRT):
15605 /* sqrt(-0.0) is -0.0. */
15606 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15607 return true;
15608 return tree_expr_nonnegative_warnv_p (arg0,
15609 strict_overflow_p);
15610
15611 CASE_FLT_FN (BUILT_IN_ASINH):
15612 CASE_FLT_FN (BUILT_IN_ATAN):
15613 CASE_FLT_FN (BUILT_IN_ATANH):
15614 CASE_FLT_FN (BUILT_IN_CBRT):
15615 CASE_FLT_FN (BUILT_IN_CEIL):
15616 CASE_FLT_FN (BUILT_IN_ERF):
15617 CASE_FLT_FN (BUILT_IN_EXPM1):
15618 CASE_FLT_FN (BUILT_IN_FLOOR):
15619 CASE_FLT_FN (BUILT_IN_FMOD):
15620 CASE_FLT_FN (BUILT_IN_FREXP):
15621 CASE_FLT_FN (BUILT_IN_ICEIL):
15622 CASE_FLT_FN (BUILT_IN_IFLOOR):
15623 CASE_FLT_FN (BUILT_IN_IRINT):
15624 CASE_FLT_FN (BUILT_IN_IROUND):
15625 CASE_FLT_FN (BUILT_IN_LCEIL):
15626 CASE_FLT_FN (BUILT_IN_LDEXP):
15627 CASE_FLT_FN (BUILT_IN_LFLOOR):
15628 CASE_FLT_FN (BUILT_IN_LLCEIL):
15629 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15630 CASE_FLT_FN (BUILT_IN_LLRINT):
15631 CASE_FLT_FN (BUILT_IN_LLROUND):
15632 CASE_FLT_FN (BUILT_IN_LRINT):
15633 CASE_FLT_FN (BUILT_IN_LROUND):
15634 CASE_FLT_FN (BUILT_IN_MODF):
15635 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15636 CASE_FLT_FN (BUILT_IN_RINT):
15637 CASE_FLT_FN (BUILT_IN_ROUND):
15638 CASE_FLT_FN (BUILT_IN_SCALB):
15639 CASE_FLT_FN (BUILT_IN_SCALBLN):
15640 CASE_FLT_FN (BUILT_IN_SCALBN):
15641 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15642 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15643 CASE_FLT_FN (BUILT_IN_SINH):
15644 CASE_FLT_FN (BUILT_IN_TANH):
15645 CASE_FLT_FN (BUILT_IN_TRUNC):
15646 /* True if the 1st argument is nonnegative. */
15647 return tree_expr_nonnegative_warnv_p (arg0,
15648 strict_overflow_p);
15649
15650 CASE_FLT_FN (BUILT_IN_FMAX):
15651 /* True if the 1st OR 2nd arguments are nonnegative. */
15652 return (tree_expr_nonnegative_warnv_p (arg0,
15653 strict_overflow_p)
15654 || (tree_expr_nonnegative_warnv_p (arg1,
15655 strict_overflow_p)));
15656
15657 CASE_FLT_FN (BUILT_IN_FMIN):
15658 /* True if the 1st AND 2nd arguments are nonnegative. */
15659 return (tree_expr_nonnegative_warnv_p (arg0,
15660 strict_overflow_p)
15661 && (tree_expr_nonnegative_warnv_p (arg1,
15662 strict_overflow_p)));
15663
15664 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15665 /* True if the 2nd argument is nonnegative. */
15666 return tree_expr_nonnegative_warnv_p (arg1,
15667 strict_overflow_p);
15668
15669 CASE_FLT_FN (BUILT_IN_POWI):
15670 /* True if the 1st argument is nonnegative or the second
15671 argument is an even integer. */
15672 if (TREE_CODE (arg1) == INTEGER_CST
15673 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15674 return true;
15675 return tree_expr_nonnegative_warnv_p (arg0,
15676 strict_overflow_p);
15677
15678 CASE_FLT_FN (BUILT_IN_POW):
15679 /* True if the 1st argument is nonnegative or the second
15680 argument is an even integer valued real. */
15681 if (TREE_CODE (arg1) == REAL_CST)
15682 {
15683 REAL_VALUE_TYPE c;
15684 HOST_WIDE_INT n;
15685
15686 c = TREE_REAL_CST (arg1);
15687 n = real_to_integer (&c);
15688 if ((n & 1) == 0)
15689 {
15690 REAL_VALUE_TYPE cint;
15691 real_from_integer (&cint, VOIDmode, n, SIGNED);
15692 if (real_identical (&c, &cint))
15693 return true;
15694 }
15695 }
15696 return tree_expr_nonnegative_warnv_p (arg0,
15697 strict_overflow_p);
15698
15699 default:
15700 break;
15701 }
15702 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15703 type);
15704 }
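
/* Illustrative example: for pow (x, 2.0), ARG1 is the REAL_CST 2.0;
   real_to_integer yields n == 2, (n & 1) == 0, and converting 2 back
   via real_from_integer reproduces 2.0 exactly, so the call is known
   non-negative regardless of the sign of X.  */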
15705
15706 /* Return true if T is known to be non-negative. If the return
15707 value is based on the assumption that signed overflow is undefined,
15708 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15709 *STRICT_OVERFLOW_P. */
15710
15711 static bool
15712 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15713 {
15714 enum tree_code code = TREE_CODE (t);
15715 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15716 return true;
15717
15718 switch (code)
15719 {
15720 case TARGET_EXPR:
15721 {
15722 tree temp = TARGET_EXPR_SLOT (t);
15723 t = TARGET_EXPR_INITIAL (t);
15724
15725 /* If the initializer is non-void, then it's a normal expression
15726 that will be assigned to the slot. */
15727 if (!VOID_TYPE_P (t))
15728 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15729
15730 /* Otherwise, the initializer sets the slot in some way. One common
15731 way is an assignment statement at the end of the initializer. */
15732 while (1)
15733 {
15734 if (TREE_CODE (t) == BIND_EXPR)
15735 t = expr_last (BIND_EXPR_BODY (t));
15736 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15737 || TREE_CODE (t) == TRY_CATCH_EXPR)
15738 t = expr_last (TREE_OPERAND (t, 0));
15739 else if (TREE_CODE (t) == STATEMENT_LIST)
15740 t = expr_last (t);
15741 else
15742 break;
15743 }
15744 if (TREE_CODE (t) == MODIFY_EXPR
15745 && TREE_OPERAND (t, 0) == temp)
15746 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15747 strict_overflow_p);
15748
15749 return false;
15750 }
15751
15752 case CALL_EXPR:
15753 {
15754 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15755 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15756
15757 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15758 get_callee_fndecl (t),
15759 arg0,
15760 arg1,
15761 strict_overflow_p);
15762 }
15763 case COMPOUND_EXPR:
15764 case MODIFY_EXPR:
15765 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15766 strict_overflow_p);
15767 case BIND_EXPR:
15768 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15769 strict_overflow_p);
15770 case SAVE_EXPR:
15771 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15772 strict_overflow_p);
15773
15774 default:
15775 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15776 TREE_TYPE (t));
15777 }
15778
15779 /* We don't know sign of `t', so be conservative and return false. */
15780 return false;
15781 }
15782
15783 /* Return true if T is known to be non-negative. If the return
15784 value is based on the assumption that signed overflow is undefined,
15785 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15786 *STRICT_OVERFLOW_P. */
15787
15788 bool
15789 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15790 {
15791 enum tree_code code;
15792 if (t == error_mark_node)
15793 return false;
15794
15795 code = TREE_CODE (t);
15796 switch (TREE_CODE_CLASS (code))
15797 {
15798 case tcc_binary:
15799 case tcc_comparison:
15800 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15801 TREE_TYPE (t),
15802 TREE_OPERAND (t, 0),
15803 TREE_OPERAND (t, 1),
15804 strict_overflow_p);
15805
15806 case tcc_unary:
15807 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15808 TREE_TYPE (t),
15809 TREE_OPERAND (t, 0),
15810 strict_overflow_p);
15811
15812 case tcc_constant:
15813 case tcc_declaration:
15814 case tcc_reference:
15815 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15816
15817 default:
15818 break;
15819 }
15820
15821 switch (code)
15822 {
15823 case TRUTH_AND_EXPR:
15824 case TRUTH_OR_EXPR:
15825 case TRUTH_XOR_EXPR:
15826 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15827 TREE_TYPE (t),
15828 TREE_OPERAND (t, 0),
15829 TREE_OPERAND (t, 1),
15830 strict_overflow_p);
15831 case TRUTH_NOT_EXPR:
15832 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15833 TREE_TYPE (t),
15834 TREE_OPERAND (t, 0),
15835 strict_overflow_p);
15836
15837 case COND_EXPR:
15838 case CONSTRUCTOR:
15839 case OBJ_TYPE_REF:
15840 case ASSERT_EXPR:
15841 case ADDR_EXPR:
15842 case WITH_SIZE_EXPR:
15843 case SSA_NAME:
15844 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15845
15846 default:
15847 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15848 }
15849 }
15850
15851 /* Return true if `t' is known to be non-negative. Handle warnings
15852 about undefined signed overflow. */
15853
15854 bool
15855 tree_expr_nonnegative_p (tree t)
15856 {
15857 bool ret, strict_overflow_p;
15858
15859 strict_overflow_p = false;
15860 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15861 if (strict_overflow_p)
15862 fold_overflow_warning (("assuming signed overflow does not occur when "
15863 "determining that expression is always "
15864 "non-negative"),
15865 WARN_STRICT_OVERFLOW_MISC);
15866 return ret;
15867 }
15868
15869
15870 /* Return true when (CODE OP0) is known to be nonzero.  For floating
15871    point we further ensure that the value is not denormal.
15872 Similar logic is present in nonzero_address in rtlanal.h.
15873
15874 If the return value is based on the assumption that signed overflow
15875 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15876 change *STRICT_OVERFLOW_P. */
15877
15878 bool
15879 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15880 bool *strict_overflow_p)
15881 {
15882 switch (code)
15883 {
15884 case ABS_EXPR:
15885 return tree_expr_nonzero_warnv_p (op0,
15886 strict_overflow_p);
15887
15888 case NOP_EXPR:
15889 {
15890 tree inner_type = TREE_TYPE (op0);
15891 tree outer_type = type;
15892
15893 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15894 && tree_expr_nonzero_warnv_p (op0,
15895 strict_overflow_p));
15896 }
15897 break;
15898
15899 case NON_LVALUE_EXPR:
15900 return tree_expr_nonzero_warnv_p (op0,
15901 strict_overflow_p);
15902
15903 default:
15904 break;
15905 }
15906
15907 return false;
15908 }
15909
15910 /* Return true when (CODE OP0 OP1) is known to be nonzero.  For floating
15911    point we further ensure that the value is not denormal.
15912 Similar logic is present in nonzero_address in rtlanal.h.
15913
15914 If the return value is based on the assumption that signed overflow
15915 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15916 change *STRICT_OVERFLOW_P. */
15917
15918 bool
15919 tree_binary_nonzero_warnv_p (enum tree_code code,
15920 tree type,
15921 tree op0,
15922 tree op1, bool *strict_overflow_p)
15923 {
15924 bool sub_strict_overflow_p;
15925 switch (code)
15926 {
15927 case POINTER_PLUS_EXPR:
15928 case PLUS_EXPR:
15929 if (TYPE_OVERFLOW_UNDEFINED (type))
15930 {
15931 	  /* In the presence of negative values it is hard to say
15932 	     anything definite.  */
15933 sub_strict_overflow_p = false;
15934 if (!tree_expr_nonnegative_warnv_p (op0,
15935 &sub_strict_overflow_p)
15936 || !tree_expr_nonnegative_warnv_p (op1,
15937 &sub_strict_overflow_p))
15938 return false;
15939 	  /* One of the operands must be positive and the other non-negative.  */
15940 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15941 overflows, on a twos-complement machine the sum of two
15942 nonnegative numbers can never be zero. */
15943 return (tree_expr_nonzero_warnv_p (op0,
15944 strict_overflow_p)
15945 || tree_expr_nonzero_warnv_p (op1,
15946 strict_overflow_p));
15947 }
15948 break;
15949
15950 case MULT_EXPR:
15951 if (TYPE_OVERFLOW_UNDEFINED (type))
15952 {
15953 if (tree_expr_nonzero_warnv_p (op0,
15954 strict_overflow_p)
15955 && tree_expr_nonzero_warnv_p (op1,
15956 strict_overflow_p))
15957 {
15958 *strict_overflow_p = true;
15959 return true;
15960 }
15961 }
15962 break;
15963
15964 case MIN_EXPR:
15965 sub_strict_overflow_p = false;
15966 if (tree_expr_nonzero_warnv_p (op0,
15967 &sub_strict_overflow_p)
15968 && tree_expr_nonzero_warnv_p (op1,
15969 &sub_strict_overflow_p))
15970 {
15971 if (sub_strict_overflow_p)
15972 *strict_overflow_p = true;
15973 }
15974 break;
15975
15976 case MAX_EXPR:
15977 sub_strict_overflow_p = false;
15978 if (tree_expr_nonzero_warnv_p (op0,
15979 &sub_strict_overflow_p))
15980 {
15981 if (sub_strict_overflow_p)
15982 *strict_overflow_p = true;
15983
15984 /* When both operands are nonzero, then MAX must be too. */
15985 if (tree_expr_nonzero_warnv_p (op1,
15986 strict_overflow_p))
15987 return true;
15988
15989 /* MAX where operand 0 is positive is positive. */
15990 return tree_expr_nonnegative_warnv_p (op0,
15991 strict_overflow_p);
15992 }
15993 /* MAX where operand 1 is positive is positive. */
15994 else if (tree_expr_nonzero_warnv_p (op1,
15995 &sub_strict_overflow_p)
15996 && tree_expr_nonnegative_warnv_p (op1,
15997 &sub_strict_overflow_p))
15998 {
15999 if (sub_strict_overflow_p)
16000 *strict_overflow_p = true;
16001 return true;
16002 }
16003 break;
16004
16005 case BIT_IOR_EXPR:
16006 return (tree_expr_nonzero_warnv_p (op1,
16007 strict_overflow_p)
16008 || tree_expr_nonzero_warnv_p (op0,
16009 strict_overflow_p));
16010
16011 default:
16012 break;
16013 }
16014
16015 return false;
16016 }
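
/* Illustrative example: X * Y where both signed int operands are
   known nonzero.  With overflow undefined the product cannot wrap
   around to zero, so the MULT_EXPR case returns true and records in
   *STRICT_OVERFLOW_P that the answer rests on that assumption.  */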
16017
16018 /* Return true when T is known to be nonzero.
16019 For floating point we further ensure that T is not denormal.
16020 Similar logic is present in nonzero_address in rtlanal.h.
16021
16022 If the return value is based on the assumption that signed overflow
16023 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16024 change *STRICT_OVERFLOW_P. */
16025
16026 bool
16027 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16028 {
16029 bool sub_strict_overflow_p;
16030 switch (TREE_CODE (t))
16031 {
16032 case INTEGER_CST:
16033 return !integer_zerop (t);
16034
16035 case ADDR_EXPR:
16036 {
16037 tree base = TREE_OPERAND (t, 0);
16038 if (!DECL_P (base))
16039 base = get_base_address (base);
16040
16041 if (!base)
16042 return false;
16043
16044 	/* Weak declarations may link to NULL.  Other things may also be
16045 	   NULL, so protect with -fdelete-null-pointer-checks; variables
16046 	   allocated on the stack, however, are never NULL.  */
16047 if (DECL_P (base)
16048 && (flag_delete_null_pointer_checks
16049 || (DECL_CONTEXT (base)
16050 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16051 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16052 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16053
16054 /* Constants are never weak. */
16055 if (CONSTANT_CLASS_P (base))
16056 return true;
16057
16058 return false;
16059 }
16060
16061 case COND_EXPR:
16062 sub_strict_overflow_p = false;
16063 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16064 &sub_strict_overflow_p)
16065 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16066 &sub_strict_overflow_p))
16067 {
16068 if (sub_strict_overflow_p)
16069 *strict_overflow_p = true;
16070 return true;
16071 }
16072 break;
16073
16074 default:
16075 break;
16076 }
16077 return false;
16078 }
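
/* Illustrative examples: the address of an auto variable in its
   enclosing function is always nonzero; the address of a global is
   nonzero under -fdelete-null-pointer-checks; but the address of a
   DECL_WEAK symbol is not provably nonzero, since a weak reference
   may resolve to NULL.  */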
16079
16080 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16081 attempt to fold the expression to a constant without modifying TYPE,
16082 OP0 or OP1.
16083
16084 If the expression could be simplified to a constant, then return
16085 the constant. If the expression would not be simplified to a
16086 constant, then return NULL_TREE. */
16087
16088 tree
16089 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16090 {
16091 tree tem = fold_binary (code, type, op0, op1);
16092 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16093 }
16094
16095 /* Given the components of a unary expression CODE, TYPE and OP0,
16096 attempt to fold the expression to a constant without modifying
16097 TYPE or OP0.
16098
16099 If the expression could be simplified to a constant, then return
16100 the constant. If the expression would not be simplified to a
16101 constant, then return NULL_TREE. */
16102
16103 tree
16104 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16105 {
16106 tree tem = fold_unary (code, type, op0);
16107 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16108 }
16109
16110 /* If EXP represents referencing an element in a constant string
16111 (either via pointer arithmetic or array indexing), return the
16112 tree representing the value accessed, otherwise return NULL. */
16113
16114 tree
16115 fold_read_from_constant_string (tree exp)
16116 {
16117 if ((TREE_CODE (exp) == INDIRECT_REF
16118 || TREE_CODE (exp) == ARRAY_REF)
16119 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16120 {
16121 tree exp1 = TREE_OPERAND (exp, 0);
16122 tree index;
16123 tree string;
16124 location_t loc = EXPR_LOCATION (exp);
16125
16126 if (TREE_CODE (exp) == INDIRECT_REF)
16127 string = string_constant (exp1, &index);
16128 else
16129 {
16130 tree low_bound = array_ref_low_bound (exp);
16131 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16132
16133 	  /* Optimize the special case of a zero lower bound.
16134
16135 We convert the low_bound to sizetype to avoid some problems
16136 with constant folding. (E.g. suppose the lower bound is 1,
16137 	     and its mode is QI.  Without the conversion, (ARRAY
16138 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16139 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16140 if (! integer_zerop (low_bound))
16141 index = size_diffop_loc (loc, index,
16142 fold_convert_loc (loc, sizetype, low_bound));
16143
16144 string = exp1;
16145 }
16146
16147 if (string
16148 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16149 && TREE_CODE (string) == STRING_CST
16150 && TREE_CODE (index) == INTEGER_CST
16151 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16152 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16153 == MODE_INT)
16154 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16155 return build_int_cst_type (TREE_TYPE (exp),
16156 (TREE_STRING_POINTER (string)
16157 [TREE_INT_CST_LOW (index)]));
16158 }
16159 return NULL;
16160 }
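
/* Illustrative example: for the C expression "abc"[1], EXP is an
   ARRAY_REF whose base is the STRING_CST "abc" and whose index is 1;
   the checks above (single-byte integer mode, index within the string
   length) pass, and the result is the character constant 'b'.  */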
16161
16162 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16163 an integer constant, real, or fixed-point constant.
16164
16165 TYPE is the type of the result. */
16166
16167 static tree
16168 fold_negate_const (tree arg0, tree type)
16169 {
16170 tree t = NULL_TREE;
16171
16172 switch (TREE_CODE (arg0))
16173 {
16174 case INTEGER_CST:
16175 {
16176 bool overflow;
16177 wide_int val = wi::neg (arg0, &overflow);
16178 t = force_fit_type (type, val, 1,
16179 (overflow | TREE_OVERFLOW (arg0))
16180 && !TYPE_UNSIGNED (type));
16181 break;
16182 }
16183
16184 case REAL_CST:
16185 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16186 break;
16187
16188 case FIXED_CST:
16189 {
16190 FIXED_VALUE_TYPE f;
16191 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16192 &(TREE_FIXED_CST (arg0)), NULL,
16193 TYPE_SATURATING (type));
16194 t = build_fixed (type, f);
16195 /* Propagate overflow flags. */
16196 if (overflow_p | TREE_OVERFLOW (arg0))
16197 TREE_OVERFLOW (t) = 1;
16198 break;
16199 }
16200
16201 default:
16202 gcc_unreachable ();
16203 }
16204
16205 return t;
16206 }
16207
16208 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16209 an integer constant or real constant.
16210
16211 TYPE is the type of the result. */
16212
16213 tree
16214 fold_abs_const (tree arg0, tree type)
16215 {
16216 tree t = NULL_TREE;
16217
16218 switch (TREE_CODE (arg0))
16219 {
16220 case INTEGER_CST:
16221 {
16222 /* If the value is unsigned or non-negative, then the absolute value
16223 is the same as the ordinary value. */
16224 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
16225 t = arg0;
16226
16227 /* If the value is negative, then the absolute value is
16228 its negation. */
16229 else
16230 {
16231 bool overflow;
16232 wide_int val = wi::neg (arg0, &overflow);
16233 t = force_fit_type (type, val, -1,
16234 overflow | TREE_OVERFLOW (arg0));
16235 }
16236 }
16237 break;
16238
16239 case REAL_CST:
16240 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16241 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16242 else
16243 t = arg0;
16244 break;
16245
16246 default:
16247 gcc_unreachable ();
16248 }
16249
16250 return t;
16251 }
16252
16253 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16254 constant. TYPE is the type of the result. */
16255
16256 static tree
16257 fold_not_const (const_tree arg0, tree type)
16258 {
16259 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16260
16261 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
16262 }
16263
16264 /* Given CODE, a relational operator, the target type, TYPE and two
16265 constant operands OP0 and OP1, return the result of the
16266 relational operation. If the result is not a compile time
16267 constant, then return NULL_TREE. */
16268
16269 static tree
16270 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16271 {
16272 int result, invert;
16273
16274 /* From here on, the only cases we handle are when the result is
16275 known to be a constant. */
16276
16277 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16278 {
16279 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16280 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16281
16282 /* Handle the cases where either operand is a NaN. */
16283 if (real_isnan (c0) || real_isnan (c1))
16284 {
16285 switch (code)
16286 {
16287 case EQ_EXPR:
16288 case ORDERED_EXPR:
16289 result = 0;
16290 break;
16291
16292 case NE_EXPR:
16293 case UNORDERED_EXPR:
16294 case UNLT_EXPR:
16295 case UNLE_EXPR:
16296 case UNGT_EXPR:
16297 case UNGE_EXPR:
16298 case UNEQ_EXPR:
16299 result = 1;
16300 break;
16301
16302 case LT_EXPR:
16303 case LE_EXPR:
16304 case GT_EXPR:
16305 case GE_EXPR:
16306 case LTGT_EXPR:
16307 if (flag_trapping_math)
16308 return NULL_TREE;
16309 result = 0;
16310 break;
16311
16312 default:
16313 gcc_unreachable ();
16314 }
16315
16316 return constant_boolean_node (result, type);
16317 }
16318
16319 return constant_boolean_node (real_compare (code, c0, c1), type);
16320 }
16321
16322 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16323 {
16324 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16325 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16326 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16327 }
16328
16329 /* Handle equality/inequality of complex constants. */
16330 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16331 {
16332 tree rcond = fold_relational_const (code, type,
16333 TREE_REALPART (op0),
16334 TREE_REALPART (op1));
16335 tree icond = fold_relational_const (code, type,
16336 TREE_IMAGPART (op0),
16337 TREE_IMAGPART (op1));
16338 if (code == EQ_EXPR)
16339 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16340 else if (code == NE_EXPR)
16341 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16342 else
16343 return NULL_TREE;
16344 }
16345
16346 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16347 {
16348 unsigned count = VECTOR_CST_NELTS (op0);
16349 tree *elts = XALLOCAVEC (tree, count);
16350 gcc_assert (VECTOR_CST_NELTS (op1) == count
16351 && TYPE_VECTOR_SUBPARTS (type) == count);
16352
16353 for (unsigned i = 0; i < count; i++)
16354 {
16355 tree elem_type = TREE_TYPE (type);
16356 tree elem0 = VECTOR_CST_ELT (op0, i);
16357 tree elem1 = VECTOR_CST_ELT (op1, i);
16358
16359 tree tem = fold_relational_const (code, elem_type,
16360 elem0, elem1);
16361
16362 if (tem == NULL_TREE)
16363 return NULL_TREE;
16364
16365 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16366 }
16367
16368 return build_vector (type, elts);
16369 }
16370
16371 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16372
16373 To compute GT, swap the arguments and do LT.
16374 To compute GE, do LT and invert the result.
16375 To compute LE, swap the arguments, do LT and invert the result.
16376 To compute NE, do EQ and invert the result.
16377
16378 Therefore, the code below must handle only EQ and LT. */
16379
16380 if (code == LE_EXPR || code == GT_EXPR)
16381 {
16382 tree tem = op0;
16383 op0 = op1;
16384 op1 = tem;
16385 code = swap_tree_comparison (code);
16386 }
16387
16388 /* Note that it is safe to invert for real values here because we
16389    have already handled the one case where it matters.  */
16390
16391 invert = 0;
16392 if (code == NE_EXPR || code == GE_EXPR)
16393 {
16394 invert = 1;
16395 code = invert_tree_comparison (code, false);
16396 }
16397
16398 /* Compute a result for LT or EQ if args permit;
16399      otherwise return NULL_TREE.  */
16400 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16401 {
16402 if (code == EQ_EXPR)
16403 result = tree_int_cst_equal (op0, op1);
16404 else
16405 result = tree_int_cst_lt (op0, op1);
16406 }
16407 else
16408 return NULL_TREE;
16409
16410 if (invert)
16411 result ^= 1;
16412 return constant_boolean_node (result, type);
16413 }
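
/* Worked example: folding 5 >= 3 on INTEGER_CSTs.  GE_EXPR is reduced
   to the inversion of LT_EXPR (invert == 1); tree_int_cst_lt (5, 3)
   yields 0, and 0 ^ 1 == 1, so the result is the boolean constant
   true.  */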
16414
16415 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16416 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16417 itself. */
16418
16419 tree
16420 fold_build_cleanup_point_expr (tree type, tree expr)
16421 {
16422 /* If the expression does not have side effects then we don't have to wrap
16423 it with a cleanup point expression. */
16424 if (!TREE_SIDE_EFFECTS (expr))
16425 return expr;
16426
16427   /* If the expression is a return, check the expression inside the
16428      return and, if that is a modify expression, its right-hand side.
16429      If either has no side effects, we don't need to wrap the expression
16430      in a cleanup point expression.  Note we don't check the left-hand
16431      side of the modify because it should always be a return decl.  */
16432 if (TREE_CODE (expr) == RETURN_EXPR)
16433 {
16434 tree op = TREE_OPERAND (expr, 0);
16435 if (!op || !TREE_SIDE_EFFECTS (op))
16436 return expr;
16437 op = TREE_OPERAND (op, 1);
16438 if (!TREE_SIDE_EFFECTS (op))
16439 return expr;
16440 }
16441
16442 return build1 (CLEANUP_POINT_EXPR, type, expr);
16443 }
16444
16445 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16446 of an indirection through OP0, or NULL_TREE if no simplification is
16447 possible. */
16448
16449 tree
16450 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16451 {
16452 tree sub = op0;
16453 tree subtype;
16454
16455 STRIP_NOPS (sub);
16456 subtype = TREE_TYPE (sub);
16457 if (!POINTER_TYPE_P (subtype))
16458 return NULL_TREE;
16459
16460 if (TREE_CODE (sub) == ADDR_EXPR)
16461 {
16462 tree op = TREE_OPERAND (sub, 0);
16463 tree optype = TREE_TYPE (op);
16464       /* *&CONST_DECL -> the value of the const decl.  */
16465 if (TREE_CODE (op) == CONST_DECL)
16466 return DECL_INITIAL (op);
16467 /* *&p => p; make sure to handle *&"str"[cst] here. */
16468 if (type == optype)
16469 {
16470 tree fop = fold_read_from_constant_string (op);
16471 if (fop)
16472 return fop;
16473 else
16474 return op;
16475 }
16476 /* *(foo *)&fooarray => fooarray[0] */
16477 else if (TREE_CODE (optype) == ARRAY_TYPE
16478 && type == TREE_TYPE (optype)
16479 && (!in_gimple_form
16480 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16481 {
16482 tree type_domain = TYPE_DOMAIN (optype);
16483 tree min_val = size_zero_node;
16484 if (type_domain && TYPE_MIN_VALUE (type_domain))
16485 min_val = TYPE_MIN_VALUE (type_domain);
16486 if (in_gimple_form
16487 && TREE_CODE (min_val) != INTEGER_CST)
16488 return NULL_TREE;
16489 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16490 NULL_TREE, NULL_TREE);
16491 }
16492 /* *(foo *)&complexfoo => __real__ complexfoo */
16493 else if (TREE_CODE (optype) == COMPLEX_TYPE
16494 && type == TREE_TYPE (optype))
16495 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16496 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16497 else if (TREE_CODE (optype) == VECTOR_TYPE
16498 && type == TREE_TYPE (optype))
16499 {
16500 tree part_width = TYPE_SIZE (type);
16501 tree index = bitsize_int (0);
16502 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16503 }
16504 }
16505
16506 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16507 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16508 {
16509 tree op00 = TREE_OPERAND (sub, 0);
16510 tree op01 = TREE_OPERAND (sub, 1);
16511
16512 STRIP_NOPS (op00);
16513 if (TREE_CODE (op00) == ADDR_EXPR)
16514 {
16515 tree op00type;
16516 op00 = TREE_OPERAND (op00, 0);
16517 op00type = TREE_TYPE (op00);
16518
16519 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16520 if (TREE_CODE (op00type) == VECTOR_TYPE
16521 && type == TREE_TYPE (op00type))
16522 {
16523 HOST_WIDE_INT offset = tree_to_shwi (op01);
16524 tree part_width = TYPE_SIZE (type);
16525 	      unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
16526 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16527 tree index = bitsize_int (indexi);
16528
16529 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16530 return fold_build3_loc (loc,
16531 BIT_FIELD_REF, type, op00,
16532 part_width, index);
16533
16534 }
16535 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16536 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16537 && type == TREE_TYPE (op00type))
16538 {
16539 tree size = TYPE_SIZE_UNIT (type);
16540 if (tree_int_cst_equal (size, op01))
16541 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16542 }
16543 /* ((foo *)&fooarray)[1] => fooarray[1] */
16544 else if (TREE_CODE (op00type) == ARRAY_TYPE
16545 && type == TREE_TYPE (op00type))
16546 {
16547 tree type_domain = TYPE_DOMAIN (op00type);
16548 tree min_val = size_zero_node;
16549 if (type_domain && TYPE_MIN_VALUE (type_domain))
16550 min_val = TYPE_MIN_VALUE (type_domain);
16551 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16552 TYPE_SIZE_UNIT (type));
16553 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16554 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16555 NULL_TREE, NULL_TREE);
16556 }
16557 }
16558 }
16559
16560 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16561 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16562 && type == TREE_TYPE (TREE_TYPE (subtype))
16563 && (!in_gimple_form
16564 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16565 {
16566 tree type_domain;
16567 tree min_val = size_zero_node;
16568 sub = build_fold_indirect_ref_loc (loc, sub);
16569 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16570 if (type_domain && TYPE_MIN_VALUE (type_domain))
16571 min_val = TYPE_MIN_VALUE (type_domain);
16572 if (in_gimple_form
16573 && TREE_CODE (min_val) != INTEGER_CST)
16574 return NULL_TREE;
16575 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16576 NULL_TREE);
16577 }
16578
16579 return NULL_TREE;
16580 }
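
/* Worked example (hypothetical vector V of four floats): the C
   expression ((float *) &V)[1] reaches the POINTER_PLUS_EXPR handling
   with OP01 == 4 bytes; part_widthi == 32 / 8 == 4, indexi == 4 * 8
   == 32, and offset / part_widthi == 1 < 4 subparts, so the access
   folds to BIT_FIELD_REF <V, 32, 32>.  */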
16581
16582 /* Builds an expression for an indirection through T, simplifying some
16583 cases. */
16584
16585 tree
16586 build_fold_indirect_ref_loc (location_t loc, tree t)
16587 {
16588 tree type = TREE_TYPE (TREE_TYPE (t));
16589 tree sub = fold_indirect_ref_1 (loc, type, t);
16590
16591 if (sub)
16592 return sub;
16593
16594 return build1_loc (loc, INDIRECT_REF, type, t);
16595 }
16596
16597 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16598
16599 tree
16600 fold_indirect_ref_loc (location_t loc, tree t)
16601 {
16602 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16603
16604 if (sub)
16605 return sub;
16606 else
16607 return t;
16608 }
16609
16610 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16611 whose result is ignored. The type of the returned tree need not be
16612 the same as the original expression. */
16613
16614 tree
16615 fold_ignored_result (tree t)
16616 {
16617 if (!TREE_SIDE_EFFECTS (t))
16618 return integer_zero_node;
16619
16620 for (;;)
16621 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16622 {
16623 case tcc_unary:
16624 t = TREE_OPERAND (t, 0);
16625 break;
16626
16627 case tcc_binary:
16628 case tcc_comparison:
16629 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16630 t = TREE_OPERAND (t, 0);
16631 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16632 t = TREE_OPERAND (t, 1);
16633 else
16634 return t;
16635 break;
16636
16637 case tcc_expression:
16638 switch (TREE_CODE (t))
16639 {
16640 case COMPOUND_EXPR:
16641 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16642 return t;
16643 t = TREE_OPERAND (t, 0);
16644 break;
16645
16646 case COND_EXPR:
16647 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16648 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16649 return t;
16650 t = TREE_OPERAND (t, 0);
16651 break;
16652
16653 default:
16654 return t;
16655 }
16656 break;
16657
16658 default:
16659 return t;
16660 }
16661 }
16662
16663 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16664
16665 tree
16666 round_up_loc (location_t loc, tree value, int divisor)
16667 {
16668 tree div = NULL_TREE;
16669
16670 gcc_assert (divisor > 0);
16671 if (divisor == 1)
16672 return value;
16673
16674   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16675      have to do anything.  Only do this check when VALUE is not a
16676      constant, because for a constant this check is more expensive
16677      than simply doing the rounding.  */
16678 if (TREE_CODE (value) != INTEGER_CST)
16679 {
16680 div = build_int_cst (TREE_TYPE (value), divisor);
16681
16682 if (multiple_of_p (TREE_TYPE (value), value, div))
16683 return value;
16684 }
16685
16686 /* If divisor is a power of two, simplify this to bit manipulation. */
16687 if (divisor == (divisor & -divisor))
16688 {
16689 if (TREE_CODE (value) == INTEGER_CST)
16690 {
16691 wide_int val = value;
16692 bool overflow_p;
16693
16694 if ((val & (divisor - 1)) == 0)
16695 return value;
16696
16697 overflow_p = TREE_OVERFLOW (value);
16698 val &= ~(divisor - 1);
16699 val += divisor;
16700 if (val == 0)
16701 overflow_p = true;
16702
16703 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16704 }
16705 else
16706 {
16707 tree t;
16708
16709 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16710 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16711 t = build_int_cst (TREE_TYPE (value), -divisor);
16712 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16713 }
16714 }
16715 else
16716 {
16717 if (!div)
16718 div = build_int_cst (TREE_TYPE (value), divisor);
16719 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16720 value = size_binop_loc (loc, MULT_EXPR, value, div);
16721 }
16722
16723 return value;
16724 }
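
/* Worked example: round_up_loc (loc, 13, 8) takes the power-of-two
   path for a constant: 13 & 7 is nonzero, so the value is masked down
   to 8 and 8 is added, giving 16.  For a non-constant VALUE the same
   result is computed as (VALUE + 7) & -8.  */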
16725
16726 /* Likewise, but round down. */
16727
16728 tree
16729 round_down_loc (location_t loc, tree value, int divisor)
16730 {
16731 tree div = NULL_TREE;
16732
16733 gcc_assert (divisor > 0);
16734 if (divisor == 1)
16735 return value;
16736
16737   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16738      have to do anything.  Only do this check when VALUE is not a
16739      constant, because for a constant this check is more expensive
16740      than simply doing the rounding.  */
16741 if (TREE_CODE (value) != INTEGER_CST)
16742 {
16743 div = build_int_cst (TREE_TYPE (value), divisor);
16744
16745 if (multiple_of_p (TREE_TYPE (value), value, div))
16746 return value;
16747 }
16748
16749 /* If divisor is a power of two, simplify this to bit manipulation. */
16750 if (divisor == (divisor & -divisor))
16751 {
16752 tree t;
16753
16754 t = build_int_cst (TREE_TYPE (value), -divisor);
16755 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16756 }
16757 else
16758 {
16759 if (!div)
16760 div = build_int_cst (TREE_TYPE (value), divisor);
16761 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16762 value = size_binop_loc (loc, MULT_EXPR, value, div);
16763 }
16764
16765 return value;
16766 }
16767
16768 /* Returns the pointer to the base of the object addressed by EXP and
16769    extracts the offset information of the access, storing it in
16770    *PBITPOS and *POFFSET.  */
16771
16772 static tree
16773 split_address_to_core_and_offset (tree exp,
16774 HOST_WIDE_INT *pbitpos, tree *poffset)
16775 {
16776 tree core;
16777 enum machine_mode mode;
16778 int unsignedp, volatilep;
16779 HOST_WIDE_INT bitsize;
16780 location_t loc = EXPR_LOCATION (exp);
16781
16782 if (TREE_CODE (exp) == ADDR_EXPR)
16783 {
16784 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16785 poffset, &mode, &unsignedp, &volatilep,
16786 false);
16787 core = build_fold_addr_expr_loc (loc, core);
16788 }
16789 else
16790 {
16791 core = exp;
16792 *pbitpos = 0;
16793 *poffset = NULL_TREE;
16794 }
16795
16796 return core;
16797 }
16798
16799 /* Returns true if addresses of E1 and E2 differ by a constant, false
16800 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16801
16802 bool
16803 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16804 {
16805 tree core1, core2;
16806 HOST_WIDE_INT bitpos1, bitpos2;
16807 tree toffset1, toffset2, tdiff, type;
16808
16809 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16810 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16811
16812 if (bitpos1 % BITS_PER_UNIT != 0
16813 || bitpos2 % BITS_PER_UNIT != 0
16814 || !operand_equal_p (core1, core2, 0))
16815 return false;
16816
16817 if (toffset1 && toffset2)
16818 {
16819 type = TREE_TYPE (toffset1);
16820 if (type != TREE_TYPE (toffset2))
16821 toffset2 = fold_convert (type, toffset2);
16822
16823 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16824 if (!cst_and_fits_in_hwi (tdiff))
16825 return false;
16826
16827 *diff = int_cst_value (tdiff);
16828 }
16829 else if (toffset1 || toffset2)
16830 {
16831 /* If only one of the offsets is non-constant, the difference cannot
16832 be a constant. */
16833 return false;
16834 }
16835 else
16836 *diff = 0;
16837
16838 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16839 return true;
16840 }
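
/* Illustrative example (hypothetical struct S): for E1 == &s.b and
   E2 == &s.a, where field b lies four bytes after field a, both
   addresses share the core &s and have no variable offsets; the bit
   positions differ by 32, so *DIFF is set to 32 / 8 == 4 and the
   function returns true.  */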
16841
16842 /* Simplify the floating point expression EXP when the sign of the
16843 result is not significant. Return NULL_TREE if no simplification
16844 is possible. */
16845
16846 tree
16847 fold_strip_sign_ops (tree exp)
16848 {
16849 tree arg0, arg1;
16850 location_t loc = EXPR_LOCATION (exp);
16851
16852 switch (TREE_CODE (exp))
16853 {
16854 case ABS_EXPR:
16855 case NEGATE_EXPR:
16856 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16857 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16858
16859 case MULT_EXPR:
16860 case RDIV_EXPR:
16861 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16862 return NULL_TREE;
16863 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16864 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16865 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16866 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16867 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16868 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16869 break;
16870
16871 case COMPOUND_EXPR:
16872 arg0 = TREE_OPERAND (exp, 0);
16873 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16874 if (arg1)
16875 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16876 break;
16877
16878 case COND_EXPR:
16879 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16880 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16881 if (arg0 || arg1)
16882 return fold_build3_loc (loc,
16883 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16884 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16885 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16886 break;
16887
16888 case CALL_EXPR:
16889 {
16890 const enum built_in_function fcode = builtin_mathfn_code (exp);
16891 switch (fcode)
16892 {
16893 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16894 /* Strip copysign function call, return the 1st argument. */
16895 arg0 = CALL_EXPR_ARG (exp, 0);
16896 arg1 = CALL_EXPR_ARG (exp, 1);
16897 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16898
16899 default:
16900 /* Strip sign ops from the argument of "odd" math functions. */
16901 if (negate_mathfn_p (fcode))
16902 {
16903 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16904 if (arg0)
16905 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16906 }
16907 break;
16908 }
16909 }
16910 break;
16911
16912 default:
16913 break;
16914 }
16915 return NULL_TREE;
16916 }
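
/* Illustrative example: for EXP == (-x) * y, the MULT_EXPR case
   strips the NEGATE_EXPR from the first operand and rebuilds x * y.
   Callers use this when the sign of the result cannot matter, e.g.
   when folding cos (-x * y) into cos (x * y), cos being an even
   function.  */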