re PR other/49752 (Non-existing struct `tree_type' in fold_checksum_tree in fold...
gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide and size_binop.
31
32 fold takes a tree as argument and returns a simplified tree.
33
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
37
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
40
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
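/* A small usage sketch (illustrative, not an entry point itself):
   fold_build2 (PLUS_EXPR, type, a, build_int_cst (type, 0))
   runs the folders on the freshly built sum and simply yields A,
   while an expression the folders cannot simplify comes back
   essentially unchanged.  */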
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "realmpfr.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "target.h"
56 #include "diagnostic-core.h"
57 #include "intl.h"
58 #include "ggc.h"
59 #include "hashtab.h"
60 #include "langhooks.h"
61 #include "md5.h"
62 #include "gimple.h"
63 #include "tree-flow.h"
64
65 /* Nonzero if we are folding constants inside an initializer; zero
66 otherwise. */
67 int folding_initializer = 0;
68
69 /* The following constants represent a bit-based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
73 COMPCODE_FALSE = 0,
74 COMPCODE_LT = 1,
75 COMPCODE_EQ = 2,
76 COMPCODE_LE = 3,
77 COMPCODE_GT = 4,
78 COMPCODE_LTGT = 5,
79 COMPCODE_GE = 6,
80 COMPCODE_ORD = 7,
81 COMPCODE_UNORD = 8,
82 COMPCODE_UNLT = 9,
83 COMPCODE_UNEQ = 10,
84 COMPCODE_UNLE = 11,
85 COMPCODE_UNGT = 12,
86 COMPCODE_NE = 13,
87 COMPCODE_UNGE = 14,
88 COMPCODE_TRUE = 15
89 };
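/* For illustration: LT, EQ, GT and UNORDERED occupy bits 0-3, so
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ (1|2 == 3) and COMPCODE_NE
   == COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT (8|1|4 == 13).
   ORing two codes therefore gives the code of the logical OR of the
   comparisons, e.g. (a < b) || (a == b) maps to COMPCODE_LE, and
   ANDing works likewise for logical AND.  */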
90
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
107 tree, tree, tree);
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
109 HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
111 tree *, tree *);
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
119 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
125 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
126 static tree fold_binary_op_with_conditional_arg (location_t,
127 enum tree_code, tree,
128 tree, tree,
129 tree, tree, int);
130 static tree fold_mathfn_compare (location_t,
131 enum built_in_function, enum tree_code,
132 tree, tree, tree);
133 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
135 static bool reorder_operands_p (const_tree, const_tree);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
140
141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
142 Otherwise, return LOC. */
143
144 static location_t
145 expr_location_or (tree t, location_t loc)
146 {
147 location_t tloc = EXPR_LOCATION (t);
148 return tloc != UNKNOWN_LOCATION ? tloc : loc;
149 }
150
151 /* Similar to protected_set_expr_location, but never modify X in place;
152 if the location can and needs to be set, unshare X first. */
153
154 static inline tree
155 protected_set_expr_location_unshare (tree x, location_t loc)
156 {
157 if (CAN_HAVE_LOCATION_P (x)
158 && EXPR_LOCATION (x) != loc
159 && !(TREE_CODE (x) == SAVE_EXPR
160 || TREE_CODE (x) == TARGET_EXPR
161 || TREE_CODE (x) == BIND_EXPR))
162 {
163 x = copy_node (x);
164 SET_EXPR_LOCATION (x, loc);
165 }
166 return x;
167 }
168
169
170 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
171 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
172 and SUM1. Then this yields nonzero if overflow occurred during the
173 addition.
174
175 Overflow occurs if A and B have the same sign, but A and SUM differ in
176 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
177 sign. */
178 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
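/* A worked example with 8-bit values for illustration: A = 0x50 (+80),
   B = 0x50 (+80), SUM = 0xa0 (-96).  Then ~(A ^ B) = 0xff has the sign
   bit set (A and B agree in sign), A ^ SUM = 0xf0 has it set too (A and
   SUM disagree), so their AND is negative and the overflow is
   detected.  */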
179 \f
180 /* If ARG2 divides ARG1 with zero remainder, carry out the division
181 specified by CODE and return the quotient.
182 Otherwise return NULL_TREE. */
183
184 tree
185 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
186 {
187 double_int quo, rem;
188 int uns;
189
190 /* The signedness of the division is taken from operand two, which
191 does the right thing for POINTER_PLUS_EXPR, where we want
192 a signed division. */
193 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
194 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
195 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
196 uns = false;
197
198 quo = double_int_divmod (tree_to_double_int (arg1),
199 tree_to_double_int (arg2),
200 uns, code, &rem);
201
202 if (double_int_zero_p (rem))
203 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
204
205 return NULL_TREE;
206 }
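/* For example (writing plain numbers for the INTEGER_CST operands),
   div_if_zero_remainder (EXACT_DIV_EXPR, 12, 4) yields the constant 3,
   while dividing 13 by 4 leaves a remainder and yields NULL_TREE.  */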
207 \f
208 /* This is nonzero if we should defer warnings about undefined
209 overflow. This facility exists because these warnings are a
210 special case. The code to estimate loop iterations does not want
211 to issue any warnings, since it works with expressions which do not
212 occur in user code. Various bits of cleanup code call fold(), but
213 only use the result if it has certain characteristics (e.g., is a
214 constant); that code only wants to issue a warning if the result is
215 used. */
216
217 static int fold_deferring_overflow_warnings;
218
219 /* If a warning about undefined overflow is deferred, this is the
220 warning. Note that this may cause us to turn two warnings into
221 one, but that is fine since it is sufficient to only give one
222 warning per expression. */
223
224 static const char* fold_deferred_overflow_warning;
225
226 /* If a warning about undefined overflow is deferred, this is the
227 level at which the warning should be emitted. */
228
229 static enum warn_strict_overflow_code fold_deferred_overflow_code;
230
231 /* Start deferring overflow warnings. We could use a stack here to
232 permit nested calls, but at present it is not necessary. */
233
234 void
235 fold_defer_overflow_warnings (void)
236 {
237 ++fold_deferring_overflow_warnings;
238 }
239
240 /* Stop deferring overflow warnings. If there is a pending warning,
241 and ISSUE is true, then issue the warning if appropriate. STMT is
242 the statement with which the warning should be associated (used for
243 location information); STMT may be NULL. CODE is the level of the
244 warning--a warn_strict_overflow_code value. This function will use
245 the smaller of CODE and the deferred code when deciding whether to
246 issue the warning. CODE may be zero to mean to always use the
247 deferred code. */
248
249 void
250 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
251 {
252 const char *warnmsg;
253 location_t locus;
254
255 gcc_assert (fold_deferring_overflow_warnings > 0);
256 --fold_deferring_overflow_warnings;
257 if (fold_deferring_overflow_warnings > 0)
258 {
259 if (fold_deferred_overflow_warning != NULL
260 && code != 0
261 && code < (int) fold_deferred_overflow_code)
262 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
263 return;
264 }
265
266 warnmsg = fold_deferred_overflow_warning;
267 fold_deferred_overflow_warning = NULL;
268
269 if (!issue || warnmsg == NULL)
270 return;
271
272 if (gimple_no_warning_p (stmt))
273 return;
274
275 /* Use the smallest code level when deciding to issue the
276 warning. */
277 if (code == 0 || code > (int) fold_deferred_overflow_code)
278 code = fold_deferred_overflow_code;
279
280 if (!issue_strict_overflow_warning (code))
281 return;
282
283 if (stmt == NULL)
284 locus = input_location;
285 else
286 locus = gimple_location (stmt);
287 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
288 }
289
290 /* Stop deferring overflow warnings, ignoring any deferred
291 warnings. */
292
293 void
294 fold_undefer_and_ignore_overflow_warnings (void)
295 {
296 fold_undefer_overflow_warnings (false, NULL, 0);
297 }
298
299 /* Whether we are deferring overflow warnings. */
300
301 bool
302 fold_deferring_overflow_warnings_p (void)
303 {
304 return fold_deferring_overflow_warnings > 0;
305 }
306
307 /* This is called when we fold something based on the fact that signed
308 overflow is undefined. */
309
310 static void
311 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
312 {
313 if (fold_deferring_overflow_warnings > 0)
314 {
315 if (fold_deferred_overflow_warning == NULL
316 || wc < fold_deferred_overflow_code)
317 {
318 fold_deferred_overflow_warning = gmsgid;
319 fold_deferred_overflow_code = wc;
320 }
321 }
322 else if (issue_strict_overflow_warning (wc))
323 warning (OPT_Wstrict_overflow, gmsgid);
324 }
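/* A sketch of the intended usage, with illustrative names and an
   illustrative "is the result used" condition:

     fold_defer_overflow_warnings ();
     t = fold (expr);
     fold_undefer_overflow_warnings (TREE_CODE (t) == INTEGER_CST,
                                     stmt, 0);

   so the "assuming signed overflow does not occur" warning is emitted
   only when the folded result is actually used.  */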
325 \f
326 /* Return true if the built-in mathematical function specified by CODE
327 is odd, i.e. -f(x) == f(-x). */
328
329 static bool
330 negate_mathfn_p (enum built_in_function code)
331 {
332 switch (code)
333 {
334 CASE_FLT_FN (BUILT_IN_ASIN):
335 CASE_FLT_FN (BUILT_IN_ASINH):
336 CASE_FLT_FN (BUILT_IN_ATAN):
337 CASE_FLT_FN (BUILT_IN_ATANH):
338 CASE_FLT_FN (BUILT_IN_CASIN):
339 CASE_FLT_FN (BUILT_IN_CASINH):
340 CASE_FLT_FN (BUILT_IN_CATAN):
341 CASE_FLT_FN (BUILT_IN_CATANH):
342 CASE_FLT_FN (BUILT_IN_CBRT):
343 CASE_FLT_FN (BUILT_IN_CPROJ):
344 CASE_FLT_FN (BUILT_IN_CSIN):
345 CASE_FLT_FN (BUILT_IN_CSINH):
346 CASE_FLT_FN (BUILT_IN_CTAN):
347 CASE_FLT_FN (BUILT_IN_CTANH):
348 CASE_FLT_FN (BUILT_IN_ERF):
349 CASE_FLT_FN (BUILT_IN_LLROUND):
350 CASE_FLT_FN (BUILT_IN_LROUND):
351 CASE_FLT_FN (BUILT_IN_ROUND):
352 CASE_FLT_FN (BUILT_IN_SIN):
353 CASE_FLT_FN (BUILT_IN_SINH):
354 CASE_FLT_FN (BUILT_IN_TAN):
355 CASE_FLT_FN (BUILT_IN_TANH):
356 CASE_FLT_FN (BUILT_IN_TRUNC):
357 return true;
358
359 CASE_FLT_FN (BUILT_IN_LLRINT):
360 CASE_FLT_FN (BUILT_IN_LRINT):
361 CASE_FLT_FN (BUILT_IN_NEARBYINT):
362 CASE_FLT_FN (BUILT_IN_RINT):
363 return !flag_rounding_math;
364
365 default:
366 break;
367 }
368 return false;
369 }
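/* E.g. BUILT_IN_SIN qualifies because sin(-x) == -sin(x), whereas the
   even BUILT_IN_COS is deliberately absent.  The rint-style functions
   only qualify when -frounding-math is off, since under a directed
   rounding mode rint(-x) need not equal -rint(x).  */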
370
371 /* Check whether we may negate an integer constant T without causing
372 overflow. */
373
374 bool
375 may_negate_without_overflow_p (const_tree t)
376 {
377 unsigned HOST_WIDE_INT val;
378 unsigned int prec;
379 tree type;
380
381 gcc_assert (TREE_CODE (t) == INTEGER_CST);
382
383 type = TREE_TYPE (t);
384 if (TYPE_UNSIGNED (type))
385 return false;
386
387 prec = TYPE_PRECISION (type);
388 if (prec > HOST_BITS_PER_WIDE_INT)
389 {
390 if (TREE_INT_CST_LOW (t) != 0)
391 return true;
392 prec -= HOST_BITS_PER_WIDE_INT;
393 val = TREE_INT_CST_HIGH (t);
394 }
395 else
396 val = TREE_INT_CST_LOW (t);
397 if (prec < HOST_BITS_PER_WIDE_INT)
398 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
399 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
400 }
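/* For a 32-bit signed type this returns false only for INT_MIN
   (0x80000000), whose negation is not representable; every other
   value, including INT_MAX, negates without overflow.  */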
401
402 /* Determine whether an expression T can be cheaply negated using
403 the function negate_expr without introducing undefined overflow. */
404
405 static bool
406 negate_expr_p (tree t)
407 {
408 tree type;
409
410 if (t == 0)
411 return false;
412
413 type = TREE_TYPE (t);
414
415 STRIP_SIGN_NOPS (t);
416 switch (TREE_CODE (t))
417 {
418 case INTEGER_CST:
419 if (TYPE_OVERFLOW_WRAPS (type))
420 return true;
421
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t);
424 case BIT_NOT_EXPR:
425 return (INTEGRAL_TYPE_P (type)
426 && TYPE_OVERFLOW_WRAPS (type));
427
428 case FIXED_CST:
429 case NEGATE_EXPR:
430 return true;
431
432 case REAL_CST:
433 /* We want to canonicalize to positive real constants. Pretend
434 that only negative ones can be easily negated. */
435 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
436
437 case COMPLEX_CST:
438 return negate_expr_p (TREE_REALPART (t))
439 && negate_expr_p (TREE_IMAGPART (t));
440
441 case COMPLEX_EXPR:
442 return negate_expr_p (TREE_OPERAND (t, 0))
443 && negate_expr_p (TREE_OPERAND (t, 1));
444
445 case CONJ_EXPR:
446 return negate_expr_p (TREE_OPERAND (t, 0));
447
448 case PLUS_EXPR:
449 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
450 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
451 return false;
452 /* -(A + B) -> (-B) - A. */
453 if (negate_expr_p (TREE_OPERAND (t, 1))
454 && reorder_operands_p (TREE_OPERAND (t, 0),
455 TREE_OPERAND (t, 1)))
456 return true;
457 /* -(A + B) -> (-A) - B. */
458 return negate_expr_p (TREE_OPERAND (t, 0));
459
460 case MINUS_EXPR:
461 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
462 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
463 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
464 && reorder_operands_p (TREE_OPERAND (t, 0),
465 TREE_OPERAND (t, 1));
466
467 case MULT_EXPR:
468 if (TYPE_UNSIGNED (TREE_TYPE (t)))
469 break;
470
471 /* Fall through. */
472
473 case RDIV_EXPR:
474 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
475 return negate_expr_p (TREE_OPERAND (t, 1))
476 || negate_expr_p (TREE_OPERAND (t, 0));
477 break;
478
479 case TRUNC_DIV_EXPR:
480 case ROUND_DIV_EXPR:
481 case FLOOR_DIV_EXPR:
482 case CEIL_DIV_EXPR:
483 case EXACT_DIV_EXPR:
484 /* In general we can't negate A / B, because if A is INT_MIN and
485 B is 1, we may turn this into INT_MIN / -1 which is undefined
486 and actually traps on some architectures. But if overflow is
487 undefined, we can negate, because - (INT_MIN / 1) is an
488 overflow. */
489 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
490 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
491 break;
492 return negate_expr_p (TREE_OPERAND (t, 1))
493 || negate_expr_p (TREE_OPERAND (t, 0));
494
495 case NOP_EXPR:
496 /* Negate -((double)float) as (double)(-float). */
497 if (TREE_CODE (type) == REAL_TYPE)
498 {
499 tree tem = strip_float_extensions (t);
500 if (tem != t)
501 return negate_expr_p (tem);
502 }
503 break;
504
505 case CALL_EXPR:
506 /* Negate -f(x) as f(-x). */
507 if (negate_mathfn_p (builtin_mathfn_code (t)))
508 return negate_expr_p (CALL_EXPR_ARG (t, 0));
509 break;
510
511 case RSHIFT_EXPR:
512 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
513 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
514 {
515 tree op1 = TREE_OPERAND (t, 1);
516 if (TREE_INT_CST_HIGH (op1) == 0
517 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
518 == TREE_INT_CST_LOW (op1))
519 return true;
520 }
521 break;
522
523 default:
524 break;
525 }
526 return false;
527 }
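/* Some illustrative answers for signed int with undefined overflow:
   negate_expr_p holds for a - b (negated as b - a) and for any
   constant other than INT_MIN, while an unsigned constant is always
   negatable because its type wraps.  */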
528
529 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
530 simplification is possible.
531 If negate_expr_p would return true for T, NULL_TREE will never be
532 returned. */
533
534 static tree
535 fold_negate_expr (location_t loc, tree t)
536 {
537 tree type = TREE_TYPE (t);
538 tree tem;
539
540 switch (TREE_CODE (t))
541 {
542 /* Convert - (~A) to A + 1. */
543 case BIT_NOT_EXPR:
544 if (INTEGRAL_TYPE_P (type))
545 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
546 build_int_cst (type, 1));
547 break;
548
549 case INTEGER_CST:
550 tem = fold_negate_const (t, type);
551 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
552 || !TYPE_OVERFLOW_TRAPS (type))
553 return tem;
554 break;
555
556 case REAL_CST:
557 tem = fold_negate_const (t, type);
558 /* Two's complement FP formats, such as c4x, may overflow. */
559 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
560 return tem;
561 break;
562
563 case FIXED_CST:
564 tem = fold_negate_const (t, type);
565 return tem;
566
567 case COMPLEX_CST:
568 {
569 tree rpart = negate_expr (TREE_REALPART (t));
570 tree ipart = negate_expr (TREE_IMAGPART (t));
571
572 if ((TREE_CODE (rpart) == REAL_CST
573 && TREE_CODE (ipart) == REAL_CST)
574 || (TREE_CODE (rpart) == INTEGER_CST
575 && TREE_CODE (ipart) == INTEGER_CST))
576 return build_complex (type, rpart, ipart);
577 }
578 break;
579
580 case COMPLEX_EXPR:
581 if (negate_expr_p (t))
582 return fold_build2_loc (loc, COMPLEX_EXPR, type,
583 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
584 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
585 break;
586
587 case CONJ_EXPR:
588 if (negate_expr_p (t))
589 return fold_build1_loc (loc, CONJ_EXPR, type,
590 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
591 break;
592
593 case NEGATE_EXPR:
594 return TREE_OPERAND (t, 0);
595
596 case PLUS_EXPR:
597 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
599 {
600 /* -(A + B) -> (-B) - A. */
601 if (negate_expr_p (TREE_OPERAND (t, 1))
602 && reorder_operands_p (TREE_OPERAND (t, 0),
603 TREE_OPERAND (t, 1)))
604 {
605 tem = negate_expr (TREE_OPERAND (t, 1));
606 return fold_build2_loc (loc, MINUS_EXPR, type,
607 tem, TREE_OPERAND (t, 0));
608 }
609
610 /* -(A + B) -> (-A) - B. */
611 if (negate_expr_p (TREE_OPERAND (t, 0)))
612 {
613 tem = negate_expr (TREE_OPERAND (t, 0));
614 return fold_build2_loc (loc, MINUS_EXPR, type,
615 tem, TREE_OPERAND (t, 1));
616 }
617 }
618 break;
619
620 case MINUS_EXPR:
621 /* - (A - B) -> B - A */
622 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
623 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
624 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
625 return fold_build2_loc (loc, MINUS_EXPR, type,
626 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
627 break;
628
629 case MULT_EXPR:
630 if (TYPE_UNSIGNED (type))
631 break;
632
633 /* Fall through. */
634
635 case RDIV_EXPR:
636 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
637 {
638 tem = TREE_OPERAND (t, 1);
639 if (negate_expr_p (tem))
640 return fold_build2_loc (loc, TREE_CODE (t), type,
641 TREE_OPERAND (t, 0), negate_expr (tem));
642 tem = TREE_OPERAND (t, 0);
643 if (negate_expr_p (tem))
644 return fold_build2_loc (loc, TREE_CODE (t), type,
645 negate_expr (tem), TREE_OPERAND (t, 1));
646 }
647 break;
648
649 case TRUNC_DIV_EXPR:
650 case ROUND_DIV_EXPR:
651 case FLOOR_DIV_EXPR:
652 case CEIL_DIV_EXPR:
653 case EXACT_DIV_EXPR:
654 /* In general we can't negate A / B, because if A is INT_MIN and
655 B is 1, we may turn this into INT_MIN / -1 which is undefined
656 and actually traps on some architectures. But if overflow is
657 undefined, we can negate, because - (INT_MIN / 1) is an
658 overflow. */
659 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
660 {
661 const char * const warnmsg = G_("assuming signed overflow does not "
662 "occur when negating a division");
663 tem = TREE_OPERAND (t, 1);
664 if (negate_expr_p (tem))
665 {
666 if (INTEGRAL_TYPE_P (type)
667 && (TREE_CODE (tem) != INTEGER_CST
668 || integer_onep (tem)))
669 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
670 return fold_build2_loc (loc, TREE_CODE (t), type,
671 TREE_OPERAND (t, 0), negate_expr (tem));
672 }
673 tem = TREE_OPERAND (t, 0);
674 if (negate_expr_p (tem))
675 {
676 if (INTEGRAL_TYPE_P (type)
677 && (TREE_CODE (tem) != INTEGER_CST
678 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
679 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
680 return fold_build2_loc (loc, TREE_CODE (t), type,
681 negate_expr (tem), TREE_OPERAND (t, 1));
682 }
683 }
684 break;
685
686 case NOP_EXPR:
687 /* Convert -((double)float) into (double)(-float). */
688 if (TREE_CODE (type) == REAL_TYPE)
689 {
690 tem = strip_float_extensions (t);
691 if (tem != t && negate_expr_p (tem))
692 return fold_convert_loc (loc, type, negate_expr (tem));
693 }
694 break;
695
696 case CALL_EXPR:
697 /* Negate -f(x) as f(-x). */
698 if (negate_mathfn_p (builtin_mathfn_code (t))
699 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
700 {
701 tree fndecl, arg;
702
703 fndecl = get_callee_fndecl (t);
704 arg = negate_expr (CALL_EXPR_ARG (t, 0));
705 return build_call_expr_loc (loc, fndecl, 1, arg);
706 }
707 break;
708
709 case RSHIFT_EXPR:
710 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
711 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
712 {
713 tree op1 = TREE_OPERAND (t, 1);
714 if (TREE_INT_CST_HIGH (op1) == 0
715 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
716 == TREE_INT_CST_LOW (op1))
717 {
718 tree ntype = TYPE_UNSIGNED (type)
719 ? signed_type_for (type)
720 : unsigned_type_for (type);
721 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
722 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
723 return fold_convert_loc (loc, type, temp);
724 }
725 }
726 break;
727
728 default:
729 break;
730 }
731
732 return NULL_TREE;
733 }
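/* For example, given T == ~a this returns a + 1 (as -~a == a + 1),
   given T == a - b it returns b - a, and given a 32-bit int x it
   rewrites -(x >> 31) as (int) ((unsigned) x >> 31), each under the
   overflow and rounding conditions checked above.  */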
734
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
736 negated in a simpler way. Also allow T to be NULL_TREE, in which case
737 return NULL_TREE. */
738
739 static tree
740 negate_expr (tree t)
741 {
742 tree type, tem;
743 location_t loc;
744
745 if (t == NULL_TREE)
746 return NULL_TREE;
747
748 loc = EXPR_LOCATION (t);
749 type = TREE_TYPE (t);
750 STRIP_SIGN_NOPS (t);
751
752 tem = fold_negate_expr (loc, t);
753 if (!tem)
754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
755 return fold_convert_loc (loc, type, tem);
756 }
757 \f
758 /* Split a tree IN into constant, literal and variable parts that could be
759 combined with CODE to make IN. Here "constant" means an expression with
760 TREE_CONSTANT set but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
765
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted, except if it is a
768 literal, for which we use *MINUS_LITP instead.
769
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead.
772
773 If IN is itself a literal or constant, return it as appropriate.
774
775 Note that we do not guarantee that any of the three values will be the
776 same type as IN, but they will have the same signedness and mode. */
777
778 static tree
779 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
780 tree *minus_litp, int negate_p)
781 {
782 tree var = 0;
783
784 *conp = 0;
785 *litp = 0;
786 *minus_litp = 0;
787
788 /* Strip any conversions that don't change the machine mode or signedness. */
789 STRIP_SIGN_NOPS (in);
790
791 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
792 || TREE_CODE (in) == FIXED_CST)
793 *litp = in;
794 else if (TREE_CODE (in) == code
795 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
796 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
797 /* We can associate addition and subtraction together (even
798 though the C standard doesn't say so) for integers because
799 the value is not affected. For reals, the value might be
800 affected, so we can't. */
801 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
802 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
803 {
804 tree op0 = TREE_OPERAND (in, 0);
805 tree op1 = TREE_OPERAND (in, 1);
806 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
807 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
808
809 /* First see if either of the operands is a literal, then a constant. */
810 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
811 || TREE_CODE (op0) == FIXED_CST)
812 *litp = op0, op0 = 0;
813 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
814 || TREE_CODE (op1) == FIXED_CST)
815 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
816
817 if (op0 != 0 && TREE_CONSTANT (op0))
818 *conp = op0, op0 = 0;
819 else if (op1 != 0 && TREE_CONSTANT (op1))
820 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
821
822 /* If we haven't dealt with either operand, this is not a case we can
823 decompose. Otherwise, VAR is either of the ones remaining, if any. */
824 if (op0 != 0 && op1 != 0)
825 var = in;
826 else if (op0 != 0)
827 var = op0;
828 else
829 var = op1, neg_var_p = neg1_p;
830
831 /* Now do any needed negations. */
832 if (neg_litp_p)
833 *minus_litp = *litp, *litp = 0;
834 if (neg_conp_p)
835 *conp = negate_expr (*conp);
836 if (neg_var_p)
837 var = negate_expr (var);
838 }
839 else if (TREE_CONSTANT (in))
840 *conp = in;
841 else
842 var = in;
843
844 if (negate_p)
845 {
846 if (*litp)
847 *minus_litp = *litp, *litp = 0;
848 else if (*minus_litp)
849 *litp = *minus_litp, *minus_litp = 0;
850 *conp = negate_expr (*conp);
851 var = negate_expr (var);
852 }
853
854 return var;
855 }
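/* An illustrative decomposition: splitting IN == a - 5 with CODE ==
   PLUS_EXPR stores 5 in *MINUS_LITP (it was subtracted), leaves *CONP
   and *LITP null, and returns the variable part "a", from which the
   caller can rebuild the value as a + (-5).  */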
856
857 /* Re-associate trees split by the above function. T1 and T2 are
858 either expressions to associate or null. Return the new
859 expression, if any. LOC is the location of the new expression. If
860 we build an operation, do it in TYPE and with CODE. */
861
862 static tree
863 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
864 {
865 if (t1 == 0)
866 return t2;
867 else if (t2 == 0)
868 return t1;
869
870 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
871 try to fold this since we will have infinite recursion. But do
872 deal with any NEGATE_EXPRs. */
873 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
874 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
875 {
876 if (code == PLUS_EXPR)
877 {
878 if (TREE_CODE (t1) == NEGATE_EXPR)
879 return build2_loc (loc, MINUS_EXPR, type,
880 fold_convert_loc (loc, type, t2),
881 fold_convert_loc (loc, type,
882 TREE_OPERAND (t1, 0)));
883 else if (TREE_CODE (t2) == NEGATE_EXPR)
884 return build2_loc (loc, MINUS_EXPR, type,
885 fold_convert_loc (loc, type, t1),
886 fold_convert_loc (loc, type,
887 TREE_OPERAND (t2, 0)));
888 else if (integer_zerop (t2))
889 return fold_convert_loc (loc, type, t1);
890 }
891 else if (code == MINUS_EXPR)
892 {
893 if (integer_zerop (t2))
894 return fold_convert_loc (loc, type, t1);
895 }
896
897 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
898 fold_convert_loc (loc, type, t2));
899 }
900
901 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
902 fold_convert_loc (loc, type, t2));
903 }
904 \f
905 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
906 for use in int_const_binop, size_binop and size_diffop. */
907
908 static bool
909 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
910 {
911 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
912 return false;
913 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
914 return false;
915
916 switch (code)
917 {
918 case LSHIFT_EXPR:
919 case RSHIFT_EXPR:
920 case LROTATE_EXPR:
921 case RROTATE_EXPR:
922 return true;
923
924 default:
925 break;
926 }
927
928 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
929 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
930 && TYPE_MODE (type1) == TYPE_MODE (type2);
931 }
932
933
934 /* Combine two integer constants ARG1 and ARG2 under operation CODE
935 to produce a new constant. Return NULL_TREE if we don't know how
936 to evaluate CODE at compile-time. */
937
938 tree
939 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
940 {
941 double_int op1, op2, res, tmp;
942 tree t;
943 tree type = TREE_TYPE (arg1);
944 bool uns = TYPE_UNSIGNED (type);
945 bool is_sizetype
946 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
947 bool overflow = false;
948
949 op1 = tree_to_double_int (arg1);
950 op2 = tree_to_double_int (arg2);
951
952 switch (code)
953 {
954 case BIT_IOR_EXPR:
955 res = double_int_ior (op1, op2);
956 break;
957
958 case BIT_XOR_EXPR:
959 res = double_int_xor (op1, op2);
960 break;
961
962 case BIT_AND_EXPR:
963 res = double_int_and (op1, op2);
964 break;
965
966 case RSHIFT_EXPR:
967 res = double_int_rshift (op1, double_int_to_shwi (op2),
968 TYPE_PRECISION (type), !uns);
969 break;
970
971 case LSHIFT_EXPR:
972 /* It's unclear from the C standard whether shifts can overflow.
973 The following code ignores overflow; perhaps a C standard
974 interpretation ruling is needed. */
975 res = double_int_lshift (op1, double_int_to_shwi (op2),
976 TYPE_PRECISION (type), !uns);
977 break;
978
979 case RROTATE_EXPR:
980 res = double_int_rrotate (op1, double_int_to_shwi (op2),
981 TYPE_PRECISION (type));
982 break;
983
984 case LROTATE_EXPR:
985 res = double_int_lrotate (op1, double_int_to_shwi (op2),
986 TYPE_PRECISION (type));
987 break;
988
989 case PLUS_EXPR:
990 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
991 &res.low, &res.high);
992 break;
993
994 case MINUS_EXPR:
995 neg_double (op2.low, op2.high, &res.low, &res.high);
996 add_double (op1.low, op1.high, res.low, res.high,
997 &res.low, &res.high);
998 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
999 break;
1000
1001 case MULT_EXPR:
1002 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
1003 &res.low, &res.high);
1004 break;
1005
1006 case TRUNC_DIV_EXPR:
1007 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1008 case EXACT_DIV_EXPR:
1009 /* This is a shortcut for a common special case. */
1010 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1011 && !TREE_OVERFLOW (arg1)
1012 && !TREE_OVERFLOW (arg2)
1013 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1014 {
1015 if (code == CEIL_DIV_EXPR)
1016 op1.low += op2.low - 1;
1017
1018 res.low = op1.low / op2.low, res.high = 0;
1019 break;
1020 }
1021
1022 /* ... fall through ... */
1023
1024 case ROUND_DIV_EXPR:
1025 if (double_int_zero_p (op2))
1026 return NULL_TREE;
1027 if (double_int_one_p (op2))
1028 {
1029 res = op1;
1030 break;
1031 }
1032 if (double_int_equal_p (op1, op2)
1033 && ! double_int_zero_p (op1))
1034 {
1035 res = double_int_one;
1036 break;
1037 }
1038 overflow = div_and_round_double (code, uns,
1039 op1.low, op1.high, op2.low, op2.high,
1040 &res.low, &res.high,
1041 &tmp.low, &tmp.high);
1042 break;
1043
1044 case TRUNC_MOD_EXPR:
1045 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1046 /* This is a shortcut for a common special case. */
1047 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1048 && !TREE_OVERFLOW (arg1)
1049 && !TREE_OVERFLOW (arg2)
1050 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1051 {
1052 if (code == CEIL_MOD_EXPR)
1053 op1.low += op2.low - 1;
1054 res.low = op1.low % op2.low, res.high = 0;
1055 break;
1056 }
1057
1058 /* ... fall through ... */
1059
1060 case ROUND_MOD_EXPR:
1061 if (double_int_zero_p (op2))
1062 return NULL_TREE;
1063 overflow = div_and_round_double (code, uns,
1064 op1.low, op1.high, op2.low, op2.high,
1065 &tmp.low, &tmp.high,
1066 &res.low, &res.high);
1067 break;
1068
1069 case MIN_EXPR:
1070 res = double_int_min (op1, op2, uns);
1071 break;
1072
1073 case MAX_EXPR:
1074 res = double_int_max (op1, op2, uns);
1075 break;
1076
1077 default:
1078 return NULL_TREE;
1079 }
1080
1081 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1082 ((!uns || is_sizetype) && overflow)
1083 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1084
1085 return t;
1086 }
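/* For example (writing plain numbers for the INTEGER_CST operands),
   int_const_binop (PLUS_EXPR, 7, 8) on signed ints yields 15, while
   adding 1 to INT_MAX yields INT_MIN with TREE_OVERFLOW set on the
   result, as force_fit_type_double records the signed overflow rather
   than failing.  */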
1087
1088 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1089 constant. We assume ARG1 and ARG2 have the same data type, or at least
1090 are the same kind of constant and the same machine mode. Return NULL_TREE
1091 if combining the constants is not allowed in the current operating mode. */
1092
1093 static tree
1094 const_binop (enum tree_code code, tree arg1, tree arg2)
1095 {
1096 /* Sanity check for the recursive cases. */
1097 if (!arg1 || !arg2)
1098 return NULL_TREE;
1099
1100 STRIP_NOPS (arg1);
1101 STRIP_NOPS (arg2);
1102
1103 if (TREE_CODE (arg1) == INTEGER_CST)
1104 return int_const_binop (code, arg1, arg2);
1105
1106 if (TREE_CODE (arg1) == REAL_CST)
1107 {
1108 enum machine_mode mode;
1109 REAL_VALUE_TYPE d1;
1110 REAL_VALUE_TYPE d2;
1111 REAL_VALUE_TYPE value;
1112 REAL_VALUE_TYPE result;
1113 bool inexact;
1114 tree t, type;
1115
1116 /* The following codes are handled by real_arithmetic. */
1117 switch (code)
1118 {
1119 case PLUS_EXPR:
1120 case MINUS_EXPR:
1121 case MULT_EXPR:
1122 case RDIV_EXPR:
1123 case MIN_EXPR:
1124 case MAX_EXPR:
1125 break;
1126
1127 default:
1128 return NULL_TREE;
1129 }
1130
1131 d1 = TREE_REAL_CST (arg1);
1132 d2 = TREE_REAL_CST (arg2);
1133
1134 type = TREE_TYPE (arg1);
1135 mode = TYPE_MODE (type);
1136
1137 /* Don't perform the operation if we honor signaling NaNs and
1138 either operand is a NaN. */
1139 if (HONOR_SNANS (mode)
1140 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1141 return NULL_TREE;
1142
1143 /* Don't perform the operation if it would raise a division
1144 by zero exception. */
1145 if (code == RDIV_EXPR
1146 && REAL_VALUES_EQUAL (d2, dconst0)
1147 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1148 return NULL_TREE;
1149
1150 /* If either operand is a NaN, just return it. Otherwise, set up
1151 for floating-point trap; we return an overflow. */
1152 if (REAL_VALUE_ISNAN (d1))
1153 return arg1;
1154 else if (REAL_VALUE_ISNAN (d2))
1155 return arg2;
1156
1157 inexact = real_arithmetic (&value, code, &d1, &d2);
1158 real_convert (&result, mode, &value);
1159
1160 /* Don't constant fold this floating point operation if
1161 the result has overflowed and flag_trapping_math is set. */
1162 if (flag_trapping_math
1163 && MODE_HAS_INFINITIES (mode)
1164 && REAL_VALUE_ISINF (result)
1165 && !REAL_VALUE_ISINF (d1)
1166 && !REAL_VALUE_ISINF (d2))
1167 return NULL_TREE;
1168
1169 /* Don't constant fold this floating point operation if the
1170 result may depend upon the run-time rounding mode and
1171 flag_rounding_math is set, or if GCC's software emulation
1172 is unable to accurately represent the result. */
1173 if ((flag_rounding_math
1174 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1175 && (inexact || !real_identical (&result, &value)))
1176 return NULL_TREE;
1177
1178 t = build_real (type, result);
1179
1180 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1181 return t;
1182 }
1183
1184 if (TREE_CODE (arg1) == FIXED_CST)
1185 {
1186 FIXED_VALUE_TYPE f1;
1187 FIXED_VALUE_TYPE f2;
1188 FIXED_VALUE_TYPE result;
1189 tree t, type;
1190 int sat_p;
1191 bool overflow_p;
1192
1193 /* The following codes are handled by fixed_arithmetic. */
1194 switch (code)
1195 {
1196 case PLUS_EXPR:
1197 case MINUS_EXPR:
1198 case MULT_EXPR:
1199 case TRUNC_DIV_EXPR:
1200 f2 = TREE_FIXED_CST (arg2);
1201 break;
1202
1203 case LSHIFT_EXPR:
1204 case RSHIFT_EXPR:
1205 f2.data.high = TREE_INT_CST_HIGH (arg2);
1206 f2.data.low = TREE_INT_CST_LOW (arg2);
1207 f2.mode = SImode;
1208 break;
1209
1210 default:
1211 return NULL_TREE;
1212 }
1213
1214 f1 = TREE_FIXED_CST (arg1);
1215 type = TREE_TYPE (arg1);
1216 sat_p = TYPE_SATURATING (type);
1217 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1218 t = build_fixed (type, result);
1219 /* Propagate overflow flags. */
1220 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1221 TREE_OVERFLOW (t) = 1;
1222 return t;
1223 }
1224
1225 if (TREE_CODE (arg1) == COMPLEX_CST)
1226 {
1227 tree type = TREE_TYPE (arg1);
1228 tree r1 = TREE_REALPART (arg1);
1229 tree i1 = TREE_IMAGPART (arg1);
1230 tree r2 = TREE_REALPART (arg2);
1231 tree i2 = TREE_IMAGPART (arg2);
1232 tree real, imag;
1233
1234 switch (code)
1235 {
1236 case PLUS_EXPR:
1237 case MINUS_EXPR:
1238 real = const_binop (code, r1, r2);
1239 imag = const_binop (code, i1, i2);
1240 break;
1241
1242 case MULT_EXPR:
1243 if (COMPLEX_FLOAT_TYPE_P (type))
1244 return do_mpc_arg2 (arg1, arg2, type,
1245 /* do_nonfinite= */ folding_initializer,
1246 mpc_mul);
1247
1248 real = const_binop (MINUS_EXPR,
1249 const_binop (MULT_EXPR, r1, r2),
1250 const_binop (MULT_EXPR, i1, i2));
1251 imag = const_binop (PLUS_EXPR,
1252 const_binop (MULT_EXPR, r1, i2),
1253 const_binop (MULT_EXPR, i1, r2));
1254 break;
1255
1256 case RDIV_EXPR:
1257 if (COMPLEX_FLOAT_TYPE_P (type))
1258 return do_mpc_arg2 (arg1, arg2, type,
1259 /* do_nonfinite= */ folding_initializer,
1260 mpc_div);
1261 /* Fallthru ... */
1262 case TRUNC_DIV_EXPR:
1263 case CEIL_DIV_EXPR:
1264 case FLOOR_DIV_EXPR:
1265 case ROUND_DIV_EXPR:
1266 if (flag_complex_method == 0)
1267 {
1268 /* Keep this algorithm in sync with
1269 tree-complex.c:expand_complex_div_straight().
1270
1271 Expand complex division to scalars, straightforward algorithm.
1272 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1273 t = br*br + bi*bi
1274 */
1275 tree magsquared
1276 = const_binop (PLUS_EXPR,
1277 const_binop (MULT_EXPR, r2, r2),
1278 const_binop (MULT_EXPR, i2, i2));
1279 tree t1
1280 = const_binop (PLUS_EXPR,
1281 const_binop (MULT_EXPR, r1, r2),
1282 const_binop (MULT_EXPR, i1, i2));
1283 tree t2
1284 = const_binop (MINUS_EXPR,
1285 const_binop (MULT_EXPR, i1, r2),
1286 const_binop (MULT_EXPR, r1, i2));
1287
1288 real = const_binop (code, t1, magsquared);
1289 imag = const_binop (code, t2, magsquared);
1290 }
1291 else
1292 {
1293 /* Keep this algorithm in sync with
1294 tree-complex.c:expand_complex_div_wide().
1295
1296 Expand complex division to scalars, modified algorithm to minimize
1297 overflow with wide input ranges. */
1298 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1299 fold_abs_const (r2, TREE_TYPE (type)),
1300 fold_abs_const (i2, TREE_TYPE (type)));
1301
1302 if (integer_nonzerop (compare))
1303 {
1304 /* In the TRUE branch, we compute
1305 ratio = br/bi;
1306 div = (br * ratio) + bi;
1307 tr = (ar * ratio) + ai;
1308 ti = (ai * ratio) - ar;
1309 tr = tr / div;
1310 ti = ti / div; */
1311 tree ratio = const_binop (code, r2, i2);
1312 tree div = const_binop (PLUS_EXPR, i2,
1313 const_binop (MULT_EXPR, r2, ratio));
1314 real = const_binop (MULT_EXPR, r1, ratio);
1315 real = const_binop (PLUS_EXPR, real, i1);
1316 real = const_binop (code, real, div);
1317
1318 imag = const_binop (MULT_EXPR, i1, ratio);
1319 imag = const_binop (MINUS_EXPR, imag, r1);
1320 imag = const_binop (code, imag, div);
1321 }
1322 else
1323 {
1324 /* In the FALSE branch, we compute
1325 ratio = bi/br;
1326 div = (bi * ratio) + br;
1327 tr = (ai * ratio) + ar;
1328 ti = ai - (ar * ratio);
1329 tr = tr / div;
1330 ti = ti / div; */
1331 tree ratio = const_binop (code, i2, r2);
1332 tree div = const_binop (PLUS_EXPR, r2,
1333 const_binop (MULT_EXPR, i2, ratio));
1334
1335 real = const_binop (MULT_EXPR, i1, ratio);
1336 real = const_binop (PLUS_EXPR, real, r1);
1337 real = const_binop (code, real, div);
1338
1339 imag = const_binop (MULT_EXPR, r1, ratio);
1340 imag = const_binop (MINUS_EXPR, i1, imag);
1341 imag = const_binop (code, imag, div);
1342 }
1343 }
1344 break;
1345
1346 default:
1347 return NULL_TREE;
1348 }
1349
1350 if (real && imag)
1351 return build_complex (type, real, imag);
1352 }
1353
1354 if (TREE_CODE (arg1) == VECTOR_CST)
1355 {
1356 tree type = TREE_TYPE (arg1);
1357 int count = TYPE_VECTOR_SUBPARTS (type), i;
1358 tree elements1, elements2, list = NULL_TREE;
1359
1360 if (TREE_CODE (arg2) != VECTOR_CST)
1361 return NULL_TREE;
1362
1363 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1364 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1365
1366 for (i = 0; i < count; i++)
1367 {
1368 tree elem1, elem2, elem;
1369
1370 /* The trailing elements can be empty and should be treated as 0. */
1371 if (!elements1)
1372 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1373 else
1374 {
1375 elem1 = TREE_VALUE (elements1);
1376 elements1 = TREE_CHAIN (elements1);
1377 }
1378
1379 if (!elements2)
1380 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1381 else
1382 {
1383 elem2 = TREE_VALUE (elements2);
1384 elements2 = TREE_CHAIN (elements2);
1385 }
1386
1387 elem = const_binop (code, elem1, elem2);
1388
1389 /* It is possible that const_binop cannot handle the given
1390 code and returns NULL_TREE. */
1391 if (elem == NULL_TREE)
1392 return NULL_TREE;
1393
1394 list = tree_cons (NULL_TREE, elem, list);
1395 }
1396 return build_vector (type, nreverse (list));
1397 }
1398 return NULL_TREE;
1399 }
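/* A sketch of the dispatch above: folding 1.5 + 2.25 goes through
   real_arithmetic and yields 3.75; complex division follows one of the
   two algorithms mirrored from tree-complex.c; and NULL_TREE is
   returned whenever folding must be declined, e.g. when the result
   would depend on the run-time rounding mode under -frounding-math.  */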
1400
1401 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1402 indicates which particular sizetype to create. */
1403
1404 tree
1405 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1406 {
1407 return build_int_cst (sizetype_tab[(int) kind], number);
1408 }
1409 \f
1410 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1411 is a tree code. The type of the result is taken from the operands.
1412 Both must be equivalent integer types, a la int_binop_types_match_p.
1413 If the operands are constant, so is the result. */
1414
1415 tree
1416 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1417 {
1418 tree type = TREE_TYPE (arg0);
1419
1420 if (arg0 == error_mark_node || arg1 == error_mark_node)
1421 return error_mark_node;
1422
1423 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1424 TREE_TYPE (arg1)));
1425
1426 /* Handle the special case of two integer constants faster. */
1427 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1428 {
1429 /* And some specific cases even faster than that. */
1430 if (code == PLUS_EXPR)
1431 {
1432 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1433 return arg1;
1434 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1435 return arg0;
1436 }
1437 else if (code == MINUS_EXPR)
1438 {
1439 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1440 return arg0;
1441 }
1442 else if (code == MULT_EXPR)
1443 {
1444 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1445 return arg1;
1446 }
1447
1448 /* Handle general case of two integer constants. */
1449 return int_const_binop (code, arg0, arg1);
1450 }
1451
1452 return fold_build2_loc (loc, code, type, arg0, arg1);
1453 }
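/* For example, size_binop (PLUS_EXPR, size_int (0), size_int (7))
   returns the second operand directly via the fast path above, while
   size_binop (MULT_EXPR, size_int (4), size_int (8)) folds to the
   sizetype constant 32 through int_const_binop.  */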
1454
1455 /* Given two values, either both of sizetype or both of bitsizetype,
1456 compute the difference between the two values. Return the value
1457 in the signed type corresponding to the type of the operands. */
1458
1459 tree
1460 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1461 {
1462 tree type = TREE_TYPE (arg0);
1463 tree ctype;
1464
1465 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1466 TREE_TYPE (arg1)));
1467
1468 /* If the type is already signed, just do the simple thing. */
1469 if (!TYPE_UNSIGNED (type))
1470 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1471
1472 if (type == sizetype)
1473 ctype = ssizetype;
1474 else if (type == bitsizetype)
1475 ctype = sbitsizetype;
1476 else
1477 ctype = signed_type_for (type);
1478
1479 /* If either operand is not a constant, do the conversions to the signed
1480 type and subtract. The hardware will do the right thing with any
1481 overflow in the subtraction. */
1482 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1483 return size_binop_loc (loc, MINUS_EXPR,
1484 fold_convert_loc (loc, ctype, arg0),
1485 fold_convert_loc (loc, ctype, arg1));
1486
1487 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1488 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1489 overflow) and negate (which can't either). Special-case a result
1490 of zero while we're here. */
1491 if (tree_int_cst_equal (arg0, arg1))
1492 return build_int_cst (ctype, 0);
1493 else if (tree_int_cst_lt (arg1, arg0))
1494 return fold_convert_loc (loc, ctype,
1495 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1496 else
1497 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1498 fold_convert_loc (loc, ctype,
1499 size_binop_loc (loc,
1500 MINUS_EXPR,
1501 arg1, arg0)));
1502 }
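/* For instance, with sizetype arguments ARG0 == 4 and ARG1 == 8 the
   result is the ssizetype constant -4, computed as the negation of
   8 - 4 so that no intermediate subtraction wraps.  */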
1503 \f
1504 /* A subroutine of fold_convert_const handling conversions of an
1505 INTEGER_CST to another integer type. */
1506
1507 static tree
1508 fold_convert_const_int_from_int (tree type, const_tree arg1)
1509 {
1510 tree t;
1511
1512 /* Given an integer constant, make new constant with new type,
1513 appropriately sign-extended or truncated. */
1514 t = force_fit_type_double (type, tree_to_double_int (arg1),
1515 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1516 (TREE_INT_CST_HIGH (arg1) < 0
1517 && (TYPE_UNSIGNED (type)
1518 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1519 | TREE_OVERFLOW (arg1));
1520
1521 return t;
1522 }
1523
1524 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1525 to an integer type. */
1526
1527 static tree
1528 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1529 {
1530 int overflow = 0;
1531 tree t;
1532
1533 /* The following code implements the floating point to integer
1534 conversion rules required by the Java Language Specification,
1535 that IEEE NaNs are mapped to zero and values that overflow
1536 the target precision saturate, i.e. values greater than
1537 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1538 are mapped to INT_MIN. These semantics are allowed by the
1539 C and C++ standards that simply state that the behavior of
1540 FP-to-integer conversion is unspecified upon overflow. */
1541
1542 double_int val;
1543 REAL_VALUE_TYPE r;
1544 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1545
1546 switch (code)
1547 {
1548 case FIX_TRUNC_EXPR:
1549 real_trunc (&r, VOIDmode, &x);
1550 break;
1551
1552 default:
1553 gcc_unreachable ();
1554 }
1555
1556 /* If R is NaN, return zero and show we have an overflow. */
1557 if (REAL_VALUE_ISNAN (r))
1558 {
1559 overflow = 1;
1560 val = double_int_zero;
1561 }
1562
1563 /* See if R is less than the lower bound or greater than the
1564 upper bound. */
1565
1566 if (! overflow)
1567 {
1568 tree lt = TYPE_MIN_VALUE (type);
1569 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1570 if (REAL_VALUES_LESS (r, l))
1571 {
1572 overflow = 1;
1573 val = tree_to_double_int (lt);
1574 }
1575 }
1576
1577 if (! overflow)
1578 {
1579 tree ut = TYPE_MAX_VALUE (type);
1580 if (ut)
1581 {
1582 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1583 if (REAL_VALUES_LESS (u, r))
1584 {
1585 overflow = 1;
1586 val = tree_to_double_int (ut);
1587 }
1588 }
1589 }
1590
1591 if (! overflow)
1592 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1593
1594 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1595 return t;
1596 }
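/* Under these rules, (int) 3.75 folds to 3, a NaN folds to 0, and
   1.0e30 folds to INT_MAX for a 32-bit int, with TREE_OVERFLOW set in
   the latter two cases so callers can see that saturation happened.  */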
1597
1598 /* A subroutine of fold_convert_const handling conversions of a
1599 FIXED_CST to an integer type. */
1600
1601 static tree
1602 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1603 {
1604 tree t;
1605 double_int temp, temp_trunc;
1606 unsigned int mode;
1607
1608 /* Right shift FIXED_CST to temp by fbit. */
1609 temp = TREE_FIXED_CST (arg1).data;
1610 mode = TREE_FIXED_CST (arg1).mode;
1611 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1612 {
1613 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1614 HOST_BITS_PER_DOUBLE_INT,
1615 SIGNED_FIXED_POINT_MODE_P (mode));
1616
1617 /* Left shift temp to temp_trunc by fbit. */
1618 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1619 HOST_BITS_PER_DOUBLE_INT,
1620 SIGNED_FIXED_POINT_MODE_P (mode));
1621 }
1622 else
1623 {
1624 temp = double_int_zero;
1625 temp_trunc = double_int_zero;
1626 }
1627
1628 /* If FIXED_CST is negative, we need to round the value toward 0:
1629 if the fractional bits are not zero, add 1 to TEMP. */
1630 if (SIGNED_FIXED_POINT_MODE_P (mode)
1631 && double_int_negative_p (temp_trunc)
1632 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1633 temp = double_int_add (temp, double_int_one);
1634
1635 /* Given a fixed-point constant, make new constant with new type,
1636 appropriately sign-extended or truncated. */
1637 t = force_fit_type_double (type, temp, -1,
1638 (double_int_negative_p (temp)
1639 && (TYPE_UNSIGNED (type)
1640 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1641 | TREE_OVERFLOW (arg1));
1642
1643 return t;
1644 }
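/* E.g. converting the signed fixed-point value -2.5: the arithmetic
   right shift gives -3 (rounding toward -inf), the fractional bits are
   seen to be nonzero, and the +1 adjustment produces the correctly
   truncated -2.  */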
1645
1646 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1647 to another floating point type. */
1648
1649 static tree
1650 fold_convert_const_real_from_real (tree type, const_tree arg1)
1651 {
1652 REAL_VALUE_TYPE value;
1653 tree t;
1654
1655 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1656 t = build_real (type, value);
1657
1658 /* If converting an infinity or NAN to a representation that doesn't
1659 have one, set the overflow bit so that we can produce some kind of
1660 error message at the appropriate point if necessary. It's not the
1661 most user-friendly message, but it's better than nothing. */
1662 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1663 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1664 TREE_OVERFLOW (t) = 1;
1665 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1666 && !MODE_HAS_NANS (TYPE_MODE (type)))
1667 TREE_OVERFLOW (t) = 1;
1668 /* Regular overflow, conversion produced an infinity in a mode that
1669 can't represent them. */
1670 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1671 && REAL_VALUE_ISINF (value)
1672 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1673 TREE_OVERFLOW (t) = 1;
1674 else
1675 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1676 return t;
1677 }
1678
1679 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1680 to a floating point type. */
1681
1682 static tree
1683 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1684 {
1685 REAL_VALUE_TYPE value;
1686 tree t;
1687
1688 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1689 t = build_real (type, value);
1690
1691 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1692 return t;
1693 }
1694
1695 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1696 to another fixed-point type. */
1697
1698 static tree
1699 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1700 {
1701 FIXED_VALUE_TYPE value;
1702 tree t;
1703 bool overflow_p;
1704
1705 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1706 TYPE_SATURATING (type));
1707 t = build_fixed (type, value);
1708
1709 /* Propagate overflow flags. */
1710 if (overflow_p | TREE_OVERFLOW (arg1))
1711 TREE_OVERFLOW (t) = 1;
1712 return t;
1713 }
1714
1715 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1716 to a fixed-point type. */
1717
1718 static tree
1719 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1720 {
1721 FIXED_VALUE_TYPE value;
1722 tree t;
1723 bool overflow_p;
1724
1725 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1726 TREE_INT_CST (arg1),
1727 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1728 TYPE_SATURATING (type));
1729 t = build_fixed (type, value);
1730
1731 /* Propagate overflow flags. */
1732 if (overflow_p | TREE_OVERFLOW (arg1))
1733 TREE_OVERFLOW (t) = 1;
1734 return t;
1735 }
1736
1737 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1738 to a fixed-point type. */
1739
1740 static tree
1741 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1742 {
1743 FIXED_VALUE_TYPE value;
1744 tree t;
1745 bool overflow_p;
1746
1747 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1748 &TREE_REAL_CST (arg1),
1749 TYPE_SATURATING (type));
1750 t = build_fixed (type, value);
1751
1752 /* Propagate overflow flags. */
1753 if (overflow_p | TREE_OVERFLOW (arg1))
1754 TREE_OVERFLOW (t) = 1;
1755 return t;
1756 }
1757
1758 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1759 type TYPE. If no simplification can be done return NULL_TREE. */
1760
1761 static tree
1762 fold_convert_const (enum tree_code code, tree type, tree arg1)
1763 {
1764 if (TREE_TYPE (arg1) == type)
1765 return arg1;
1766
1767 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1768 || TREE_CODE (type) == OFFSET_TYPE)
1769 {
1770 if (TREE_CODE (arg1) == INTEGER_CST)
1771 return fold_convert_const_int_from_int (type, arg1);
1772 else if (TREE_CODE (arg1) == REAL_CST)
1773 return fold_convert_const_int_from_real (code, type, arg1);
1774 else if (TREE_CODE (arg1) == FIXED_CST)
1775 return fold_convert_const_int_from_fixed (type, arg1);
1776 }
1777 else if (TREE_CODE (type) == REAL_TYPE)
1778 {
1779 if (TREE_CODE (arg1) == INTEGER_CST)
1780 return build_real_from_int_cst (type, arg1);
1781 else if (TREE_CODE (arg1) == REAL_CST)
1782 return fold_convert_const_real_from_real (type, arg1);
1783 else if (TREE_CODE (arg1) == FIXED_CST)
1784 return fold_convert_const_real_from_fixed (type, arg1);
1785 }
1786 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1787 {
1788 if (TREE_CODE (arg1) == FIXED_CST)
1789 return fold_convert_const_fixed_from_fixed (type, arg1);
1790 else if (TREE_CODE (arg1) == INTEGER_CST)
1791 return fold_convert_const_fixed_from_int (type, arg1);
1792 else if (TREE_CODE (arg1) == REAL_CST)
1793 return fold_convert_const_fixed_from_real (type, arg1);
1794 }
1795 return NULL_TREE;
1796 }
1797
1798 /* Construct a vector of zero elements of vector type TYPE. */
1799
1800 static tree
1801 build_zero_vector (tree type)
1802 {
1803 tree t;
1804
1805 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1806 return build_vector_from_val (type, t);
1807 }
1808
1809 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1810
1811 bool
1812 fold_convertible_p (const_tree type, const_tree arg)
1813 {
1814 tree orig = TREE_TYPE (arg);
1815
1816 if (type == orig)
1817 return true;
1818
1819 if (TREE_CODE (arg) == ERROR_MARK
1820 || TREE_CODE (type) == ERROR_MARK
1821 || TREE_CODE (orig) == ERROR_MARK)
1822 return false;
1823
1824 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1825 return true;
1826
1827 switch (TREE_CODE (type))
1828 {
1829 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1830 case POINTER_TYPE: case REFERENCE_TYPE:
1831 case OFFSET_TYPE:
1832 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1833 || TREE_CODE (orig) == OFFSET_TYPE)
1834 return true;
1835 return (TREE_CODE (orig) == VECTOR_TYPE
1836 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1837
1838 case REAL_TYPE:
1839 case FIXED_POINT_TYPE:
1840 case COMPLEX_TYPE:
1841 case VECTOR_TYPE:
1842 case VOID_TYPE:
1843 return TREE_CODE (type) == TREE_CODE (orig);
1844
1845 default:
1846 return false;
1847 }
1848 }
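
/* For example (illustrative only): an int-typed ARG can be converted to
   long with a bare NOP_EXPR, so

     fold_convertible_p (long_integer_type_node, arg)

   returns true, while asking for a REAL_TYPE conversion of the same ARG
   returns false -- int-to-double needs a FLOAT_EXPR, not a NOP_EXPR.  */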
1849
1850 /* Convert expression ARG to type TYPE. Used by the middle-end for
1851 simple conversions in preference to calling the front-end's convert. */
1852
1853 tree
1854 fold_convert_loc (location_t loc, tree type, tree arg)
1855 {
1856 tree orig = TREE_TYPE (arg);
1857 tree tem;
1858
1859 if (type == orig)
1860 return arg;
1861
1862 if (TREE_CODE (arg) == ERROR_MARK
1863 || TREE_CODE (type) == ERROR_MARK
1864 || TREE_CODE (orig) == ERROR_MARK)
1865 return error_mark_node;
1866
1867 switch (TREE_CODE (type))
1868 {
1869 case POINTER_TYPE:
1870 case REFERENCE_TYPE:
1871 /* Handle conversions between pointers to different address spaces. */
1872 if (POINTER_TYPE_P (orig)
1873 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1874 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1875 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1876 /* fall through */
1877
1878 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1879 case OFFSET_TYPE:
1880 if (TREE_CODE (arg) == INTEGER_CST)
1881 {
1882 tem = fold_convert_const (NOP_EXPR, type, arg);
1883 if (tem != NULL_TREE)
1884 return tem;
1885 }
1886 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1887 || TREE_CODE (orig) == OFFSET_TYPE)
1888 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1889 if (TREE_CODE (orig) == COMPLEX_TYPE)
1890 return fold_convert_loc (loc, type,
1891 fold_build1_loc (loc, REALPART_EXPR,
1892 TREE_TYPE (orig), arg));
1893 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1894 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1895 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1896
1897 case REAL_TYPE:
1898 if (TREE_CODE (arg) == INTEGER_CST)
1899 {
1900 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1901 if (tem != NULL_TREE)
1902 return tem;
1903 }
1904 else if (TREE_CODE (arg) == REAL_CST)
1905 {
1906 tem = fold_convert_const (NOP_EXPR, type, arg);
1907 if (tem != NULL_TREE)
1908 return tem;
1909 }
1910 else if (TREE_CODE (arg) == FIXED_CST)
1911 {
1912 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1914 return tem;
1915 }
1916
1917 switch (TREE_CODE (orig))
1918 {
1919 case INTEGER_TYPE:
1920 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1921 case POINTER_TYPE: case REFERENCE_TYPE:
1922 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1923
1924 case REAL_TYPE:
1925 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1926
1927 case FIXED_POINT_TYPE:
1928 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1929
1930 case COMPLEX_TYPE:
1931 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1932 return fold_convert_loc (loc, type, tem);
1933
1934 default:
1935 gcc_unreachable ();
1936 }
1937
1938 case FIXED_POINT_TYPE:
1939 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1940 || TREE_CODE (arg) == REAL_CST)
1941 {
1942 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1943 if (tem != NULL_TREE)
1944 goto fold_convert_exit;
1945 }
1946
1947 switch (TREE_CODE (orig))
1948 {
1949 case FIXED_POINT_TYPE:
1950 case INTEGER_TYPE:
1951 case ENUMERAL_TYPE:
1952 case BOOLEAN_TYPE:
1953 case REAL_TYPE:
1954 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1955
1956 case COMPLEX_TYPE:
1957 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1958 return fold_convert_loc (loc, type, tem);
1959
1960 default:
1961 gcc_unreachable ();
1962 }
1963
1964 case COMPLEX_TYPE:
1965 switch (TREE_CODE (orig))
1966 {
1967 case INTEGER_TYPE:
1968 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1969 case POINTER_TYPE: case REFERENCE_TYPE:
1970 case REAL_TYPE:
1971 case FIXED_POINT_TYPE:
1972 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1973 fold_convert_loc (loc, TREE_TYPE (type), arg),
1974 fold_convert_loc (loc, TREE_TYPE (type),
1975 integer_zero_node));
1976 case COMPLEX_TYPE:
1977 {
1978 tree rpart, ipart;
1979
1980 if (TREE_CODE (arg) == COMPLEX_EXPR)
1981 {
1982 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1983 TREE_OPERAND (arg, 0));
1984 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1985 TREE_OPERAND (arg, 1));
1986 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1987 }
1988
1989 arg = save_expr (arg);
1990 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1991 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1992 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1993 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1994 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1995 }
1996
1997 default:
1998 gcc_unreachable ();
1999 }
2000
2001 case VECTOR_TYPE:
2002 if (integer_zerop (arg))
2003 return build_zero_vector (type);
2004 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2005 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2006 || TREE_CODE (orig) == VECTOR_TYPE);
2007 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2008
2009 case VOID_TYPE:
2010 tem = fold_ignored_result (arg);
2011 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2012
2013 default:
2014 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2015 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2016 gcc_unreachable ();
2017 }
2018 fold_convert_exit:
2019 protected_set_expr_location_unshare (tem, loc);
2020 return tem;
2021 }
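
/* A usage sketch (illustrative): converting a COMPLEX_TYPE value to a
   real type keeps only its real part, so

     tree r = fold_convert_loc (loc, double_type_node, cplx);

   goes through the COMPLEX_TYPE arm of the REAL_TYPE case above and is
   equivalent to folding REALPART_EXPR <cplx> and converting the result
   (CPLX and LOC are assumed to come from the caller).  */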
2022 \f
2023 /* Return false if X can be assumed not to be an lvalue, true
2024 otherwise. */
2025
2026 static bool
2027 maybe_lvalue_p (const_tree x)
2028 {
2029 /* We only need to wrap lvalue tree codes. */
2030 switch (TREE_CODE (x))
2031 {
2032 case VAR_DECL:
2033 case PARM_DECL:
2034 case RESULT_DECL:
2035 case LABEL_DECL:
2036 case FUNCTION_DECL:
2037 case SSA_NAME:
2038
2039 case COMPONENT_REF:
2040 case MEM_REF:
2041 case INDIRECT_REF:
2042 case ARRAY_REF:
2043 case ARRAY_RANGE_REF:
2044 case BIT_FIELD_REF:
2045 case OBJ_TYPE_REF:
2046
2047 case REALPART_EXPR:
2048 case IMAGPART_EXPR:
2049 case PREINCREMENT_EXPR:
2050 case PREDECREMENT_EXPR:
2051 case SAVE_EXPR:
2052 case TRY_CATCH_EXPR:
2053 case WITH_CLEANUP_EXPR:
2054 case COMPOUND_EXPR:
2055 case MODIFY_EXPR:
2056 case TARGET_EXPR:
2057 case COND_EXPR:
2058 case BIND_EXPR:
2059 break;
2060
2061 default:
2062 /* Assume the worst for front-end tree codes. */
2063 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2064 break;
2065 return false;
2066 }
2067
2068 return true;
2069 }
2070
2071 /* Return an expr equal to X but certainly not valid as an lvalue. */
2072
2073 tree
2074 non_lvalue_loc (location_t loc, tree x)
2075 {
2076 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2077 us. */
2078 if (in_gimple_form)
2079 return x;
2080
2081 if (! maybe_lvalue_p (x))
2082 return x;
2083 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2084 }
2085
2086 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2087 Zero means allow extended lvalues. */
2088
2089 int pedantic_lvalues;
2090
2091 /* When pedantic, return an expr equal to X but certainly not valid as a
2092 pedantic lvalue. Otherwise, return X. */
2093
2094 static tree
2095 pedantic_non_lvalue_loc (location_t loc, tree x)
2096 {
2097 if (pedantic_lvalues)
2098 return non_lvalue_loc (loc, x);
2099
2100 return protected_set_expr_location_unshare (x, loc);
2101 }
2102 \f
2103 /* Given a tree comparison code, return the code that is the logical inverse
2104 of the given code. It is not safe to do this for floating-point
2105 comparisons, except for NE_EXPR and EQ_EXPR, so we take HONOR_NANS as
2106 well: if reversing the comparison is unsafe, return ERROR_MARK. */
2107
2108 enum tree_code
2109 invert_tree_comparison (enum tree_code code, bool honor_nans)
2110 {
2111 if (honor_nans && flag_trapping_math)
2112 return ERROR_MARK;
2113
2114 switch (code)
2115 {
2116 case EQ_EXPR:
2117 return NE_EXPR;
2118 case NE_EXPR:
2119 return EQ_EXPR;
2120 case GT_EXPR:
2121 return honor_nans ? UNLE_EXPR : LE_EXPR;
2122 case GE_EXPR:
2123 return honor_nans ? UNLT_EXPR : LT_EXPR;
2124 case LT_EXPR:
2125 return honor_nans ? UNGE_EXPR : GE_EXPR;
2126 case LE_EXPR:
2127 return honor_nans ? UNGT_EXPR : GT_EXPR;
2128 case LTGT_EXPR:
2129 return UNEQ_EXPR;
2130 case UNEQ_EXPR:
2131 return LTGT_EXPR;
2132 case UNGT_EXPR:
2133 return LE_EXPR;
2134 case UNGE_EXPR:
2135 return LT_EXPR;
2136 case UNLT_EXPR:
2137 return GE_EXPR;
2138 case UNLE_EXPR:
2139 return GT_EXPR;
2140 case ORDERED_EXPR:
2141 return UNORDERED_EXPR;
2142 case UNORDERED_EXPR:
2143 return ORDERED_EXPR;
2144 default:
2145 gcc_unreachable ();
2146 }
2147 }
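
/* Concrete behavior (illustrative):

     invert_tree_comparison (LT_EXPR, false) == GE_EXPR
     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR

   since with NaNs honored !(x < y) must remain true for unordered
   operands.  If NaNs are honored *and* flag_trapping_math is set,
   ERROR_MARK comes back instead: the unordered inverse would no longer
   trap on a NaN the way the original comparison does.  */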
2148
2149 /* Similar, but return the comparison that results if the operands are
2150 swapped. This is safe for floating-point. */
2151
2152 enum tree_code
2153 swap_tree_comparison (enum tree_code code)
2154 {
2155 switch (code)
2156 {
2157 case EQ_EXPR:
2158 case NE_EXPR:
2159 case ORDERED_EXPR:
2160 case UNORDERED_EXPR:
2161 case LTGT_EXPR:
2162 case UNEQ_EXPR:
2163 return code;
2164 case GT_EXPR:
2165 return LT_EXPR;
2166 case GE_EXPR:
2167 return LE_EXPR;
2168 case LT_EXPR:
2169 return GT_EXPR;
2170 case LE_EXPR:
2171 return GE_EXPR;
2172 case UNGT_EXPR:
2173 return UNLT_EXPR;
2174 case UNGE_EXPR:
2175 return UNLE_EXPR;
2176 case UNLT_EXPR:
2177 return UNGT_EXPR;
2178 case UNLE_EXPR:
2179 return UNGE_EXPR;
2180 default:
2181 gcc_unreachable ();
2182 }
2183 }
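
/* E.g. x < y tests the same condition as y > x, so
   swap_tree_comparison (LT_EXPR) == GT_EXPR; symmetric codes such as
   EQ_EXPR and UNORDERED_EXPR map to themselves.  */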
2184
2185
2186 /* Convert a comparison tree code from an enum tree_code representation
2187 into a compcode bit-based encoding. This function is the inverse of
2188 compcode_to_comparison. */
2189
2190 static enum comparison_code
2191 comparison_to_compcode (enum tree_code code)
2192 {
2193 switch (code)
2194 {
2195 case LT_EXPR:
2196 return COMPCODE_LT;
2197 case EQ_EXPR:
2198 return COMPCODE_EQ;
2199 case LE_EXPR:
2200 return COMPCODE_LE;
2201 case GT_EXPR:
2202 return COMPCODE_GT;
2203 case NE_EXPR:
2204 return COMPCODE_NE;
2205 case GE_EXPR:
2206 return COMPCODE_GE;
2207 case ORDERED_EXPR:
2208 return COMPCODE_ORD;
2209 case UNORDERED_EXPR:
2210 return COMPCODE_UNORD;
2211 case UNLT_EXPR:
2212 return COMPCODE_UNLT;
2213 case UNEQ_EXPR:
2214 return COMPCODE_UNEQ;
2215 case UNLE_EXPR:
2216 return COMPCODE_UNLE;
2217 case UNGT_EXPR:
2218 return COMPCODE_UNGT;
2219 case LTGT_EXPR:
2220 return COMPCODE_LTGT;
2221 case UNGE_EXPR:
2222 return COMPCODE_UNGE;
2223 default:
2224 gcc_unreachable ();
2225 }
2226 }
2227
2228 /* Convert a compcode bit-based encoding of a comparison operator back
2229 to GCC's enum tree_code representation. This function is the
2230 inverse of comparison_to_compcode. */
2231
2232 static enum tree_code
2233 compcode_to_comparison (enum comparison_code code)
2234 {
2235 switch (code)
2236 {
2237 case COMPCODE_LT:
2238 return LT_EXPR;
2239 case COMPCODE_EQ:
2240 return EQ_EXPR;
2241 case COMPCODE_LE:
2242 return LE_EXPR;
2243 case COMPCODE_GT:
2244 return GT_EXPR;
2245 case COMPCODE_NE:
2246 return NE_EXPR;
2247 case COMPCODE_GE:
2248 return GE_EXPR;
2249 case COMPCODE_ORD:
2250 return ORDERED_EXPR;
2251 case COMPCODE_UNORD:
2252 return UNORDERED_EXPR;
2253 case COMPCODE_UNLT:
2254 return UNLT_EXPR;
2255 case COMPCODE_UNEQ:
2256 return UNEQ_EXPR;
2257 case COMPCODE_UNLE:
2258 return UNLE_EXPR;
2259 case COMPCODE_UNGT:
2260 return UNGT_EXPR;
2261 case COMPCODE_LTGT:
2262 return LTGT_EXPR;
2263 case COMPCODE_UNGE:
2264 return UNGE_EXPR;
2265 default:
2266 gcc_unreachable ();
2267 }
2268 }
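
/* The encoding makes the "less", "equal", "greater" and "unordered"
   outcomes independent bits, so set operations on comparisons become
   bitwise arithmetic; e.g. (illustrative)

     comparison_to_compcode (LT_EXPR) | comparison_to_compcode (EQ_EXPR)
       == COMPCODE_LE

   and compcode_to_comparison maps that back to LE_EXPR.  */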
2269
2270 /* Return a tree for the comparison which is the combination of
2271 doing the AND or OR (depending on CODE) of the two operations LCODE
2272 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2273 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2274 if this makes the transformation invalid. */
2275
2276 tree
2277 combine_comparisons (location_t loc,
2278 enum tree_code code, enum tree_code lcode,
2279 enum tree_code rcode, tree truth_type,
2280 tree ll_arg, tree lr_arg)
2281 {
2282 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2283 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2284 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2285 int compcode;
2286
2287 switch (code)
2288 {
2289 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2290 compcode = lcompcode & rcompcode;
2291 break;
2292
2293 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2294 compcode = lcompcode | rcompcode;
2295 break;
2296
2297 default:
2298 return NULL_TREE;
2299 }
2300
2301 if (!honor_nans)
2302 {
2303 /* Eliminate unordered comparisons, as well as LTGT and ORD
2304 which are not used unless the mode has NaNs. */
2305 compcode &= ~COMPCODE_UNORD;
2306 if (compcode == COMPCODE_LTGT)
2307 compcode = COMPCODE_NE;
2308 else if (compcode == COMPCODE_ORD)
2309 compcode = COMPCODE_TRUE;
2310 }
2311 else if (flag_trapping_math)
2312 {
2313 /* Check that the original operation and the optimized ones will trap
2314 under the same condition. */
2315 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2316 && (lcompcode != COMPCODE_EQ)
2317 && (lcompcode != COMPCODE_ORD);
2318 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2319 && (rcompcode != COMPCODE_EQ)
2320 && (rcompcode != COMPCODE_ORD);
2321 bool trap = (compcode & COMPCODE_UNORD) == 0
2322 && (compcode != COMPCODE_EQ)
2323 && (compcode != COMPCODE_ORD);
2324
2325 /* In a short-circuited boolean expression the LHS might be
2326 such that the RHS, if evaluated, will never trap. For
2327 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2328 if neither x nor y is NaN. (This is a mixed blessing: for
2329 example, the expression above will never trap, hence
2330 optimizing it to x < y would be invalid). */
2331 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2332 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2333 rtrap = false;
2334
2335 /* If the comparison was short-circuited, and only the RHS
2336 trapped, we may now generate a spurious trap. */
2337 if (rtrap && !ltrap
2338 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2339 return NULL_TREE;
2340
2341 /* If we changed the conditions that cause a trap, we lose. */
2342 if ((ltrap || rtrap) != trap)
2343 return NULL_TREE;
2344 }
2345
2346 if (compcode == COMPCODE_TRUE)
2347 return constant_boolean_node (true, truth_type);
2348 else if (compcode == COMPCODE_FALSE)
2349 return constant_boolean_node (false, truth_type);
2350 else
2351 {
2352 enum tree_code tcode;
2353
2354 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2355 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2356 }
2357 }
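
/* A worked example (illustrative): for integer X and Y,

     combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                          boolean_type_node, x, y)

   ORs COMPCODE_LT with COMPCODE_EQ to get COMPCODE_LE and folds
   (x < y) || (x == y) into x <= y; LOC, X and Y stand for the caller's
   values.  */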
2358 \f
2359 /* Return nonzero if two operands (typically of the same tree node)
2360 are necessarily equal. If either argument has side-effects this
2361 function returns zero. FLAGS modifies behavior as follows:
2362
2363 If OEP_ONLY_CONST is set, only return nonzero for constants.
2364 This function tests whether the operands are indistinguishable;
2365 it does not test whether they are equal using C's == operation.
2366 The distinction is important for IEEE floating point, because
2367 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2368 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2369
2370 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2371 even though it may hold multiple values during a function.
2372 This is because a GCC tree node guarantees that nothing else is
2373 executed between the evaluation of its "operands" (which may often
2374 be evaluated in arbitrary order). Hence if the operands themselves
2375 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2376 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2377 unset means assuming isochronic (or instantaneous) tree equivalence.
2378 Unless comparing arbitrary expression trees, such as from different
2379 statements, this flag can usually be left unset.
2380
2381 If OEP_PURE_SAME is set, then pure functions with identical arguments
2382 are considered the same. It is used when the caller has other ways
2383 to ensure that global memory is unchanged in between. */
2384
2385 int
2386 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2387 {
2388 /* If either is ERROR_MARK, they aren't equal. */
2389 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2390 || TREE_TYPE (arg0) == error_mark_node
2391 || TREE_TYPE (arg1) == error_mark_node)
2392 return 0;
2393
2394 /* Similar, if either does not have a type (like a released SSA name),
2395 they aren't equal. */
2396 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2397 return 0;
2398
2399 /* Check equality of integer constants before bailing out due to
2400 precision differences. */
2401 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2402 return tree_int_cst_equal (arg0, arg1);
2403
2404 /* If the types don't have the same signedness, then we can't consider
2405 them equal. We must check this before the STRIP_NOPS calls
2406 because they may change the signedness of the arguments. As pointers
2407 strictly don't have a signedness, require either two pointers or
2408 two non-pointers as well. */
2409 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2410 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2411 return 0;
2412
2413 /* We cannot consider pointers to different address spaces equal. */
2414 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2417 return 0;
2418
2419 /* If the types don't have the same precision, then it is not safe
2420 to strip NOPs. */
2421 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2422 return 0;
2423
2424 STRIP_NOPS (arg0);
2425 STRIP_NOPS (arg1);
2426
2427 /* In case both args are comparisons but with different comparison
2428 code, try to swap the comparison operands of one arg to produce
2429 a match and compare that variant. */
2430 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2431 && COMPARISON_CLASS_P (arg0)
2432 && COMPARISON_CLASS_P (arg1))
2433 {
2434 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2435
2436 if (TREE_CODE (arg0) == swap_code)
2437 return operand_equal_p (TREE_OPERAND (arg0, 0),
2438 TREE_OPERAND (arg1, 1), flags)
2439 && operand_equal_p (TREE_OPERAND (arg0, 1),
2440 TREE_OPERAND (arg1, 0), flags);
2441 }
2442
2443 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2444 /* This is needed for conversions and for COMPONENT_REF.
2445 Might as well play it safe and always test this. */
2446 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2447 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2448 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2449 return 0;
2450
2451 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2452 We don't care about side effects in that case because the SAVE_EXPR
2453 takes care of that for us. In all other cases, two expressions are
2454 equal if they have no side effects. If we have two identical
2455 expressions with side effects that should be treated the same due
2456 to the only side effects being identical SAVE_EXPR's, that will
2457 be detected in the recursive calls below.
2458 If we are taking an invariant address of two identical objects
2459 they are necessarily equal as well. */
2460 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2461 && (TREE_CODE (arg0) == SAVE_EXPR
2462 || (flags & OEP_CONSTANT_ADDRESS_OF)
2463 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2464 return 1;
2465
2466 /* Next handle constant cases, those for which we can return 1 even
2467 if ONLY_CONST is set. */
2468 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2469 switch (TREE_CODE (arg0))
2470 {
2471 case INTEGER_CST:
2472 return tree_int_cst_equal (arg0, arg1);
2473
2474 case FIXED_CST:
2475 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2476 TREE_FIXED_CST (arg1));
2477
2478 case REAL_CST:
2479 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2480 TREE_REAL_CST (arg1)))
2481 return 1;
2482
2483
2484 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2485 {
2486 /* If we do not distinguish between signed and unsigned zero,
2487 consider them equal. */
2488 if (real_zerop (arg0) && real_zerop (arg1))
2489 return 1;
2490 }
2491 return 0;
2492
2493 case VECTOR_CST:
2494 {
2495 tree v1, v2;
2496
2497 v1 = TREE_VECTOR_CST_ELTS (arg0);
2498 v2 = TREE_VECTOR_CST_ELTS (arg1);
2499 while (v1 && v2)
2500 {
2501 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2502 flags))
2503 return 0;
2504 v1 = TREE_CHAIN (v1);
2505 v2 = TREE_CHAIN (v2);
2506 }
2507
2508 return v1 == v2;
2509 }
2510
2511 case COMPLEX_CST:
2512 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2513 flags)
2514 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2515 flags));
2516
2517 case STRING_CST:
2518 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2519 && ! memcmp (TREE_STRING_POINTER (arg0),
2520 TREE_STRING_POINTER (arg1),
2521 TREE_STRING_LENGTH (arg0)));
2522
2523 case ADDR_EXPR:
2524 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2525 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2526 ? OEP_CONSTANT_ADDRESS_OF : 0);
2527 default:
2528 break;
2529 }
2530
2531 if (flags & OEP_ONLY_CONST)
2532 return 0;
2533
2534 /* Define macros to test an operand from arg0 and arg1 for equality and a
2535 variant that allows null and views null as being different from any
2536 non-null value. In the latter case, if either is null, then both
2537 must be; otherwise, do the normal comparison. */
2538 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2539 TREE_OPERAND (arg1, N), flags)
2540
2541 #define OP_SAME_WITH_NULL(N) \
2542 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2543 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2544
2545 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2546 {
2547 case tcc_unary:
2548 /* Two conversions are equal only if signedness and modes match. */
2549 switch (TREE_CODE (arg0))
2550 {
2551 CASE_CONVERT:
2552 case FIX_TRUNC_EXPR:
2553 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2554 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2555 return 0;
2556 break;
2557 default:
2558 break;
2559 }
2560
2561 return OP_SAME (0);
2562
2563
2564 case tcc_comparison:
2565 case tcc_binary:
2566 if (OP_SAME (0) && OP_SAME (1))
2567 return 1;
2568
2569 /* For commutative ops, allow the other order. */
2570 return (commutative_tree_code (TREE_CODE (arg0))
2571 && operand_equal_p (TREE_OPERAND (arg0, 0),
2572 TREE_OPERAND (arg1, 1), flags)
2573 && operand_equal_p (TREE_OPERAND (arg0, 1),
2574 TREE_OPERAND (arg1, 0), flags));
2575
2576 case tcc_reference:
2577 /* If either of the pointer (or reference) expressions we are
2578 dereferencing contain a side effect, these cannot be equal. */
2579 if (TREE_SIDE_EFFECTS (arg0)
2580 || TREE_SIDE_EFFECTS (arg1))
2581 return 0;
2582
2583 switch (TREE_CODE (arg0))
2584 {
2585 case INDIRECT_REF:
2586 case REALPART_EXPR:
2587 case IMAGPART_EXPR:
2588 return OP_SAME (0);
2589
2590 case MEM_REF:
2591 /* Require equal access sizes, and similar pointer types.
2592 We can have incomplete types for array references of
2593 variable-sized arrays from the Fortran frontend
2594 though. */
2595 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2596 || (TYPE_SIZE (TREE_TYPE (arg0))
2597 && TYPE_SIZE (TREE_TYPE (arg1))
2598 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2599 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2600 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2601 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2602 && OP_SAME (0) && OP_SAME (1));
2603
2604 case ARRAY_REF:
2605 case ARRAY_RANGE_REF:
2606 /* Operands 2 and 3 may be null.
2607 Compare the array index by value if it is constant first as we
2608 may have different types but same value here. */
2609 return (OP_SAME (0)
2610 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2611 TREE_OPERAND (arg1, 1))
2612 || OP_SAME (1))
2613 && OP_SAME_WITH_NULL (2)
2614 && OP_SAME_WITH_NULL (3));
2615
2616 case COMPONENT_REF:
2617 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2618 may be NULL when we're called to compare MEM_EXPRs. */
2619 return OP_SAME_WITH_NULL (0)
2620 && OP_SAME (1)
2621 && OP_SAME_WITH_NULL (2);
2622
2623 case BIT_FIELD_REF:
2624 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2625
2626 default:
2627 return 0;
2628 }
2629
2630 case tcc_expression:
2631 switch (TREE_CODE (arg0))
2632 {
2633 case ADDR_EXPR:
2634 case TRUTH_NOT_EXPR:
2635 return OP_SAME (0);
2636
2637 case TRUTH_ANDIF_EXPR:
2638 case TRUTH_ORIF_EXPR:
2639 return OP_SAME (0) && OP_SAME (1);
2640
2641 case FMA_EXPR:
2642 case WIDEN_MULT_PLUS_EXPR:
2643 case WIDEN_MULT_MINUS_EXPR:
2644 if (!OP_SAME (2))
2645 return 0;
2646 /* The multiplication operands are commutative. */
2647 /* FALLTHRU */
2648
2649 case TRUTH_AND_EXPR:
2650 case TRUTH_OR_EXPR:
2651 case TRUTH_XOR_EXPR:
2652 if (OP_SAME (0) && OP_SAME (1))
2653 return 1;
2654
2655 /* Otherwise take into account this is a commutative operation. */
2656 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2657 TREE_OPERAND (arg1, 1), flags)
2658 && operand_equal_p (TREE_OPERAND (arg0, 1),
2659 TREE_OPERAND (arg1, 0), flags));
2660
2661 case COND_EXPR:
2662 case VEC_COND_EXPR:
2663 case DOT_PROD_EXPR:
2664 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2665
2666 default:
2667 return 0;
2668 }
2669
2670 case tcc_vl_exp:
2671 switch (TREE_CODE (arg0))
2672 {
2673 case CALL_EXPR:
2674 /* If the CALL_EXPRs call different functions, then they
2675 clearly cannot be equal. */
2676 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2677 flags))
2678 return 0;
2679
2680 {
2681 unsigned int cef = call_expr_flags (arg0);
2682 if (flags & OEP_PURE_SAME)
2683 cef &= ECF_CONST | ECF_PURE;
2684 else
2685 cef &= ECF_CONST;
2686 if (!cef)
2687 return 0;
2688 }
2689
2690 /* Now see if all the arguments are the same. */
2691 {
2692 const_call_expr_arg_iterator iter0, iter1;
2693 const_tree a0, a1;
2694 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2695 a1 = first_const_call_expr_arg (arg1, &iter1);
2696 a0 && a1;
2697 a0 = next_const_call_expr_arg (&iter0),
2698 a1 = next_const_call_expr_arg (&iter1))
2699 if (! operand_equal_p (a0, a1, flags))
2700 return 0;
2701
2702 /* If we get here and both argument lists are exhausted
2703 then the CALL_EXPRs are equal. */
2704 return ! (a0 || a1);
2705 }
2706 default:
2707 return 0;
2708 }
2709
2710 case tcc_declaration:
2711 /* Consider __builtin_sqrt equal to sqrt. */
2712 return (TREE_CODE (arg0) == FUNCTION_DECL
2713 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2714 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2715 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2716
2717 default:
2718 return 0;
2719 }
2720
2721 #undef OP_SAME
2722 #undef OP_SAME_WITH_NULL
2723 }
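
/* A short usage sketch (illustrative): for two independently built
   trees for a[i] with no side effects,

     operand_equal_p (ref1, ref2, 0)

   returns 1 through the ARRAY_REF case above; passing OEP_ONLY_CONST
   instead returns 0, since the operands are not constants (REF1 and
   REF2 are placeholders for the caller's trees).  */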
2724 \f
2725 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2726 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2727
2728 When in doubt, return 0. */
2729
2730 static int
2731 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2732 {
2733 int unsignedp1, unsignedpo;
2734 tree primarg0, primarg1, primother;
2735 unsigned int correct_width;
2736
2737 if (operand_equal_p (arg0, arg1, 0))
2738 return 1;
2739
2740 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2741 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2742 return 0;
2743
2744 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2745 and see if the inner values are the same. This removes any
2746 signedness comparison, which doesn't matter here. */
2747 primarg0 = arg0, primarg1 = arg1;
2748 STRIP_NOPS (primarg0);
2749 STRIP_NOPS (primarg1);
2750 if (operand_equal_p (primarg0, primarg1, 0))
2751 return 1;
2752
2753 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2754 actual comparison operand, ARG0.
2755
2756 First throw away any conversions to wider types
2757 already present in the operands. */
2758
2759 primarg1 = get_narrower (arg1, &unsignedp1);
2760 primother = get_narrower (other, &unsignedpo);
2761
2762 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2763 if (unsignedp1 == unsignedpo
2764 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2765 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2766 {
2767 tree type = TREE_TYPE (arg0);
2768
2769 /* Make sure shorter operand is extended the right way
2770 to match the longer operand. */
2771 primarg1 = fold_convert (signed_or_unsigned_type_for
2772 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2773
2774 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2775 return 1;
2776 }
2777
2778 return 0;
2779 }
2780 \f
2781 /* See if ARG is an expression that is either a comparison or is performing
2782 arithmetic on comparisons. The comparisons must only be comparing
2783 two different values, which will be stored in *CVAL1 and *CVAL2; if
2784 they are nonzero it means that some operands have already been found.
2785 No variables may be used anywhere else in the expression except in the
2786 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2787 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2788
2789 If this is true, return 1. Otherwise, return zero. */
2790
2791 static int
2792 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2793 {
2794 enum tree_code code = TREE_CODE (arg);
2795 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2796
2797 /* We can handle some of the tcc_expression cases here. */
2798 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2799 tclass = tcc_unary;
2800 else if (tclass == tcc_expression
2801 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2802 || code == COMPOUND_EXPR))
2803 tclass = tcc_binary;
2804
2805 else if (tclass == tcc_expression && code == SAVE_EXPR
2806 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2807 {
2808 /* If we've already found a CVAL1 or CVAL2, this expression is
2809 too complex to handle. */
2810 if (*cval1 || *cval2)
2811 return 0;
2812
2813 tclass = tcc_unary;
2814 *save_p = 1;
2815 }
2816
2817 switch (tclass)
2818 {
2819 case tcc_unary:
2820 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2821
2822 case tcc_binary:
2823 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2824 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2825 cval1, cval2, save_p));
2826
2827 case tcc_constant:
2828 return 1;
2829
2830 case tcc_expression:
2831 if (code == COND_EXPR)
2832 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2833 cval1, cval2, save_p)
2834 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2835 cval1, cval2, save_p)
2836 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2837 cval1, cval2, save_p));
2838 return 0;
2839
2840 case tcc_comparison:
2841 /* First see if we can handle the first operand, then the second. For
2842 the second operand, we know *CVAL1 can't be zero. It must be that
2843 one side of the comparison is each of the values; test for the
2844 case where this isn't true by failing if the two operands
2845 are the same. */
2846
2847 if (operand_equal_p (TREE_OPERAND (arg, 0),
2848 TREE_OPERAND (arg, 1), 0))
2849 return 0;
2850
2851 if (*cval1 == 0)
2852 *cval1 = TREE_OPERAND (arg, 0);
2853 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2854 ;
2855 else if (*cval2 == 0)
2856 *cval2 = TREE_OPERAND (arg, 0);
2857 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2858 ;
2859 else
2860 return 0;
2861
2862 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2863 ;
2864 else if (*cval2 == 0)
2865 *cval2 = TREE_OPERAND (arg, 1);
2866 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2867 ;
2868 else
2869 return 0;
2870
2871 return 1;
2872
2873 default:
2874 return 0;
2875 }
2876 }
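
/* For example (illustrative): with ARG the tree for (x < y) && (x != y),
   this returns 1 and sets *CVAL1 to x and *CVAL2 to y, since both
   comparisons use only those two values; the tree for
   (x < y) && (z != y) fails because a third value Z appears.  */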
2877 \f
2878 /* ARG is a tree that is known to contain just arithmetic operations and
2879 comparisons. Evaluate the operations in the tree substituting NEW0 for
2880 any occurrence of OLD0 as an operand of a comparison and likewise for
2881 NEW1 and OLD1. */
2882
2883 static tree
2884 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2885 tree old1, tree new1)
2886 {
2887 tree type = TREE_TYPE (arg);
2888 enum tree_code code = TREE_CODE (arg);
2889 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2890
2891 /* We can handle some of the tcc_expression cases here. */
2892 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2893 tclass = tcc_unary;
2894 else if (tclass == tcc_expression
2895 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2896 tclass = tcc_binary;
2897
2898 switch (tclass)
2899 {
2900 case tcc_unary:
2901 return fold_build1_loc (loc, code, type,
2902 eval_subst (loc, TREE_OPERAND (arg, 0),
2903 old0, new0, old1, new1));
2904
2905 case tcc_binary:
2906 return fold_build2_loc (loc, code, type,
2907 eval_subst (loc, TREE_OPERAND (arg, 0),
2908 old0, new0, old1, new1),
2909 eval_subst (loc, TREE_OPERAND (arg, 1),
2910 old0, new0, old1, new1));
2911
2912 case tcc_expression:
2913 switch (code)
2914 {
2915 case SAVE_EXPR:
2916 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2917 old1, new1);
2918
2919 case COMPOUND_EXPR:
2920 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2921 old1, new1);
2922
2923 case COND_EXPR:
2924 return fold_build3_loc (loc, code, type,
2925 eval_subst (loc, TREE_OPERAND (arg, 0),
2926 old0, new0, old1, new1),
2927 eval_subst (loc, TREE_OPERAND (arg, 1),
2928 old0, new0, old1, new1),
2929 eval_subst (loc, TREE_OPERAND (arg, 2),
2930 old0, new0, old1, new1));
2931 default:
2932 break;
2933 }
2934 /* Fall through - ??? */
2935
2936 case tcc_comparison:
2937 {
2938 tree arg0 = TREE_OPERAND (arg, 0);
2939 tree arg1 = TREE_OPERAND (arg, 1);
2940
2941 /* We need to check both for exact equality and tree equality. The
2942 former will be true if the operand has a side-effect. In that
2943 case, we know the operand occurred exactly once. */
2944
2945 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2946 arg0 = new0;
2947 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2948 arg0 = new1;
2949
2950 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2951 arg1 = new0;
2952 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2953 arg1 = new1;
2954
2955 return fold_build2_loc (loc, code, type, arg0, arg1);
2956 }
2957
2958 default:
2959 return arg;
2960 }
2961 }
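
/* Illustrative use: with ARG the tree for (a < b) && (a == c),

     eval_subst (loc, arg, a, x, b, y)

   rebuilds it as (x < y) && (x == c): each comparison operand matching
   OLD0 or OLD1 is replaced and the result is refolded (A, B, C, X and Y
   here are placeholder trees, not values from this file).  */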
2962 \f
2963 /* Return a tree for the case when the result of an expression is RESULT
2964 converted to TYPE and OMITTED was previously an operand of the expression
2965 but is now not needed (e.g., we folded OMITTED * 0).
2966
2967 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2968 the conversion of RESULT to TYPE. */
2969
2970 tree
2971 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2972 {
2973 tree t = fold_convert_loc (loc, type, result);
2974
2975 /* If the resulting operand is an empty statement, just return the omitted
2976 statement cast to void. */
2977 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2978 return build1_loc (loc, NOP_EXPR, void_type_node,
2979 fold_ignored_result (omitted));
2980
2981 if (TREE_SIDE_EFFECTS (omitted))
2982 return build2_loc (loc, COMPOUND_EXPR, type,
2983 fold_ignored_result (omitted), t);
2984
2985 return non_lvalue_loc (loc, t);
2986 }
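
/* For example (a sketch): after folding f () * 0 the call still has to
   be evaluated, so

     omit_one_operand_loc (loc, type, integer_zero_node, call_to_f)

   yields the COMPOUND_EXPR (f (), 0); had OMITTED been side-effect
   free, just the converted RESULT would come back.  */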
2987
2988 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2989
2990 static tree
2991 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2992 tree omitted)
2993 {
2994 tree t = fold_convert_loc (loc, type, result);
2995
2996 /* If the resulting operand is an empty statement, just return the omitted
2997 statement cast to void. */
2998 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2999 return build1_loc (loc, NOP_EXPR, void_type_node,
3000 fold_ignored_result (omitted));
3001
3002 if (TREE_SIDE_EFFECTS (omitted))
3003 return build2_loc (loc, COMPOUND_EXPR, type,
3004 fold_ignored_result (omitted), t);
3005
3006 return pedantic_non_lvalue_loc (loc, t);
3007 }
3008
3009 /* Return a tree for the case when the result of an expression is RESULT
3010 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3011 of the expression but are now not needed.
3012
3013 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3014 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3015 evaluated before OMITTED2. Otherwise, if neither has side effects,
3016 just do the conversion of RESULT to TYPE. */
3017
3018 tree
3019 omit_two_operands_loc (location_t loc, tree type, tree result,
3020 tree omitted1, tree omitted2)
3021 {
3022 tree t = fold_convert_loc (loc, type, result);
3023
3024 if (TREE_SIDE_EFFECTS (omitted2))
3025 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3026 if (TREE_SIDE_EFFECTS (omitted1))
3027 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3028
3029 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3030 }
3031
3032 \f
3033 /* Return a simplified tree node for the truth-negation of ARG. This
3034 never alters ARG itself. We assume that ARG is an operation that
3035 returns a truth value (0 or 1).
3036
3037 FIXME: one would think we would fold the result, but it causes
3038 problems with the dominator optimizer. */
3039
3040 tree
3041 fold_truth_not_expr (location_t loc, tree arg)
3042 {
3043 tree type = TREE_TYPE (arg);
3044 enum tree_code code = TREE_CODE (arg);
3045 location_t loc1, loc2;
3046
3047 /* If this is a comparison, we can simply invert it, except for
3048 floating-point non-equality comparisons, in which case we just
3049 enclose a TRUTH_NOT_EXPR around what we have. */
3050
3051 if (TREE_CODE_CLASS (code) == tcc_comparison)
3052 {
3053 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3054 if (FLOAT_TYPE_P (op_type)
3055 && flag_trapping_math
3056 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3057 && code != NE_EXPR && code != EQ_EXPR)
3058 return NULL_TREE;
3059
3060 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3061 if (code == ERROR_MARK)
3062 return NULL_TREE;
3063
3064 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3065 TREE_OPERAND (arg, 1));
3066 }
3067
3068 switch (code)
3069 {
3070 case INTEGER_CST:
3071 return constant_boolean_node (integer_zerop (arg), type);
3072
3073 case TRUTH_AND_EXPR:
3074 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3075 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3076 return build2_loc (loc, TRUTH_OR_EXPR, type,
3077 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3078 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3079
3080 case TRUTH_OR_EXPR:
3081 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3082 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3083 return build2_loc (loc, TRUTH_AND_EXPR, type,
3084 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3085 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3086
3087 case TRUTH_XOR_EXPR:
3088 /* Here we can invert either operand. We invert the first operand
3089 unless the second operand is a TRUTH_NOT_EXPR in which case our
3090 result is the XOR of the first operand with the inside of the
3091 negation of the second operand. */
3092
3093 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3094 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3095 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3096 else
3097 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3098 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3099 TREE_OPERAND (arg, 1));
3100
3101 case TRUTH_ANDIF_EXPR:
3102 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3103 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3104 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3105 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3106 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3107
3108 case TRUTH_ORIF_EXPR:
3109 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3110 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3111 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3112 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3113 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3114
3115 case TRUTH_NOT_EXPR:
3116 return TREE_OPERAND (arg, 0);
3117
3118 case COND_EXPR:
3119 {
3120 tree arg1 = TREE_OPERAND (arg, 1);
3121 tree arg2 = TREE_OPERAND (arg, 2);
3122
3123 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3124 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3125
3126 /* A COND_EXPR may have a throw as one operand, which
3127 then has void type. Just leave void operands
3128 as they are. */
3129 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3130 VOID_TYPE_P (TREE_TYPE (arg1))
3131 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3132 VOID_TYPE_P (TREE_TYPE (arg2))
3133 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3134 }
3135
3136 case COMPOUND_EXPR:
3137 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3138 return build2_loc (loc, COMPOUND_EXPR, type,
3139 TREE_OPERAND (arg, 0),
3140 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3141
3142 case NON_LVALUE_EXPR:
3143 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3144 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3145
3146 CASE_CONVERT:
3147 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3148 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3149
3150 /* ... fall through ... */
3151
3152 case FLOAT_EXPR:
3153 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3154 return build1_loc (loc, TREE_CODE (arg), type,
3155 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3156
3157 case BIT_AND_EXPR:
3158 if (!integer_onep (TREE_OPERAND (arg, 1)))
3159 return NULL_TREE;
3160 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3161
3162 case SAVE_EXPR:
3163 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3164
3165 case CLEANUP_POINT_EXPR:
3166 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3167 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3168 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3169
3170 default:
3171 return NULL_TREE;
3172 }
3173 }
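
/* Some of the concrete foldings performed above (illustrative):

     !(a && b)     ->  !a || !b        (TRUTH_ANDIF_EXPR, De Morgan)
     !(a ? b : c)  ->  a ? !b : !c     (COND_EXPR)
     !(x & 1)      ->  (x & 1) == 0    (BIT_AND_EXPR)

   Anything unrecognized -- e.g. a floating-point '<' when
   flag_trapping_math is set -- makes the function return NULL_TREE.  */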
3174
3175 /* Return a simplified tree node for the truth-negation of ARG. This
3176 never alters ARG itself. We assume that ARG is an operation that
3177 returns a truth value (0 or 1).
3178
3179 FIXME: one would think we would fold the result, but it causes
3180 problems with the dominator optimizer. */
3181
3182 tree
3183 invert_truthvalue_loc (location_t loc, tree arg)
3184 {
3185 tree tem;
3186
3187 if (TREE_CODE (arg) == ERROR_MARK)
3188 return arg;
3189
3190 tem = fold_truth_not_expr (loc, arg);
3191 if (!tem)
3192 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3193
3194 return tem;
3195 }
3196
3197 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3198 operands are another bit-wise operation with a common input. If so,
3199 distribute the bit operations to save an operation and possibly two if
3200 constants are involved. For example, convert
3201 (A | B) & (A | C) into A | (B & C)
3202 Further simplification will occur if B and C are constants.
3203
3204 If this optimization cannot be done, 0 will be returned. */
3205
3206 static tree
3207 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3208 tree arg0, tree arg1)
3209 {
3210 tree common;
3211 tree left, right;
3212
3213 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3214 || TREE_CODE (arg0) == code
3215 || (TREE_CODE (arg0) != BIT_AND_EXPR
3216 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3217 return 0;
3218
3219 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3220 {
3221 common = TREE_OPERAND (arg0, 0);
3222 left = TREE_OPERAND (arg0, 1);
3223 right = TREE_OPERAND (arg1, 1);
3224 }
3225 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3226 {
3227 common = TREE_OPERAND (arg0, 0);
3228 left = TREE_OPERAND (arg0, 1);
3229 right = TREE_OPERAND (arg1, 0);
3230 }
3231 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3232 {
3233 common = TREE_OPERAND (arg0, 1);
3234 left = TREE_OPERAND (arg0, 0);
3235 right = TREE_OPERAND (arg1, 1);
3236 }
3237 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3238 {
3239 common = TREE_OPERAND (arg0, 1);
3240 left = TREE_OPERAND (arg0, 0);
3241 right = TREE_OPERAND (arg1, 0);
3242 }
3243 else
3244 return 0;
3245
3246 common = fold_convert_loc (loc, type, common);
3247 left = fold_convert_loc (loc, type, left);
3248 right = fold_convert_loc (loc, type, right);
3249 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3250 fold_build2_loc (loc, code, type, left, right));
3251 }
3252
3253 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs or MULT_EXPRs, simplify a binary
3254 operation with code CODE. This optimization is unsafe (it can change rounding). */
3255 static tree
3256 distribute_real_division (location_t loc, enum tree_code code, tree type,
3257 tree arg0, tree arg1)
3258 {
3259 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3260 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3261
3262 /* (A / C) +- (B / C) -> (A +- B) / C. */
3263 if (mul0 == mul1
3264 && operand_equal_p (TREE_OPERAND (arg0, 1),
3265 TREE_OPERAND (arg1, 1), 0))
3266 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3267 fold_build2_loc (loc, code, type,
3268 TREE_OPERAND (arg0, 0),
3269 TREE_OPERAND (arg1, 0)),
3270 TREE_OPERAND (arg0, 1));
3271
3272 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3273 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3274 TREE_OPERAND (arg1, 0), 0)
3275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3276 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3277 {
3278 REAL_VALUE_TYPE r0, r1;
3279 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3280 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3281 if (!mul0)
3282 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3283 if (!mul1)
3284 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3285 real_arithmetic (&r0, code, &r0, &r1);
3286 return fold_build2_loc (loc, MULT_EXPR, type,
3287 TREE_OPERAND (arg0, 0),
3288 build_real (type, r0));
3289 }
3290
3291 return NULL_TREE;
3292 }
3293 \f
3294 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3295 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3296
3297 static tree
3298 make_bit_field_ref (location_t loc, tree inner, tree type,
3299 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3300 {
3301 tree result, bftype;
3302
3303 if (bitpos == 0)
3304 {
3305 tree size = TYPE_SIZE (TREE_TYPE (inner));
3306 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3307 || POINTER_TYPE_P (TREE_TYPE (inner)))
3308 && host_integerp (size, 0)
3309 && tree_low_cst (size, 0) == bitsize)
3310 return fold_convert_loc (loc, type, inner);
3311 }
3312
3313 bftype = type;
3314 if (TYPE_PRECISION (bftype) != bitsize
3315 || TYPE_UNSIGNED (bftype) == !unsignedp)
3316 bftype = build_nonstandard_integer_type (bitsize, 0);
3317
3318 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3319 size_int (bitsize), bitsize_int (bitpos));
3320
3321 if (bftype != type)
3322 result = fold_convert_loc (loc, type, result);
3323
3324 return result;
3325 }
3326
3327 /* Optimize a bit-field compare.
3328
3329 There are two cases: First is a compare against a constant and the
3330 second is a comparison of two items where the fields are at the same
3331 bit position relative to the start of a chunk (byte, halfword, word)
3332 large enough to contain it. In these cases we can avoid the shift
3333 implicit in bitfield extractions.
3334
3335 For constants, we emit a compare of the shifted constant with the
3336 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3337 compared. For two fields at the same position, we do the ANDs with the
3338 similar mask and compare the result of the ANDs.
3339
3340 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3341 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3342 are the left and right operands of the comparison, respectively.
3343
3344 If the optimization described above can be done, we return the resulting
3345 tree. Otherwise we return zero. */
3346
3347 static tree
3348 optimize_bit_field_compare (location_t loc, enum tree_code code,
3349 tree compare_type, tree lhs, tree rhs)
3350 {
3351 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3352 tree type = TREE_TYPE (lhs);
3353 tree signed_type, unsigned_type;
3354 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3355 enum machine_mode lmode, rmode, nmode;
3356 int lunsignedp, runsignedp;
3357 int lvolatilep = 0, rvolatilep = 0;
3358 tree linner, rinner = NULL_TREE;
3359 tree mask;
3360 tree offset;
3361
3362 /* Get all the information about the extractions being done. If the bit size
3363 is the same as the size of the underlying object, we aren't doing an
3364 extraction at all and so can do nothing. We also don't want to
3365 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3366 then will no longer be able to replace it. */
3367 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3368 &lunsignedp, &lvolatilep, false);
3369 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3370 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3371 return 0;
3372
3373 if (!const_p)
3374 {
3375 /* If this is not a constant, we can only do something if bit positions,
3376 sizes, and signedness are the same. */
3377 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3378 &runsignedp, &rvolatilep, false);
3379
3380 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3381 || lunsignedp != runsignedp || offset != 0
3382 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3383 return 0;
3384 }
3385
3386 /* See if we can find a mode to refer to this field. We should be able to,
3387 but fail if we can't. */
3388 if (lvolatilep
3389 && GET_MODE_BITSIZE (lmode) > 0
3390 && flag_strict_volatile_bitfields > 0)
3391 nmode = lmode;
3392 else
3393 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3394 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3395 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3396 TYPE_ALIGN (TREE_TYPE (rinner))),
3397 word_mode, lvolatilep || rvolatilep);
3398 if (nmode == VOIDmode)
3399 return 0;
3400
3401 /* Set signed and unsigned types of the precision of this mode for the
3402 shifts below. */
3403 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3404 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3405
3406 /* Compute the bit position and size for the new reference and our offset
3407 within it. If the new reference is the same size as the original, we
3408 won't optimize anything, so return zero. */
3409 nbitsize = GET_MODE_BITSIZE (nmode);
3410 nbitpos = lbitpos & ~ (nbitsize - 1);
3411 lbitpos -= nbitpos;
3412 if (nbitsize == lbitsize)
3413 return 0;
3414
3415 if (BYTES_BIG_ENDIAN)
3416 lbitpos = nbitsize - lbitsize - lbitpos;
3417
3418 /* Make the mask to be used against the extracted field. */
3419 mask = build_int_cst_type (unsigned_type, -1);
3420 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3421 mask = const_binop (RSHIFT_EXPR, mask,
3422 size_int (nbitsize - lbitsize - lbitpos));
3423
3424 if (! const_p)
3425 /* If not comparing with constant, just rework the comparison
3426 and return. */
3427 return fold_build2_loc (loc, code, compare_type,
3428 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3429 make_bit_field_ref (loc, linner,
3430 unsigned_type,
3431 nbitsize, nbitpos,
3432 1),
3433 mask),
3434 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3435 make_bit_field_ref (loc, rinner,
3436 unsigned_type,
3437 nbitsize, nbitpos,
3438 1),
3439 mask));
3440
3441 /* Otherwise, we are handling the constant case. See if the constant is too
3442 big for the field. Warn and return a tree for 0 (false) if so. We do
3443 this not only for its own sake, but to avoid having to test for this
3444 error case below. If we didn't, we might generate wrong code.
3445
3446 For unsigned fields, the constant shifted right by the field length should
3447 be all zero. For signed fields, the high-order bits should agree with
3448 the sign bit. */
3449
3450 if (lunsignedp)
3451 {
3452 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3453 fold_convert_loc (loc,
3454 unsigned_type, rhs),
3455 size_int (lbitsize))))
3456 {
3457 warning (0, "comparison is always %d due to width of bit-field",
3458 code == NE_EXPR);
3459 return constant_boolean_node (code == NE_EXPR, compare_type);
3460 }
3461 }
3462 else
3463 {
3464 tree tem = const_binop (RSHIFT_EXPR,
3465 fold_convert_loc (loc, signed_type, rhs),
3466 size_int (lbitsize - 1));
3467 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3468 {
3469 warning (0, "comparison is always %d due to width of bit-field",
3470 code == NE_EXPR);
3471 return constant_boolean_node (code == NE_EXPR, compare_type);
3472 }
3473 }
3474
3475 /* Single-bit compares should always be against zero. */
3476 if (lbitsize == 1 && ! integer_zerop (rhs))
3477 {
3478 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3479 rhs = build_int_cst (type, 0);
3480 }
3481
3482 /* Make a new bitfield reference, shift the constant over the
3483 appropriate number of bits and mask it with the computed mask
3484 (in case this was a signed field). If we changed it, make a new one. */
3485 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3486 if (lvolatilep)
3487 {
3488 TREE_SIDE_EFFECTS (lhs) = 1;
3489 TREE_THIS_VOLATILE (lhs) = 1;
3490 }
3491
3492 rhs = const_binop (BIT_AND_EXPR,
3493 const_binop (LSHIFT_EXPR,
3494 fold_convert_loc (loc, unsigned_type, rhs),
3495 size_int (lbitpos)),
3496 mask);
3497
3498 lhs = build2_loc (loc, code, compare_type,
3499 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3500 return lhs;
3501 }
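
/* A worked example (illustrative): given

     struct { unsigned f : 3; } s;   ... s.f == 5 ...

   the optimized form loads a whole mode-sized word with a
   BIT_FIELD_REF, ANDs it with the 3-bit mask shifted to the field's
   position, and compares against the constant 5 shifted likewise --
   avoiding the shift that a plain bit-field extraction would need.
   The mode actually used is whatever get_best_mode picks above.  */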
3502 \f
3503 /* Subroutine for fold_truthop: decode a field reference.
3504
3505 If EXP is a comparison reference, we return the innermost reference.
3506
3507 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3508 set to the starting bit number.
3509
3510 If the innermost field can be completely contained in a mode-sized
3511 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3512
3513 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3514 otherwise it is not changed.
3515
3516 *PUNSIGNEDP is set to the signedness of the field.
3517
3518 *PMASK is set to the mask used. This is either contained in a
3519 BIT_AND_EXPR or derived from the width of the field.
3520
3521 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3522
3523 Return 0 if this is not a component reference or is one that we can't
3524 do anything with. */
3525
3526 static tree
3527 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3528 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3529 int *punsignedp, int *pvolatilep,
3530 tree *pmask, tree *pand_mask)
3531 {
3532 tree outer_type = 0;
3533 tree and_mask = 0;
3534 tree mask, inner, offset;
3535 tree unsigned_type;
3536 unsigned int precision;
3537
3538 /* All the optimizations using this function assume integer fields.
3539 There are problems with FP fields since the type_for_size call
3540 below can fail for, e.g., XFmode. */
3541 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3542 return 0;
3543
3544 /* We are interested in the bare arrangement of bits, so strip everything
3545 that doesn't affect the machine mode. However, record the type of the
3546 outermost expression if it may matter below. */
3547 if (CONVERT_EXPR_P (exp)
3548 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3549 outer_type = TREE_TYPE (exp);
3550 STRIP_NOPS (exp);
3551
3552 if (TREE_CODE (exp) == BIT_AND_EXPR)
3553 {
3554 and_mask = TREE_OPERAND (exp, 1);
3555 exp = TREE_OPERAND (exp, 0);
3556 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3557 if (TREE_CODE (and_mask) != INTEGER_CST)
3558 return 0;
3559 }
3560
3561 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3562 punsignedp, pvolatilep, false);
3563 if ((inner == exp && and_mask == 0)
3564 || *pbitsize < 0 || offset != 0
3565 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3566 return 0;
3567
3568 /* If the number of bits in the reference is the same as the bitsize of
3569 the outer type, then the outer type gives the signedness. Otherwise
3570 (in case of a small bitfield) the signedness is unchanged. */
3571 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3572 *punsignedp = TYPE_UNSIGNED (outer_type);
3573
3574 /* Compute the mask to access the bitfield. */
3575 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3576 precision = TYPE_PRECISION (unsigned_type);
3577
3578 mask = build_int_cst_type (unsigned_type, -1);
3579
3580 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3581 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3582
3583 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3584 if (and_mask != 0)
3585 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3586 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3587
3588 *pmask = mask;
3589 *pand_mask = and_mask;
3590 return inner;
3591 }
3592
3593 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3594 bit positions. */
3595
3596 static int
3597 all_ones_mask_p (const_tree mask, int size)
3598 {
3599 tree type = TREE_TYPE (mask);
3600 unsigned int precision = TYPE_PRECISION (type);
3601 tree tmask;
3602
3603 tmask = build_int_cst_type (signed_type_for (type), -1);
3604
3605 return
3606 tree_int_cst_equal (mask,
3607 const_binop (RSHIFT_EXPR,
3608 const_binop (LSHIFT_EXPR, tmask,
3609 size_int (precision - size)),
3610 size_int (precision - size)));
3611 }
3612
3613 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3614 represents the sign bit of EXP's type. If EXP represents a sign
3615 or zero extension, also test VAL against the unextended type.
3616 The return value is the (sub)expression whose sign bit is VAL,
3617 or NULL_TREE otherwise. */
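/* For illustration (assuming a 32-bit type and 64-bit HOST_WIDE_INT):
   the sign bit of int is the constant 0x80000000, i.e. below we get
   LO == (unsigned HOST_WIDE_INT) 1 << 31 with HI == 0, and the value
   is compared under the mask MASK_LO == 0xffffffff. */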
3618
3619 static tree
3620 sign_bit_p (tree exp, const_tree val)
3621 {
3622 unsigned HOST_WIDE_INT mask_lo, lo;
3623 HOST_WIDE_INT mask_hi, hi;
3624 int width;
3625 tree t;
3626
3627 /* Tree EXP must have an integral type. */
3628 t = TREE_TYPE (exp);
3629 if (! INTEGRAL_TYPE_P (t))
3630 return NULL_TREE;
3631
3632 /* Tree VAL must be an integer constant. */
3633 if (TREE_CODE (val) != INTEGER_CST
3634 || TREE_OVERFLOW (val))
3635 return NULL_TREE;
3636
3637 width = TYPE_PRECISION (t);
3638 if (width > HOST_BITS_PER_WIDE_INT)
3639 {
3640 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3641 lo = 0;
3642
3643 mask_hi = ((unsigned HOST_WIDE_INT) -1
3644 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3645 mask_lo = -1;
3646 }
3647 else
3648 {
3649 hi = 0;
3650 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3651
3652 mask_hi = 0;
3653 mask_lo = ((unsigned HOST_WIDE_INT) -1
3654 >> (HOST_BITS_PER_WIDE_INT - width));
3655 }
3656
3657 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3658 treat VAL as if it were unsigned. */
3659 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3660 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3661 return exp;
3662
3663 /* Handle extension from a narrower type. */
3664 if (TREE_CODE (exp) == NOP_EXPR
3665 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3666 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3667
3668 return NULL_TREE;
3669 }
3670
3671 /* Subroutine for fold_truthop: determine if an operand is simple enough
3672 to be evaluated unconditionally. */
3673
3674 static int
3675 simple_operand_p (const_tree exp)
3676 {
3677 /* Strip any conversions that don't change the machine mode. */
3678 STRIP_NOPS (exp);
3679
3680 return (CONSTANT_CLASS_P (exp)
3681 || TREE_CODE (exp) == SSA_NAME
3682 || (DECL_P (exp)
3683 && ! TREE_ADDRESSABLE (exp)
3684 && ! TREE_THIS_VOLATILE (exp)
3685 && ! DECL_NONLOCAL (exp)
3686 /* Don't regard global variables as simple. They may be
3687 allocated in ways unknown to the compiler (shared memory,
3688 #pragma weak, etc). */
3689 && ! TREE_PUBLIC (exp)
3690 && ! DECL_EXTERNAL (exp)
3691 /* Loading a static variable is unduly expensive, but global
3692 registers aren't expensive. */
3693 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3694 }
3695 \f
3696 /* The following functions are subroutines to fold_range_test and allow it to
3697 try to change a logical combination of comparisons into a range test.
3698
3699 For example, both
3700 X == 2 || X == 3 || X == 4 || X == 5
3701 and
3702 X >= 2 && X <= 5
3703 are converted to
3704 (unsigned) (X - 2) <= 3
3705
3706 We describe each set of comparisons as being either inside or outside
3707 a range, using a variable named like IN_P, and then describe the
3708 range with a lower and upper bound. If one of the bounds is omitted,
3709 it represents either the highest or lowest value of the type.
3710
3711 In the comments below, we represent a range by two numbers in brackets
3712 preceded by a "+" to designate being inside that range, or a "-" to
3713 designate being outside that range, so the condition can be inverted by
3714 flipping the prefix. An omitted bound is represented by a "-". For
3715 example, "- [-, 10]" means being outside the range starting at the lowest
3716 possible value and ending at 10, in other words, being greater than 10.
3717 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3718 always false.
3719
3720 We set up things so that the missing bounds are handled in a consistent
3721 manner so neither a missing bound nor "true" and "false" need to be
3722 handled using a special case. */
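/* A worked example (illustrative): X == 2 || X == 3 is first seen as
   the union of "+ [2, 2]" and "+ [3, 3]", which merge_ranges combines
   into the single range "+ [2, 3]"; build_range_check then emits the
   one test (unsigned) (X - 2) <= 1. */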
3723
3724 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3725 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3726 and UPPER1_P are nonzero if the respective argument is an upper bound
3727 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3728 must be specified for a comparison. ARG1 will be converted to ARG0's
3729 type if both are specified. */
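/* For example (illustrative): range_binop (LT_EXPR, type, NULL_TREE, 0,
   c, 0) yields true for any finite constant C, since an omitted lower
   bound stands for a value below every representable number. */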
3730
3731 static tree
3732 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3733 tree arg1, int upper1_p)
3734 {
3735 tree tem;
3736 int result;
3737 int sgn0, sgn1;
3738
3739 /* If neither arg represents infinity, do the normal operation.
3740 Else, if not a comparison, return infinity. Else handle the special
3741 comparison rules. Note that most of the cases below won't occur, but
3742 are handled for consistency. */
3743
3744 if (arg0 != 0 && arg1 != 0)
3745 {
3746 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3747 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3748 STRIP_NOPS (tem);
3749 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3750 }
3751
3752 if (TREE_CODE_CLASS (code) != tcc_comparison)
3753 return 0;
3754
3755 /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 for an
3756 omitted upper bound, and 0 for a finite value. In real mathematics
3757 we could not assume that open-ended ranges compare equal. But this
3758 is computer arithmetic, where numbers are finite, so we may stand in
3759 a value Z for any omitted bound, Z lying beyond every representable
3760 number. This permits us to treat unbounded ranges as equal. */
3761 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3762 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3763 switch (code)
3764 {
3765 case EQ_EXPR:
3766 result = sgn0 == sgn1;
3767 break;
3768 case NE_EXPR:
3769 result = sgn0 != sgn1;
3770 break;
3771 case LT_EXPR:
3772 result = sgn0 < sgn1;
3773 break;
3774 case LE_EXPR:
3775 result = sgn0 <= sgn1;
3776 break;
3777 case GT_EXPR:
3778 result = sgn0 > sgn1;
3779 break;
3780 case GE_EXPR:
3781 result = sgn0 >= sgn1;
3782 break;
3783 default:
3784 gcc_unreachable ();
3785 }
3786
3787 return constant_boolean_node (result, type);
3788 }
3789 \f
3790 /* Helper routine for make_range. Perform one step for it, return
3791 new expression if the loop should continue or NULL_TREE if it should
3792 stop. */
3793
3794 tree
3795 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3796 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3797 bool *strict_overflow_p)
3798 {
3799 tree arg0_type = TREE_TYPE (arg0);
3800 tree n_low, n_high, low = *p_low, high = *p_high;
3801 int in_p = *p_in_p, n_in_p;
3802
3803 switch (code)
3804 {
3805 case TRUTH_NOT_EXPR:
3806 *p_in_p = ! in_p;
3807 return arg0;
3808
3809 case EQ_EXPR: case NE_EXPR:
3810 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3811 /* We can only do something if the range is testing for zero
3812 and if the second operand is an integer constant. Note that
3813 saying something is "in" the range we make is done by
3814 complementing IN_P, since IN_P is initially set for the case of
3815 being not equal to zero; "out" leaves it alone. */
3816 if (low == NULL_TREE || high == NULL_TREE
3817 || ! integer_zerop (low) || ! integer_zerop (high)
3818 || TREE_CODE (arg1) != INTEGER_CST)
3819 return NULL_TREE;
3820
3821 switch (code)
3822 {
3823 case NE_EXPR: /* - [c, c] */
3824 low = high = arg1;
3825 break;
3826 case EQ_EXPR: /* + [c, c] */
3827 in_p = ! in_p, low = high = arg1;
3828 break;
3829 case GT_EXPR: /* - [-, c] */
3830 low = 0, high = arg1;
3831 break;
3832 case GE_EXPR: /* + [c, -] */
3833 in_p = ! in_p, low = arg1, high = 0;
3834 break;
3835 case LT_EXPR: /* - [c, -] */
3836 low = arg1, high = 0;
3837 break;
3838 case LE_EXPR: /* + [-, c] */
3839 in_p = ! in_p, low = 0, high = arg1;
3840 break;
3841 default:
3842 gcc_unreachable ();
3843 }
3844
3845 /* If this is an unsigned comparison, we also know that EXP is
3846 greater than or equal to zero. We base the range tests we make
3847 on that fact, so we record it here so we can parse existing
3848 range tests. We test arg0_type since often the return type
3849 of, e.g. EQ_EXPR, is boolean. */
3850 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3851 {
3852 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3853 in_p, low, high, 1,
3854 build_int_cst (arg0_type, 0),
3855 NULL_TREE))
3856 return NULL_TREE;
3857
3858 in_p = n_in_p, low = n_low, high = n_high;
3859
3860 /* If the high bound is missing, but we have a nonzero low
3861 bound, reverse the range so it goes from zero to the low bound
3862 minus 1. */
3863 if (high == 0 && low && ! integer_zerop (low))
3864 {
3865 in_p = ! in_p;
3866 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3867 integer_one_node, 0);
3868 low = build_int_cst (arg0_type, 0);
3869 }
3870 }
3871
3872 *p_low = low;
3873 *p_high = high;
3874 *p_in_p = in_p;
3875 return arg0;
3876
3877 case NEGATE_EXPR:
3878 /* (-x) IN [a,b] -> x in [-b, -a] */
3879 n_low = range_binop (MINUS_EXPR, exp_type,
3880 build_int_cst (exp_type, 0),
3881 0, high, 1);
3882 n_high = range_binop (MINUS_EXPR, exp_type,
3883 build_int_cst (exp_type, 0),
3884 0, low, 0);
3885 if (n_high != 0 && TREE_OVERFLOW (n_high))
3886 return NULL_TREE;
3887 goto normalize;
3888
3889 case BIT_NOT_EXPR:
3890 /* ~ X -> -X - 1 */
3891 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3892 build_int_cst (exp_type, 1));
3893
3894 case PLUS_EXPR:
3895 case MINUS_EXPR:
3896 if (TREE_CODE (arg1) != INTEGER_CST)
3897 return NULL_TREE;
3898
3899 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3900 move a constant to the other side. */
3901 if (!TYPE_UNSIGNED (arg0_type)
3902 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3903 return NULL_TREE;
3904
3905 /* If EXP is signed, any overflow in the computation is undefined,
3906 so we don't worry about it so long as our computations on
3907 the bounds don't overflow. For unsigned, overflow is defined
3908 and this is exactly the right thing. */
3909 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3910 arg0_type, low, 0, arg1, 0);
3911 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3912 arg0_type, high, 1, arg1, 0);
3913 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3914 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3915 return NULL_TREE;
3916
3917 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3918 *strict_overflow_p = true;
3919
3920 normalize:
3921 /* Check for an unsigned range which has wrapped around the maximum
3922 value thus making n_high < n_low, and normalize it. */
3923 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3924 {
3925 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3926 integer_one_node, 0);
3927 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3928 integer_one_node, 0);
3929
3930 /* If the range is of the form +/- [ x+1, x ], we won't
3931 be able to normalize it. But then, it represents the
3932 whole range or the empty set, so make it
3933 +/- [ -, - ]. */
3934 if (tree_int_cst_equal (n_low, low)
3935 && tree_int_cst_equal (n_high, high))
3936 low = high = 0;
3937 else
3938 in_p = ! in_p;
3939 }
3940 else
3941 low = n_low, high = n_high;
3942
3943 *p_low = low;
3944 *p_high = high;
3945 *p_in_p = in_p;
3946 return arg0;
3947
3948 CASE_CONVERT:
3949 case NON_LVALUE_EXPR:
3950 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3951 return NULL_TREE;
3952
3953 if (! INTEGRAL_TYPE_P (arg0_type)
3954 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3955 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3956 return NULL_TREE;
3957
3958 n_low = low, n_high = high;
3959
3960 if (n_low != 0)
3961 n_low = fold_convert_loc (loc, arg0_type, n_low);
3962
3963 if (n_high != 0)
3964 n_high = fold_convert_loc (loc, arg0_type, n_high);
3965
3966 /* If we're converting arg0 from an unsigned type to exp's
3967 signed type, we will be doing the comparison as unsigned.
3968 The tests above have already verified that LOW and HIGH
3969 are both positive.
3970
3971 So we have to ensure that we will handle large unsigned
3972 values the same way that the current signed bounds treat
3973 negative values. */
3974
3975 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3976 {
3977 tree high_positive;
3978 tree equiv_type;
3979 /* For fixed-point modes, we need to pass the saturating flag
3980 as the 2nd parameter. */
3981 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
3982 equiv_type
3983 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
3984 TYPE_SATURATING (arg0_type));
3985 else
3986 equiv_type
3987 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
3988
3989 /* A range without an upper bound is, naturally, unbounded.
3990 Since convert would have cropped a very large value, use
3991 the max value for the destination type. */
3992 high_positive
3993 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3994 : TYPE_MAX_VALUE (arg0_type);
3995
3996 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3997 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
3998 fold_convert_loc (loc, arg0_type,
3999 high_positive),
4000 build_int_cst (arg0_type, 1));
4001
4002 /* If the low bound is specified, "and" the range with the
4003 range for which the original unsigned value will be
4004 positive. */
4005 if (low != 0)
4006 {
4007 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4008 1, fold_convert_loc (loc, arg0_type,
4009 integer_zero_node),
4010 high_positive))
4011 return NULL_TREE;
4012
4013 in_p = (n_in_p == in_p);
4014 }
4015 else
4016 {
4017 /* Otherwise, "or" the range with the range of the input
4018 that will be interpreted as negative. */
4019 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4020 1, fold_convert_loc (loc, arg0_type,
4021 integer_zero_node),
4022 high_positive))
4023 return NULL_TREE;
4024
4025 in_p = (in_p != n_in_p);
4026 }
4027 }
4028
4029 *p_low = n_low;
4030 *p_high = n_high;
4031 *p_in_p = in_p;
4032 return arg0;
4033
4034 default:
4035 return NULL_TREE;
4036 }
4037 }
4038
4039 /* Given EXP, a logical expression, set the range it is testing into
4040 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4041 actually being tested. *PLOW and *PHIGH will be made of the same
4042 type as the returned expression. If EXP is not a comparison, we
4043 will most likely not be returning a useful value and range. Set
4044 *STRICT_OVERFLOW_P to true if the return value is only valid
4045 because signed overflow is undefined; otherwise, do not change
4046 *STRICT_OVERFLOW_P. */
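/* For instance (illustrative): for EXP = "x > 5" with signed x, the
   returned expression is x, with *PIN_P == 0, *PLOW omitted and
   *PHIGH == 5, i.e. the range "- [-, 5]". */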
4047
4048 tree
4049 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4050 bool *strict_overflow_p)
4051 {
4052 enum tree_code code;
4053 tree arg0, arg1 = NULL_TREE;
4054 tree exp_type, nexp;
4055 int in_p;
4056 tree low, high;
4057 location_t loc = EXPR_LOCATION (exp);
4058
4059 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4060 and see if we can refine the range. Some of the cases below may not
4061 happen, but it doesn't seem worth worrying about this. We keep
4062 iterating for as long as make_range_step can refine the expression;
4063 once it returns NULL_TREE we stop. */
4064
4065 in_p = 0;
4066 low = high = build_int_cst (TREE_TYPE (exp), 0);
4067
4068 while (1)
4069 {
4070 code = TREE_CODE (exp);
4071 exp_type = TREE_TYPE (exp);
4072 arg0 = NULL_TREE;
4073
4074 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4075 {
4076 if (TREE_OPERAND_LENGTH (exp) > 0)
4077 arg0 = TREE_OPERAND (exp, 0);
4078 if (TREE_CODE_CLASS (code) == tcc_binary
4079 || TREE_CODE_CLASS (code) == tcc_comparison
4080 || (TREE_CODE_CLASS (code) == tcc_expression
4081 && TREE_OPERAND_LENGTH (exp) > 1))
4082 arg1 = TREE_OPERAND (exp, 1);
4083 }
4084 if (arg0 == NULL_TREE)
4085 break;
4086
4087 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4088 &high, &in_p, strict_overflow_p);
4089 if (nexp == NULL_TREE)
4090 break;
4091 exp = nexp;
4092 }
4093
4094 /* If EXP is a constant, we can evaluate whether this is true or false. */
4095 if (TREE_CODE (exp) == INTEGER_CST)
4096 {
4097 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4098 exp, 0, low, 0))
4099 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4100 exp, 1, high, 1)));
4101 low = high = 0;
4102 exp = 0;
4103 }
4104
4105 *pin_p = in_p, *plow = low, *phigh = high;
4106 return exp;
4107 }
4108 \f
4109 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4110 type, TYPE, return an expression to test if EXP is in (or out of, depending
4111 on IN_P) the range. Return 0 if the test couldn't be created. */
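/* For example (illustrative): for the range + [2, 5] on an integer
   EXP this builds (unsigned) (EXP - 2) <= 3, while + [-, 5] simply
   becomes EXP <= 5. */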
4112
4113 tree
4114 build_range_check (location_t loc, tree type, tree exp, int in_p,
4115 tree low, tree high)
4116 {
4117 tree etype = TREE_TYPE (exp), value;
4118
4119 #ifdef HAVE_canonicalize_funcptr_for_compare
4120 /* Disable this optimization for function pointer expressions
4121 on targets that require function pointer canonicalization. */
4122 if (HAVE_canonicalize_funcptr_for_compare
4123 && TREE_CODE (etype) == POINTER_TYPE
4124 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4125 return NULL_TREE;
4126 #endif
4127
4128 if (! in_p)
4129 {
4130 value = build_range_check (loc, type, exp, 1, low, high);
4131 if (value != 0)
4132 return invert_truthvalue_loc (loc, value);
4133
4134 return 0;
4135 }
4136
4137 if (low == 0 && high == 0)
4138 return build_int_cst (type, 1);
4139
4140 if (low == 0)
4141 return fold_build2_loc (loc, LE_EXPR, type, exp,
4142 fold_convert_loc (loc, etype, high));
4143
4144 if (high == 0)
4145 return fold_build2_loc (loc, GE_EXPR, type, exp,
4146 fold_convert_loc (loc, etype, low));
4147
4148 if (operand_equal_p (low, high, 0))
4149 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4150 fold_convert_loc (loc, etype, low));
4151
4152 if (integer_zerop (low))
4153 {
4154 if (! TYPE_UNSIGNED (etype))
4155 {
4156 etype = unsigned_type_for (etype);
4157 high = fold_convert_loc (loc, etype, high);
4158 exp = fold_convert_loc (loc, etype, exp);
4159 }
4160 return build_range_check (loc, type, exp, 1, 0, high);
4161 }
4162
4163 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4164 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4165 {
4166 unsigned HOST_WIDE_INT lo;
4167 HOST_WIDE_INT hi;
4168 int prec;
4169
4170 prec = TYPE_PRECISION (etype);
4171 if (prec <= HOST_BITS_PER_WIDE_INT)
4172 {
4173 hi = 0;
4174 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4175 }
4176 else
4177 {
4178 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4179 lo = (unsigned HOST_WIDE_INT) -1;
4180 }
4181
4182 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4183 {
4184 if (TYPE_UNSIGNED (etype))
4185 {
4186 tree signed_etype = signed_type_for (etype);
4187 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4188 etype
4189 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4190 else
4191 etype = signed_etype;
4192 exp = fold_convert_loc (loc, etype, exp);
4193 }
4194 return fold_build2_loc (loc, GT_EXPR, type, exp,
4195 build_int_cst (etype, 0));
4196 }
4197 }
4198
4199 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4200 This requires wrap-around arithmetic for the type of the expression.
4201 First make sure that arithmetic in this type is valid, then make sure
4202 that it wraps around. */
4203 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4204 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4205 TYPE_UNSIGNED (etype));
4206
4207 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4208 {
4209 tree utype, minv, maxv;
4210
4211 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4212 for the type in question, as we rely on this here. */
4213 utype = unsigned_type_for (etype);
4214 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4215 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4216 integer_one_node, 1);
4217 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4218
4219 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4220 minv, 1, maxv, 1)))
4221 etype = utype;
4222 else
4223 return 0;
4224 }
4225
4226 high = fold_convert_loc (loc, etype, high);
4227 low = fold_convert_loc (loc, etype, low);
4228 exp = fold_convert_loc (loc, etype, exp);
4229
4230 value = const_binop (MINUS_EXPR, high, low);
4231
4233 if (POINTER_TYPE_P (etype))
4234 {
4235 if (value != 0 && !TREE_OVERFLOW (value))
4236 {
4237 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4238 return build_range_check (loc, type,
4239 fold_build_pointer_plus_loc (loc, exp, low),
4240 1, build_int_cst (etype, 0), value);
4241 }
4242 return 0;
4243 }
4244
4245 if (value != 0 && !TREE_OVERFLOW (value))
4246 return build_range_check (loc, type,
4247 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4248 1, build_int_cst (etype, 0), value);
4249
4250 return 0;
4251 }
4252 \f
4253 /* Return the predecessor of VAL in its type, handling the infinite case. */
4254
4255 static tree
4256 range_predecessor (tree val)
4257 {
4258 tree type = TREE_TYPE (val);
4259
4260 if (INTEGRAL_TYPE_P (type)
4261 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4262 return 0;
4263 else
4264 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4265 }
4266
4267 /* Return the successor of VAL in its type, handling the infinite case. */
4268
4269 static tree
4270 range_successor (tree val)
4271 {
4272 tree type = TREE_TYPE (val);
4273
4274 if (INTEGRAL_TYPE_P (type)
4275 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4276 return 0;
4277 else
4278 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4279 }
4280
4281 /* Given two ranges, see if we can merge them into one. Return 1 if we
4282 can, 0 if we can't. Set the output range into the specified parameters. */
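/* For example (illustrative): merging + [2, 5] with + [4, 9] (both
   included) yields the intersection + [4, 5], while merging - [2, 2]
   with - [3, 3] yields the adjacent union - [2, 3]. */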
4283
4284 bool
4285 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4286 tree high0, int in1_p, tree low1, tree high1)
4287 {
4288 int no_overlap;
4289 int subset;
4290 int temp;
4291 tree tem;
4292 int in_p;
4293 tree low, high;
4294 int lowequal = ((low0 == 0 && low1 == 0)
4295 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4296 low0, 0, low1, 0)));
4297 int highequal = ((high0 == 0 && high1 == 0)
4298 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4299 high0, 1, high1, 1)));
4300
4301 /* Make range 0 be the range that starts first, or ends last if they
4302 start at the same value. Swap them if it isn't. */
4303 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4304 low0, 0, low1, 0))
4305 || (lowequal
4306 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4307 high1, 1, high0, 1))))
4308 {
4309 temp = in0_p, in0_p = in1_p, in1_p = temp;
4310 tem = low0, low0 = low1, low1 = tem;
4311 tem = high0, high0 = high1, high1 = tem;
4312 }
4313
4314 /* Now flag two cases, whether the ranges are disjoint or whether the
4315 second range is totally subsumed in the first. Note that the tests
4316 below are simplified by the ones above. */
4317 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4318 high0, 1, low1, 0));
4319 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4320 high1, 1, high0, 1));
4321
4322 /* We now have four cases, depending on whether we are including or
4323 excluding the two ranges. */
4324 if (in0_p && in1_p)
4325 {
4326 /* If they don't overlap, the result is false. If the second range
4327 is a subset it is the result. Otherwise, the range is from the start
4328 of the second to the end of the first. */
4329 if (no_overlap)
4330 in_p = 0, low = high = 0;
4331 else if (subset)
4332 in_p = 1, low = low1, high = high1;
4333 else
4334 in_p = 1, low = low1, high = high0;
4335 }
4336
4337 else if (in0_p && ! in1_p)
4338 {
4339 /* If they don't overlap, the result is the first range. If they are
4340 equal, the result is false. If the second range is a subset of the
4341 first, and the ranges begin at the same place, we go from just after
4342 the end of the second range to the end of the first. If the second
4343 range is not a subset of the first, or if it is a subset and both
4344 ranges end at the same place, the range starts at the start of the
4345 first range and ends just before the second range.
4346 Otherwise, we can't describe this as a single range. */
4347 if (no_overlap)
4348 in_p = 1, low = low0, high = high0;
4349 else if (lowequal && highequal)
4350 in_p = 0, low = high = 0;
4351 else if (subset && lowequal)
4352 {
4353 low = range_successor (high1);
4354 high = high0;
4355 in_p = 1;
4356 if (low == 0)
4357 {
4358 /* We are in the weird situation where high0 > high1 but
4359 high1 has no successor. Punt. */
4360 return 0;
4361 }
4362 }
4363 else if (! subset || highequal)
4364 {
4365 low = low0;
4366 high = range_predecessor (low1);
4367 in_p = 1;
4368 if (high == 0)
4369 {
4370 /* low0 < low1 but low1 has no predecessor. Punt. */
4371 return 0;
4372 }
4373 }
4374 else
4375 return 0;
4376 }
4377
4378 else if (! in0_p && in1_p)
4379 {
4380 /* If they don't overlap, the result is the second range. If the second
4381 is a subset of the first, the result is false. Otherwise,
4382 the range starts just after the first range and ends at the
4383 end of the second. */
4384 if (no_overlap)
4385 in_p = 1, low = low1, high = high1;
4386 else if (subset || highequal)
4387 in_p = 0, low = high = 0;
4388 else
4389 {
4390 low = range_successor (high0);
4391 high = high1;
4392 in_p = 1;
4393 if (low == 0)
4394 {
4395 /* high1 > high0 but high0 has no successor. Punt. */
4396 return 0;
4397 }
4398 }
4399 }
4400
4401 else
4402 {
4403 /* The case where we are excluding both ranges. Here the complex case
4404 is if they don't overlap. In that case, the only time we have a
4405 range is if they are adjacent. If the second is a subset of the
4406 first, the result is the first. Otherwise, the range to exclude
4407 starts at the beginning of the first range and ends at the end of the
4408 second. */
4409 if (no_overlap)
4410 {
4411 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4412 range_successor (high0),
4413 1, low1, 0)))
4414 in_p = 0, low = low0, high = high1;
4415 else
4416 {
4417 /* Canonicalize - [min, x] into - [-, x]. */
4418 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4419 switch (TREE_CODE (TREE_TYPE (low0)))
4420 {
4421 case ENUMERAL_TYPE:
4422 if (TYPE_PRECISION (TREE_TYPE (low0))
4423 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4424 break;
4425 /* FALLTHROUGH */
4426 case INTEGER_TYPE:
4427 if (tree_int_cst_equal (low0,
4428 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4429 low0 = 0;
4430 break;
4431 case POINTER_TYPE:
4432 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4433 && integer_zerop (low0))
4434 low0 = 0;
4435 break;
4436 default:
4437 break;
4438 }
4439
4440 /* Canonicalize - [x, max] into - [x, -]. */
4441 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4442 switch (TREE_CODE (TREE_TYPE (high1)))
4443 {
4444 case ENUMERAL_TYPE:
4445 if (TYPE_PRECISION (TREE_TYPE (high1))
4446 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4447 break;
4448 /* FALLTHROUGH */
4449 case INTEGER_TYPE:
4450 if (tree_int_cst_equal (high1,
4451 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4452 high1 = 0;
4453 break;
4454 case POINTER_TYPE:
4455 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4456 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4457 high1, 1,
4458 integer_one_node, 1)))
4459 high1 = 0;
4460 break;
4461 default:
4462 break;
4463 }
4464
4465 /* The ranges might also be adjacent between the maximum and
4466 minimum values of the given type. For
4467 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4468 return + [x + 1, y - 1]. */
4469 if (low0 == 0 && high1 == 0)
4470 {
4471 low = range_successor (high0);
4472 high = range_predecessor (low1);
4473 if (low == 0 || high == 0)
4474 return 0;
4475
4476 in_p = 1;
4477 }
4478 else
4479 return 0;
4480 }
4481 }
4482 else if (subset)
4483 in_p = 0, low = low0, high = high0;
4484 else
4485 in_p = 0, low = low0, high = high1;
4486 }
4487
4488 *pin_p = in_p, *plow = low, *phigh = high;
4489 return 1;
4490 }
4491 \f
4492
4493 /* Subroutine of fold, looking inside expressions of the form
4494 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4495 of the COND_EXPR. This function is being used also to optimize
4496 A op B ? C : A, by reversing the comparison first.
4497
4498 Return a folded expression whose code is not a COND_EXPR
4499 anymore, or NULL_TREE if no folding opportunity is found. */
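/* For instance (illustrative): for signed x, "x >= 0 ? x : -x"
   becomes ABS_EXPR <x>, and "x < y ? x : y" becomes the MIN_EXPR of
   the two operands, subject to the signed-zero and NaN caveats
   spelled out below. */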
4500
4501 static tree
4502 fold_cond_expr_with_comparison (location_t loc, tree type,
4503 tree arg0, tree arg1, tree arg2)
4504 {
4505 enum tree_code comp_code = TREE_CODE (arg0);
4506 tree arg00 = TREE_OPERAND (arg0, 0);
4507 tree arg01 = TREE_OPERAND (arg0, 1);
4508 tree arg1_type = TREE_TYPE (arg1);
4509 tree tem;
4510
4511 STRIP_NOPS (arg1);
4512 STRIP_NOPS (arg2);
4513
4514 /* If we have A op 0 ? A : -A, consider applying the following
4515 transformations:
4516
4517 A == 0? A : -A same as -A
4518 A != 0? A : -A same as A
4519 A >= 0? A : -A same as abs (A)
4520 A > 0? A : -A same as abs (A)
4521 A <= 0? A : -A same as -abs (A)
4522 A < 0? A : -A same as -abs (A)
4523
4524 None of these transformations work for modes with signed
4525 zeros. If A is +/-0, the first two transformations will
4526 change the sign of the result (from +0 to -0, or vice
4527 versa). The last four will fix the sign of the result,
4528 even though the original expressions could be positive or
4529 negative, depending on the sign of A.
4530
4531 Note that all these transformations are correct if A is
4532 NaN, since the two alternatives (A and -A) are also NaNs. */
4533 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4534 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4535 ? real_zerop (arg01)
4536 : integer_zerop (arg01))
4537 && ((TREE_CODE (arg2) == NEGATE_EXPR
4538 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4539 /* In the case that A is of the form X-Y, '-A' (arg2) may
4540 have already been folded to Y-X; check for that. */
4541 || (TREE_CODE (arg1) == MINUS_EXPR
4542 && TREE_CODE (arg2) == MINUS_EXPR
4543 && operand_equal_p (TREE_OPERAND (arg1, 0),
4544 TREE_OPERAND (arg2, 1), 0)
4545 && operand_equal_p (TREE_OPERAND (arg1, 1),
4546 TREE_OPERAND (arg2, 0), 0))))
4547 switch (comp_code)
4548 {
4549 case EQ_EXPR:
4550 case UNEQ_EXPR:
4551 tem = fold_convert_loc (loc, arg1_type, arg1);
4552 return pedantic_non_lvalue_loc (loc,
4553 fold_convert_loc (loc, type,
4554 negate_expr (tem)));
4555 case NE_EXPR:
4556 case LTGT_EXPR:
4557 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4558 case UNGE_EXPR:
4559 case UNGT_EXPR:
4560 if (flag_trapping_math)
4561 break;
4562 /* Fall through. */
4563 case GE_EXPR:
4564 case GT_EXPR:
4565 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4566 arg1 = fold_convert_loc (loc, signed_type_for
4567 (TREE_TYPE (arg1)), arg1);
4568 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4569 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4570 case UNLE_EXPR:
4571 case UNLT_EXPR:
4572 if (flag_trapping_math)
4573 break;
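/* Fall through. */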
4574 case LE_EXPR:
4575 case LT_EXPR:
4576 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4577 arg1 = fold_convert_loc (loc, signed_type_for
4578 (TREE_TYPE (arg1)), arg1);
4579 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4580 return negate_expr (fold_convert_loc (loc, type, tem));
4581 default:
4582 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4583 break;
4584 }
4585
4586 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4587 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4588 both transformations are correct when A is NaN: A != 0
4589 is then true, and A == 0 is false. */
4590
4591 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4592 && integer_zerop (arg01) && integer_zerop (arg2))
4593 {
4594 if (comp_code == NE_EXPR)
4595 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4596 else if (comp_code == EQ_EXPR)
4597 return build_int_cst (type, 0);
4598 }
4599
4600 /* Try some transformations of A op B ? A : B.
4601
4602 A == B? A : B same as B
4603 A != B? A : B same as A
4604 A >= B? A : B same as max (A, B)
4605 A > B? A : B same as max (B, A)
4606 A <= B? A : B same as min (A, B)
4607 A < B? A : B same as min (B, A)
4608
4609 As above, these transformations don't work in the presence
4610 of signed zeros. For example, if A and B are zeros of
4611 opposite sign, the first two transformations will change
4612 the sign of the result. In the last four, the original
4613 expressions give different results for (A=+0, B=-0) and
4614 (A=-0, B=+0), but the transformed expressions do not.
4615
4616 The first two transformations are correct if either A or B
4617 is a NaN. In the first transformation, the condition will
4618 be false, and B will indeed be chosen. In the case of the
4619 second transformation, the condition A != B will be true,
4620 and A will be chosen.
4621
4622 The conversions to max() and min() are not correct if B is
4623 a number and A is not. The conditions in the original
4624 expressions will be false, so all four give B. The min()
4625 and max() versions would give a NaN instead. */
4626 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4627 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4628 /* Avoid these transformations if the COND_EXPR may be used
4629 as an lvalue in the C++ front-end. PR c++/19199. */
4630 && (in_gimple_form
4631 || (strcmp (lang_hooks.name, "GNU C++") != 0
4632 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4633 || ! maybe_lvalue_p (arg1)
4634 || ! maybe_lvalue_p (arg2)))
4635 {
4636 tree comp_op0 = arg00;
4637 tree comp_op1 = arg01;
4638 tree comp_type = TREE_TYPE (comp_op0);
4639
4640 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4641 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4642 {
4643 comp_type = type;
4644 comp_op0 = arg1;
4645 comp_op1 = arg2;
4646 }
4647
4648 switch (comp_code)
4649 {
4650 case EQ_EXPR:
4651 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4652 case NE_EXPR:
4653 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4654 case LE_EXPR:
4655 case LT_EXPR:
4656 case UNLE_EXPR:
4657 case UNLT_EXPR:
4658 /* In C++ a ?: expression can be an lvalue, so put the
4659 operand which will be used if they are equal first
4660 so that we can convert this back to the
4661 corresponding COND_EXPR. */
4662 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4663 {
4664 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4665 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4666 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4667 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4668 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4669 comp_op1, comp_op0);
4670 return pedantic_non_lvalue_loc (loc,
4671 fold_convert_loc (loc, type, tem));
4672 }
4673 break;
4674 case GE_EXPR:
4675 case GT_EXPR:
4676 case UNGE_EXPR:
4677 case UNGT_EXPR:
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4679 {
4680 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4681 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4682 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4683 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4684 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4685 comp_op1, comp_op0);
4686 return pedantic_non_lvalue_loc (loc,
4687 fold_convert_loc (loc, type, tem));
4688 }
4689 break;
4690 case UNEQ_EXPR:
4691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4692 return pedantic_non_lvalue_loc (loc,
4693 fold_convert_loc (loc, type, arg2));
4694 break;
4695 case LTGT_EXPR:
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4697 return pedantic_non_lvalue_loc (loc,
4698 fold_convert_loc (loc, type, arg1));
4699 break;
4700 default:
4701 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4702 break;
4703 }
4704 }
4705
4706 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4707 we might still be able to simplify this. For example,
4708 if C1 is one less or one more than C2, this might have started
4709 out as a MIN or MAX and been transformed by this function.
4710 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
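/* E.g. (illustrative): "x < 3 ? x : 2" has C1 == C2 + 1 and is
   rewritten as MIN_EXPR <x, 2>. */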
4711
4712 if (INTEGRAL_TYPE_P (type)
4713 && TREE_CODE (arg01) == INTEGER_CST
4714 && TREE_CODE (arg2) == INTEGER_CST)
4715 switch (comp_code)
4716 {
4717 case EQ_EXPR:
4718 if (TREE_CODE (arg1) == INTEGER_CST)
4719 break;
4720 /* We can replace A with C1 in this case. */
4721 arg1 = fold_convert_loc (loc, type, arg01);
4722 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4723
4724 case LT_EXPR:
4725 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4726 MIN_EXPR, to preserve the signedness of the comparison. */
4727 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4728 OEP_ONLY_CONST)
4729 && operand_equal_p (arg01,
4730 const_binop (PLUS_EXPR, arg2,
4731 build_int_cst (type, 1)),
4732 OEP_ONLY_CONST))
4733 {
4734 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4735 fold_convert_loc (loc, TREE_TYPE (arg00),
4736 arg2));
4737 return pedantic_non_lvalue_loc (loc,
4738 fold_convert_loc (loc, type, tem));
4739 }
4740 break;
4741
4742 case LE_EXPR:
4743 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4744 as above. */
4745 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4746 OEP_ONLY_CONST)
4747 && operand_equal_p (arg01,
4748 const_binop (MINUS_EXPR, arg2,
4749 build_int_cst (type, 1)),
4750 OEP_ONLY_CONST))
4751 {
4752 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4753 fold_convert_loc (loc, TREE_TYPE (arg00),
4754 arg2));
4755 return pedantic_non_lvalue_loc (loc,
4756 fold_convert_loc (loc, type, tem));
4757 }
4758 break;
4759
4760 case GT_EXPR:
4761 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4762 MAX_EXPR, to preserve the signedness of the comparison. */
4763 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4764 OEP_ONLY_CONST)
4765 && operand_equal_p (arg01,
4766 const_binop (MINUS_EXPR, arg2,
4767 build_int_cst (type, 1)),
4768 OEP_ONLY_CONST))
4769 {
4770 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4771 fold_convert_loc (loc, TREE_TYPE (arg00),
4772 arg2));
4773 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4774 }
4775 break;
4776
4777 case GE_EXPR:
4778 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4779 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4780 OEP_ONLY_CONST)
4781 && operand_equal_p (arg01,
4782 const_binop (PLUS_EXPR, arg2,
4783 build_int_cst (type, 1)),
4784 OEP_ONLY_CONST))
4785 {
4786 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4787 fold_convert_loc (loc, TREE_TYPE (arg00),
4788 arg2));
4789 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4790 }
4791 break;
4792 case NE_EXPR:
4793 break;
4794 default:
4795 gcc_unreachable ();
4796 }
4797
4798 return NULL_TREE;
4799 }
4800
4801
4802 \f
4803 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4804 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4805 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4806 false) >= 2)
4807 #endif
4808
4809 /* EXP is some logical combination of boolean tests. See if we can
4810 merge it into some range test. Return the new tree if so. */
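/* As an illustration: "ch >= '0' && ch <= '9'" merges into the single
   range test (unsigned) (ch - '0') <= 9 (modulo the exact types
   used). */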
4811
4812 static tree
4813 fold_range_test (location_t loc, enum tree_code code, tree type,
4814 tree op0, tree op1)
4815 {
4816 int or_op = (code == TRUTH_ORIF_EXPR
4817 || code == TRUTH_OR_EXPR);
4818 int in0_p, in1_p, in_p;
4819 tree low0, low1, low, high0, high1, high;
4820 bool strict_overflow_p = false;
4821 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4822 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4823 tree tem;
4824 const char * const warnmsg = G_("assuming signed overflow does not occur "
4825 "when simplifying range test");
4826
4827 /* If this is an OR operation, invert both sides; we will invert
4828 again at the end. */
4829 if (or_op)
4830 in0_p = ! in0_p, in1_p = ! in1_p;
4831
4832 /* If both expressions are the same, if we can merge the ranges, and we
4833 can build the range test, return it or it inverted. If one of the
4834 ranges is always true or always false, consider it to be the same
4835 expression as the other. */
4836 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4837 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4838 in1_p, low1, high1)
4839 && 0 != (tem = (build_range_check (loc, type,
4840 lhs != 0 ? lhs
4841 : rhs != 0 ? rhs : integer_zero_node,
4842 in_p, low, high))))
4843 {
4844 if (strict_overflow_p)
4845 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4846 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4847 }
4848
4849 /* On machines where the branch cost is expensive, if this is a
4850 short-circuited branch and the underlying object on both sides
4851 is the same, make a non-short-circuit operation. */
4852 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4853 && lhs != 0 && rhs != 0
4854 && (code == TRUTH_ANDIF_EXPR
4855 || code == TRUTH_ORIF_EXPR)
4856 && operand_equal_p (lhs, rhs, 0))
4857 {
4858 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4859 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4860 which cases we can't do this. */
4861 if (simple_operand_p (lhs))
4862 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4863 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4864 type, op0, op1);
4865
4866 else if (!lang_hooks.decls.global_bindings_p ()
4867 && !CONTAINS_PLACEHOLDER_P (lhs))
4868 {
4869 tree common = save_expr (lhs);
4870
4871 if (0 != (lhs = build_range_check (loc, type, common,
4872 or_op ? ! in0_p : in0_p,
4873 low0, high0))
4874 && (0 != (rhs = build_range_check (loc, type, common,
4875 or_op ? ! in1_p : in1_p,
4876 low1, high1))))
4877 {
4878 if (strict_overflow_p)
4879 fold_overflow_warning (warnmsg,
4880 WARN_STRICT_OVERFLOW_COMPARISON);
4881 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4882 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4883 type, lhs, rhs);
4884 }
4885 }
4886 }
4887
4888 return 0;
4889 }
4890 \f
4891 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4892 bit value. Arrange things so the extra bits will be set to zero if and
4893 only if C is sign-extended to its full width. If MASK is nonzero,
4894 it is an INTEGER_CST that should be AND'ed with the extra bits. */
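/* A worked example (illustrative, assuming a 32-bit mode): for P == 8,
   the constant 0xffffffff (the sign-extension of the 8-bit value -1)
   maps to 0x000000ff, whose extra bits are zero, whereas the
   unextended 0x000000ff maps to 0xffffffff, whose extra bits are all
   set. */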
4895
4896 static tree
4897 unextend (tree c, int p, int unsignedp, tree mask)
4898 {
4899 tree type = TREE_TYPE (c);
4900 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4901 tree temp;
4902
4903 if (p == modesize || unsignedp)
4904 return c;
4905
4906 /* We work by getting just the sign bit into the low-order bit, then
4907 into the high-order bit, then sign-extend. We then XOR that value
4908 with C. */
4909 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4910 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4911
4912 /* We must use a signed type in order to get an arithmetic right shift.
4913 However, we must also avoid introducing accidental overflows, so that
4914 a subsequent call to integer_zerop will work. Hence we must
4915 do the type conversion here. At this point, the constant is either
4916 zero or one, and the conversion to a signed type can never overflow.
4917 We could get an overflow if this conversion is done anywhere else. */
4918 if (TYPE_UNSIGNED (type))
4919 temp = fold_convert (signed_type_for (type), temp);
4920
4921 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4922 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4923 if (mask != 0)
4924 temp = const_binop (BIT_AND_EXPR, temp,
4925 fold_convert (TREE_TYPE (c), mask));
4926 /* If necessary, convert the type back to match the type of C. */
4927 if (TYPE_UNSIGNED (type))
4928 temp = fold_convert (type, temp);
4929
4930 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4931 }
4932 \f
4933 /* For an expression that has the form
4934 (A && B) || ~B
4935 or
4936 (A || B) && ~B,
4937 we can drop one of the inner expressions and simplify to
4938 A || ~B
4939 or
4940 A && ~B
4941 LOC is the location of the resulting expression. OP is the inner
4942 logical operation; the left-hand side in the examples above, while CMPOP
4943 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4944 removing a condition that guards another, as in
4945 (A != NULL && A->...) || A == NULL
4946 which we must not transform. If RHS_ONLY is true, only eliminate the
4947 right-most operand of the inner logical operation. */
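/* For example (illustrative): "(a > 0 && b > 0) || b <= 0" simplifies
   to "a > 0 || b <= 0", because the dropped test b > 0 is exactly the
   inverse of the guarding comparison b <= 0. */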
4948
4949 static tree
4950 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4951 bool rhs_only)
4952 {
4953 tree type = TREE_TYPE (cmpop);
4954 enum tree_code code = TREE_CODE (cmpop);
4955 enum tree_code truthop_code = TREE_CODE (op);
4956 tree lhs = TREE_OPERAND (op, 0);
4957 tree rhs = TREE_OPERAND (op, 1);
4958 tree orig_lhs = lhs, orig_rhs = rhs;
4959 enum tree_code rhs_code = TREE_CODE (rhs);
4960 enum tree_code lhs_code = TREE_CODE (lhs);
4961 enum tree_code inv_code;
4962
4963 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4964 return NULL_TREE;
4965
4966 if (TREE_CODE_CLASS (code) != tcc_comparison)
4967 return NULL_TREE;
4968
4969 if (rhs_code == truthop_code)
4970 {
4971 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4972 if (newrhs != NULL_TREE)
4973 {
4974 rhs = newrhs;
4975 rhs_code = TREE_CODE (rhs);
4976 }
4977 }
4978 if (lhs_code == truthop_code && !rhs_only)
4979 {
4980 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4981 if (newlhs != NULL_TREE)
4982 {
4983 lhs = newlhs;
4984 lhs_code = TREE_CODE (lhs);
4985 }
4986 }
4987
4988 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4989 if (inv_code == rhs_code
4990 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4991 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4992 return lhs;
4993 if (!rhs_only && inv_code == lhs_code
4994 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
4995 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
4996 return rhs;
4997 if (rhs != orig_rhs || lhs != orig_lhs)
4998 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
4999 lhs, rhs);
5000 return NULL_TREE;
5001 }
5002
5003 /* Find ways of folding logical expressions of LHS and RHS:
5004 Try to merge two comparisons to the same innermost item.
5005 Look for range tests like "ch >= '0' && ch <= '9'".
5006 Look for combinations of simple terms on machines with expensive branches
5007 and evaluate the RHS unconditionally.
5008
5009 For example, if we have p->a == 2 && p->b == 4 and we can make an
5010 object large enough to span both A and B, we can do this with a comparison
5011 against the object ANDed with the a mask.
5012
5013 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5014 operations to do this with one comparison.
5015
5016 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5017 function and the one above.
5018
5019 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5020 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5021
5022 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5023 two operands.
5024
5025 We return the simplified tree or 0 if no optimization is possible. */
5026
5027 static tree
5028 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5029 tree lhs, tree rhs)
5030 {
5031 /* If this is the "or" of two comparisons, we can do something if
5032 the comparisons are NE_EXPR. If this is the "and", we can do something
5033 if the comparisons are EQ_EXPR. I.e.,
5034 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5035
5036 WANTED_CODE is this operation code. For single bit fields, we can
5037 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5038 comparison for one-bit fields. */
5039
5040 enum tree_code wanted_code;
5041 enum tree_code lcode, rcode;
5042 tree ll_arg, lr_arg, rl_arg, rr_arg;
5043 tree ll_inner, lr_inner, rl_inner, rr_inner;
5044 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5045 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5046 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5047 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5048 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5049 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5050 enum machine_mode lnmode, rnmode;
5051 tree ll_mask, lr_mask, rl_mask, rr_mask;
5052 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5053 tree l_const, r_const;
5054 tree lntype, rntype, result;
5055 HOST_WIDE_INT first_bit, end_bit;
5056 int volatilep;
5057 tree orig_lhs = lhs, orig_rhs = rhs;
5058 enum tree_code orig_code = code;
5059
5060 /* Start by getting the comparison codes. Fail if anything is volatile.
5061 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5062 it were compared against zero with a NE_EXPR. */
5063
5064 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5065 return 0;
5066
5067 lcode = TREE_CODE (lhs);
5068 rcode = TREE_CODE (rhs);
5069
5070 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5071 {
5072 lhs = build2 (NE_EXPR, truth_type, lhs,
5073 build_int_cst (TREE_TYPE (lhs), 0));
5074 lcode = NE_EXPR;
5075 }
5076
5077 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5078 {
5079 rhs = build2 (NE_EXPR, truth_type, rhs,
5080 build_int_cst (TREE_TYPE (rhs), 0));
5081 rcode = NE_EXPR;
5082 }
5083
5084 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5085 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5086 return 0;
5087
5088 ll_arg = TREE_OPERAND (lhs, 0);
5089 lr_arg = TREE_OPERAND (lhs, 1);
5090 rl_arg = TREE_OPERAND (rhs, 0);
5091 rr_arg = TREE_OPERAND (rhs, 1);
5092
5093 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5094 if (simple_operand_p (ll_arg)
5095 && simple_operand_p (lr_arg))
5096 {
5097 if (operand_equal_p (ll_arg, rl_arg, 0)
5098 && operand_equal_p (lr_arg, rr_arg, 0))
5099 {
5100 result = combine_comparisons (loc, code, lcode, rcode,
5101 truth_type, ll_arg, lr_arg);
5102 if (result)
5103 return result;
5104 }
5105 else if (operand_equal_p (ll_arg, rr_arg, 0)
5106 && operand_equal_p (lr_arg, rl_arg, 0))
5107 {
5108 result = combine_comparisons (loc, code, lcode,
5109 swap_tree_comparison (rcode),
5110 truth_type, ll_arg, lr_arg);
5111 if (result)
5112 return result;
5113 }
5114 }
5115
5116 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5117 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5118
5119 /* If the RHS can be evaluated unconditionally and its operands are
5120 simple, it wins to evaluate the RHS unconditionally on machines
5121 with expensive branches. In this case, this isn't a comparison
5122 that can be merged. Avoid doing this if the RHS is a floating-point
5123 comparison since those can trap. */
5124
5125 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5126 false) >= 2
5127 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5128 && simple_operand_p (rl_arg)
5129 && simple_operand_p (rr_arg))
5130 {
5131 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5132 if (code == TRUTH_OR_EXPR
5133 && lcode == NE_EXPR && integer_zerop (lr_arg)
5134 && rcode == NE_EXPR && integer_zerop (rr_arg)
5135 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5136 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5137 return build2_loc (loc, NE_EXPR, truth_type,
5138 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5139 ll_arg, rl_arg),
5140 build_int_cst (TREE_TYPE (ll_arg), 0));
5141
5142 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5143 if (code == TRUTH_AND_EXPR
5144 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5145 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5146 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5147 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5148 return build2_loc (loc, EQ_EXPR, truth_type,
5149 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5150 ll_arg, rl_arg),
5151 build_int_cst (TREE_TYPE (ll_arg), 0));
5152
5153 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5154 {
5155 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5156 return build2_loc (loc, code, truth_type, lhs, rhs);
5157 return NULL_TREE;
5158 }
5159 }
5160
5161 /* See if the comparisons can be merged. Then get all the parameters for
5162 each side. */
5163
5164 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5165 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5166 return 0;
5167
5168 volatilep = 0;
5169 ll_inner = decode_field_reference (loc, ll_arg,
5170 &ll_bitsize, &ll_bitpos, &ll_mode,
5171 &ll_unsignedp, &volatilep, &ll_mask,
5172 &ll_and_mask);
5173 lr_inner = decode_field_reference (loc, lr_arg,
5174 &lr_bitsize, &lr_bitpos, &lr_mode,
5175 &lr_unsignedp, &volatilep, &lr_mask,
5176 &lr_and_mask);
5177 rl_inner = decode_field_reference (loc, rl_arg,
5178 &rl_bitsize, &rl_bitpos, &rl_mode,
5179 &rl_unsignedp, &volatilep, &rl_mask,
5180 &rl_and_mask);
5181 rr_inner = decode_field_reference (loc, rr_arg,
5182 &rr_bitsize, &rr_bitpos, &rr_mode,
5183 &rr_unsignedp, &volatilep, &rr_mask,
5184 &rr_and_mask);
5185
5186 /* The inner operation on the lhs of each comparison must be the
5187 same if we are to be able to do anything.
5188 Then see if we have constants. If not, the same must be true for
5189 the rhs's. */
5190 if (volatilep || ll_inner == 0 || rl_inner == 0
5191 || ! operand_equal_p (ll_inner, rl_inner, 0))
5192 return 0;
5193
5194 if (TREE_CODE (lr_arg) == INTEGER_CST
5195 && TREE_CODE (rr_arg) == INTEGER_CST)
5196 l_const = lr_arg, r_const = rr_arg;
5197 else if (lr_inner == 0 || rr_inner == 0
5198 || ! operand_equal_p (lr_inner, rr_inner, 0))
5199 return 0;
5200 else
5201 l_const = r_const = 0;
5202
5203 /* If either comparison code is not correct for our logical operation,
5204 fail. However, we can convert a one-bit comparison against zero into
5205 the opposite comparison against that bit being set in the field. */
5206
5207 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5208 if (lcode != wanted_code)
5209 {
5210 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5211 {
5212 /* Make the left operand unsigned, since we are only interested
5213 in the value of one bit. Otherwise we are doing the wrong
5214 thing below. */
5215 ll_unsignedp = 1;
5216 l_const = ll_mask;
5217 }
5218 else
5219 return 0;
5220 }
5221
5222 /* This is analogous to the code for l_const above. */
5223 if (rcode != wanted_code)
5224 {
5225 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5226 {
5227 rl_unsignedp = 1;
5228 r_const = rl_mask;
5229 }
5230 else
5231 return 0;
5232 }
5233
5234 /* See if we can find a mode that contains both fields being compared on
5235 the left. If we can't, fail. Otherwise, update all constants and masks
5236 to be relative to a field of that size. */
5237 first_bit = MIN (ll_bitpos, rl_bitpos);
5238 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5239 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5240 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5241 volatilep);
5242 if (lnmode == VOIDmode)
5243 return 0;
5244
5245 lnbitsize = GET_MODE_BITSIZE (lnmode);
5246 lnbitpos = first_bit & ~ (lnbitsize - 1);
5247 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5248 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5249
5250 if (BYTES_BIG_ENDIAN)
5251 {
5252 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5253 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5254 }
5255
5256 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5257 size_int (xll_bitpos));
5258 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5259 size_int (xrl_bitpos));
5260
5261 if (l_const)
5262 {
5263 l_const = fold_convert_loc (loc, lntype, l_const);
5264 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5265 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5266 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5267 fold_build1_loc (loc, BIT_NOT_EXPR,
5268 lntype, ll_mask))))
5269 {
5270 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5271
5272 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5273 }
5274 }
5275 if (r_const)
5276 {
5277 r_const = fold_convert_loc (loc, lntype, r_const);
5278 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5279 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5280 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5281 fold_build1_loc (loc, BIT_NOT_EXPR,
5282 lntype, rl_mask))))
5283 {
5284 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5285
5286 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5287 }
5288 }
5289
5290 /* If the right sides are not constant, do the same for them. Also,
5291 disallow this optimization if a size or signedness mismatch occurs
5292 between the left and right sides. */
5293 if (l_const == 0)
5294 {
5295 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5296 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5297 /* Make sure the two fields on the right
5298 correspond to the left without being swapped. */
5299 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5300 return 0;
5301
5302 first_bit = MIN (lr_bitpos, rr_bitpos);
5303 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5304 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5305 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5306 volatilep);
5307 if (rnmode == VOIDmode)
5308 return 0;
5309
5310 rnbitsize = GET_MODE_BITSIZE (rnmode);
5311 rnbitpos = first_bit & ~ (rnbitsize - 1);
5312 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5313 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5314
5315 if (BYTES_BIG_ENDIAN)
5316 {
5317 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5318 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5319 }
5320
5321 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5322 rntype, lr_mask),
5323 size_int (xlr_bitpos));
5324 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5325 rntype, rr_mask),
5326 size_int (xrr_bitpos));
5327
5328 /* Make a mask that corresponds to both fields being compared.
5329 Do this for both items being compared. If the operands are the
5330 same size and the bits being compared are in the same position
5331 then we can do this by masking both and comparing the masked
5332 results. */
5333 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5334 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5335 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5336 {
5337 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5338 ll_unsignedp || rl_unsignedp);
5339 if (! all_ones_mask_p (ll_mask, lnbitsize))
5340 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5341
5342 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5343 lr_unsignedp || rr_unsignedp);
5344 if (! all_ones_mask_p (lr_mask, rnbitsize))
5345 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5346
5347 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5348 }
5349
5350 /* There is still another way we can do something: If both pairs of
5351 fields being compared are adjacent, we may be able to make a wider
5352 field containing them both.
5353
5354 Note that we still must mask the lhs/rhs expressions. Furthermore,
5355 the mask must be shifted to account for the shift done by
5356 make_bit_field_ref. */
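/* A sketch of the adjacent case (hypothetical layout): with

     struct s { unsigned a : 4; unsigned b : 4; } x, y;

   the test x.a == y.a && x.b == y.b compares two adjacent 4-bit
   fields on each side, so each side can be fetched as one 8-bit field
   and the whole conjunction done in a single comparison.  */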
5357 if ((ll_bitsize + ll_bitpos == rl_bitpos
5358 && lr_bitsize + lr_bitpos == rr_bitpos)
5359 || (ll_bitpos == rl_bitpos + rl_bitsize
5360 && lr_bitpos == rr_bitpos + rr_bitsize))
5361 {
5362 tree type;
5363
5364 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5365 ll_bitsize + rl_bitsize,
5366 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5367 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5368 lr_bitsize + rr_bitsize,
5369 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5370
5371 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5372 size_int (MIN (xll_bitpos, xrl_bitpos)));
5373 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5374 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5375
5376 /* Convert to the smaller type before masking out unwanted bits. */
5377 type = lntype;
5378 if (lntype != rntype)
5379 {
5380 if (lnbitsize > rnbitsize)
5381 {
5382 lhs = fold_convert_loc (loc, rntype, lhs);
5383 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5384 type = rntype;
5385 }
5386 else if (lnbitsize < rnbitsize)
5387 {
5388 rhs = fold_convert_loc (loc, lntype, rhs);
5389 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5390 type = lntype;
5391 }
5392 }
5393
5394 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5395 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5396
5397 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5398 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5399
5400 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5401 }
5402
5403 return 0;
5404 }
5405
5406 /* Handle the case of comparisons with constants. If there is something in
5407 common between the masks, those bits of the constants must be the same.
5408 If not, the condition is always false. Test for this to avoid generating
5409 incorrect code below. */
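/* For example, (x & 3) == 1 && (x & 5) == 4 shares bit 0 between the
   two masks but requires that bit to be both 1 and 0, so the
   conjunction is always false; the test below catches exactly this
   situation on the already-decoded masks and constants.  */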
5410 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5411 if (! integer_zerop (result)
5412 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5413 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5414 {
5415 if (wanted_code == NE_EXPR)
5416 {
5417 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5418 return constant_boolean_node (true, truth_type);
5419 }
5420 else
5421 {
5422 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5423 return constant_boolean_node (false, truth_type);
5424 }
5425 }
5426
5427 /* Construct the expression we will return. First get the component
5428 reference we will make. Unless the mask is all ones the width of
5429 that field, perform the mask operation. Then compare with the
5430 merged constant. */
5431 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5432 ll_unsignedp || rl_unsignedp);
5433
5434 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5435 if (! all_ones_mask_p (ll_mask, lnbitsize))
5436 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5437
5438 return build2_loc (loc, wanted_code, truth_type, result,
5439 const_binop (BIT_IOR_EXPR, l_const, r_const));
5440 }
5441 \f
5442 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5443 constant. */
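/* A few concrete instances of the folds performed below:

     MAX (x, 4) == 3  ->  false      (the maximum is never below 4)
     MAX (x, 4) == 9  ->  x == 9
     MIN (x, 4) > 7   ->  false      (the minimum is never above 4)
     MIN (x, 4) > 2   ->  x > 2  */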
5444
5445 static tree
5446 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5447 tree op0, tree op1)
5448 {
5449 tree arg0 = op0;
5450 enum tree_code op_code;
5451 tree comp_const;
5452 tree minmax_const;
5453 int consts_equal, consts_lt;
5454 tree inner;
5455
5456 STRIP_SIGN_NOPS (arg0);
5457
5458 op_code = TREE_CODE (arg0);
5459 minmax_const = TREE_OPERAND (arg0, 1);
5460 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5461 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5462 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5463 inner = TREE_OPERAND (arg0, 0);
5464
5465 /* If something does not permit us to optimize, return the original tree. */
5466 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5467 || TREE_CODE (comp_const) != INTEGER_CST
5468 || TREE_OVERFLOW (comp_const)
5469 || TREE_CODE (minmax_const) != INTEGER_CST
5470 || TREE_OVERFLOW (minmax_const))
5471 return NULL_TREE;
5472
5473 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5474 and GT_EXPR, doing the rest with recursive calls using logical
5475 simplifications. */
5476 switch (code)
5477 {
5478 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5479 {
5480 tree tem
5481 = optimize_minmax_comparison (loc,
5482 invert_tree_comparison (code, false),
5483 type, op0, op1);
5484 if (tem)
5485 return invert_truthvalue_loc (loc, tem);
5486 return NULL_TREE;
5487 }
5488
5489 case GE_EXPR:
5490 return
5491 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5492 optimize_minmax_comparison
5493 (loc, EQ_EXPR, type, arg0, comp_const),
5494 optimize_minmax_comparison
5495 (loc, GT_EXPR, type, arg0, comp_const));
5496
5497 case EQ_EXPR:
5498 if (op_code == MAX_EXPR && consts_equal)
5499 /* MAX (X, 0) == 0 -> X <= 0 */
5500 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5501
5502 else if (op_code == MAX_EXPR && consts_lt)
5503 /* MAX (X, 0) == 5 -> X == 5 */
5504 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5505
5506 else if (op_code == MAX_EXPR)
5507 /* MAX (X, 0) == -1 -> false */
5508 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5509
5510 else if (consts_equal)
5511 /* MIN (X, 0) == 0 -> X >= 0 */
5512 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5513
5514 else if (consts_lt)
5515 /* MIN (X, 0) == 5 -> false */
5516 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5517
5518 else
5519 /* MIN (X, 0) == -1 -> X == -1 */
5520 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5521
5522 case GT_EXPR:
5523 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5524 /* MAX (X, 0) > 0 -> X > 0
5525 MAX (X, 0) > 5 -> X > 5 */
5526 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5527
5528 else if (op_code == MAX_EXPR)
5529 /* MAX (X, 0) > -1 -> true */
5530 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5531
5532 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5533 /* MIN (X, 0) > 0 -> false
5534 MIN (X, 0) > 5 -> false */
5535 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5536
5537 else
5538 /* MIN (X, 0) > -1 -> X > -1 */
5539 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5540
5541 default:
5542 return NULL_TREE;
5543 }
5544 }
5545 \f
5546 /* T is an integer expression that is being multiplied, divided, or taken a
5547 modulus (CODE says which and what kind of divide or modulus) by a
5548 constant C. See if we can eliminate that operation by folding it with
5549 other operations already in T. WIDE_TYPE, if non-null, is a type that
5550 should be used for the computation if wider than our type.
5551
5552 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5553 (X * 2) + (Y * 4). We must, however, be assured that either the original
5554 expression would not overflow or that overflow is undefined for the type
5555 in the language in question.
5556
5557 If we return a non-null expression, it is an equivalent form of the
5558 original computation, but need not be in the original type.
5559
5560 We set *STRICT_OVERFLOW_P to true if the return values depends on
5561 signed overflow being undefined. Otherwise we do not change
5562 *STRICT_OVERFLOW_P. */
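/* For example (assuming signed overflow is undefined for the type),
   (X * 4) / 4 simplifies to plain X; since a wrapping multiplication
   could otherwise change the quotient, *STRICT_OVERFLOW_P is set
   whenever the fold relies on that assumption.  */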
5563
5564 static tree
5565 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5566 bool *strict_overflow_p)
5567 {
5568 /* To avoid exponential search depth, refuse to allow recursion past
5569 three levels. Beyond that (1) it's highly unlikely that we'll find
5570 something interesting and (2) we've probably processed it before
5571 when we built the inner expression. */
5572
5573 static int depth;
5574 tree ret;
5575
5576 if (depth > 3)
5577 return NULL;
5578
5579 depth++;
5580 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5581 depth--;
5582
5583 return ret;
5584 }
5585
5586 static tree
5587 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5588 bool *strict_overflow_p)
5589 {
5590 tree type = TREE_TYPE (t);
5591 enum tree_code tcode = TREE_CODE (t);
5592 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5593 > GET_MODE_SIZE (TYPE_MODE (type)))
5594 ? wide_type : type);
5595 tree t1, t2;
5596 int same_p = tcode == code;
5597 tree op0 = NULL_TREE, op1 = NULL_TREE;
5598 bool sub_strict_overflow_p;
5599
5600 /* Don't deal with constants of zero here; they confuse the code below. */
5601 if (integer_zerop (c))
5602 return NULL_TREE;
5603
5604 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5605 op0 = TREE_OPERAND (t, 0);
5606
5607 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5608 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5609
5610 /* Note that we need not handle conditional operations here since fold
5611 already handles those cases. So just do arithmetic here. */
5612 switch (tcode)
5613 {
5614 case INTEGER_CST:
5615 /* For a constant, we can always simplify if we are a multiply
5616 or (for divide and modulus) if it is a multiple of our constant. */
5617 if (code == MULT_EXPR
5618 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5619 return const_binop (code, fold_convert (ctype, t),
5620 fold_convert (ctype, c));
5621 break;
5622
5623 CASE_CONVERT: case NON_LVALUE_EXPR:
5624 /* If op0 is an expression ... */
5625 if ((COMPARISON_CLASS_P (op0)
5626 || UNARY_CLASS_P (op0)
5627 || BINARY_CLASS_P (op0)
5628 || VL_EXP_CLASS_P (op0)
5629 || EXPRESSION_CLASS_P (op0))
5630 /* ... and has wrapping overflow, and its type is smaller
5631 than ctype, then we cannot pass through as widening. */
5632 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5633 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5634 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5635 && (TYPE_PRECISION (ctype)
5636 > TYPE_PRECISION (TREE_TYPE (op0))))
5637 /* ... or this is a truncation (t is narrower than op0),
5638 then we cannot pass through this narrowing. */
5639 || (TYPE_PRECISION (type)
5640 < TYPE_PRECISION (TREE_TYPE (op0)))
5641 /* ... or signedness changes for division or modulus,
5642 then we cannot pass through this conversion. */
5643 || (code != MULT_EXPR
5644 && (TYPE_UNSIGNED (ctype)
5645 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5646 /* ... or has undefined overflow while the converted to
5647 type has not, we cannot do the operation in the inner type
5648 as that would introduce undefined overflow. */
5649 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5650 && !TYPE_OVERFLOW_UNDEFINED (type))))
5651 break;
5652
5653 /* Pass the constant down and see if we can make a simplification. If
5654 we can, replace this expression with the inner simplification for
5655 possible later conversion to our or some other type. */
5656 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5657 && TREE_CODE (t2) == INTEGER_CST
5658 && !TREE_OVERFLOW (t2)
5659 && (0 != (t1 = extract_muldiv (op0, t2, code,
5660 code == MULT_EXPR
5661 ? ctype : NULL_TREE,
5662 strict_overflow_p))))
5663 return t1;
5664 break;
5665
5666 case ABS_EXPR:
5667 /* If widening the type changes it from signed to unsigned, then we
5668 must avoid building ABS_EXPR itself as unsigned. */
5669 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5670 {
5671 tree cstype = (*signed_type_for) (ctype);
5672 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5673 != 0)
5674 {
5675 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5676 return fold_convert (ctype, t1);
5677 }
5678 break;
5679 }
5680 /* If the constant is negative, we cannot simplify this. */
5681 if (tree_int_cst_sgn (c) == -1)
5682 break;
5683 /* FALLTHROUGH */
5684 case NEGATE_EXPR:
5685 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5686 != 0)
5687 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5688 break;
5689
5690 case MIN_EXPR: case MAX_EXPR:
5691 /* If widening the type changes the signedness, then we can't perform
5692 this optimization as that changes the result. */
5693 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5694 break;
5695
5696 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5697 sub_strict_overflow_p = false;
5698 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5699 &sub_strict_overflow_p)) != 0
5700 && (t2 = extract_muldiv (op1, c, code, wide_type,
5701 &sub_strict_overflow_p)) != 0)
5702 {
5703 if (tree_int_cst_sgn (c) < 0)
5704 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5705 if (sub_strict_overflow_p)
5706 *strict_overflow_p = true;
5707 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5708 fold_convert (ctype, t2));
5709 }
5710 break;
5711
5712 case LSHIFT_EXPR: case RSHIFT_EXPR:
5713 /* If the second operand is constant, this is a multiplication
5714 or floor division by a power of two, so we can treat it that
5715 way unless the multiplier or divisor overflows. Signed
5716 left-shift overflow is implementation-defined rather than
5717 undefined in C90, so do not convert signed left shift into
5718 multiplication. */
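/* For example, with unsigned X the tree X >> 2 is treated here as
   X / 4 (floor division) and X << 3 as X * 8; signed left shifts are
   deliberately left alone for the C90 reason given above.  */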
5719 if (TREE_CODE (op1) == INTEGER_CST
5720 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5721 /* const_binop may not detect overflow correctly,
5722 so check for it explicitly here. */
5723 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5724 && TREE_INT_CST_HIGH (op1) == 0
5725 && 0 != (t1 = fold_convert (ctype,
5726 const_binop (LSHIFT_EXPR,
5727 size_one_node,
5728 op1)))
5729 && !TREE_OVERFLOW (t1))
5730 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5731 ? MULT_EXPR : FLOOR_DIV_EXPR,
5732 ctype,
5733 fold_convert (ctype, op0),
5734 t1),
5735 c, code, wide_type, strict_overflow_p);
5736 break;
5737
5738 case PLUS_EXPR: case MINUS_EXPR:
5739 /* See if we can eliminate the operation on both sides. If we can, we
5740 can return a new PLUS or MINUS. If we can't, the only remaining
5741 cases where we can do anything are if the second operand is a
5742 constant. */
5743 sub_strict_overflow_p = false;
5744 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5745 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5746 if (t1 != 0 && t2 != 0
5747 && (code == MULT_EXPR
5748 /* If not multiplication, we can only do this if both operands
5749 are divisible by c. */
5750 || (multiple_of_p (ctype, op0, c)
5751 && multiple_of_p (ctype, op1, c))))
5752 {
5753 if (sub_strict_overflow_p)
5754 *strict_overflow_p = true;
5755 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5756 fold_convert (ctype, t2));
5757 }
5758
5759 /* If this was a subtraction, negate OP1 and set it to be an addition.
5760 This simplifies the logic below. */
5761 if (tcode == MINUS_EXPR)
5762 {
5763 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5764 /* If OP1 was not easily negatable, the constant may be OP0. */
5765 if (TREE_CODE (op0) == INTEGER_CST)
5766 {
5767 tree tem = op0;
5768 op0 = op1;
5769 op1 = tem;
5770 tem = t1;
5771 t1 = t2;
5772 t2 = tem;
5773 }
5774 }
5775
5776 if (TREE_CODE (op1) != INTEGER_CST)
5777 break;
5778
5779 /* If either OP1 or C are negative, this optimization is not safe for
5780 some of the division and remainder types while for others we need
5781 to change the code. */
5782 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5783 {
5784 if (code == CEIL_DIV_EXPR)
5785 code = FLOOR_DIV_EXPR;
5786 else if (code == FLOOR_DIV_EXPR)
5787 code = CEIL_DIV_EXPR;
5788 else if (code != MULT_EXPR
5789 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5790 break;
5791 }
5792
5793 /* If it's a multiply or a division/modulus operation of a multiple
5794 of our constant, do the operation and verify it doesn't overflow. */
5795 if (code == MULT_EXPR
5796 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5797 {
5798 op1 = const_binop (code, fold_convert (ctype, op1),
5799 fold_convert (ctype, c));
5800 /* We allow the constant to overflow with wrapping semantics. */
5801 if (op1 == 0
5802 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5803 break;
5804 }
5805 else
5806 break;
5807
5808 /* If we have an unsigned type that is not a sizetype, we cannot widen
5809 the operation since it will change the result if the original
5810 computation overflowed. */
5811 if (TYPE_UNSIGNED (ctype)
5812 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5813 && ctype != type)
5814 break;
5815
5816 /* If we were able to eliminate our operation from the first side,
5817 apply our operation to the second side and reform the PLUS. */
5818 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5819 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5820
5821 /* The last case is if we are a multiply. In that case, we can
5822 apply the distributive law to commute the multiply and addition
5823 if the multiplication of the constants doesn't overflow. */
5824 if (code == MULT_EXPR)
5825 return fold_build2 (tcode, ctype,
5826 fold_build2 (code, ctype,
5827 fold_convert (ctype, op0),
5828 fold_convert (ctype, c)),
5829 op1);
5830
5831 break;
5832
5833 case MULT_EXPR:
5834 /* We have a special case here if we are doing something like
5835 (C * 8) % 4 since we know that's zero. */
5836 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5837 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5838 /* If the multiplication can overflow we cannot optimize this.
5839 ??? Until we can properly mark individual operations as
5840 not overflowing we need to treat sizetype special here as
5841 stor-layout relies on this optimization to make
5842 DECL_FIELD_BIT_OFFSET always a constant. */
5843 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5844 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5845 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5846 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5847 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5848 {
5849 *strict_overflow_p = true;
5850 return omit_one_operand (type, integer_zero_node, op0);
5851 }
5852
5853 /* ... fall through ... */
5854
5855 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5856 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5857 /* If we can extract our operation from the LHS, do so and return a
5858 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5859 do something only if the second operand is a constant. */
5860 if (same_p
5861 && (t1 = extract_muldiv (op0, c, code, wide_type,
5862 strict_overflow_p)) != 0)
5863 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5864 fold_convert (ctype, op1));
5865 else if (tcode == MULT_EXPR && code == MULT_EXPR
5866 && (t1 = extract_muldiv (op1, c, code, wide_type,
5867 strict_overflow_p)) != 0)
5868 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5869 fold_convert (ctype, t1));
5870 else if (TREE_CODE (op1) != INTEGER_CST)
5871 return 0;
5872
5873 /* If these are the same operation types, we can associate them
5874 assuming no overflow. */
5875 if (tcode == code)
5876 {
5877 double_int mul;
5878 int overflow_p;
5879 mul = double_int_mul_with_sign
5880 (double_int_ext
5881 (tree_to_double_int (op1),
5882 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5883 double_int_ext
5884 (tree_to_double_int (c),
5885 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5886 false, &overflow_p);
5887 overflow_p = (((!TYPE_UNSIGNED (ctype)
5888 || (TREE_CODE (ctype) == INTEGER_TYPE
5889 && TYPE_IS_SIZETYPE (ctype)))
5890 && overflow_p)
5891 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5892 if (!double_int_fits_to_tree_p (ctype, mul)
5893 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5894 || !TYPE_UNSIGNED (ctype)
5895 || (TREE_CODE (ctype) == INTEGER_TYPE
5896 && TYPE_IS_SIZETYPE (ctype))))
5897 overflow_p = 1;
5898 if (!overflow_p)
5899 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5900 double_int_to_tree (ctype, mul));
5901 }
5902
5903 /* If these operations "cancel" each other, we have the main
5904 optimizations of this pass, which occur when either constant is a
5905 multiple of the other, in which case we replace this with either an
5906 operation of CODE or TCODE.
5907
5908 If we have an unsigned type that is not a sizetype, we cannot do
5909 this since it will change the result if the original computation
5910 overflowed. */
5911 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5912 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5913 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5914 || (tcode == MULT_EXPR
5915 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5916 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5917 && code != MULT_EXPR)))
5918 {
5919 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5920 {
5921 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5922 *strict_overflow_p = true;
5923 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5924 fold_convert (ctype,
5925 const_binop (TRUNC_DIV_EXPR,
5926 op1, c)));
5927 }
5928 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5929 {
5930 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5931 *strict_overflow_p = true;
5932 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5933 fold_convert (ctype,
5934 const_binop (TRUNC_DIV_EXPR,
5935 c, op1)));
5936 }
5937 }
5938 break;
5939
5940 default:
5941 break;
5942 }
5943
5944 return 0;
5945 }
5946 \f
5947 /* Return a node which has the indicated constant VALUE (either 0 or
5948 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5949 and is of the indicated TYPE. */
5950
5951 tree
5952 constant_boolean_node (bool value, tree type)
5953 {
5954 if (type == integer_type_node)
5955 return value ? integer_one_node : integer_zero_node;
5956 else if (type == boolean_type_node)
5957 return value ? boolean_true_node : boolean_false_node;
5958 else if (TREE_CODE (type) == VECTOR_TYPE)
5959 return build_vector_from_val (type,
5960 build_int_cst (TREE_TYPE (type),
5961 value ? -1 : 0));
5962 else
5963 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5964 }
5965
5966
5967 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5968 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5969 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5970 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5971 COND is the first argument to CODE; otherwise (as in the example
5972 given here), it is the second argument. TYPE is the type of the
5973 original expression. Return NULL_TREE if no simplification is
5974 possible. */
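/* For example, 2 + (b ? 3 : 4) becomes b ? 5 : 6: pushing the
   addition into both arms lets each arm fold to a constant, which is
   the profitability condition checked below.  */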
5975
5976 static tree
5977 fold_binary_op_with_conditional_arg (location_t loc,
5978 enum tree_code code,
5979 tree type, tree op0, tree op1,
5980 tree cond, tree arg, int cond_first_p)
5981 {
5982 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5983 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5984 tree test, true_value, false_value;
5985 tree lhs = NULL_TREE;
5986 tree rhs = NULL_TREE;
5987
5988 if (TREE_CODE (cond) == COND_EXPR)
5989 {
5990 test = TREE_OPERAND (cond, 0);
5991 true_value = TREE_OPERAND (cond, 1);
5992 false_value = TREE_OPERAND (cond, 2);
5993 /* If this operand throws an exception, then it does not make
5994 sense to try to perform a logical or arithmetic operation
5995 involving it. */
5996 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5997 lhs = true_value;
5998 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5999 rhs = false_value;
6000 }
6001 else
6002 {
6003 tree testtype = TREE_TYPE (cond);
6004 test = cond;
6005 true_value = constant_boolean_node (true, testtype);
6006 false_value = constant_boolean_node (false, testtype);
6007 }
6008
6009 /* This transformation is only worthwhile if we don't have to wrap ARG
6010 in a SAVE_EXPR and the operation can be simplified on at least one
6011 of the branches once it is pushed inside the COND_EXPR. */
6012 if (!TREE_CONSTANT (arg)
6013 && (TREE_SIDE_EFFECTS (arg)
6014 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6015 return NULL_TREE;
6016
6017 arg = fold_convert_loc (loc, arg_type, arg);
6018 if (lhs == 0)
6019 {
6020 true_value = fold_convert_loc (loc, cond_type, true_value);
6021 if (cond_first_p)
6022 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6023 else
6024 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6025 }
6026 if (rhs == 0)
6027 {
6028 false_value = fold_convert_loc (loc, cond_type, false_value);
6029 if (cond_first_p)
6030 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6031 else
6032 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6033 }
6034
6035 /* Check that we have simplified at least one of the branches. */
6036 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6037 return NULL_TREE;
6038
6039 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6040 }
6041
6042 \f
6043 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6044
6045 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6046 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6047 ADDEND is the same as X.
6048
6049 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6050 and finite. The problematic cases are when X is zero, and its mode
6051 has signed zeros. In the case of rounding towards -infinity,
6052 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6053 modes, X + 0 is not the same as X because -0 + 0 is +0. */
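/* Concretely, under IEEE semantics with the default rounding mode,
   -0.0 + 0.0 yields +0.0, so X + 0.0 can flip the sign of a zero X,
   while -0.0 - 0.0 yields -0.0 and X - 0.0 preserves X; when rounding
   towards -infinity, however, +0.0 - 0.0 yields -0.0, which is why
   the sign-dependent-rounding check below is required.  */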
6054
6055 bool
6056 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6057 {
6058 if (!real_zerop (addend))
6059 return false;
6060
6061 /* Don't allow the fold with -fsignaling-nans. */
6062 if (HONOR_SNANS (TYPE_MODE (type)))
6063 return false;
6064
6065 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6066 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6067 return true;
6068
6069 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6070 if (TREE_CODE (addend) == REAL_CST
6071 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6072 negate = !negate;
6073
6074 /* The mode has signed zeros, and we have to honor their sign.
6075 In this situation, there is only one case we can return true for.
6076 X - 0 is the same as X unless rounding towards -infinity is
6077 supported. */
6078 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6079 }
6080
6081 /* Subroutine of fold() that checks comparisons of built-in math
6082 functions against real constants.
6083
6084 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6085 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6086 is the type of the result and ARG0 and ARG1 are the operands of the
6087 comparison. ARG1 must be a TREE_REAL_CST.
6088
6089 The function returns the constant folded tree if a simplification
6090 can be made, and NULL_TREE otherwise. */
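/* For example, assuming c*c stays finite in the type, sqrt(x) > 2.0
   folds to x > 4.0 below; and for a negative constant, a test such as
   sqrt(x) < -1.0 folds to constant false, since sqrt never yields a
   value below zero.  */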
6091
6092 static tree
6093 fold_mathfn_compare (location_t loc,
6094 enum built_in_function fcode, enum tree_code code,
6095 tree type, tree arg0, tree arg1)
6096 {
6097 REAL_VALUE_TYPE c;
6098
6099 if (BUILTIN_SQRT_P (fcode))
6100 {
6101 tree arg = CALL_EXPR_ARG (arg0, 0);
6102 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6103
6104 c = TREE_REAL_CST (arg1);
6105 if (REAL_VALUE_NEGATIVE (c))
6106 {
6107 /* sqrt(x) < y (likewise == y and <= y) is always false, if y is negative. */
6108 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6109 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6110
6111 /* sqrt(x) > y is always true, if y is negative and we
6112 don't care about NaNs, i.e. negative values of x. */
6113 if (code == NE_EXPR || !HONOR_NANS (mode))
6114 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6115
6116 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6117 return fold_build2_loc (loc, GE_EXPR, type, arg,
6118 build_real (TREE_TYPE (arg), dconst0));
6119 }
6120 else if (code == GT_EXPR || code == GE_EXPR)
6121 {
6122 REAL_VALUE_TYPE c2;
6123
6124 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6125 real_convert (&c2, mode, &c2);
6126
6127 if (REAL_VALUE_ISINF (c2))
6128 {
6129 /* sqrt(x) > y is x == +Inf, when y is very large. */
6130 if (HONOR_INFINITIES (mode))
6131 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6132 build_real (TREE_TYPE (arg), c2));
6133
6134 /* sqrt(x) > y is always false, when y is very large
6135 and we don't care about infinities. */
6136 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6137 }
6138
6139 /* sqrt(x) > c is the same as x > c*c. */
6140 return fold_build2_loc (loc, code, type, arg,
6141 build_real (TREE_TYPE (arg), c2));
6142 }
6143 else if (code == LT_EXPR || code == LE_EXPR)
6144 {
6145 REAL_VALUE_TYPE c2;
6146
6147 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6148 real_convert (&c2, mode, &c2);
6149
6150 if (REAL_VALUE_ISINF (c2))
6151 {
6152 /* sqrt(x) < y is always true, when y is a very large
6153 value and we don't care about NaNs or Infinities. */
6154 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6155 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6156
6157 /* sqrt(x) < y is x != +Inf when y is very large and we
6158 don't care about NaNs. */
6159 if (! HONOR_NANS (mode))
6160 return fold_build2_loc (loc, NE_EXPR, type, arg,
6161 build_real (TREE_TYPE (arg), c2));
6162
6163 /* sqrt(x) < y is x >= 0 when y is very large and we
6164 don't care about Infinities. */
6165 if (! HONOR_INFINITIES (mode))
6166 return fold_build2_loc (loc, GE_EXPR, type, arg,
6167 build_real (TREE_TYPE (arg), dconst0));
6168
6169 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6170 arg = save_expr (arg);
6171 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6172 fold_build2_loc (loc, GE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg),
6174 dconst0)),
6175 fold_build2_loc (loc, NE_EXPR, type, arg,
6176 build_real (TREE_TYPE (arg),
6177 c2)));
6178 }
6179
6180 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6181 if (! HONOR_NANS (mode))
6182 return fold_build2_loc (loc, code, type, arg,
6183 build_real (TREE_TYPE (arg), c2));
6184
6185 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6186 arg = save_expr (arg);
6187 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6188 fold_build2_loc (loc, GE_EXPR, type, arg,
6189 build_real (TREE_TYPE (arg),
6190 dconst0)),
6191 fold_build2_loc (loc, code, type, arg,
6192 build_real (TREE_TYPE (arg),
6193 c2)));
6194 }
6195 }
6196
6197 return NULL_TREE;
6198 }
6199
6200 /* Subroutine of fold() that optimizes comparisons against Infinities,
6201 either +Inf or -Inf.
6202
6203 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6204 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6205 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6206
6207 The function returns the constant folded tree if a simplification
6208 can be made, and NULL_TREE otherwise. */
6209
6210 static tree
6211 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6212 tree arg0, tree arg1)
6213 {
6214 enum machine_mode mode;
6215 REAL_VALUE_TYPE max;
6216 tree temp;
6217 bool neg;
6218
6219 mode = TYPE_MODE (TREE_TYPE (arg0));
6220
6221 /* For negative infinity swap the sense of the comparison. */
6222 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6223 if (neg)
6224 code = swap_tree_comparison (code);
6225
6226 switch (code)
6227 {
6228 case GT_EXPR:
6229 /* x > +Inf is always false, if we ignore sNaNs. */
6230 if (HONOR_SNANS (mode))
6231 return NULL_TREE;
6232 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6233
6234 case LE_EXPR:
6235 /* x <= +Inf is always true, if we don't care about NaNs. */
6236 if (! HONOR_NANS (mode))
6237 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6238
6239 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6240 arg0 = save_expr (arg0);
6241 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6242
6243 case EQ_EXPR:
6244 case GE_EXPR:
6245 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6246 real_maxval (&max, neg, mode);
6247 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6248 arg0, build_real (TREE_TYPE (arg0), max));
6249
6250 case LT_EXPR:
6251 /* x < +Inf is always equal to x <= DBL_MAX. */
6252 real_maxval (&max, neg, mode);
6253 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6254 arg0, build_real (TREE_TYPE (arg0), max));
6255
6256 case NE_EXPR:
6257 /* x != +Inf is always equal to !(x > DBL_MAX). */
6258 real_maxval (&max, neg, mode);
6259 if (! HONOR_NANS (mode))
6260 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6261 arg0, build_real (TREE_TYPE (arg0), max));
6262
6263 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6264 arg0, build_real (TREE_TYPE (arg0), max));
6265 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6266
6267 default:
6268 break;
6269 }
6270
6271 return NULL_TREE;
6272 }
6273
6274 /* Subroutine of fold() that optimizes comparisons of a division by
6275 a nonzero integer constant against an integer constant, i.e.
6276 X/C1 op C2.
6277
6278 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6279 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6280 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6281
6282 The function returns the constant folded tree if a simplification
6283 can be made, and NULL_TREE otherwise. */
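/* Worked example for the signed case: with truncating division,
   X/4 == 2 holds exactly for X in [8, 11]; below, prod = 8, tmp = 3,
   lo = 8 and hi = 11, and the comparison becomes the range check
   8 <= X && X <= 11.  */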
6284
6285 static tree
6286 fold_div_compare (location_t loc,
6287 enum tree_code code, tree type, tree arg0, tree arg1)
6288 {
6289 tree prod, tmp, hi, lo;
6290 tree arg00 = TREE_OPERAND (arg0, 0);
6291 tree arg01 = TREE_OPERAND (arg0, 1);
6292 double_int val;
6293 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6294 bool neg_overflow;
6295 int overflow;
6296
6297 /* We have to do this the hard way to detect unsigned overflow.
6298 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6299 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6300 TREE_INT_CST_HIGH (arg01),
6301 TREE_INT_CST_LOW (arg1),
6302 TREE_INT_CST_HIGH (arg1),
6303 &val.low, &val.high, unsigned_p);
6304 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6305 neg_overflow = false;
6306
6307 if (unsigned_p)
6308 {
6309 tmp = int_const_binop (MINUS_EXPR, arg01,
6310 build_int_cst (TREE_TYPE (arg01), 1));
6311 lo = prod;
6312
6313 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6314 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6315 TREE_INT_CST_HIGH (prod),
6316 TREE_INT_CST_LOW (tmp),
6317 TREE_INT_CST_HIGH (tmp),
6318 &val.low, &val.high, unsigned_p);
6319 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6320 -1, overflow | TREE_OVERFLOW (prod));
6321 }
6322 else if (tree_int_cst_sgn (arg01) >= 0)
6323 {
6324 tmp = int_const_binop (MINUS_EXPR, arg01,
6325 build_int_cst (TREE_TYPE (arg01), 1));
6326 switch (tree_int_cst_sgn (arg1))
6327 {
6328 case -1:
6329 neg_overflow = true;
6330 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6331 hi = prod;
6332 break;
6333
6334 case 0:
6335 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6336 hi = tmp;
6337 break;
6338
6339 case 1:
6340 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6341 lo = prod;
6342 break;
6343
6344 default:
6345 gcc_unreachable ();
6346 }
6347 }
6348 else
6349 {
6350 /* A negative divisor reverses the relational operators. */
6351 code = swap_tree_comparison (code);
6352
6353 tmp = int_const_binop (PLUS_EXPR, arg01,
6354 build_int_cst (TREE_TYPE (arg01), 1));
6355 switch (tree_int_cst_sgn (arg1))
6356 {
6357 case -1:
6358 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6359 lo = prod;
6360 break;
6361
6362 case 0:
6363 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6364 lo = tmp;
6365 break;
6366
6367 case 1:
6368 neg_overflow = true;
6369 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6370 hi = prod;
6371 break;
6372
6373 default:
6374 gcc_unreachable ();
6375 }
6376 }
6377
6378 switch (code)
6379 {
6380 case EQ_EXPR:
6381 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6382 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6383 if (TREE_OVERFLOW (hi))
6384 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6385 if (TREE_OVERFLOW (lo))
6386 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6387 return build_range_check (loc, type, arg00, 1, lo, hi);
6388
6389 case NE_EXPR:
6390 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6391 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6392 if (TREE_OVERFLOW (hi))
6393 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6394 if (TREE_OVERFLOW (lo))
6395 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6396 return build_range_check (loc, type, arg00, 0, lo, hi);
6397
6398 case LT_EXPR:
6399 if (TREE_OVERFLOW (lo))
6400 {
6401 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6402 return omit_one_operand_loc (loc, type, tmp, arg00);
6403 }
6404 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6405
6406 case LE_EXPR:
6407 if (TREE_OVERFLOW (hi))
6408 {
6409 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6410 return omit_one_operand_loc (loc, type, tmp, arg00);
6411 }
6412 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6413
6414 case GT_EXPR:
6415 if (TREE_OVERFLOW (hi))
6416 {
6417 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6418 return omit_one_operand_loc (loc, type, tmp, arg00);
6419 }
6420 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6421
6422 case GE_EXPR:
6423 if (TREE_OVERFLOW (lo))
6424 {
6425 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6426 return omit_one_operand_loc (loc, type, tmp, arg00);
6427 }
6428 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6429
6430 default:
6431 break;
6432 }
6433
6434 return NULL_TREE;
6435 }
6436
6437
6438 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6439 equality/inequality test, then return a simplified form of the test
6440 using a sign test. Otherwise return NULL. TYPE is the desired
6441 result type. */
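/* For example, for a 32-bit int x the test (x & 0x80000000) != 0
   inspects exactly the sign bit and becomes x < 0, while
   (x & 0x80000000) == 0 becomes x >= 0.  */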
6442
6443 static tree
6444 fold_single_bit_test_into_sign_test (location_t loc,
6445 enum tree_code code, tree arg0, tree arg1,
6446 tree result_type)
6447 {
6448 /* If this is testing a single bit, we can optimize the test. */
6449 if ((code == NE_EXPR || code == EQ_EXPR)
6450 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6451 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6452 {
6453 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6454 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6455 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6456
6457 if (arg00 != NULL_TREE
6458 /* This is only a win if casting to a signed type is cheap,
6459 i.e. when arg00's type is not a partial mode. */
6460 && TYPE_PRECISION (TREE_TYPE (arg00))
6461 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6462 {
6463 tree stype = signed_type_for (TREE_TYPE (arg00));
6464 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6465 result_type,
6466 fold_convert_loc (loc, stype, arg00),
6467 build_int_cst (stype, 0));
6468 }
6469 }
6470
6471 return NULL_TREE;
6472 }
6473
6474 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6475 equality/inequality test, then return a simplified form of
6476 the test using shifts and logical operations. Otherwise return
6477 NULL. TYPE is the desired result type. */
6478
6479 tree
6480 fold_single_bit_test (location_t loc, enum tree_code code,
6481 tree arg0, tree arg1, tree result_type)
6482 {
6483 /* If this is testing a single bit, we can optimize the test. */
6484 if ((code == NE_EXPR || code == EQ_EXPR)
6485 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6486 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6487 {
6488 tree inner = TREE_OPERAND (arg0, 0);
6489 tree type = TREE_TYPE (arg0);
6490 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6491 enum machine_mode operand_mode = TYPE_MODE (type);
6492 int ops_unsigned;
6493 tree signed_type, unsigned_type, intermediate_type;
6494 tree tem, one;
6495
6496 /* First, see if we can fold the single bit test into a sign-bit
6497 test. */
6498 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6499 result_type);
6500 if (tem)
6501 return tem;
6502
6503 /* Otherwise we have (A & C) != 0 where C is a single bit,
6504 convert that into ((A >> C2) & 1), where C2 = log2(C).
6505 Similarly for (A & C) == 0. */
6506
6507 /* If INNER is a right shift of a constant and it plus BITNUM does
6508 not overflow, adjust BITNUM and INNER. */
6509 if (TREE_CODE (inner) == RSHIFT_EXPR
6510 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6511 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6512 && bitnum < TYPE_PRECISION (type)
6513 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6514 bitnum - TYPE_PRECISION (type)))
6515 {
6516 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6517 inner = TREE_OPERAND (inner, 0);
6518 }
6519
6520 /* If we are going to be able to omit the AND below, we must do our
6521 operations as unsigned. If we must use the AND, we have a choice.
6522 Normally unsigned is faster, but for some machines signed is. */
6523 #ifdef LOAD_EXTEND_OP
6524 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6525 && !flag_syntax_only) ? 0 : 1;
6526 #else
6527 ops_unsigned = 1;
6528 #endif
6529
6530 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6531 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6532 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6533 inner = fold_convert_loc (loc, intermediate_type, inner);
6534
6535 if (bitnum != 0)
6536 inner = build2 (RSHIFT_EXPR, intermediate_type,
6537 inner, size_int (bitnum));
6538
6539 one = build_int_cst (intermediate_type, 1);
6540
6541 if (code == EQ_EXPR)
6542 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6543
6544 /* Put the AND last so it can combine with more things. */
6545 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6546
6547 /* Make sure to return the proper type. */
6548 inner = fold_convert_loc (loc, result_type, inner);
6549
6550 return inner;
6551 }
6552 return NULL_TREE;
6553 }
6554
6555 /* Check whether we are allowed to reorder operands arg0 and arg1,
6556 such that the evaluation of arg1 occurs before arg0. */
6557
6558 static bool
6559 reorder_operands_p (const_tree arg0, const_tree arg1)
6560 {
6561 if (! flag_evaluation_order)
6562 return true;
6563 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6564 return true;
6565 return ! TREE_SIDE_EFFECTS (arg0)
6566 && ! TREE_SIDE_EFFECTS (arg1);
6567 }
6568
6569 /* Test whether it is preferable to swap two operands, ARG0 and
6570 ARG1, for example because ARG0 is an integer constant and ARG1
6571 isn't. If REORDER is true, only recommend swapping if we can
6572 evaluate the operands in reverse order. */
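/* For example, 1 + x is canonicalized as x + 1: ARG0 is an
   INTEGER_CST and ARG1 is not, so this returns 1 and the caller swaps
   the operands; later pattern matching then only needs to look for
   the constant in the second position.  */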
6573
6574 bool
6575 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6576 {
6577 STRIP_SIGN_NOPS (arg0);
6578 STRIP_SIGN_NOPS (arg1);
6579
6580 if (TREE_CODE (arg1) == INTEGER_CST)
6581 return 0;
6582 if (TREE_CODE (arg0) == INTEGER_CST)
6583 return 1;
6584
6585 if (TREE_CODE (arg1) == REAL_CST)
6586 return 0;
6587 if (TREE_CODE (arg0) == REAL_CST)
6588 return 1;
6589
6590 if (TREE_CODE (arg1) == FIXED_CST)
6591 return 0;
6592 if (TREE_CODE (arg0) == FIXED_CST)
6593 return 1;
6594
6595 if (TREE_CODE (arg1) == COMPLEX_CST)
6596 return 0;
6597 if (TREE_CODE (arg0) == COMPLEX_CST)
6598 return 1;
6599
6600 if (TREE_CONSTANT (arg1))
6601 return 0;
6602 if (TREE_CONSTANT (arg0))
6603 return 1;
6604
6605 if (optimize_function_for_size_p (cfun))
6606 return 0;
6607
6608 if (reorder && flag_evaluation_order
6609 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6610 return 0;
6611
6612 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6613 for commutative and comparison operators. Ensuring a canonical
6614 form allows the optimizers to find additional redundancies without
6615 having to explicitly check for both orderings. */
6616 if (TREE_CODE (arg0) == SSA_NAME
6617 && TREE_CODE (arg1) == SSA_NAME
6618 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6619 return 1;
6620
6621 /* Put SSA_NAMEs last. */
6622 if (TREE_CODE (arg1) == SSA_NAME)
6623 return 0;
6624 if (TREE_CODE (arg0) == SSA_NAME)
6625 return 1;
6626
6627 /* Put variables last. */
6628 if (DECL_P (arg1))
6629 return 0;
6630 if (DECL_P (arg0))
6631 return 1;
6632
6633 return 0;
6634 }
6635
6636 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6637 ARG0 is extended to a wider type. */
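/* For example, if C has type unsigned char, the comparison
   (int) c == 300 can never hold, because 300 lies outside [0, 255];
   the bounds computed below detect this and the test folds to
   constant false. When the constant does fit, the comparison is
   simply redone in the shorter type.  */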
6638
6639 static tree
6640 fold_widened_comparison (location_t loc, enum tree_code code,
6641 tree type, tree arg0, tree arg1)
6642 {
6643 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6644 tree arg1_unw;
6645 tree shorter_type, outer_type;
6646 tree min, max;
6647 bool above, below;
6648
6649 if (arg0_unw == arg0)
6650 return NULL_TREE;
6651 shorter_type = TREE_TYPE (arg0_unw);
6652
6653 #ifdef HAVE_canonicalize_funcptr_for_compare
6654 /* Disable this optimization if we're casting a function pointer
6655 type on targets that require function pointer canonicalization. */
6656 if (HAVE_canonicalize_funcptr_for_compare
6657 && TREE_CODE (shorter_type) == POINTER_TYPE
6658 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6659 return NULL_TREE;
6660 #endif
6661
6662 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6663 return NULL_TREE;
6664
6665 arg1_unw = get_unwidened (arg1, NULL_TREE);
6666
6667 /* If possible, express the comparison in the shorter mode. */
6668 if ((code == EQ_EXPR || code == NE_EXPR
6669 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6670 && (TREE_TYPE (arg1_unw) == shorter_type
6671 || ((TYPE_PRECISION (shorter_type)
6672 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6673 && (TYPE_UNSIGNED (shorter_type)
6674 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6675 || (TREE_CODE (arg1_unw) == INTEGER_CST
6676 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6677 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6678 && int_fits_type_p (arg1_unw, shorter_type))))
6679 return fold_build2_loc (loc, code, type, arg0_unw,
6680 fold_convert_loc (loc, shorter_type, arg1_unw));
6681
6682 if (TREE_CODE (arg1_unw) != INTEGER_CST
6683 || TREE_CODE (shorter_type) != INTEGER_TYPE
6684 || !int_fits_type_p (arg1_unw, shorter_type))
6685 return NULL_TREE;
6686
6687 /* If we are comparing with an integer that does not fit into the range
6688 of the shorter type, the result is known. */
6689 outer_type = TREE_TYPE (arg1_unw);
6690 min = lower_bound_in_type (outer_type, shorter_type);
6691 max = upper_bound_in_type (outer_type, shorter_type);
6692
6693 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6694 max, arg1_unw));
6695 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6696 arg1_unw, min));
6697
6698 switch (code)
6699 {
6700 case EQ_EXPR:
6701 if (above || below)
6702 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6703 break;
6704
6705 case NE_EXPR:
6706 if (above || below)
6707 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6708 break;
6709
6710 case LT_EXPR:
6711 case LE_EXPR:
6712 if (above)
6713 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6714 else if (below)
6715 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6716
6717 case GT_EXPR:
6718 case GE_EXPR:
6719 if (above)
6720 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6721 else if (below)
6722 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6723
6724 default:
6725 break;
6726 }
6727
6728 return NULL_TREE;
6729 }
6730
6731 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6732 ARG0 just the signedness is changed. */
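/* For example, (int) u == 5 with U of type unsigned int becomes
   u == 5u: the conversion changes only the signedness, and for
   equality the result is the same in either type, so the comparison
   is done directly on the inner operand.  */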
6733
6734 static tree
6735 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6736 tree arg0, tree arg1)
6737 {
6738 tree arg0_inner;
6739 tree inner_type, outer_type;
6740
6741 if (!CONVERT_EXPR_P (arg0))
6742 return NULL_TREE;
6743
6744 outer_type = TREE_TYPE (arg0);
6745 arg0_inner = TREE_OPERAND (arg0, 0);
6746 inner_type = TREE_TYPE (arg0_inner);
6747
6748 #ifdef HAVE_canonicalize_funcptr_for_compare
6749 /* Disable this optimization if we're casting a function pointer
6750 type on targets that require function pointer canonicalization. */
6751 if (HAVE_canonicalize_funcptr_for_compare
6752 && TREE_CODE (inner_type) == POINTER_TYPE
6753 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6754 return NULL_TREE;
6755 #endif
6756
6757 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6758 return NULL_TREE;
6759
6760 if (TREE_CODE (arg1) != INTEGER_CST
6761 && !(CONVERT_EXPR_P (arg1)
6762 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6763 return NULL_TREE;
6764
6765 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6766 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6767 && code != NE_EXPR
6768 && code != EQ_EXPR)
6769 return NULL_TREE;
6770
6771 if (TREE_CODE (arg1) == INTEGER_CST)
6772 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6773 0, TREE_OVERFLOW (arg1));
6774 else
6775 arg1 = fold_convert_loc (loc, inner_type, arg1);
6776
6777 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6778 }
6779
6780 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6781 the step of the array. Reconstructs s and delta in the case of s *
6782 delta being an integer constant (and thus already folded). ADDR is
6783 the address. OP1 is the multiplicative expression. If the
6784 function succeeds, the new address expression is returned.
6785 Otherwise NULL_TREE is returned. LOC is the location of the
6786 resulting expression. */
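/* For example (assuming 4-byte int elements): for int a[100], the
   address &a[2] p+ 4 * d becomes &a[2 + d], since the element step is
   4 bytes and d then counts whole elements; the constant form
   &a[2] p+ 8 likewise becomes &a[4].  */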
6787
6788 static tree
6789 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6790 {
6791 tree s, delta, step;
6792 tree ref = TREE_OPERAND (addr, 0), pref;
6793 tree ret, pos;
6794 tree itype;
6795 bool mdim = false;
6796
6797 /* Strip the nops that might be added when converting op1 to sizetype. */
6798 STRIP_NOPS (op1);
6799
6800 /* Canonicalize op1 into a possibly non-constant delta
6801 and an INTEGER_CST s. */
6802 if (TREE_CODE (op1) == MULT_EXPR)
6803 {
6804 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6805
6806 STRIP_NOPS (arg0);
6807 STRIP_NOPS (arg1);
6808
6809 if (TREE_CODE (arg0) == INTEGER_CST)
6810 {
6811 s = arg0;
6812 delta = arg1;
6813 }
6814 else if (TREE_CODE (arg1) == INTEGER_CST)
6815 {
6816 s = arg1;
6817 delta = arg0;
6818 }
6819 else
6820 return NULL_TREE;
6821 }
6822 else if (TREE_CODE (op1) == INTEGER_CST)
6823 {
6824 delta = op1;
6825 s = NULL_TREE;
6826 }
6827 else
6828 {
6829 /* Pretend that op1 is delta * 1. */
6830 delta = op1;
6831 s = integer_one_node;
6832 }
6833
6834 for (;; ref = TREE_OPERAND (ref, 0))
6835 {
6836 if (TREE_CODE (ref) == ARRAY_REF)
6837 {
6838 tree domain;
6839
6840 /* Remember if this was a multi-dimensional array. */
6841 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6842 mdim = true;
6843
6844 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6845 if (! domain)
6846 continue;
6847 itype = TREE_TYPE (domain);
6848
6849 step = array_ref_element_size (ref);
6850 if (TREE_CODE (step) != INTEGER_CST)
6851 continue;
6852
6853 if (s)
6854 {
6855 if (! tree_int_cst_equal (step, s))
6856 continue;
6857 }
6858 else
6859 {
6860 /* Check whether delta is a multiple of step. */
6861 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6862 if (! tmp)
6863 continue;
6864 delta = tmp;
6865 }
6866
6867 /* Only fold here if we can verify we do not overflow one
6868 dimension of a multi-dimensional array. */
6869 if (mdim)
6870 {
6871 tree tmp;
6872
6873 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6874 || !TYPE_MAX_VALUE (domain)
6875 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6876 continue;
6877
6878 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6879 fold_convert_loc (loc, itype,
6880 TREE_OPERAND (ref, 1)),
6881 fold_convert_loc (loc, itype, delta));
6882 if (!tmp
6883 || TREE_CODE (tmp) != INTEGER_CST
6884 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6885 continue;
6886 }
6887
6888 break;
6889 }
6890 else if (TREE_CODE (ref) == COMPONENT_REF
6891 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6892 {
6893 tree domain;
6894
6895 /* Remember if this was a multi-dimensional array. */
6896 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6897 mdim = true;
6898
6899 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6900 if (! domain)
6901 continue;
6902 itype = TREE_TYPE (domain);
6903
6904 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6905 if (TREE_CODE (step) != INTEGER_CST)
6906 continue;
6907
6908 if (s)
6909 {
6910 if (! tree_int_cst_equal (step, s))
6911 continue;
6912 }
6913 else
6914 {
6915 /* Check whether delta is a multiple of step. */
6916 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6917 if (! tmp)
6918 continue;
6919 delta = tmp;
6920 }
6921
6922 /* Only fold here if we can verify we do not overflow one
6923 dimension of a multi-dimensional array. */
6924 if (mdim)
6925 {
6926 tree tmp;
6927
6928 if (!TYPE_MIN_VALUE (domain)
6929 || !TYPE_MAX_VALUE (domain)
6930 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6931 continue;
6932
6933 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6934 fold_convert_loc (loc, itype,
6935 TYPE_MIN_VALUE (domain)),
6936 fold_convert_loc (loc, itype, delta));
6937 if (TREE_CODE (tmp) != INTEGER_CST
6938 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6939 continue;
6940 }
6941
6942 break;
6943 }
6944 else
6945 mdim = false;
6946
6947 if (!handled_component_p (ref))
6948 return NULL_TREE;
6949 }
6950
6951 /* We found a suitable array reference. So copy everything up to it,
6952 and replace the index. */
6953
6954 pref = TREE_OPERAND (addr, 0);
6955 ret = copy_node (pref);
6956 SET_EXPR_LOCATION (ret, loc);
6957 pos = ret;
6958
6959 while (pref != ref)
6960 {
6961 pref = TREE_OPERAND (pref, 0);
6962 TREE_OPERAND (pos, 0) = copy_node (pref);
6963 pos = TREE_OPERAND (pos, 0);
6964 }
6965
6966 if (TREE_CODE (ref) == ARRAY_REF)
6967 {
6968 TREE_OPERAND (pos, 1)
6969 = fold_build2_loc (loc, PLUS_EXPR, itype,
6970 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6971 fold_convert_loc (loc, itype, delta));
6972 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6973 }
6974 else if (TREE_CODE (ref) == COMPONENT_REF)
6975 {
6976 gcc_assert (ret == pos);
6977 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6978 fold_build2_loc
6979 (loc, PLUS_EXPR, itype,
6980 fold_convert_loc (loc, itype,
6981 TYPE_MIN_VALUE
6982 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6983 fold_convert_loc (loc, itype, delta)),
6984 NULL_TREE, NULL_TREE);
6985 return build_fold_addr_expr_loc (loc, ret);
6986 }
6987 else
6988 gcc_unreachable ();
6989 }
6990
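/* A standalone sketch (hypothetical, not part of fold-const.c) of the
   source-level effect of try_move_mult_to_index above: a pointer-plus of
   a multiple of the element size folds back into the array index.
   Returns nonzero iff the identity holds.  */

static int
example_move_mult_to_index (void)
{
  int a[10][10];
  /* &a[1][2] p+ 3 * sizeof (int) becomes &a[1][2 + 3]; the mdim check
     above guarantees that 2 + 3 still lies in the domain [0, 9] of the
     innermost dimension.  */
  return &a[1][2] + 3 == &a[1][5];
}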
6991
6992 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6993 means A >= Y && A != MAX, but in this case we know that
6994 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6995
6996 static tree
6997 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6998 {
6999 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7000
7001 if (TREE_CODE (bound) == LT_EXPR)
7002 a = TREE_OPERAND (bound, 0);
7003 else if (TREE_CODE (bound) == GT_EXPR)
7004 a = TREE_OPERAND (bound, 1);
7005 else
7006 return NULL_TREE;
7007
7008 typea = TREE_TYPE (a);
7009 if (!INTEGRAL_TYPE_P (typea)
7010 && !POINTER_TYPE_P (typea))
7011 return NULL_TREE;
7012
7013 if (TREE_CODE (ineq) == LT_EXPR)
7014 {
7015 a1 = TREE_OPERAND (ineq, 1);
7016 y = TREE_OPERAND (ineq, 0);
7017 }
7018 else if (TREE_CODE (ineq) == GT_EXPR)
7019 {
7020 a1 = TREE_OPERAND (ineq, 0);
7021 y = TREE_OPERAND (ineq, 1);
7022 }
7023 else
7024 return NULL_TREE;
7025
7026 if (TREE_TYPE (a1) != typea)
7027 return NULL_TREE;
7028
7029 if (POINTER_TYPE_P (typea))
7030 {
7031 /* Convert the pointers to integers before taking the difference. */
7032 tree ta = fold_convert_loc (loc, ssizetype, a);
7033 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7034 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7035 }
7036 else
7037 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7038
7039 if (!diff || !integer_onep (diff))
7040 return NULL_TREE;
7041
7042 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7043 }
7044
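/* A standalone sketch (hypothetical) that checks the identity behind
   fold_to_nonsharp_ineq_using_bound above over a small range: once A < X
   rules out A == MAX, the test A + 1 > Y may be replaced by A >= Y.
   Returns nonzero iff the identity holds for all sampled values.  */

static int
example_nonsharp_ineq (void)
{
  int a, x, y;
  for (a = -8; a < 8; a++)
    for (x = a + 1; x <= 8; x++)	/* A < X, so A + 1 cannot wrap here. */
      for (y = -8; y <= 8; y++)
	if (((a < x) && (a + 1 > y)) != ((a < x) && (a >= y)))
	  return 0;
  return 1;
}
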
7045 /* Fold a sum or difference of at least one multiplication.
7046 Returns the folded tree or NULL if no simplification could be made. */
7047
7048 static tree
7049 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7050 tree arg0, tree arg1)
7051 {
7052 tree arg00, arg01, arg10, arg11;
7053 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7054
7055 /* (A * C) +- (B * C) -> (A+-B) * C.
7056 (A * C) +- A -> A * (C+-1).
7057 We are most concerned about the case where C is a constant,
7058 but other combinations show up during loop reduction. Since
7059 it is not difficult, try all four possibilities. */
7060
7061 if (TREE_CODE (arg0) == MULT_EXPR)
7062 {
7063 arg00 = TREE_OPERAND (arg0, 0);
7064 arg01 = TREE_OPERAND (arg0, 1);
7065 }
7066 else if (TREE_CODE (arg0) == INTEGER_CST)
7067 {
7068 arg00 = build_one_cst (type);
7069 arg01 = arg0;
7070 }
7071 else
7072 {
7073 /* We cannot generate constant 1 for fract. */
7074 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7075 return NULL_TREE;
7076 arg00 = arg0;
7077 arg01 = build_one_cst (type);
7078 }
7079 if (TREE_CODE (arg1) == MULT_EXPR)
7080 {
7081 arg10 = TREE_OPERAND (arg1, 0);
7082 arg11 = TREE_OPERAND (arg1, 1);
7083 }
7084 else if (TREE_CODE (arg1) == INTEGER_CST)
7085 {
7086 arg10 = build_one_cst (type);
7087 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7088 the purposes of this canonicalization. */
7089 if (TREE_INT_CST_HIGH (arg1) == -1
7090 && negate_expr_p (arg1)
7091 && code == PLUS_EXPR)
7092 {
7093 arg11 = negate_expr (arg1);
7094 code = MINUS_EXPR;
7095 }
7096 else
7097 arg11 = arg1;
7098 }
7099 else
7100 {
7101 /* We cannot generate constant 1 for fract. */
7102 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7103 return NULL_TREE;
7104 arg10 = arg1;
7105 arg11 = build_one_cst (type);
7106 }
7107 same = NULL_TREE;
7108
7109 if (operand_equal_p (arg01, arg11, 0))
7110 same = arg01, alt0 = arg00, alt1 = arg10;
7111 else if (operand_equal_p (arg00, arg10, 0))
7112 same = arg00, alt0 = arg01, alt1 = arg11;
7113 else if (operand_equal_p (arg00, arg11, 0))
7114 same = arg00, alt0 = arg01, alt1 = arg10;
7115 else if (operand_equal_p (arg01, arg10, 0))
7116 same = arg01, alt0 = arg00, alt1 = arg11;
7117
7118 /* No identical multiplicands; see if we can find a common
7119 power-of-two factor in non-power-of-two multiplies. This
7120 can help in multi-dimensional array access. */
7121 else if (host_integerp (arg01, 0)
7122 && host_integerp (arg11, 0))
7123 {
7124 HOST_WIDE_INT int01, int11, tmp;
7125 bool swap = false;
7126 tree maybe_same;
7127 int01 = TREE_INT_CST_LOW (arg01);
7128 int11 = TREE_INT_CST_LOW (arg11);
7129
7130 /* Move min of absolute values to int11. */
7131 if (absu_hwi (int01) < absu_hwi (int11))
7132 {
7133 tmp = int01, int01 = int11, int11 = tmp;
7134 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7135 maybe_same = arg01;
7136 swap = true;
7137 }
7138 else
7139 maybe_same = arg11;
7140
7141 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7142 /* The remainder should not be a constant, otherwise we
7143 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7144 increase the number of multiplications necessary. */
7145 && TREE_CODE (arg10) != INTEGER_CST)
7146 {
7147 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7148 build_int_cst (TREE_TYPE (arg00),
7149 int01 / int11));
7150 alt1 = arg10;
7151 same = maybe_same;
7152 if (swap)
7153 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7154 }
7155 }
7156
7157 if (same)
7158 return fold_build2_loc (loc, MULT_EXPR, type,
7159 fold_build2_loc (loc, code, type,
7160 fold_convert_loc (loc, type, alt0),
7161 fold_convert_loc (loc, type, alt1)),
7162 fold_convert_loc (loc, type, same));
7163
7164 return NULL_TREE;
7165 }
7166
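/* A standalone sketch (hypothetical) of the rewrites fold_plusminus_mult_expr
   performs, checked on sample values: factoring out a common multiplicand,
   and the power-of-two factoring used for multi-dimensional array access.  */

static int
example_plusminus_mult (void)
{
  int a = 7, b = -3, c = 5, i = 11, j = 4;
  return a * c + b * c == (a + b) * c		/* (A * C) + (B * C) -> (A+B) * C */
	 && a * c - a == a * (c - 1)		/* (A * C) - A -> A * (C-1) */
	 && i * 4 + j * 2 == (i * 2 + j) * 2;	/* common power-of-two factor */
}
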
7167 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7168 specified by EXPR into the buffer PTR of length LEN bytes.
7169 Return the number of bytes placed in the buffer, or zero
7170 upon failure. */
7171
7172 static int
7173 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7174 {
7175 tree type = TREE_TYPE (expr);
7176 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7177 int byte, offset, word, words;
7178 unsigned char value;
7179
7180 if (total_bytes > len)
7181 return 0;
7182 words = total_bytes / UNITS_PER_WORD;
7183
7184 for (byte = 0; byte < total_bytes; byte++)
7185 {
7186 int bitpos = byte * BITS_PER_UNIT;
7187 if (bitpos < HOST_BITS_PER_WIDE_INT)
7188 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7189 else
7190 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7191 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7192
7193 if (total_bytes > UNITS_PER_WORD)
7194 {
7195 word = byte / UNITS_PER_WORD;
7196 if (WORDS_BIG_ENDIAN)
7197 word = (words - 1) - word;
7198 offset = word * UNITS_PER_WORD;
7199 if (BYTES_BIG_ENDIAN)
7200 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7201 else
7202 offset += byte % UNITS_PER_WORD;
7203 }
7204 else
7205 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7206 ptr[offset] = value;
7207 }
7208 return total_bytes;
7209 }
7210
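/* A standalone sketch (hypothetical; assumes 8-bit bytes and a buffer of
   at most one word) of the byte placement above: each byte of VAL is
   extracted least-significant-first and stored at the offset the target
   byte order dictates, mirroring the BYTES_BIG_ENDIAN arm of the offset
   computation.  */

static void
example_encode_int (unsigned long long val, unsigned char *ptr,
		    int total_bytes, int big_endian)
{
  int byte;
  for (byte = 0; byte < total_bytes; byte++)
    {
      unsigned char value = (unsigned char) (val >> (byte * 8));
      int offset = big_endian ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  /* example_encode_int (0x11223344, buf, 4, 0) stores 44 33 22 11;
     example_encode_int (0x11223344, buf, 4, 1) stores 11 22 33 44.  */
}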
7211
7212 /* Subroutine of native_encode_expr. Encode the REAL_CST
7213 specified by EXPR into the buffer PTR of length LEN bytes.
7214 Return the number of bytes placed in the buffer, or zero
7215 upon failure. */
7216
7217 static int
7218 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7219 {
7220 tree type = TREE_TYPE (expr);
7221 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7222 int byte, offset, word, words, bitpos;
7223 unsigned char value;
7224
7225 /* There are always 32 bits in each long, no matter the size of
7226 the host's long. We handle floating point representations with
7227 up to 192 bits. */
7228 long tmp[6];
7229
7230 if (total_bytes > len)
7231 return 0;
7232 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7233
7234 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7235
7236 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7237 bitpos += BITS_PER_UNIT)
7238 {
7239 byte = (bitpos / BITS_PER_UNIT) & 3;
7240 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7241
7242 if (UNITS_PER_WORD < 4)
7243 {
7244 word = byte / UNITS_PER_WORD;
7245 if (WORDS_BIG_ENDIAN)
7246 word = (words - 1) - word;
7247 offset = word * UNITS_PER_WORD;
7248 if (BYTES_BIG_ENDIAN)
7249 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7250 else
7251 offset += byte % UNITS_PER_WORD;
7252 }
7253 else
7254 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7255 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7256 }
7257 return total_bytes;
7258 }
7259
7260 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7261 specified by EXPR into the buffer PTR of length LEN bytes.
7262 Return the number of bytes placed in the buffer, or zero
7263 upon failure. */
7264
7265 static int
7266 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7267 {
7268 int rsize, isize;
7269 tree part;
7270
7271 part = TREE_REALPART (expr);
7272 rsize = native_encode_expr (part, ptr, len);
7273 if (rsize == 0)
7274 return 0;
7275 part = TREE_IMAGPART (expr);
7276 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7277 if (isize != rsize)
7278 return 0;
7279 return rsize + isize;
7280 }
7281
7282
7283 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7284 specified by EXPR into the buffer PTR of length LEN bytes.
7285 Return the number of bytes placed in the buffer, or zero
7286 upon failure. */
7287
7288 static int
7289 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7290 {
7291 int i, size, offset, count;
7292 tree itype, elem, elements;
7293
7294 offset = 0;
7295 elements = TREE_VECTOR_CST_ELTS (expr);
7296 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7297 itype = TREE_TYPE (TREE_TYPE (expr));
7298 size = GET_MODE_SIZE (TYPE_MODE (itype));
7299 for (i = 0; i < count; i++)
7300 {
7301 if (elements)
7302 {
7303 elem = TREE_VALUE (elements);
7304 elements = TREE_CHAIN (elements);
7305 }
7306 else
7307 elem = NULL_TREE;
7308
7309 if (elem)
7310 {
7311 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7312 return 0;
7313 }
7314 else
7315 {
7316 if (offset + size > len)
7317 return 0;
7318 memset (ptr+offset, 0, size);
7319 }
7320 offset += size;
7321 }
7322 return offset;
7323 }
7324
7325
7326 /* Subroutine of native_encode_expr. Encode the STRING_CST
7327 specified by EXPR into the buffer PTR of length LEN bytes.
7328 Return the number of bytes placed in the buffer, or zero
7329 upon failure. */
7330
7331 static int
7332 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7333 {
7334 tree type = TREE_TYPE (expr);
7335 HOST_WIDE_INT total_bytes;
7336
7337 if (TREE_CODE (type) != ARRAY_TYPE
7338 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7339 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7340 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7341 return 0;
7342 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7343 if (total_bytes > len)
7344 return 0;
7345 if (TREE_STRING_LENGTH (expr) < total_bytes)
7346 {
7347 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7348 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7349 total_bytes - TREE_STRING_LENGTH (expr));
7350 }
7351 else
7352 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7353 return total_bytes;
7354 }
7355
7356
7357 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7358 REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR
7359 into the buffer PTR of length LEN bytes. Return the number of
7360 bytes placed in the buffer, or zero upon failure. */
7361
7362 int
7363 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7364 {
7365 switch (TREE_CODE (expr))
7366 {
7367 case INTEGER_CST:
7368 return native_encode_int (expr, ptr, len);
7369
7370 case REAL_CST:
7371 return native_encode_real (expr, ptr, len);
7372
7373 case COMPLEX_CST:
7374 return native_encode_complex (expr, ptr, len);
7375
7376 case VECTOR_CST:
7377 return native_encode_vector (expr, ptr, len);
7378
7379 case STRING_CST:
7380 return native_encode_string (expr, ptr, len);
7381
7382 default:
7383 return 0;
7384 }
7385 }
7386
7387
7388 /* Subroutine of native_interpret_expr. Interpret the contents of
7389 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7390 If the buffer cannot be interpreted, return NULL_TREE. */
7391
7392 static tree
7393 native_interpret_int (tree type, const unsigned char *ptr, int len)
7394 {
7395 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7396 int byte, offset, word, words;
7397 unsigned char value;
7398 double_int result;
7399
7400 if (total_bytes > len)
7401 return NULL_TREE;
7402 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7403 return NULL_TREE;
7404
7405 result = double_int_zero;
7406 words = total_bytes / UNITS_PER_WORD;
7407
7408 for (byte = 0; byte < total_bytes; byte++)
7409 {
7410 int bitpos = byte * BITS_PER_UNIT;
7411 if (total_bytes > UNITS_PER_WORD)
7412 {
7413 word = byte / UNITS_PER_WORD;
7414 if (WORDS_BIG_ENDIAN)
7415 word = (words - 1) - word;
7416 offset = word * UNITS_PER_WORD;
7417 if (BYTES_BIG_ENDIAN)
7418 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7419 else
7420 offset += byte % UNITS_PER_WORD;
7421 }
7422 else
7423 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7424 value = ptr[offset];
7425
7426 if (bitpos < HOST_BITS_PER_WIDE_INT)
7427 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7428 else
7429 result.high |= (unsigned HOST_WIDE_INT) value
7430 << (bitpos - HOST_BITS_PER_WIDE_INT);
7431 }
7432
7433 return double_int_to_tree (type, result);
7434 }
7435
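/* A standalone sketch (hypothetical) of the inverse direction: bytes laid
   out by the encoding sketch earlier are OR-ed back into an integer, so
   for either byte order interpret (encode (V)) == V.  */

static unsigned long long
example_interpret_int (const unsigned char *ptr, int total_bytes,
		       int big_endian)
{
  unsigned long long result = 0;
  int byte;
  for (byte = 0; byte < total_bytes; byte++)
    {
      int offset = big_endian ? (total_bytes - 1) - byte : byte;
      result |= (unsigned long long) ptr[offset] << (byte * 8);
    }
  return result;
}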
7436
7437 /* Subroutine of native_interpret_expr. Interpret the contents of
7438 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7439 If the buffer cannot be interpreted, return NULL_TREE. */
7440
7441 static tree
7442 native_interpret_real (tree type, const unsigned char *ptr, int len)
7443 {
7444 enum machine_mode mode = TYPE_MODE (type);
7445 int total_bytes = GET_MODE_SIZE (mode);
7446 int byte, offset, word, words, bitpos;
7447 unsigned char value;
7448 /* There are always 32 bits in each long, no matter the size of
7449 the host's long. We handle floating point representations with
7450 up to 192 bits. */
7451 REAL_VALUE_TYPE r;
7452 long tmp[6];
7453
7454 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7455 if (total_bytes > len || total_bytes > 24)
7456 return NULL_TREE;
7457 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7458
7459 memset (tmp, 0, sizeof (tmp));
7460 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7461 bitpos += BITS_PER_UNIT)
7462 {
7463 byte = (bitpos / BITS_PER_UNIT) & 3;
7464 if (UNITS_PER_WORD < 4)
7465 {
7466 word = byte / UNITS_PER_WORD;
7467 if (WORDS_BIG_ENDIAN)
7468 word = (words - 1) - word;
7469 offset = word * UNITS_PER_WORD;
7470 if (BYTES_BIG_ENDIAN)
7471 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7472 else
7473 offset += byte % UNITS_PER_WORD;
7474 }
7475 else
7476 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7477 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7478
7479 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7480 }
7481
7482 real_from_target (&r, tmp, mode);
7483 return build_real (type, r);
7484 }
7485
7486
7487 /* Subroutine of native_interpret_expr. Interpret the contents of
7488 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7489 If the buffer cannot be interpreted, return NULL_TREE. */
7490
7491 static tree
7492 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7493 {
7494 tree etype, rpart, ipart;
7495 int size;
7496
7497 etype = TREE_TYPE (type);
7498 size = GET_MODE_SIZE (TYPE_MODE (etype));
7499 if (size * 2 > len)
7500 return NULL_TREE;
7501 rpart = native_interpret_expr (etype, ptr, size);
7502 if (!rpart)
7503 return NULL_TREE;
7504 ipart = native_interpret_expr (etype, ptr+size, size);
7505 if (!ipart)
7506 return NULL_TREE;
7507 return build_complex (type, rpart, ipart);
7508 }
7509
7510
7511 /* Subroutine of native_interpret_expr. Interpret the contents of
7512 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7513 If the buffer cannot be interpreted, return NULL_TREE. */
7514
7515 static tree
7516 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7517 {
7518 tree etype, elem, elements;
7519 int i, size, count;
7520
7521 etype = TREE_TYPE (type);
7522 size = GET_MODE_SIZE (TYPE_MODE (etype));
7523 count = TYPE_VECTOR_SUBPARTS (type);
7524 if (size * count > len)
7525 return NULL_TREE;
7526
7527 elements = NULL_TREE;
7528 for (i = count - 1; i >= 0; i--)
7529 {
7530 elem = native_interpret_expr (etype, ptr+(i*size), size);
7531 if (!elem)
7532 return NULL_TREE;
7533 elements = tree_cons (NULL_TREE, elem, elements);
7534 }
7535 return build_vector (type, elements);
7536 }
7537
7538
7539 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7540 the buffer PTR of length LEN as a constant of type TYPE. For
7541 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7542 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7543 return NULL_TREE. */
7544
7545 tree
7546 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7547 {
7548 switch (TREE_CODE (type))
7549 {
7550 case INTEGER_TYPE:
7551 case ENUMERAL_TYPE:
7552 case BOOLEAN_TYPE:
7553 return native_interpret_int (type, ptr, len);
7554
7555 case REAL_TYPE:
7556 return native_interpret_real (type, ptr, len);
7557
7558 case COMPLEX_TYPE:
7559 return native_interpret_complex (type, ptr, len);
7560
7561 case VECTOR_TYPE:
7562 return native_interpret_vector (type, ptr, len);
7563
7564 default:
7565 return NULL_TREE;
7566 }
7567 }
7568
7569
7570 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7571 TYPE at compile-time. If we're unable to perform the conversion
7572 return NULL_TREE. */
7573
7574 static tree
7575 fold_view_convert_expr (tree type, tree expr)
7576 {
7577 /* We support up to 512-bit values (for V8DFmode). */
7578 unsigned char buffer[64];
7579 int len;
7580
7581 /* Check that the host and target are sane. */
7582 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7583 return NULL_TREE;
7584
7585 len = native_encode_expr (expr, buffer, sizeof (buffer));
7586 if (len == 0)
7587 return NULL_TREE;
7588
7589 return native_interpret_expr (type, buffer, len);
7590 }
7591
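/* A standalone sketch (hypothetical; assumes IEEE-754 single precision
   and 32-bit unsigned int): fold_view_convert_expr performs at compile
   time what a memcpy between objects of different type performs at run
   time.  */

static unsigned int
example_view_convert (void)
{
  float f = 1.0f;
  unsigned int bits;
  memcpy (&bits, &f, sizeof bits);	/* run-time VIEW_CONVERT_EXPR */
  return bits;				/* 0x3f800000 on IEEE-754 targets */
}
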
7592 /* Build an expression for the address of T. Folds away INDIRECT_REF
7593 to avoid confusing the gimplify process. */
7594
7595 tree
7596 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7597 {
7598 /* The size of the object is not relevant when talking about its address. */
7599 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7600 t = TREE_OPERAND (t, 0);
7601
7602 if (TREE_CODE (t) == INDIRECT_REF)
7603 {
7604 t = TREE_OPERAND (t, 0);
7605
7606 if (TREE_TYPE (t) != ptrtype)
7607 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7608 }
7609 else if (TREE_CODE (t) == MEM_REF
7610 && integer_zerop (TREE_OPERAND (t, 1)))
7611 return TREE_OPERAND (t, 0);
7612 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7613 {
7614 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7615
7616 if (TREE_TYPE (t) != ptrtype)
7617 t = fold_convert_loc (loc, ptrtype, t);
7618 }
7619 else
7620 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7621
7622 return t;
7623 }
7624
7625 /* Build an expression for the address of T. */
7626
7627 tree
7628 build_fold_addr_expr_loc (location_t loc, tree t)
7629 {
7630 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7631
7632 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7633 }
7634
7635 /* Fold a unary expression of code CODE and type TYPE with operand
7636 OP0. Return the folded expression if folding is successful.
7637 Otherwise, return NULL_TREE. */
7638
7639 tree
7640 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7641 {
7642 tree tem;
7643 tree arg0;
7644 enum tree_code_class kind = TREE_CODE_CLASS (code);
7645
7646 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7647 && TREE_CODE_LENGTH (code) == 1);
7648
7649 arg0 = op0;
7650 if (arg0)
7651 {
7652 if (CONVERT_EXPR_CODE_P (code)
7653 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7654 {
7655 /* Don't use STRIP_NOPS, because signedness of argument type
7656 matters. */
7657 STRIP_SIGN_NOPS (arg0);
7658 }
7659 else
7660 {
7661 /* Strip any conversions that don't change the mode. This
7662 is safe for every expression, except for a comparison
7663 expression because its signedness is derived from its
7664 operands.
7665
7666 Note that this is done as an internal manipulation within
7667 the constant folder, in order to find the simplest
7668 representation of the arguments so that their form can be
7669 studied. In any case, the appropriate type conversions
7670 should be put back in the tree that will get out of the
7671 constant folder. */
7672 STRIP_NOPS (arg0);
7673 }
7674 }
7675
7676 if (TREE_CODE_CLASS (code) == tcc_unary)
7677 {
7678 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7679 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7680 fold_build1_loc (loc, code, type,
7681 fold_convert_loc (loc, TREE_TYPE (op0),
7682 TREE_OPERAND (arg0, 1))));
7683 else if (TREE_CODE (arg0) == COND_EXPR)
7684 {
7685 tree arg01 = TREE_OPERAND (arg0, 1);
7686 tree arg02 = TREE_OPERAND (arg0, 2);
7687 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7688 arg01 = fold_build1_loc (loc, code, type,
7689 fold_convert_loc (loc,
7690 TREE_TYPE (op0), arg01));
7691 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7692 arg02 = fold_build1_loc (loc, code, type,
7693 fold_convert_loc (loc,
7694 TREE_TYPE (op0), arg02));
7695 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7696 arg01, arg02);
7697
7698 /* If this was a conversion, and all we did was to move it
7699 inside the COND_EXPR, bring it back out. But leave it if
7700 it is a conversion from integer to integer and the
7701 result precision is no wider than a word since such a
7702 conversion is cheap and may be optimized away by combine,
7703 while it couldn't if it were outside the COND_EXPR. Then return
7704 so we don't get into an infinite recursion loop taking the
7705 conversion out and then back in. */
7706
7707 if ((CONVERT_EXPR_CODE_P (code)
7708 || code == NON_LVALUE_EXPR)
7709 && TREE_CODE (tem) == COND_EXPR
7710 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7711 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7712 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7713 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7714 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7715 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7716 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7717 && (INTEGRAL_TYPE_P
7718 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7719 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7720 || flag_syntax_only))
7721 tem = build1_loc (loc, code, type,
7722 build3 (COND_EXPR,
7723 TREE_TYPE (TREE_OPERAND
7724 (TREE_OPERAND (tem, 1), 0)),
7725 TREE_OPERAND (tem, 0),
7726 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7727 TREE_OPERAND (TREE_OPERAND (tem, 2),
7728 0)));
7729 return tem;
7730 }
7731 }
7732
7733 switch (code)
7734 {
7735 case PAREN_EXPR:
7736 /* Re-association barriers around constants and other re-association
7737 barriers can be removed. */
7738 if (CONSTANT_CLASS_P (op0)
7739 || TREE_CODE (op0) == PAREN_EXPR)
7740 return fold_convert_loc (loc, type, op0);
7741 return NULL_TREE;
7742
7743 CASE_CONVERT:
7744 case FLOAT_EXPR:
7745 case FIX_TRUNC_EXPR:
7746 if (TREE_TYPE (op0) == type)
7747 return op0;
7748
7749 if (COMPARISON_CLASS_P (op0))
7750 {
7751 /* If we have (type) (a CMP b) and type is an integral type, return
7752 new expression involving the new type. Canonicalize
7753 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7754 non-integral type.
7755 Do not fold the result as that would not simplify further, also
7756 folding again results in recursions. */
7757 if (TREE_CODE (type) == BOOLEAN_TYPE)
7758 return build2_loc (loc, TREE_CODE (op0), type,
7759 TREE_OPERAND (op0, 0),
7760 TREE_OPERAND (op0, 1));
7761 else if (!INTEGRAL_TYPE_P (type))
7762 return build3_loc (loc, COND_EXPR, type, op0,
7763 constant_boolean_node (true, type),
7764 constant_boolean_node (false, type));
7765 }
7766
7767 /* Handle cases of two conversions in a row. */
7768 if (CONVERT_EXPR_P (op0))
7769 {
7770 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7771 tree inter_type = TREE_TYPE (op0);
7772 int inside_int = INTEGRAL_TYPE_P (inside_type);
7773 int inside_ptr = POINTER_TYPE_P (inside_type);
7774 int inside_float = FLOAT_TYPE_P (inside_type);
7775 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7776 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7777 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7778 int inter_int = INTEGRAL_TYPE_P (inter_type);
7779 int inter_ptr = POINTER_TYPE_P (inter_type);
7780 int inter_float = FLOAT_TYPE_P (inter_type);
7781 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7782 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7783 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7784 int final_int = INTEGRAL_TYPE_P (type);
7785 int final_ptr = POINTER_TYPE_P (type);
7786 int final_float = FLOAT_TYPE_P (type);
7787 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7788 unsigned int final_prec = TYPE_PRECISION (type);
7789 int final_unsignedp = TYPE_UNSIGNED (type);
7790
7791 /* In addition to the cases of two conversions in a row
7792 handled below, if we are converting something to its own
7793 type via an object of identical or wider precision, neither
7794 conversion is needed. */
7795 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7796 && (((inter_int || inter_ptr) && final_int)
7797 || (inter_float && final_float))
7798 && inter_prec >= final_prec)
7799 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7800
7801 /* Likewise, if the intermediate and initial types are either both
7802 float or both integer, we don't need the middle conversion if the
7803 former is wider than the latter and doesn't change the signedness
7804 (for integers). Avoid this if the final type is a pointer since
7805 then we sometimes need the middle conversion. Likewise if the
7806 final type has a precision not equal to the size of its mode. */
7807 if (((inter_int && inside_int)
7808 || (inter_float && inside_float)
7809 || (inter_vec && inside_vec))
7810 && inter_prec >= inside_prec
7811 && (inter_float || inter_vec
7812 || inter_unsignedp == inside_unsignedp)
7813 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7814 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7815 && ! final_ptr
7816 && (! final_vec || inter_prec == inside_prec))
7817 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7818
7819 /* If we have a sign-extension of a zero-extended value, we can
7820 replace that by a single zero-extension. */
7821 if (inside_int && inter_int && final_int
7822 && inside_prec < inter_prec && inter_prec < final_prec
7823 && inside_unsignedp && !inter_unsignedp)
7824 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7825
7826 /* Two conversions in a row are not needed unless:
7827 - some conversion is floating-point (overstrict for now), or
7828 - some conversion is a vector (overstrict for now), or
7829 - the intermediate type is narrower than both initial and
7830 final, or
7831 - the intermediate type and innermost type differ in signedness,
7832 and the outermost type is wider than the intermediate, or
7833 - the initial type is a pointer type and the precisions of the
7834 intermediate and final types differ, or
7835 - the final type is a pointer type and the precisions of the
7836 initial and intermediate types differ. */
7837 if (! inside_float && ! inter_float && ! final_float
7838 && ! inside_vec && ! inter_vec && ! final_vec
7839 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7840 && ! (inside_int && inter_int
7841 && inter_unsignedp != inside_unsignedp
7842 && inter_prec < final_prec)
7843 && ((inter_unsignedp && inter_prec > inside_prec)
7844 == (final_unsignedp && final_prec > inter_prec))
7845 && ! (inside_ptr && inter_prec != final_prec)
7846 && ! (final_ptr && inside_prec != inter_prec)
7847 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7848 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7849 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7850 }
7851
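/* For example, assuming 16-bit short, 32-bit int and 64-bit long:
   (int) (long) s for a short s needs no intermediate conversion, whereas
   (int) (short) l for a long l must keep the truncating middle cast.  */
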
7852 /* Handle (T *)&A.B.C for A being of type T and B and C
7853 living at offset zero. This occurs frequently in
7854 C++ upcasting and then accessing the base. */
7855 if (TREE_CODE (op0) == ADDR_EXPR
7856 && POINTER_TYPE_P (type)
7857 && handled_component_p (TREE_OPERAND (op0, 0)))
7858 {
7859 HOST_WIDE_INT bitsize, bitpos;
7860 tree offset;
7861 enum machine_mode mode;
7862 int unsignedp, volatilep;
7863 tree base = TREE_OPERAND (op0, 0);
7864 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7865 &mode, &unsignedp, &volatilep, false);
7866 /* If the reference was to a (constant) zero offset, we can use
7867 the address of the base if it has the same base type
7868 as the result type and the pointer type is unqualified. */
7869 if (! offset && bitpos == 0
7870 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7871 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7872 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7873 return fold_convert_loc (loc, type,
7874 build_fold_addr_expr_loc (loc, base));
7875 }
7876
7877 if (TREE_CODE (op0) == MODIFY_EXPR
7878 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7879 /* Detect assigning a bitfield. */
7880 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7881 && DECL_BIT_FIELD
7882 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7883 {
7884 /* Don't leave an assignment inside a conversion
7885 unless assigning a bitfield. */
7886 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7887 /* First do the assignment, then return converted constant. */
7888 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7889 TREE_NO_WARNING (tem) = 1;
7890 TREE_USED (tem) = 1;
7891 return tem;
7892 }
7893
7894 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7895 constant (if x has signed type, the sign bit cannot be set
7896 in c). This folds extension into the BIT_AND_EXPR.
7897 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7898 very likely don't have maximal range for their precision and this
7899 transformation effectively doesn't preserve non-maximal ranges. */
7900 if (TREE_CODE (type) == INTEGER_TYPE
7901 && TREE_CODE (op0) == BIT_AND_EXPR
7902 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7903 {
7904 tree and_expr = op0;
7905 tree and0 = TREE_OPERAND (and_expr, 0);
7906 tree and1 = TREE_OPERAND (and_expr, 1);
7907 int change = 0;
7908
7909 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7910 || (TYPE_PRECISION (type)
7911 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7912 change = 1;
7913 else if (TYPE_PRECISION (TREE_TYPE (and1))
7914 <= HOST_BITS_PER_WIDE_INT
7915 && host_integerp (and1, 1))
7916 {
7917 unsigned HOST_WIDE_INT cst;
7918
7919 cst = tree_low_cst (and1, 1);
7920 cst &= (HOST_WIDE_INT) -1
7921 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7922 change = (cst == 0);
7923 #ifdef LOAD_EXTEND_OP
7924 if (change
7925 && !flag_syntax_only
7926 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7927 == ZERO_EXTEND))
7928 {
7929 tree uns = unsigned_type_for (TREE_TYPE (and0));
7930 and0 = fold_convert_loc (loc, uns, and0);
7931 and1 = fold_convert_loc (loc, uns, and1);
7932 }
7933 #endif
7934 }
7935 if (change)
7936 {
7937 tem = force_fit_type_double (type, tree_to_double_int (and1),
7938 0, TREE_OVERFLOW (and1));
7939 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7940 fold_convert_loc (loc, type, and0), tem);
7941 }
7942 }
7943
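/* For example, (long) (x & 0xff) for a signed int x folds to
   (long) x & 0xff: the sign bit of the mask is clear, so extending x
   before the AND cannot change the masked result.  */
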
7944 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7945 when one of the new casts will fold away. Conservatively we assume
7946 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7947 if (POINTER_TYPE_P (type)
7948 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7949 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7950 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7951 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7952 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7953 {
7954 tree arg00 = TREE_OPERAND (arg0, 0);
7955 tree arg01 = TREE_OPERAND (arg0, 1);
7956
7957 return fold_build_pointer_plus_loc
7958 (loc, fold_convert_loc (loc, type, arg00), arg01);
7959 }
7960
7961 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7962 of the same precision, and X is an integer type not narrower than
7963 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7964 if (INTEGRAL_TYPE_P (type)
7965 && TREE_CODE (op0) == BIT_NOT_EXPR
7966 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7967 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7968 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7969 {
7970 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7971 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7972 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7973 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7974 fold_convert_loc (loc, type, tem));
7975 }
7976
7977 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7978 type of X and Y (integer types only). */
7979 if (INTEGRAL_TYPE_P (type)
7980 && TREE_CODE (op0) == MULT_EXPR
7981 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7982 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7983 {
7984 /* Be careful not to introduce new overflows. */
7985 tree mult_type;
7986 if (TYPE_OVERFLOW_WRAPS (type))
7987 mult_type = type;
7988 else
7989 mult_type = unsigned_type_for (type);
7990
7991 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7992 {
7993 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7994 fold_convert_loc (loc, mult_type,
7995 TREE_OPERAND (op0, 0)),
7996 fold_convert_loc (loc, mult_type,
7997 TREE_OPERAND (op0, 1)));
7998 return fold_convert_loc (loc, type, tem);
7999 }
8000 }
8001
8002 tem = fold_convert_const (code, type, op0);
8003 return tem ? tem : NULL_TREE;
8004
8005 case ADDR_SPACE_CONVERT_EXPR:
8006 if (integer_zerop (arg0))
8007 return fold_convert_const (code, type, arg0);
8008 return NULL_TREE;
8009
8010 case FIXED_CONVERT_EXPR:
8011 tem = fold_convert_const (code, type, arg0);
8012 return tem ? tem : NULL_TREE;
8013
8014 case VIEW_CONVERT_EXPR:
8015 if (TREE_TYPE (op0) == type)
8016 return op0;
8017 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8018 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8019 type, TREE_OPERAND (op0, 0));
8020 if (TREE_CODE (op0) == MEM_REF)
8021 return fold_build2_loc (loc, MEM_REF, type,
8022 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8023
8024 /* For integral conversions with the same precision or pointer
8025 conversions use a NOP_EXPR instead. */
8026 if ((INTEGRAL_TYPE_P (type)
8027 || POINTER_TYPE_P (type))
8028 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8029 || POINTER_TYPE_P (TREE_TYPE (op0)))
8030 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8031 return fold_convert_loc (loc, type, op0);
8032
8033 /* Strip inner integral conversions that do not change the precision. */
8034 if (CONVERT_EXPR_P (op0)
8035 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8036 || POINTER_TYPE_P (TREE_TYPE (op0)))
8037 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8038 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8039 && (TYPE_PRECISION (TREE_TYPE (op0))
8040 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8041 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8042 type, TREE_OPERAND (op0, 0));
8043
8044 return fold_view_convert_expr (type, op0);
8045
8046 case NEGATE_EXPR:
8047 tem = fold_negate_expr (loc, arg0);
8048 if (tem)
8049 return fold_convert_loc (loc, type, tem);
8050 return NULL_TREE;
8051
8052 case ABS_EXPR:
8053 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8054 return fold_abs_const (arg0, type);
8055 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8056 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8057 /* Convert fabs((double)float) into (double)fabsf(float). */
8058 else if (TREE_CODE (arg0) == NOP_EXPR
8059 && TREE_CODE (type) == REAL_TYPE)
8060 {
8061 tree targ0 = strip_float_extensions (arg0);
8062 if (targ0 != arg0)
8063 return fold_convert_loc (loc, type,
8064 fold_build1_loc (loc, ABS_EXPR,
8065 TREE_TYPE (targ0),
8066 targ0));
8067 }
8068 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8069 else if (TREE_CODE (arg0) == ABS_EXPR)
8070 return arg0;
8071 else if (tree_expr_nonnegative_p (arg0))
8072 return arg0;
8073
8074 /* Strip sign ops from argument. */
8075 if (TREE_CODE (type) == REAL_TYPE)
8076 {
8077 tem = fold_strip_sign_ops (arg0);
8078 if (tem)
8079 return fold_build1_loc (loc, ABS_EXPR, type,
8080 fold_convert_loc (loc, type, tem));
8081 }
8082 return NULL_TREE;
8083
8084 case CONJ_EXPR:
8085 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8086 return fold_convert_loc (loc, type, arg0);
8087 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8088 {
8089 tree itype = TREE_TYPE (type);
8090 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8091 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8092 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8093 negate_expr (ipart));
8094 }
8095 if (TREE_CODE (arg0) == COMPLEX_CST)
8096 {
8097 tree itype = TREE_TYPE (type);
8098 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8099 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8100 return build_complex (type, rpart, negate_expr (ipart));
8101 }
8102 if (TREE_CODE (arg0) == CONJ_EXPR)
8103 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8104 return NULL_TREE;
8105
8106 case BIT_NOT_EXPR:
8107 if (TREE_CODE (arg0) == INTEGER_CST)
8108 return fold_not_const (arg0, type);
8109 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8110 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8111 /* Convert ~ (-A) to A - 1. */
8112 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8113 return fold_build2_loc (loc, MINUS_EXPR, type,
8114 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8115 build_int_cst (type, 1));
8116 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8117 else if (INTEGRAL_TYPE_P (type)
8118 && ((TREE_CODE (arg0) == MINUS_EXPR
8119 && integer_onep (TREE_OPERAND (arg0, 1)))
8120 || (TREE_CODE (arg0) == PLUS_EXPR
8121 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8122 return fold_build1_loc (loc, NEGATE_EXPR, type,
8123 fold_convert_loc (loc, type,
8124 TREE_OPERAND (arg0, 0)));
8125 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8126 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8127 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8128 fold_convert_loc (loc, type,
8129 TREE_OPERAND (arg0, 0)))))
8130 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8131 fold_convert_loc (loc, type,
8132 TREE_OPERAND (arg0, 1)));
8133 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8134 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8135 fold_convert_loc (loc, type,
8136 TREE_OPERAND (arg0, 1)))))
8137 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8138 fold_convert_loc (loc, type,
8139 TREE_OPERAND (arg0, 0)), tem);
8140 /* Perform BIT_NOT_EXPR on each element individually. */
8141 else if (TREE_CODE (arg0) == VECTOR_CST)
8142 {
8143 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8144 int count = TYPE_VECTOR_SUBPARTS (type), i;
8145
8146 for (i = 0; i < count; i++)
8147 {
8148 if (elements)
8149 {
8150 elem = TREE_VALUE (elements);
8151 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8152 if (elem == NULL_TREE)
8153 break;
8154 elements = TREE_CHAIN (elements);
8155 }
8156 else
8157 elem = build_int_cst (TREE_TYPE (type), -1);
8158 list = tree_cons (NULL_TREE, elem, list);
8159 }
8160 if (i == count)
8161 return build_vector (type, nreverse (list));
8162 }
8163
8164 return NULL_TREE;
8165
8166 case TRUTH_NOT_EXPR:
8167 /* The argument to invert_truthvalue must have Boolean type. */
8168 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8169 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8170
8171 /* Note that the operand of this must be an int
8172 and its values must be 0 or 1.
8173 ("true" is a fixed value perhaps depending on the language,
8174 but we don't handle values other than 1 correctly yet.) */
8175 tem = fold_truth_not_expr (loc, arg0);
8176 if (!tem)
8177 return NULL_TREE;
8178 return fold_convert_loc (loc, type, tem);
8179
8180 case REALPART_EXPR:
8181 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8182 return fold_convert_loc (loc, type, arg0);
8183 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8184 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8185 TREE_OPERAND (arg0, 1));
8186 if (TREE_CODE (arg0) == COMPLEX_CST)
8187 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8188 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8189 {
8190 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8191 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8192 fold_build1_loc (loc, REALPART_EXPR, itype,
8193 TREE_OPERAND (arg0, 0)),
8194 fold_build1_loc (loc, REALPART_EXPR, itype,
8195 TREE_OPERAND (arg0, 1)));
8196 return fold_convert_loc (loc, type, tem);
8197 }
8198 if (TREE_CODE (arg0) == CONJ_EXPR)
8199 {
8200 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8201 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8202 TREE_OPERAND (arg0, 0));
8203 return fold_convert_loc (loc, type, tem);
8204 }
8205 if (TREE_CODE (arg0) == CALL_EXPR)
8206 {
8207 tree fn = get_callee_fndecl (arg0);
8208 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8209 switch (DECL_FUNCTION_CODE (fn))
8210 {
8211 CASE_FLT_FN (BUILT_IN_CEXPI):
8212 fn = mathfn_built_in (type, BUILT_IN_COS);
8213 if (fn)
8214 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8215 break;
8216
8217 default:
8218 break;
8219 }
8220 }
8221 return NULL_TREE;
8222
8223 case IMAGPART_EXPR:
8224 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8225 return build_zero_cst (type);
8226 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8227 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8228 TREE_OPERAND (arg0, 0));
8229 if (TREE_CODE (arg0) == COMPLEX_CST)
8230 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8231 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8232 {
8233 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8234 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8235 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8236 TREE_OPERAND (arg0, 0)),
8237 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8238 TREE_OPERAND (arg0, 1)));
8239 return fold_convert_loc (loc, type, tem);
8240 }
8241 if (TREE_CODE (arg0) == CONJ_EXPR)
8242 {
8243 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8244 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8245 return fold_convert_loc (loc, type, negate_expr (tem));
8246 }
8247 if (TREE_CODE (arg0) == CALL_EXPR)
8248 {
8249 tree fn = get_callee_fndecl (arg0);
8250 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8251 switch (DECL_FUNCTION_CODE (fn))
8252 {
8253 CASE_FLT_FN (BUILT_IN_CEXPI):
8254 fn = mathfn_built_in (type, BUILT_IN_SIN);
8255 if (fn)
8256 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8257 break;
8258
8259 default:
8260 break;
8261 }
8262 }
8263 return NULL_TREE;
8264
8265 case INDIRECT_REF:
8266 /* Fold *&X to X if X is an lvalue. */
8267 if (TREE_CODE (op0) == ADDR_EXPR)
8268 {
8269 tree op00 = TREE_OPERAND (op0, 0);
8270 if ((TREE_CODE (op00) == VAR_DECL
8271 || TREE_CODE (op00) == PARM_DECL
8272 || TREE_CODE (op00) == RESULT_DECL)
8273 && !TREE_READONLY (op00))
8274 return op00;
8275 }
8276 return NULL_TREE;
8277
8278 default:
8279 return NULL_TREE;
8280 } /* switch (code) */
8281 }
8282
8283
8284 /* If the operation was a conversion do _not_ mark a resulting constant
8285 with TREE_OVERFLOW if the original constant was not. These conversions
8286 have implementation defined behavior and retaining the TREE_OVERFLOW
8287 flag here would confuse later passes such as VRP. */
8288 tree
8289 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8290 tree type, tree op0)
8291 {
8292 tree res = fold_unary_loc (loc, code, type, op0);
8293 if (res
8294 && TREE_CODE (res) == INTEGER_CST
8295 && TREE_CODE (op0) == INTEGER_CST
8296 && CONVERT_EXPR_CODE_P (code))
8297 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8298
8299 return res;
8300 }
8301
8302 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8303 operands OP0 and OP1. LOC is the location of the resulting expression.
8304 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8305 Return the folded expression if folding is successful. Otherwise,
8306 return NULL_TREE. */
8307 static tree
8308 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8309 tree arg0, tree arg1, tree op0, tree op1)
8310 {
8311 tree tem;
8312
8313 /* We only do these simplifications if we are optimizing. */
8314 if (!optimize)
8315 return NULL_TREE;
8316
8317 /* Check for things like (A || B) && (A || C). We can convert this
8318 to A || (B && C). Note that either operator can be any of the four
8319 truth and/or operations and the transformation will still be
8320 valid. Also note that we only care about order for the
8321 ANDIF and ORIF operators. If B contains side effects, this
8322 might change the truth-value of A. */
8323 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8324 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8325 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8326 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8327 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8328 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8329 {
8330 tree a00 = TREE_OPERAND (arg0, 0);
8331 tree a01 = TREE_OPERAND (arg0, 1);
8332 tree a10 = TREE_OPERAND (arg1, 0);
8333 tree a11 = TREE_OPERAND (arg1, 1);
8334 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8335 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8336 && (code == TRUTH_AND_EXPR
8337 || code == TRUTH_OR_EXPR));
8338
8339 if (operand_equal_p (a00, a10, 0))
8340 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8341 fold_build2_loc (loc, code, type, a01, a11));
8342 else if (commutative && operand_equal_p (a00, a11, 0))
8343 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8344 fold_build2_loc (loc, code, type, a01, a10));
8345 else if (commutative && operand_equal_p (a01, a10, 0))
8346 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8347 fold_build2_loc (loc, code, type, a00, a11));
8348
8349 /* This case is tricky because we must either have commutative
8350 operators or else A10 must not have side-effects. */
8351
8352 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8353 && operand_equal_p (a01, a11, 0))
8354 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8355 fold_build2_loc (loc, code, type, a00, a10),
8356 a01);
8357 }
8358
8359 /* See if we can build a range comparison. */
8360 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8361 return tem;
8362
8363 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8364 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8365 {
8366 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8367 if (tem)
8368 return fold_build2_loc (loc, code, type, tem, arg1);
8369 }
8370
8371 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8372 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8373 {
8374 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8375 if (tem)
8376 return fold_build2_loc (loc, code, type, arg0, tem);
8377 }
8378
8379 /* Check for the possibility of merging component references. If our
8380 lhs is another similar operation, try to merge its rhs with our
8381 rhs. Then try to merge our lhs and rhs. */
8382 if (TREE_CODE (arg0) == code
8383 && 0 != (tem = fold_truthop (loc, code, type,
8384 TREE_OPERAND (arg0, 1), arg1)))
8385 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8386
8387 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
8388 return tem;
8389
8390 return NULL_TREE;
8391 }
8392
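/* A standalone sketch (hypothetical) exhausting the factoring above over
   all boolean assignments: (A || B) && (A || C) == A || (B && C).  */

static int
example_truth_factoring (void)
{
  int a, b, c;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      for (c = 0; c <= 1; c++)
	if (((a || b) && (a || c)) != (a || (b && c)))
	  return 0;
  return 1;
}
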
8393 /* Fold a binary expression of code CODE and type TYPE with operands
8394 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8395 Return the folded expression if folding is successful. Otherwise,
8396 return NULL_TREE. */
8397
8398 static tree
8399 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8400 {
8401 enum tree_code compl_code;
8402
8403 if (code == MIN_EXPR)
8404 compl_code = MAX_EXPR;
8405 else if (code == MAX_EXPR)
8406 compl_code = MIN_EXPR;
8407 else
8408 gcc_unreachable ();
8409
8410 /* MIN (MAX (a, b), b) == b. */
8411 if (TREE_CODE (op0) == compl_code
8412 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8413 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8414
8415 /* MIN (MAX (b, a), b) == b. */
8416 if (TREE_CODE (op0) == compl_code
8417 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8418 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8419 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8420
8421 /* MIN (a, MAX (a, b)) == a. */
8422 if (TREE_CODE (op1) == compl_code
8423 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8424 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8425 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8426
8427 /* MIN (a, MAX (b, a)) == a. */
8428 if (TREE_CODE (op1) == compl_code
8429 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8430 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8431 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8432
8433 return NULL_TREE;
8434 }
8435
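/* A standalone sketch (hypothetical) of the absorption identities folded
   by fold_minmax, using plain integer MIN/MAX.  Returns nonzero iff they
   hold for the given operands.  */

static int
example_minmax_absorption (int a, int b)
{
  int max_ab = a > b ? a : b;
  int min_ab = a < b ? a : b;
  return (max_ab < b ? max_ab : b) == b		/* MIN (MAX (a, b), b) == b */
	 && (min_ab > b ? min_ab : b) == b	/* MAX (MIN (a, b), b) == b */
	 && (a < max_ab ? a : max_ab) == a;	/* MIN (a, MAX (a, b)) == a */
}
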
8436 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8437 by changing CODE to reduce the magnitude of constants involved in
8438 ARG0 of the comparison.
8439 Returns a canonicalized comparison tree if a simplification was
8440 possible, otherwise returns NULL_TREE.
8441 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8442 valid if signed overflow is undefined. */
8443
8444 static tree
8445 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8446 tree arg0, tree arg1,
8447 bool *strict_overflow_p)
8448 {
8449 enum tree_code code0 = TREE_CODE (arg0);
8450 tree t, cst0 = NULL_TREE;
8451 int sgn0;
8452 bool swap = false;
8453
8454 /* Match A +- CST code arg1 and CST code arg1. We can change the
8455 first form only if overflow is undefined. */
8456 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8457 /* In principle pointers also have undefined overflow behavior,
8458 but that causes problems elsewhere. */
8459 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8460 && (code0 == MINUS_EXPR
8461 || code0 == PLUS_EXPR)
8462 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8463 || code0 == INTEGER_CST))
8464 return NULL_TREE;
8465
8466 /* Identify the constant in arg0 and its sign. */
8467 if (code0 == INTEGER_CST)
8468 cst0 = arg0;
8469 else
8470 cst0 = TREE_OPERAND (arg0, 1);
8471 sgn0 = tree_int_cst_sgn (cst0);
8472
8473 /* Overflowed constants and zero will cause problems. */
8474 if (integer_zerop (cst0)
8475 || TREE_OVERFLOW (cst0))
8476 return NULL_TREE;
8477
8478 /* See if we can reduce the magnitude of the constant in
8479 arg0 by changing the comparison code. */
8480 if (code0 == INTEGER_CST)
8481 {
8482 /* CST <= arg1 -> CST-1 < arg1. */
8483 if (code == LE_EXPR && sgn0 == 1)
8484 code = LT_EXPR;
8485 /* -CST < arg1 -> -CST-1 <= arg1. */
8486 else if (code == LT_EXPR && sgn0 == -1)
8487 code = LE_EXPR;
8488 /* CST > arg1 -> CST-1 >= arg1. */
8489 else if (code == GT_EXPR && sgn0 == 1)
8490 code = GE_EXPR;
8491 /* -CST >= arg1 -> -CST-1 > arg1. */
8492 else if (code == GE_EXPR && sgn0 == -1)
8493 code = GT_EXPR;
8494 else
8495 return NULL_TREE;
8496 /* arg1 code' CST' might be more canonical. */
8497 swap = true;
8498 }
8499 else
8500 {
8501 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8502 if (code == LT_EXPR
8503 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8504 code = LE_EXPR;
8505 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8506 else if (code == GT_EXPR
8507 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8508 code = GE_EXPR;
8509 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8510 else if (code == LE_EXPR
8511 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8512 code = LT_EXPR;
8513 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8514 else if (code == GE_EXPR
8515 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8516 code = GT_EXPR;
8517 else
8518 return NULL_TREE;
8519 *strict_overflow_p = true;
8520 }
8521
8522 /* Now build the constant reduced in magnitude. But not if that
8523 would produce one outside of its type's range. */
8524 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8525 && ((sgn0 == 1
8526 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8527 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8528 || (sgn0 == -1
8529 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8530 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8531 /* We cannot swap the comparison here as that would cause us to
8532 endlessly recurse. */
8533 return NULL_TREE;
8534
8535 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8536 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8537 if (code0 != INTEGER_CST)
8538 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8539 t = fold_convert (TREE_TYPE (arg1), t);
8540
8541 /* If swapping might yield a more canonical form, do so. */
8542 if (swap)
8543 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8544 else
8545 return fold_build2_loc (loc, code, type, t, arg1);
8546 }
8547
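/* A standalone sketch (hypothetical) of one magnitude-reducing rewrite
   from above: when A + CST cannot overflow, A + CST <= B is equivalent to
   A + (CST - 1) < B, which involves a constant smaller in magnitude.  */

static int
example_reduce_magnitude (void)
{
  int a, b;
  for (a = -10; a <= 10; a++)
    for (b = -10; b <= 10; b++)
      if ((a + 2 <= b) != (a + 1 < b))
	return 0;
  return 1;
}
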
8548 /* Further canonicalize the comparison ARG0 CODE ARG1 with type TYPE with
8549 undefined overflow. Try to decrease the magnitude of constants involved
8550 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8551 and put sole constants at the second argument position.
8552 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8553
8554 static tree
8555 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8556 tree arg0, tree arg1)
8557 {
8558 tree t;
8559 bool strict_overflow_p;
8560 const char * const warnmsg = G_("assuming signed overflow does not occur "
8561 "when reducing constant in comparison");
8562
8563 /* Try canonicalization by simplifying arg0. */
8564 strict_overflow_p = false;
8565 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8566 &strict_overflow_p);
8567 if (t)
8568 {
8569 if (strict_overflow_p)
8570 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8571 return t;
8572 }
8573
8574 /* Try canonicalization by simplifying arg1 using the swapped
8575 comparison. */
8576 code = swap_tree_comparison (code);
8577 strict_overflow_p = false;
8578 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8579 &strict_overflow_p);
8580 if (t && strict_overflow_p)
8581 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8582 return t;
8583 }
8584
8585 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8586 space. This is used to avoid issuing overflow warnings for
8587 expressions like &p->x which cannot wrap. */
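/* For instance, for "struct S { int a; int b; } *p" with 4-byte int,
   the address &p->b adds an in-object offset of 4 to P; since that is
   within the size of *P, the function reports that no wraparound is
   possible.  */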
8588
8589 static bool
8590 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8591 {
8592 unsigned HOST_WIDE_INT offset_low, total_low;
8593 HOST_WIDE_INT size, offset_high, total_high;
8594
8595 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8596 return true;
8597
8598 if (bitpos < 0)
8599 return true;
8600
8601 if (offset == NULL_TREE)
8602 {
8603 offset_low = 0;
8604 offset_high = 0;
8605 }
8606 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8607 return true;
8608 else
8609 {
8610 offset_low = TREE_INT_CST_LOW (offset);
8611 offset_high = TREE_INT_CST_HIGH (offset);
8612 }
8613
8614 if (add_double_with_sign (offset_low, offset_high,
8615 bitpos / BITS_PER_UNIT, 0,
8616 &total_low, &total_high,
8617 true))
8618 return true;
8619
8620 if (total_high != 0)
8621 return true;
8622
8623 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8624 if (size <= 0)
8625 return true;
8626
8627 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8628 array. */
8629 if (TREE_CODE (base) == ADDR_EXPR)
8630 {
8631 HOST_WIDE_INT base_size;
8632
8633 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8634 if (base_size > 0 && size < base_size)
8635 size = base_size;
8636 }
8637
8638 return total_low > (unsigned HOST_WIDE_INT) size;
8639 }
8640
8641 /* Subroutine of fold_binary. This routine performs all of the
8642 transformations that are common to the equality/inequality
8643 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8644 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8645 fold_binary itself should go through fold_binary instead. Fold a comparison with
8646 tree code CODE and type TYPE with operands OP0 and OP1. Return
8647 the folded comparison or NULL_TREE. */
8648
8649 static tree
8650 fold_comparison (location_t loc, enum tree_code code, tree type,
8651 tree op0, tree op1)
8652 {
8653 tree arg0, arg1, tem;
8654
8655 arg0 = op0;
8656 arg1 = op1;
8657
8658 STRIP_SIGN_NOPS (arg0);
8659 STRIP_SIGN_NOPS (arg1);
8660
8661 tem = fold_relational_const (code, type, arg0, arg1);
8662 if (tem != NULL_TREE)
8663 return tem;
8664
8665 /* If one arg is a real or integer constant, put it last. */
8666 if (tree_swap_operands_p (arg0, arg1, true))
8667 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8668
8669 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
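/* For example, assuming signed overflow is undefined in the type:
     x + 20 < 30   becomes   x < 10
   and when the combined constant overflows, the comparison folds to
   a constant outright, e.g. x - 1 > INT_MAX becomes 0.  */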
8670 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8671 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8672 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8673 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8674 && (TREE_CODE (arg1) == INTEGER_CST
8675 && !TREE_OVERFLOW (arg1)))
8676 {
8677 tree const1 = TREE_OPERAND (arg0, 1);
8678 tree const2 = arg1;
8679 tree variable = TREE_OPERAND (arg0, 0);
8680 tree lhs;
8681 int lhs_add;
8682 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8683
8684 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8685 TREE_TYPE (arg1), const2, const1);
8686
8687 /* If the constant operation overflowed this can be
8688 simplified as a comparison against INT_MAX/INT_MIN. */
8689 if (TREE_CODE (lhs) == INTEGER_CST
8690 && TREE_OVERFLOW (lhs))
8691 {
8692 int const1_sgn = tree_int_cst_sgn (const1);
8693 enum tree_code code2 = code;
8694
8695 /* Get the sign of the constant on the lhs if the
8696 operation were VARIABLE + CONST1. */
8697 if (TREE_CODE (arg0) == MINUS_EXPR)
8698 const1_sgn = -const1_sgn;
8699
8700 /* The sign of the constant determines if we overflowed
8701 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8702 Canonicalize to the INT_MIN overflow by swapping the comparison
8703 if necessary. */
8704 if (const1_sgn == -1)
8705 code2 = swap_tree_comparison (code);
8706
8707 /* We now can look at the canonicalized case
8708 VARIABLE + 1 CODE2 INT_MIN
8709 and decide on the result. */
8710 if (code2 == LT_EXPR
8711 || code2 == LE_EXPR
8712 || code2 == EQ_EXPR)
8713 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8714 else if (code2 == NE_EXPR
8715 || code2 == GE_EXPR
8716 || code2 == GT_EXPR)
8717 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8718 }
8719
8720 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8721 && (TREE_CODE (lhs) != INTEGER_CST
8722 || !TREE_OVERFLOW (lhs)))
8723 {
8724 if (code != EQ_EXPR && code != NE_EXPR)
8725 fold_overflow_warning ("assuming signed overflow does not occur "
8726 "when changing X +- C1 cmp C2 to "
8727 "X cmp C1 +- C2",
8728 WARN_STRICT_OVERFLOW_COMPARISON);
8729 return fold_build2_loc (loc, code, type, variable, lhs);
8730 }
8731 }
8732
8733 /* For comparisons of pointers we can decompose it to a compile time
8734 comparison of the base objects and the offsets into the object.
8735 This requires at least one operand being an ADDR_EXPR or a
8736 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
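/* For instance, given "struct S { int a; int b; } s;" with 4-byte int,
   the comparison &s.a < &s.b decomposes to the common base "s" with
   bit positions 0 and 32, so it folds to the constant 1 below.  */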
8737 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8738 && (TREE_CODE (arg0) == ADDR_EXPR
8739 || TREE_CODE (arg1) == ADDR_EXPR
8740 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8741 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8742 {
8743 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8744 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8745 enum machine_mode mode;
8746 int volatilep, unsignedp;
8747 bool indirect_base0 = false, indirect_base1 = false;
8748
8749 /* Get base and offset for the access. Strip ADDR_EXPR for
8750 get_inner_reference, but put it back by stripping INDIRECT_REF
8751 off the base object if possible. indirect_baseN will be true
8752 if baseN is not an address but refers to the object itself. */
8753 base0 = arg0;
8754 if (TREE_CODE (arg0) == ADDR_EXPR)
8755 {
8756 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8757 &bitsize, &bitpos0, &offset0, &mode,
8758 &unsignedp, &volatilep, false);
8759 if (TREE_CODE (base0) == INDIRECT_REF)
8760 base0 = TREE_OPERAND (base0, 0);
8761 else
8762 indirect_base0 = true;
8763 }
8764 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8765 {
8766 base0 = TREE_OPERAND (arg0, 0);
8767 STRIP_SIGN_NOPS (base0);
8768 if (TREE_CODE (base0) == ADDR_EXPR)
8769 {
8770 base0 = TREE_OPERAND (base0, 0);
8771 indirect_base0 = true;
8772 }
8773 offset0 = TREE_OPERAND (arg0, 1);
8774 }
8775
8776 base1 = arg1;
8777 if (TREE_CODE (arg1) == ADDR_EXPR)
8778 {
8779 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8780 &bitsize, &bitpos1, &offset1, &mode,
8781 &unsignedp, &volatilep, false);
8782 if (TREE_CODE (base1) == INDIRECT_REF)
8783 base1 = TREE_OPERAND (base1, 0);
8784 else
8785 indirect_base1 = true;
8786 }
8787 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8788 {
8789 base1 = TREE_OPERAND (arg1, 0);
8790 STRIP_SIGN_NOPS (base1);
8791 if (TREE_CODE (base1) == ADDR_EXPR)
8792 {
8793 base1 = TREE_OPERAND (base1, 0);
8794 indirect_base1 = true;
8795 }
8796 offset1 = TREE_OPERAND (arg1, 1);
8797 }
8798
8799 /* A local variable can never be pointed to by
8800 the default SSA name of an incoming parameter. */
8801 if ((TREE_CODE (arg0) == ADDR_EXPR
8802 && indirect_base0
8803 && TREE_CODE (base0) == VAR_DECL
8804 && auto_var_in_fn_p (base0, current_function_decl)
8805 && !indirect_base1
8806 && TREE_CODE (base1) == SSA_NAME
8807 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8808 && SSA_NAME_IS_DEFAULT_DEF (base1))
8809 || (TREE_CODE (arg1) == ADDR_EXPR
8810 && indirect_base1
8811 && TREE_CODE (base1) == VAR_DECL
8812 && auto_var_in_fn_p (base1, current_function_decl)
8813 && !indirect_base0
8814 && TREE_CODE (base0) == SSA_NAME
8815 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8816 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8817 {
8818 if (code == NE_EXPR)
8819 return constant_boolean_node (1, type);
8820 else if (code == EQ_EXPR)
8821 return constant_boolean_node (0, type);
8822 }
8823 /* If we have equivalent bases we might be able to simplify. */
8824 else if (indirect_base0 == indirect_base1
8825 && operand_equal_p (base0, base1, 0))
8826 {
8827 /* We can fold this expression to a constant if the non-constant
8828 offset parts are equal. */
8829 if ((offset0 == offset1
8830 || (offset0 && offset1
8831 && operand_equal_p (offset0, offset1, 0)))
8832 && (code == EQ_EXPR
8833 || code == NE_EXPR
8834 || (indirect_base0 && DECL_P (base0))
8835 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8836
8837 {
8838 if (code != EQ_EXPR
8839 && code != NE_EXPR
8840 && bitpos0 != bitpos1
8841 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8842 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8843 fold_overflow_warning (("assuming pointer wraparound does not "
8844 "occur when comparing P +- C1 with "
8845 "P +- C2"),
8846 WARN_STRICT_OVERFLOW_CONDITIONAL);
8847
8848 switch (code)
8849 {
8850 case EQ_EXPR:
8851 return constant_boolean_node (bitpos0 == bitpos1, type);
8852 case NE_EXPR:
8853 return constant_boolean_node (bitpos0 != bitpos1, type);
8854 case LT_EXPR:
8855 return constant_boolean_node (bitpos0 < bitpos1, type);
8856 case LE_EXPR:
8857 return constant_boolean_node (bitpos0 <= bitpos1, type);
8858 case GE_EXPR:
8859 return constant_boolean_node (bitpos0 >= bitpos1, type);
8860 case GT_EXPR:
8861 return constant_boolean_node (bitpos0 > bitpos1, type);
8862 default:;
8863 }
8864 }
8865 /* We can simplify the comparison to a comparison of the variable
8866 offset parts if the constant offset parts are equal.
8867 Be careful to use signed size type here because otherwise we
8868 mess with array offsets in the wrong way. This is possible
8869 because pointer arithmetic is restricted to remain within an
8870 object and overflow on pointer differences is undefined as of
8871 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8872 else if (bitpos0 == bitpos1
8873 && ((code == EQ_EXPR || code == NE_EXPR)
8874 || (indirect_base0 && DECL_P (base0))
8875 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8876 {
8877 /* By converting to signed size type we cover middle-end pointer
8878 arithmetic which operates on unsigned pointer types of size
8879 type size and ARRAY_REF offsets which are properly sign or
8880 zero extended from their type in case it is narrower than
8881 size type. */
8882 if (offset0 == NULL_TREE)
8883 offset0 = build_int_cst (ssizetype, 0);
8884 else
8885 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8886 if (offset1 == NULL_TREE)
8887 offset1 = build_int_cst (ssizetype, 0);
8888 else
8889 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8890
8891 if (code != EQ_EXPR
8892 && code != NE_EXPR
8893 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8894 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8895 fold_overflow_warning (("assuming pointer wraparound does not "
8896 "occur when comparing P +- C1 with "
8897 "P +- C2"),
8898 WARN_STRICT_OVERFLOW_COMPARISON);
8899
8900 return fold_build2_loc (loc, code, type, offset0, offset1);
8901 }
8902 }
8903 /* For non-equal bases we can simplify if they are addresses
8904 of local binding decls or constants. */
8905 else if (indirect_base0 && indirect_base1
8906 /* We know that !operand_equal_p (base0, base1, 0)
8907 because the if condition was false. But make
8908 sure two decls are not the same. */
8909 && base0 != base1
8910 && TREE_CODE (arg0) == ADDR_EXPR
8911 && TREE_CODE (arg1) == ADDR_EXPR
8912 && (((TREE_CODE (base0) == VAR_DECL
8913 || TREE_CODE (base0) == PARM_DECL)
8914 && (targetm.binds_local_p (base0)
8915 || CONSTANT_CLASS_P (base1)))
8916 || CONSTANT_CLASS_P (base0))
8917 && (((TREE_CODE (base1) == VAR_DECL
8918 || TREE_CODE (base1) == PARM_DECL)
8919 && (targetm.binds_local_p (base1)
8920 || CONSTANT_CLASS_P (base0)))
8921 || CONSTANT_CLASS_P (base1)))
8922 {
8923 if (code == EQ_EXPR)
8924 return omit_two_operands_loc (loc, type, boolean_false_node,
8925 arg0, arg1);
8926 else if (code == NE_EXPR)
8927 return omit_two_operands_loc (loc, type, boolean_true_node,
8928 arg0, arg1);
8929 }
8930 /* For equal offsets we can simplify to a comparison of the
8931 base addresses. */
8932 else if (bitpos0 == bitpos1
8933 && (indirect_base0
8934 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8935 && (indirect_base1
8936 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8937 && ((offset0 == offset1)
8938 || (offset0 && offset1
8939 && operand_equal_p (offset0, offset1, 0))))
8940 {
8941 if (indirect_base0)
8942 base0 = build_fold_addr_expr_loc (loc, base0);
8943 if (indirect_base1)
8944 base1 = build_fold_addr_expr_loc (loc, base1);
8945 return fold_build2_loc (loc, code, type, base0, base1);
8946 }
8947 }
8948
8949 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8950 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8951 the resulting offset is smaller in absolute value than the
8952 original one. */
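/* For example, with signed x and y (undefined overflow assumed):
     x + 7 < y + 3   becomes   x < y - 4
   where the combined constant 4 is smaller in magnitude than the
   original 7.  */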
8953 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8954 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8955 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8956 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8957 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8958 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8959 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8960 {
8961 tree const1 = TREE_OPERAND (arg0, 1);
8962 tree const2 = TREE_OPERAND (arg1, 1);
8963 tree variable1 = TREE_OPERAND (arg0, 0);
8964 tree variable2 = TREE_OPERAND (arg1, 0);
8965 tree cst;
8966 const char * const warnmsg = G_("assuming signed overflow does not "
8967 "occur when combining constants around "
8968 "a comparison");
8969
8970 /* Put the constant on the side where it doesn't overflow and is
8971 of lower absolute value than before. */
8972 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8973 ? MINUS_EXPR : PLUS_EXPR,
8974 const2, const1);
8975 if (!TREE_OVERFLOW (cst)
8976 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8977 {
8978 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8979 return fold_build2_loc (loc, code, type,
8980 variable1,
8981 fold_build2_loc (loc,
8982 TREE_CODE (arg1), TREE_TYPE (arg1),
8983 variable2, cst));
8984 }
8985
8986 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8987 ? MINUS_EXPR : PLUS_EXPR,
8988 const1, const2);
8989 if (!TREE_OVERFLOW (cst)
8990 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8991 {
8992 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8993 return fold_build2_loc (loc, code, type,
8994 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8995 variable1, cst),
8996 variable2);
8997 }
8998 }
8999
9000 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9001 signed arithmetic case. That form is created by the compiler
9002 often enough for folding it to be of value. One example is in
9003 computing loop trip counts after Operator Strength Reduction. */
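/* For example, for signed x: x * 4 > 0 becomes x > 0, while
   x * -2 > 0 becomes x < 0, since a negative multiplier flips the
   sense of the comparison.  */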
9004 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9005 && TREE_CODE (arg0) == MULT_EXPR
9006 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9007 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9008 && integer_zerop (arg1))
9009 {
9010 tree const1 = TREE_OPERAND (arg0, 1);
9011 tree const2 = arg1; /* zero */
9012 tree variable1 = TREE_OPERAND (arg0, 0);
9013 enum tree_code cmp_code = code;
9014
9015 /* Handle unfolded multiplication by zero. */
9016 if (integer_zerop (const1))
9017 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9018
9019 fold_overflow_warning (("assuming signed overflow does not occur when "
9020 "eliminating multiplication in comparison "
9021 "with zero"),
9022 WARN_STRICT_OVERFLOW_COMPARISON);
9023
9024 /* If const1 is negative we swap the sense of the comparison. */
9025 if (tree_int_cst_sgn (const1) < 0)
9026 cmp_code = swap_tree_comparison (cmp_code);
9027
9028 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9029 }
9030
9031 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9032 if (tem)
9033 return tem;
9034
9035 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9036 {
9037 tree targ0 = strip_float_extensions (arg0);
9038 tree targ1 = strip_float_extensions (arg1);
9039 tree newtype = TREE_TYPE (targ0);
9040
9041 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9042 newtype = TREE_TYPE (targ1);
9043
9044 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9045 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9046 return fold_build2_loc (loc, code, type,
9047 fold_convert_loc (loc, newtype, targ0),
9048 fold_convert_loc (loc, newtype, targ1));
9049
9050 /* (-a) CMP (-b) -> b CMP a */
9051 if (TREE_CODE (arg0) == NEGATE_EXPR
9052 && TREE_CODE (arg1) == NEGATE_EXPR)
9053 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9054 TREE_OPERAND (arg0, 0));
9055
9056 if (TREE_CODE (arg1) == REAL_CST)
9057 {
9058 REAL_VALUE_TYPE cst;
9059 cst = TREE_REAL_CST (arg1);
9060
9061 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9062 if (TREE_CODE (arg0) == NEGATE_EXPR)
9063 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9064 TREE_OPERAND (arg0, 0),
9065 build_real (TREE_TYPE (arg1),
9066 real_value_negate (&cst)));
9067
9068 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9069 /* a CMP (-0) -> a CMP 0 */
9070 if (REAL_VALUE_MINUS_ZERO (cst))
9071 return fold_build2_loc (loc, code, type, arg0,
9072 build_real (TREE_TYPE (arg1), dconst0));
9073
9074 /* x != NaN is always true, other ops are always false. */
9075 if (REAL_VALUE_ISNAN (cst)
9076 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9077 {
9078 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9079 return omit_one_operand_loc (loc, type, tem, arg0);
9080 }
9081
9082 /* Fold comparisons against infinity. */
9083 if (REAL_VALUE_ISINF (cst)
9084 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9085 {
9086 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9087 if (tem != NULL_TREE)
9088 return tem;
9089 }
9090 }
9091
9092 /* If this is a comparison of a real constant with a PLUS_EXPR
9093 or a MINUS_EXPR of a real constant, we can convert it into a
9094 comparison with a revised real constant as long as no overflow
9095 occurs when unsafe_math_optimizations are enabled. */
9096 if (flag_unsafe_math_optimizations
9097 && TREE_CODE (arg1) == REAL_CST
9098 && (TREE_CODE (arg0) == PLUS_EXPR
9099 || TREE_CODE (arg0) == MINUS_EXPR)
9100 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9101 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9102 ? MINUS_EXPR : PLUS_EXPR,
9103 arg1, TREE_OPERAND (arg0, 1)))
9104 && !TREE_OVERFLOW (tem))
9105 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9106
9107 /* Likewise, we can simplify a comparison of a real constant with
9108 a MINUS_EXPR whose first operand is also a real constant, i.e.
9109 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9110 floating-point types only if -fassociative-math is set. */
9111 if (flag_associative_math
9112 && TREE_CODE (arg1) == REAL_CST
9113 && TREE_CODE (arg0) == MINUS_EXPR
9114 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9115 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9116 arg1))
9117 && !TREE_OVERFLOW (tem))
9118 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9119 TREE_OPERAND (arg0, 1), tem);
9120
9121 /* Fold comparisons against built-in math functions. */
9122 if (TREE_CODE (arg1) == REAL_CST
9123 && flag_unsafe_math_optimizations
9124 && ! flag_errno_math)
9125 {
9126 enum built_in_function fcode = builtin_mathfn_code (arg0);
9127
9128 if (fcode != END_BUILTINS)
9129 {
9130 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9131 if (tem != NULL_TREE)
9132 return tem;
9133 }
9134 }
9135 }
9136
9137 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9138 && CONVERT_EXPR_P (arg0))
9139 {
9140 /* If we are widening one operand of an integer comparison,
9141 see if the other operand is similarly being widened. Perhaps we
9142 can do the comparison in the narrower type. */
9143 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9144 if (tem)
9145 return tem;
9146
9147 /* Or if we are changing signedness. */
9148 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9149 if (tem)
9150 return tem;
9151 }
9152
9153 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9154 constant, we can simplify it. */
9155 if (TREE_CODE (arg1) == INTEGER_CST
9156 && (TREE_CODE (arg0) == MIN_EXPR
9157 || TREE_CODE (arg0) == MAX_EXPR)
9158 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9159 {
9160 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9161 if (tem)
9162 return tem;
9163 }
9164
9165 /* Simplify comparison of something with itself. (For IEEE
9166 floating-point, we can only do some of these simplifications.) */
9167 if (operand_equal_p (arg0, arg1, 0))
9168 {
9169 switch (code)
9170 {
9171 case EQ_EXPR:
9172 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9173 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9174 return constant_boolean_node (1, type);
9175 break;
9176
9177 case GE_EXPR:
9178 case LE_EXPR:
9179 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9180 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9181 return constant_boolean_node (1, type);
9182 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9183
9184 case NE_EXPR:
9185 /* For NE, we can only do this simplification if integer
9186 or we don't honor IEEE floating point NaNs. */
9187 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9188 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9189 break;
9190 /* ... fall through ... */
9191 case GT_EXPR:
9192 case LT_EXPR:
9193 return constant_boolean_node (0, type);
9194 default:
9195 gcc_unreachable ();
9196 }
9197 }
9198
9199 /* If we are comparing an expression that just has comparisons
9200 of two integer values, arithmetic expressions of those comparisons,
9201 and constants, we can simplify it. There are only three cases
9202 to check: the two values can either be equal, the first can be
9203 greater, or the second can be greater. Fold the expression for
9204 those three values. Since each value must be 0 or 1, we have
9205 eight possibilities, each of which corresponds to the constant 0
9206 or 1 or one of the six possible comparisons.
9207
9208 This handles common cases like (a > b) == 0 but also handles
9209 expressions like ((x > y) - (y > x)) > 0, which supposedly
9210 occur in macroized code. */
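/* As a worked instance, take (a > b) == 0: the three substitutions
   yield high_result = 0, equal_result = 1 and low_result = 1, i.e.
   the mask 011, which selects LE_EXPR, so the whole expression folds
   to a <= b.  */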
9211
9212 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9213 {
9214 tree cval1 = 0, cval2 = 0;
9215 int save_p = 0;
9216
9217 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9218 /* Don't handle degenerate cases here; they should already
9219 have been handled anyway. */
9220 && cval1 != 0 && cval2 != 0
9221 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9222 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9223 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9224 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9225 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9226 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9227 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9228 {
9229 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9230 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9231
9232 /* We can't just pass T to eval_subst in case cval1 or cval2
9233 was the same as ARG1. */
9234
9235 tree high_result
9236 = fold_build2_loc (loc, code, type,
9237 eval_subst (loc, arg0, cval1, maxval,
9238 cval2, minval),
9239 arg1);
9240 tree equal_result
9241 = fold_build2_loc (loc, code, type,
9242 eval_subst (loc, arg0, cval1, maxval,
9243 cval2, maxval),
9244 arg1);
9245 tree low_result
9246 = fold_build2_loc (loc, code, type,
9247 eval_subst (loc, arg0, cval1, minval,
9248 cval2, maxval),
9249 arg1);
9250
9251 /* All three of these results should be 0 or 1. Confirm they are.
9252 Then use those values to select the proper code to use. */
9253
9254 if (TREE_CODE (high_result) == INTEGER_CST
9255 && TREE_CODE (equal_result) == INTEGER_CST
9256 && TREE_CODE (low_result) == INTEGER_CST)
9257 {
9258 /* Make a 3-bit mask with the high-order bit being the
9259 value for `>', the next for '=', and the low for '<'. */
9260 switch ((integer_onep (high_result) * 4)
9261 + (integer_onep (equal_result) * 2)
9262 + integer_onep (low_result))
9263 {
9264 case 0:
9265 /* Always false. */
9266 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9267 case 1:
9268 code = LT_EXPR;
9269 break;
9270 case 2:
9271 code = EQ_EXPR;
9272 break;
9273 case 3:
9274 code = LE_EXPR;
9275 break;
9276 case 4:
9277 code = GT_EXPR;
9278 break;
9279 case 5:
9280 code = NE_EXPR;
9281 break;
9282 case 6:
9283 code = GE_EXPR;
9284 break;
9285 case 7:
9286 /* Always true. */
9287 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9288 }
9289
9290 if (save_p)
9291 {
9292 tem = save_expr (build2 (code, type, cval1, cval2));
9293 SET_EXPR_LOCATION (tem, loc);
9294 return tem;
9295 }
9296 return fold_build2_loc (loc, code, type, cval1, cval2);
9297 }
9298 }
9299 }
9300
9301 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9302 into a single range test. */
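/* For truncating division, x / 4 == 2 holds exactly when
   8 <= x && x <= 11, so fold_div_compare can replace the division
   by such a range test.  */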
9303 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9304 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9305 && TREE_CODE (arg1) == INTEGER_CST
9306 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9307 && !integer_zerop (TREE_OPERAND (arg0, 1))
9308 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9309 && !TREE_OVERFLOW (arg1))
9310 {
9311 tem = fold_div_compare (loc, code, type, arg0, arg1);
9312 if (tem != NULL_TREE)
9313 return tem;
9314 }
9315
9316 /* Fold ~X op ~Y as Y op X. */
9317 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9318 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9319 {
9320 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9321 return fold_build2_loc (loc, code, type,
9322 fold_convert_loc (loc, cmp_type,
9323 TREE_OPERAND (arg1, 0)),
9324 TREE_OPERAND (arg0, 0));
9325 }
9326
9327 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9328 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9329 && TREE_CODE (arg1) == INTEGER_CST)
9330 {
9331 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9332 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9333 TREE_OPERAND (arg0, 0),
9334 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9335 fold_convert_loc (loc, cmp_type, arg1)));
9336 }
9337
9338 return NULL_TREE;
9339 }
9340
9341
9342 /* Subroutine of fold_binary. Optimize complex multiplications of the
9343 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9344 argument EXPR represents the expression "z" of type TYPE. */
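/* This relies on the identity: for z = a + bi,
   z * conj(z) = (a + bi)(a - bi) = a*a + b*b, with a zero imaginary
   part, which is exactly what is constructed below.  */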
9345
9346 static tree
9347 fold_mult_zconjz (location_t loc, tree type, tree expr)
9348 {
9349 tree itype = TREE_TYPE (type);
9350 tree rpart, ipart, tem;
9351
9352 if (TREE_CODE (expr) == COMPLEX_EXPR)
9353 {
9354 rpart = TREE_OPERAND (expr, 0);
9355 ipart = TREE_OPERAND (expr, 1);
9356 }
9357 else if (TREE_CODE (expr) == COMPLEX_CST)
9358 {
9359 rpart = TREE_REALPART (expr);
9360 ipart = TREE_IMAGPART (expr);
9361 }
9362 else
9363 {
9364 expr = save_expr (expr);
9365 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9366 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9367 }
9368
9369 rpart = save_expr (rpart);
9370 ipart = save_expr (ipart);
9371 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9372 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9373 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9374 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9375 build_zero_cst (itype));
9376 }
9377
9378
9379 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9380 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9381 guarantees that P and N have the same least significant log2(M) bits.
9382 N is not otherwise constrained. In particular, N is not normalized to
9383 0 <= N < M as is common. In general, the precise value of P is unknown.
9384 M is chosen as large as possible such that constant N can be determined.
9385
9386 Returns M and sets *RESIDUE to N.
9387
9388 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9389 account. This is not always possible due to PR 35705.
9390 */
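/* For instance, if EXPR is &buf[3] for a 16-byte aligned "char buf[]",
   the result is M = 16 with *RESIDUE = 3.  For &buf[3] + 4*n the
   MULT_EXPR case below reduces the modulus to MIN (16, 4) = 4, the
   residue still being 3, i.e. P == 3 (mod 4).  */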
9391
9392 static unsigned HOST_WIDE_INT
9393 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9394 bool allow_func_align)
9395 {
9396 enum tree_code code;
9397
9398 *residue = 0;
9399
9400 code = TREE_CODE (expr);
9401 if (code == ADDR_EXPR)
9402 {
9403 unsigned int bitalign;
9404 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9405 *residue /= BITS_PER_UNIT;
9406 return bitalign / BITS_PER_UNIT;
9407 }
9408 else if (code == POINTER_PLUS_EXPR)
9409 {
9410 tree op0, op1;
9411 unsigned HOST_WIDE_INT modulus;
9412 enum tree_code inner_code;
9413
9414 op0 = TREE_OPERAND (expr, 0);
9415 STRIP_NOPS (op0);
9416 modulus = get_pointer_modulus_and_residue (op0, residue,
9417 allow_func_align);
9418
9419 op1 = TREE_OPERAND (expr, 1);
9420 STRIP_NOPS (op1);
9421 inner_code = TREE_CODE (op1);
9422 if (inner_code == INTEGER_CST)
9423 {
9424 *residue += TREE_INT_CST_LOW (op1);
9425 return modulus;
9426 }
9427 else if (inner_code == MULT_EXPR)
9428 {
9429 op1 = TREE_OPERAND (op1, 1);
9430 if (TREE_CODE (op1) == INTEGER_CST)
9431 {
9432 unsigned HOST_WIDE_INT align;
9433
9434 /* Compute the greatest power-of-2 divisor of op1. */
9435 align = TREE_INT_CST_LOW (op1);
9436 align &= -align;
9437
9438 /* If align is non-zero and less than *modulus, replace
9439 *modulus with align. If align is 0, then either op1 is 0
9440 or the greatest power-of-2 divisor of op1 doesn't fit in an
9441 unsigned HOST_WIDE_INT. In either case, no additional
9442 constraint is imposed. */
9443 if (align)
9444 modulus = MIN (modulus, align);
9445
9446 return modulus;
9447 }
9448 }
9449 }
9450
9451 /* If we get here, we were unable to determine anything useful about the
9452 expression. */
9453 return 1;
9454 }
9455
9456
9457 /* Fold a binary expression of code CODE and type TYPE with operands
9458 OP0 and OP1. LOC is the location of the resulting expression.
9459 Return the folded expression if folding is successful. Otherwise,
9460 return NULL_TREE. */
9461
9462 tree
9463 fold_binary_loc (location_t loc,
9464 enum tree_code code, tree type, tree op0, tree op1)
9465 {
9466 enum tree_code_class kind = TREE_CODE_CLASS (code);
9467 tree arg0, arg1, tem;
9468 tree t1 = NULL_TREE;
9469 bool strict_overflow_p;
9470
9471 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9472 && TREE_CODE_LENGTH (code) == 2
9473 && op0 != NULL_TREE
9474 && op1 != NULL_TREE);
9475
9476 arg0 = op0;
9477 arg1 = op1;
9478
9479 /* Strip any conversions that don't change the mode. This is
9480 safe for every expression, except for a comparison expression
9481 because its signedness is derived from its operands. So, in
9482 the latter case, only strip conversions that don't change the
9483 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9484 preserved.
9485
9486 Note that this is done as an internal manipulation within the
9487 constant folder, in order to find the simplest representation
9488 of the arguments so that their form can be studied. In any
9489 case, the appropriate type conversions should be put back in
9490 the tree that will get out of the constant folder. */
9491
9492 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9493 {
9494 STRIP_SIGN_NOPS (arg0);
9495 STRIP_SIGN_NOPS (arg1);
9496 }
9497 else
9498 {
9499 STRIP_NOPS (arg0);
9500 STRIP_NOPS (arg1);
9501 }
9502
9503 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9504 constant but we can't do arithmetic on them. */
9505 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9506 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9507 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9508 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9509 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9510 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9511 {
9512 if (kind == tcc_binary)
9513 {
9514 /* Make sure type and arg0 have the same saturating flag. */
9515 gcc_assert (TYPE_SATURATING (type)
9516 == TYPE_SATURATING (TREE_TYPE (arg0)));
9517 tem = const_binop (code, arg0, arg1);
9518 }
9519 else if (kind == tcc_comparison)
9520 tem = fold_relational_const (code, type, arg0, arg1);
9521 else
9522 tem = NULL_TREE;
9523
9524 if (tem != NULL_TREE)
9525 {
9526 if (TREE_TYPE (tem) != type)
9527 tem = fold_convert_loc (loc, type, tem);
9528 return tem;
9529 }
9530 }
9531
9532 /* If this is a commutative operation, and ARG0 is a constant, move it
9533 to ARG1 to reduce the number of tests below. */
9534 if (commutative_tree_code (code)
9535 && tree_swap_operands_p (arg0, arg1, true))
9536 return fold_build2_loc (loc, code, type, op1, op0);
9537
9538 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9539
9540 First check for cases where an arithmetic operation is applied to a
9541 compound, conditional, or comparison operation. Push the arithmetic
9542 operation inside the compound or conditional to see if any folding
9543 can then be done. Convert comparison to conditional for this purpose.
9544 This also optimizes non-constant cases that used to be done in
9545 expand_expr.
9546
9547 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9548 one of the operands is a comparison and the other is a comparison, a
9549 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9550 code below would make the expression more complex. Change it to a
9551 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9552 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
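/* For example, with integer truth operands:
     (a < b) & (c < d)    becomes a TRUTH_AND_EXPR, and
     (a < b) == (c < d)   becomes the inversion of a TRUTH_XOR_EXPR,
   i.e. semantically !((a < b) ^ (c < d)).  */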
9553
9554 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9555 || code == EQ_EXPR || code == NE_EXPR)
9556 && ((truth_value_p (TREE_CODE (arg0))
9557 && (truth_value_p (TREE_CODE (arg1))
9558 || (TREE_CODE (arg1) == BIT_AND_EXPR
9559 && integer_onep (TREE_OPERAND (arg1, 1)))))
9560 || (truth_value_p (TREE_CODE (arg1))
9561 && (truth_value_p (TREE_CODE (arg0))
9562 || (TREE_CODE (arg0) == BIT_AND_EXPR
9563 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9564 {
9565 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9566 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9567 : TRUTH_XOR_EXPR,
9568 boolean_type_node,
9569 fold_convert_loc (loc, boolean_type_node, arg0),
9570 fold_convert_loc (loc, boolean_type_node, arg1));
9571
9572 if (code == EQ_EXPR)
9573 tem = invert_truthvalue_loc (loc, tem);
9574
9575 return fold_convert_loc (loc, type, tem);
9576 }
9577
9578 if (TREE_CODE_CLASS (code) == tcc_binary
9579 || TREE_CODE_CLASS (code) == tcc_comparison)
9580 {
9581 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9582 {
9583 tem = fold_build2_loc (loc, code, type,
9584 fold_convert_loc (loc, TREE_TYPE (op0),
9585 TREE_OPERAND (arg0, 1)), op1);
9586 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9587 tem);
9588 }
9589 if (TREE_CODE (arg1) == COMPOUND_EXPR
9590 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9591 {
9592 tem = fold_build2_loc (loc, code, type, op0,
9593 fold_convert_loc (loc, TREE_TYPE (op1),
9594 TREE_OPERAND (arg1, 1)));
9595 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9596 tem);
9597 }
9598
9599 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9600 {
9601 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9602 arg0, arg1,
9603 /*cond_first_p=*/1);
9604 if (tem != NULL_TREE)
9605 return tem;
9606 }
9607
9608 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9609 {
9610 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9611 arg1, arg0,
9612 /*cond_first_p=*/0);
9613 if (tem != NULL_TREE)
9614 return tem;
9615 }
9616 }
9617
9618 switch (code)
9619 {
9620 case MEM_REF:
9621 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9622 if (TREE_CODE (arg0) == ADDR_EXPR
9623 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9624 {
9625 tree iref = TREE_OPERAND (arg0, 0);
9626 return fold_build2 (MEM_REF, type,
9627 TREE_OPERAND (iref, 0),
9628 int_const_binop (PLUS_EXPR, arg1,
9629 TREE_OPERAND (iref, 1)));
9630 }
9631
9632 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9633 if (TREE_CODE (arg0) == ADDR_EXPR
9634 && handled_component_p (TREE_OPERAND (arg0, 0)))
9635 {
9636 tree base;
9637 HOST_WIDE_INT coffset;
9638 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9639 &coffset);
9640 if (!base)
9641 return NULL_TREE;
9642 return fold_build2 (MEM_REF, type,
9643 build_fold_addr_expr (base),
9644 int_const_binop (PLUS_EXPR, arg1,
9645 size_int (coffset)));
9646 }
9647
9648 return NULL_TREE;
9649
9650 case POINTER_PLUS_EXPR:
9651 /* 0 +p index -> (type)index */
9652 if (integer_zerop (arg0))
9653 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9654
9655 /* PTR +p 0 -> PTR */
9656 if (integer_zerop (arg1))
9657 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9658
9659 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9660 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9661 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9662 return fold_convert_loc (loc, type,
9663 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9664 fold_convert_loc (loc, sizetype,
9665 arg1),
9666 fold_convert_loc (loc, sizetype,
9667 arg0)));
9668
9669 /* (PTR +p B) +p A -> PTR +p (B + A) */
9670 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9671 {
9672 tree inner;
9673 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9674 tree arg00 = TREE_OPERAND (arg0, 0);
9675 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9676 arg01, fold_convert_loc (loc, sizetype, arg1));
9677 return fold_convert_loc (loc, type,
9678 fold_build_pointer_plus_loc (loc,
9679 arg00, inner));
9680 }
9681
9682 /* PTR_CST +p CST -> CST1, folding the two constants into one. */
9683 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9684 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9685 fold_convert_loc (loc, type, arg1));
9686
9687 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9688 of the array. The loop optimizer sometimes produces this kind of
9689 expression. */
9690 if (TREE_CODE (arg0) == ADDR_EXPR)
9691 {
9692 tem = try_move_mult_to_index (loc, arg0,
9693 fold_convert_loc (loc, sizetype, arg1));
9694 if (tem)
9695 return fold_convert_loc (loc, type, tem);
9696 }
9697
9698 return NULL_TREE;
9699
9700 case PLUS_EXPR:
9701 /* A + (-B) -> A - B */
9702 if (TREE_CODE (arg1) == NEGATE_EXPR)
9703 return fold_build2_loc (loc, MINUS_EXPR, type,
9704 fold_convert_loc (loc, type, arg0),
9705 fold_convert_loc (loc, type,
9706 TREE_OPERAND (arg1, 0)));
9707 /* (-A) + B -> B - A */
9708 if (TREE_CODE (arg0) == NEGATE_EXPR
9709 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9710 return fold_build2_loc (loc, MINUS_EXPR, type,
9711 fold_convert_loc (loc, type, arg1),
9712 fold_convert_loc (loc, type,
9713 TREE_OPERAND (arg0, 0)));
9714
9715 if (INTEGRAL_TYPE_P (type))
9716 {
9717 /* Convert ~A + 1 to -A. */
9718 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9719 && integer_onep (arg1))
9720 return fold_build1_loc (loc, NEGATE_EXPR, type,
9721 fold_convert_loc (loc, type,
9722 TREE_OPERAND (arg0, 0)));
9723
9724 /* ~X + X is -1. */
9725 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9726 && !TYPE_OVERFLOW_TRAPS (type))
9727 {
9728 tree tem = TREE_OPERAND (arg0, 0);
9729
9730 STRIP_NOPS (tem);
9731 if (operand_equal_p (tem, arg1, 0))
9732 {
9733 t1 = build_int_cst_type (type, -1);
9734 return omit_one_operand_loc (loc, type, t1, arg1);
9735 }
9736 }
9737
9738 /* X + ~X is -1. */
9739 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9740 && !TYPE_OVERFLOW_TRAPS (type))
9741 {
9742 tree tem = TREE_OPERAND (arg1, 0);
9743
9744 STRIP_NOPS (tem);
9745 if (operand_equal_p (arg0, tem, 0))
9746 {
9747 t1 = build_int_cst_type (type, -1);
9748 return omit_one_operand_loc (loc, type, t1, arg0);
9749 }
9750 }
9751
9752 /* X + (X / CST) * -CST is X % CST. */
9753 if (TREE_CODE (arg1) == MULT_EXPR
9754 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9755 && operand_equal_p (arg0,
9756 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9757 {
9758 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9759 tree cst1 = TREE_OPERAND (arg1, 1);
9760 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9761 cst1, cst0);
9762 if (sum && integer_zerop (sum))
9763 return fold_convert_loc (loc, type,
9764 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9765 TREE_TYPE (arg0), arg0,
9766 cst0));
9767 }
9768 }
9769
9770 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9771 same or one. Make sure type is not saturating.
9772 fold_plusminus_mult_expr will re-associate. */
9773 if ((TREE_CODE (arg0) == MULT_EXPR
9774 || TREE_CODE (arg1) == MULT_EXPR)
9775 && !TYPE_SATURATING (type)
9776 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9777 {
9778 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9779 if (tem)
9780 return tem;
9781 }
9782
9783 if (! FLOAT_TYPE_P (type))
9784 {
9785 if (integer_zerop (arg1))
9786 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9787
9788 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9789 with a constant, and the two constants have no bits in common,
9790 we should treat this as a BIT_IOR_EXPR since this may produce more
9791 simplifications. */
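/* For example, (x & 0xF0) + (y & 0x0F) cannot produce carries
   between the two operands, so it is treated as
   (x & 0xF0) | (y & 0x0F).  */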
9792 if (TREE_CODE (arg0) == BIT_AND_EXPR
9793 && TREE_CODE (arg1) == BIT_AND_EXPR
9794 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9795 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9796 && integer_zerop (const_binop (BIT_AND_EXPR,
9797 TREE_OPERAND (arg0, 1),
9798 TREE_OPERAND (arg1, 1))))
9799 {
9800 code = BIT_IOR_EXPR;
9801 goto bit_ior;
9802 }
9803
9804 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9805 (plus (plus (mult) (mult)) (foo)) so that we can
9806 take advantage of the factoring cases below. */
9807 if (TYPE_OVERFLOW_WRAPS (type)
9808 && (((TREE_CODE (arg0) == PLUS_EXPR
9809 || TREE_CODE (arg0) == MINUS_EXPR)
9810 && TREE_CODE (arg1) == MULT_EXPR)
9811 || ((TREE_CODE (arg1) == PLUS_EXPR
9812 || TREE_CODE (arg1) == MINUS_EXPR)
9813 && TREE_CODE (arg0) == MULT_EXPR)))
9814 {
9815 tree parg0, parg1, parg, marg;
9816 enum tree_code pcode;
9817
9818 if (TREE_CODE (arg1) == MULT_EXPR)
9819 parg = arg0, marg = arg1;
9820 else
9821 parg = arg1, marg = arg0;
9822 pcode = TREE_CODE (parg);
9823 parg0 = TREE_OPERAND (parg, 0);
9824 parg1 = TREE_OPERAND (parg, 1);
9825 STRIP_NOPS (parg0);
9826 STRIP_NOPS (parg1);
9827
9828 if (TREE_CODE (parg0) == MULT_EXPR
9829 && TREE_CODE (parg1) != MULT_EXPR)
9830 return fold_build2_loc (loc, pcode, type,
9831 fold_build2_loc (loc, PLUS_EXPR, type,
9832 fold_convert_loc (loc, type,
9833 parg0),
9834 fold_convert_loc (loc, type,
9835 marg)),
9836 fold_convert_loc (loc, type, parg1));
9837 if (TREE_CODE (parg0) != MULT_EXPR
9838 && TREE_CODE (parg1) == MULT_EXPR)
9839 return
9840 fold_build2_loc (loc, PLUS_EXPR, type,
9841 fold_convert_loc (loc, type, parg0),
9842 fold_build2_loc (loc, pcode, type,
9843 fold_convert_loc (loc, type, marg),
9844 fold_convert_loc (loc, type,
9845 parg1)));
9846 }
9847 }
9848 else
9849 {
9850 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9851 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9852 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9853
9854 /* Likewise if the operands are reversed. */
9855 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9856 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9857
9858 /* Convert X + -C into X - C. */
9859 if (TREE_CODE (arg1) == REAL_CST
9860 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9861 {
9862 tem = fold_negate_const (arg1, type);
9863 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9864 return fold_build2_loc (loc, MINUS_EXPR, type,
9865 fold_convert_loc (loc, type, arg0),
9866 fold_convert_loc (loc, type, tem));
9867 }
9868
9869 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9870 to __complex__ ( x, y ). This is not the same for SNaNs or
9871 if signed zeros are involved. */
9872 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9873 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9874 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9875 {
9876 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9877 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9878 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9879 bool arg0rz = false, arg0iz = false;
9880 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9881 || (arg0i && (arg0iz = real_zerop (arg0i))))
9882 {
9883 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9884 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9885 if (arg0rz && arg1i && real_zerop (arg1i))
9886 {
9887 tree rp = arg1r ? arg1r
9888 : build1 (REALPART_EXPR, rtype, arg1);
9889 tree ip = arg0i ? arg0i
9890 : build1 (IMAGPART_EXPR, rtype, arg0);
9891 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9892 }
9893 else if (arg0iz && arg1r && real_zerop (arg1r))
9894 {
9895 tree rp = arg0r ? arg0r
9896 : build1 (REALPART_EXPR, rtype, arg0);
9897 tree ip = arg1i ? arg1i
9898 : build1 (IMAGPART_EXPR, rtype, arg1);
9899 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9900 }
9901 }
9902 }
9903
9904 if (flag_unsafe_math_optimizations
9905 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9906 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9907 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9908 return tem;
9909
9910 /* Convert x+x into x*2.0. */
9911 if (operand_equal_p (arg0, arg1, 0)
9912 && SCALAR_FLOAT_TYPE_P (type))
9913 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9914 build_real (type, dconst2));
9915
9916 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9917 We associate floats only if the user has specified
9918 -fassociative-math. */
9919 if (flag_associative_math
9920 && TREE_CODE (arg1) == PLUS_EXPR
9921 && TREE_CODE (arg0) != MULT_EXPR)
9922 {
9923 tree tree10 = TREE_OPERAND (arg1, 0);
9924 tree tree11 = TREE_OPERAND (arg1, 1);
9925 if (TREE_CODE (tree11) == MULT_EXPR
9926 && TREE_CODE (tree10) == MULT_EXPR)
9927 {
9928 tree tree0;
9929 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9930 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9931 }
9932 }
9933 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9934 We associate floats only if the user has specified
9935 -fassociative-math. */
9936 if (flag_associative_math
9937 && TREE_CODE (arg0) == PLUS_EXPR
9938 && TREE_CODE (arg1) != MULT_EXPR)
9939 {
9940 tree tree00 = TREE_OPERAND (arg0, 0);
9941 tree tree01 = TREE_OPERAND (arg0, 1);
9942 if (TREE_CODE (tree01) == MULT_EXPR
9943 && TREE_CODE (tree00) == MULT_EXPR)
9944 {
9945 tree tree0;
9946 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9947 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9948 }
9949 }
9950 }
9951
9952 bit_rotate:
9953 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9954 is a rotate of A by C1 bits. */
9955 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9956 is a rotate of A by B bits. */
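/* Concretely, for a 32-bit unsigned A:
     (A << 3) + (A >> 29)        is A rotated left by 3 bits, and
     (A << B) + (A >> (32 - B))  is A rotated left by B bits.  */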
9957 {
9958 enum tree_code code0, code1;
9959 tree rtype;
9960 code0 = TREE_CODE (arg0);
9961 code1 = TREE_CODE (arg1);
9962 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9963 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9964 && operand_equal_p (TREE_OPERAND (arg0, 0),
9965 TREE_OPERAND (arg1, 0), 0)
9966 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9967 TYPE_UNSIGNED (rtype))
9968 /* Only create rotates in complete modes. Other cases are not
9969 expanded properly. */
9970 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9971 {
9972 tree tree01, tree11;
9973 enum tree_code code01, code11;
9974
9975 tree01 = TREE_OPERAND (arg0, 1);
9976 tree11 = TREE_OPERAND (arg1, 1);
9977 STRIP_NOPS (tree01);
9978 STRIP_NOPS (tree11);
9979 code01 = TREE_CODE (tree01);
9980 code11 = TREE_CODE (tree11);
9981 if (code01 == INTEGER_CST
9982 && code11 == INTEGER_CST
9983 && TREE_INT_CST_HIGH (tree01) == 0
9984 && TREE_INT_CST_HIGH (tree11) == 0
9985 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9986 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9987 {
9988 tem = build2_loc (loc, LROTATE_EXPR,
9989 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9990 TREE_OPERAND (arg0, 0),
9991 code0 == LSHIFT_EXPR ? tree01 : tree11);
9992 return fold_convert_loc (loc, type, tem);
9993 }
9994 else if (code11 == MINUS_EXPR)
9995 {
9996 tree tree110, tree111;
9997 tree110 = TREE_OPERAND (tree11, 0);
9998 tree111 = TREE_OPERAND (tree11, 1);
9999 STRIP_NOPS (tree110);
10000 STRIP_NOPS (tree111);
10001 if (TREE_CODE (tree110) == INTEGER_CST
10002 && 0 == compare_tree_int (tree110,
10003 TYPE_PRECISION
10004 (TREE_TYPE (TREE_OPERAND
10005 (arg0, 0))))
10006 && operand_equal_p (tree01, tree111, 0))
10007 return
10008 fold_convert_loc (loc, type,
10009 build2 ((code0 == LSHIFT_EXPR
10010 ? LROTATE_EXPR
10011 : RROTATE_EXPR),
10012 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10013 TREE_OPERAND (arg0, 0), tree01));
10014 }
10015 else if (code01 == MINUS_EXPR)
10016 {
10017 tree tree010, tree011;
10018 tree010 = TREE_OPERAND (tree01, 0);
10019 tree011 = TREE_OPERAND (tree01, 1);
10020 STRIP_NOPS (tree010);
10021 STRIP_NOPS (tree011);
10022 if (TREE_CODE (tree010) == INTEGER_CST
10023 && 0 == compare_tree_int (tree010,
10024 TYPE_PRECISION
10025 (TREE_TYPE (TREE_OPERAND
10026 (arg0, 0))))
10027 && operand_equal_p (tree11, tree011, 0))
10028 return fold_convert_loc
10029 (loc, type,
10030 build2 ((code0 != LSHIFT_EXPR
10031 ? LROTATE_EXPR
10032 : RROTATE_EXPR),
10033 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10034 TREE_OPERAND (arg0, 0), tree11));
10035 }
10036 }
10037 }
10038
10039 associate:
10040 /* In most languages, we can't associate operations on floats through
10041 parentheses. Rather than remember where the parentheses were, we
10042 don't associate floats at all, unless the user has specified
10043 -fassociative-math.
10044 We also need to make sure the type is not saturating. */
10045
10046 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10047 && !TYPE_SATURATING (type))
10048 {
10049 tree var0, con0, lit0, minus_lit0;
10050 tree var1, con1, lit1, minus_lit1;
10051 bool ok = true;
10052
10053 /* Split both trees into variables, constants, and literals. Then
10054 associate each group together, the constants with literals,
10055 then the result with variables. This increases the chances of
10056 literals being recombined later and of generating relocatable
10057 expressions for the sum of a constant and literal. */
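/* For example, for unsigned x and y, folding (x + 4) + (y + 5)
   splits into var0 = x, lit0 = 4, var1 = y, lit1 = 5; the literals
   associate to 9 and the result is rebuilt as (x + y) + 9.  */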
10058 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10059 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10060 code == MINUS_EXPR);
10061
10062 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10063 if (code == MINUS_EXPR)
10064 code = PLUS_EXPR;
10065
10066 /* With undefined overflow we can only associate constants with one
10067 variable, and constants whose association doesn't overflow. */
10068 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10069 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10070 {
10071 if (var0 && var1)
10072 {
10073 tree tmp0 = var0;
10074 tree tmp1 = var1;
10075
10076 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10077 tmp0 = TREE_OPERAND (tmp0, 0);
10078 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10079 tmp1 = TREE_OPERAND (tmp1, 0);
10080 /* The only case we can still associate with two variables
10081 is if they are the same, modulo negation. */
10082 if (!operand_equal_p (tmp0, tmp1, 0))
10083 ok = false;
10084 }
10085
10086 if (ok && lit0 && lit1)
10087 {
10088 tree tmp0 = fold_convert (type, lit0);
10089 tree tmp1 = fold_convert (type, lit1);
10090
10091 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10092 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10093 ok = false;
10094 }
10095 }
10096
10097 /* Only do something if we found more than two objects. Otherwise,
10098 nothing has changed and we risk infinite recursion. */
10099 if (ok
10100 && (2 < ((var0 != 0) + (var1 != 0)
10101 + (con0 != 0) + (con1 != 0)
10102 + (lit0 != 0) + (lit1 != 0)
10103 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10104 {
10105 var0 = associate_trees (loc, var0, var1, code, type);
10106 con0 = associate_trees (loc, con0, con1, code, type);
10107 lit0 = associate_trees (loc, lit0, lit1, code, type);
10108 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10109
10110 /* Preserve the MINUS_EXPR if the negative part of the literal is
10111 greater than the positive part. Otherwise, the multiplicative
10112 folding code (i.e. extract_muldiv) may be fooled in case
10113 unsigned constants are subtracted, like in the following
10114 example: ((X*2 + 4) - 8U)/2. */
10115 if (minus_lit0 && lit0)
10116 {
10117 if (TREE_CODE (lit0) == INTEGER_CST
10118 && TREE_CODE (minus_lit0) == INTEGER_CST
10119 && tree_int_cst_lt (lit0, minus_lit0))
10120 {
10121 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10122 MINUS_EXPR, type);
10123 lit0 = 0;
10124 }
10125 else
10126 {
10127 lit0 = associate_trees (loc, lit0, minus_lit0,
10128 MINUS_EXPR, type);
10129 minus_lit0 = 0;
10130 }
10131 }
10132 if (minus_lit0)
10133 {
10134 if (con0 == 0)
10135 return
10136 fold_convert_loc (loc, type,
10137 associate_trees (loc, var0, minus_lit0,
10138 MINUS_EXPR, type));
10139 else
10140 {
10141 con0 = associate_trees (loc, con0, minus_lit0,
10142 MINUS_EXPR, type);
10143 return
10144 fold_convert_loc (loc, type,
10145 associate_trees (loc, var0, con0,
10146 PLUS_EXPR, type));
10147 }
10148 }
10149
10150 con0 = associate_trees (loc, con0, lit0, code, type);
10151 return
10152 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10153 code, type));
10154 }
10155 }
10156
10157 return NULL_TREE;
10158
10159 case MINUS_EXPR:
10160 /* Pointer simplifications for subtraction, simple reassociations. */
10161 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10162 {
10163 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10164 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10165 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10166 {
10167 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10168 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10169 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10170 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10171 return fold_build2_loc (loc, PLUS_EXPR, type,
10172 fold_build2_loc (loc, MINUS_EXPR, type,
10173 arg00, arg10),
10174 fold_build2_loc (loc, MINUS_EXPR, type,
10175 arg01, arg11));
10176 }
10177 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10178 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10179 {
10180 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10181 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10182 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10183 fold_convert_loc (loc, type, arg1));
10184 if (tmp)
10185 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10186 }
10187 }
10188 /* A - (-B) -> A + B */
10189 if (TREE_CODE (arg1) == NEGATE_EXPR)
10190 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10191 fold_convert_loc (loc, type,
10192 TREE_OPERAND (arg1, 0)));
10193 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10194 if (TREE_CODE (arg0) == NEGATE_EXPR
10195 && (FLOAT_TYPE_P (type)
10196 || INTEGRAL_TYPE_P (type))
10197 && negate_expr_p (arg1)
10198 && reorder_operands_p (arg0, arg1))
10199 return fold_build2_loc (loc, MINUS_EXPR, type,
10200 fold_convert_loc (loc, type,
10201 negate_expr (arg1)),
10202 fold_convert_loc (loc, type,
10203 TREE_OPERAND (arg0, 0)));
10204 /* Convert -A - 1 to ~A. */
10205 if (INTEGRAL_TYPE_P (type)
10206 && TREE_CODE (arg0) == NEGATE_EXPR
10207 && integer_onep (arg1)
10208 && !TYPE_OVERFLOW_TRAPS (type))
10209 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10210 fold_convert_loc (loc, type,
10211 TREE_OPERAND (arg0, 0)));
10212
10213 /* Convert -1 - A to ~A. */
10214 if (INTEGRAL_TYPE_P (type)
10215 && integer_all_onesp (arg0))
10216 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10217
10218
10219 /* X - (X / CST) * CST is X % CST. */
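      /* For example, x - (x/16)*16 folds to x % 16 here.  */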
10220 if (INTEGRAL_TYPE_P (type)
10221 && TREE_CODE (arg1) == MULT_EXPR
10222 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10223 && operand_equal_p (arg0,
10224 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10225 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10226 TREE_OPERAND (arg1, 1), 0))
10227 return
10228 fold_convert_loc (loc, type,
10229 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10230 arg0, TREE_OPERAND (arg1, 1)));
10231
10232 if (! FLOAT_TYPE_P (type))
10233 {
10234 if (integer_zerop (arg0))
10235 return negate_expr (fold_convert_loc (loc, type, arg1));
10236 if (integer_zerop (arg1))
10237 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10238
10239 /* Fold A - (A & B) into ~B & A. */
10240 if (!TREE_SIDE_EFFECTS (arg0)
10241 && TREE_CODE (arg1) == BIT_AND_EXPR)
10242 {
10243 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10244 {
10245 tree arg10 = fold_convert_loc (loc, type,
10246 TREE_OPERAND (arg1, 0));
10247 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10248 fold_build1_loc (loc, BIT_NOT_EXPR,
10249 type, arg10),
10250 fold_convert_loc (loc, type, arg0));
10251 }
10252 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10253 {
10254 tree arg11 = fold_convert_loc (loc,
10255 type, TREE_OPERAND (arg1, 1));
10256 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10257 fold_build1_loc (loc, BIT_NOT_EXPR,
10258 type, arg11),
10259 fold_convert_loc (loc, type, arg0));
10260 }
10261 }
10262
10263 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10264 any power of 2 minus 1. */
10265 if (TREE_CODE (arg0) == BIT_AND_EXPR
10266 && TREE_CODE (arg1) == BIT_AND_EXPR
10267 && operand_equal_p (TREE_OPERAND (arg0, 0),
10268 TREE_OPERAND (arg1, 0), 0))
10269 {
10270 tree mask0 = TREE_OPERAND (arg0, 1);
10271 tree mask1 = TREE_OPERAND (arg1, 1);
10272 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10273
10274 if (operand_equal_p (tem, mask1, 0))
10275 {
10276 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10277 TREE_OPERAND (arg0, 0), mask1);
10278 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10279 }
10280 }
10281 }
10282
10283 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10284 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10285 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10286
10287 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10288 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10289 (-ARG1 + ARG0) reduces to -ARG1. */
10290 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10291 return negate_expr (fold_convert_loc (loc, type, arg1));
10292
10293 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10294 __complex__ ( x, -y ). This is not the same for SNaNs or if
10295 signed zeros are involved. */
10296 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10297 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10298 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10299 {
10300 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10301 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10302 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10303 bool arg0rz = false, arg0iz = false;
10304 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10305 || (arg0i && (arg0iz = real_zerop (arg0i))))
10306 {
10307 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10308 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10309 if (arg0rz && arg1i && real_zerop (arg1i))
10310 {
10311 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10312 arg1r ? arg1r
10313 : build1 (REALPART_EXPR, rtype, arg1));
10314 tree ip = arg0i ? arg0i
10315 : build1 (IMAGPART_EXPR, rtype, arg0);
10316 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10317 }
10318 else if (arg0iz && arg1r && real_zerop (arg1r))
10319 {
10320 tree rp = arg0r ? arg0r
10321 : build1 (REALPART_EXPR, rtype, arg0);
10322 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10323 arg1i ? arg1i
10324 : build1 (IMAGPART_EXPR, rtype, arg1));
10325 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10326 }
10327 }
10328 }
10329
10330 /* Fold &x - &x. This can happen from &x.foo - &x.
10331 This is unsafe for certain floats even in non-IEEE formats.
10332 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10333 Also note that operand_equal_p is always false if an operand
10334 is volatile. */
10335
10336 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10337 && operand_equal_p (arg0, arg1, 0))
10338 return build_zero_cst (type);
10339
10340 /* A - B -> A + (-B) if B is easily negatable. */
10341 if (negate_expr_p (arg1)
10342 && ((FLOAT_TYPE_P (type)
10343 /* Avoid this transformation if B is a positive REAL_CST. */
10344 && (TREE_CODE (arg1) != REAL_CST
10345 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10346 || INTEGRAL_TYPE_P (type)))
10347 return fold_build2_loc (loc, PLUS_EXPR, type,
10348 fold_convert_loc (loc, type, arg0),
10349 fold_convert_loc (loc, type,
10350 negate_expr (arg1)));
10351
10352 /* Try folding difference of addresses. */
10353 {
10354 HOST_WIDE_INT diff;
10355
10356 if ((TREE_CODE (arg0) == ADDR_EXPR
10357 || TREE_CODE (arg1) == ADDR_EXPR)
10358 && ptr_difference_const (arg0, arg1, &diff))
10359 return build_int_cst_type (type, diff);
10360 }
10361
10362 /* Fold &a[i] - &a[j] to i-j. */
10363 if (TREE_CODE (arg0) == ADDR_EXPR
10364 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10365 && TREE_CODE (arg1) == ADDR_EXPR
10366 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10367 {
10368 tree aref0 = TREE_OPERAND (arg0, 0);
10369 tree aref1 = TREE_OPERAND (arg1, 0);
10370 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10371 TREE_OPERAND (aref1, 0), 0))
10372 {
10373 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10374 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10375 tree esz = array_ref_element_size (aref0);
10376 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10377 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10378 fold_convert_loc (loc, type, esz));
10379
10380 }
10381 }
10382
10383 if (FLOAT_TYPE_P (type)
10384 && flag_unsafe_math_optimizations
10385 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10386 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10387 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10388 return tem;
10389
10390 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10391 same or one. Make sure type is not saturating.
10392 fold_plusminus_mult_expr will re-associate. */
10393 if ((TREE_CODE (arg0) == MULT_EXPR
10394 || TREE_CODE (arg1) == MULT_EXPR)
10395 && !TYPE_SATURATING (type)
10396 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10397 {
10398 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10399 if (tem)
10400 return tem;
10401 }
10402
10403 goto associate;
10404
10405 case MULT_EXPR:
10406 /* (-A) * (-B) -> A * B */
10407 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10408 return fold_build2_loc (loc, MULT_EXPR, type,
10409 fold_convert_loc (loc, type,
10410 TREE_OPERAND (arg0, 0)),
10411 fold_convert_loc (loc, type,
10412 negate_expr (arg1)));
10413 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10414 return fold_build2_loc (loc, MULT_EXPR, type,
10415 fold_convert_loc (loc, type,
10416 negate_expr (arg0)),
10417 fold_convert_loc (loc, type,
10418 TREE_OPERAND (arg1, 0)));
10419
10420 if (! FLOAT_TYPE_P (type))
10421 {
10422 if (integer_zerop (arg1))
10423 return omit_one_operand_loc (loc, type, arg1, arg0);
10424 if (integer_onep (arg1))
10425 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10426 /* Transform x * -1 into -x. Make sure to do the negation
10427 on the original operand with conversions not stripped
10428 because we can only strip non-sign-changing conversions. */
10429 if (integer_all_onesp (arg1))
10430 return fold_convert_loc (loc, type, negate_expr (op0));
10431 /* Transform x * -C into -x * C if x is easily negatable. */
10432 if (TREE_CODE (arg1) == INTEGER_CST
10433 && tree_int_cst_sgn (arg1) == -1
10434 && negate_expr_p (arg0)
10435 && (tem = negate_expr (arg1)) != arg1
10436 && !TREE_OVERFLOW (tem))
10437 return fold_build2_loc (loc, MULT_EXPR, type,
10438 fold_convert_loc (loc, type,
10439 negate_expr (arg0)),
10440 tem);
10441
10442 /* (a * (1 << b)) is (a << b) */
10443 if (TREE_CODE (arg1) == LSHIFT_EXPR
10444 && integer_onep (TREE_OPERAND (arg1, 0)))
10445 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10446 TREE_OPERAND (arg1, 1));
10447 if (TREE_CODE (arg0) == LSHIFT_EXPR
10448 && integer_onep (TREE_OPERAND (arg0, 0)))
10449 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10450 TREE_OPERAND (arg0, 1));
10451
10452 /* (A + A) * C -> A * 2 * C */
10453 if (TREE_CODE (arg0) == PLUS_EXPR
10454 && TREE_CODE (arg1) == INTEGER_CST
10455 && operand_equal_p (TREE_OPERAND (arg0, 0),
10456 TREE_OPERAND (arg0, 1), 0))
10457 return fold_build2_loc (loc, MULT_EXPR, type,
10458 omit_one_operand_loc (loc, type,
10459 TREE_OPERAND (arg0, 0),
10460 TREE_OPERAND (arg0, 1)),
10461 fold_build2_loc (loc, MULT_EXPR, type,
10462 build_int_cst (type, 2), arg1));
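	  /* E.g. (a + a) * 3 is rewritten as a * (2 * 3); the inner
	     constant multiplication then folds, giving a * 6
	     (illustrative sketch of the intended result).  */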
10463
10464 strict_overflow_p = false;
10465 if (TREE_CODE (arg1) == INTEGER_CST
10466 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10467 &strict_overflow_p)))
10468 {
10469 if (strict_overflow_p)
10470 fold_overflow_warning (("assuming signed overflow does not "
10471 "occur when simplifying "
10472 "multiplication"),
10473 WARN_STRICT_OVERFLOW_MISC);
10474 return fold_convert_loc (loc, type, tem);
10475 }
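	  /* A hedged example of what extract_muldiv can do here:
	     (X + 2) * 4 may be rewritten as X*4 + 8, which is why a
	     strict-overflow warning can be needed for signed types
	     (a typical case, not an exhaustive description).  */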
10476
10477 /* Optimize z * conj(z) for integer complex numbers. */
10478 if (TREE_CODE (arg0) == CONJ_EXPR
10479 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10480 return fold_mult_zconjz (loc, type, arg1);
10481 if (TREE_CODE (arg1) == CONJ_EXPR
10482 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10483 return fold_mult_zconjz (loc, type, arg0);
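	  /* In both cases the identity is z * conj(z)
	     = (a + bi)(a - bi) = a*a + b*b, i.e. a complex value
	     with zero imaginary part.  */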
10484 }
10485 else
10486 {
10487 /* Maybe fold x * 0 to 0. The expressions aren't the same
10488 when x is NaN, since x * 0 is also NaN. Nor are they the
10489 same in modes with signed zeros, since multiplying a
10490 negative value by 0 gives -0, not +0. */
10491 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10492 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10493 && real_zerop (arg1))
10494 return omit_one_operand_loc (loc, type, arg1, arg0);
10495 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
10496 Likewise for complex arithmetic with signed zeros. */
10497 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10498 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10499 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10500 && real_onep (arg1))
10501 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10502
10503 /* Transform x * -1.0 into -x. */
10504 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10505 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10506 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10507 && real_minus_onep (arg1))
10508 return fold_convert_loc (loc, type, negate_expr (arg0));
10509
10510 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10511 the result for floating-point types due to rounding, so it is applied
10512 only if -fassociative-math was specified. */
10513 if (flag_associative_math
10514 && TREE_CODE (arg0) == RDIV_EXPR
10515 && TREE_CODE (arg1) == REAL_CST
10516 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10517 {
10518 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10519 arg1);
10520 if (tem)
10521 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10522 TREE_OPERAND (arg0, 1));
10523 }
10524
10525 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10526 if (operand_equal_p (arg0, arg1, 0))
10527 {
10528 tree tem = fold_strip_sign_ops (arg0);
10529 if (tem != NULL_TREE)
10530 {
10531 tem = fold_convert_loc (loc, type, tem);
10532 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10533 }
10534 }
10535
10536 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10537 This is not the same for NaNs or if signed zeros are
10538 involved. */
10539 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10540 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10541 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10542 && TREE_CODE (arg1) == COMPLEX_CST
10543 && real_zerop (TREE_REALPART (arg1)))
10544 {
10545 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10546 if (real_onep (TREE_IMAGPART (arg1)))
10547 return
10548 fold_build2_loc (loc, COMPLEX_EXPR, type,
10549 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10550 rtype, arg0)),
10551 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10552 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10553 return
10554 fold_build2_loc (loc, COMPLEX_EXPR, type,
10555 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10556 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10557 rtype, arg0)));
10558 }
10559
10560 /* Optimize z * conj(z) for floating point complex numbers.
10561 Guarded by flag_unsafe_math_optimizations as non-finite
10562 imaginary components don't produce scalar results. */
10563 if (flag_unsafe_math_optimizations
10564 && TREE_CODE (arg0) == CONJ_EXPR
10565 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10566 return fold_mult_zconjz (loc, type, arg1);
10567 if (flag_unsafe_math_optimizations
10568 && TREE_CODE (arg1) == CONJ_EXPR
10569 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10570 return fold_mult_zconjz (loc, type, arg0);
10571
10572 if (flag_unsafe_math_optimizations)
10573 {
10574 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10575 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10576
10577 /* Optimizations of root(...)*root(...). */
10578 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10579 {
10580 tree rootfn, arg;
10581 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10582 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10583
10584 /* Optimize sqrt(x)*sqrt(x) as x. */
10585 if (BUILTIN_SQRT_P (fcode0)
10586 && operand_equal_p (arg00, arg10, 0)
10587 && ! HONOR_SNANS (TYPE_MODE (type)))
10588 return arg00;
10589
10590 /* Optimize root(x)*root(y) as root(x*y). */
10591 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10592 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10593 return build_call_expr_loc (loc, rootfn, 1, arg);
10594 }
10595
10596 /* Optimize expN(x)*expN(y) as expN(x+y). */
10597 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10598 {
10599 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10600 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10601 CALL_EXPR_ARG (arg0, 0),
10602 CALL_EXPR_ARG (arg1, 0));
10603 return build_call_expr_loc (loc, expfn, 1, arg);
10604 }
10605
10606 /* Optimizations of pow(...)*pow(...). */
10607 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10608 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10609 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10610 {
10611 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10612 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10613 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10614 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10615
10616 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10617 if (operand_equal_p (arg01, arg11, 0))
10618 {
10619 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10620 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10621 arg00, arg10);
10622 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10623 }
10624
10625 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10626 if (operand_equal_p (arg00, arg10, 0))
10627 {
10628 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10629 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10630 arg01, arg11);
10631 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10632 }
10633 }
10634
10635 /* Optimize tan(x)*cos(x) as sin(x). */
10636 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10637 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10638 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10639 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10640 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10641 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10642 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10643 CALL_EXPR_ARG (arg1, 0), 0))
10644 {
10645 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10646
10647 if (sinfn != NULL_TREE)
10648 return build_call_expr_loc (loc, sinfn, 1,
10649 CALL_EXPR_ARG (arg0, 0));
10650 }
10651
10652 /* Optimize x*pow(x,c) as pow(x,c+1). */
10653 if (fcode1 == BUILT_IN_POW
10654 || fcode1 == BUILT_IN_POWF
10655 || fcode1 == BUILT_IN_POWL)
10656 {
10657 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10658 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10659 if (TREE_CODE (arg11) == REAL_CST
10660 && !TREE_OVERFLOW (arg11)
10661 && operand_equal_p (arg0, arg10, 0))
10662 {
10663 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10664 REAL_VALUE_TYPE c;
10665 tree arg;
10666
10667 c = TREE_REAL_CST (arg11);
10668 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10669 arg = build_real (type, c);
10670 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10671 }
10672 }
10673
10674 /* Optimize pow(x,c)*x as pow(x,c+1). */
10675 if (fcode0 == BUILT_IN_POW
10676 || fcode0 == BUILT_IN_POWF
10677 || fcode0 == BUILT_IN_POWL)
10678 {
10679 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10680 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10681 if (TREE_CODE (arg01) == REAL_CST
10682 && !TREE_OVERFLOW (arg01)
10683 && operand_equal_p (arg1, arg00, 0))
10684 {
10685 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10686 REAL_VALUE_TYPE c;
10687 tree arg;
10688
10689 c = TREE_REAL_CST (arg01);
10690 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10691 arg = build_real (type, c);
10692 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10693 }
10694 }
10695
10696 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10697 if (!in_gimple_form
10698 && optimize_function_for_speed_p (cfun)
10699 && operand_equal_p (arg0, arg1, 0))
10700 {
10701 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10702
10703 if (powfn)
10704 {
10705 tree arg = build_real (type, dconst2);
10706 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10707 }
10708 }
10709 }
10710 }
10711 goto associate;
10712
10713 case BIT_IOR_EXPR:
10714 bit_ior:
10715 if (integer_all_onesp (arg1))
10716 return omit_one_operand_loc (loc, type, arg1, arg0);
10717 if (integer_zerop (arg1))
10718 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10719 if (operand_equal_p (arg0, arg1, 0))
10720 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10721
10722 /* ~X | X is -1. */
10723 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10724 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10725 {
10726 t1 = build_zero_cst (type);
10727 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10728 return omit_one_operand_loc (loc, type, t1, arg1);
10729 }
10730
10731 /* X | ~X is -1. */
10732 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10733 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10734 {
10735 t1 = build_zero_cst (type);
10736 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10737 return omit_one_operand_loc (loc, type, t1, arg0);
10738 }
10739
10740 /* Canonicalize (X & C1) | C2. */
10741 if (TREE_CODE (arg0) == BIT_AND_EXPR
10742 && TREE_CODE (arg1) == INTEGER_CST
10743 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10744 {
10745 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10746 int width = TYPE_PRECISION (type), w;
10747 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10748 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10749 hi2 = TREE_INT_CST_HIGH (arg1);
10750 lo2 = TREE_INT_CST_LOW (arg1);
10751
10752 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10753 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10754 return omit_one_operand_loc (loc, type, arg1,
10755 TREE_OPERAND (arg0, 0));
10756
10757 if (width > HOST_BITS_PER_WIDE_INT)
10758 {
10759 mhi = (unsigned HOST_WIDE_INT) -1
10760 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10761 mlo = -1;
10762 }
10763 else
10764 {
10765 mhi = 0;
10766 mlo = (unsigned HOST_WIDE_INT) -1
10767 >> (HOST_BITS_PER_WIDE_INT - width);
10768 }
10769
10770 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10771 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10772 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10773 TREE_OPERAND (arg0, 0), arg1);
10774
10775 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10776 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10777 mode which allows further optimizations. */
10778 hi1 &= mhi;
10779 lo1 &= mlo;
10780 hi2 &= mhi;
10781 lo2 &= mlo;
10782 hi3 = hi1 & ~hi2;
10783 lo3 = lo1 & ~lo2;
10784 for (w = BITS_PER_UNIT;
10785 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10786 w <<= 1)
10787 {
10788 unsigned HOST_WIDE_INT mask
10789 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10790 if (((lo1 | lo2) & mask) == mask
10791 && (lo1 & ~mask) == 0 && hi1 == 0)
10792 {
10793 hi3 = 0;
10794 lo3 = mask;
10795 break;
10796 }
10797 }
10798 if (hi3 != hi1 || lo3 != lo1)
10799 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10800 fold_build2_loc (loc, BIT_AND_EXPR, type,
10801 TREE_OPERAND (arg0, 0),
10802 build_int_cst_wide (type,
10803 lo3, hi3)),
10804 arg1);
10805 }
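	  /* Illustrative instances of the three cases above, assuming an
	     8-bit unsigned type: (X & 0xF0) | 0xFF becomes 0xFF, because
	     (0xF0 & 0xFF) == 0xF0; (X & 0x0F) | 0xF0 becomes X | 0xF0,
	     because (0x0F | 0xF0) covers every bit of the mode; and
	     (X & 0x3C) | 0x0C becomes (X & 0x30) | 0x0C, minimizing C1.  */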
10806
10807 /* (X & Y) | Y is (X, Y). */
10808 if (TREE_CODE (arg0) == BIT_AND_EXPR
10809 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10810 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10811 /* (X & Y) | X is (Y, X). */
10812 if (TREE_CODE (arg0) == BIT_AND_EXPR
10813 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10814 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10815 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10816 /* X | (X & Y) is (Y, X). */
10817 if (TREE_CODE (arg1) == BIT_AND_EXPR
10818 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10819 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10820 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10821 /* X | (Y & X) is (Y, X). */
10822 if (TREE_CODE (arg1) == BIT_AND_EXPR
10823 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10824 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10825 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10826
10827 /* (X & ~Y) | (~X & Y) is X ^ Y */
10828 if (TREE_CODE (arg0) == BIT_AND_EXPR
10829 && TREE_CODE (arg1) == BIT_AND_EXPR)
10830 {
10831 tree a0, a1, l0, l1, n0, n1;
10832
10833 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10834 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10835
10836 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10837 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10838
10839 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10840 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10841
10842 if ((operand_equal_p (n0, a0, 0)
10843 && operand_equal_p (n1, a1, 0))
10844 || (operand_equal_p (n0, a1, 0)
10845 && operand_equal_p (n1, a0, 0)))
10846 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10847 }
10848
10849 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10850 if (t1 != NULL_TREE)
10851 return t1;
10852
10853 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10854
10855 This results in more efficient code for machines without a NAND
10856 instruction. Combine will canonicalize to the first form
10857 which will allow use of NAND instructions provided by the
10858 backend if they exist. */
10859 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10860 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10861 {
10862 return
10863 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10864 build2 (BIT_AND_EXPR, type,
10865 fold_convert_loc (loc, type,
10866 TREE_OPERAND (arg0, 0)),
10867 fold_convert_loc (loc, type,
10868 TREE_OPERAND (arg1, 0))));
10869 }
10870
10871 /* See if this can be simplified into a rotate first. If that
10872 is unsuccessful continue in the association code. */
10873 goto bit_rotate;
10874
10875 case BIT_XOR_EXPR:
10876 if (integer_zerop (arg1))
10877 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10878 if (integer_all_onesp (arg1))
10879 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10880 if (operand_equal_p (arg0, arg1, 0))
10881 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10882
10883 /* ~X ^ X is -1. */
10884 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10885 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10886 {
10887 t1 = build_zero_cst (type);
10888 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10889 return omit_one_operand_loc (loc, type, t1, arg1);
10890 }
10891
10892 /* X ^ ~X is -1. */
10893 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10894 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10895 {
10896 t1 = build_zero_cst (type);
10897 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10898 return omit_one_operand_loc (loc, type, t1, arg0);
10899 }
10900
10901 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10902 with a constant, and the two constants have no bits in common,
10903 we should treat this as a BIT_IOR_EXPR since this may produce more
10904 simplifications. */
10905 if (TREE_CODE (arg0) == BIT_AND_EXPR
10906 && TREE_CODE (arg1) == BIT_AND_EXPR
10907 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10908 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10909 && integer_zerop (const_binop (BIT_AND_EXPR,
10910 TREE_OPERAND (arg0, 1),
10911 TREE_OPERAND (arg1, 1))))
10912 {
10913 code = BIT_IOR_EXPR;
10914 goto bit_ior;
10915 }
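      /* For example, (X & 4) ^ (Y & 3) has disjoint constants
         (4 & 3 == 0), so it is handled as (X & 4) | (Y & 3).  */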
10916
10917 /* (X | Y) ^ X -> Y & ~X. */
10918 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10919 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10920 {
10921 tree t2 = TREE_OPERAND (arg0, 1);
10922 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10923 arg1);
10924 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10925 fold_convert_loc (loc, type, t2),
10926 fold_convert_loc (loc, type, t1));
10927 return t1;
10928 }
10929
10930 /* (Y | X) ^ X -> Y & ~X. */
10931 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10932 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10933 {
10934 tree t2 = TREE_OPERAND (arg0, 0);
10935 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10936 arg1);
10937 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10938 fold_convert_loc (loc, type, t2),
10939 fold_convert_loc (loc, type, t1));
10940 return t1;
10941 }
10942
10943 /* X ^ (X | Y) -> Y & ~X. */
10944 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10945 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10946 {
10947 tree t2 = TREE_OPERAND (arg1, 1);
10948 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10949 arg0);
10950 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10951 fold_convert_loc (loc, type, t2),
10952 fold_convert_loc (loc, type, t1));
10953 return t1;
10954 }
10955
10956 /* X ^ (Y | X) -> Y & ~X. */
10957 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10958 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10959 {
10960 tree t2 = TREE_OPERAND (arg1, 0);
10961 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10962 arg0);
10963 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10964 fold_convert_loc (loc, type, t2),
10965 fold_convert_loc (loc, type, t1));
10966 return t1;
10967 }
10968
10969 /* Convert ~X ^ ~Y to X ^ Y. */
10970 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10971 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10972 return fold_build2_loc (loc, code, type,
10973 fold_convert_loc (loc, type,
10974 TREE_OPERAND (arg0, 0)),
10975 fold_convert_loc (loc, type,
10976 TREE_OPERAND (arg1, 0)));
10977
10978 /* Convert ~X ^ C to X ^ ~C. */
10979 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10980 && TREE_CODE (arg1) == INTEGER_CST)
10981 return fold_build2_loc (loc, code, type,
10982 fold_convert_loc (loc, type,
10983 TREE_OPERAND (arg0, 0)),
10984 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10985
10986 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10987 if (TREE_CODE (arg0) == BIT_AND_EXPR
10988 && integer_onep (TREE_OPERAND (arg0, 1))
10989 && integer_onep (arg1))
10990 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10991 build_int_cst (TREE_TYPE (arg0), 0));
10992
10993 /* Fold (X & Y) ^ Y as ~X & Y. */
10994 if (TREE_CODE (arg0) == BIT_AND_EXPR
10995 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10996 {
10997 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10998 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10999 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11000 fold_convert_loc (loc, type, arg1));
11001 }
11002 /* Fold (X & Y) ^ X as ~Y & X. */
11003 if (TREE_CODE (arg0) == BIT_AND_EXPR
11004 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11005 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11006 {
11007 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11008 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11009 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11010 fold_convert_loc (loc, type, arg1));
11011 }
11012 /* Fold X ^ (X & Y) as X & ~Y. */
11013 if (TREE_CODE (arg1) == BIT_AND_EXPR
11014 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11015 {
11016 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11017 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11018 fold_convert_loc (loc, type, arg0),
11019 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11020 }
11021 /* Fold X ^ (Y & X) as ~Y & X. */
11022 if (TREE_CODE (arg1) == BIT_AND_EXPR
11023 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11024 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11025 {
11026 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11027 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11028 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11029 fold_convert_loc (loc, type, arg0));
11030 }
11031
11032 /* See if this can be simplified into a rotate first. If that
11033 is unsuccessful continue in the association code. */
11034 goto bit_rotate;
11035
11036 case BIT_AND_EXPR:
11037 if (integer_all_onesp (arg1))
11038 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11039 if (integer_zerop (arg1))
11040 return omit_one_operand_loc (loc, type, arg1, arg0);
11041 if (operand_equal_p (arg0, arg1, 0))
11042 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11043
11044 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11045 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11046 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11047 || (TREE_CODE (arg0) == EQ_EXPR
11048 && integer_zerop (TREE_OPERAND (arg0, 1))))
11049 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11050 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11051
11052 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11053 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11054 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11055 || (TREE_CODE (arg1) == EQ_EXPR
11056 && integer_zerop (TREE_OPERAND (arg1, 1))))
11057 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11058 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11059
11060 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11061 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11062 && TREE_CODE (arg1) == INTEGER_CST
11063 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11064 {
11065 tree tmp1 = fold_convert_loc (loc, type, arg1);
11066 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11067 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11068 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11069 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11070 return
11071 fold_convert_loc (loc, type,
11072 fold_build2_loc (loc, BIT_IOR_EXPR,
11073 type, tmp2, tmp3));
11074 }
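      /* E.g. (X | 3) & 5 becomes (X & 5) | (3 & 5), and the constant
         subexpression folds to 1, giving (X & 5) | 1.  */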
11075
11076 /* (X | Y) & Y is (X, Y). */
11077 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11078 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11079 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11080 /* (X | Y) & X is (Y, X). */
11081 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11082 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11083 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11084 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11085 /* X & (X | Y) is (Y, X). */
11086 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11087 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11088 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11089 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11090 /* X & (Y | X) is (Y, X). */
11091 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11092 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11093 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11094 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11095
11096 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11097 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11098 && integer_onep (TREE_OPERAND (arg0, 1))
11099 && integer_onep (arg1))
11100 {
11101 tem = TREE_OPERAND (arg0, 0);
11102 return fold_build2_loc (loc, EQ_EXPR, type,
11103 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11104 build_int_cst (TREE_TYPE (tem), 1)),
11105 build_int_cst (TREE_TYPE (tem), 0));
11106 }
11107 /* Fold ~X & 1 as (X & 1) == 0. */
11108 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11109 && integer_onep (arg1))
11110 {
11111 tem = TREE_OPERAND (arg0, 0);
11112 return fold_build2_loc (loc, EQ_EXPR, type,
11113 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11114 build_int_cst (TREE_TYPE (tem), 1)),
11115 build_int_cst (TREE_TYPE (tem), 0));
11116 }
11117 /* Fold !X & 1 as X == 0. */
11118 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11119 && integer_onep (arg1))
11120 {
11121 tem = TREE_OPERAND (arg0, 0);
11122 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11123 build_int_cst (TREE_TYPE (tem), 0));
11124 }
11125
11126 /* Fold (X ^ Y) & Y as ~X & Y. */
11127 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11128 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11129 {
11130 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11131 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11132 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11133 fold_convert_loc (loc, type, arg1));
11134 }
11135 /* Fold (X ^ Y) & X as ~Y & X. */
11136 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11137 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11138 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11139 {
11140 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11141 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11142 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11143 fold_convert_loc (loc, type, arg1));
11144 }
11145 /* Fold X & (X ^ Y) as X & ~Y. */
11146 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11147 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11148 {
11149 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11150 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11151 fold_convert_loc (loc, type, arg0),
11152 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11153 }
11154 /* Fold X & (Y ^ X) as ~Y & X. */
11155 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11156 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11157 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11158 {
11159 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11160 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11161 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11162 fold_convert_loc (loc, type, arg0));
11163 }
11164
11165 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11166 ((A & N) + B) & M -> (A + B) & M
11167 Similarly if (N & M) == 0,
11168 ((A | N) + B) & M -> (A + B) & M
11169 and for - instead of + (or unary - instead of +)
11170 and/or ^ instead of |.
11171 If B is constant and (B & M) == 0, fold into A & M. */
11172 if (host_integerp (arg1, 1))
11173 {
11174 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11175 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11176 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11177 && (TREE_CODE (arg0) == PLUS_EXPR
11178 || TREE_CODE (arg0) == MINUS_EXPR
11179 || TREE_CODE (arg0) == NEGATE_EXPR)
11180 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11181 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11182 {
11183 tree pmop[2];
11184 int which = 0;
11185 unsigned HOST_WIDE_INT cst0;
11186
11187 /* Now we know that arg0 is (C + D) or (C - D) or
11188 -C, and that arg1 (M) == (1LL << cst) - 1.
11189 Store C into PMOP[0] and D into PMOP[1]. */
11190 pmop[0] = TREE_OPERAND (arg0, 0);
11191 pmop[1] = NULL;
11192 if (TREE_CODE (arg0) != NEGATE_EXPR)
11193 {
11194 pmop[1] = TREE_OPERAND (arg0, 1);
11195 which = 1;
11196 }
11197
11198 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11199 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11200 & cst1) != cst1)
11201 which = -1;
11202
11203 for (; which >= 0; which--)
11204 switch (TREE_CODE (pmop[which]))
11205 {
11206 case BIT_AND_EXPR:
11207 case BIT_IOR_EXPR:
11208 case BIT_XOR_EXPR:
11209 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11210 != INTEGER_CST)
11211 break;
11212 /* tree_low_cst is not used because we don't care about
11213 the upper bits. */
11214 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11215 cst0 &= cst1;
11216 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11217 {
11218 if (cst0 != cst1)
11219 break;
11220 }
11221 else if (cst0 != 0)
11222 break;
11223 /* If C or D is of the form (A & N) where
11224 (N & M) == M, or of the form (A | N) or
11225 (A ^ N) where (N & M) == 0, replace it with A. */
11226 pmop[which] = TREE_OPERAND (pmop[which], 0);
11227 break;
11228 case INTEGER_CST:
11229 /* If C or D is a N where (N & M) == 0, it can be
11230 omitted (assumed 0). */
11231 if ((TREE_CODE (arg0) == PLUS_EXPR
11232 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11233 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11234 pmop[which] = NULL;
11235 break;
11236 default:
11237 break;
11238 }
11239
11240 /* Only build anything new if we optimized one or both arguments
11241 above. */
11242 if (pmop[0] != TREE_OPERAND (arg0, 0)
11243 || (TREE_CODE (arg0) != NEGATE_EXPR
11244 && pmop[1] != TREE_OPERAND (arg0, 1)))
11245 {
11246 tree utype = TREE_TYPE (arg0);
11247 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11248 {
11249 /* Perform the operations in a type that has defined
11250 overflow behavior. */
11251 utype = unsigned_type_for (TREE_TYPE (arg0));
11252 if (pmop[0] != NULL)
11253 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11254 if (pmop[1] != NULL)
11255 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11256 }
11257
11258 if (TREE_CODE (arg0) == NEGATE_EXPR)
11259 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11260 else if (TREE_CODE (arg0) == PLUS_EXPR)
11261 {
11262 if (pmop[0] != NULL && pmop[1] != NULL)
11263 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11264 pmop[0], pmop[1]);
11265 else if (pmop[0] != NULL)
11266 tem = pmop[0];
11267 else if (pmop[1] != NULL)
11268 tem = pmop[1];
11269 else
11270 return build_int_cst (type, 0);
11271 }
11272 else if (pmop[0] == NULL)
11273 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11274 else
11275 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11276 pmop[0], pmop[1]);
11277 /* TEM is now the new binary +, - or unary - replacement. */
11278 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11279 fold_convert_loc (loc, utype, arg1));
11280 return fold_convert_loc (loc, type, tem);
11281 }
11282 }
11283 }
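	  /* Two sketches of the rewrite above, with M == 0x0F: in
	     ((A & 0xFF) + B) & 0x0F the inner AND is dropped because
	     (0xFF & 0x0F) == 0x0F, giving (A + B) & 0x0F; and in
	     ((A | 0x10) + B) & 0x0F the inner IOR is dropped because
	     (0x10 & 0x0F) == 0, again giving (A + B) & 0x0F.  */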
11284
11285 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11286 if (t1 != NULL_TREE)
11287 return t1;
11288 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11289 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11290 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11291 {
11292 unsigned int prec
11293 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11294
11295 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11296 && (~TREE_INT_CST_LOW (arg1)
11297 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11298 return
11299 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11300 }
11301
11302 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11303
11304 This results in more efficient code for machines without a NOR
11305 instruction. Combine will canonicalize to the first form
11306 which will allow use of NOR instructions provided by the
11307 backend if they exist. */
11308 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11309 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11310 {
11311 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11312 build2 (BIT_IOR_EXPR, type,
11313 fold_convert_loc (loc, type,
11314 TREE_OPERAND (arg0, 0)),
11315 fold_convert_loc (loc, type,
11316 TREE_OPERAND (arg1, 0))));
11317 }
11318
11319 /* If arg0 is derived from the address of an object or function, we may
11320 be able to fold this expression using the object or function's
11321 alignment. */
11322 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11323 {
11324 unsigned HOST_WIDE_INT modulus, residue;
11325 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11326
11327 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11328 integer_onep (arg1));
11329
11330 /* This works because modulus is a power of 2. If this weren't the
11331 case, we'd have to replace it by its greatest power-of-2
11332 divisor: modulus & -modulus. */
11333 if (low < modulus)
11334 return build_int_cst (type, residue & low);
11335 }
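      /* A hedged example: if arg0 is the address of an object known to
         be 8-byte aligned, get_pointer_modulus_and_residue yields
         modulus == 8 and residue == 0, so ptr & 7 folds to 0.  */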
11336
11337 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11338 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11339 if the new mask might be further optimized. */
11340 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11341 || TREE_CODE (arg0) == RSHIFT_EXPR)
11342 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11343 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11344 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11345 < TYPE_PRECISION (TREE_TYPE (arg0))
11346 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11347 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11348 {
11349 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11350 unsigned HOST_WIDE_INT mask
11351 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11352 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11353 tree shift_type = TREE_TYPE (arg0);
11354
11355 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11356 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11357 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11358 && TYPE_PRECISION (TREE_TYPE (arg0))
11359 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11360 {
11361 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11362 tree arg00 = TREE_OPERAND (arg0, 0);
11363 /* See if more bits can be proven as zero because of
11364 zero extension. */
11365 if (TREE_CODE (arg00) == NOP_EXPR
11366 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11367 {
11368 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11369 if (TYPE_PRECISION (inner_type)
11370 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11371 && TYPE_PRECISION (inner_type) < prec)
11372 {
11373 prec = TYPE_PRECISION (inner_type);
11374 /* See if we can shorten the right shift. */
11375 if (shiftc < prec)
11376 shift_type = inner_type;
11377 }
11378 }
11379 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11380 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11381 zerobits <<= prec - shiftc;
11382 /* For an arithmetic shift, if the sign bit could be set, zerobits
11383 may actually contain sign bits, so no transformation is
11384 possible unless MASK masks them all away. In that
11385 case the shift needs to be converted into a logical shift. */
11386 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11387 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11388 {
11389 if ((mask & zerobits) == 0)
11390 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11391 else
11392 zerobits = 0;
11393 }
11394 }
11395
11396 /* ((X << 16) & 0xff00) is (X, 0). */
11397 if ((mask & zerobits) == mask)
11398 return omit_one_operand_loc (loc, type,
11399 build_int_cst (type, 0), arg0);
11400
11401 newmask = mask | zerobits;
11402 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11403 {
11404 unsigned int prec;
11405
11406 /* Only do the transformation if NEWMASK is some integer
11407 mode's mask. */
11408 for (prec = BITS_PER_UNIT;
11409 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11410 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11411 break;
11412 if (prec < HOST_BITS_PER_WIDE_INT
11413 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11414 {
11415 tree newmaskt;
11416
11417 if (shift_type != TREE_TYPE (arg0))
11418 {
11419 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11420 fold_convert_loc (loc, shift_type,
11421 TREE_OPERAND (arg0, 0)),
11422 TREE_OPERAND (arg0, 1));
11423 tem = fold_convert_loc (loc, type, tem);
11424 }
11425 else
11426 tem = op0;
11427 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11428 if (!tree_int_cst_equal (newmaskt, arg1))
11429 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11430 }
11431 }
11432 }
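      /* A concrete sketch, assuming a 32-bit X: in (X << 8) & 0xFF00
         the low eight bits are already zero, so NEWMASK becomes 0xFFFF,
         which is a 16-bit mode mask; the expression is rewritten as
         (X << 8) & 0xFFFF, which may simplify further.  */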
11433
11434 goto associate;
11435
11436 case RDIV_EXPR:
11437 /* Don't touch a floating-point divide by zero unless the mode
11438 of the constant can represent infinity. */
11439 if (TREE_CODE (arg1) == REAL_CST
11440 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11441 && real_zerop (arg1))
11442 return NULL_TREE;
11443
11444 /* Optimize A / A to 1.0 if we don't care about
11445 NaNs or Infinities. Skip the transformation
11446 for non-real operands. */
11447 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11448 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11449 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11450 && operand_equal_p (arg0, arg1, 0))
11451 {
11452 tree r = build_real (TREE_TYPE (arg0), dconst1);
11453
11454 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11455 }
11456
11457 /* The complex version of the above A / A optimization. */
11458 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11459 && operand_equal_p (arg0, arg1, 0))
11460 {
11461 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11462 if (! HONOR_NANS (TYPE_MODE (elem_type))
11463 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11464 {
11465 tree r = build_real (elem_type, dconst1);
11466 /* omit_two_operands will call fold_convert for us. */
11467 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11468 }
11469 }
11470
11471 /* (-A) / (-B) -> A / B */
11472 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11473 return fold_build2_loc (loc, RDIV_EXPR, type,
11474 TREE_OPERAND (arg0, 0),
11475 negate_expr (arg1));
11476 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11477 return fold_build2_loc (loc, RDIV_EXPR, type,
11478 negate_expr (arg0),
11479 TREE_OPERAND (arg1, 0));
11480
11481 /* In IEEE floating point, x/1 is not equivalent to x for SNaNs. */
11482 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11483 && real_onep (arg1))
11484 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11485
11486 /* In IEEE floating point, x/-1 is not equivalent to -x for SNaNs. */
11487 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11488 && real_minus_onep (arg1))
11489 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11490 negate_expr (arg0)));
11491
11492 /* If ARG1 is a constant, we can convert this to a multiply by the
11493 reciprocal. This does not have the same rounding properties,
11494 so only do this if -freciprocal-math. We can actually
11495 always safely do it if ARG1 is a power of two, but it's hard to
11496 tell if it is or not in a portable manner. */
11497 if (TREE_CODE (arg1) == REAL_CST)
11498 {
11499 if (flag_reciprocal_math
11500 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11501 arg1)))
11502 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11503 /* Find the reciprocal if optimizing and the result is exact. */
11504 if (optimize)
11505 {
11506 REAL_VALUE_TYPE r;
11507 r = TREE_REAL_CST (arg1);
11508 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11509 {
11510 tem = build_real (type, r);
11511 return fold_build2_loc (loc, MULT_EXPR, type,
11512 fold_convert_loc (loc, type, arg0), tem);
11513 }
11514 }
11515 }
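      /* For instance, x / 4.0 becomes x * 0.25 whenever we optimize,
         since 0.25 is the exact reciprocal of 4.0; x / 3.0 is rewritten
         as x * (1.0/3.0) only under -freciprocal-math, because that
         reciprocal rounds.  */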
11516 /* Convert A/B/C to A/(B*C). */
11517 if (flag_reciprocal_math
11518 && TREE_CODE (arg0) == RDIV_EXPR)
11519 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11520 fold_build2_loc (loc, MULT_EXPR, type,
11521 TREE_OPERAND (arg0, 1), arg1));
11522
11523 /* Convert A/(B/C) to (A/B)*C. */
11524 if (flag_reciprocal_math
11525 && TREE_CODE (arg1) == RDIV_EXPR)
11526 return fold_build2_loc (loc, MULT_EXPR, type,
11527 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11528 TREE_OPERAND (arg1, 0)),
11529 TREE_OPERAND (arg1, 1));
11530
11531 /* Convert C1/(X*C2) into (C1/C2)/X. */
11532 if (flag_reciprocal_math
11533 && TREE_CODE (arg1) == MULT_EXPR
11534 && TREE_CODE (arg0) == REAL_CST
11535 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11536 {
11537 tree tem = const_binop (RDIV_EXPR, arg0,
11538 TREE_OPERAND (arg1, 1));
11539 if (tem)
11540 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11541 TREE_OPERAND (arg1, 0));
11542 }
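      /* Under -freciprocal-math the three rewrites above each save a
         division; e.g. 10.0/(x*5.0) becomes (10.0/5.0)/x, i.e. 2.0/x.  */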
11543
11544 if (flag_unsafe_math_optimizations)
11545 {
11546 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11547 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11548
11549 /* Optimize sin(x)/cos(x) as tan(x). */
11550 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11551 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11552 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11553 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11554 CALL_EXPR_ARG (arg1, 0), 0))
11555 {
11556 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11557
11558 if (tanfn != NULL_TREE)
11559 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11560 }
11561
11562 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11563 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11564 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11565 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11566 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11567 CALL_EXPR_ARG (arg1, 0), 0))
11568 {
11569 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11570
11571 if (tanfn != NULL_TREE)
11572 {
11573 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11574 CALL_EXPR_ARG (arg0, 0));
11575 return fold_build2_loc (loc, RDIV_EXPR, type,
11576 build_real (type, dconst1), tmp);
11577 }
11578 }
11579
11580 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11581 NaNs or Infinities. */
11582 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11583 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11584 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11585 {
11586 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11587 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11588
11589 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11590 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11591 && operand_equal_p (arg00, arg01, 0))
11592 {
11593 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11594
11595 if (cosfn != NULL_TREE)
11596 return build_call_expr_loc (loc, cosfn, 1, arg00);
11597 }
11598 }
11599
11600 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11601 NaNs or Infinities. */
11602 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11603 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11604 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11605 {
11606 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11607 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11608
11609 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11610 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11611 && operand_equal_p (arg00, arg01, 0))
11612 {
11613 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11614
11615 if (cosfn != NULL_TREE)
11616 {
11617 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11618 return fold_build2_loc (loc, RDIV_EXPR, type,
11619 build_real (type, dconst1),
11620 tmp);
11621 }
11622 }
11623 }
11624
11625 /* Optimize pow(x,c)/x as pow(x,c-1). */
11626 if (fcode0 == BUILT_IN_POW
11627 || fcode0 == BUILT_IN_POWF
11628 || fcode0 == BUILT_IN_POWL)
11629 {
11630 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11631 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11632 if (TREE_CODE (arg01) == REAL_CST
11633 && !TREE_OVERFLOW (arg01)
11634 && operand_equal_p (arg1, arg00, 0))
11635 {
11636 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11637 REAL_VALUE_TYPE c;
11638 tree arg;
11639
11640 c = TREE_REAL_CST (arg01);
11641 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11642 arg = build_real (type, c);
11643 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11644 }
11645 }
11646
11647 /* Optimize a/root(b/c) into a*root(c/b). */
11648 if (BUILTIN_ROOT_P (fcode1))
11649 {
11650 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11651
11652 if (TREE_CODE (rootarg) == RDIV_EXPR)
11653 {
11654 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11655 tree b = TREE_OPERAND (rootarg, 0);
11656 tree c = TREE_OPERAND (rootarg, 1);
11657
11658 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11659
11660 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11661 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11662 }
11663 }
11664
11665 /* Optimize x/expN(y) into x*expN(-y). */
11666 if (BUILTIN_EXPONENT_P (fcode1))
11667 {
11668 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11669 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11670 arg1 = build_call_expr_loc (loc,
11671 expfn, 1,
11672 fold_convert_loc (loc, type, arg));
11673 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11674 }
11675
11676 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11677 if (fcode1 == BUILT_IN_POW
11678 || fcode1 == BUILT_IN_POWF
11679 || fcode1 == BUILT_IN_POWL)
11680 {
11681 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11682 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11683 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11684 tree neg11 = fold_convert_loc (loc, type,
11685 negate_expr (arg11));
11686 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11687 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11688 }
11689 }
11690 return NULL_TREE;
11691
11692 case TRUNC_DIV_EXPR:
11693 /* Optimize (X & (-A)) / A where A is a power of 2,
11694 to X >> log2(A) */
11695 if (TREE_CODE (arg0) == BIT_AND_EXPR
11696 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11697 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11698 {
11699 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11700 arg1, TREE_OPERAND (arg0, 1));
11701 if (sum && integer_zerop (sum)) {
11702 unsigned long pow2;
11703
11704 if (TREE_INT_CST_LOW (arg1))
11705 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11706 else
11707 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11708 + HOST_BITS_PER_WIDE_INT;
11709
11710 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11711 TREE_OPERAND (arg0, 0),
11712 build_int_cst (integer_type_node, pow2));
11713 }
11714 }
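      /* For example, with signed X, (X & -8) / 8 becomes X >> 3: the
         AND already cleared the low three bits, so the truncating
         division is an exact arithmetic shift.  */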
11715
11716 /* Fall thru */
11717
11718 case FLOOR_DIV_EXPR:
11719 /* Simplify A / (B << N) where A and B are positive and B is
11720 a power of 2, to A >> (N + log2(B)). */
11721 strict_overflow_p = false;
11722 if (TREE_CODE (arg1) == LSHIFT_EXPR
11723 && (TYPE_UNSIGNED (type)
11724 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11725 {
11726 tree sval = TREE_OPERAND (arg1, 0);
11727 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11728 {
11729 tree sh_cnt = TREE_OPERAND (arg1, 1);
11730 unsigned long pow2;
11731
11732 if (TREE_INT_CST_LOW (sval))
11733 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11734 else
11735 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11736 + HOST_BITS_PER_WIDE_INT;
11737
11738 if (strict_overflow_p)
11739 fold_overflow_warning (("assuming signed overflow does not "
11740 "occur when simplifying A / (B << N)"),
11741 WARN_STRICT_OVERFLOW_MISC);
11742
11743 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11744 sh_cnt,
11745 build_int_cst (TREE_TYPE (sh_cnt),
11746 pow2));
11747 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11748 fold_convert_loc (loc, type, arg0), sh_cnt);
11749 }
11750 }
11751
11752 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11753 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11754 if (INTEGRAL_TYPE_P (type)
11755 && TYPE_UNSIGNED (type)
11756 && code == FLOOR_DIV_EXPR)
11757 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11758
11759 /* Fall thru */
11760
11761 case ROUND_DIV_EXPR:
11762 case CEIL_DIV_EXPR:
11763 case EXACT_DIV_EXPR:
11764 if (integer_onep (arg1))
11765 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11766 if (integer_zerop (arg1))
11767 return NULL_TREE;
11768 /* X / -1 is -X. */
11769 if (!TYPE_UNSIGNED (type)
11770 && TREE_CODE (arg1) == INTEGER_CST
11771 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11772 && TREE_INT_CST_HIGH (arg1) == -1)
11773 return fold_convert_loc (loc, type, negate_expr (arg0));
11774
11775 /* Convert -A / -B to A / B when the type is signed and overflow is
11776 undefined. */
11777 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11778 && TREE_CODE (arg0) == NEGATE_EXPR
11779 && negate_expr_p (arg1))
11780 {
11781 if (INTEGRAL_TYPE_P (type))
11782 fold_overflow_warning (("assuming signed overflow does not occur "
11783 "when distributing negation across "
11784 "division"),
11785 WARN_STRICT_OVERFLOW_MISC);
11786 return fold_build2_loc (loc, code, type,
11787 fold_convert_loc (loc, type,
11788 TREE_OPERAND (arg0, 0)),
11789 fold_convert_loc (loc, type,
11790 negate_expr (arg1)));
11791 }
11792 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11793 && TREE_CODE (arg1) == NEGATE_EXPR
11794 && negate_expr_p (arg0))
11795 {
11796 if (INTEGRAL_TYPE_P (type))
11797 fold_overflow_warning (("assuming signed overflow does not occur "
11798 "when distributing negation across "
11799 "division"),
11800 WARN_STRICT_OVERFLOW_MISC);
11801 return fold_build2_loc (loc, code, type,
11802 fold_convert_loc (loc, type,
11803 negate_expr (arg0)),
11804 fold_convert_loc (loc, type,
11805 TREE_OPERAND (arg1, 0)));
11806 }
11807
11808 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11809 operation, EXACT_DIV_EXPR.
11810
11811 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11812 At one time others generated faster code, but it's not clear whether they
11813 still do after the last round of changes to the DIV code in expmed.c.  */
11814 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11815 && multiple_of_p (type, arg0, arg1))
11816 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11817
11818 strict_overflow_p = false;
11819 if (TREE_CODE (arg1) == INTEGER_CST
11820 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11821 &strict_overflow_p)))
11822 {
11823 if (strict_overflow_p)
11824 fold_overflow_warning (("assuming signed overflow does not occur "
11825 "when simplifying division"),
11826 WARN_STRICT_OVERFLOW_MISC);
11827 return fold_convert_loc (loc, type, tem);
11828 }
11829
11830 return NULL_TREE;
11831
11832 case CEIL_MOD_EXPR:
11833 case FLOOR_MOD_EXPR:
11834 case ROUND_MOD_EXPR:
11835 case TRUNC_MOD_EXPR:
11836 /* X % 1 is always zero, but be sure to preserve any side
11837 effects in X. */
11838 if (integer_onep (arg1))
11839 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11840
11841 /* For X % 0, return X % 0 unchanged so that we get the
11842 proper warnings and errors.  */
11843 if (integer_zerop (arg1))
11844 return NULL_TREE;
11845
11846 /* 0 % X is always zero, but be sure to preserve any side
11847 effects in X. Place this after checking for X == 0. */
11848 if (integer_zerop (arg0))
11849 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11850
11851 /* X % -1 is zero. */
11852 if (!TYPE_UNSIGNED (type)
11853 && TREE_CODE (arg1) == INTEGER_CST
11854 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11855 && TREE_INT_CST_HIGH (arg1) == -1)
11856 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11857
11858 /* X % -C is the same as X % C. */
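/* In C's truncating division the remainder takes the sign of the
   dividend: (-7) % 4 == -3 and (-7) % -4 == -3 alike, so negating a
   (non-INT_MIN) divisor never changes the result.  */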
11859 if (code == TRUNC_MOD_EXPR
11860 && !TYPE_UNSIGNED (type)
11861 && TREE_CODE (arg1) == INTEGER_CST
11862 && !TREE_OVERFLOW (arg1)
11863 && TREE_INT_CST_HIGH (arg1) < 0
11864 && !TYPE_OVERFLOW_TRAPS (type)
11865 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11866 && !sign_bit_p (arg1, arg1))
11867 return fold_build2_loc (loc, code, type,
11868 fold_convert_loc (loc, type, arg0),
11869 fold_convert_loc (loc, type,
11870 negate_expr (arg1)));
11871
11872 /* X % -Y is the same as X % Y. */
11873 if (code == TRUNC_MOD_EXPR
11874 && !TYPE_UNSIGNED (type)
11875 && TREE_CODE (arg1) == NEGATE_EXPR
11876 && !TYPE_OVERFLOW_TRAPS (type))
11877 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11878 fold_convert_loc (loc, type,
11879 TREE_OPERAND (arg1, 0)));
11880
11881 strict_overflow_p = false;
11882 if (TREE_CODE (arg1) == INTEGER_CST
11883 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11884 &strict_overflow_p)))
11885 {
11886 if (strict_overflow_p)
11887 fold_overflow_warning (("assuming signed overflow does not occur "
11888 "when simplifying modulus"),
11889 WARN_STRICT_OVERFLOW_MISC);
11890 return fold_convert_loc (loc, type, tem);
11891 }
11892
11893 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11894 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
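/* For example, for nonnegative X, X % 8 folds to X & 7: the remainder
   modulo a power of two is just the low bits.  */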
11895 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11896 && (TYPE_UNSIGNED (type)
11897 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11898 {
11899 tree c = arg1;
11900 /* Also optimize A % (C << N) where C is a power of 2,
11901 to A & ((C << N) - 1). */
11902 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11903 c = TREE_OPERAND (arg1, 0);
11904
11905 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11906 {
11907 tree mask
11908 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11909 build_int_cst (TREE_TYPE (arg1), 1));
11910 if (strict_overflow_p)
11911 fold_overflow_warning (("assuming signed overflow does not "
11912 "occur when simplifying "
11913 "X % (power of two)"),
11914 WARN_STRICT_OVERFLOW_MISC);
11915 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11916 fold_convert_loc (loc, type, arg0),
11917 fold_convert_loc (loc, type, mask));
11918 }
11919 }
11920
11921 return NULL_TREE;
11922
11923 case LROTATE_EXPR:
11924 case RROTATE_EXPR:
11925 if (integer_all_onesp (arg0))
11926 return omit_one_operand_loc (loc, type, arg0, arg1);
11927 goto shift;
11928
11929 case RSHIFT_EXPR:
11930 /* Optimize -1 >> x for arithmetic right shifts. */
11931 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11932 && tree_expr_nonnegative_p (arg1))
11933 return omit_one_operand_loc (loc, type, arg0, arg1);
11934 /* ... fall through ... */
11935
11936 case LSHIFT_EXPR:
11937 shift:
11938 if (integer_zerop (arg1))
11939 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11940 if (integer_zerop (arg0))
11941 return omit_one_operand_loc (loc, type, arg0, arg1);
11942
11943 /* Since a negative shift count is not well-defined,
11944 don't try to compute it in the compiler.  */
11945 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11946 return NULL_TREE;
11947
11948 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
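/* For example, (x << 3) << 5 becomes x << 8.  The code below adjusts
   combined counts that reach the type precision, where a single shift
   would be undefined.  */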
11949 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11950 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11951 && host_integerp (TREE_OPERAND (arg0, 1), false)
11952 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11953 {
11954 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11955 + TREE_INT_CST_LOW (arg1));
11956
11957 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11958 being well defined. */
11959 if (low >= TYPE_PRECISION (type))
11960 {
11961 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11962 low = low % TYPE_PRECISION (type);
11963 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11964 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11965 TREE_OPERAND (arg0, 0));
11966 else
11967 low = TYPE_PRECISION (type) - 1;
11968 }
11969
11970 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11971 build_int_cst (type, low));
11972 }
11973
11974 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11975 into x & ((unsigned)-1 >> c) for unsigned types. */
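/* For example, for a 32-bit unsigned x, (x >> 4) << 4 becomes
   x & 0xfffffff0 and (x << 4) >> 4 becomes x & 0x0fffffff; the mask is
   computed by applying the same shift to an all-ones constant.  */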
11976 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11977 || (TYPE_UNSIGNED (type)
11978 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11979 && host_integerp (arg1, false)
11980 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11981 && host_integerp (TREE_OPERAND (arg0, 1), false)
11982 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11983 {
11984 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11985 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11986 tree lshift;
11987 tree arg00;
11988
11989 if (low0 == low1)
11990 {
11991 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11992
11993 lshift = build_int_cst (type, -1);
11994 lshift = int_const_binop (code, lshift, arg1);
11995
11996 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11997 }
11998 }
11999
12000 /* Rewrite an LROTATE_EXPR by a constant into an
12001 RROTATE_EXPR by a new constant. */
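/* For example, rotating a 32-bit value left by 8 is the same as
   rotating it right by 32 - 8 == 24.  */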
12002 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12003 {
12004 tree tem = build_int_cst (TREE_TYPE (arg1),
12005 TYPE_PRECISION (type));
12006 tem = const_binop (MINUS_EXPR, tem, arg1);
12007 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12008 }
12009
12010 /* If we have a rotate of a bit operation with the rotate count and
12011 the second operand of the bit operation both constant,
12012 permute the two operations. */
12013 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12014 && (TREE_CODE (arg0) == BIT_AND_EXPR
12015 || TREE_CODE (arg0) == BIT_IOR_EXPR
12016 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12017 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12018 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12019 fold_build2_loc (loc, code, type,
12020 TREE_OPERAND (arg0, 0), arg1),
12021 fold_build2_loc (loc, code, type,
12022 TREE_OPERAND (arg0, 1), arg1));
12023
12024 /* Two consecutive rotates adding up to the precision of the
12025 type can be ignored. */
12026 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12027 && TREE_CODE (arg0) == RROTATE_EXPR
12028 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12029 && TREE_INT_CST_HIGH (arg1) == 0
12030 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12031 && ((TREE_INT_CST_LOW (arg1)
12032 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12033 == (unsigned int) TYPE_PRECISION (type)))
12034 return TREE_OPERAND (arg0, 0);
12035
12036 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1), and
12037 (X & C2) >> C1 into (X >> C1) & (C2 >> C1),
12038 if the latter can be further optimized.  */
12039 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12040 && TREE_CODE (arg0) == BIT_AND_EXPR
12041 && TREE_CODE (arg1) == INTEGER_CST
12042 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12043 {
12044 tree mask = fold_build2_loc (loc, code, type,
12045 fold_convert_loc (loc, type,
12046 TREE_OPERAND (arg0, 1)),
12047 arg1);
12048 tree shift = fold_build2_loc (loc, code, type,
12049 fold_convert_loc (loc, type,
12050 TREE_OPERAND (arg0, 0)),
12051 arg1);
12052 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12053 if (tem)
12054 return tem;
12055 }
12056
12057 return NULL_TREE;
12058
12059 case MIN_EXPR:
12060 if (operand_equal_p (arg0, arg1, 0))
12061 return omit_one_operand_loc (loc, type, arg0, arg1);
12062 if (INTEGRAL_TYPE_P (type)
12063 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12064 return omit_one_operand_loc (loc, type, arg1, arg0);
12065 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12066 if (tem)
12067 return tem;
12068 goto associate;
12069
12070 case MAX_EXPR:
12071 if (operand_equal_p (arg0, arg1, 0))
12072 return omit_one_operand_loc (loc, type, arg0, arg1);
12073 if (INTEGRAL_TYPE_P (type)
12074 && TYPE_MAX_VALUE (type)
12075 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12076 return omit_one_operand_loc (loc, type, arg1, arg0);
12077 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12078 if (tem)
12079 return tem;
12080 goto associate;
12081
12082 case TRUTH_ANDIF_EXPR:
12083 /* Note that the operands of this must be ints
12084 and their values must be 0 or 1.
12085 ("true" is a fixed value perhaps depending on the language.) */
12086 /* If first arg is constant zero, return it. */
12087 if (integer_zerop (arg0))
12088 return fold_convert_loc (loc, type, arg0);
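/* Fall thru: the TRUTH_AND_EXPR folds below also apply to ANDIF. */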
12089 case TRUTH_AND_EXPR:
12090 /* If either arg is constant true, drop it. */
12091 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12092 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12093 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12094 /* Preserve sequence points. */
12095 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12096 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12097 /* If second arg is constant zero, result is zero, but first arg
12098 must be evaluated. */
12099 if (integer_zerop (arg1))
12100 return omit_one_operand_loc (loc, type, arg1, arg0);
12101 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12102 case will be handled here. */
12103 if (integer_zerop (arg0))
12104 return omit_one_operand_loc (loc, type, arg0, arg1);
12105
12106 /* !X && X is always false. */
12107 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12108 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12109 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12110 /* X && !X is always false. */
12111 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12112 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12113 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12114
12115 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12116 means A >= Y && A != MAX, but in this case we know that
12117 A < X <= MAX. */
12118
12119 if (!TREE_SIDE_EFFECTS (arg0)
12120 && !TREE_SIDE_EFFECTS (arg1))
12121 {
12122 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12123 if (tem && !operand_equal_p (tem, arg0, 0))
12124 return fold_build2_loc (loc, code, type, tem, arg1);
12125
12126 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12127 if (tem && !operand_equal_p (tem, arg1, 0))
12128 return fold_build2_loc (loc, code, type, arg0, tem);
12129 }
12130
12131 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12132 != NULL_TREE)
12133 return tem;
12134
12135 return NULL_TREE;
12136
12137 case TRUTH_ORIF_EXPR:
12138 /* Note that the operands of this must be ints
12139 and their values must be 0 or 1.
12140 ("true" is a fixed value perhaps depending on the language.) */
12141 /* If first arg is constant true, return it. */
12142 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12143 return fold_convert_loc (loc, type, arg0);
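/* Fall thru: the TRUTH_OR_EXPR folds below also apply to ORIF. */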
12144 case TRUTH_OR_EXPR:
12145 /* If either arg is constant zero, drop it. */
12146 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12147 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12148 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12149 /* Preserve sequence points. */
12150 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12151 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12152 /* If second arg is constant true, result is true, but we must
12153 evaluate first arg. */
12154 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12155 return omit_one_operand_loc (loc, type, arg1, arg0);
12156 /* Likewise for first arg, but note this only occurs here for
12157 TRUTH_OR_EXPR. */
12158 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12159 return omit_one_operand_loc (loc, type, arg0, arg1);
12160
12161 /* !X || X is always true. */
12162 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12163 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12164 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12165 /* X || !X is always true. */
12166 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12167 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12168 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12169
12170 /* (X && !Y) || (!X && Y) is X ^ Y */
12171 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12172 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12173 {
12174 tree a0, a1, l0, l1, n0, n1;
12175
12176 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12177 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12178
12179 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12180 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12181
12182 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12183 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12184
12185 if ((operand_equal_p (n0, a0, 0)
12186 && operand_equal_p (n1, a1, 0))
12187 || (operand_equal_p (n0, a1, 0)
12188 && operand_equal_p (n1, a0, 0)))
12189 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12190 }
12191
12192 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12193 != NULL_TREE)
12194 return tem;
12195
12196 return NULL_TREE;
12197
12198 case TRUTH_XOR_EXPR:
12199 /* If the second arg is constant zero, drop it. */
12200 if (integer_zerop (arg1))
12201 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12202 /* If the second arg is constant true, this is a logical inversion. */
12203 if (integer_onep (arg1))
12204 {
12205 /* Only call invert_truthvalue if operand is a truth value. */
12206 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12207 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12208 else
12209 tem = invert_truthvalue_loc (loc, arg0);
12210 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12211 }
12212 /* Identical arguments cancel to zero. */
12213 if (operand_equal_p (arg0, arg1, 0))
12214 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12215
12216 /* !X ^ X is always true. */
12217 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12218 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12219 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12220
12221 /* X ^ !X is always true. */
12222 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12223 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12224 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12225
12226 return NULL_TREE;
12227
12228 case EQ_EXPR:
12229 case NE_EXPR:
12230 STRIP_NOPS (arg0);
12231 STRIP_NOPS (arg1);
12232
12233 tem = fold_comparison (loc, code, type, op0, op1);
12234 if (tem != NULL_TREE)
12235 return tem;
12236
12237 /* bool_var != 0 becomes bool_var. */
12238 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12239 && code == NE_EXPR)
12240 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12241
12242 /* bool_var == 1 becomes bool_var. */
12243 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12244 && code == EQ_EXPR)
12245 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12246
12247 /* bool_var != 1 becomes !bool_var. */
12248 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12249 && code == NE_EXPR)
12250 return fold_convert_loc (loc, type,
12251 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12252 TREE_TYPE (arg0), arg0));
12253
12254 /* bool_var == 0 becomes !bool_var. */
12255 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12256 && code == EQ_EXPR)
12257 return fold_convert_loc (loc, type,
12258 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12259 TREE_TYPE (arg0), arg0));
12260
12261 /* !exp != 0 becomes !exp */
12262 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12263 && code == NE_EXPR)
12264 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12265
12266 /* If this is an equality comparison of the address of two non-weak,
12267 unaliased symbols, neither of which is extern (since we do not
12268 have access to attributes for externs), then we know the result. */
12269 if (TREE_CODE (arg0) == ADDR_EXPR
12270 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12271 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12272 && ! lookup_attribute ("alias",
12273 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12274 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12275 && TREE_CODE (arg1) == ADDR_EXPR
12276 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12277 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12278 && ! lookup_attribute ("alias",
12279 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12280 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12281 {
12282 /* We know that we're looking at the address of two
12283 non-weak, unaliased, static _DECL nodes.
12284
12285 It is both wasteful and incorrect to call operand_equal_p
12286 to compare the two ADDR_EXPR nodes. It is wasteful in that
12287 all we need to do is test pointer equality for the arguments
12288 to the two ADDR_EXPR nodes. It is incorrect to use
12289 operand_equal_p as that function is NOT equivalent to a
12290 C equality test. It can in fact return false for two
12291 objects which would test as equal using the C equality
12292 operator. */
12293 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12294 return constant_boolean_node (equal
12295 ? code == EQ_EXPR : code != EQ_EXPR,
12296 type);
12297 }
12298
12299 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12300 a MINUS_EXPR of a constant, we can convert it into a comparison with
12301 a revised constant as long as no overflow occurs. */
12302 if (TREE_CODE (arg1) == INTEGER_CST
12303 && (TREE_CODE (arg0) == PLUS_EXPR
12304 || TREE_CODE (arg0) == MINUS_EXPR)
12305 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12306 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12307 ? MINUS_EXPR : PLUS_EXPR,
12308 fold_convert_loc (loc, TREE_TYPE (arg0),
12309 arg1),
12310 TREE_OPERAND (arg0, 1)))
12311 && !TREE_OVERFLOW (tem))
12312 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12313
12314 /* Similarly for a NEGATE_EXPR. */
12315 if (TREE_CODE (arg0) == NEGATE_EXPR
12316 && TREE_CODE (arg1) == INTEGER_CST
12317 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12318 arg1)))
12319 && TREE_CODE (tem) == INTEGER_CST
12320 && !TREE_OVERFLOW (tem))
12321 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12322
12323 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
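/* For example, (X ^ 5) == 3 folds to X == 6, since XOR with a
   constant is its own inverse and 5 ^ 3 == 6.  */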
12324 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12325 && TREE_CODE (arg1) == INTEGER_CST
12326 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12327 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12328 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12329 fold_convert_loc (loc,
12330 TREE_TYPE (arg0),
12331 arg1),
12332 TREE_OPERAND (arg0, 1)));
12333
12334 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12335 if ((TREE_CODE (arg0) == PLUS_EXPR
12336 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12337 || TREE_CODE (arg0) == MINUS_EXPR)
12338 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12339 0)),
12340 arg1, 0)
12341 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12342 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12343 {
12344 tree val = TREE_OPERAND (arg0, 1);
12345 return omit_two_operands_loc (loc, type,
12346 fold_build2_loc (loc, code, type,
12347 val,
12348 build_int_cst (TREE_TYPE (val),
12349 0)),
12350 TREE_OPERAND (arg0, 0), arg1);
12351 }
12352
12353 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12354 if (TREE_CODE (arg0) == MINUS_EXPR
12355 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12356 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12357 1)),
12358 arg1, 0)
12359 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12360 {
12361 return omit_two_operands_loc (loc, type,
12362 code == NE_EXPR
12363 ? boolean_true_node : boolean_false_node,
12364 TREE_OPERAND (arg0, 1), arg1);
12365 }
12366
12367 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12368 for !=. Don't do this for ordered comparisons due to overflow. */
12369 if (TREE_CODE (arg0) == MINUS_EXPR
12370 && integer_zerop (arg1))
12371 return fold_build2_loc (loc, code, type,
12372 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12373
12374 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12375 if (TREE_CODE (arg0) == ABS_EXPR
12376 && (integer_zerop (arg1) || real_zerop (arg1)))
12377 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12378
12379 /* If this is an EQ or NE comparison with zero and ARG0 is
12380 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12381 two operations, but the latter can be done in one less insn
12382 on machines that have only two-operand insns or on which a
12383 constant cannot be the first operand. */
12384 if (TREE_CODE (arg0) == BIT_AND_EXPR
12385 && integer_zerop (arg1))
12386 {
12387 tree arg00 = TREE_OPERAND (arg0, 0);
12388 tree arg01 = TREE_OPERAND (arg0, 1);
12389 if (TREE_CODE (arg00) == LSHIFT_EXPR
12390 && integer_onep (TREE_OPERAND (arg00, 0)))
12391 {
12392 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12393 arg01, TREE_OPERAND (arg00, 1));
12394 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12395 build_int_cst (TREE_TYPE (arg0), 1));
12396 return fold_build2_loc (loc, code, type,
12397 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12398 arg1);
12399 }
12400 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12401 && integer_onep (TREE_OPERAND (arg01, 0)))
12402 {
12403 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12404 arg00, TREE_OPERAND (arg01, 1));
12405 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12406 build_int_cst (TREE_TYPE (arg0), 1));
12407 return fold_build2_loc (loc, code, type,
12408 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12409 arg1);
12410 }
12411 }
12412
12413 /* If this is an NE or EQ comparison of zero against the result of a
12414 signed MOD operation whose second operand is a power of 2, make
12415 the MOD operation unsigned since it is simpler and equivalent. */
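/* For example, for 32-bit int X, "X % 4 == 0" becomes
   "(unsigned) X % 4U == 0": both test whether X is a multiple of 4,
   and the unsigned modulus reduces to a simple mask.  */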
12416 if (integer_zerop (arg1)
12417 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12418 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12419 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12420 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12421 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12422 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12423 {
12424 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12425 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12426 fold_convert_loc (loc, newtype,
12427 TREE_OPERAND (arg0, 0)),
12428 fold_convert_loc (loc, newtype,
12429 TREE_OPERAND (arg0, 1)));
12430
12431 return fold_build2_loc (loc, code, type, newmod,
12432 fold_convert_loc (loc, newtype, arg1));
12433 }
12434
12435 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12436 C1 is a valid shift constant, and C2 is a power of two, i.e.
12437 a single bit. */
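/* For example, ((X >> 3) & 4) != 0 folds to (X & 32) != 0, because
   the mask can be shifted back up without overflow (4 << 3 == 32).  */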
12438 if (TREE_CODE (arg0) == BIT_AND_EXPR
12439 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12440 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12441 == INTEGER_CST
12442 && integer_pow2p (TREE_OPERAND (arg0, 1))
12443 && integer_zerop (arg1))
12444 {
12445 tree itype = TREE_TYPE (arg0);
12446 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12447 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12448
12449 /* Check for a valid shift count. */
12450 if (TREE_INT_CST_HIGH (arg001) == 0
12451 && TREE_INT_CST_LOW (arg001) < prec)
12452 {
12453 tree arg01 = TREE_OPERAND (arg0, 1);
12454 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12455 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12456 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12457 can be rewritten as (X & (C2 << C1)) != 0. */
12458 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12459 {
12460 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12461 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12462 return fold_build2_loc (loc, code, type, tem,
12463 fold_convert_loc (loc, itype, arg1));
12464 }
12465 /* Otherwise, for signed (arithmetic) shifts,
12466 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12467 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12468 else if (!TYPE_UNSIGNED (itype))
12469 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12470 arg000, build_int_cst (itype, 0));
12471 /* Otherwise, for unsigned (logical) shifts,
12472 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12473 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12474 else
12475 return omit_one_operand_loc (loc, type,
12476 code == EQ_EXPR ? integer_one_node
12477 : integer_zero_node,
12478 arg000);
12479 }
12480 }
12481
12482 /* If we have (A & C) == C where C is a power of 2, convert this into
12483 (A & C) != 0. Similarly for NE_EXPR. */
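/* For example, (X & 16) == 16 folds to (X & 16) != 0: a single-bit
   mask either matches completely or not at all.  */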
12484 if (TREE_CODE (arg0) == BIT_AND_EXPR
12485 && integer_pow2p (TREE_OPERAND (arg0, 1))
12486 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12487 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12488 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12489 integer_zero_node));
12490
12491 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12492 bit, then fold the expression into A < 0 or A >= 0. */
12493 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12494 if (tem)
12495 return tem;
12496
12497 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12498 Similarly for NE_EXPR. */
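/* For example, (X & 3) == 4 folds to false: 4 & ~3 is nonzero, so no
   value of X can satisfy the equality.  */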
12499 if (TREE_CODE (arg0) == BIT_AND_EXPR
12500 && TREE_CODE (arg1) == INTEGER_CST
12501 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12502 {
12503 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12504 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12505 TREE_OPERAND (arg0, 1));
12506 tree dandnotc
12507 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12508 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12509 notc);
12510 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12511 if (integer_nonzerop (dandnotc))
12512 return omit_one_operand_loc (loc, type, rslt, arg0);
12513 }
12514
12515 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12516 Similarly for NE_EXPR. */
12517 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12518 && TREE_CODE (arg1) == INTEGER_CST
12519 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12520 {
12521 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12522 tree candnotd
12523 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12524 TREE_OPERAND (arg0, 1),
12525 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12526 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12527 if (integer_nonzerop (candnotd))
12528 return omit_one_operand_loc (loc, type, rslt, arg0);
12529 }
12530
12531 /* If this is a comparison of a field, we may be able to simplify it. */
12532 if ((TREE_CODE (arg0) == COMPONENT_REF
12533 || TREE_CODE (arg0) == BIT_FIELD_REF)
12534 /* Handle the constant case even without -O
12535 to make sure the warnings are given. */
12536 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12537 {
12538 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12539 if (t1)
12540 return t1;
12541 }
12542
12543 /* Optimize comparisons of strlen vs zero to a compare of the
12544 first character of the string vs zero. To wit,
12545 strlen(ptr) == 0 => *ptr == 0
12546 strlen(ptr) != 0 => *ptr != 0
12547 Other cases should reduce to one of these two (or a constant)
12548 due to the return value of strlen being unsigned. */
12549 if (TREE_CODE (arg0) == CALL_EXPR
12550 && integer_zerop (arg1))
12551 {
12552 tree fndecl = get_callee_fndecl (arg0);
12553
12554 if (fndecl
12555 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12556 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12557 && call_expr_nargs (arg0) == 1
12558 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12559 {
12560 tree iref = build_fold_indirect_ref_loc (loc,
12561 CALL_EXPR_ARG (arg0, 0));
12562 return fold_build2_loc (loc, code, type, iref,
12563 build_int_cst (TREE_TYPE (iref), 0));
12564 }
12565 }
12566
12567 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12568 of X. Similarly fold (X >> C) == 0 into X >= 0. */
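/* For example, for 32-bit int X, (X >> 31) != 0 folds to X < 0: the
   arithmetic shift leaves only the sign bit, giving 0 or -1.  */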
12569 if (TREE_CODE (arg0) == RSHIFT_EXPR
12570 && integer_zerop (arg1)
12571 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12572 {
12573 tree arg00 = TREE_OPERAND (arg0, 0);
12574 tree arg01 = TREE_OPERAND (arg0, 1);
12575 tree itype = TREE_TYPE (arg00);
12576 if (TREE_INT_CST_HIGH (arg01) == 0
12577 && TREE_INT_CST_LOW (arg01)
12578 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12579 {
12580 if (TYPE_UNSIGNED (itype))
12581 {
12582 itype = signed_type_for (itype);
12583 arg00 = fold_convert_loc (loc, itype, arg00);
12584 }
12585 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12586 type, arg00, build_int_cst (itype, 0));
12587 }
12588 }
12589
12590 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12591 if (integer_zerop (arg1)
12592 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12593 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12594 TREE_OPERAND (arg0, 1));
12595
12596 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12597 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12598 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12599 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12600 build_int_cst (TREE_TYPE (arg0), 0));
12601 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12602 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12603 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12604 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12605 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12606 build_int_cst (TREE_TYPE (arg0), 0));
12607
12608 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12609 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12610 && TREE_CODE (arg1) == INTEGER_CST
12611 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12612 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12613 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12614 TREE_OPERAND (arg0, 1), arg1));
12615
12616 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12617 (X & C) == 0 when C is a single bit. */
12618 if (TREE_CODE (arg0) == BIT_AND_EXPR
12619 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12620 && integer_zerop (arg1)
12621 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12622 {
12623 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12624 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12625 TREE_OPERAND (arg0, 1));
12626 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12627 type, tem,
12628 fold_convert_loc (loc, TREE_TYPE (arg0),
12629 arg1));
12630 }
12631
12632 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12633 constant C is a power of two, i.e. a single bit. */
12634 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12635 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12636 && integer_zerop (arg1)
12637 && integer_pow2p (TREE_OPERAND (arg0, 1))
12638 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12639 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12640 {
12641 tree arg00 = TREE_OPERAND (arg0, 0);
12642 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12643 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12644 }
12645
12646 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12647 when C is a power of two, i.e. a single bit.  */
12648 if (TREE_CODE (arg0) == BIT_AND_EXPR
12649 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12650 && integer_zerop (arg1)
12651 && integer_pow2p (TREE_OPERAND (arg0, 1))
12652 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12653 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12654 {
12655 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12656 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12657 arg000, TREE_OPERAND (arg0, 1));
12658 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12659 tem, build_int_cst (TREE_TYPE (tem), 0));
12660 }
12661
12662 if (integer_zerop (arg1)
12663 && tree_expr_nonzero_p (arg0))
12664 {
12665 tree res = constant_boolean_node (code == NE_EXPR, type);
12666 return omit_one_operand_loc (loc, type, res, arg0);
12667 }
12668
12669 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12670 if (TREE_CODE (arg0) == NEGATE_EXPR
12671 && TREE_CODE (arg1) == NEGATE_EXPR)
12672 return fold_build2_loc (loc, code, type,
12673 TREE_OPERAND (arg0, 0),
12674 fold_convert_loc (loc, TREE_TYPE (arg0),
12675 TREE_OPERAND (arg1, 0)));
12676
12677 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
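/* For example, (X & 7) == (Y & 7) folds to ((X ^ Y) & 7) == 0: the
   masked values are equal exactly when X and Y agree in every bit of
   the mask.  */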
12678 if (TREE_CODE (arg0) == BIT_AND_EXPR
12679 && TREE_CODE (arg1) == BIT_AND_EXPR)
12680 {
12681 tree arg00 = TREE_OPERAND (arg0, 0);
12682 tree arg01 = TREE_OPERAND (arg0, 1);
12683 tree arg10 = TREE_OPERAND (arg1, 0);
12684 tree arg11 = TREE_OPERAND (arg1, 1);
12685 tree itype = TREE_TYPE (arg0);
12686
12687 if (operand_equal_p (arg01, arg11, 0))
12688 return fold_build2_loc (loc, code, type,
12689 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12690 fold_build2_loc (loc,
12691 BIT_XOR_EXPR, itype,
12692 arg00, arg10),
12693 arg01),
12694 build_int_cst (itype, 0));
12695
12696 if (operand_equal_p (arg01, arg10, 0))
12697 return fold_build2_loc (loc, code, type,
12698 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12699 fold_build2_loc (loc,
12700 BIT_XOR_EXPR, itype,
12701 arg00, arg11),
12702 arg01),
12703 build_int_cst (itype, 0));
12704
12705 if (operand_equal_p (arg00, arg11, 0))
12706 return fold_build2_loc (loc, code, type,
12707 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12708 fold_build2_loc (loc,
12709 BIT_XOR_EXPR, itype,
12710 arg01, arg10),
12711 arg00),
12712 build_int_cst (itype, 0));
12713
12714 if (operand_equal_p (arg00, arg10, 0))
12715 return fold_build2_loc (loc, code, type,
12716 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12717 fold_build2_loc (loc,
12718 BIT_XOR_EXPR, itype,
12719 arg01, arg11),
12720 arg00),
12721 build_int_cst (itype, 0));
12722 }
12723
12724 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12725 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12726 {
12727 tree arg00 = TREE_OPERAND (arg0, 0);
12728 tree arg01 = TREE_OPERAND (arg0, 1);
12729 tree arg10 = TREE_OPERAND (arg1, 0);
12730 tree arg11 = TREE_OPERAND (arg1, 1);
12731 tree itype = TREE_TYPE (arg0);
12732
12733 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12734 operand_equal_p guarantees no side-effects so we don't need
12735 to use omit_one_operand on Z. */
12736 if (operand_equal_p (arg01, arg11, 0))
12737 return fold_build2_loc (loc, code, type, arg00,
12738 fold_convert_loc (loc, TREE_TYPE (arg00),
12739 arg10));
12740 if (operand_equal_p (arg01, arg10, 0))
12741 return fold_build2_loc (loc, code, type, arg00,
12742 fold_convert_loc (loc, TREE_TYPE (arg00),
12743 arg11));
12744 if (operand_equal_p (arg00, arg11, 0))
12745 return fold_build2_loc (loc, code, type, arg01,
12746 fold_convert_loc (loc, TREE_TYPE (arg01),
12747 arg10));
12748 if (operand_equal_p (arg00, arg10, 0))
12749 return fold_build2_loc (loc, code, type, arg01,
12750 fold_convert_loc (loc, TREE_TYPE (arg01),
12751 arg11));
12752
12753 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
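/* For example, (X ^ 5) == (Y ^ 3) folds to (X ^ 6) == Y, since
   5 ^ 3 == 6.  */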
12754 if (TREE_CODE (arg01) == INTEGER_CST
12755 && TREE_CODE (arg11) == INTEGER_CST)
12756 {
12757 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12758 fold_convert_loc (loc, itype, arg11));
12759 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12760 return fold_build2_loc (loc, code, type, tem,
12761 fold_convert_loc (loc, itype, arg10));
12762 }
12763 }
12764
12765 /* Attempt to simplify equality/inequality comparisons of complex
12766 values. Only lower the comparison if the result is known or
12767 can be simplified to a single scalar comparison. */
12768 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12769 || TREE_CODE (arg0) == COMPLEX_CST)
12770 && (TREE_CODE (arg1) == COMPLEX_EXPR
12771 || TREE_CODE (arg1) == COMPLEX_CST))
12772 {
12773 tree real0, imag0, real1, imag1;
12774 tree rcond, icond;
12775
12776 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12777 {
12778 real0 = TREE_OPERAND (arg0, 0);
12779 imag0 = TREE_OPERAND (arg0, 1);
12780 }
12781 else
12782 {
12783 real0 = TREE_REALPART (arg0);
12784 imag0 = TREE_IMAGPART (arg0);
12785 }
12786
12787 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12788 {
12789 real1 = TREE_OPERAND (arg1, 0);
12790 imag1 = TREE_OPERAND (arg1, 1);
12791 }
12792 else
12793 {
12794 real1 = TREE_REALPART (arg1);
12795 imag1 = TREE_IMAGPART (arg1);
12796 }
12797
12798 rcond = fold_binary_loc (loc, code, type, real0, real1);
12799 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12800 {
12801 if (integer_zerop (rcond))
12802 {
12803 if (code == EQ_EXPR)
12804 return omit_two_operands_loc (loc, type, boolean_false_node,
12805 imag0, imag1);
12806 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12807 }
12808 else
12809 {
12810 if (code == NE_EXPR)
12811 return omit_two_operands_loc (loc, type, boolean_true_node,
12812 imag0, imag1);
12813 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12814 }
12815 }
12816
12817 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12818 if (icond && TREE_CODE (icond) == INTEGER_CST)
12819 {
12820 if (integer_zerop (icond))
12821 {
12822 if (code == EQ_EXPR)
12823 return omit_two_operands_loc (loc, type, boolean_false_node,
12824 real0, real1);
12825 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12826 }
12827 else
12828 {
12829 if (code == NE_EXPR)
12830 return omit_two_operands_loc (loc, type, boolean_true_node,
12831 real0, real1);
12832 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12833 }
12834 }
12835 }
12836
12837 return NULL_TREE;
12838
12839 case LT_EXPR:
12840 case GT_EXPR:
12841 case LE_EXPR:
12842 case GE_EXPR:
12843 tem = fold_comparison (loc, code, type, op0, op1);
12844 if (tem != NULL_TREE)
12845 return tem;
12846
12847 /* Transform comparisons of the form X +- C CMP X. */
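/* For example, for signed int X with undefined overflow, X + 1 > X
   folds to true (possibly emitting a strict-overflow warning).  */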
12848 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12849 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12850 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12851 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12852 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12853 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12854 {
12855 tree arg01 = TREE_OPERAND (arg0, 1);
12856 enum tree_code code0 = TREE_CODE (arg0);
12857 int is_positive;
12858
12859 if (TREE_CODE (arg01) == REAL_CST)
12860 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12861 else
12862 is_positive = tree_int_cst_sgn (arg01);
12863
12864 /* (X - c) > X becomes false. */
12865 if (code == GT_EXPR
12866 && ((code0 == MINUS_EXPR && is_positive >= 0)
12867 || (code0 == PLUS_EXPR && is_positive <= 0)))
12868 {
12869 if (TREE_CODE (arg01) == INTEGER_CST
12870 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12871 fold_overflow_warning (("assuming signed overflow does not "
12872 "occur when assuming that (X - c) > X "
12873 "is always false"),
12874 WARN_STRICT_OVERFLOW_ALL);
12875 return constant_boolean_node (0, type);
12876 }
12877
12878 /* Likewise (X + c) < X becomes false. */
12879 if (code == LT_EXPR
12880 && ((code0 == PLUS_EXPR && is_positive >= 0)
12881 || (code0 == MINUS_EXPR && is_positive <= 0)))
12882 {
12883 if (TREE_CODE (arg01) == INTEGER_CST
12884 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12885 fold_overflow_warning (("assuming signed overflow does not "
12886 "occur when assuming that "
12887 "(X + c) < X is always false"),
12888 WARN_STRICT_OVERFLOW_ALL);
12889 return constant_boolean_node (0, type);
12890 }
12891
12892 /* Convert (X - c) <= X to true. */
12893 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12894 && code == LE_EXPR
12895 && ((code0 == MINUS_EXPR && is_positive >= 0)
12896 || (code0 == PLUS_EXPR && is_positive <= 0)))
12897 {
12898 if (TREE_CODE (arg01) == INTEGER_CST
12899 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12900 fold_overflow_warning (("assuming signed overflow does not "
12901 "occur when assuming that "
12902 "(X - c) <= X is always true"),
12903 WARN_STRICT_OVERFLOW_ALL);
12904 return constant_boolean_node (1, type);
12905 }
12906
12907 /* Convert (X + c) >= X to true. */
12908 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12909 && code == GE_EXPR
12910 && ((code0 == PLUS_EXPR && is_positive >= 0)
12911 || (code0 == MINUS_EXPR && is_positive <= 0)))
12912 {
12913 if (TREE_CODE (arg01) == INTEGER_CST
12914 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12915 fold_overflow_warning (("assuming signed overflow does not "
12916 "occur when assuming that "
12917 "(X + c) >= X is always true"),
12918 WARN_STRICT_OVERFLOW_ALL);
12919 return constant_boolean_node (1, type);
12920 }
12921
12922 if (TREE_CODE (arg01) == INTEGER_CST)
12923 {
12924 /* Convert X + c > X and X - c < X to true for integers. */
12925 if (code == GT_EXPR
12926 && ((code0 == PLUS_EXPR && is_positive > 0)
12927 || (code0 == MINUS_EXPR && is_positive < 0)))
12928 {
12929 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12930 fold_overflow_warning (("assuming signed overflow does "
12931 "not occur when assuming that "
12932 "(X + c) > X is always true"),
12933 WARN_STRICT_OVERFLOW_ALL);
12934 return constant_boolean_node (1, type);
12935 }
12936
12937 if (code == LT_EXPR
12938 && ((code0 == MINUS_EXPR && is_positive > 0)
12939 || (code0 == PLUS_EXPR && is_positive < 0)))
12940 {
12941 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12942 fold_overflow_warning (("assuming signed overflow does "
12943 "not occur when assuming that "
12944 "(X - c) < X is always true"),
12945 WARN_STRICT_OVERFLOW_ALL);
12946 return constant_boolean_node (1, type);
12947 }
12948
12949 /* Convert X + c <= X and X - c >= X to false for integers. */
12950 if (code == LE_EXPR
12951 && ((code0 == PLUS_EXPR && is_positive > 0)
12952 || (code0 == MINUS_EXPR && is_positive < 0)))
12953 {
12954 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12955 fold_overflow_warning (("assuming signed overflow does "
12956 "not occur when assuming that "
12957 "(X + c) <= X is always false"),
12958 WARN_STRICT_OVERFLOW_ALL);
12959 return constant_boolean_node (0, type);
12960 }
12961
12962 if (code == GE_EXPR
12963 && ((code0 == MINUS_EXPR && is_positive > 0)
12964 || (code0 == PLUS_EXPR && is_positive < 0)))
12965 {
12966 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12967 fold_overflow_warning (("assuming signed overflow does "
12968 "not occur when assuming that "
12969 "(X - c) >= X is always false"),
12970 WARN_STRICT_OVERFLOW_ALL);
12971 return constant_boolean_node (0, type);
12972 }
12973 }
12974 }
12975
12976 /* Comparisons with the highest or lowest possible integer of
12977 the specified precision will have known values. */
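/* For example, for an 8-bit unsigned type the maximum is 255, so
   X > 255 folds to false, X <= 255 folds to true, and X > 254
   becomes X == 255.  */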
12978 {
12979 tree arg1_type = TREE_TYPE (arg1);
12980 unsigned int width = TYPE_PRECISION (arg1_type);
12981
12982 if (TREE_CODE (arg1) == INTEGER_CST
12983 && width <= 2 * HOST_BITS_PER_WIDE_INT
12984 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12985 {
12986 HOST_WIDE_INT signed_max_hi;
12987 unsigned HOST_WIDE_INT signed_max_lo;
12988 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12989
12990 if (width <= HOST_BITS_PER_WIDE_INT)
12991 {
12992 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12993 - 1;
12994 signed_max_hi = 0;
12995 max_hi = 0;
12996
12997 if (TYPE_UNSIGNED (arg1_type))
12998 {
12999 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13000 min_lo = 0;
13001 min_hi = 0;
13002 }
13003 else
13004 {
13005 max_lo = signed_max_lo;
13006 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13007 min_hi = -1;
13008 }
13009 }
13010 else
13011 {
13012 width -= HOST_BITS_PER_WIDE_INT;
13013 signed_max_lo = -1;
13014 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13015 - 1;
13016 max_lo = -1;
13017 min_lo = 0;
13018
13019 if (TYPE_UNSIGNED (arg1_type))
13020 {
13021 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13022 min_hi = 0;
13023 }
13024 else
13025 {
13026 max_hi = signed_max_hi;
13027 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13028 }
13029 }
13030
13031 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13032 && TREE_INT_CST_LOW (arg1) == max_lo)
13033 switch (code)
13034 {
13035 case GT_EXPR:
13036 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13037
13038 case GE_EXPR:
13039 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13040
13041 case LE_EXPR:
13042 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13043
13044 case LT_EXPR:
13045 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13046
13047 /* The GE_EXPR and LT_EXPR cases above are not normally
13048 reached because of previous transformations. */
13049
13050 default:
13051 break;
13052 }
13053 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13054 == max_hi
13055 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13056 switch (code)
13057 {
13058 case GT_EXPR:
13059 arg1 = const_binop (PLUS_EXPR, arg1,
13060 build_int_cst (TREE_TYPE (arg1), 1));
13061 return fold_build2_loc (loc, EQ_EXPR, type,
13062 fold_convert_loc (loc,
13063 TREE_TYPE (arg1), arg0),
13064 arg1);
13065 case LE_EXPR:
13066 arg1 = const_binop (PLUS_EXPR, arg1,
13067 build_int_cst (TREE_TYPE (arg1), 1));
13068 return fold_build2_loc (loc, NE_EXPR, type,
13069 fold_convert_loc (loc, TREE_TYPE (arg1),
13070 arg0),
13071 arg1);
13072 default:
13073 break;
13074 }
13075 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13076 == min_hi
13077 && TREE_INT_CST_LOW (arg1) == min_lo)
13078 switch (code)
13079 {
13080 case LT_EXPR:
13081 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13082
13083 case LE_EXPR:
13084 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13085
13086 case GE_EXPR:
13087 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13088
13089 case GT_EXPR:
13090 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13091
13092 default:
13093 break;
13094 }
13095 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13096 == min_hi
13097 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13098 switch (code)
13099 {
13100 case GE_EXPR:
13101 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13102 return fold_build2_loc (loc, NE_EXPR, type,
13103 fold_convert_loc (loc,
13104 TREE_TYPE (arg1), arg0),
13105 arg1);
13106 case LT_EXPR:
13107 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13108 return fold_build2_loc (loc, EQ_EXPR, type,
13109 fold_convert_loc (loc, TREE_TYPE (arg1),
13110 arg0),
13111 arg1);
13112 default:
13113 break;
13114 }
13115
13116 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13117 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13118 && TYPE_UNSIGNED (arg1_type)
13119 /* We will flip the signedness of the comparison operator
13120 associated with the mode of arg1, so the sign bit is
13121 specified by this mode. Check that arg1 is the signed
13122 max associated with this sign bit. */
13123 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13124 /* signed_type does not work on pointer types. */
13125 && INTEGRAL_TYPE_P (arg1_type))
13126 {
13127 /* The following case also applies to X < signed_max+1
13128 and X >= signed_max+1 because of previous transformations.  */
13129 if (code == LE_EXPR || code == GT_EXPR)
13130 {
13131 tree st;
13132 st = signed_type_for (TREE_TYPE (arg1));
13133 return fold_build2_loc (loc,
13134 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13135 type, fold_convert_loc (loc, st, arg0),
13136 build_int_cst (st, 0));
13137 }
13138 }
13139 }
13140 }
13141
13142 /* If we are comparing an ABS_EXPR with a constant, we can
13143 convert all the cases into explicit comparisons, but they may
13144 well not be faster than doing the ABS and one comparison.
13145 But ABS (X) <= C is a range comparison, which becomes a subtraction
13146 and a comparison, and is probably faster. */
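/* For example, ABS (X) <= 5 folds to X >= -5 && X <= 5, provided
   negating the constant does not overflow.  */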
13147 if (code == LE_EXPR
13148 && TREE_CODE (arg1) == INTEGER_CST
13149 && TREE_CODE (arg0) == ABS_EXPR
13150 && ! TREE_SIDE_EFFECTS (arg0)
13151 && (0 != (tem = negate_expr (arg1)))
13152 && TREE_CODE (tem) == INTEGER_CST
13153 && !TREE_OVERFLOW (tem))
13154 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13155 build2 (GE_EXPR, type,
13156 TREE_OPERAND (arg0, 0), tem),
13157 build2 (LE_EXPR, type,
13158 TREE_OPERAND (arg0, 0), arg1));
13159
13160 /* Convert ABS_EXPR<x> >= 0 to true. */
13161 strict_overflow_p = false;
13162 if (code == GE_EXPR
13163 && (integer_zerop (arg1)
13164 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13165 && real_zerop (arg1)))
13166 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13167 {
13168 if (strict_overflow_p)
13169 fold_overflow_warning (("assuming signed overflow does not occur "
13170 "when simplifying comparison of "
13171 "absolute value and zero"),
13172 WARN_STRICT_OVERFLOW_CONDITIONAL);
13173 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13174 }
13175
13176 /* Convert ABS_EXPR<x> < 0 to false. */
13177 strict_overflow_p = false;
13178 if (code == LT_EXPR
13179 && (integer_zerop (arg1) || real_zerop (arg1))
13180 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13181 {
13182 if (strict_overflow_p)
13183 fold_overflow_warning (("assuming signed overflow does not occur "
13184 "when simplifying comparison of "
13185 "absolute value and zero"),
13186 WARN_STRICT_OVERFLOW_CONDITIONAL);
13187 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13188 }
13189
13190 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13191 and similarly for >= into !=. */
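/* For an unsigned X, X < (1 << Y) holds exactly when X has no bits
   set at position Y or above, i.e. (X >> Y) == 0; likewise
   X >= (1 << Y) becomes (X >> Y) != 0.  */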
13192 if ((code == LT_EXPR || code == GE_EXPR)
13193 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13194 && TREE_CODE (arg1) == LSHIFT_EXPR
13195 && integer_onep (TREE_OPERAND (arg1, 0)))
13196 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13197 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13198 TREE_OPERAND (arg1, 1)),
13199 build_int_cst (TREE_TYPE (arg0), 0));
13200
13201 if ((code == LT_EXPR || code == GE_EXPR)
13202 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13203 && CONVERT_EXPR_P (arg1)
13204 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13205 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13206 {
13207 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13208 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13209 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13210 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13211 build_int_cst (TREE_TYPE (arg0), 0));
13212 }
13213
13214 return NULL_TREE;
13215
13216 case UNORDERED_EXPR:
13217 case ORDERED_EXPR:
13218 case UNLT_EXPR:
13219 case UNLE_EXPR:
13220 case UNGT_EXPR:
13221 case UNGE_EXPR:
13222 case UNEQ_EXPR:
13223 case LTGT_EXPR:
13224 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13225 {
13226 t1 = fold_relational_const (code, type, arg0, arg1);
13227 if (t1 != NULL_TREE)
13228 return t1;
13229 }
13230
13231 /* If the first operand is NaN, the result is constant. */
13232 if (TREE_CODE (arg0) == REAL_CST
13233 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13234 && (code != LTGT_EXPR || ! flag_trapping_math))
13235 {
13236 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13237 ? integer_zero_node
13238 : integer_one_node;
13239 return omit_one_operand_loc (loc, type, t1, arg1);
13240 }
13241
13242 /* If the second operand is NaN, the result is constant. */
13243 if (TREE_CODE (arg1) == REAL_CST
13244 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13245 && (code != LTGT_EXPR || ! flag_trapping_math))
13246 {
13247 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13248 ? integer_zero_node
13249 : integer_one_node;
13250 return omit_one_operand_loc (loc, type, t1, arg0);
13251 }
13252
13253 /* Simplify unordered comparison of something with itself. */
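/* UNLE (X, X), UNGE (X, X) and UNEQ (X, X) are true even when X is a
   NaN: the operands are then unordered, and the UN* codes are true
   for unordered operands.  */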
13254 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13255 && operand_equal_p (arg0, arg1, 0))
13256 return constant_boolean_node (1, type);
13257
13258 if (code == LTGT_EXPR
13259 && !flag_trapping_math
13260 && operand_equal_p (arg0, arg1, 0))
13261 return constant_boolean_node (0, type);
13262
13263 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13264 {
13265 tree targ0 = strip_float_extensions (arg0);
13266 tree targ1 = strip_float_extensions (arg1);
13267 tree newtype = TREE_TYPE (targ0);
13268
13269 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13270 newtype = TREE_TYPE (targ1);
13271
13272 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13273 return fold_build2_loc (loc, code, type,
13274 fold_convert_loc (loc, newtype, targ0),
13275 fold_convert_loc (loc, newtype, targ1));
13276 }
13277
13278 return NULL_TREE;
13279
13280 case COMPOUND_EXPR:
13281 /* When pedantic, a compound expression can be neither an lvalue
13282 nor an integer constant expression. */
13283 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13284 return NULL_TREE;
13285 /* Don't let (0, 0) be a null pointer constant. */
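      /* E.g., "(void *) (0, 0)" is not a null pointer constant, because
	 a comma expression is not an integer constant expression,
	 whereas "(void *) 0" is.  */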
13286 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13287 : fold_convert_loc (loc, type, arg1);
13288 return pedantic_non_lvalue_loc (loc, tem);
13289
13290 case COMPLEX_EXPR:
13291 if ((TREE_CODE (arg0) == REAL_CST
13292 && TREE_CODE (arg1) == REAL_CST)
13293 || (TREE_CODE (arg0) == INTEGER_CST
13294 && TREE_CODE (arg1) == INTEGER_CST))
13295 return build_complex (type, arg0, arg1);
13296 if (TREE_CODE (arg0) == REALPART_EXPR
13297 && TREE_CODE (arg1) == IMAGPART_EXPR
13298 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13299 && operand_equal_p (TREE_OPERAND (arg0, 0),
13300 TREE_OPERAND (arg1, 0), 0))
13301 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13302 TREE_OPERAND (arg1, 0));
13303 return NULL_TREE;
13304
13305 case ASSERT_EXPR:
13306 /* An ASSERT_EXPR should never be passed to fold_binary. */
13307 gcc_unreachable ();
13308
13309 default:
13310 return NULL_TREE;
13311 } /* switch (code) */
13312 }
13313
13314 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13315 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13316 of GOTO_EXPR. */
13317
13318 static tree
13319 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13320 {
13321 switch (TREE_CODE (*tp))
13322 {
13323 case LABEL_EXPR:
13324 return *tp;
13325
13326 case GOTO_EXPR:
13327 *walk_subtrees = 0;
13328
13329 /* ... fall through ... */
13330
13331 default:
13332 return NULL_TREE;
13333 }
13334 }
13335
13336 /* Return whether the sub-tree ST contains a label which is accessible from
13337 outside the sub-tree. */
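/* For example, an arm of a COND_EXPR that defines a label (for
   instance via a GNU statement expression) must not be discarded,
   since a goto elsewhere in the function may target that label.  */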
13338
13339 static bool
13340 contains_label_p (tree st)
13341 {
13342 return
13343 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13344 }
13345
13346 /* Fold a ternary expression of code CODE and type TYPE with operands
13347 OP0, OP1, and OP2. Return the folded expression if folding is
13348 successful. Otherwise, return NULL_TREE. */
13349
13350 tree
13351 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13352 tree op0, tree op1, tree op2)
13353 {
13354 tree tem;
13355 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13356 enum tree_code_class kind = TREE_CODE_CLASS (code);
13357
13358 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13359 && TREE_CODE_LENGTH (code) == 3);
13360
13361 /* Strip any conversions that don't change the mode. This is safe
13362 for every expression, except for a comparison expression because
13363 its signedness is derived from its operands. So, in the latter
13364 case, only strip conversions that don't change the signedness.
13365
13366 Note that this is done as an internal manipulation within the
13367 constant folder, in order to find the simplest representation of
13368 the arguments so that their form can be studied. In any case,
13369 the appropriate type conversions should be put back in the tree
13370 that will get out of the constant folder. */
13371 if (op0)
13372 {
13373 arg0 = op0;
13374 STRIP_NOPS (arg0);
13375 }
13376
13377 if (op1)
13378 {
13379 arg1 = op1;
13380 STRIP_NOPS (arg1);
13381 }
13382
13383 if (op2)
13384 {
13385 arg2 = op2;
13386 STRIP_NOPS (arg2);
13387 }
13388
13389 switch (code)
13390 {
13391 case COMPONENT_REF:
13392 if (TREE_CODE (arg0) == CONSTRUCTOR
13393 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13394 {
13395 unsigned HOST_WIDE_INT idx;
13396 tree field, value;
13397 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13398 if (field == arg1)
13399 return value;
13400 }
13401 return NULL_TREE;
13402
13403 case COND_EXPR:
13404 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13405 so all simple results must be passed through pedantic_non_lvalue. */
13406 if (TREE_CODE (arg0) == INTEGER_CST)
13407 {
13408 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13409 tem = integer_zerop (arg0) ? op2 : op1;
13410 /* Only optimize constant conditions when the selected branch
13411 has the same type as the COND_EXPR. This avoids optimizing
13412 away "c ? x : throw", where the throw has a void type.
13413 Avoid throwing away an operand that contains a label. */
13414 if ((!TREE_SIDE_EFFECTS (unused_op)
13415 || !contains_label_p (unused_op))
13416 && (! VOID_TYPE_P (TREE_TYPE (tem))
13417 || VOID_TYPE_P (type)))
13418 return pedantic_non_lvalue_loc (loc, tem);
13419 return NULL_TREE;
13420 }
13421 if (operand_equal_p (arg1, op2, 0))
13422 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13423
13424 /* If we have A op B ? A : C, we may be able to convert this to a
13425 simpler expression, depending on the operation and the values
13426 of B and C. Signed zeros prevent all of these transformations,
13427 for reasons given above each one.
13428
13429 Also try swapping the arguments and inverting the conditional. */
13430 if (COMPARISON_CLASS_P (arg0)
13431 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13432 arg1, TREE_OPERAND (arg0, 1))
13433 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13434 {
13435 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13436 if (tem)
13437 return tem;
13438 }
13439
13440 if (COMPARISON_CLASS_P (arg0)
13441 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13442 op2,
13443 TREE_OPERAND (arg0, 1))
13444 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13445 {
13446 location_t loc0 = expr_location_or (arg0, loc);
13447 tem = fold_truth_not_expr (loc0, arg0);
13448 if (tem && COMPARISON_CLASS_P (tem))
13449 {
13450 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13451 if (tem)
13452 return tem;
13453 }
13454 }
13455
13456 /* If the second operand is simpler than the third, swap them
13457 since that produces better jump optimization results. */
13458 if (truth_value_p (TREE_CODE (arg0))
13459 && tree_swap_operands_p (op1, op2, false))
13460 {
13461 location_t loc0 = expr_location_or (arg0, loc);
13462 /* See if this can be inverted. If it can't, possibly because
13463 it was a floating-point inequality comparison, don't do
13464 anything. */
13465 tem = fold_truth_not_expr (loc0, arg0);
13466 if (tem)
13467 return fold_build3_loc (loc, code, type, tem, op2, op1);
13468 }
13469
13470 /* Convert A ? 1 : 0 to simply A. */
13471 if (integer_onep (op1)
13472 && integer_zerop (op2)
13473 /* If we try to convert OP0 to our type, the
13474 call to fold will try to move the conversion inside
13475 a COND, which will recurse. In that case, the COND_EXPR
13476 is probably the best choice, so leave it alone. */
13477 && type == TREE_TYPE (arg0))
13478 return pedantic_non_lvalue_loc (loc, arg0);
13479
13480 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13481 over COND_EXPR in cases such as floating point comparisons. */
13482 if (integer_zerop (op1)
13483 && integer_onep (op2)
13484 && truth_value_p (TREE_CODE (arg0)))
13485 return pedantic_non_lvalue_loc (loc,
13486 fold_convert_loc (loc, type,
13487 invert_truthvalue_loc (loc,
13488 arg0)));
13489
13490 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
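      /* E.g., for 32-bit A this turns A < 0 ? 0x80000000 : 0 into
	 A & 0x80000000, once sign_bit_p confirms the constant is
	 exactly A's sign bit; the width checks below handle the case
	 where the constant is wider than A.  */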
13491 if (TREE_CODE (arg0) == LT_EXPR
13492 && integer_zerop (TREE_OPERAND (arg0, 1))
13493 && integer_zerop (op2)
13494 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13495 {
13496 /* sign_bit_p only checks ARG1 bits within A's precision.
13497 If <sign bit of A> has wider type than A, bits outside
13498 of A's precision in <sign bit of A> need to be checked.
13499 If they are all 0, this optimization needs to be done
13500 in unsigned A's type; if they are all 1, in signed A's type;
13501 otherwise this can't be done. */
13502 if (TYPE_PRECISION (TREE_TYPE (tem))
13503 < TYPE_PRECISION (TREE_TYPE (arg1))
13504 && TYPE_PRECISION (TREE_TYPE (tem))
13505 < TYPE_PRECISION (type))
13506 {
13507 unsigned HOST_WIDE_INT mask_lo;
13508 HOST_WIDE_INT mask_hi;
13509 int inner_width, outer_width;
13510 tree tem_type;
13511
13512 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13513 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13514 if (outer_width > TYPE_PRECISION (type))
13515 outer_width = TYPE_PRECISION (type);
13516
13517 if (outer_width > HOST_BITS_PER_WIDE_INT)
13518 {
13519 mask_hi = ((unsigned HOST_WIDE_INT) -1
13520 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13521 mask_lo = -1;
13522 }
13523 else
13524 {
13525 mask_hi = 0;
13526 mask_lo = ((unsigned HOST_WIDE_INT) -1
13527 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13528 }
13529 if (inner_width > HOST_BITS_PER_WIDE_INT)
13530 {
13531 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13532 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13533 mask_lo = 0;
13534 }
13535 else
13536 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13537 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13538
13539 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13540 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13541 {
13542 tem_type = signed_type_for (TREE_TYPE (tem));
13543 tem = fold_convert_loc (loc, tem_type, tem);
13544 }
13545 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13546 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13547 {
13548 tem_type = unsigned_type_for (TREE_TYPE (tem));
13549 tem = fold_convert_loc (loc, tem_type, tem);
13550 }
13551 else
13552 tem = NULL;
13553 }
13554
13555 if (tem)
13556 return
13557 fold_convert_loc (loc, type,
13558 fold_build2_loc (loc, BIT_AND_EXPR,
13559 TREE_TYPE (tem), tem,
13560 fold_convert_loc (loc,
13561 TREE_TYPE (tem),
13562 arg1)));
13563 }
13564
13565 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13566 already handled above. */
13567 if (TREE_CODE (arg0) == BIT_AND_EXPR
13568 && integer_onep (TREE_OPERAND (arg0, 1))
13569 && integer_zerop (op2)
13570 && integer_pow2p (arg1))
13571 {
13572 tree tem = TREE_OPERAND (arg0, 0);
13573 STRIP_NOPS (tem);
13574 if (TREE_CODE (tem) == RSHIFT_EXPR
13575 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13576 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13577 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13578 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13579 TREE_OPERAND (tem, 0), arg1);
13580 }
13581
13582 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13583 is probably obsolete because the first operand should be a
13584 truth value (that's why we have the two cases above), but let's
13585 leave it in until we can confirm this for all front-ends. */
13586 if (integer_zerop (op2)
13587 && TREE_CODE (arg0) == NE_EXPR
13588 && integer_zerop (TREE_OPERAND (arg0, 1))
13589 && integer_pow2p (arg1)
13590 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13591 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13592 arg1, OEP_ONLY_CONST))
13593 return pedantic_non_lvalue_loc (loc,
13594 fold_convert_loc (loc, type,
13595 TREE_OPERAND (arg0, 0)));
13596
13597 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13598 if (integer_zerop (op2)
13599 && truth_value_p (TREE_CODE (arg0))
13600 && truth_value_p (TREE_CODE (arg1)))
13601 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13602 fold_convert_loc (loc, type, arg0),
13603 arg1);
13604
13605 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13606 if (integer_onep (op2)
13607 && truth_value_p (TREE_CODE (arg0))
13608 && truth_value_p (TREE_CODE (arg1)))
13609 {
13610 location_t loc0 = expr_location_or (arg0, loc);
13611 /* Only perform transformation if ARG0 is easily inverted. */
13612 tem = fold_truth_not_expr (loc0, arg0);
13613 if (tem)
13614 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13615 fold_convert_loc (loc, type, tem),
13616 arg1);
13617 }
13618
13619 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13620 if (integer_zerop (arg1)
13621 && truth_value_p (TREE_CODE (arg0))
13622 && truth_value_p (TREE_CODE (op2)))
13623 {
13624 location_t loc0 = expr_location_or (arg0, loc);
13625 /* Only perform transformation if ARG0 is easily inverted. */
13626 tem = fold_truth_not_expr (loc0, arg0);
13627 if (tem)
13628 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13629 fold_convert_loc (loc, type, tem),
13630 op2);
13631 }
13632
13633 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13634 if (integer_onep (arg1)
13635 && truth_value_p (TREE_CODE (arg0))
13636 && truth_value_p (TREE_CODE (op2)))
13637 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13638 fold_convert_loc (loc, type, arg0),
13639 op2);
13640
13641 return NULL_TREE;
13642
13643 case CALL_EXPR:
13644 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13645 of fold_ternary on them. */
13646 gcc_unreachable ();
13647
13648 case BIT_FIELD_REF:
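      /* For example, on a v4si constant {0, 1, 2, 3}, the reference
	 BIT_FIELD_REF <v, 32, 64> has width 32 and offset 64, so
	 idx = 64 / 32 = 2 below and the fold yields the element 2.  */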
13649 if ((TREE_CODE (arg0) == VECTOR_CST
13650 || TREE_CODE (arg0) == CONSTRUCTOR)
13651 && type == TREE_TYPE (TREE_TYPE (arg0)))
13652 {
13653 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13654 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13655
13656 if (width != 0
13657 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13658 && (idx % width) == 0
13659 && (idx = idx / width)
13660 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13661 {
13662 if (TREE_CODE (arg0) == VECTOR_CST)
13663 {
13664 tree elements = TREE_VECTOR_CST_ELTS (arg0);
13665 while (idx-- > 0 && elements)
13666 elements = TREE_CHAIN (elements);
13667 if (elements)
13668 return TREE_VALUE (elements);
13669 }
13670 else if (idx < CONSTRUCTOR_NELTS (arg0))
13671 return CONSTRUCTOR_ELT (arg0, idx)->value;
13672 return build_zero_cst (type);
13673 }
13674 }
13675
13676 /* A bit-field-ref that references the full argument can be stripped. */
13677 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13678 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13679 && integer_zerop (op2))
13680 return fold_convert_loc (loc, type, arg0);
13681
13682 return NULL_TREE;
13683
13684 case FMA_EXPR:
13685 /* For integers we can decompose the FMA if possible. */
13686 if (TREE_CODE (arg0) == INTEGER_CST
13687 && TREE_CODE (arg1) == INTEGER_CST)
13688 return fold_build2_loc (loc, PLUS_EXPR, type,
13689 const_binop (MULT_EXPR, arg0, arg1), arg2);
13690 if (integer_zerop (arg2))
13691 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13692
13693 return fold_fma (loc, type, arg0, arg1, arg2);
13694
13695 default:
13696 return NULL_TREE;
13697 } /* switch (code) */
13698 }
13699
13700 /* Perform constant folding and related simplification of EXPR.
13701 The related simplifications include x*1 => x, x*0 => 0, etc.,
13702 and application of the associative law.
13703 NOP_EXPR conversions may be removed freely (as long as we
13704 are careful not to change the type of the overall expression).
13705 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13706 but we can constant-fold them if they have constant operands. */
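/* A minimal usage sketch, where A and B are placeholders for existing
   INTEGER_CST trees:

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, a, b));

   If nothing simplifies, fold returns the expression unchanged.  */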
13707
13708 #ifdef ENABLE_FOLD_CHECKING
13709 # define fold(x) fold_1 (x)
13710 static tree fold_1 (tree);
13711 static
13712 #endif
13713 tree
13714 fold (tree expr)
13715 {
13716 const tree t = expr;
13717 enum tree_code code = TREE_CODE (t);
13718 enum tree_code_class kind = TREE_CODE_CLASS (code);
13719 tree tem;
13720 location_t loc = EXPR_LOCATION (expr);
13721
13722 /* Return right away if a constant. */
13723 if (kind == tcc_constant)
13724 return t;
13725
13726 /* CALL_EXPR-like objects with variable numbers of operands are
13727 treated specially. */
13728 if (kind == tcc_vl_exp)
13729 {
13730 if (code == CALL_EXPR)
13731 {
13732 tem = fold_call_expr (loc, expr, false);
13733 return tem ? tem : expr;
13734 }
13735 return expr;
13736 }
13737
13738 if (IS_EXPR_CODE_CLASS (kind))
13739 {
13740 tree type = TREE_TYPE (t);
13741 tree op0, op1, op2;
13742
13743 switch (TREE_CODE_LENGTH (code))
13744 {
13745 case 1:
13746 op0 = TREE_OPERAND (t, 0);
13747 tem = fold_unary_loc (loc, code, type, op0);
13748 return tem ? tem : expr;
13749 case 2:
13750 op0 = TREE_OPERAND (t, 0);
13751 op1 = TREE_OPERAND (t, 1);
13752 tem = fold_binary_loc (loc, code, type, op0, op1);
13753 return tem ? tem : expr;
13754 case 3:
13755 op0 = TREE_OPERAND (t, 0);
13756 op1 = TREE_OPERAND (t, 1);
13757 op2 = TREE_OPERAND (t, 2);
13758 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13759 return tem ? tem : expr;
13760 default:
13761 break;
13762 }
13763 }
13764
13765 switch (code)
13766 {
13767 case ARRAY_REF:
13768 {
13769 tree op0 = TREE_OPERAND (t, 0);
13770 tree op1 = TREE_OPERAND (t, 1);
13771
13772 if (TREE_CODE (op1) == INTEGER_CST
13773 && TREE_CODE (op0) == CONSTRUCTOR
13774 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13775 {
13776 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13777 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13778 unsigned HOST_WIDE_INT begin = 0;
13779
13780 /* Find a matching index by means of a binary search. */
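	      /* A RANGE_EXPR index denotes a closed interval, e.g. the
		 designated initializer "[4 ... 7] = v", so OP1 matches
		 it when it lies between TREE_OPERAND (index, 0) and
		 TREE_OPERAND (index, 1) inclusive.  */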
13781 while (begin != end)
13782 {
13783 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13784 tree index = VEC_index (constructor_elt, elts, middle)->index;
13785
13786 if (TREE_CODE (index) == INTEGER_CST
13787 && tree_int_cst_lt (index, op1))
13788 begin = middle + 1;
13789 else if (TREE_CODE (index) == INTEGER_CST
13790 && tree_int_cst_lt (op1, index))
13791 end = middle;
13792 else if (TREE_CODE (index) == RANGE_EXPR
13793 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13794 begin = middle + 1;
13795 else if (TREE_CODE (index) == RANGE_EXPR
13796 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13797 end = middle;
13798 else
13799 return VEC_index (constructor_elt, elts, middle)->value;
13800 }
13801 }
13802
13803 return t;
13804 }
13805
13806 case CONST_DECL:
13807 return fold (DECL_INITIAL (t));
13808
13809 default:
13810 return t;
13811 } /* switch (code) */
13812 }
13813
13814 #ifdef ENABLE_FOLD_CHECKING
13815 #undef fold
13816
13817 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13818 static void fold_check_failed (const_tree, const_tree);
13819 void print_fold_checksum (const_tree);
13820
13821 /* When --enable-checking=fold, compute a digest of expr before
13822 and after the actual fold call to verify that fold did not
13823 accidentally change the original expr. */
13824
13825 tree
13826 fold (tree expr)
13827 {
13828 tree ret;
13829 struct md5_ctx ctx;
13830 unsigned char checksum_before[16], checksum_after[16];
13831 htab_t ht;
13832
13833 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13834 md5_init_ctx (&ctx);
13835 fold_checksum_tree (expr, &ctx, ht);
13836 md5_finish_ctx (&ctx, checksum_before);
13837 htab_empty (ht);
13838
13839 ret = fold_1 (expr);
13840
13841 md5_init_ctx (&ctx);
13842 fold_checksum_tree (expr, &ctx, ht);
13843 md5_finish_ctx (&ctx, checksum_after);
13844 htab_delete (ht);
13845
13846 if (memcmp (checksum_before, checksum_after, 16))
13847 fold_check_failed (expr, ret);
13848
13849 return ret;
13850 }
13851
13852 void
13853 print_fold_checksum (const_tree expr)
13854 {
13855 struct md5_ctx ctx;
13856 unsigned char checksum[16], cnt;
13857 htab_t ht;
13858
13859 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13860 md5_init_ctx (&ctx);
13861 fold_checksum_tree (expr, &ctx, ht);
13862 md5_finish_ctx (&ctx, checksum);
13863 htab_delete (ht);
13864 for (cnt = 0; cnt < 16; ++cnt)
13865 fprintf (stderr, "%02x", checksum[cnt]);
13866 putc ('\n', stderr);
13867 }
13868
13869 static void
13870 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13871 {
13872 internal_error ("fold check: original tree changed by fold");
13873 }
13874
13875 static void
13876 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13877 {
13878 void **slot;
13879 enum tree_code code;
13880 union tree_node buf;
13881 int i, len;
13882
13883 recursive_label:
13884 if (expr == NULL)
13885 return;
13886 slot = (void **) htab_find_slot (ht, expr, INSERT);
13887 if (*slot != NULL)
13888 return;
13889 *slot = CONST_CAST_TREE (expr);
13890 code = TREE_CODE (expr);
13891 if (TREE_CODE_CLASS (code) == tcc_declaration
13892 && DECL_ASSEMBLER_NAME_SET_P (expr))
13893 {
13894 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13895 memcpy ((char *) &buf, expr, tree_size (expr));
13896 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13897 expr = (tree) &buf;
13898 }
13899 else if (TREE_CODE_CLASS (code) == tcc_type
13900 && (TYPE_POINTER_TO (expr)
13901 || TYPE_REFERENCE_TO (expr)
13902 || TYPE_CACHED_VALUES_P (expr)
13903 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13904 || TYPE_NEXT_VARIANT (expr)))
13905 {
13906 /* Allow these fields to be modified. */
13907 tree tmp;
13908 memcpy ((char *) &buf, expr, tree_size (expr));
13909 expr = tmp = (tree) &buf;
13910 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13911 TYPE_POINTER_TO (tmp) = NULL;
13912 TYPE_REFERENCE_TO (tmp) = NULL;
13913 TYPE_NEXT_VARIANT (tmp) = NULL;
13914 if (TYPE_CACHED_VALUES_P (tmp))
13915 {
13916 TYPE_CACHED_VALUES_P (tmp) = 0;
13917 TYPE_CACHED_VALUES (tmp) = NULL;
13918 }
13919 }
13920 md5_process_bytes (expr, tree_size (expr), ctx);
13921 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13922 if (TREE_CODE_CLASS (code) != tcc_type
13923 && TREE_CODE_CLASS (code) != tcc_declaration
13924 && code != TREE_LIST
13925 && code != SSA_NAME
13926 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13927 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13928 switch (TREE_CODE_CLASS (code))
13929 {
13930 case tcc_constant:
13931 switch (code)
13932 {
13933 case STRING_CST:
13934 md5_process_bytes (TREE_STRING_POINTER (expr),
13935 TREE_STRING_LENGTH (expr), ctx);
13936 break;
13937 case COMPLEX_CST:
13938 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13939 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13940 break;
13941 case VECTOR_CST:
13942 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13943 break;
13944 default:
13945 break;
13946 }
13947 break;
13948 case tcc_exceptional:
13949 switch (code)
13950 {
13951 case TREE_LIST:
13952 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13953 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13954 expr = TREE_CHAIN (expr);
13955 goto recursive_label;
13956 break;
13957 case TREE_VEC:
13958 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13959 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13960 break;
13961 default:
13962 break;
13963 }
13964 break;
13965 case tcc_expression:
13966 case tcc_reference:
13967 case tcc_comparison:
13968 case tcc_unary:
13969 case tcc_binary:
13970 case tcc_statement:
13971 case tcc_vl_exp:
13972 len = TREE_OPERAND_LENGTH (expr);
13973 for (i = 0; i < len; ++i)
13974 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13975 break;
13976 case tcc_declaration:
13977 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13978 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13979 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13980 {
13981 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13982 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13983 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13984 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13985 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13986 }
13987 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13988 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13989
13990 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13991 {
13992 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13993 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13994 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13995 }
13996 break;
13997 case tcc_type:
13998 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13999 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14000 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14001 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14002 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14003 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14004 if (INTEGRAL_TYPE_P (expr)
14005 || SCALAR_FLOAT_TYPE_P (expr))
14006 {
14007 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14008 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14009 }
14010 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14011 if (TREE_CODE (expr) == RECORD_TYPE
14012 || TREE_CODE (expr) == UNION_TYPE
14013 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14014 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14015 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14016 break;
14017 default:
14018 break;
14019 }
14020 }
14021
14022 /* Helper function for outputting the checksum of a tree T. When
14023 debugging with gdb, you can "define mynext" to be "next" followed
14024 by "call debug_fold_checksum (op0)", then just trace down till the
14025 outputs differ. */
14026
14027 DEBUG_FUNCTION void
14028 debug_fold_checksum (const_tree t)
14029 {
14030 int i;
14031 unsigned char checksum[16];
14032 struct md5_ctx ctx;
14033 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14034
14035 md5_init_ctx (&ctx);
14036 fold_checksum_tree (t, &ctx, ht);
14037 md5_finish_ctx (&ctx, checksum);
14038 htab_empty (ht);
14039
14040 for (i = 0; i < 16; i++)
14041 fprintf (stderr, "%d ", checksum[i]);
14042
14043 fprintf (stderr, "\n");
14044 }
14045
14046 #endif
14047
14048 /* Fold a unary tree expression with code CODE of type TYPE with an
14049 operand OP0. LOC is the location of the resulting expression.
14050 Return a folded expression if successful. Otherwise, return a tree
14051 expression with code CODE of type TYPE with an operand OP0. */
14052
14053 tree
14054 fold_build1_stat_loc (location_t loc,
14055 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14056 {
14057 tree tem;
14058 #ifdef ENABLE_FOLD_CHECKING
14059 unsigned char checksum_before[16], checksum_after[16];
14060 struct md5_ctx ctx;
14061 htab_t ht;
14062
14063 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14064 md5_init_ctx (&ctx);
14065 fold_checksum_tree (op0, &ctx, ht);
14066 md5_finish_ctx (&ctx, checksum_before);
14067 htab_empty (ht);
14068 #endif
14069
14070 tem = fold_unary_loc (loc, code, type, op0);
14071 if (!tem)
14072 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14073
14074 #ifdef ENABLE_FOLD_CHECKING
14075 md5_init_ctx (&ctx);
14076 fold_checksum_tree (op0, &ctx, ht);
14077 md5_finish_ctx (&ctx, checksum_after);
14078 htab_delete (ht);
14079
14080 if (memcmp (checksum_before, checksum_after, 16))
14081 fold_check_failed (op0, tem);
14082 #endif
14083 return tem;
14084 }
14085
14086 /* Fold a binary tree expression with code CODE of type TYPE with
14087 operands OP0 and OP1. LOC is the location of the resulting
14088 expression. Return a folded expression if successful. Otherwise,
14089 return a tree expression with code CODE of type TYPE with operands
14090 OP0 and OP1. */
14091
14092 tree
14093 fold_build2_stat_loc (location_t loc,
14094 enum tree_code code, tree type, tree op0, tree op1
14095 MEM_STAT_DECL)
14096 {
14097 tree tem;
14098 #ifdef ENABLE_FOLD_CHECKING
14099 unsigned char checksum_before_op0[16],
14100 checksum_before_op1[16],
14101 checksum_after_op0[16],
14102 checksum_after_op1[16];
14103 struct md5_ctx ctx;
14104 htab_t ht;
14105
14106 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14107 md5_init_ctx (&ctx);
14108 fold_checksum_tree (op0, &ctx, ht);
14109 md5_finish_ctx (&ctx, checksum_before_op0);
14110 htab_empty (ht);
14111
14112 md5_init_ctx (&ctx);
14113 fold_checksum_tree (op1, &ctx, ht);
14114 md5_finish_ctx (&ctx, checksum_before_op1);
14115 htab_empty (ht);
14116 #endif
14117
14118 tem = fold_binary_loc (loc, code, type, op0, op1);
14119 if (!tem)
14120 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14121
14122 #ifdef ENABLE_FOLD_CHECKING
14123 md5_init_ctx (&ctx);
14124 fold_checksum_tree (op0, &ctx, ht);
14125 md5_finish_ctx (&ctx, checksum_after_op0);
14126 htab_empty (ht);
14127
14128 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14129 fold_check_failed (op0, tem);
14130
14131 md5_init_ctx (&ctx);
14132 fold_checksum_tree (op1, &ctx, ht);
14133 md5_finish_ctx (&ctx, checksum_after_op1);
14134 htab_delete (ht);
14135
14136 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14137 fold_check_failed (op1, tem);
14138 #endif
14139 return tem;
14140 }
14141
14142 /* Fold a ternary tree expression with code CODE of type TYPE with
14143 operands OP0, OP1, and OP2. Return a folded expression if
14144 successful. Otherwise, return a tree expression with code CODE of
14145 type TYPE with operands OP0, OP1, and OP2. */
14146
14147 tree
14148 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14149 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14150 {
14151 tree tem;
14152 #ifdef ENABLE_FOLD_CHECKING
14153 unsigned char checksum_before_op0[16],
14154 checksum_before_op1[16],
14155 checksum_before_op2[16],
14156 checksum_after_op0[16],
14157 checksum_after_op1[16],
14158 checksum_after_op2[16];
14159 struct md5_ctx ctx;
14160 htab_t ht;
14161
14162 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14163 md5_init_ctx (&ctx);
14164 fold_checksum_tree (op0, &ctx, ht);
14165 md5_finish_ctx (&ctx, checksum_before_op0);
14166 htab_empty (ht);
14167
14168 md5_init_ctx (&ctx);
14169 fold_checksum_tree (op1, &ctx, ht);
14170 md5_finish_ctx (&ctx, checksum_before_op1);
14171 htab_empty (ht);
14172
14173 md5_init_ctx (&ctx);
14174 fold_checksum_tree (op2, &ctx, ht);
14175 md5_finish_ctx (&ctx, checksum_before_op2);
14176 htab_empty (ht);
14177 #endif
14178
14179 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14180 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14181 if (!tem)
14182 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14183
14184 #ifdef ENABLE_FOLD_CHECKING
14185 md5_init_ctx (&ctx);
14186 fold_checksum_tree (op0, &ctx, ht);
14187 md5_finish_ctx (&ctx, checksum_after_op0);
14188 htab_empty (ht);
14189
14190 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14191 fold_check_failed (op0, tem);
14192
14193 md5_init_ctx (&ctx);
14194 fold_checksum_tree (op1, &ctx, ht);
14195 md5_finish_ctx (&ctx, checksum_after_op1);
14196 htab_empty (ht);
14197
14198 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14199 fold_check_failed (op1, tem);
14200
14201 md5_init_ctx (&ctx);
14202 fold_checksum_tree (op2, &ctx, ht);
14203 md5_finish_ctx (&ctx, checksum_after_op2);
14204 htab_delete (ht);
14205
14206 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14207 fold_check_failed (op2, tem);
14208 #endif
14209 return tem;
14210 }
14211
14212 /* Fold a CALL_EXPR of type TYPE with function FN, NARGS
14213 arguments in ARGARRAY, and a null static chain.
14214 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14215 of type TYPE from the given operands as constructed by build_call_array. */
14216
14217 tree
14218 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14219 int nargs, tree *argarray)
14220 {
14221 tree tem;
14222 #ifdef ENABLE_FOLD_CHECKING
14223 unsigned char checksum_before_fn[16],
14224 checksum_before_arglist[16],
14225 checksum_after_fn[16],
14226 checksum_after_arglist[16];
14227 struct md5_ctx ctx;
14228 htab_t ht;
14229 int i;
14230
14231 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14232 md5_init_ctx (&ctx);
14233 fold_checksum_tree (fn, &ctx, ht);
14234 md5_finish_ctx (&ctx, checksum_before_fn);
14235 htab_empty (ht);
14236
14237 md5_init_ctx (&ctx);
14238 for (i = 0; i < nargs; i++)
14239 fold_checksum_tree (argarray[i], &ctx, ht);
14240 md5_finish_ctx (&ctx, checksum_before_arglist);
14241 htab_empty (ht);
14242 #endif
14243
14244 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14245
14246 #ifdef ENABLE_FOLD_CHECKING
14247 md5_init_ctx (&ctx);
14248 fold_checksum_tree (fn, &ctx, ht);
14249 md5_finish_ctx (&ctx, checksum_after_fn);
14250 htab_empty (ht);
14251
14252 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14253 fold_check_failed (fn, tem);
14254
14255 md5_init_ctx (&ctx);
14256 for (i = 0; i < nargs; i++)
14257 fold_checksum_tree (argarray[i], &ctx, ht);
14258 md5_finish_ctx (&ctx, checksum_after_arglist);
14259 htab_delete (ht);
14260
14261 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14262 fold_check_failed (NULL_TREE, tem);
14263 #endif
14264 return tem;
14265 }
14266
14267 /* Perform constant folding and related simplification of initializer
14268 expression EXPR. The functions below behave identically to "fold_buildN"
14269 but ignore potential run-time traps and exceptions that fold must preserve. */
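/* For example, folding the initializer of

     static double d = 1.0 / 3.0;

   must evaluate the division at compile time even when -frounding-math
   or -ftrapping-math is in effect, which is why the macros below
   temporarily clear those flags.  */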
14270
14271 #define START_FOLD_INIT \
14272 int saved_signaling_nans = flag_signaling_nans;\
14273 int saved_trapping_math = flag_trapping_math;\
14274 int saved_rounding_math = flag_rounding_math;\
14275 int saved_trapv = flag_trapv;\
14276 int saved_folding_initializer = folding_initializer;\
14277 flag_signaling_nans = 0;\
14278 flag_trapping_math = 0;\
14279 flag_rounding_math = 0;\
14280 flag_trapv = 0;\
14281 folding_initializer = 1;
14282
14283 #define END_FOLD_INIT \
14284 flag_signaling_nans = saved_signaling_nans;\
14285 flag_trapping_math = saved_trapping_math;\
14286 flag_rounding_math = saved_rounding_math;\
14287 flag_trapv = saved_trapv;\
14288 folding_initializer = saved_folding_initializer;
14289
14290 tree
14291 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14292 tree type, tree op)
14293 {
14294 tree result;
14295 START_FOLD_INIT;
14296
14297 result = fold_build1_loc (loc, code, type, op);
14298
14299 END_FOLD_INIT;
14300 return result;
14301 }
14302
14303 tree
14304 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14305 tree type, tree op0, tree op1)
14306 {
14307 tree result;
14308 START_FOLD_INIT;
14309
14310 result = fold_build2_loc (loc, code, type, op0, op1);
14311
14312 END_FOLD_INIT;
14313 return result;
14314 }
14315
14316 tree
14317 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14318 tree type, tree op0, tree op1, tree op2)
14319 {
14320 tree result;
14321 START_FOLD_INIT;
14322
14323 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14324
14325 END_FOLD_INIT;
14326 return result;
14327 }
14328
14329 tree
14330 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14331 int nargs, tree *argarray)
14332 {
14333 tree result;
14334 START_FOLD_INIT;
14335
14336 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14337
14338 END_FOLD_INIT;
14339 return result;
14340 }
14341
14342 #undef START_FOLD_INIT
14343 #undef END_FOLD_INIT
14344
14345 /* Determine if the first argument is a multiple of the second argument.
14346 Return 0 if it is not, or if we cannot easily determine it to be.
14347
14348 An example of the sort of thing we care about (at this point; this routine
14349 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14350 fold cases do now) is discovering that
14351
14352 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14353
14354 is a multiple of
14355
14356 SAVE_EXPR (J * 8)
14357
14358 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14359
14360 This code also handles discovering that
14361
14362 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14363
14364 is a multiple of 8 so we don't have to worry about dealing with a
14365 possible remainder.
14366
14367 Note that we *look* inside a SAVE_EXPR only to determine how it was
14368 calculated; it is not safe for fold to do much of anything else with the
14369 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14370 at run time. For example, the latter example above *cannot* be implemented
14371 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14372 evaluation time of the original SAVE_EXPR is not necessarily the same at
14373 the time the new expression is evaluated. The only optimization of this
14374 sort that would be valid is changing
14375
14376 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14377
14378 divided by 8 to
14379
14380 SAVE_EXPR (I) * SAVE_EXPR (J)
14381
14382 (where the same SAVE_EXPR (J) is used in the original and the
14383 transformed version). */
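/* A minimal usage sketch, where SIZE_EXPR is a placeholder for an
   existing size tree:

     if (multiple_of_p (sizetype, size_expr, size_int (8)))
       ...

   A zero result only means the property could not be shown, not that
   it is false.  */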
14384
14385 int
14386 multiple_of_p (tree type, const_tree top, const_tree bottom)
14387 {
14388 if (operand_equal_p (top, bottom, 0))
14389 return 1;
14390
14391 if (TREE_CODE (type) != INTEGER_TYPE)
14392 return 0;
14393
14394 switch (TREE_CODE (top))
14395 {
14396 case BIT_AND_EXPR:
14397 /* Bitwise and provides a power of two multiple. If the mask is
14398 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14399 if (!integer_pow2p (bottom))
14400 return 0;
14401 /* FALLTHRU */
14402
14403 case MULT_EXPR:
14404 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14405 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14406
14407 case PLUS_EXPR:
14408 case MINUS_EXPR:
14409 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14410 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14411
14412 case LSHIFT_EXPR:
14413 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14414 {
14415 tree op1, t1;
14416
14417 op1 = TREE_OPERAND (top, 1);
14418 /* const_binop may not detect overflow correctly,
14419 so check for it explicitly here. */
14420 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14421 > TREE_INT_CST_LOW (op1)
14422 && TREE_INT_CST_HIGH (op1) == 0
14423 && 0 != (t1 = fold_convert (type,
14424 const_binop (LSHIFT_EXPR,
14425 size_one_node,
14426 op1)))
14427 && !TREE_OVERFLOW (t1))
14428 return multiple_of_p (type, t1, bottom);
14429 }
14430 return 0;
14431
14432 case NOP_EXPR:
14433 /* Can't handle conversions from non-integral or wider integral type. */
14434 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14435 || (TYPE_PRECISION (type)
14436 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14437 return 0;
14438
14439 /* ... fall through ... */
14440
14441 case SAVE_EXPR:
14442 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14443
14444 case COND_EXPR:
14445 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14446 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14447
14448 case INTEGER_CST:
14449 if (TREE_CODE (bottom) != INTEGER_CST
14450 || integer_zerop (bottom)
14451 || (TYPE_UNSIGNED (type)
14452 && (tree_int_cst_sgn (top) < 0
14453 || tree_int_cst_sgn (bottom) < 0)))
14454 return 0;
14455 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14456 top, bottom));
14457
14458 default:
14459 return 0;
14460 }
14461 }
14462
14463 /* Return true if CODE or TYPE is known to be non-negative. */
14464
14465 static bool
14466 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14467 {
14468 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14469 && truth_value_p (code))
14470 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14471 have a signed:1 type (where the values are -1 and 0). */
14472 return true;
14473 return false;
14474 }
14475
14476 /* Return true if (CODE OP0) is known to be non-negative. If the return
14477 value is based on the assumption that signed overflow is undefined,
14478 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14479 *STRICT_OVERFLOW_P. */
14480
14481 bool
14482 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14483 bool *strict_overflow_p)
14484 {
14485 if (TYPE_UNSIGNED (type))
14486 return true;
14487
14488 switch (code)
14489 {
14490 case ABS_EXPR:
14491 /* We can't return 1 if flag_wrapv is set because
14492 ABS_EXPR<INT_MIN> = INT_MIN. */
14493 if (!INTEGRAL_TYPE_P (type))
14494 return true;
14495 if (TYPE_OVERFLOW_UNDEFINED (type))
14496 {
14497 *strict_overflow_p = true;
14498 return true;
14499 }
14500 break;
14501
14502 case NON_LVALUE_EXPR:
14503 case FLOAT_EXPR:
14504 case FIX_TRUNC_EXPR:
14505 return tree_expr_nonnegative_warnv_p (op0,
14506 strict_overflow_p);
14507
14508 case NOP_EXPR:
14509 {
14510 tree inner_type = TREE_TYPE (op0);
14511 tree outer_type = type;
14512
14513 if (TREE_CODE (outer_type) == REAL_TYPE)
14514 {
14515 if (TREE_CODE (inner_type) == REAL_TYPE)
14516 return tree_expr_nonnegative_warnv_p (op0,
14517 strict_overflow_p);
14518 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14519 {
14520 if (TYPE_UNSIGNED (inner_type))
14521 return true;
14522 return tree_expr_nonnegative_warnv_p (op0,
14523 strict_overflow_p);
14524 }
14525 }
14526 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14527 {
14528 if (TREE_CODE (inner_type) == REAL_TYPE)
14529 return tree_expr_nonnegative_warnv_p (op0,
14530 strict_overflow_p);
14531 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14532 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14533 && TYPE_UNSIGNED (inner_type);
14534 }
14535 }
14536 break;
14537
14538 default:
14539 return tree_simple_nonnegative_warnv_p (code, type);
14540 }
14541
14542 /* We don't know the sign of `t', so be conservative and return false. */
14543 return false;
14544 }
14545
14546 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14547 value is based on the assumption that signed overflow is undefined,
14548 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14549 *STRICT_OVERFLOW_P. */
14550
14551 bool
14552 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14553 tree op1, bool *strict_overflow_p)
14554 {
14555 if (TYPE_UNSIGNED (type))
14556 return true;
14557
14558 switch (code)
14559 {
14560 case POINTER_PLUS_EXPR:
14561 case PLUS_EXPR:
14562 if (FLOAT_TYPE_P (type))
14563 return (tree_expr_nonnegative_warnv_p (op0,
14564 strict_overflow_p)
14565 && tree_expr_nonnegative_warnv_p (op1,
14566 strict_overflow_p));
14567
14568 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14569 both unsigned and at least 2 bits shorter than the result. */
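      /* For example, 16-bit unsigned X and Y zero-extended to 32-bit
	 int sum to at most 2 * (2**16 - 1) < 2**17, so the 32-bit
	 sign bit can never be reached (17 < 32 in the check below).  */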
14570 if (TREE_CODE (type) == INTEGER_TYPE
14571 && TREE_CODE (op0) == NOP_EXPR
14572 && TREE_CODE (op1) == NOP_EXPR)
14573 {
14574 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14575 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14576 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14577 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14578 {
14579 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14580 TYPE_PRECISION (inner2)) + 1;
14581 return prec < TYPE_PRECISION (type);
14582 }
14583 }
14584 break;
14585
14586 case MULT_EXPR:
14587 if (FLOAT_TYPE_P (type))
14588 {
14589 /* x * x for floating point x is always non-negative. */
14590 if (operand_equal_p (op0, op1, 0))
14591 return true;
14592 return (tree_expr_nonnegative_warnv_p (op0,
14593 strict_overflow_p)
14594 && tree_expr_nonnegative_warnv_p (op1,
14595 strict_overflow_p));
14596 }
14597
14598 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14599 both unsigned and the total of their bits is less than the result's precision. */
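      /* For example, a 15-bit and a 16-bit unsigned value zero-extended
	 to 32-bit int multiply to less than 2**31, so the sign bit
	 stays clear (15 + 16 < 32 in the check below).  */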
14600 if (TREE_CODE (type) == INTEGER_TYPE
14601 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14602 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14603 {
14604 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14605 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14606 : TREE_TYPE (op0);
14607 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14608 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14609 : TREE_TYPE (op1);
14610
14611 bool unsigned0 = TYPE_UNSIGNED (inner0);
14612 bool unsigned1 = TYPE_UNSIGNED (inner1);
14613
14614 if (TREE_CODE (op0) == INTEGER_CST)
14615 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14616
14617 if (TREE_CODE (op1) == INTEGER_CST)
14618 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14619
14620 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14621 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14622 {
14623 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14624 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14625 : TYPE_PRECISION (inner0);
14626
14627 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14628 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14629 : TYPE_PRECISION (inner1);
14630
14631 return precision0 + precision1 < TYPE_PRECISION (type);
14632 }
14633 }
14634 return false;
14635
14636 case BIT_AND_EXPR:
14637 case MAX_EXPR:
14638 return (tree_expr_nonnegative_warnv_p (op0,
14639 strict_overflow_p)
14640 || tree_expr_nonnegative_warnv_p (op1,
14641 strict_overflow_p));
14642
14643 case BIT_IOR_EXPR:
14644 case BIT_XOR_EXPR:
14645 case MIN_EXPR:
14646 case RDIV_EXPR:
14647 case TRUNC_DIV_EXPR:
14648 case CEIL_DIV_EXPR:
14649 case FLOOR_DIV_EXPR:
14650 case ROUND_DIV_EXPR:
14651 return (tree_expr_nonnegative_warnv_p (op0,
14652 strict_overflow_p)
14653 && tree_expr_nonnegative_warnv_p (op1,
14654 strict_overflow_p));
14655
14656 case TRUNC_MOD_EXPR:
14657 case CEIL_MOD_EXPR:
14658 case FLOOR_MOD_EXPR:
14659 case ROUND_MOD_EXPR:
14660 return tree_expr_nonnegative_warnv_p (op0,
14661 strict_overflow_p);
14662 default:
14663 return tree_simple_nonnegative_warnv_p (code, type);
14664 }
14665
14666 /* We don't know the sign of `t', so be conservative and return false. */
14667 return false;
14668 }
14669
14670 /* Return true if T is known to be non-negative. If the return
14671 value is based on the assumption that signed overflow is undefined,
14672 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14673 *STRICT_OVERFLOW_P. */
14674
14675 bool
14676 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14677 {
14678 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14679 return true;
14680
14681 switch (TREE_CODE (t))
14682 {
14683 case INTEGER_CST:
14684 return tree_int_cst_sgn (t) >= 0;
14685
14686 case REAL_CST:
14687 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14688
14689 case FIXED_CST:
14690 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14691
14692 case COND_EXPR:
14693 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14694 strict_overflow_p)
14695 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14696 strict_overflow_p));
14697 default:
14698 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14699 TREE_TYPE (t));
14700 }
14701 /* We don't know the sign of `t', so be conservative and return false. */
14702 return false;
14703 }
14704
14705 /* Return true if T is known to be non-negative. If the return
14706 value is based on the assumption that signed overflow is undefined,
14707 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14708 *STRICT_OVERFLOW_P. */
14709
14710 bool
14711 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14712 tree arg0, tree arg1, bool *strict_overflow_p)
14713 {
14714 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14715 switch (DECL_FUNCTION_CODE (fndecl))
14716 {
14717 CASE_FLT_FN (BUILT_IN_ACOS):
14718 CASE_FLT_FN (BUILT_IN_ACOSH):
14719 CASE_FLT_FN (BUILT_IN_CABS):
14720 CASE_FLT_FN (BUILT_IN_COSH):
14721 CASE_FLT_FN (BUILT_IN_ERFC):
14722 CASE_FLT_FN (BUILT_IN_EXP):
14723 CASE_FLT_FN (BUILT_IN_EXP10):
14724 CASE_FLT_FN (BUILT_IN_EXP2):
14725 CASE_FLT_FN (BUILT_IN_FABS):
14726 CASE_FLT_FN (BUILT_IN_FDIM):
14727 CASE_FLT_FN (BUILT_IN_HYPOT):
14728 CASE_FLT_FN (BUILT_IN_POW10):
14729 CASE_INT_FN (BUILT_IN_FFS):
14730 CASE_INT_FN (BUILT_IN_PARITY):
14731 CASE_INT_FN (BUILT_IN_POPCOUNT):
14732 case BUILT_IN_BSWAP32:
14733 case BUILT_IN_BSWAP64:
14734 /* Always true. */
14735 return true;
14736
14737 CASE_FLT_FN (BUILT_IN_SQRT):
14738 /* sqrt(-0.0) is -0.0. */
14739 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14740 return true;
14741 return tree_expr_nonnegative_warnv_p (arg0,
14742 strict_overflow_p);
14743
14744 CASE_FLT_FN (BUILT_IN_ASINH):
14745 CASE_FLT_FN (BUILT_IN_ATAN):
14746 CASE_FLT_FN (BUILT_IN_ATANH):
14747 CASE_FLT_FN (BUILT_IN_CBRT):
14748 CASE_FLT_FN (BUILT_IN_CEIL):
14749 CASE_FLT_FN (BUILT_IN_ERF):
14750 CASE_FLT_FN (BUILT_IN_EXPM1):
14751 CASE_FLT_FN (BUILT_IN_FLOOR):
14752 CASE_FLT_FN (BUILT_IN_FMOD):
14753 CASE_FLT_FN (BUILT_IN_FREXP):
14754 CASE_FLT_FN (BUILT_IN_ICEIL):
14755 CASE_FLT_FN (BUILT_IN_IFLOOR):
14756 CASE_FLT_FN (BUILT_IN_IRINT):
14757 CASE_FLT_FN (BUILT_IN_IROUND):
14758 CASE_FLT_FN (BUILT_IN_LCEIL):
14759 CASE_FLT_FN (BUILT_IN_LDEXP):
14760 CASE_FLT_FN (BUILT_IN_LFLOOR):
14761 CASE_FLT_FN (BUILT_IN_LLCEIL):
14762 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14763 CASE_FLT_FN (BUILT_IN_LLRINT):
14764 CASE_FLT_FN (BUILT_IN_LLROUND):
14765 CASE_FLT_FN (BUILT_IN_LRINT):
14766 CASE_FLT_FN (BUILT_IN_LROUND):
14767 CASE_FLT_FN (BUILT_IN_MODF):
14768 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14769 CASE_FLT_FN (BUILT_IN_RINT):
14770 CASE_FLT_FN (BUILT_IN_ROUND):
14771 CASE_FLT_FN (BUILT_IN_SCALB):
14772 CASE_FLT_FN (BUILT_IN_SCALBLN):
14773 CASE_FLT_FN (BUILT_IN_SCALBN):
14774 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14775 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14776 CASE_FLT_FN (BUILT_IN_SINH):
14777 CASE_FLT_FN (BUILT_IN_TANH):
14778 CASE_FLT_FN (BUILT_IN_TRUNC):
14779 /* True if the 1st argument is nonnegative. */
14780 return tree_expr_nonnegative_warnv_p (arg0,
14781 strict_overflow_p);
14782
14783 CASE_FLT_FN (BUILT_IN_FMAX):
14784 /* True if the 1st OR the 2nd argument is nonnegative. */
14785 return (tree_expr_nonnegative_warnv_p (arg0,
14786 strict_overflow_p)
14787 || (tree_expr_nonnegative_warnv_p (arg1,
14788 strict_overflow_p)));
14789
14790 CASE_FLT_FN (BUILT_IN_FMIN):
14791 /* True if the 1st AND 2nd arguments are nonnegative. */
14792 return (tree_expr_nonnegative_warnv_p (arg0,
14793 strict_overflow_p)
14794 && (tree_expr_nonnegative_warnv_p (arg1,
14795 strict_overflow_p)));
14796
14797 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14798 /* True if the 2nd argument is nonnegative. */
14799 return tree_expr_nonnegative_warnv_p (arg1,
14800 strict_overflow_p);
14801
14802 CASE_FLT_FN (BUILT_IN_POWI):
14803 /* True if the 1st argument is nonnegative or the second
14804 argument is an even integer. */
14805 if (TREE_CODE (arg1) == INTEGER_CST
14806 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14807 return true;
14808 return tree_expr_nonnegative_warnv_p (arg0,
14809 strict_overflow_p);
14810
14811 CASE_FLT_FN (BUILT_IN_POW):
14812 /* True if the 1st argument is nonnegative or the second
14813 argument is an even integer valued real. */
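      /* E.g., pow (x, 2.0) is nonnegative for any x, since the
	 exponent is an even integer-valued real.  */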
14814 if (TREE_CODE (arg1) == REAL_CST)
14815 {
14816 REAL_VALUE_TYPE c;
14817 HOST_WIDE_INT n;
14818
14819 c = TREE_REAL_CST (arg1);
14820 n = real_to_integer (&c);
14821 if ((n & 1) == 0)
14822 {
14823 REAL_VALUE_TYPE cint;
14824 real_from_integer (&cint, VOIDmode, n,
14825 n < 0 ? -1 : 0, 0);
14826 if (real_identical (&c, &cint))
14827 return true;
14828 }
14829 }
14830 return tree_expr_nonnegative_warnv_p (arg0,
14831 strict_overflow_p);
14832
14833 default:
14834 break;
14835 }
14836 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14837 type);
14838 }
14839
14840 /* Return true if T is known to be non-negative. If the return
14841 value is based on the assumption that signed overflow is undefined,
14842 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14843 *STRICT_OVERFLOW_P. */
14844
14845 bool
14846 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14847 {
14848 enum tree_code code = TREE_CODE (t);
14849 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14850 return true;
14851
14852 switch (code)
14853 {
14854 case TARGET_EXPR:
14855 {
14856 tree temp = TARGET_EXPR_SLOT (t);
14857 t = TARGET_EXPR_INITIAL (t);
14858
14859 /* If the initializer is non-void, then it's a normal expression
14860 that will be assigned to the slot. */
14861 if (!VOID_TYPE_P (t))
14862 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14863
14864 /* Otherwise, the initializer sets the slot in some way. One common
14865 way is an assignment statement at the end of the initializer. */
14866 while (1)
14867 {
14868 if (TREE_CODE (t) == BIND_EXPR)
14869 t = expr_last (BIND_EXPR_BODY (t));
14870 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14871 || TREE_CODE (t) == TRY_CATCH_EXPR)
14872 t = expr_last (TREE_OPERAND (t, 0));
14873 else if (TREE_CODE (t) == STATEMENT_LIST)
14874 t = expr_last (t);
14875 else
14876 break;
14877 }
14878 if (TREE_CODE (t) == MODIFY_EXPR
14879 && TREE_OPERAND (t, 0) == temp)
14880 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14881 strict_overflow_p);
14882
14883 return false;
14884 }
14885
14886 case CALL_EXPR:
14887 {
14888 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14889 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14890
14891 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14892 get_callee_fndecl (t),
14893 arg0,
14894 arg1,
14895 strict_overflow_p);
14896 }
14897 case COMPOUND_EXPR:
14898 case MODIFY_EXPR:
14899 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14900 strict_overflow_p);
14901 case BIND_EXPR:
14902 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14903 strict_overflow_p);
14904 case SAVE_EXPR:
14905 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14906 strict_overflow_p);
14907
14908 default:
14909 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14910 TREE_TYPE (t));
14911 }
14912
14913 /* We don't know the sign of `t', so be conservative and return false. */
14914 return false;
14915 }
14916
14917 /* Return true if T is known to be non-negative. If the return
14918 value is based on the assumption that signed overflow is undefined,
14919 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14920 *STRICT_OVERFLOW_P. */
14921
14922 bool
14923 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14924 {
14925 enum tree_code code;
14926 if (t == error_mark_node)
14927 return false;
14928
14929 code = TREE_CODE (t);
14930 switch (TREE_CODE_CLASS (code))
14931 {
14932 case tcc_binary:
14933 case tcc_comparison:
14934 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14935 TREE_TYPE (t),
14936 TREE_OPERAND (t, 0),
14937 TREE_OPERAND (t, 1),
14938 strict_overflow_p);
14939
14940 case tcc_unary:
14941 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14942 TREE_TYPE (t),
14943 TREE_OPERAND (t, 0),
14944 strict_overflow_p);
14945
14946 case tcc_constant:
14947 case tcc_declaration:
14948 case tcc_reference:
14949 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14950
14951 default:
14952 break;
14953 }
14954
14955 switch (code)
14956 {
14957 case TRUTH_AND_EXPR:
14958 case TRUTH_OR_EXPR:
14959 case TRUTH_XOR_EXPR:
14960 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14961 TREE_TYPE (t),
14962 TREE_OPERAND (t, 0),
14963 TREE_OPERAND (t, 1),
14964 strict_overflow_p);
14965 case TRUTH_NOT_EXPR:
14966 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14967 TREE_TYPE (t),
14968 TREE_OPERAND (t, 0),
14969 strict_overflow_p);
14970
14971 case COND_EXPR:
14972 case CONSTRUCTOR:
14973 case OBJ_TYPE_REF:
14974 case ASSERT_EXPR:
14975 case ADDR_EXPR:
14976 case WITH_SIZE_EXPR:
14977 case SSA_NAME:
14978 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14979
14980 default:
14981 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14982 }
14983 }
14984
14985 /* Return true if `t' is known to be non-negative. Handle warnings
14986 about undefined signed overflow. */
14987
14988 bool
14989 tree_expr_nonnegative_p (tree t)
14990 {
14991 bool ret, strict_overflow_p;
14992
14993 strict_overflow_p = false;
14994 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14995 if (strict_overflow_p)
14996 fold_overflow_warning (("assuming signed overflow does not occur when "
14997 "determining that expression is always "
14998 "non-negative"),
14999 WARN_STRICT_OVERFLOW_MISC);
15000 return ret;
15001 }
15002
15003
15004 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15005 For floating point we further ensure that T is not denormal.
15006 Similar logic is present in nonzero_address in rtlanal.c.
15007
15008 If the return value is based on the assumption that signed overflow
15009 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15010 change *STRICT_OVERFLOW_P. */
15011
15012 bool
15013 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15014 bool *strict_overflow_p)
15015 {
15016 switch (code)
15017 {
15018 case ABS_EXPR:
15019 return tree_expr_nonzero_warnv_p (op0,
15020 strict_overflow_p);
15021
15022 case NOP_EXPR:
15023 {
15024 tree inner_type = TREE_TYPE (op0);
15025 tree outer_type = type;
15026
15027 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15028 && tree_expr_nonzero_warnv_p (op0,
15029 strict_overflow_p));
15030 }
15031 break;
15032
15033 case NON_LVALUE_EXPR:
15034 return tree_expr_nonzero_warnv_p (op0,
15035 strict_overflow_p);
15036
15037 default:
15038 break;
15039 }
15040
15041 return false;
15042 }
15043
15044 /* Return true when (CODE OP0 OP1) is known to be nonzero.  (For
15045 floating point, "nonzero" would additionally require not being
15046 denormal.)  Similar logic is present in nonzero_address_p in rtlanal.c.
15047
15048 If the return value is based on the assumption that signed overflow
15049 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15050 change *STRICT_OVERFLOW_P. */
15051
15052 bool
15053 tree_binary_nonzero_warnv_p (enum tree_code code,
15054 tree type,
15055 tree op0,
15056 tree op1, bool *strict_overflow_p)
15057 {
15058 bool sub_strict_overflow_p;
15059 switch (code)
15060 {
15061 case POINTER_PLUS_EXPR:
15062 case PLUS_EXPR:
15063 if (TYPE_OVERFLOW_UNDEFINED (type))
15064 {
15065 /* In the presence of negative values it is hard
15066 to say anything. */
15067 sub_strict_overflow_p = false;
15068 if (!tree_expr_nonnegative_warnv_p (op0,
15069 &sub_strict_overflow_p)
15070 || !tree_expr_nonnegative_warnv_p (op1,
15071 &sub_strict_overflow_p))
15072 return false;
15073 /* One of the operands must be positive and the other non-negative. */
15074 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15075 overflows, on a twos-complement machine the sum of two
15076 nonnegative numbers can never be zero. */
15077 return (tree_expr_nonzero_warnv_p (op0,
15078 strict_overflow_p)
15079 || tree_expr_nonzero_warnv_p (op1,
15080 strict_overflow_p));
15081 }
15082 break;
15083
15084 case MULT_EXPR:
15085 if (TYPE_OVERFLOW_UNDEFINED (type))
15086 {
15087 if (tree_expr_nonzero_warnv_p (op0,
15088 strict_overflow_p)
15089 && tree_expr_nonzero_warnv_p (op1,
15090 strict_overflow_p))
15091 {
15092 *strict_overflow_p = true;
15093 return true;
15094 }
15095 }
15096 break;
15097
15098 case MIN_EXPR:
15099 sub_strict_overflow_p = false;
15100 if (tree_expr_nonzero_warnv_p (op0,
15101 &sub_strict_overflow_p)
15102 && tree_expr_nonzero_warnv_p (op1,
15103 &sub_strict_overflow_p))
15104 {
15105 if (sub_strict_overflow_p)
15106 *strict_overflow_p = true;
15107 }
15108 break;
15109
15110 case MAX_EXPR:
15111 sub_strict_overflow_p = false;
15112 if (tree_expr_nonzero_warnv_p (op0,
15113 &sub_strict_overflow_p))
15114 {
15115 if (sub_strict_overflow_p)
15116 *strict_overflow_p = true;
15117
15118 /* When both operands are nonzero, MAX must be too. */
15119 if (tree_expr_nonzero_warnv_p (op1,
15120 strict_overflow_p))
15121 return true;
15122
15123 /* MAX where operand 0 is positive is positive. */
15124 return tree_expr_nonnegative_warnv_p (op0,
15125 strict_overflow_p);
15126 }
15127 /* MAX where operand 1 is positive is positive. */
15128 else if (tree_expr_nonzero_warnv_p (op1,
15129 &sub_strict_overflow_p)
15130 && tree_expr_nonnegative_warnv_p (op1,
15131 &sub_strict_overflow_p))
15132 {
15133 if (sub_strict_overflow_p)
15134 *strict_overflow_p = true;
15135 return true;
15136 }
15137 break;
15138
15139 case BIT_IOR_EXPR:
15140 return (tree_expr_nonzero_warnv_p (op1,
15141 strict_overflow_p)
15142 || tree_expr_nonzero_warnv_p (op0,
15143 strict_overflow_p));
15144
15145 default:
15146 break;
15147 }
15148
15149 return false;
15150 }
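/* (Editorial sketch, not part of GCC.)  The PLUS_EXPR reasoning above
   can be checked exhaustively for 8-bit two's complement: if both
   operands are non-negative and at least one is nonzero, the sum lies
   in [1, 254] and so never wraps to zero.  */
#if 0
#include <assert.h>
int
main (void)
{
  int a, b;
  for (a = 0; a <= 127; a++)
    for (b = 0; b <= 127; b++)
      if (a != 0 || b != 0)
        assert ((unsigned char) (a + b) != 0);
  return 0;
}
#endif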
15151
15152 /* Return true when T is known to be nonzero; in particular, when T
15153 is an address, ensure that it cannot be NULL.  Similar logic is
15154 present in nonzero_address_p in rtlanal.c.
15155
15156 If the return value is based on the assumption that signed overflow
15157 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15158 change *STRICT_OVERFLOW_P. */
15159
15160 bool
15161 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15162 {
15163 bool sub_strict_overflow_p;
15164 switch (TREE_CODE (t))
15165 {
15166 case INTEGER_CST:
15167 return !integer_zerop (t);
15168
15169 case ADDR_EXPR:
15170 {
15171 tree base = TREE_OPERAND (t, 0);
15172 if (!DECL_P (base))
15173 base = get_base_address (base);
15174
15175 if (!base)
15176 return false;
15177
15178 /* Weak declarations may link to NULL.  Other things may also be NULL,
15179 so protect with -fdelete-null-pointer-checks; variables allocated
15180 on the stack, however, can never be NULL. */
15181 if (DECL_P (base)
15182 && (flag_delete_null_pointer_checks
15183 || (DECL_CONTEXT (base)
15184 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15185 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15186 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15187
15188 /* Constants are never weak. */
15189 if (CONSTANT_CLASS_P (base))
15190 return true;
15191
15192 return false;
15193 }
15194
15195 case COND_EXPR:
15196 sub_strict_overflow_p = false;
15197 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15198 &sub_strict_overflow_p)
15199 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15200 &sub_strict_overflow_p))
15201 {
15202 if (sub_strict_overflow_p)
15203 *strict_overflow_p = true;
15204 return true;
15205 }
15206 break;
15207
15208 default:
15209 break;
15210 }
15211 return false;
15212 }
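/* (Editorial illustration of the ADDR_EXPR case above.)  At the source
   level:

     extern int wsym __attribute__ ((weak));
     if (&wsym) ...    not folded: a weak symbol may resolve to NULL

     int local;
     if (&local) ...   folded to true: automatic variables are never
                       NULL, even without -fdelete-null-pointer-checks

   `wsym' and `local' are hypothetical names used only for the example.  */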
15213
15214 /* Return true when T is known to be nonzero.  Only integral and
15215 pointer types are handled; floating point would need more work.
15216 Similar logic for addresses is present in nonzero_address_p in rtlanal.c.
15217
15218 If the return value is based on the assumption that signed overflow
15219 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15220 change *STRICT_OVERFLOW_P. */
15221
15222 bool
15223 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15224 {
15225 tree type = TREE_TYPE (t);
15226 enum tree_code code;
15227
15228 /* Doing something useful for floating point would need more work. */
15229 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15230 return false;
15231
15232 code = TREE_CODE (t);
15233 switch (TREE_CODE_CLASS (code))
15234 {
15235 case tcc_unary:
15236 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15237 strict_overflow_p);
15238 case tcc_binary:
15239 case tcc_comparison:
15240 return tree_binary_nonzero_warnv_p (code, type,
15241 TREE_OPERAND (t, 0),
15242 TREE_OPERAND (t, 1),
15243 strict_overflow_p);
15244 case tcc_constant:
15245 case tcc_declaration:
15246 case tcc_reference:
15247 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15248
15249 default:
15250 break;
15251 }
15252
15253 switch (code)
15254 {
15255 case TRUTH_NOT_EXPR:
15256 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15257 strict_overflow_p);
15258
15259 case TRUTH_AND_EXPR:
15260 case TRUTH_OR_EXPR:
15261 case TRUTH_XOR_EXPR:
15262 return tree_binary_nonzero_warnv_p (code, type,
15263 TREE_OPERAND (t, 0),
15264 TREE_OPERAND (t, 1),
15265 strict_overflow_p);
15266
15267 case COND_EXPR:
15268 case CONSTRUCTOR:
15269 case OBJ_TYPE_REF:
15270 case ASSERT_EXPR:
15271 case ADDR_EXPR:
15272 case WITH_SIZE_EXPR:
15273 case SSA_NAME:
15274 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15275
15276 case COMPOUND_EXPR:
15277 case MODIFY_EXPR:
15278 case BIND_EXPR:
15279 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15280 strict_overflow_p);
15281
15282 case SAVE_EXPR:
15283 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15284 strict_overflow_p);
15285
15286 case CALL_EXPR:
15287 return alloca_call_p (t);
15288
15289 default:
15290 break;
15291 }
15292 return false;
15293 }
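/* (Editorial note.)  The CALL_EXPR case above defers to alloca_call_p:
   a call to alloca is treated as returning a nonzero pointer, so e.g.
   a test like `if (alloca (n))' can be folded to true.  */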
15294
15295 /* Return true when T is known to be nonzero.  Handle warnings about
15296 undefined signed overflow. */
15297
15298 bool
15299 tree_expr_nonzero_p (tree t)
15300 {
15301 bool ret, strict_overflow_p;
15302
15303 strict_overflow_p = false;
15304 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15305 if (strict_overflow_p)
15306 fold_overflow_warning (("assuming signed overflow does not occur when "
15307 "determining that expression is always "
15308 "non-zero"),
15309 WARN_STRICT_OVERFLOW_MISC);
15310 return ret;
15311 }
15312
15313 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15314 attempt to fold the expression to a constant without modifying TYPE,
15315 OP0 or OP1.
15316
15317 If the expression can be simplified to a constant, then return
15318 the constant.  If the expression cannot be simplified to a
15319 constant, then return NULL_TREE. */
15320
15321 tree
15322 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15323 {
15324 tree tem = fold_binary (code, type, op0, op1);
15325 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15326 }
15327
15328 /* Given the components of a unary expression CODE, TYPE and OP0,
15329 attempt to fold the expression to a constant without modifying
15330 TYPE or OP0.
15331
15332 If the expression can be simplified to a constant, then return
15333 the constant.  If the expression cannot be simplified to a
15334 constant, then return NULL_TREE. */
15335
15336 tree
15337 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15338 {
15339 tree tem = fold_unary (code, type, op0);
15340 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15341 }
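/* (Editorial usage sketch, not part of GCC.)  Both helpers simply
   discard folds that do not produce a constant:  */
#if 0
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Folds 2 + 3 and yields the INTEGER_CST 5.  */
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                      two, three);
  /* Had either operand been, say, a VAR_DECL, the result would be
     NULL_TREE, since 2 + x is not a compile-time constant.  */
#endif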
15342
15343 /* If EXP represents referencing an element in a constant string
15344 (either via pointer arithmetic or array indexing), return the
15345 tree representing the value accessed, otherwise return NULL. */
15346
15347 tree
15348 fold_read_from_constant_string (tree exp)
15349 {
15350 if ((TREE_CODE (exp) == INDIRECT_REF
15351 || TREE_CODE (exp) == ARRAY_REF)
15352 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15353 {
15354 tree exp1 = TREE_OPERAND (exp, 0);
15355 tree index;
15356 tree string;
15357 location_t loc = EXPR_LOCATION (exp);
15358
15359 if (TREE_CODE (exp) == INDIRECT_REF)
15360 string = string_constant (exp1, &index);
15361 else
15362 {
15363 tree low_bound = array_ref_low_bound (exp);
15364 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15365
15366 /* Optimize the special-case of a zero lower bound.
15367
15368 We convert the low_bound to sizetype to avoid some problems
15369 with constant folding. (E.g. suppose the lower bound is 1,
15370 and its mode is QI.  Without the conversion, (ARRAY
15371 + (INDEX - (unsigned char) 1)) becomes ((ARRAY + (-(unsigned char) 1))
15372 + INDEX), which becomes (ARRAY + 255 + INDEX).  Oops!) */
15373 if (! integer_zerop (low_bound))
15374 index = size_diffop_loc (loc, index,
15375 fold_convert_loc (loc, sizetype, low_bound));
15376
15377 string = exp1;
15378 }
15379
15380 if (string
15381 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15382 && TREE_CODE (string) == STRING_CST
15383 && TREE_CODE (index) == INTEGER_CST
15384 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15385 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15386 == MODE_INT)
15387 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15388 return build_int_cst_type (TREE_TYPE (exp),
15389 (TREE_STRING_POINTER (string)
15390 [TREE_INT_CST_LOW (index)]));
15391 }
15392 return NULL;
15393 }
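/* (Editorial example.)  Given the C expression "hello"[1] -- an
   ARRAY_REF into a STRING_CST with a single-byte integer-mode element
   type and an in-range constant index -- the function above returns
   the INTEGER_CST 'e'.  A variable or out-of-range index yields NULL.  */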
15394
15395 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15396 an integer, real, or fixed-point constant.
15397
15398 TYPE is the type of the result. */
15399
15400 static tree
15401 fold_negate_const (tree arg0, tree type)
15402 {
15403 tree t = NULL_TREE;
15404
15405 switch (TREE_CODE (arg0))
15406 {
15407 case INTEGER_CST:
15408 {
15409 double_int val = tree_to_double_int (arg0);
15410 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15411
15412 t = force_fit_type_double (type, val, 1,
15413 (overflow | TREE_OVERFLOW (arg0))
15414 && !TYPE_UNSIGNED (type));
15415 break;
15416 }
15417
15418 case REAL_CST:
15419 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15420 break;
15421
15422 case FIXED_CST:
15423 {
15424 FIXED_VALUE_TYPE f;
15425 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15426 &(TREE_FIXED_CST (arg0)), NULL,
15427 TYPE_SATURATING (type));
15428 t = build_fixed (type, f);
15429 /* Propagate overflow flags. */
15430 if (overflow_p | TREE_OVERFLOW (arg0))
15431 TREE_OVERFLOW (t) = 1;
15432 break;
15433 }
15434
15435 default:
15436 gcc_unreachable ();
15437 }
15438
15439 return t;
15440 }
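/* (Editorial note.)  For INTEGER_CSTs the interesting case is the most
   negative value: negating INT_MIN in a 32-bit signed type wraps back
   to INT_MIN, and force_fit_type_double records this by setting
   TREE_OVERFLOW on the result rather than producing a different value.  */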
15441
15442 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15443 an integer constant or real constant.
15444
15445 TYPE is the type of the result. */
15446
15447 tree
15448 fold_abs_const (tree arg0, tree type)
15449 {
15450 tree t = NULL_TREE;
15451
15452 switch (TREE_CODE (arg0))
15453 {
15454 case INTEGER_CST:
15455 {
15456 double_int val = tree_to_double_int (arg0);
15457
15458 /* If the value is unsigned or non-negative, then the absolute value
15459 is the same as the ordinary value. */
15460 if (TYPE_UNSIGNED (type)
15461 || !double_int_negative_p (val))
15462 t = arg0;
15463
15464 /* If the value is negative, then the absolute value is
15465 its negation. */
15466 else
15467 {
15468 int overflow;
15469
15470 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15471 t = force_fit_type_double (type, val, -1,
15472 overflow | TREE_OVERFLOW (arg0));
15473 }
15474 }
15475 break;
15476
15477 case REAL_CST:
15478 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15479 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15480 else
15481 t = arg0;
15482 break;
15483
15484 default:
15485 gcc_unreachable ();
15486 }
15487
15488 return t;
15489 }
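/* (Editorial note.)  The same boundary case applies here: abs (INT_MIN)
   is computed by negation, so in a signed type that cannot represent
   the result it likewise carries TREE_OVERFLOW.  */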
15490
15491 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15492 constant. TYPE is the type of the result. */
15493
15494 static tree
15495 fold_not_const (const_tree arg0, tree type)
15496 {
15497 double_int val;
15498
15499 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15500
15501 val = double_int_not (tree_to_double_int (arg0));
15502 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15503 }
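/* (Editorial example.)  In a signed 32-bit type, ~0 folds to -1 and
   ~(-1) folds to 0; any overflow flag is simply copied from ARG0.  */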
15504
15505 /* Given CODE, a relational operator, the target type, TYPE and two
15506 constant operands OP0 and OP1, return the result of the
15507 relational operation. If the result is not a compile time
15508 constant, then return NULL_TREE. */
15509
15510 static tree
15511 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15512 {
15513 int result, invert;
15514
15515 /* From here on, the only cases we handle are when the result is
15516 known to be a constant. */
15517
15518 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15519 {
15520 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15521 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15522
15523 /* Handle the cases where either operand is a NaN. */
15524 if (real_isnan (c0) || real_isnan (c1))
15525 {
15526 switch (code)
15527 {
15528 case EQ_EXPR:
15529 case ORDERED_EXPR:
15530 result = 0;
15531 break;
15532
15533 case NE_EXPR:
15534 case UNORDERED_EXPR:
15535 case UNLT_EXPR:
15536 case UNLE_EXPR:
15537 case UNGT_EXPR:
15538 case UNGE_EXPR:
15539 case UNEQ_EXPR:
15540 result = 1;
15541 break;
15542
15543 case LT_EXPR:
15544 case LE_EXPR:
15545 case GT_EXPR:
15546 case GE_EXPR:
15547 case LTGT_EXPR:
15548 if (flag_trapping_math)
15549 return NULL_TREE;
15550 result = 0;
15551 break;
15552
15553 default:
15554 gcc_unreachable ();
15555 }
15556
15557 return constant_boolean_node (result, type);
15558 }
15559
15560 return constant_boolean_node (real_compare (code, c0, c1), type);
15561 }
15562
15563 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15564 {
15565 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15566 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15567 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15568 }
15569
15570 /* Handle equality/inequality of complex constants. */
15571 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15572 {
15573 tree rcond = fold_relational_const (code, type,
15574 TREE_REALPART (op0),
15575 TREE_REALPART (op1));
15576 tree icond = fold_relational_const (code, type,
15577 TREE_IMAGPART (op0),
15578 TREE_IMAGPART (op1));
15579 if (code == EQ_EXPR)
15580 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15581 else if (code == NE_EXPR)
15582 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15583 else
15584 return NULL_TREE;
15585 }
15586
15587 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15588
15589 To compute GT, swap the arguments and do LT.
15590 To compute GE, do LT and invert the result.
15591 To compute LE, swap the arguments, do LT and invert the result.
15592 To compute NE, do EQ and invert the result.
15593
15594 Therefore, the code below must handle only EQ and LT. */
15595
15596 if (code == LE_EXPR || code == GT_EXPR)
15597 {
15598 tree tem = op0;
15599 op0 = op1;
15600 op1 = tem;
15601 code = swap_tree_comparison (code);
15602 }
15603
15604 /* Note that it is safe to invert for real values here because we
15605 have already handled the one case where it matters. */
15606
15607 invert = 0;
15608 if (code == NE_EXPR || code == GE_EXPR)
15609 {
15610 invert = 1;
15611 code = invert_tree_comparison (code, false);
15612 }
15613
15614 /* Compute a result for LT or EQ if args permit;
15615 otherwise return NULL_TREE. */
15616 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15617 {
15618 if (code == EQ_EXPR)
15619 result = tree_int_cst_equal (op0, op1);
15620 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15621 result = INT_CST_LT_UNSIGNED (op0, op1);
15622 else
15623 result = INT_CST_LT (op0, op1);
15624 }
15625 else
15626 return NULL_TREE;
15627
15628 if (invert)
15629 result ^= 1;
15630 return constant_boolean_node (result, type);
15631 }
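/* (Editorial sketch, not part of GCC.)  The NaN table above mirrors
   IEEE semantics, which a standalone C99 program can confirm:  */
#if 0
#include <math.h>
#include <stdio.h>
int
main (void)
{
  double n = NAN;
  /* Prints "0 1 0": EQ is false, NE is true, and LT is false --
     though the folder leaves LT alone under -ftrapping-math,
     since the comparison would raise an invalid-operand exception.  */
  printf ("%d %d %d\n", n == n, n != n, n < n);
  return 0;
}
#endif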
15632
15633 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15634 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15635 itself. */
15636
15637 tree
15638 fold_build_cleanup_point_expr (tree type, tree expr)
15639 {
15640 /* If the expression does not have side effects then we don't have to wrap
15641 it with a cleanup point expression. */
15642 if (!TREE_SIDE_EFFECTS (expr))
15643 return expr;
15644
15645 /* If the expression is a RETURN_EXPR, check whether its operand (or,
15646 when that operand is a MODIFY_EXPR, the right-hand side of that
15647 MODIFY_EXPR) has side effects.  If not, there is no need to wrap
15648 the expression in a cleanup point expression.  Note we don't check
15649 the left-hand side of the MODIFY_EXPR, as it is always the return decl. */
15650 if (TREE_CODE (expr) == RETURN_EXPR)
15651 {
15652 tree op = TREE_OPERAND (expr, 0);
15653 if (!op || !TREE_SIDE_EFFECTS (op))
15654 return expr;
15655 op = TREE_OPERAND (op, 1);
15656 if (!TREE_SIDE_EFFECTS (op))
15657 return expr;
15658 }
15659
15660 return build1 (CLEANUP_POINT_EXPR, type, expr);
15661 }
15662
15663 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15664 of an indirection through OP0, or NULL_TREE if no simplification is
15665 possible. */
15666
15667 tree
15668 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15669 {
15670 tree sub = op0;
15671 tree subtype;
15672
15673 STRIP_NOPS (sub);
15674 subtype = TREE_TYPE (sub);
15675 if (!POINTER_TYPE_P (subtype))
15676 return NULL_TREE;
15677
15678 if (TREE_CODE (sub) == ADDR_EXPR)
15679 {
15680 tree op = TREE_OPERAND (sub, 0);
15681 tree optype = TREE_TYPE (op);
15682 /* *&CONST_DECL folds to the value of the const decl. */
15683 if (TREE_CODE (op) == CONST_DECL)
15684 return DECL_INITIAL (op);
15685 /* *&p => p; make sure to handle *&"str"[cst] here. */
15686 if (type == optype)
15687 {
15688 tree fop = fold_read_from_constant_string (op);
15689 if (fop)
15690 return fop;
15691 else
15692 return op;
15693 }
15694 /* *(foo *)&fooarray => fooarray[0] */
15695 else if (TREE_CODE (optype) == ARRAY_TYPE
15696 && type == TREE_TYPE (optype)
15697 && (!in_gimple_form
15698 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15699 {
15700 tree type_domain = TYPE_DOMAIN (optype);
15701 tree min_val = size_zero_node;
15702 if (type_domain && TYPE_MIN_VALUE (type_domain))
15703 min_val = TYPE_MIN_VALUE (type_domain);
15704 if (in_gimple_form
15705 && TREE_CODE (min_val) != INTEGER_CST)
15706 return NULL_TREE;
15707 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15708 NULL_TREE, NULL_TREE);
15709 }
15710 /* *(foo *)&complexfoo => __real__ complexfoo */
15711 else if (TREE_CODE (optype) == COMPLEX_TYPE
15712 && type == TREE_TYPE (optype))
15713 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15714 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15715 else if (TREE_CODE (optype) == VECTOR_TYPE
15716 && type == TREE_TYPE (optype))
15717 {
15718 tree part_width = TYPE_SIZE (type);
15719 tree index = bitsize_int (0);
15720 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15721 }
15722 }
15723
15724 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15725 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15726 {
15727 tree op00 = TREE_OPERAND (sub, 0);
15728 tree op01 = TREE_OPERAND (sub, 1);
15729
15730 STRIP_NOPS (op00);
15731 if (TREE_CODE (op00) == ADDR_EXPR)
15732 {
15733 tree op00type;
15734 op00 = TREE_OPERAND (op00, 0);
15735 op00type = TREE_TYPE (op00);
15736
15737 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15738 if (TREE_CODE (op00type) == VECTOR_TYPE
15739 && type == TREE_TYPE (op00type))
15740 {
15741 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15742 tree part_width = TYPE_SIZE (type);
15743 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15744 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15745 tree index = bitsize_int (indexi);
15746
15747 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15748 return fold_build3_loc (loc,
15749 BIT_FIELD_REF, type, op00,
15750 part_width, index);
15751
15752 }
15753 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15754 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15755 && type == TREE_TYPE (op00type))
15756 {
15757 tree size = TYPE_SIZE_UNIT (type);
15758 if (tree_int_cst_equal (size, op01))
15759 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15760 }
15761 /* ((foo *)&fooarray)[1] => fooarray[1] */
15762 else if (TREE_CODE (op00type) == ARRAY_TYPE
15763 && type == TREE_TYPE (op00type))
15764 {
15765 tree type_domain = TYPE_DOMAIN (op00type);
15766 tree min_val = size_zero_node;
15767 if (type_domain && TYPE_MIN_VALUE (type_domain))
15768 min_val = TYPE_MIN_VALUE (type_domain);
15769 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15770 TYPE_SIZE_UNIT (type));
15771 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15772 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15773 NULL_TREE, NULL_TREE);
15774 }
15775 }
15776 }
15777
15778 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15779 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15780 && type == TREE_TYPE (TREE_TYPE (subtype))
15781 && (!in_gimple_form
15782 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15783 {
15784 tree type_domain;
15785 tree min_val = size_zero_node;
15786 sub = build_fold_indirect_ref_loc (loc, sub);
15787 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15788 if (type_domain && TYPE_MIN_VALUE (type_domain))
15789 min_val = TYPE_MIN_VALUE (type_domain);
15790 if (in_gimple_form
15791 && TREE_CODE (min_val) != INTEGER_CST)
15792 return NULL_TREE;
15793 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15794 NULL_TREE);
15795 }
15796
15797 return NULL_TREE;
15798 }
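/* (Editorial source-level view of the cases handled above.)

     double _Complex c;
     *(double *) &c      =>  __real__ c
     ((double *) &c)[1]  =>  __imag__ c

     int a[4];
     *(int *) &a         =>  a[0]
     ((int *) &a)[1]     =>  a[1]
*/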
15799
15800 /* Builds an expression for an indirection through T, simplifying some
15801 cases. */
15802
15803 tree
15804 build_fold_indirect_ref_loc (location_t loc, tree t)
15805 {
15806 tree type = TREE_TYPE (TREE_TYPE (t));
15807 tree sub = fold_indirect_ref_1 (loc, type, t);
15808
15809 if (sub)
15810 return sub;
15811
15812 return build1_loc (loc, INDIRECT_REF, type, t);
15813 }
15814
15815 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15816
15817 tree
15818 fold_indirect_ref_loc (location_t loc, tree t)
15819 {
15820 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15821
15822 if (sub)
15823 return sub;
15824 else
15825 return t;
15826 }
15827
15828 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15829 whose result is ignored. The type of the returned tree need not be
15830 the same as the original expression. */
15831
15832 tree
15833 fold_ignored_result (tree t)
15834 {
15835 if (!TREE_SIDE_EFFECTS (t))
15836 return integer_zero_node;
15837
15838 for (;;)
15839 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15840 {
15841 case tcc_unary:
15842 t = TREE_OPERAND (t, 0);
15843 break;
15844
15845 case tcc_binary:
15846 case tcc_comparison:
15847 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15848 t = TREE_OPERAND (t, 0);
15849 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15850 t = TREE_OPERAND (t, 1);
15851 else
15852 return t;
15853 break;
15854
15855 case tcc_expression:
15856 switch (TREE_CODE (t))
15857 {
15858 case COMPOUND_EXPR:
15859 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15860 return t;
15861 t = TREE_OPERAND (t, 0);
15862 break;
15863
15864 case COND_EXPR:
15865 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15866 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15867 return t;
15868 t = TREE_OPERAND (t, 0);
15869 break;
15870
15871 default:
15872 return t;
15873 }
15874 break;
15875
15876 default:
15877 return t;
15878 }
15879 }
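/* (Editorial examples.)  With the result ignored, `x + 1' strips to
   integer_zero_node (no side effects at all) and `f () + x' strips to
   `f ()'; a COND_EXPR is kept whole whenever either arm has side
   effects, since it cannot be simplified without evaluating the
   condition.  */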
15880
15881 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15882 This can only be applied to objects of a sizetype. */
15883
15884 tree
15885 round_up_loc (location_t loc, tree value, int divisor)
15886 {
15887 tree div = NULL_TREE;
15888
15889 gcc_assert (divisor > 0);
15890 if (divisor == 1)
15891 return value;
15892
15893 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15894 have to do anything.  Only do this when VALUE is not a constant,
15895 because for a constant this check is more expensive than simply
15896 doing the rounding. */
15897 if (TREE_CODE (value) != INTEGER_CST)
15898 {
15899 div = build_int_cst (TREE_TYPE (value), divisor);
15900
15901 if (multiple_of_p (TREE_TYPE (value), value, div))
15902 return value;
15903 }
15904
15905 /* If divisor is a power of two, simplify this to bit manipulation. */
15906 if (divisor == (divisor & -divisor))
15907 {
15908 if (TREE_CODE (value) == INTEGER_CST)
15909 {
15910 double_int val = tree_to_double_int (value);
15911 bool overflow_p;
15912
15913 if ((val.low & (divisor - 1)) == 0)
15914 return value;
15915
15916 overflow_p = TREE_OVERFLOW (value);
15917 val.low &= ~(divisor - 1);
15918 val.low += divisor;
15919 if (val.low == 0)
15920 {
15921 val.high++;
15922 if (val.high == 0)
15923 overflow_p = true;
15924 }
15925
15926 return force_fit_type_double (TREE_TYPE (value), val,
15927 -1, overflow_p);
15928 }
15929 else
15930 {
15931 tree t;
15932
15933 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15934 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15935 t = build_int_cst (TREE_TYPE (value), -divisor);
15936 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15937 }
15938 }
15939 else
15940 {
15941 if (!div)
15942 div = build_int_cst (TREE_TYPE (value), divisor);
15943 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15944 value = size_binop_loc (loc, MULT_EXPR, value, div);
15945 }
15946
15947 return value;
15948 }
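/* (Editorial sketch, not part of GCC.)  In plain host arithmetic the
   power-of-two fast path above amounts to a single add-and-mask; the
   real code builds the equivalent trees and tracks overflow.  */
#if 0
static unsigned HOST_WIDE_INT
round_up_pow2 (unsigned HOST_WIDE_INT value, unsigned HOST_WIDE_INT divisor)
{
  /* E.g. rounding 13 up to a multiple of 8: (13 + 7) & -8 == 16.  */
  return (value + divisor - 1) & -divisor;
}
#endif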
15949
15950 /* Likewise, but round down. */
15951
15952 tree
15953 round_down_loc (location_t loc, tree value, int divisor)
15954 {
15955 tree div = NULL_TREE;
15956
15957 gcc_assert (divisor > 0);
15958 if (divisor == 1)
15959 return value;
15960
15961 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
15962 have to do anything.  Only do this when VALUE is not a constant,
15963 because for a constant this check is more expensive than simply
15964 doing the rounding. */
15965 if (TREE_CODE (value) != INTEGER_CST)
15966 {
15967 div = build_int_cst (TREE_TYPE (value), divisor);
15968
15969 if (multiple_of_p (TREE_TYPE (value), value, div))
15970 return value;
15971 }
15972
15973 /* If divisor is a power of two, simplify this to bit manipulation. */
15974 if (divisor == (divisor & -divisor))
15975 {
15976 tree t;
15977
15978 t = build_int_cst (TREE_TYPE (value), -divisor);
15979 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15980 }
15981 else
15982 {
15983 if (!div)
15984 div = build_int_cst (TREE_TYPE (value), divisor);
15985 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15986 value = size_binop_loc (loc, MULT_EXPR, value, div);
15987 }
15988
15989 return value;
15990 }
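/* (Editorial note.)  For the power-of-two case, rounding down needs no
   addition at all: e.g. 13 & -8 == 8.  */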
15991
15992 /* Returns a pointer to the base of the object addressed by EXP and
15993 extracts information about the offset of the access, storing it
15994 in *PBITPOS and *POFFSET. */
15995
15996 static tree
15997 split_address_to_core_and_offset (tree exp,
15998 HOST_WIDE_INT *pbitpos, tree *poffset)
15999 {
16000 tree core;
16001 enum machine_mode mode;
16002 int unsignedp, volatilep;
16003 HOST_WIDE_INT bitsize;
16004 location_t loc = EXPR_LOCATION (exp);
16005
16006 if (TREE_CODE (exp) == ADDR_EXPR)
16007 {
16008 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16009 poffset, &mode, &unsignedp, &volatilep,
16010 false);
16011 core = build_fold_addr_expr_loc (loc, core);
16012 }
16013 else
16014 {
16015 core = exp;
16016 *pbitpos = 0;
16017 *poffset = NULL_TREE;
16018 }
16019
16020 return core;
16021 }
16022
16023 /* Returns true if the addresses of E1 and E2 differ by a constant, false
16024 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16025
16026 bool
16027 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16028 {
16029 tree core1, core2;
16030 HOST_WIDE_INT bitpos1, bitpos2;
16031 tree toffset1, toffset2, tdiff, type;
16032
16033 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16034 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16035
16036 if (bitpos1 % BITS_PER_UNIT != 0
16037 || bitpos2 % BITS_PER_UNIT != 0
16038 || !operand_equal_p (core1, core2, 0))
16039 return false;
16040
16041 if (toffset1 && toffset2)
16042 {
16043 type = TREE_TYPE (toffset1);
16044 if (type != TREE_TYPE (toffset2))
16045 toffset2 = fold_convert (type, toffset2);
16046
16047 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16048 if (!cst_and_fits_in_hwi (tdiff))
16049 return false;
16050
16051 *diff = int_cst_value (tdiff);
16052 }
16053 else if (toffset1 || toffset2)
16054 {
16055 /* If only one of the offsets is non-constant, the difference cannot
16056 be a constant. */
16057 return false;
16058 }
16059 else
16060 *diff = 0;
16061
16062 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16063 return true;
16064 }
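/* (Editorial example.)  For `int a[16]', the addresses &a[10] and
   &a[4] share the core &a and differ only in constant bit positions,
   so *DIFF is set to 6 * sizeof (int).  With a variable index, as in
   &a[i] versus &a[4], one offset is non-constant and the function
   returns false.  */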
16065
16066 /* Simplify the floating point expression EXP when the sign of the
16067 result is not significant. Return NULL_TREE if no simplification
16068 is possible. */
16069
16070 tree
16071 fold_strip_sign_ops (tree exp)
16072 {
16073 tree arg0, arg1;
16074 location_t loc = EXPR_LOCATION (exp);
16075
16076 switch (TREE_CODE (exp))
16077 {
16078 case ABS_EXPR:
16079 case NEGATE_EXPR:
16080 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16081 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16082
16083 case MULT_EXPR:
16084 case RDIV_EXPR:
16085 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16086 return NULL_TREE;
16087 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16088 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16089 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16090 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16091 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16092 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16093 break;
16094
16095 case COMPOUND_EXPR:
16096 arg0 = TREE_OPERAND (exp, 0);
16097 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16098 if (arg1)
16099 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16100 break;
16101
16102 case COND_EXPR:
16103 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16104 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16105 if (arg0 || arg1)
16106 return fold_build3_loc (loc,
16107 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16108 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16109 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16110 break;
16111
16112 case CALL_EXPR:
16113 {
16114 const enum built_in_function fcode = builtin_mathfn_code (exp);
16115 switch (fcode)
16116 {
16117 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16118 /* Strip copysign function call, return the 1st argument. */
16119 arg0 = CALL_EXPR_ARG (exp, 0);
16120 arg1 = CALL_EXPR_ARG (exp, 1);
16121 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16122
16123 default:
16124 /* Strip sign ops from the argument of "odd" math functions. */
16125 if (negate_mathfn_p (fcode))
16126 {
16127 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16128 if (arg0)
16129 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16130 }
16131 break;
16132 }
16133 }
16134 break;
16135
16136 default:
16137 break;
16138 }
16139 return NULL_TREE;
16140 }
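/* (Editorial examples.)  When the caller discards the result's sign,
   e.g. inside fabs (), the function above enables folds such as:

     -x * y           =>  x * y    (unless sign-dependent rounding)
     copysign (x, y)  =>  x        (side effects of y are preserved)
     sin (-x)         =>  sin (x)  (sin is an odd function)
*/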