1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide and size_binop.
31
32 fold takes a tree as argument and returns a simplified tree.
33
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
37
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
40
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "realmpfr.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "target.h"
56 #include "diagnostic-core.h"
57 #include "intl.h"
58 #include "ggc.h"
59 #include "hashtab.h"
60 #include "langhooks.h"
61 #include "md5.h"
62 #include "gimple.h"
63 #include "tree-flow.h"
64
65 /* Nonzero if we are folding constants inside an initializer; zero
66 otherwise. */
67 int folding_initializer = 0;
68
69 /* The following constants represent a bit based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
73 COMPCODE_FALSE = 0,
74 COMPCODE_LT = 1,
75 COMPCODE_EQ = 2,
76 COMPCODE_LE = 3,
77 COMPCODE_GT = 4,
78 COMPCODE_LTGT = 5,
79 COMPCODE_GE = 6,
80 COMPCODE_ORD = 7,
81 COMPCODE_UNORD = 8,
82 COMPCODE_UNLT = 9,
83 COMPCODE_UNEQ = 10,
84 COMPCODE_UNLE = 11,
85 COMPCODE_UNGT = 12,
86 COMPCODE_NE = 13,
87 COMPCODE_UNGE = 14,
88 COMPCODE_TRUE = 15
89 };
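/* Bit 0 of the encoding means "less than", bit 1 "equal", bit 2
   "greater than" and bit 3 "unordered", so conjunction and
   disjunction of two comparisons on the same operands become bitwise
   AND and OR of their codes. For example:

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
     COMPCODE_NE == (COMPCODE_LTGT | COMPCODE_UNORD)
     (COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ

   which is why combining relational comparisons reduces to simple
   mask operations on these codes. */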
90
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
107 tree, tree, tree);
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
109 HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
111 tree *, tree *);
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
119 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
125 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
126 static tree fold_binary_op_with_conditional_arg (location_t,
127 enum tree_code, tree,
128 tree, tree,
129 tree, tree, int);
130 static tree fold_mathfn_compare (location_t,
131 enum built_in_function, enum tree_code,
132 tree, tree, tree);
133 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
135 static bool reorder_operands_p (const_tree, const_tree);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
140
141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
142 Otherwise, return LOC. */
143
144 static location_t
145 expr_location_or (tree t, location_t loc)
146 {
147 location_t tloc = EXPR_LOCATION (t);
148 return tloc != UNKNOWN_LOCATION ? tloc : loc;
149 }
150
151 /* Similar to protected_set_expr_location, but never modify X in place;
152 if the location can and needs to be set, unshare it. */
153
154 static inline tree
155 protected_set_expr_location_unshare (tree x, location_t loc)
156 {
157 if (CAN_HAVE_LOCATION_P (x)
158 && EXPR_LOCATION (x) != loc
159 && !(TREE_CODE (x) == SAVE_EXPR
160 || TREE_CODE (x) == TARGET_EXPR
161 || TREE_CODE (x) == BIND_EXPR))
162 {
163 x = copy_node (x);
164 SET_EXPR_LOCATION (x, loc);
165 }
166 return x;
167 }
168
169
170 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
171 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
172 and SUM1. Then this yields nonzero if overflow occurred during the
173 addition.
174
175 Overflow occurs if A and B have the same sign, but A and SUM differ in
176 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
177 sign. */
178 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
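/* For instance, in 8-bit arithmetic: with A = 0x70, B = 0x70 and
   SUM = 0xE0, A and B agree in sign but SUM does not, so
   ~(A ^ B) & (A ^ SUM) has the sign bit set and the macro yields
   nonzero. With A = 0x70 and B = 0x90 the signs of the addends
   differ, ~(A ^ B) has a clear sign bit, and no overflow is
   reported whatever SUM is. */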
179 \f
180 /* If ARG2 divides ARG1 with zero remainder, carries out the division
181 of type CODE and returns the quotient.
182 Otherwise returns NULL_TREE. */
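/* For instance (a sketch with illustrative values):

     div_if_zero_remainder (EXACT_DIV_EXPR,
                            build_int_cst (sizetype, 12),
                            build_int_cst (sizetype, 4))

   yields the sizetype constant 3, whereas dividing 13 by 4 the same
   way yields NULL_TREE because the remainder is nonzero. */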
183
184 tree
185 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
186 {
187 double_int quo, rem;
188 int uns;
189
190 /* The signedness of the division is taken from operand two, which
191 does the correct thing for POINTER_PLUS_EXPR, where we want
192 a signed division. */
193 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
194 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
195 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
196 uns = false;
197
198 quo = double_int_divmod (tree_to_double_int (arg1),
199 tree_to_double_int (arg2),
200 uns, code, &rem);
201
202 if (double_int_zero_p (rem))
203 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
204
205 return NULL_TREE;
206 }
207 \f
208 /* This is nonzero if we should defer warnings about undefined
209 overflow. This facility exists because these warnings are a
210 special case. The code to estimate loop iterations does not want
211 to issue any warnings, since it works with expressions which do not
212 occur in user code. Various bits of cleanup code call fold(), but
213 only use the result if it has certain characteristics (e.g., is a
214 constant); that code only wants to issue a warning if the result is
215 used. */
216
217 static int fold_deferring_overflow_warnings;
218
219 /* If a warning about undefined overflow is deferred, this is the
220 warning. Note that this may cause us to turn two warnings into
221 one, but that is fine since it is sufficient to only give one
222 warning per expression. */
223
224 static const char* fold_deferred_overflow_warning;
225
226 /* If a warning about undefined overflow is deferred, this is the
227 level at which the warning should be emitted. */
228
229 static enum warn_strict_overflow_code fold_deferred_overflow_code;
230
231 /* Start deferring overflow warnings. We could use a stack here to
232 permit nested calls, but at present it is not necessary. */
233
234 void
235 fold_defer_overflow_warnings (void)
236 {
237 ++fold_deferring_overflow_warnings;
238 }
239
240 /* Stop deferring overflow warnings. If there is a pending warning,
241 and ISSUE is true, then issue the warning if appropriate. STMT is
242 the statement with which the warning should be associated (used for
243 location information); STMT may be NULL. CODE is the level of the
244 warning--a warn_strict_overflow_code value. This function will use
245 the smaller of CODE and the deferred code when deciding whether to
246 issue the warning. CODE may be zero to mean to always use the
247 deferred code. */
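/* A typical pairing looks like this (a sketch, not copied from a
   real caller):

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt,
                                     WARN_STRICT_OVERFLOW_MISC);

   so the deferred warning is only emitted when the folded result is
   actually used. */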
248
249 void
250 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
251 {
252 const char *warnmsg;
253 location_t locus;
254
255 gcc_assert (fold_deferring_overflow_warnings > 0);
256 --fold_deferring_overflow_warnings;
257 if (fold_deferring_overflow_warnings > 0)
258 {
259 if (fold_deferred_overflow_warning != NULL
260 && code != 0
261 && code < (int) fold_deferred_overflow_code)
262 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
263 return;
264 }
265
266 warnmsg = fold_deferred_overflow_warning;
267 fold_deferred_overflow_warning = NULL;
268
269 if (!issue || warnmsg == NULL)
270 return;
271
272 if (gimple_no_warning_p (stmt))
273 return;
274
275 /* Use the smallest code level when deciding to issue the
276 warning. */
277 if (code == 0 || code > (int) fold_deferred_overflow_code)
278 code = fold_deferred_overflow_code;
279
280 if (!issue_strict_overflow_warning (code))
281 return;
282
283 if (stmt == NULL)
284 locus = input_location;
285 else
286 locus = gimple_location (stmt);
287 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
288 }
289
290 /* Stop deferring overflow warnings, ignoring any deferred
291 warnings. */
292
293 void
294 fold_undefer_and_ignore_overflow_warnings (void)
295 {
296 fold_undefer_overflow_warnings (false, NULL, 0);
297 }
298
299 /* Whether we are deferring overflow warnings. */
300
301 bool
302 fold_deferring_overflow_warnings_p (void)
303 {
304 return fold_deferring_overflow_warnings > 0;
305 }
306
307 /* This is called when we fold something based on the fact that signed
308 overflow is undefined. */
309
310 static void
311 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
312 {
313 if (fold_deferring_overflow_warnings > 0)
314 {
315 if (fold_deferred_overflow_warning == NULL
316 || wc < fold_deferred_overflow_code)
317 {
318 fold_deferred_overflow_warning = gmsgid;
319 fold_deferred_overflow_code = wc;
320 }
321 }
322 else if (issue_strict_overflow_warning (wc))
323 warning (OPT_Wstrict_overflow, gmsgid);
324 }
325 \f
326 /* Return true if the built-in mathematical function specified by CODE
327 is odd, i.e. -f(x) == f(-x). */
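/* For example, sin is odd: sin(-x) == -sin(x), so a negation can be
   pushed into the argument of the call. rint and friends are odd
   only when -frounding-math is off, since e.g. rounding toward
   +infinity is not symmetric about zero. */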
328
329 static bool
330 negate_mathfn_p (enum built_in_function code)
331 {
332 switch (code)
333 {
334 CASE_FLT_FN (BUILT_IN_ASIN):
335 CASE_FLT_FN (BUILT_IN_ASINH):
336 CASE_FLT_FN (BUILT_IN_ATAN):
337 CASE_FLT_FN (BUILT_IN_ATANH):
338 CASE_FLT_FN (BUILT_IN_CASIN):
339 CASE_FLT_FN (BUILT_IN_CASINH):
340 CASE_FLT_FN (BUILT_IN_CATAN):
341 CASE_FLT_FN (BUILT_IN_CATANH):
342 CASE_FLT_FN (BUILT_IN_CBRT):
343 CASE_FLT_FN (BUILT_IN_CPROJ):
344 CASE_FLT_FN (BUILT_IN_CSIN):
345 CASE_FLT_FN (BUILT_IN_CSINH):
346 CASE_FLT_FN (BUILT_IN_CTAN):
347 CASE_FLT_FN (BUILT_IN_CTANH):
348 CASE_FLT_FN (BUILT_IN_ERF):
349 CASE_FLT_FN (BUILT_IN_LLROUND):
350 CASE_FLT_FN (BUILT_IN_LROUND):
351 CASE_FLT_FN (BUILT_IN_ROUND):
352 CASE_FLT_FN (BUILT_IN_SIN):
353 CASE_FLT_FN (BUILT_IN_SINH):
354 CASE_FLT_FN (BUILT_IN_TAN):
355 CASE_FLT_FN (BUILT_IN_TANH):
356 CASE_FLT_FN (BUILT_IN_TRUNC):
357 return true;
358
359 CASE_FLT_FN (BUILT_IN_LLRINT):
360 CASE_FLT_FN (BUILT_IN_LRINT):
361 CASE_FLT_FN (BUILT_IN_NEARBYINT):
362 CASE_FLT_FN (BUILT_IN_RINT):
363 return !flag_rounding_math;
364
365 default:
366 break;
367 }
368 return false;
369 }
370
371 /* Check whether we may negate an integer constant T without causing
372 overflow. */
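/* For a 32-bit signed int the only problem value is INT_MIN
   (0x80000000): its negation is not representable. Accordingly, the
   test below checks whether T has exactly the sign bit of its
   precision set. */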
373
374 bool
375 may_negate_without_overflow_p (const_tree t)
376 {
377 unsigned HOST_WIDE_INT val;
378 unsigned int prec;
379 tree type;
380
381 gcc_assert (TREE_CODE (t) == INTEGER_CST);
382
383 type = TREE_TYPE (t);
384 if (TYPE_UNSIGNED (type))
385 return false;
386
387 prec = TYPE_PRECISION (type);
388 if (prec > HOST_BITS_PER_WIDE_INT)
389 {
390 if (TREE_INT_CST_LOW (t) != 0)
391 return true;
392 prec -= HOST_BITS_PER_WIDE_INT;
393 val = TREE_INT_CST_HIGH (t);
394 }
395 else
396 val = TREE_INT_CST_LOW (t);
397 if (prec < HOST_BITS_PER_WIDE_INT)
398 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
399 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
400 }
401
402 /* Determine whether an expression T can be cheaply negated using
403 the function negate_expr without introducing undefined overflow. */
404
405 static bool
406 negate_expr_p (tree t)
407 {
408 tree type;
409
410 if (t == 0)
411 return false;
412
413 type = TREE_TYPE (t);
414
415 STRIP_SIGN_NOPS (t);
416 switch (TREE_CODE (t))
417 {
418 case INTEGER_CST:
419 if (TYPE_OVERFLOW_WRAPS (type))
420 return true;
421
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t);
424 case BIT_NOT_EXPR:
425 return (INTEGRAL_TYPE_P (type)
426 && TYPE_OVERFLOW_WRAPS (type));
427
428 case FIXED_CST:
429 case NEGATE_EXPR:
430 return true;
431
432 case REAL_CST:
433 /* We want to canonicalize to positive real constants. Pretend
434 that only negative ones can be easily negated. */
435 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
436
437 case COMPLEX_CST:
438 return negate_expr_p (TREE_REALPART (t))
439 && negate_expr_p (TREE_IMAGPART (t));
440
441 case COMPLEX_EXPR:
442 return negate_expr_p (TREE_OPERAND (t, 0))
443 && negate_expr_p (TREE_OPERAND (t, 1));
444
445 case CONJ_EXPR:
446 return negate_expr_p (TREE_OPERAND (t, 0));
447
448 case PLUS_EXPR:
449 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
450 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
451 return false;
452 /* -(A + B) -> (-B) - A. */
453 if (negate_expr_p (TREE_OPERAND (t, 1))
454 && reorder_operands_p (TREE_OPERAND (t, 0),
455 TREE_OPERAND (t, 1)))
456 return true;
457 /* -(A + B) -> (-A) - B. */
458 return negate_expr_p (TREE_OPERAND (t, 0));
459
460 case MINUS_EXPR:
461 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
462 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
463 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
464 && reorder_operands_p (TREE_OPERAND (t, 0),
465 TREE_OPERAND (t, 1));
466
467 case MULT_EXPR:
468 if (TYPE_UNSIGNED (TREE_TYPE (t)))
469 break;
470
471 /* Fall through. */
472
473 case RDIV_EXPR:
474 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
475 return negate_expr_p (TREE_OPERAND (t, 1))
476 || negate_expr_p (TREE_OPERAND (t, 0));
477 break;
478
479 case TRUNC_DIV_EXPR:
480 case ROUND_DIV_EXPR:
481 case FLOOR_DIV_EXPR:
482 case CEIL_DIV_EXPR:
483 case EXACT_DIV_EXPR:
484 /* In general we can't negate A / B, because if A is INT_MIN and
485 B is 1, we may turn this into INT_MIN / -1 which is undefined
486 and actually traps on some architectures. But if overflow is
487 undefined, we can negate, because - (INT_MIN / 1) is an
488 overflow. */
489 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
490 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
491 break;
492 return negate_expr_p (TREE_OPERAND (t, 1))
493 || negate_expr_p (TREE_OPERAND (t, 0));
494
495 case NOP_EXPR:
496 /* Negate -((double)float) as (double)(-float). */
497 if (TREE_CODE (type) == REAL_TYPE)
498 {
499 tree tem = strip_float_extensions (t);
500 if (tem != t)
501 return negate_expr_p (tem);
502 }
503 break;
504
505 case CALL_EXPR:
506 /* Negate -f(x) as f(-x). */
507 if (negate_mathfn_p (builtin_mathfn_code (t)))
508 return negate_expr_p (CALL_EXPR_ARG (t, 0));
509 break;
510
511 case RSHIFT_EXPR:
512 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
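      /* For 32-bit X, (int) X >> 31 is 0 or -1 according to the sign
         bit, and its negation, 0 or 1, is exactly (unsigned) X >> 31. */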
513 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
514 {
515 tree op1 = TREE_OPERAND (t, 1);
516 if (TREE_INT_CST_HIGH (op1) == 0
517 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
518 == TREE_INT_CST_LOW (op1))
519 return true;
520 }
521 break;
522
523 default:
524 break;
525 }
526 return false;
527 }
528
529 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
530 simplification is possible.
531 If negate_expr_p would return true for T, NULL_TREE will never be
532 returned. */
533
534 static tree
535 fold_negate_expr (location_t loc, tree t)
536 {
537 tree type = TREE_TYPE (t);
538 tree tem;
539
540 switch (TREE_CODE (t))
541 {
542 /* Convert - (~A) to A + 1. */
543 case BIT_NOT_EXPR:
544 if (INTEGRAL_TYPE_P (type))
545 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
546 build_int_cst (type, 1));
547 break;
548
549 case INTEGER_CST:
550 tem = fold_negate_const (t, type);
551 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
552 || !TYPE_OVERFLOW_TRAPS (type))
553 return tem;
554 break;
555
556 case REAL_CST:
557 tem = fold_negate_const (t, type);
558 /* Two's complement FP formats, such as c4x, may overflow. */
559 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
560 return tem;
561 break;
562
563 case FIXED_CST:
564 tem = fold_negate_const (t, type);
565 return tem;
566
567 case COMPLEX_CST:
568 {
569 tree rpart = negate_expr (TREE_REALPART (t));
570 tree ipart = negate_expr (TREE_IMAGPART (t));
571
572 if ((TREE_CODE (rpart) == REAL_CST
573 && TREE_CODE (ipart) == REAL_CST)
574 || (TREE_CODE (rpart) == INTEGER_CST
575 && TREE_CODE (ipart) == INTEGER_CST))
576 return build_complex (type, rpart, ipart);
577 }
578 break;
579
580 case COMPLEX_EXPR:
581 if (negate_expr_p (t))
582 return fold_build2_loc (loc, COMPLEX_EXPR, type,
583 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
584 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
585 break;
586
587 case CONJ_EXPR:
588 if (negate_expr_p (t))
589 return fold_build1_loc (loc, CONJ_EXPR, type,
590 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
591 break;
592
593 case NEGATE_EXPR:
594 return TREE_OPERAND (t, 0);
595
596 case PLUS_EXPR:
597 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
599 {
600 /* -(A + B) -> (-B) - A. */
601 if (negate_expr_p (TREE_OPERAND (t, 1))
602 && reorder_operands_p (TREE_OPERAND (t, 0),
603 TREE_OPERAND (t, 1)))
604 {
605 tem = negate_expr (TREE_OPERAND (t, 1));
606 return fold_build2_loc (loc, MINUS_EXPR, type,
607 tem, TREE_OPERAND (t, 0));
608 }
609
610 /* -(A + B) -> (-A) - B. */
611 if (negate_expr_p (TREE_OPERAND (t, 0)))
612 {
613 tem = negate_expr (TREE_OPERAND (t, 0));
614 return fold_build2_loc (loc, MINUS_EXPR, type,
615 tem, TREE_OPERAND (t, 1));
616 }
617 }
618 break;
619
620 case MINUS_EXPR:
621 /* - (A - B) -> B - A */
622 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
623 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
624 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
625 return fold_build2_loc (loc, MINUS_EXPR, type,
626 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
627 break;
628
629 case MULT_EXPR:
630 if (TYPE_UNSIGNED (type))
631 break;
632
633 /* Fall through. */
634
635 case RDIV_EXPR:
636 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
637 {
638 tem = TREE_OPERAND (t, 1);
639 if (negate_expr_p (tem))
640 return fold_build2_loc (loc, TREE_CODE (t), type,
641 TREE_OPERAND (t, 0), negate_expr (tem));
642 tem = TREE_OPERAND (t, 0);
643 if (negate_expr_p (tem))
644 return fold_build2_loc (loc, TREE_CODE (t), type,
645 negate_expr (tem), TREE_OPERAND (t, 1));
646 }
647 break;
648
649 case TRUNC_DIV_EXPR:
650 case ROUND_DIV_EXPR:
651 case FLOOR_DIV_EXPR:
652 case CEIL_DIV_EXPR:
653 case EXACT_DIV_EXPR:
654 /* In general we can't negate A / B, because if A is INT_MIN and
655 B is 1, we may turn this into INT_MIN / -1 which is undefined
656 and actually traps on some architectures. But if overflow is
657 undefined, we can negate, because - (INT_MIN / 1) is an
658 overflow. */
659 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
660 {
661 const char * const warnmsg = G_("assuming signed overflow does not "
662 "occur when negating a division");
663 tem = TREE_OPERAND (t, 1);
664 if (negate_expr_p (tem))
665 {
666 if (INTEGRAL_TYPE_P (type)
667 && (TREE_CODE (tem) != INTEGER_CST
668 || integer_onep (tem)))
669 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
670 return fold_build2_loc (loc, TREE_CODE (t), type,
671 TREE_OPERAND (t, 0), negate_expr (tem));
672 }
673 tem = TREE_OPERAND (t, 0);
674 if (negate_expr_p (tem))
675 {
676 if (INTEGRAL_TYPE_P (type)
677 && (TREE_CODE (tem) != INTEGER_CST
678 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
679 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
680 return fold_build2_loc (loc, TREE_CODE (t), type,
681 negate_expr (tem), TREE_OPERAND (t, 1));
682 }
683 }
684 break;
685
686 case NOP_EXPR:
687 /* Convert -((double)float) into (double)(-float). */
688 if (TREE_CODE (type) == REAL_TYPE)
689 {
690 tem = strip_float_extensions (t);
691 if (tem != t && negate_expr_p (tem))
692 return fold_convert_loc (loc, type, negate_expr (tem));
693 }
694 break;
695
696 case CALL_EXPR:
697 /* Negate -f(x) as f(-x). */
698 if (negate_mathfn_p (builtin_mathfn_code (t))
699 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
700 {
701 tree fndecl, arg;
702
703 fndecl = get_callee_fndecl (t);
704 arg = negate_expr (CALL_EXPR_ARG (t, 0));
705 return build_call_expr_loc (loc, fndecl, 1, arg);
706 }
707 break;
708
709 case RSHIFT_EXPR:
710 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
711 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
712 {
713 tree op1 = TREE_OPERAND (t, 1);
714 if (TREE_INT_CST_HIGH (op1) == 0
715 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
716 == TREE_INT_CST_LOW (op1))
717 {
718 tree ntype = TYPE_UNSIGNED (type)
719 ? signed_type_for (type)
720 : unsigned_type_for (type);
721 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
722 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
723 return fold_convert_loc (loc, type, temp);
724 }
725 }
726 break;
727
728 default:
729 break;
730 }
731
732 return NULL_TREE;
733 }
734
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
736 negated in a simpler way. Also allow T to be NULL_TREE, in which case
737 return NULL_TREE. */
738
739 static tree
740 negate_expr (tree t)
741 {
742 tree type, tem;
743 location_t loc;
744
745 if (t == NULL_TREE)
746 return NULL_TREE;
747
748 loc = EXPR_LOCATION (t);
749 type = TREE_TYPE (t);
750 STRIP_SIGN_NOPS (t);
751
752 tem = fold_negate_expr (loc, t);
753 if (!tem)
754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
755 return fold_convert_loc (loc, type, tem);
756 }
757 \f
758 /* Split a tree IN into constant, literal and variable parts that could be
759 combined with CODE to make IN. "constant" means an expression with
760 TREE_CONSTANT but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
765
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted, except if it is a
768 literal, for which we use *MINUS_LITP instead.
769
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead.
772
773 If IN is itself a literal or constant, return it as appropriate.
774
775 Note that we do not guarantee that any of the three values will be the
776 same type as IN, but they will have the same signedness and mode. */
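/* For example, with CODE == PLUS_EXPR, IN == A + 4 splits into
   VAR == A and *LITP == 4, while IN == A - 4 splits into VAR == A
   and *MINUS_LITP == 4. A sketch of the usual calling pattern:

     tree con, lit, minus_lit;
     tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit,
                            /*negate_p=*/0);

   after which the pieces are recombined with associate_trees. */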
777
778 static tree
779 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
780 tree *minus_litp, int negate_p)
781 {
782 tree var = 0;
783
784 *conp = 0;
785 *litp = 0;
786 *minus_litp = 0;
787
788 /* Strip any conversions that don't change the machine mode or signedness. */
789 STRIP_SIGN_NOPS (in);
790
791 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
792 || TREE_CODE (in) == FIXED_CST)
793 *litp = in;
794 else if (TREE_CODE (in) == code
795 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
796 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
797 /* We can associate addition and subtraction together (even
798 though the C standard doesn't say so) for integers because
799 the value is not affected. For reals, the value might be
800 affected, so we can't. */
801 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
802 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
803 {
804 tree op0 = TREE_OPERAND (in, 0);
805 tree op1 = TREE_OPERAND (in, 1);
806 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
807 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
808
809 /* First see if either of the operands is a literal, then a constant. */
810 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
811 || TREE_CODE (op0) == FIXED_CST)
812 *litp = op0, op0 = 0;
813 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
814 || TREE_CODE (op1) == FIXED_CST)
815 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
816
817 if (op0 != 0 && TREE_CONSTANT (op0))
818 *conp = op0, op0 = 0;
819 else if (op1 != 0 && TREE_CONSTANT (op1))
820 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
821
822 /* If we haven't dealt with either operand, this is not a case we can
823 decompose. Otherwise, VAR is either of the ones remaining, if any. */
824 if (op0 != 0 && op1 != 0)
825 var = in;
826 else if (op0 != 0)
827 var = op0;
828 else
829 var = op1, neg_var_p = neg1_p;
830
831 /* Now do any needed negations. */
832 if (neg_litp_p)
833 *minus_litp = *litp, *litp = 0;
834 if (neg_conp_p)
835 *conp = negate_expr (*conp);
836 if (neg_var_p)
837 var = negate_expr (var);
838 }
839 else if (TREE_CONSTANT (in))
840 *conp = in;
841 else
842 var = in;
843
844 if (negate_p)
845 {
846 if (*litp)
847 *minus_litp = *litp, *litp = 0;
848 else if (*minus_litp)
849 *litp = *minus_litp, *minus_litp = 0;
850 *conp = negate_expr (*conp);
851 var = negate_expr (var);
852 }
853
854 return var;
855 }
856
857 /* Re-associate trees split by the above function. T1 and T2 are
858 either expressions to associate or null. Return the new
859 expression, if any. LOC is the location of the new expression. If
860 we build an operation, do it in TYPE and with CODE. */
861
862 static tree
863 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
864 {
865 if (t1 == 0)
866 return t2;
867 else if (t2 == 0)
868 return t1;
869
870 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
871 try to fold this since we will have infinite recursion. But do
872 deal with any NEGATE_EXPRs. */
873 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
874 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
875 {
876 if (code == PLUS_EXPR)
877 {
878 if (TREE_CODE (t1) == NEGATE_EXPR)
879 return build2_loc (loc, MINUS_EXPR, type,
880 fold_convert_loc (loc, type, t2),
881 fold_convert_loc (loc, type,
882 TREE_OPERAND (t1, 0)));
883 else if (TREE_CODE (t2) == NEGATE_EXPR)
884 return build2_loc (loc, MINUS_EXPR, type,
885 fold_convert_loc (loc, type, t1),
886 fold_convert_loc (loc, type,
887 TREE_OPERAND (t2, 0)));
888 else if (integer_zerop (t2))
889 return fold_convert_loc (loc, type, t1);
890 }
891 else if (code == MINUS_EXPR)
892 {
893 if (integer_zerop (t2))
894 return fold_convert_loc (loc, type, t1);
895 }
896
897 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
898 fold_convert_loc (loc, type, t2));
899 }
900
901 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
902 fold_convert_loc (loc, type, t2));
903 }
904 \f
905 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
906 for use in int_const_binop, size_binop and size_diffop. */
907
908 static bool
909 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
910 {
911 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
912 return false;
913 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
914 return false;
915
916 switch (code)
917 {
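    /* For shifts and rotates the type of the second operand need not
       match the first, so any pair of integer types is acceptable. */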
918 case LSHIFT_EXPR:
919 case RSHIFT_EXPR:
920 case LROTATE_EXPR:
921 case RROTATE_EXPR:
922 return true;
923
924 default:
925 break;
926 }
927
928 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
929 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
930 && TYPE_MODE (type1) == TYPE_MODE (type2);
931 }
932
933
934 /* Combine two integer constants ARG1 and ARG2 under operation CODE
935 to produce a new constant. Return NULL_TREE if we don't know how
936 to evaluate CODE at compile-time. */
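/* For instance (illustrative values):

     int_const_binop (PLUS_EXPR,
                      build_int_cst (integer_type_node, 2),
                      build_int_cst (integer_type_node, 3))

   returns an INTEGER_CST with value 5. TREE_OVERFLOW is set on the
   result when signed (or sizetype) arithmetic wrapped, or when
   either input already had it set. */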
937
938 tree
939 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
940 {
941 double_int op1, op2, res, tmp;
942 tree t;
943 tree type = TREE_TYPE (arg1);
944 bool uns = TYPE_UNSIGNED (type);
945 bool is_sizetype
946 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
947 bool overflow = false;
948
949 op1 = tree_to_double_int (arg1);
950 op2 = tree_to_double_int (arg2);
951
952 switch (code)
953 {
954 case BIT_IOR_EXPR:
955 res = double_int_ior (op1, op2);
956 break;
957
958 case BIT_XOR_EXPR:
959 res = double_int_xor (op1, op2);
960 break;
961
962 case BIT_AND_EXPR:
963 res = double_int_and (op1, op2);
964 break;
965
966 case RSHIFT_EXPR:
967 res = double_int_rshift (op1, double_int_to_shwi (op2),
968 TYPE_PRECISION (type), !uns);
969 break;
970
971 case LSHIFT_EXPR:
972 /* It's unclear from the C standard whether shifts can overflow.
973 The following code ignores overflow; perhaps a C standard
974 interpretation ruling is needed. */
975 res = double_int_lshift (op1, double_int_to_shwi (op2),
976 TYPE_PRECISION (type), !uns);
977 break;
978
979 case RROTATE_EXPR:
980 res = double_int_rrotate (op1, double_int_to_shwi (op2),
981 TYPE_PRECISION (type));
982 break;
983
984 case LROTATE_EXPR:
985 res = double_int_lrotate (op1, double_int_to_shwi (op2),
986 TYPE_PRECISION (type));
987 break;
988
989 case PLUS_EXPR:
990 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
991 &res.low, &res.high);
992 break;
993
994 case MINUS_EXPR:
995 neg_double (op2.low, op2.high, &res.low, &res.high);
996 add_double (op1.low, op1.high, res.low, res.high,
997 &res.low, &res.high);
998 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
999 break;
1000
1001 case MULT_EXPR:
1002 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
1003 &res.low, &res.high);
1004 break;
1005
1006 case TRUNC_DIV_EXPR:
1007 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1008 case EXACT_DIV_EXPR:
1009 /* This is a shortcut for a common special case. */
1010 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1011 && !TREE_OVERFLOW (arg1)
1012 && !TREE_OVERFLOW (arg2)
1013 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1014 {
1015 if (code == CEIL_DIV_EXPR)
1016 op1.low += op2.low - 1;
1017
1018 res.low = op1.low / op2.low, res.high = 0;
1019 break;
1020 }
1021
1022 /* ... fall through ... */
1023
1024 case ROUND_DIV_EXPR:
1025 if (double_int_zero_p (op2))
1026 return NULL_TREE;
1027 if (double_int_one_p (op2))
1028 {
1029 res = op1;
1030 break;
1031 }
1032 if (double_int_equal_p (op1, op2)
1033 && ! double_int_zero_p (op1))
1034 {
1035 res = double_int_one;
1036 break;
1037 }
1038 overflow = div_and_round_double (code, uns,
1039 op1.low, op1.high, op2.low, op2.high,
1040 &res.low, &res.high,
1041 &tmp.low, &tmp.high);
1042 break;
1043
1044 case TRUNC_MOD_EXPR:
1045 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1046 /* This is a shortcut for a common special case. */
1047 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1048 && !TREE_OVERFLOW (arg1)
1049 && !TREE_OVERFLOW (arg2)
1050 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1051 {
1052 if (code == CEIL_MOD_EXPR)
1053 op1.low += op2.low - 1;
1054 res.low = op1.low % op2.low, res.high = 0;
1055 break;
1056 }
1057
1058 /* ... fall through ... */
1059
1060 case ROUND_MOD_EXPR:
1061 if (double_int_zero_p (op2))
1062 return NULL_TREE;
1063 overflow = div_and_round_double (code, uns,
1064 op1.low, op1.high, op2.low, op2.high,
1065 &tmp.low, &tmp.high,
1066 &res.low, &res.high);
1067 break;
1068
1069 case MIN_EXPR:
1070 res = double_int_min (op1, op2, uns);
1071 break;
1072
1073 case MAX_EXPR:
1074 res = double_int_max (op1, op2, uns);
1075 break;
1076
1077 default:
1078 return NULL_TREE;
1079 }
1080
1081 t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
1082 ((!uns || is_sizetype) && overflow)
1083 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1084
1085 return t;
1086 }
1087
1088 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1089 constant. We assume ARG1 and ARG2 have the same data type, or at least
1090 are the same kind of constant and the same machine mode. Return zero if
1091 combining the constants is not allowed in the current operating mode. */
1092
1093 static tree
1094 const_binop (enum tree_code code, tree arg1, tree arg2)
1095 {
1096 /* Sanity check for the recursive cases. */
1097 if (!arg1 || !arg2)
1098 return NULL_TREE;
1099
1100 STRIP_NOPS (arg1);
1101 STRIP_NOPS (arg2);
1102
1103 if (TREE_CODE (arg1) == INTEGER_CST)
1104 return int_const_binop (code, arg1, arg2);
1105
1106 if (TREE_CODE (arg1) == REAL_CST)
1107 {
1108 enum machine_mode mode;
1109 REAL_VALUE_TYPE d1;
1110 REAL_VALUE_TYPE d2;
1111 REAL_VALUE_TYPE value;
1112 REAL_VALUE_TYPE result;
1113 bool inexact;
1114 tree t, type;
1115
1116 /* The following codes are handled by real_arithmetic. */
1117 switch (code)
1118 {
1119 case PLUS_EXPR:
1120 case MINUS_EXPR:
1121 case MULT_EXPR:
1122 case RDIV_EXPR:
1123 case MIN_EXPR:
1124 case MAX_EXPR:
1125 break;
1126
1127 default:
1128 return NULL_TREE;
1129 }
1130
1131 d1 = TREE_REAL_CST (arg1);
1132 d2 = TREE_REAL_CST (arg2);
1133
1134 type = TREE_TYPE (arg1);
1135 mode = TYPE_MODE (type);
1136
1137 /* Don't perform the operation if we honor signaling NaNs and
1138 either operand is a NaN. */
1139 if (HONOR_SNANS (mode)
1140 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1141 return NULL_TREE;
1142
1143 /* Don't perform the operation if it would raise a division
1144 by zero exception. */
1145 if (code == RDIV_EXPR
1146 && REAL_VALUES_EQUAL (d2, dconst0)
1147 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1148 return NULL_TREE;
1149
1150 /* If either operand is a NaN, just return it. Otherwise, set up
1151 for floating-point trap; we return an overflow. */
1152 if (REAL_VALUE_ISNAN (d1))
1153 return arg1;
1154 else if (REAL_VALUE_ISNAN (d2))
1155 return arg2;
1156
1157 inexact = real_arithmetic (&value, code, &d1, &d2);
1158 real_convert (&result, mode, &value);
1159
1160 /* Don't constant fold this floating point operation if
1161 the result has overflowed and flag_trapping_math is set. */
1162 if (flag_trapping_math
1163 && MODE_HAS_INFINITIES (mode)
1164 && REAL_VALUE_ISINF (result)
1165 && !REAL_VALUE_ISINF (d1)
1166 && !REAL_VALUE_ISINF (d2))
1167 return NULL_TREE;
1168
1169 /* Don't constant fold this floating point operation if the
1170 result may depend upon the run-time rounding mode and
1171 flag_rounding_math is set, or if GCC's software emulation
1172 is unable to accurately represent the result. */
1173 if ((flag_rounding_math
1174 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1175 && (inexact || !real_identical (&result, &value)))
1176 return NULL_TREE;
1177
1178 t = build_real (type, result);
1179
1180 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1181 return t;
1182 }
1183
1184 if (TREE_CODE (arg1) == FIXED_CST)
1185 {
1186 FIXED_VALUE_TYPE f1;
1187 FIXED_VALUE_TYPE f2;
1188 FIXED_VALUE_TYPE result;
1189 tree t, type;
1190 int sat_p;
1191 bool overflow_p;
1192
1193 /* The following codes are handled by fixed_arithmetic. */
1194 switch (code)
1195 {
1196 case PLUS_EXPR:
1197 case MINUS_EXPR:
1198 case MULT_EXPR:
1199 case TRUNC_DIV_EXPR:
1200 f2 = TREE_FIXED_CST (arg2);
1201 break;
1202
1203 case LSHIFT_EXPR:
1204 case RSHIFT_EXPR:
1205 f2.data.high = TREE_INT_CST_HIGH (arg2);
1206 f2.data.low = TREE_INT_CST_LOW (arg2);
1207 f2.mode = SImode;
1208 break;
1209
1210 default:
1211 return NULL_TREE;
1212 }
1213
1214 f1 = TREE_FIXED_CST (arg1);
1215 type = TREE_TYPE (arg1);
1216 sat_p = TYPE_SATURATING (type);
1217 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1218 t = build_fixed (type, result);
1219 /* Propagate overflow flags. */
1220 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1221 TREE_OVERFLOW (t) = 1;
1222 return t;
1223 }
1224
1225 if (TREE_CODE (arg1) == COMPLEX_CST)
1226 {
1227 tree type = TREE_TYPE (arg1);
1228 tree r1 = TREE_REALPART (arg1);
1229 tree i1 = TREE_IMAGPART (arg1);
1230 tree r2 = TREE_REALPART (arg2);
1231 tree i2 = TREE_IMAGPART (arg2);
1232 tree real, imag;
1233
1234 switch (code)
1235 {
1236 case PLUS_EXPR:
1237 case MINUS_EXPR:
1238 real = const_binop (code, r1, r2);
1239 imag = const_binop (code, i1, i2);
1240 break;
1241
1242 case MULT_EXPR:
1243 if (COMPLEX_FLOAT_TYPE_P (type))
1244 return do_mpc_arg2 (arg1, arg2, type,
1245 /* do_nonfinite= */ folding_initializer,
1246 mpc_mul);
1247
1248 real = const_binop (MINUS_EXPR,
1249 const_binop (MULT_EXPR, r1, r2),
1250 const_binop (MULT_EXPR, i1, i2));
1251 imag = const_binop (PLUS_EXPR,
1252 const_binop (MULT_EXPR, r1, i2),
1253 const_binop (MULT_EXPR, i1, r2));
1254 break;
1255
1256 case RDIV_EXPR:
1257 if (COMPLEX_FLOAT_TYPE_P (type))
1258 return do_mpc_arg2 (arg1, arg2, type,
1259 /* do_nonfinite= */ folding_initializer,
1260 mpc_div);
1261 /* Fallthru ... */
1262 case TRUNC_DIV_EXPR:
1263 case CEIL_DIV_EXPR:
1264 case FLOOR_DIV_EXPR:
1265 case ROUND_DIV_EXPR:
1266 if (flag_complex_method == 0)
1267 {
1268 /* Keep this algorithm in sync with
1269 tree-complex.c:expand_complex_div_straight().
1270
1271 Expand complex division to scalars, straightforward algorithm.
1272 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1273 t = br*br + bi*bi
1274 */
1275 tree magsquared
1276 = const_binop (PLUS_EXPR,
1277 const_binop (MULT_EXPR, r2, r2),
1278 const_binop (MULT_EXPR, i2, i2));
1279 tree t1
1280 = const_binop (PLUS_EXPR,
1281 const_binop (MULT_EXPR, r1, r2),
1282 const_binop (MULT_EXPR, i1, i2));
1283 tree t2
1284 = const_binop (MINUS_EXPR,
1285 const_binop (MULT_EXPR, i1, r2),
1286 const_binop (MULT_EXPR, r1, i2));
1287
1288 real = const_binop (code, t1, magsquared);
1289 imag = const_binop (code, t2, magsquared);
1290 }
1291 else
1292 {
1293 /* Keep this algorithm in sync with
1294 tree-complex.c:expand_complex_div_wide().
1295
1296 Expand complex division to scalars, modified algorithm to minimize
1297 overflow with wide input ranges. */
1298 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1299 fold_abs_const (r2, TREE_TYPE (type)),
1300 fold_abs_const (i2, TREE_TYPE (type)));
1301
1302 if (integer_nonzerop (compare))
1303 {
1304 /* In the TRUE branch, we compute
1305 ratio = br/bi;
1306 div = (br * ratio) + bi;
1307 tr = (ar * ratio) + ai;
1308 ti = (ai * ratio) - ar;
1309 tr = tr / div;
1310 ti = ti / div; */
1311 tree ratio = const_binop (code, r2, i2);
1312 tree div = const_binop (PLUS_EXPR, i2,
1313 const_binop (MULT_EXPR, r2, ratio));
1314 real = const_binop (MULT_EXPR, r1, ratio);
1315 real = const_binop (PLUS_EXPR, real, i1);
1316 real = const_binop (code, real, div);
1317
1318 imag = const_binop (MULT_EXPR, i1, ratio);
1319 imag = const_binop (MINUS_EXPR, imag, r1);
1320 imag = const_binop (code, imag, div);
1321 }
1322 else
1323 {
1324 /* In the FALSE branch, we compute
1325 ratio = bi/br;
1326 div = (bi * ratio) + br;
1327 tr = (ai * ratio) + ar;
1328 ti = ai - (ar * ratio);
1329 tr = tr / div;
1330 ti = ti / div; */
1331 tree ratio = const_binop (code, i2, r2);
1332 tree div = const_binop (PLUS_EXPR, r2,
1333 const_binop (MULT_EXPR, i2, ratio));
1334
1335 real = const_binop (MULT_EXPR, i1, ratio);
1336 real = const_binop (PLUS_EXPR, real, r1);
1337 real = const_binop (code, real, div);
1338
1339 imag = const_binop (MULT_EXPR, r1, ratio);
1340 imag = const_binop (MINUS_EXPR, i1, imag);
1341 imag = const_binop (code, imag, div);
1342 }
1343 }
1344 break;
1345
1346 default:
1347 return NULL_TREE;
1348 }
1349
1350 if (real && imag)
1351 return build_complex (type, real, imag);
1352 }
1353
1354 if (TREE_CODE (arg1) == VECTOR_CST)
1355 {
1356 tree type = TREE_TYPE (arg1);
1357 int count = TYPE_VECTOR_SUBPARTS (type), i;
1358 tree elements1, elements2, list = NULL_TREE;
1359
1360 if (TREE_CODE (arg2) != VECTOR_CST)
1361 return NULL_TREE;
1362
1363 elements1 = TREE_VECTOR_CST_ELTS (arg1);
1364 elements2 = TREE_VECTOR_CST_ELTS (arg2);
1365
1366 for (i = 0; i < count; i++)
1367 {
1368 tree elem1, elem2, elem;
1369
1370 /* The trailing elements can be absent; treat them as 0. */
1371 if (!elements1)
1372 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1373 else
1374 {
1375 elem1 = TREE_VALUE (elements1);
1376 elements1 = TREE_CHAIN (elements1);
1377 }
1378
1379 if (!elements2)
1380 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1381 else
1382 {
1383 elem2 = TREE_VALUE (elements2);
1384 elements2 = TREE_CHAIN (elements2);
1385 }
1386
1387 elem = const_binop (code, elem1, elem2);
1388
1389 /* It is possible that const_binop cannot handle the given
1390 code and returns NULL_TREE. */
1391 if (elem == NULL_TREE)
1392 return NULL_TREE;
1393
1394 list = tree_cons (NULL_TREE, elem, list);
1395 }
1396 return build_vector (type, nreverse (list));
1397 }
1398 return NULL_TREE;
1399 }
1400
1401 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1402 indicates which particular sizetype to create. */
1403
1404 tree
1405 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1406 {
1407 return build_int_cst (sizetype_tab[(int) kind], number);
1408 }
1409 \f
1410 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1411 is a tree code. The type of the result is taken from the operands.
1412 Both must be equivalent integer types, as checked by
1413 int_binop_types_match_p. If the operands are constant, so is the result. */
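/* E.g. size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds
   directly to the sizetype constant 12 without building a
   PLUS_EXPR node. */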
1414
1415 tree
1416 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1417 {
1418 tree type = TREE_TYPE (arg0);
1419
1420 if (arg0 == error_mark_node || arg1 == error_mark_node)
1421 return error_mark_node;
1422
1423 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1424 TREE_TYPE (arg1)));
1425
1426 /* Handle the special case of two integer constants faster. */
1427 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1428 {
1429 /* And some specific cases even faster than that. */
1430 if (code == PLUS_EXPR)
1431 {
1432 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1433 return arg1;
1434 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1435 return arg0;
1436 }
1437 else if (code == MINUS_EXPR)
1438 {
1439 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1440 return arg0;
1441 }
1442 else if (code == MULT_EXPR)
1443 {
1444 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1445 return arg1;
1446 }
1447
1448 /* Handle general case of two integer constants. */
1449 return int_const_binop (code, arg0, arg1);
1450 }
1451
1452 return fold_build2_loc (loc, code, type, arg0, arg1);
1453 }
1454
1455 /* Given two values, either both of sizetype or both of bitsizetype,
1456 compute the difference between the two values. Return the value
1457 in the signed type corresponding to the type of the operands. */
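/* E.g. with sizetype operands, size_diffop (size_int (4),
   size_int (12)) yields the ssizetype constant -8: the smaller
   value is subtracted from the larger in the unsigned type, then
   the result is converted and negated, so no step overflows. */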
1458
1459 tree
1460 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1461 {
1462 tree type = TREE_TYPE (arg0);
1463 tree ctype;
1464
1465 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1466 TREE_TYPE (arg1)));
1467
1468 /* If the type is already signed, just do the simple thing. */
1469 if (!TYPE_UNSIGNED (type))
1470 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1471
1472 if (type == sizetype)
1473 ctype = ssizetype;
1474 else if (type == bitsizetype)
1475 ctype = sbitsizetype;
1476 else
1477 ctype = signed_type_for (type);
1478
1479 /* If either operand is not a constant, do the conversions to the signed
1480 type and subtract. The hardware will do the right thing with any
1481 overflow in the subtraction. */
1482 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1483 return size_binop_loc (loc, MINUS_EXPR,
1484 fold_convert_loc (loc, ctype, arg0),
1485 fold_convert_loc (loc, ctype, arg1));
1486
1487 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1488 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1489 overflow) and negate (which can't either). Special-case a result
1490 of zero while we're here. */
1491 if (tree_int_cst_equal (arg0, arg1))
1492 return build_int_cst (ctype, 0);
1493 else if (tree_int_cst_lt (arg1, arg0))
1494 return fold_convert_loc (loc, ctype,
1495 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1496 else
1497 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1498 fold_convert_loc (loc, ctype,
1499 size_binop_loc (loc,
1500 MINUS_EXPR,
1501 arg1, arg0)));
1502 }
1503 \f
1504 /* A subroutine of fold_convert_const handling conversions of an
1505 INTEGER_CST to another integer type. */
1506
1507 static tree
1508 fold_convert_const_int_from_int (tree type, const_tree arg1)
1509 {
1510 tree t;
1511
1512 /* Given an integer constant, make a new constant with the new type,
1513 appropriately sign-extended or truncated. */
1514 t = force_fit_type_double (type, tree_to_double_int (arg1),
1515 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1516 (TREE_INT_CST_HIGH (arg1) < 0
1517 && (TYPE_UNSIGNED (type)
1518 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1519 | TREE_OVERFLOW (arg1));
1520
1521 return t;
1522 }
1523
1524 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1525 to an integer type. */
1526
1527 static tree
1528 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1529 {
1530 int overflow = 0;
1531 tree t;
1532
1533 /* The following code implements the floating point to integer
1534 conversion rules required by the Java Language Specification,
1535 that IEEE NaNs are mapped to zero and values that overflow
1536 the target precision saturate, i.e. values greater than
1537 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1538 are mapped to INT_MIN. These semantics are allowed by the
1539 C and C++ standards that simply state that the behavior of
1540 FP-to-integer conversion is unspecified upon overflow. */
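/* Concretely, converting 1.0e19 to a 32-bit int under these rules
   yields INT_MAX with TREE_OVERFLOW set on the result, converting
   -1.0e19 yields INT_MIN likewise flagged, and converting a NaN
   yields 0, also flagged as an overflow. */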
1541
1542 double_int val;
1543 REAL_VALUE_TYPE r;
1544 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1545
1546 switch (code)
1547 {
1548 case FIX_TRUNC_EXPR:
1549 real_trunc (&r, VOIDmode, &x);
1550 break;
1551
1552 default:
1553 gcc_unreachable ();
1554 }
1555
1556 /* If R is NaN, return zero and show we have an overflow. */
1557 if (REAL_VALUE_ISNAN (r))
1558 {
1559 overflow = 1;
1560 val = double_int_zero;
1561 }
1562
1563 /* See if R is less than the lower bound or greater than the
1564 upper bound. */
1565
1566 if (! overflow)
1567 {
1568 tree lt = TYPE_MIN_VALUE (type);
1569 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1570 if (REAL_VALUES_LESS (r, l))
1571 {
1572 overflow = 1;
1573 val = tree_to_double_int (lt);
1574 }
1575 }
1576
1577 if (! overflow)
1578 {
1579 tree ut = TYPE_MAX_VALUE (type);
1580 if (ut)
1581 {
1582 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1583 if (REAL_VALUES_LESS (u, r))
1584 {
1585 overflow = 1;
1586 val = tree_to_double_int (ut);
1587 }
1588 }
1589 }
1590
1591 if (! overflow)
1592 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1593
1594 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1595 return t;
1596 }
1597
1598 /* A subroutine of fold_convert_const handling conversions of a
1599 FIXED_CST to an integer type. */
1600
1601 static tree
1602 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1603 {
1604 tree t;
1605 double_int temp, temp_trunc;
1606 unsigned int mode;
1607
1608 /* Right shift FIXED_CST to temp by fbit. */
1609 temp = TREE_FIXED_CST (arg1).data;
1610 mode = TREE_FIXED_CST (arg1).mode;
1611 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1612 {
1613 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1614 HOST_BITS_PER_DOUBLE_INT,
1615 SIGNED_FIXED_POINT_MODE_P (mode));
1616
1617 /* Left shift temp to temp_trunc by fbit. */
1618 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1619 HOST_BITS_PER_DOUBLE_INT,
1620 SIGNED_FIXED_POINT_MODE_P (mode));
1621 }
1622 else
1623 {
1624 temp = double_int_zero;
1625 temp_trunc = double_int_zero;
1626 }
1627
1628 /* If FIXED_CST is negative, we need to round the value toward 0:
1629 if any of the discarded fractional bits are nonzero, add 1 to temp. */
1630 if (SIGNED_FIXED_POINT_MODE_P (mode)
1631 && double_int_negative_p (temp_trunc)
1632 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1633 temp = double_int_add (temp, double_int_one);
1634
1635 /* Given a fixed-point constant, make a new constant with the new type,
1636 appropriately sign-extended or truncated. */
1637 t = force_fit_type_double (type, temp, -1,
1638 (double_int_negative_p (temp)
1639 && (TYPE_UNSIGNED (type)
1640 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1641 | TREE_OVERFLOW (arg1));
1642
1643 return t;
1644 }
1645
1646 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1647 to another floating point type. */
1648
1649 static tree
1650 fold_convert_const_real_from_real (tree type, const_tree arg1)
1651 {
1652 REAL_VALUE_TYPE value;
1653 tree t;
1654
1655 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1656 t = build_real (type, value);
1657
1658 /* If converting an infinity or NAN to a representation that doesn't
1659 have one, set the overflow bit so that we can produce some kind of
1660 error message at the appropriate point if necessary. It's not the
1661 most user-friendly message, but it's better than nothing. */
1662 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1663 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1664 TREE_OVERFLOW (t) = 1;
1665 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1666 && !MODE_HAS_NANS (TYPE_MODE (type)))
1667 TREE_OVERFLOW (t) = 1;
1668 /* Regular overflow, conversion produced an infinity in a mode that
1669 can't represent them. */
1670 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1671 && REAL_VALUE_ISINF (value)
1672 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1673 TREE_OVERFLOW (t) = 1;
1674 else
1675 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1676 return t;
1677 }
1678
1679 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1680 to a floating point type. */
1681
1682 static tree
1683 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1684 {
1685 REAL_VALUE_TYPE value;
1686 tree t;
1687
1688 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1689 t = build_real (type, value);
1690
1691 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1692 return t;
1693 }
1694
1695 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1696 to another fixed-point type. */
1697
1698 static tree
1699 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1700 {
1701 FIXED_VALUE_TYPE value;
1702 tree t;
1703 bool overflow_p;
1704
1705 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1706 TYPE_SATURATING (type));
1707 t = build_fixed (type, value);
1708
1709 /* Propagate overflow flags. */
1710 if (overflow_p | TREE_OVERFLOW (arg1))
1711 TREE_OVERFLOW (t) = 1;
1712 return t;
1713 }
1714
1715 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1716 to a fixed-point type. */
1717
1718 static tree
1719 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1720 {
1721 FIXED_VALUE_TYPE value;
1722 tree t;
1723 bool overflow_p;
1724
1725 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1726 TREE_INT_CST (arg1),
1727 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1728 TYPE_SATURATING (type));
1729 t = build_fixed (type, value);
1730
1731 /* Propagate overflow flags. */
1732 if (overflow_p | TREE_OVERFLOW (arg1))
1733 TREE_OVERFLOW (t) = 1;
1734 return t;
1735 }
1736
1737 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1738 to a fixed-point type. */
1739
1740 static tree
1741 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1742 {
1743 FIXED_VALUE_TYPE value;
1744 tree t;
1745 bool overflow_p;
1746
1747 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1748 &TREE_REAL_CST (arg1),
1749 TYPE_SATURATING (type));
1750 t = build_fixed (type, value);
1751
1752 /* Propagate overflow flags. */
1753 if (overflow_p | TREE_OVERFLOW (arg1))
1754 TREE_OVERFLOW (t) = 1;
1755 return t;
1756 }
1757
1758 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1759 type TYPE. If no simplification can be done, return NULL_TREE. */
1760
1761 static tree
1762 fold_convert_const (enum tree_code code, tree type, tree arg1)
1763 {
1764 if (TREE_TYPE (arg1) == type)
1765 return arg1;
1766
1767 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1768 || TREE_CODE (type) == OFFSET_TYPE)
1769 {
1770 if (TREE_CODE (arg1) == INTEGER_CST)
1771 return fold_convert_const_int_from_int (type, arg1);
1772 else if (TREE_CODE (arg1) == REAL_CST)
1773 return fold_convert_const_int_from_real (code, type, arg1);
1774 else if (TREE_CODE (arg1) == FIXED_CST)
1775 return fold_convert_const_int_from_fixed (type, arg1);
1776 }
1777 else if (TREE_CODE (type) == REAL_TYPE)
1778 {
1779 if (TREE_CODE (arg1) == INTEGER_CST)
1780 return build_real_from_int_cst (type, arg1);
1781 else if (TREE_CODE (arg1) == REAL_CST)
1782 return fold_convert_const_real_from_real (type, arg1);
1783 else if (TREE_CODE (arg1) == FIXED_CST)
1784 return fold_convert_const_real_from_fixed (type, arg1);
1785 }
1786 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1787 {
1788 if (TREE_CODE (arg1) == FIXED_CST)
1789 return fold_convert_const_fixed_from_fixed (type, arg1);
1790 else if (TREE_CODE (arg1) == INTEGER_CST)
1791 return fold_convert_const_fixed_from_int (type, arg1);
1792 else if (TREE_CODE (arg1) == REAL_CST)
1793 return fold_convert_const_fixed_from_real (type, arg1);
1794 }
1795 return NULL_TREE;
1796 }
1797
1798 /* Construct a vector of zero elements of vector type TYPE. */
1799
1800 static tree
1801 build_zero_vector (tree type)
1802 {
1803 tree t;
1804
1805 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1806 return build_vector_from_val (type, t);
1807 }
1808
1809 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1810
1811 bool
1812 fold_convertible_p (const_tree type, const_tree arg)
1813 {
1814 tree orig = TREE_TYPE (arg);
1815
1816 if (type == orig)
1817 return true;
1818
1819 if (TREE_CODE (arg) == ERROR_MARK
1820 || TREE_CODE (type) == ERROR_MARK
1821 || TREE_CODE (orig) == ERROR_MARK)
1822 return false;
1823
1824 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1825 return true;
1826
1827 switch (TREE_CODE (type))
1828 {
1829 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1830 case POINTER_TYPE: case REFERENCE_TYPE:
1831 case OFFSET_TYPE:
1832 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1833 || TREE_CODE (orig) == OFFSET_TYPE)
1834 return true;
1835 return (TREE_CODE (orig) == VECTOR_TYPE
1836 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1837
1838 case REAL_TYPE:
1839 case FIXED_POINT_TYPE:
1840 case COMPLEX_TYPE:
1841 case VECTOR_TYPE:
1842 case VOID_TYPE:
1843 return TREE_CODE (type) == TREE_CODE (orig);
1844
1845 default:
1846 return false;
1847 }
1848 }
1849
1850 /* Convert expression ARG to type TYPE. Used by the middle-end for
1851 simple conversions in preference to calling the front-end's convert. */
1852
1853 tree
1854 fold_convert_loc (location_t loc, tree type, tree arg)
1855 {
1856 tree orig = TREE_TYPE (arg);
1857 tree tem;
1858
1859 if (type == orig)
1860 return arg;
1861
1862 if (TREE_CODE (arg) == ERROR_MARK
1863 || TREE_CODE (type) == ERROR_MARK
1864 || TREE_CODE (orig) == ERROR_MARK)
1865 return error_mark_node;
1866
1867 switch (TREE_CODE (type))
1868 {
1869 case POINTER_TYPE:
1870 case REFERENCE_TYPE:
1871 /* Handle conversions between pointers to different address spaces. */
1872 if (POINTER_TYPE_P (orig)
1873 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1874 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1875 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1876 /* fall through */
1877
1878 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1879 case OFFSET_TYPE:
1880 if (TREE_CODE (arg) == INTEGER_CST)
1881 {
1882 tem = fold_convert_const (NOP_EXPR, type, arg);
1883 if (tem != NULL_TREE)
1884 return tem;
1885 }
1886 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1887 || TREE_CODE (orig) == OFFSET_TYPE)
1888 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1889 if (TREE_CODE (orig) == COMPLEX_TYPE)
1890 return fold_convert_loc (loc, type,
1891 fold_build1_loc (loc, REALPART_EXPR,
1892 TREE_TYPE (orig), arg));
1893 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1894 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1895 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1896
1897 case REAL_TYPE:
1898 if (TREE_CODE (arg) == INTEGER_CST)
1899 {
1900 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1901 if (tem != NULL_TREE)
1902 return tem;
1903 }
1904 else if (TREE_CODE (arg) == REAL_CST)
1905 {
1906 tem = fold_convert_const (NOP_EXPR, type, arg);
1907 if (tem != NULL_TREE)
1908 return tem;
1909 }
1910 else if (TREE_CODE (arg) == FIXED_CST)
1911 {
1912 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1914 return tem;
1915 }
1916
1917 switch (TREE_CODE (orig))
1918 {
1919 case INTEGER_TYPE:
1920 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1921 case POINTER_TYPE: case REFERENCE_TYPE:
1922 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1923
1924 case REAL_TYPE:
1925 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1926
1927 case FIXED_POINT_TYPE:
1928 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1929
1930 case COMPLEX_TYPE:
1931 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1932 return fold_convert_loc (loc, type, tem);
1933
1934 default:
1935 gcc_unreachable ();
1936 }
1937
1938 case FIXED_POINT_TYPE:
1939 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1940 || TREE_CODE (arg) == REAL_CST)
1941 {
1942 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1943 if (tem != NULL_TREE)
1944 goto fold_convert_exit;
1945 }
1946
1947 switch (TREE_CODE (orig))
1948 {
1949 case FIXED_POINT_TYPE:
1950 case INTEGER_TYPE:
1951 case ENUMERAL_TYPE:
1952 case BOOLEAN_TYPE:
1953 case REAL_TYPE:
1954 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1955
1956 case COMPLEX_TYPE:
1957 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1958 return fold_convert_loc (loc, type, tem);
1959
1960 default:
1961 gcc_unreachable ();
1962 }
1963
1964 case COMPLEX_TYPE:
1965 switch (TREE_CODE (orig))
1966 {
1967 case INTEGER_TYPE:
1968 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1969 case POINTER_TYPE: case REFERENCE_TYPE:
1970 case REAL_TYPE:
1971 case FIXED_POINT_TYPE:
1972 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1973 fold_convert_loc (loc, TREE_TYPE (type), arg),
1974 fold_convert_loc (loc, TREE_TYPE (type),
1975 integer_zero_node));
1976 case COMPLEX_TYPE:
1977 {
1978 tree rpart, ipart;
1979
1980 if (TREE_CODE (arg) == COMPLEX_EXPR)
1981 {
1982 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1983 TREE_OPERAND (arg, 0));
1984 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1985 TREE_OPERAND (arg, 1));
1986 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1987 }
1988
1989 arg = save_expr (arg);
1990 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1991 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1992 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1993 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1994 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1995 }
1996
1997 default:
1998 gcc_unreachable ();
1999 }
2000
2001 case VECTOR_TYPE:
2002 if (integer_zerop (arg))
2003 return build_zero_vector (type);
2004 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2005 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2006 || TREE_CODE (orig) == VECTOR_TYPE);
2007 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2008
2009 case VOID_TYPE:
2010 tem = fold_ignored_result (arg);
2011 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2012
2013 default:
2014 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2015 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2016 gcc_unreachable ();
2017 }
2018 fold_convert_exit:
2019 protected_set_expr_location_unshare (tem, loc);
2020 return tem;
2021 }
2022 \f
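/* Illustrative note (editorial sketch): fold_convert_loc mirrors the C
   conversion rules on trees.  Converting a COMPLEX_TYPE value to a
   REAL_TYPE, for instance, goes through the COMPLEX_TYPE arm above and
   keeps only the REALPART_EXPR of the operand, matching C99 semantics
   for (double) z; the imaginary part is simply dropped.  */
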
2023 /* Return false if expr can be assumed not to be an lvalue, true
2024 otherwise. */
2025
2026 static bool
2027 maybe_lvalue_p (const_tree x)
2028 {
2029 /* We only need to wrap lvalue tree codes. */
2030 switch (TREE_CODE (x))
2031 {
2032 case VAR_DECL:
2033 case PARM_DECL:
2034 case RESULT_DECL:
2035 case LABEL_DECL:
2036 case FUNCTION_DECL:
2037 case SSA_NAME:
2038
2039 case COMPONENT_REF:
2040 case MEM_REF:
2041 case INDIRECT_REF:
2042 case ARRAY_REF:
2043 case ARRAY_RANGE_REF:
2044 case BIT_FIELD_REF:
2045 case OBJ_TYPE_REF:
2046
2047 case REALPART_EXPR:
2048 case IMAGPART_EXPR:
2049 case PREINCREMENT_EXPR:
2050 case PREDECREMENT_EXPR:
2051 case SAVE_EXPR:
2052 case TRY_CATCH_EXPR:
2053 case WITH_CLEANUP_EXPR:
2054 case COMPOUND_EXPR:
2055 case MODIFY_EXPR:
2056 case TARGET_EXPR:
2057 case COND_EXPR:
2058 case BIND_EXPR:
2059 break;
2060
2061 default:
2062 /* Assume the worst for front-end tree codes. */
2063 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2064 break;
2065 return false;
2066 }
2067
2068 return true;
2069 }
2070
2071 /* Return an expr equal to X but certainly not valid as an lvalue. */
2072
2073 tree
2074 non_lvalue_loc (location_t loc, tree x)
2075 {
2076 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2077 us. */
2078 if (in_gimple_form)
2079 return x;
2080
2081 if (! maybe_lvalue_p (x))
2082 return x;
2083 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2084 }
2085
2086 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2087 Zero means allow extended lvalues. */
2088
2089 int pedantic_lvalues;
2090
2091 /* When pedantic, return an expr equal to X but certainly not valid as a
2092 pedantic lvalue. Otherwise, return X. */
2093
2094 static tree
2095 pedantic_non_lvalue_loc (location_t loc, tree x)
2096 {
2097 if (pedantic_lvalues)
2098 return non_lvalue_loc (loc, x);
2099
2100 return protected_set_expr_location_unshare (x, loc);
2101 }
2102 \f
2103 /* Given a tree comparison code, return the code that is the logical inverse
2104 of the given code. It is not safe to do this for floating-point
2105 comparisons, except for NE_EXPR and EQ_EXPR, so we receive the HONOR_NANS
2106 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2107
2108 enum tree_code
2109 invert_tree_comparison (enum tree_code code, bool honor_nans)
2110 {
2111 if (honor_nans && flag_trapping_math)
2112 return ERROR_MARK;
2113
2114 switch (code)
2115 {
2116 case EQ_EXPR:
2117 return NE_EXPR;
2118 case NE_EXPR:
2119 return EQ_EXPR;
2120 case GT_EXPR:
2121 return honor_nans ? UNLE_EXPR : LE_EXPR;
2122 case GE_EXPR:
2123 return honor_nans ? UNLT_EXPR : LT_EXPR;
2124 case LT_EXPR:
2125 return honor_nans ? UNGE_EXPR : GE_EXPR;
2126 case LE_EXPR:
2127 return honor_nans ? UNGT_EXPR : GT_EXPR;
2128 case LTGT_EXPR:
2129 return UNEQ_EXPR;
2130 case UNEQ_EXPR:
2131 return LTGT_EXPR;
2132 case UNGT_EXPR:
2133 return LE_EXPR;
2134 case UNGE_EXPR:
2135 return LT_EXPR;
2136 case UNLT_EXPR:
2137 return GE_EXPR;
2138 case UNLE_EXPR:
2139 return GT_EXPR;
2140 case ORDERED_EXPR:
2141 return UNORDERED_EXPR;
2142 case UNORDERED_EXPR:
2143 return ORDERED_EXPR;
2144 default:
2145 gcc_unreachable ();
2146 }
2147 }
2148
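/* Illustrative note (editorial sketch): with NaNs honored, the logical
   inverse of x < y is not x >= y, because both are false when either
   operand is NaN; hence LT_EXPR inverts to UNGE_EXPR above.  And when
   -ftrapping-math is in effect we refuse to invert at all (ERROR_MARK),
   since the unordered forms do not raise the invalid exception that
   the ordered ones would.  */
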
2149 /* Similar, but return the comparison that results if the operands are
2150 swapped. This is safe for floating-point. */
2151
2152 enum tree_code
2153 swap_tree_comparison (enum tree_code code)
2154 {
2155 switch (code)
2156 {
2157 case EQ_EXPR:
2158 case NE_EXPR:
2159 case ORDERED_EXPR:
2160 case UNORDERED_EXPR:
2161 case LTGT_EXPR:
2162 case UNEQ_EXPR:
2163 return code;
2164 case GT_EXPR:
2165 return LT_EXPR;
2166 case GE_EXPR:
2167 return LE_EXPR;
2168 case LT_EXPR:
2169 return GT_EXPR;
2170 case LE_EXPR:
2171 return GE_EXPR;
2172 case UNGT_EXPR:
2173 return UNLT_EXPR;
2174 case UNGE_EXPR:
2175 return UNLE_EXPR;
2176 case UNLT_EXPR:
2177 return UNGT_EXPR;
2178 case UNLE_EXPR:
2179 return UNGE_EXPR;
2180 default:
2181 gcc_unreachable ();
2182 }
2183 }
2184
2185
2186 /* Convert a comparison tree code from an enum tree_code representation
2187 into a compcode bit-based encoding. This function is the inverse of
2188 compcode_to_comparison. */
2189
2190 static enum comparison_code
2191 comparison_to_compcode (enum tree_code code)
2192 {
2193 switch (code)
2194 {
2195 case LT_EXPR:
2196 return COMPCODE_LT;
2197 case EQ_EXPR:
2198 return COMPCODE_EQ;
2199 case LE_EXPR:
2200 return COMPCODE_LE;
2201 case GT_EXPR:
2202 return COMPCODE_GT;
2203 case NE_EXPR:
2204 return COMPCODE_NE;
2205 case GE_EXPR:
2206 return COMPCODE_GE;
2207 case ORDERED_EXPR:
2208 return COMPCODE_ORD;
2209 case UNORDERED_EXPR:
2210 return COMPCODE_UNORD;
2211 case UNLT_EXPR:
2212 return COMPCODE_UNLT;
2213 case UNEQ_EXPR:
2214 return COMPCODE_UNEQ;
2215 case UNLE_EXPR:
2216 return COMPCODE_UNLE;
2217 case UNGT_EXPR:
2218 return COMPCODE_UNGT;
2219 case LTGT_EXPR:
2220 return COMPCODE_LTGT;
2221 case UNGE_EXPR:
2222 return COMPCODE_UNGE;
2223 default:
2224 gcc_unreachable ();
2225 }
2226 }
2227
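/* Worked example (editorial note): the COMPCODE_* values form a
   bitmask -- bit 0 is LT, bit 1 is EQ, bit 2 is GT, bit 3 is UNORD --
   so every comparison is the OR of its possible true outcomes:

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ                  (1|2 == 3)
     COMPCODE_GE   == COMPCODE_GT | COMPCODE_EQ                  (4|2 == 6)
     COMPCODE_NE   == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD (13)
     COMPCODE_TRUE == all four bits set                          (15)

   This is what lets combine_comparisons below merge two comparisons of
   the same operands with plain & and |.  */
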
2228 /* Convert a compcode bit-based encoding of a comparison operator back
2229 to GCC's enum tree_code representation. This function is the
2230 inverse of comparison_to_compcode. */
2231
2232 static enum tree_code
2233 compcode_to_comparison (enum comparison_code code)
2234 {
2235 switch (code)
2236 {
2237 case COMPCODE_LT:
2238 return LT_EXPR;
2239 case COMPCODE_EQ:
2240 return EQ_EXPR;
2241 case COMPCODE_LE:
2242 return LE_EXPR;
2243 case COMPCODE_GT:
2244 return GT_EXPR;
2245 case COMPCODE_NE:
2246 return NE_EXPR;
2247 case COMPCODE_GE:
2248 return GE_EXPR;
2249 case COMPCODE_ORD:
2250 return ORDERED_EXPR;
2251 case COMPCODE_UNORD:
2252 return UNORDERED_EXPR;
2253 case COMPCODE_UNLT:
2254 return UNLT_EXPR;
2255 case COMPCODE_UNEQ:
2256 return UNEQ_EXPR;
2257 case COMPCODE_UNLE:
2258 return UNLE_EXPR;
2259 case COMPCODE_UNGT:
2260 return UNGT_EXPR;
2261 case COMPCODE_LTGT:
2262 return LTGT_EXPR;
2263 case COMPCODE_UNGE:
2264 return UNGE_EXPR;
2265 default:
2266 gcc_unreachable ();
2267 }
2268 }
2269
2270 /* Return a tree for the comparison which is the combination of
2271 doing the AND or OR (depending on CODE) of the two operations LCODE
2272 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2273 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2274 if this makes the transformation invalid. */
2275
2276 tree
2277 combine_comparisons (location_t loc,
2278 enum tree_code code, enum tree_code lcode,
2279 enum tree_code rcode, tree truth_type,
2280 tree ll_arg, tree lr_arg)
2281 {
2282 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2283 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2284 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2285 int compcode;
2286
2287 switch (code)
2288 {
2289 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2290 compcode = lcompcode & rcompcode;
2291 break;
2292
2293 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2294 compcode = lcompcode | rcompcode;
2295 break;
2296
2297 default:
2298 return NULL_TREE;
2299 }
2300
2301 if (!honor_nans)
2302 {
2303 /* Eliminate unordered comparisons, as well as LTGT and ORD
2304 which are not used unless the mode has NaNs. */
2305 compcode &= ~COMPCODE_UNORD;
2306 if (compcode == COMPCODE_LTGT)
2307 compcode = COMPCODE_NE;
2308 else if (compcode == COMPCODE_ORD)
2309 compcode = COMPCODE_TRUE;
2310 }
2311 else if (flag_trapping_math)
2312 {
2313 /* Check that the original operation and the optimized ones will trap
2314 under the same condition. */
2315 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2316 && (lcompcode != COMPCODE_EQ)
2317 && (lcompcode != COMPCODE_ORD);
2318 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2319 && (rcompcode != COMPCODE_EQ)
2320 && (rcompcode != COMPCODE_ORD);
2321 bool trap = (compcode & COMPCODE_UNORD) == 0
2322 && (compcode != COMPCODE_EQ)
2323 && (compcode != COMPCODE_ORD);
2324
2325 /* In a short-circuited boolean expression the LHS might be
2326 such that the RHS, if evaluated, will never trap. For
2327 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2328 if neither x nor y is NaN. (This is a mixed blessing: for
2329 example, the expression above will never trap, hence
2330 optimizing it to x < y would be invalid). */
2331 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2332 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2333 rtrap = false;
2334
2335 /* If the comparison was short-circuited, and only the RHS
2336 trapped, we may now generate a spurious trap. */
2337 if (rtrap && !ltrap
2338 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2339 return NULL_TREE;
2340
2341 /* If we changed the conditions that cause a trap, we lose. */
2342 if ((ltrap || rtrap) != trap)
2343 return NULL_TREE;
2344 }
2345
2346 if (compcode == COMPCODE_TRUE)
2347 return constant_boolean_node (true, truth_type);
2348 else if (compcode == COMPCODE_FALSE)
2349 return constant_boolean_node (false, truth_type);
2350 else
2351 {
2352 enum tree_code tcode;
2353
2354 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2355 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2356 }
2357 }
2358 \f
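/* Illustrative note (editorial sketch): for the same operands x and y,

     (x < y) || (x == y)

   reaches the TRUTH_OR case above as COMPCODE_LT | COMPCODE_EQ ==
   COMPCODE_LE and folds to x <= y, while (x < y) && (x == y) is
   COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE, a constant false.  With
   -ftrapping-math the trap checks above can veto either result.  */
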
2359 /* Return nonzero if two operands (typically of the same tree node)
2360 are necessarily equal. If either argument has side-effects this
2361 function returns zero. FLAGS modifies behavior as follows:
2362
2363 If OEP_ONLY_CONST is set, only return nonzero for constants.
2364 This function tests whether the operands are indistinguishable;
2365 it does not test whether they are equal using C's == operation.
2366 The distinction is important for IEEE floating point, because
2367 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2368 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2369
2370 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2371 even though it may hold multiple values during a function.
2372 This is because a GCC tree node guarantees that nothing else is
2373 executed between the evaluation of its "operands" (which may often
2374 be evaluated in arbitrary order). Hence if the operands themselves
2375 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2376 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2377 unset means assuming isochronic (or instantaneous) tree equivalence.
2378 Unless comparing arbitrary expression trees, such as from different
2379 statements, this flag can usually be left unset.
2380
2381 If OEP_PURE_SAME is set, then pure functions with identical arguments
2382 are considered the same. It is used when the caller has other ways
2383 to ensure that global memory is unchanged in between. */
2384
2385 int
2386 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2387 {
2388 /* If either is ERROR_MARK, they aren't equal. */
2389 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2390 || TREE_TYPE (arg0) == error_mark_node
2391 || TREE_TYPE (arg1) == error_mark_node)
2392 return 0;
2393
2394 /* Similarly, if either does not have a type (like a released SSA name),
2395 they aren't equal. */
2396 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2397 return 0;
2398
2399 /* Check equality of integer constants before bailing out due to
2400 precision differences. */
2401 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2402 return tree_int_cst_equal (arg0, arg1);
2403
2404 /* If both types don't have the same signedness, then we can't consider
2405 them equal. We must check this before the STRIP_NOPS calls
2406 because they may change the signedness of the arguments. As pointers
2407 strictly don't have a signedness, require either two pointers or
2408 two non-pointers as well. */
2409 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2410 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2411 return 0;
2412
2413 /* We cannot consider pointers to different address spaces equal. */
2414 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2417 return 0;
2418
2419 /* If both types don't have the same precision, then it is not safe
2420 to strip NOPs. */
2421 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2422 return 0;
2423
2424 STRIP_NOPS (arg0);
2425 STRIP_NOPS (arg1);
2426
2427 /* In case both args are comparisons but with different comparison
2428 code, try to swap the comparison operands of one arg to produce
2429 a match and compare that variant. */
2430 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2431 && COMPARISON_CLASS_P (arg0)
2432 && COMPARISON_CLASS_P (arg1))
2433 {
2434 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2435
2436 if (TREE_CODE (arg0) == swap_code)
2437 return operand_equal_p (TREE_OPERAND (arg0, 0),
2438 TREE_OPERAND (arg1, 1), flags)
2439 && operand_equal_p (TREE_OPERAND (arg0, 1),
2440 TREE_OPERAND (arg1, 0), flags);
2441 }
2442
2443 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2444 /* This is needed for conversions and for COMPONENT_REF.
2445 Might as well play it safe and always test this. */
2446 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2447 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2448 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2449 return 0;
2450
2451 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2452 We don't care about side effects in that case because the SAVE_EXPR
2453 takes care of that for us. In all other cases, two expressions are
2454 equal if they have no side effects. If we have two identical
2455 expressions with side effects that should be treated the same due
2456 to the only side effects being identical SAVE_EXPR's, that will
2457 be detected in the recursive calls below.
2458 If we are taking an invariant address of two identical objects
2459 they are necessarily equal as well. */
2460 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2461 && (TREE_CODE (arg0) == SAVE_EXPR
2462 || (flags & OEP_CONSTANT_ADDRESS_OF)
2463 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2464 return 1;
2465
2466 /* Next handle constant cases, those for which we can return 1 even
2467 if ONLY_CONST is set. */
2468 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2469 switch (TREE_CODE (arg0))
2470 {
2471 case INTEGER_CST:
2472 return tree_int_cst_equal (arg0, arg1);
2473
2474 case FIXED_CST:
2475 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2476 TREE_FIXED_CST (arg1));
2477
2478 case REAL_CST:
2479 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2480 TREE_REAL_CST (arg1)))
2481 return 1;
2482
2483
2484 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2485 {
2486 /* If we do not distinguish between signed and unsigned zero,
2487 consider them equal. */
2488 if (real_zerop (arg0) && real_zerop (arg1))
2489 return 1;
2490 }
2491 return 0;
2492
2493 case VECTOR_CST:
2494 {
2495 tree v1, v2;
2496
2497 v1 = TREE_VECTOR_CST_ELTS (arg0);
2498 v2 = TREE_VECTOR_CST_ELTS (arg1);
2499 while (v1 && v2)
2500 {
2501 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2502 flags))
2503 return 0;
2504 v1 = TREE_CHAIN (v1);
2505 v2 = TREE_CHAIN (v2);
2506 }
2507
2508 return v1 == v2;
2509 }
2510
2511 case COMPLEX_CST:
2512 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2513 flags)
2514 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2515 flags));
2516
2517 case STRING_CST:
2518 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2519 && ! memcmp (TREE_STRING_POINTER (arg0),
2520 TREE_STRING_POINTER (arg1),
2521 TREE_STRING_LENGTH (arg0)));
2522
2523 case ADDR_EXPR:
2524 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2525 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2526 ? OEP_CONSTANT_ADDRESS_OF : 0);
2527 default:
2528 break;
2529 }
2530
2531 if (flags & OEP_ONLY_CONST)
2532 return 0;
2533
2534 /* Define macros to test an operand from arg0 and arg1 for equality and a
2535 variant that allows null and views null as being different from any
2536 non-null value. In the latter case, if either is null, then both
2537 must be; otherwise, do the normal comparison. */
2538 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2539 TREE_OPERAND (arg1, N), flags)
2540
2541 #define OP_SAME_WITH_NULL(N) \
2542 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2543 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2544
2545 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2546 {
2547 case tcc_unary:
2548 /* Two conversions are equal only if signedness and modes match. */
2549 switch (TREE_CODE (arg0))
2550 {
2551 CASE_CONVERT:
2552 case FIX_TRUNC_EXPR:
2553 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2554 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2555 return 0;
2556 break;
2557 default:
2558 break;
2559 }
2560
2561 return OP_SAME (0);
2562
2563
2564 case tcc_comparison:
2565 case tcc_binary:
2566 if (OP_SAME (0) && OP_SAME (1))
2567 return 1;
2568
2569 /* For commutative ops, allow the other order. */
2570 return (commutative_tree_code (TREE_CODE (arg0))
2571 && operand_equal_p (TREE_OPERAND (arg0, 0),
2572 TREE_OPERAND (arg1, 1), flags)
2573 && operand_equal_p (TREE_OPERAND (arg0, 1),
2574 TREE_OPERAND (arg1, 0), flags));
2575
2576 case tcc_reference:
2577 /* If either of the pointer (or reference) expressions we are
2578 dereferencing contain a side effect, these cannot be equal. */
2579 if (TREE_SIDE_EFFECTS (arg0)
2580 || TREE_SIDE_EFFECTS (arg1))
2581 return 0;
2582
2583 switch (TREE_CODE (arg0))
2584 {
2585 case INDIRECT_REF:
2586 case REALPART_EXPR:
2587 case IMAGPART_EXPR:
2588 return OP_SAME (0);
2589
2590 case MEM_REF:
2591 /* Require equal access sizes, and similar pointer types.
2592 We can have incomplete types for array references of
2593 variable-sized arrays from the Fortran frontend
2594 though. */
2595 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2596 || (TYPE_SIZE (TREE_TYPE (arg0))
2597 && TYPE_SIZE (TREE_TYPE (arg1))
2598 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2599 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2600 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2601 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2602 && OP_SAME (0) && OP_SAME (1));
2603
2604 case ARRAY_REF:
2605 case ARRAY_RANGE_REF:
2606 /* Operands 2 and 3 may be null.
2607 Compare the array index by value first if it is constant, as we
2608 may have different types but the same value here. */
2609 return (OP_SAME (0)
2610 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2611 TREE_OPERAND (arg1, 1))
2612 || OP_SAME (1))
2613 && OP_SAME_WITH_NULL (2)
2614 && OP_SAME_WITH_NULL (3));
2615
2616 case COMPONENT_REF:
2617 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2618 may be NULL when we're called to compare MEM_EXPRs. */
2619 return OP_SAME_WITH_NULL (0)
2620 && OP_SAME (1)
2621 && OP_SAME_WITH_NULL (2);
2622
2623 case BIT_FIELD_REF:
2624 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2625
2626 default:
2627 return 0;
2628 }
2629
2630 case tcc_expression:
2631 switch (TREE_CODE (arg0))
2632 {
2633 case ADDR_EXPR:
2634 case TRUTH_NOT_EXPR:
2635 return OP_SAME (0);
2636
2637 case TRUTH_ANDIF_EXPR:
2638 case TRUTH_ORIF_EXPR:
2639 return OP_SAME (0) && OP_SAME (1);
2640
2641 case FMA_EXPR:
2642 case WIDEN_MULT_PLUS_EXPR:
2643 case WIDEN_MULT_MINUS_EXPR:
2644 if (!OP_SAME (2))
2645 return 0;
2646 /* The multiplication operands are commutative. */
2647 /* FALLTHRU */
2648
2649 case TRUTH_AND_EXPR:
2650 case TRUTH_OR_EXPR:
2651 case TRUTH_XOR_EXPR:
2652 if (OP_SAME (0) && OP_SAME (1))
2653 return 1;
2654
2655 /* Otherwise take into account this is a commutative operation. */
2656 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2657 TREE_OPERAND (arg1, 1), flags)
2658 && operand_equal_p (TREE_OPERAND (arg0, 1),
2659 TREE_OPERAND (arg1, 0), flags));
2660
2661 case COND_EXPR:
2662 case VEC_COND_EXPR:
2663 case DOT_PROD_EXPR:
2664 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2665
2666 default:
2667 return 0;
2668 }
2669
2670 case tcc_vl_exp:
2671 switch (TREE_CODE (arg0))
2672 {
2673 case CALL_EXPR:
2674 /* If the CALL_EXPRs call different functions, then they
2675 clearly cannot be equal. */
2676 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2677 flags))
2678 return 0;
2679
2680 {
2681 unsigned int cef = call_expr_flags (arg0);
2682 if (flags & OEP_PURE_SAME)
2683 cef &= ECF_CONST | ECF_PURE;
2684 else
2685 cef &= ECF_CONST;
2686 if (!cef)
2687 return 0;
2688 }
2689
2690 /* Now see if all the arguments are the same. */
2691 {
2692 const_call_expr_arg_iterator iter0, iter1;
2693 const_tree a0, a1;
2694 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2695 a1 = first_const_call_expr_arg (arg1, &iter1);
2696 a0 && a1;
2697 a0 = next_const_call_expr_arg (&iter0),
2698 a1 = next_const_call_expr_arg (&iter1))
2699 if (! operand_equal_p (a0, a1, flags))
2700 return 0;
2701
2702 /* If we get here and both argument lists are exhausted
2703 then the CALL_EXPRs are equal. */
2704 return ! (a0 || a1);
2705 }
2706 default:
2707 return 0;
2708 }
2709
2710 case tcc_declaration:
2711 /* Consider __builtin_sqrt equal to sqrt. */
2712 return (TREE_CODE (arg0) == FUNCTION_DECL
2713 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2714 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2715 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2716
2717 default:
2718 return 0;
2719 }
2720
2721 #undef OP_SAME
2722 #undef OP_SAME_WITH_NULL
2723 }
2724 \f
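/* Illustrative note (editorial sketch): operand_equal_p tests whether
   two trees are indistinguishable, not whether they compare equal with
   C's ==.  With signed zeros honored, a REAL_CST of 0.0 and one of
   -0.0 yield 0 here even though -0.0 == 0.0 at run time; conversely,
   two structurally identical a[i+1] trees with no side effects yield 1
   without any run-time comparison being implied.  */
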
2725 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2726 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2727
2728 When in doubt, return 0. */
2729
2730 static int
2731 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2732 {
2733 int unsignedp1, unsignedpo;
2734 tree primarg0, primarg1, primother;
2735 unsigned int correct_width;
2736
2737 if (operand_equal_p (arg0, arg1, 0))
2738 return 1;
2739
2740 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2741 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2742 return 0;
2743
2744 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2745 and see if the inner values are the same. This removes any
2746 signedness comparison, which doesn't matter here. */
2747 primarg0 = arg0, primarg1 = arg1;
2748 STRIP_NOPS (primarg0);
2749 STRIP_NOPS (primarg1);
2750 if (operand_equal_p (primarg0, primarg1, 0))
2751 return 1;
2752
2753 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2754 actual comparison operand, ARG0.
2755
2756 First throw away any conversions to wider types
2757 already present in the operands. */
2758
2759 primarg1 = get_narrower (arg1, &unsignedp1);
2760 primother = get_narrower (other, &unsignedpo);
2761
2762 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2763 if (unsignedp1 == unsignedpo
2764 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2765 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2766 {
2767 tree type = TREE_TYPE (arg0);
2768
2769 /* Make sure shorter operand is extended the right way
2770 to match the longer operand. */
2771 primarg1 = fold_convert (signed_or_unsigned_type_for
2772 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2773
2774 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2775 return 1;
2776 }
2777
2778 return 0;
2779 }
2780 \f
2781 /* See if ARG is an expression that is either a comparison or is performing
2782 arithmetic on comparisons. The comparisons must only be comparing
2783 two different values, which will be stored in *CVAL1 and *CVAL2; if
2784 they are nonzero it means that some operands have already been found.
2785 No variables may be used anywhere else in the expression except in the
2786 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2787 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2788
2789 If this is true, return 1. Otherwise, return zero. */
2790
2791 static int
2792 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2793 {
2794 enum tree_code code = TREE_CODE (arg);
2795 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2796
2797 /* We can handle some of the tcc_expression cases here. */
2798 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2799 tclass = tcc_unary;
2800 else if (tclass == tcc_expression
2801 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2802 || code == COMPOUND_EXPR))
2803 tclass = tcc_binary;
2804
2805 else if (tclass == tcc_expression && code == SAVE_EXPR
2806 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2807 {
2808 /* If we've already found a CVAL1 or CVAL2, this expression is
2809 too complex to handle. */
2810 if (*cval1 || *cval2)
2811 return 0;
2812
2813 tclass = tcc_unary;
2814 *save_p = 1;
2815 }
2816
2817 switch (tclass)
2818 {
2819 case tcc_unary:
2820 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2821
2822 case tcc_binary:
2823 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2824 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2825 cval1, cval2, save_p));
2826
2827 case tcc_constant:
2828 return 1;
2829
2830 case tcc_expression:
2831 if (code == COND_EXPR)
2832 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2833 cval1, cval2, save_p)
2834 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2835 cval1, cval2, save_p)
2836 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2837 cval1, cval2, save_p));
2838 return 0;
2839
2840 case tcc_comparison:
2841 /* First see if we can handle the first operand, then the second. For
2842 the second operand, we know *CVAL1 can't be zero. It must be that
2843 one side of the comparison is each of the values; test for the
2844 case where this isn't true by failing if the two operands
2845 are the same. */
2846
2847 if (operand_equal_p (TREE_OPERAND (arg, 0),
2848 TREE_OPERAND (arg, 1), 0))
2849 return 0;
2850
2851 if (*cval1 == 0)
2852 *cval1 = TREE_OPERAND (arg, 0);
2853 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2854 ;
2855 else if (*cval2 == 0)
2856 *cval2 = TREE_OPERAND (arg, 0);
2857 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2858 ;
2859 else
2860 return 0;
2861
2862 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2863 ;
2864 else if (*cval2 == 0)
2865 *cval2 = TREE_OPERAND (arg, 1);
2866 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2867 ;
2868 else
2869 return 0;
2870
2871 return 1;
2872
2873 default:
2874 return 0;
2875 }
2876 }
2877 \f
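/* Illustrative note (editorial sketch): for the expression

     (a < b) | (a == b)

   twoval_comparison_p succeeds with *CVAL1 == a and *CVAL2 == b, since
   every comparison in the tree mentions only those two values.  An
   expression such as (a < b) | (a == c) fails: a third value would be
   required, and the function gives up.  */
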
2878 /* ARG is a tree that is known to contain just arithmetic operations and
2879 comparisons. Evaluate the operations in the tree substituting NEW0 for
2880 any occurrence of OLD0 as an operand of a comparison and likewise for
2881 NEW1 and OLD1. */
2882
2883 static tree
2884 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2885 tree old1, tree new1)
2886 {
2887 tree type = TREE_TYPE (arg);
2888 enum tree_code code = TREE_CODE (arg);
2889 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2890
2891 /* We can handle some of the tcc_expression cases here. */
2892 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2893 tclass = tcc_unary;
2894 else if (tclass == tcc_expression
2895 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2896 tclass = tcc_binary;
2897
2898 switch (tclass)
2899 {
2900 case tcc_unary:
2901 return fold_build1_loc (loc, code, type,
2902 eval_subst (loc, TREE_OPERAND (arg, 0),
2903 old0, new0, old1, new1));
2904
2905 case tcc_binary:
2906 return fold_build2_loc (loc, code, type,
2907 eval_subst (loc, TREE_OPERAND (arg, 0),
2908 old0, new0, old1, new1),
2909 eval_subst (loc, TREE_OPERAND (arg, 1),
2910 old0, new0, old1, new1));
2911
2912 case tcc_expression:
2913 switch (code)
2914 {
2915 case SAVE_EXPR:
2916 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2917 old1, new1);
2918
2919 case COMPOUND_EXPR:
2920 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2921 old1, new1);
2922
2923 case COND_EXPR:
2924 return fold_build3_loc (loc, code, type,
2925 eval_subst (loc, TREE_OPERAND (arg, 0),
2926 old0, new0, old1, new1),
2927 eval_subst (loc, TREE_OPERAND (arg, 1),
2928 old0, new0, old1, new1),
2929 eval_subst (loc, TREE_OPERAND (arg, 2),
2930 old0, new0, old1, new1));
2931 default:
2932 break;
2933 }
2934 /* Fall through - ??? */
2935
2936 case tcc_comparison:
2937 {
2938 tree arg0 = TREE_OPERAND (arg, 0);
2939 tree arg1 = TREE_OPERAND (arg, 1);
2940
2941 /* We need to check both for exact equality and tree equality. The
2942 former will be true if the operand has a side-effect. In that
2943 case, we know the operand occurred exactly once. */
2944
2945 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2946 arg0 = new0;
2947 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2948 arg0 = new1;
2949
2950 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2951 arg1 = new0;
2952 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2953 arg1 = new1;
2954
2955 return fold_build2_loc (loc, code, type, arg0, arg1);
2956 }
2957
2958 default:
2959 return arg;
2960 }
2961 }
2962 \f
2963 /* Return a tree for the case when the result of an expression is RESULT
2964 converted to TYPE and OMITTED was previously an operand of the expression
2965 but is now not needed (e.g., we folded OMITTED * 0).
2966
2967 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2968 the conversion of RESULT to TYPE. */
2969
2970 tree
2971 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2972 {
2973 tree t = fold_convert_loc (loc, type, result);
2974
2975 /* If the resulting operand is an empty statement, just return the omitted
2976 statement cast to void. */
2977 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2978 return build1_loc (loc, NOP_EXPR, void_type_node,
2979 fold_ignored_result (omitted));
2980
2981 if (TREE_SIDE_EFFECTS (omitted))
2982 return build2_loc (loc, COMPOUND_EXPR, type,
2983 fold_ignored_result (omitted), t);
2984
2985 return non_lvalue_loc (loc, t);
2986 }
2987
2988 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2989
2990 static tree
2991 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2992 tree omitted)
2993 {
2994 tree t = fold_convert_loc (loc, type, result);
2995
2996 /* If the resulting operand is an empty statement, just return the omitted
2997 statement cast to void. */
2998 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2999 return build1_loc (loc, NOP_EXPR, void_type_node,
3000 fold_ignored_result (omitted));
3001
3002 if (TREE_SIDE_EFFECTS (omitted))
3003 return build2_loc (loc, COMPOUND_EXPR, type,
3004 fold_ignored_result (omitted), t);
3005
3006 return pedantic_non_lvalue_loc (loc, t);
3007 }
3008
3009 /* Return a tree for the case when the result of an expression is RESULT
3010 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3011 of the expression but are now not needed.
3012
3013 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3014 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3015 evaluated before OMITTED2. Otherwise, if neither has side effects,
3016 just do the conversion of RESULT to TYPE. */
3017
3018 tree
3019 omit_two_operands_loc (location_t loc, tree type, tree result,
3020 tree omitted1, tree omitted2)
3021 {
3022 tree t = fold_convert_loc (loc, type, result);
3023
3024 if (TREE_SIDE_EFFECTS (omitted2))
3025 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3026 if (TREE_SIDE_EFFECTS (omitted1))
3027 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3028
3029 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3030 }
3031
3032 \f
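/* Illustrative note (editorial sketch): these helpers preserve required
   side effects when an operand is folded away.  Folding f () * 0,
   where f may write memory, goes through omit_one_operand_loc and
   produces the equivalent of the C expression (f (), 0): a
   COMPOUND_EXPR that still calls f but has the constant's value.  */
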
3033 /* Return a simplified tree node for the truth-negation of ARG. This
3034 never alters ARG itself. We assume that ARG is an operation that
3035 returns a truth value (0 or 1).
3036
3037 FIXME: one would think we would fold the result, but it causes
3038 problems with the dominator optimizer. */
3039
3040 tree
3041 fold_truth_not_expr (location_t loc, tree arg)
3042 {
3043 tree type = TREE_TYPE (arg);
3044 enum tree_code code = TREE_CODE (arg);
3045 location_t loc1, loc2;
3046
3047 /* If this is a comparison, we can simply invert it, except for
3048 floating-point non-equality comparisons, in which case we just
3049 enclose a TRUTH_NOT_EXPR around what we have. */
3050
3051 if (TREE_CODE_CLASS (code) == tcc_comparison)
3052 {
3053 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3054 if (FLOAT_TYPE_P (op_type)
3055 && flag_trapping_math
3056 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3057 && code != NE_EXPR && code != EQ_EXPR)
3058 return NULL_TREE;
3059
3060 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3061 if (code == ERROR_MARK)
3062 return NULL_TREE;
3063
3064 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3065 TREE_OPERAND (arg, 1));
3066 }
3067
3068 switch (code)
3069 {
3070 case INTEGER_CST:
3071 return constant_boolean_node (integer_zerop (arg), type);
3072
3073 case TRUTH_AND_EXPR:
3074 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3075 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3076 return build2_loc (loc, TRUTH_OR_EXPR, type,
3077 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3078 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3079
3080 case TRUTH_OR_EXPR:
3081 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3082 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3083 return build2_loc (loc, TRUTH_AND_EXPR, type,
3084 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3085 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3086
3087 case TRUTH_XOR_EXPR:
3088 /* Here we can invert either operand. We invert the first operand
3089 unless the second operand is a TRUTH_NOT_EXPR in which case our
3090 result is the XOR of the first operand with the inside of the
3091 negation of the second operand. */
3092
3093 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3094 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3095 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3096 else
3097 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3098 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3099 TREE_OPERAND (arg, 1));
3100
3101 case TRUTH_ANDIF_EXPR:
3102 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3103 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3104 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3105 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3106 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3107
3108 case TRUTH_ORIF_EXPR:
3109 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3110 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3111 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3112 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3113 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3114
3115 case TRUTH_NOT_EXPR:
3116 return TREE_OPERAND (arg, 0);
3117
3118 case COND_EXPR:
3119 {
3120 tree arg1 = TREE_OPERAND (arg, 1);
3121 tree arg2 = TREE_OPERAND (arg, 2);
3122
3123 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3124 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3125
3126 /* A COND_EXPR may have a throw as one operand, which
3127 then has void type. Just leave void operands
3128 as they are. */
3129 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3130 VOID_TYPE_P (TREE_TYPE (arg1))
3131 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3132 VOID_TYPE_P (TREE_TYPE (arg2))
3133 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3134 }
3135
3136 case COMPOUND_EXPR:
3137 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3138 return build2_loc (loc, COMPOUND_EXPR, type,
3139 TREE_OPERAND (arg, 0),
3140 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3141
3142 case NON_LVALUE_EXPR:
3143 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3144 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3145
3146 CASE_CONVERT:
3147 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3148 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3149
3150 /* ... fall through ... */
3151
3152 case FLOAT_EXPR:
3153 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3154 return build1_loc (loc, TREE_CODE (arg), type,
3155 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3156
3157 case BIT_AND_EXPR:
3158 if (!integer_onep (TREE_OPERAND (arg, 1)))
3159 return NULL_TREE;
3160 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3161
3162 case SAVE_EXPR:
3163 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3164
3165 case CLEANUP_POINT_EXPR:
3166 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3167 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3168 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3169
3170 default:
3171 return NULL_TREE;
3172 }
3173 }
3174
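/* Illustrative note (editorial sketch): the TRUTH_AND/TRUTH_OR cases
   above are De Morgan's laws,

     !(a && b)  ->  !a || !b
     !(a || b)  ->  !a && !b

   and a negated comparison such as !(x <= y) becomes x > y through
   invert_tree_comparison when that is safe for the operand mode.  */
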
3175 /* Return a simplified tree node for the truth-negation of ARG. This
3176 never alters ARG itself. We assume that ARG is an operation that
3177 returns a truth value (0 or 1).
3178
3179 FIXME: one would think we would fold the result, but it causes
3180 problems with the dominator optimizer. */
3181
3182 tree
3183 invert_truthvalue_loc (location_t loc, tree arg)
3184 {
3185 tree tem;
3186
3187 if (TREE_CODE (arg) == ERROR_MARK)
3188 return arg;
3189
3190 tem = fold_truth_not_expr (loc, arg);
3191 if (!tem)
3192 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3193
3194 return tem;
3195 }
3196
3197 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3198 operands are another bit-wise operation with a common input. If so,
3199 distribute the bit operations to save an operation and possibly two if
3200 constants are involved. For example, convert
3201 (A | B) & (A | C) into A | (B & C)
3202 Further simplification will occur if B and C are constants.
3203
3204 If this optimization cannot be done, 0 will be returned. */
3205
3206 static tree
3207 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3208 tree arg0, tree arg1)
3209 {
3210 tree common;
3211 tree left, right;
3212
3213 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3214 || TREE_CODE (arg0) == code
3215 || (TREE_CODE (arg0) != BIT_AND_EXPR
3216 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3217 return 0;
3218
3219 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3220 {
3221 common = TREE_OPERAND (arg0, 0);
3222 left = TREE_OPERAND (arg0, 1);
3223 right = TREE_OPERAND (arg1, 1);
3224 }
3225 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3226 {
3227 common = TREE_OPERAND (arg0, 0);
3228 left = TREE_OPERAND (arg0, 1);
3229 right = TREE_OPERAND (arg1, 0);
3230 }
3231 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3232 {
3233 common = TREE_OPERAND (arg0, 1);
3234 left = TREE_OPERAND (arg0, 0);
3235 right = TREE_OPERAND (arg1, 1);
3236 }
3237 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3238 {
3239 common = TREE_OPERAND (arg0, 1);
3240 left = TREE_OPERAND (arg0, 0);
3241 right = TREE_OPERAND (arg1, 0);
3242 }
3243 else
3244 return 0;
3245
3246 common = fold_convert_loc (loc, type, common);
3247 left = fold_convert_loc (loc, type, left);
3248 right = fold_convert_loc (loc, type, right);
3249 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3250 fold_build2_loc (loc, code, type, left, right));
3251 }
3252
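/* Illustrative note (editorial sketch): the distribution pays off twice
   when B and C are constants.  For an int flags,

     (flags | 0x3) & (flags | 0x5)

   becomes flags | (0x3 & 0x5), whose inner constant operation then
   folds, leaving flags | 0x1 -- one run-time operation instead of
   three.  */
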
3253 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3254 with code CODE. This optimization is unsafe. */
3255 static tree
3256 distribute_real_division (location_t loc, enum tree_code code, tree type,
3257 tree arg0, tree arg1)
3258 {
3259 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3260 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3261
3262 /* (A / C) +- (B / C) -> (A +- B) / C. */
3263 if (mul0 == mul1
3264 && operand_equal_p (TREE_OPERAND (arg0, 1),
3265 TREE_OPERAND (arg1, 1), 0))
3266 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3267 fold_build2_loc (loc, code, type,
3268 TREE_OPERAND (arg0, 0),
3269 TREE_OPERAND (arg1, 0)),
3270 TREE_OPERAND (arg0, 1));
3271
3272 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3273 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3274 TREE_OPERAND (arg1, 0), 0)
3275 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3276 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3277 {
3278 REAL_VALUE_TYPE r0, r1;
3279 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3280 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3281 if (!mul0)
3282 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3283 if (!mul1)
3284 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3285 real_arithmetic (&r0, code, &r0, &r1);
3286 return fold_build2_loc (loc, MULT_EXPR, type,
3287 TREE_OPERAND (arg0, 0),
3288 build_real (type, r0));
3289 }
3290
3291 return NULL_TREE;
3292 }
3293 \f
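/* Illustrative note (editorial sketch): "unsafe" above means the
   result can round differently, so callers are expected to guard this
   behind the unsafe-math flags.  For example,

     x / 3.0 + y / 3.0  ->  (x + y) / 3.0

   performs one division instead of two, and

     x / 3.0 + x / 5.0  ->  x * (1.0/3.0 + 1.0/5.0)

   replaces both divisions with a multiply by a precomputed constant.  */
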
3294 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3295 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3296
3297 static tree
3298 make_bit_field_ref (location_t loc, tree inner, tree type,
3299 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3300 {
3301 tree result, bftype;
3302
3303 if (bitpos == 0)
3304 {
3305 tree size = TYPE_SIZE (TREE_TYPE (inner));
3306 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3307 || POINTER_TYPE_P (TREE_TYPE (inner)))
3308 && host_integerp (size, 0)
3309 && tree_low_cst (size, 0) == bitsize)
3310 return fold_convert_loc (loc, type, inner);
3311 }
3312
3313 bftype = type;
3314 if (TYPE_PRECISION (bftype) != bitsize
3315 || TYPE_UNSIGNED (bftype) == !unsignedp)
3316 bftype = build_nonstandard_integer_type (bitsize, 0);
3317
3318 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3319 size_int (bitsize), bitsize_int (bitpos));
3320
3321 if (bftype != type)
3322 result = fold_convert_loc (loc, type, result);
3323
3324 return result;
3325 }
3326
3327 /* Optimize a bit-field compare.
3328
3329 There are two cases: First is a compare against a constant and the
3330 second is a comparison of two items where the fields are at the same
3331 bit position relative to the start of a chunk (byte, halfword, word)
3332 large enough to contain it. In these cases we can avoid the shift
3333 implicit in bitfield extractions.
3334
3335 For constants, we emit a compare of the shifted constant with the
3336 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3337 compared. For two fields at the same position, we do the ANDs with the
3338 similar mask and compare the result of the ANDs.
3339
3340 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3341 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3342 are the left and right operands of the comparison, respectively.
3343
3344 If the optimization described above can be done, we return the resulting
3345 tree. Otherwise we return zero. */
3346
3347 static tree
3348 optimize_bit_field_compare (location_t loc, enum tree_code code,
3349 tree compare_type, tree lhs, tree rhs)
3350 {
3351 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3352 tree type = TREE_TYPE (lhs);
3353 tree signed_type, unsigned_type;
3354 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3355 enum machine_mode lmode, rmode, nmode;
3356 int lunsignedp, runsignedp;
3357 int lvolatilep = 0, rvolatilep = 0;
3358 tree linner, rinner = NULL_TREE;
3359 tree mask;
3360 tree offset;
3361
3362 /* Get all the information about the extractions being done. If the bit size
3363 is the same as the size of the underlying object, we aren't doing an
3364 extraction at all and so can do nothing. We also don't want to
3365 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3366 then will no longer be able to replace it. */
3367 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3368 &lunsignedp, &lvolatilep, false);
3369 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3370 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3371 return 0;
3372
3373 if (!const_p)
3374 {
3375 /* If this is not a constant, we can only do something if bit positions,
3376 sizes, and signedness are the same. */
3377 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3378 &runsignedp, &rvolatilep, false);
3379
3380 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3381 || lunsignedp != runsignedp || offset != 0
3382 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3383 return 0;
3384 }
3385
3386 /* See if we can find a mode to refer to this field. We should be able to,
3387 but fail if we can't. */
3388 if (lvolatilep
3389 && GET_MODE_BITSIZE (lmode) > 0
3390 && flag_strict_volatile_bitfields > 0)
3391 nmode = lmode;
3392 else
3393 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3394 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3395 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3396 TYPE_ALIGN (TREE_TYPE (rinner))),
3397 word_mode, lvolatilep || rvolatilep);
3398 if (nmode == VOIDmode)
3399 return 0;
3400
3401 /* Set signed and unsigned types of the precision of this mode for the
3402 shifts below. */
3403 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3404 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3405
3406 /* Compute the bit position and size for the new reference and our offset
3407 within it. If the new reference is the same size as the original, we
3408 won't optimize anything, so return zero. */
3409 nbitsize = GET_MODE_BITSIZE (nmode);
3410 nbitpos = lbitpos & ~ (nbitsize - 1);
3411 lbitpos -= nbitpos;
3412 if (nbitsize == lbitsize)
3413 return 0;
3414
3415 if (BYTES_BIG_ENDIAN)
3416 lbitpos = nbitsize - lbitsize - lbitpos;
3417
3418 /* Make the mask to be used against the extracted field. */
3419 mask = build_int_cst_type (unsigned_type, -1);
3420 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3421 mask = const_binop (RSHIFT_EXPR, mask,
3422 size_int (nbitsize - lbitsize - lbitpos));
3423
3424 if (! const_p)
3425 /* If not comparing with constant, just rework the comparison
3426 and return. */
3427 return fold_build2_loc (loc, code, compare_type,
3428 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3429 make_bit_field_ref (loc, linner,
3430 unsigned_type,
3431 nbitsize, nbitpos,
3432 1),
3433 mask),
3434 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3435 make_bit_field_ref (loc, rinner,
3436 unsigned_type,
3437 nbitsize, nbitpos,
3438 1),
3439 mask));
3440
3441 /* Otherwise, we are handling the constant case. See if the constant is too
3442 big for the field. Warn and return a tree for 0 (false) if so. We do
3443 this not only for its own sake, but to avoid having to test for this
3444 error case below. If we didn't, we might generate wrong code.
3445
3446 For unsigned fields, the constant shifted right by the field length should
3447 be all zero. For signed fields, the high-order bits should agree with
3448 the sign bit. */
3449
3450 if (lunsignedp)
3451 {
3452 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3453 fold_convert_loc (loc,
3454 unsigned_type, rhs),
3455 size_int (lbitsize))))
3456 {
3457 warning (0, "comparison is always %d due to width of bit-field",
3458 code == NE_EXPR);
3459 return constant_boolean_node (code == NE_EXPR, compare_type);
3460 }
3461 }
3462 else
3463 {
3464 tree tem = const_binop (RSHIFT_EXPR,
3465 fold_convert_loc (loc, signed_type, rhs),
3466 size_int (lbitsize - 1));
3467 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3468 {
3469 warning (0, "comparison is always %d due to width of bit-field",
3470 code == NE_EXPR);
3471 return constant_boolean_node (code == NE_EXPR, compare_type);
3472 }
3473 }
3474
3475 /* Single-bit compares should always be against zero. */
3476 if (lbitsize == 1 && ! integer_zerop (rhs))
3477 {
3478 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3479 rhs = build_int_cst (type, 0);
3480 }
3481
3482 /* Make a new bitfield reference, shift the constant over the
3483 appropriate number of bits and mask it with the computed mask
3484 (in case this was a signed field). If we changed it, make a new one. */
3485 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3486 if (lvolatilep)
3487 {
3488 TREE_SIDE_EFFECTS (lhs) = 1;
3489 TREE_THIS_VOLATILE (lhs) = 1;
3490 }
3491
3492 rhs = const_binop (BIT_AND_EXPR,
3493 const_binop (LSHIFT_EXPR,
3494 fold_convert_loc (loc, unsigned_type, rhs),
3495 size_int (lbitpos)),
3496 mask);
3497
3498 lhs = build2_loc (loc, code, compare_type,
3499 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3500 return lhs;
3501 }
3502 \f
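/* Worked example (editorial note): take a 3-bit field at bit position
   5 of a 32-bit word on a little-endian target, so nbitsize == 32,
   lbitsize == 3 and lbitpos == 5.  The mask construction above computes

     mask = (~0U << (32 - 3)) >> (32 - 3 - 5)  ==  0xE0

   i.e. bits 5..7, and a comparison "field == 2" becomes

     (word & 0xE0) == (2 << 5)

   with no shift of the loaded word at run time.  */
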
3503 /* Subroutine for fold_truthop: decode a field reference.
3504
3505 If EXP is a comparison reference, we return the innermost reference.
3506
3507 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3508 set to the starting bit number.
3509
3510 If the innermost field can be completely contained in a mode-sized
3511 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3512
3513 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3514 otherwise it is not changed.
3515
3516 *PUNSIGNEDP is set to the signedness of the field.
3517
3518 *PMASK is set to the mask used. This is either contained in a
3519 BIT_AND_EXPR or derived from the width of the field.
3520
3521 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3522
3523 Return 0 if this is not a component reference or is one that we can't
3524 do anything with. */
3525
3526 static tree
3527 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3528 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3529 int *punsignedp, int *pvolatilep,
3530 tree *pmask, tree *pand_mask)
3531 {
3532 tree outer_type = 0;
3533 tree and_mask = 0;
3534 tree mask, inner, offset;
3535 tree unsigned_type;
3536 unsigned int precision;
3537
3538 /* All the optimizations using this function assume integer fields.
3539 There are problems with FP fields since the type_for_size call
3540 below can fail for, e.g., XFmode. */
3541 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3542 return 0;
3543
3544 /* We are interested in the bare arrangement of bits, so strip everything
3545 that doesn't affect the machine mode. However, record the type of the
3546 outermost expression if it may matter below. */
3547 if (CONVERT_EXPR_P (exp)
3548 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3549 outer_type = TREE_TYPE (exp);
3550 STRIP_NOPS (exp);
3551
3552 if (TREE_CODE (exp) == BIT_AND_EXPR)
3553 {
3554 and_mask = TREE_OPERAND (exp, 1);
3555 exp = TREE_OPERAND (exp, 0);
3556 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3557 if (TREE_CODE (and_mask) != INTEGER_CST)
3558 return 0;
3559 }
3560
3561 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3562 punsignedp, pvolatilep, false);
3563 if ((inner == exp && and_mask == 0)
3564 || *pbitsize < 0 || offset != 0
3565 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3566 return 0;
3567
3568 /* If the number of bits in the reference is the same as the bitsize of
3569 the outer type, then the outer type gives the signedness. Otherwise
3570 (in case of a small bitfield) the signedness is unchanged. */
3571 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3572 *punsignedp = TYPE_UNSIGNED (outer_type);
3573
3574 /* Compute the mask to access the bitfield. */
3575 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3576 precision = TYPE_PRECISION (unsigned_type);
3577
3578 mask = build_int_cst_type (unsigned_type, -1);
3579
3580 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3581 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3582
3583 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3584 if (and_mask != 0)
3585 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3586 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3587
3588 *pmask = mask;
3589 *pand_mask = and_mask;
3590 return inner;
3591 }
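
/* For example, given the comparison operand (s.f & 3), where F is an
   8-bit bitfield of S, this returns the reference to S, sets
   *PBITSIZE to 8, and sets both *PMASK and *PAND_MASK to 3 (the
   BIT_AND_EXPR constant merged with the field's all-ones mask).
   Illustrative values only.  */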
3592
3593 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3594 bit positions. */
3595
3596 static int
3597 all_ones_mask_p (const_tree mask, int size)
3598 {
3599 tree type = TREE_TYPE (mask);
3600 unsigned int precision = TYPE_PRECISION (type);
3601 tree tmask;
3602
3603 tmask = build_int_cst_type (signed_type_for (type), -1);
3604
3605 return
3606 tree_int_cst_equal (mask,
3607 const_binop (RSHIFT_EXPR,
3608 const_binop (LSHIFT_EXPR, tmask,
3609 size_int (precision - size)),
3610 size_int (precision - size)));
3611 }
3612
3613 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3614 represents the sign bit of EXP's type. If EXP represents a sign
3615 or zero extension, also test VAL against the unextended type.
3616 The return value is the (sub)expression whose sign bit is VAL,
3617 or NULL_TREE otherwise. */
3618
3619 static tree
3620 sign_bit_p (tree exp, const_tree val)
3621 {
3622 unsigned HOST_WIDE_INT mask_lo, lo;
3623 HOST_WIDE_INT mask_hi, hi;
3624 int width;
3625 tree t;
3626
3627 /* Tree EXP must have an integral type. */
3628 t = TREE_TYPE (exp);
3629 if (! INTEGRAL_TYPE_P (t))
3630 return NULL_TREE;
3631
3632 /* Tree VAL must be an integer constant. */
3633 if (TREE_CODE (val) != INTEGER_CST
3634 || TREE_OVERFLOW (val))
3635 return NULL_TREE;
3636
3637 width = TYPE_PRECISION (t);
3638 if (width > HOST_BITS_PER_WIDE_INT)
3639 {
3640 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3641 lo = 0;
3642
3643 mask_hi = ((unsigned HOST_WIDE_INT) -1
3644 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3645 mask_lo = -1;
3646 }
3647 else
3648 {
3649 hi = 0;
3650 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3651
3652 mask_hi = 0;
3653 mask_lo = ((unsigned HOST_WIDE_INT) -1
3654 >> (HOST_BITS_PER_WIDE_INT - width));
3655 }
3656
3657 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3658 treat VAL as if it were unsigned. */
3659 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3660 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3661 return exp;
3662
3663 /* Handle extension from a narrower type. */
3664 if (TREE_CODE (exp) == NOP_EXPR
3665 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3666 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3667
3668 return NULL_TREE;
3669 }
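
/* For example, with EXP of 32-bit int type, sign_bit_p returns EXP
   when VAL is the constant 0x80000000: after masking away the bits
   beyond the type's precision, only the sign bit remains set.  */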
3670
3671 /* Subroutine for fold_truthop: determine if an operand is simple enough
3672 to be evaluated unconditionally. */
3673
3674 static int
3675 simple_operand_p (const_tree exp)
3676 {
3677 /* Strip any conversions that don't change the machine mode. */
3678 STRIP_NOPS (exp);
3679
3680 return (CONSTANT_CLASS_P (exp)
3681 || TREE_CODE (exp) == SSA_NAME
3682 || (DECL_P (exp)
3683 && ! TREE_ADDRESSABLE (exp)
3684 && ! TREE_THIS_VOLATILE (exp)
3685 && ! DECL_NONLOCAL (exp)
3686 /* Don't regard global variables as simple. They may be
3687 allocated in ways unknown to the compiler (shared memory,
3688 #pragma weak, etc). */
3689 && ! TREE_PUBLIC (exp)
3690 && ! DECL_EXTERNAL (exp)
3691 /* Loading a static variable is unduly expensive, but global
3692 registers aren't expensive. */
3693 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3694 }
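
/* For example, a constant, an SSA name, or a non-volatile,
   non-addressable local variable counts as simple in this sense; a
   global or a volatile object does not.  */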
3695 \f
3696 /* The following functions are subroutines to fold_range_test and allow it to
3697 try to change a logical combination of comparisons into a range test.
3698
3699 For example, both
3700 X == 2 || X == 3 || X == 4 || X == 5
3701 and
3702 X >= 2 && X <= 5
3703 are converted to
3704 (unsigned) (X - 2) <= 3
3705
3706 We describe each set of comparisons as being either inside or outside
3707 a range, using a variable named like IN_P, and then describe the
3708 range with a lower and upper bound. If one of the bounds is omitted,
3709 it represents either the highest or lowest value of the type.
3710
3711 In the comments below, we represent a range by two numbers in brackets
3712 preceded by a "+" to designate being inside that range, or a "-" to
3713 designate being outside that range, so the condition can be inverted by
3714 flipping the prefix. An omitted bound is represented by a "-". For
3715 example, "- [-, 10]" means being outside the range starting at the lowest
3716 possible value and ending at 10, in other words, being greater than 10.
3717 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3718 always false.
3719
3720 We set up things so that the missing bounds are handled in a consistent
3721 manner so neither a missing bound nor "true" and "false" need to be
3722 handled using a special case. */
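
/* As a small illustration of the transformation above -- a sketch for
   exposition only, not used by the folder.  It assumes the usual
   wrapping conversion from int to unsigned, which is what the
   generated range test relies on.  */

static inline int
range_test_example (int x)
{
  int by_cases = (x == 2 || x == 3 || x == 4 || x == 5);
  int by_range = (x >= 2 && x <= 5);
  int folded = ((unsigned) (x - 2) <= 3);

  /* All three forms agree for every value of X.  */
  return by_cases == by_range && by_range == folded;
}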
3723
3724 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3725 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3726 and UPPER1_P are nonzero if the respective argument is an upper bound
3727 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3728 must be specified for a comparison. ARG1 will be converted to ARG0's
3729 type if both are specified. */
3730
3731 static tree
3732 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3733 tree arg1, int upper1_p)
3734 {
3735 tree tem;
3736 int result;
3737 int sgn0, sgn1;
3738
3739 /* If neither arg represents infinity, do the normal operation.
3740 Else, if not a comparison, return infinity. Else handle the special
3741 comparison rules. Note that most of the cases below won't occur, but
3742 are handled for consistency. */
3743
3744 if (arg0 != 0 && arg1 != 0)
3745 {
3746 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3747 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3748 STRIP_NOPS (tem);
3749 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3750 }
3751
3752 if (TREE_CODE_CLASS (code) != tcc_comparison)
3753 return 0;
3754
3755 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3756 for neither. In real maths, we cannot assume open ended ranges are
3757 the same. But, this is computer arithmetic, where numbers are finite.
3758 We can therefore stand in for any missing bound with a value Z, Z
3759 being greater than any representable number. This permits
3760 us to treat unbounded ranges as equal. */
3761 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3762 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3763 switch (code)
3764 {
3765 case EQ_EXPR:
3766 result = sgn0 == sgn1;
3767 break;
3768 case NE_EXPR:
3769 result = sgn0 != sgn1;
3770 break;
3771 case LT_EXPR:
3772 result = sgn0 < sgn1;
3773 break;
3774 case LE_EXPR:
3775 result = sgn0 <= sgn1;
3776 break;
3777 case GT_EXPR:
3778 result = sgn0 > sgn1;
3779 break;
3780 case GE_EXPR:
3781 result = sgn0 >= sgn1;
3782 break;
3783 default:
3784 gcc_unreachable ();
3785 }
3786
3787 return constant_boolean_node (result, type);
3788 }
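
/* For example, range_binop (GT_EXPR, type, NULL_TREE, 1, c, 0) asks
   whether a missing upper bound exceeds the constant C; the answer is
   always true, the missing upper bound acting as +infinity.  */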
3789 \f
3790 /* Helper routine for make_range. Perform one step for it, return
3791 new expression if the loop should continue or NULL_TREE if it should
3792 stop. */
3793
3794 tree
3795 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3796 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3797 bool *strict_overflow_p)
3798 {
3799 tree arg0_type = TREE_TYPE (arg0);
3800 tree n_low, n_high, low = *p_low, high = *p_high;
3801 int in_p = *p_in_p, n_in_p;
3802
3803 switch (code)
3804 {
3805 case TRUTH_NOT_EXPR:
3806 *p_in_p = ! in_p;
3807 return arg0;
3808
3809 case EQ_EXPR: case NE_EXPR:
3810 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3811 /* We can only do something if the range is testing for zero
3812 and if the second operand is an integer constant. Note that
3813 saying something is "in" the range we make is done by
3814 complementing IN_P, since IN_P is initially set for the case of
3815 being not equal to zero; "out" is leaving it alone. */
3816 if (low == NULL_TREE || high == NULL_TREE
3817 || ! integer_zerop (low) || ! integer_zerop (high)
3818 || TREE_CODE (arg1) != INTEGER_CST)
3819 return NULL_TREE;
3820
3821 switch (code)
3822 {
3823 case NE_EXPR: /* - [c, c] */
3824 low = high = arg1;
3825 break;
3826 case EQ_EXPR: /* + [c, c] */
3827 in_p = ! in_p, low = high = arg1;
3828 break;
3829 case GT_EXPR: /* - [-, c] */
3830 low = 0, high = arg1;
3831 break;
3832 case GE_EXPR: /* + [c, -] */
3833 in_p = ! in_p, low = arg1, high = 0;
3834 break;
3835 case LT_EXPR: /* - [c, -] */
3836 low = arg1, high = 0;
3837 break;
3838 case LE_EXPR: /* + [-, c] */
3839 in_p = ! in_p, low = 0, high = arg1;
3840 break;
3841 default:
3842 gcc_unreachable ();
3843 }
3844
3845 /* If this is an unsigned comparison, we also know that EXP is
3846 greater than or equal to zero. We base the range tests we make
3847 on that fact, so we record it here so we can parse existing
3848 range tests. We test arg0_type since often the return type
3849 of, e.g. EQ_EXPR, is boolean. */
3850 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3851 {
3852 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3853 in_p, low, high, 1,
3854 build_int_cst (arg0_type, 0),
3855 NULL_TREE))
3856 return NULL_TREE;
3857
3858 in_p = n_in_p, low = n_low, high = n_high;
3859
3860 /* If the high bound is missing, but we have a nonzero low
3861 bound, reverse the range so it goes from zero to the low bound
3862 minus 1. */
3863 if (high == 0 && low && ! integer_zerop (low))
3864 {
3865 in_p = ! in_p;
3866 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3867 integer_one_node, 0);
3868 low = build_int_cst (arg0_type, 0);
3869 }
3870 }
3871
3872 *p_low = low;
3873 *p_high = high;
3874 *p_in_p = in_p;
3875 return arg0;
3876
3877 case NEGATE_EXPR:
3878 /* (-x) IN [a,b] -> x in [-b, -a] */
3879 n_low = range_binop (MINUS_EXPR, exp_type,
3880 build_int_cst (exp_type, 0),
3881 0, high, 1);
3882 n_high = range_binop (MINUS_EXPR, exp_type,
3883 build_int_cst (exp_type, 0),
3884 0, low, 0);
3885 if (n_high != 0 && TREE_OVERFLOW (n_high))
3886 return NULL_TREE;
3887 goto normalize;
3888
3889 case BIT_NOT_EXPR:
3890 /* ~ X -> -X - 1 */
3891 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3892 build_int_cst (exp_type, 1));
3893
3894 case PLUS_EXPR:
3895 case MINUS_EXPR:
3896 if (TREE_CODE (arg1) != INTEGER_CST)
3897 return NULL_TREE;
3898
3899 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3900 move a constant to the other side. */
3901 if (!TYPE_UNSIGNED (arg0_type)
3902 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3903 return NULL_TREE;
3904
3905 /* If EXP is signed, any overflow in the computation is undefined,
3906 so we don't worry about it so long as our computations on
3907 the bounds don't overflow. For unsigned, overflow is defined
3908 and this is exactly the right thing. */
3909 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3910 arg0_type, low, 0, arg1, 0);
3911 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3912 arg0_type, high, 1, arg1, 0);
3913 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3914 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3915 return NULL_TREE;
3916
3917 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3918 *strict_overflow_p = true;
3919
3920 normalize:
3921 /* Check for an unsigned range which has wrapped around the maximum
3922 value thus making n_high < n_low, and normalize it. */
3923 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3924 {
3925 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3926 integer_one_node, 0);
3927 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3928 integer_one_node, 0);
3929
3930 /* If the range is of the form +/- [ x+1, x ], we won't
3931 be able to normalize it. But then, it represents the
3932 whole range or the empty set, so make it
3933 +/- [ -, - ]. */
3934 if (tree_int_cst_equal (n_low, low)
3935 && tree_int_cst_equal (n_high, high))
3936 low = high = 0;
3937 else
3938 in_p = ! in_p;
3939 }
3940 else
3941 low = n_low, high = n_high;
3942
3943 *p_low = low;
3944 *p_high = high;
3945 *p_in_p = in_p;
3946 return arg0;
3947
3948 CASE_CONVERT:
3949 case NON_LVALUE_EXPR:
3950 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3951 return NULL_TREE;
3952
3953 if (! INTEGRAL_TYPE_P (arg0_type)
3954 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3955 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3956 return NULL_TREE;
3957
3958 n_low = low, n_high = high;
3959
3960 if (n_low != 0)
3961 n_low = fold_convert_loc (loc, arg0_type, n_low);
3962
3963 if (n_high != 0)
3964 n_high = fold_convert_loc (loc, arg0_type, n_high);
3965
3966 /* If we're converting arg0 from an unsigned type to exp's
3967 signed type, we will be doing the comparison as unsigned.
3968 The tests above have already verified that LOW and HIGH
3969 are both positive.
3970
3971 So we have to ensure that we will handle large unsigned
3972 values the same way that the current signed bounds treat
3973 negative values. */
3974
3975 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3976 {
3977 tree high_positive;
3978 tree equiv_type;
3979 /* For fixed-point modes, we need to pass the saturating flag
3980 as the 2nd parameter. */
3981 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
3982 equiv_type
3983 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
3984 TYPE_SATURATING (arg0_type));
3985 else
3986 equiv_type
3987 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
3988
3989 /* A range without an upper bound is, naturally, unbounded.
3990 Since convert would have cropped a very large value, use
3991 the max value for the destination type. */
3992 high_positive
3993 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3994 : TYPE_MAX_VALUE (arg0_type);
3995
3996 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3997 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
3998 fold_convert_loc (loc, arg0_type,
3999 high_positive),
4000 build_int_cst (arg0_type, 1));
4001
4002 /* If the low bound is specified, "and" the range with the
4003 range for which the original unsigned value will be
4004 positive. */
4005 if (low != 0)
4006 {
4007 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4008 1, fold_convert_loc (loc, arg0_type,
4009 integer_zero_node),
4010 high_positive))
4011 return NULL_TREE;
4012
4013 in_p = (n_in_p == in_p);
4014 }
4015 else
4016 {
4017 /* Otherwise, "or" the range with the range of the input
4018 that will be interpreted as negative. */
4019 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4020 1, fold_convert_loc (loc, arg0_type,
4021 integer_zero_node),
4022 high_positive))
4023 return NULL_TREE;
4024
4025 in_p = (in_p != n_in_p);
4026 }
4027 }
4028
4029 *p_low = n_low;
4030 *p_high = n_high;
4031 *p_in_p = in_p;
4032 return arg0;
4033
4034 default:
4035 return NULL_TREE;
4036 }
4037 }
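
/* For example, one step on code PLUS_EXPR, ARG0 = X, ARG1 = 10
   against the range + [0, 5] rewrites the range to + [-10, -5] and
   returns X: "X + 10 in [0, 5]" tests the same thing as "X in
   [-10, -5]", provided signed overflow is undefined (which the step
   records in *STRICT_OVERFLOW_P).  */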
4038
4039 /* Given EXP, a logical expression, set the range it is testing into
4040 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4041 actually being tested. *PLOW and *PHIGH will be made of the same
4042 type as the returned expression. If EXP is not a comparison, we
4043 will most likely not be returning a useful value and range. Set
4044 *STRICT_OVERFLOW_P to true if the return value is only valid
4045 because signed overflow is undefined; otherwise, do not change
4046 *STRICT_OVERFLOW_P. */
4047
4048 tree
4049 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4050 bool *strict_overflow_p)
4051 {
4052 enum tree_code code;
4053 tree arg0, arg1 = NULL_TREE;
4054 tree exp_type, nexp;
4055 int in_p;
4056 tree low, high;
4057 location_t loc = EXPR_LOCATION (exp);
4058
4059 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4060 and see if we can refine the range. Some of the handled cases may not
4061 occur, but it doesn't seem worth worrying about this. We keep
4062 iterating as long as make_range_step can refine the range; once it
4063 returns NULL_TREE we stop. */
4064
4065 in_p = 0;
4066 low = high = build_int_cst (TREE_TYPE (exp), 0);
4067
4068 while (1)
4069 {
4070 code = TREE_CODE (exp);
4071 exp_type = TREE_TYPE (exp);
4072 arg0 = NULL_TREE;
4073
4074 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4075 {
4076 if (TREE_OPERAND_LENGTH (exp) > 0)
4077 arg0 = TREE_OPERAND (exp, 0);
4078 if (TREE_CODE_CLASS (code) == tcc_binary
4079 || TREE_CODE_CLASS (code) == tcc_comparison
4080 || (TREE_CODE_CLASS (code) == tcc_expression
4081 && TREE_OPERAND_LENGTH (exp) > 1))
4082 arg1 = TREE_OPERAND (exp, 1);
4083 }
4084 if (arg0 == NULL_TREE)
4085 break;
4086
4087 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4088 &high, &in_p, strict_overflow_p);
4089 if (nexp == NULL_TREE)
4090 break;
4091 exp = nexp;
4092 }
4093
4094 /* If EXP is a constant, we can evaluate whether this is true or false. */
4095 if (TREE_CODE (exp) == INTEGER_CST)
4096 {
4097 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4098 exp, 0, low, 0))
4099 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4100 exp, 1, high, 1)));
4101 low = high = 0;
4102 exp = 0;
4103 }
4104
4105 *pin_p = in_p, *plow = low, *phigh = high;
4106 return exp;
4107 }
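
/* For example, for signed X the expression X > 10 yields the tree for
   X with *PIN_P == 0, *PLOW == NULL_TREE (no lower bound) and
   *PHIGH == 10, i.e. "X outside [-, 10]".  */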
4108 \f
4109 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4110 type, TYPE, return an expression to test if EXP is in (or out of, depending
4111 on IN_P) the range. Return 0 if the test couldn't be created. */
4112
4113 tree
4114 build_range_check (location_t loc, tree type, tree exp, int in_p,
4115 tree low, tree high)
4116 {
4117 tree etype = TREE_TYPE (exp), value;
4118
4119 #ifdef HAVE_canonicalize_funcptr_for_compare
4120 /* Disable this optimization for function pointer expressions
4121 on targets that require function pointer canonicalization. */
4122 if (HAVE_canonicalize_funcptr_for_compare
4123 && TREE_CODE (etype) == POINTER_TYPE
4124 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4125 return NULL_TREE;
4126 #endif
4127
4128 if (! in_p)
4129 {
4130 value = build_range_check (loc, type, exp, 1, low, high);
4131 if (value != 0)
4132 return invert_truthvalue_loc (loc, value);
4133
4134 return 0;
4135 }
4136
4137 if (low == 0 && high == 0)
4138 return build_int_cst (type, 1);
4139
4140 if (low == 0)
4141 return fold_build2_loc (loc, LE_EXPR, type, exp,
4142 fold_convert_loc (loc, etype, high));
4143
4144 if (high == 0)
4145 return fold_build2_loc (loc, GE_EXPR, type, exp,
4146 fold_convert_loc (loc, etype, low));
4147
4148 if (operand_equal_p (low, high, 0))
4149 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4150 fold_convert_loc (loc, etype, low));
4151
4152 if (integer_zerop (low))
4153 {
4154 if (! TYPE_UNSIGNED (etype))
4155 {
4156 etype = unsigned_type_for (etype);
4157 high = fold_convert_loc (loc, etype, high);
4158 exp = fold_convert_loc (loc, etype, exp);
4159 }
4160 return build_range_check (loc, type, exp, 1, 0, high);
4161 }
4162
4163 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4164 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4165 {
4166 unsigned HOST_WIDE_INT lo;
4167 HOST_WIDE_INT hi;
4168 int prec;
4169
4170 prec = TYPE_PRECISION (etype);
4171 if (prec <= HOST_BITS_PER_WIDE_INT)
4172 {
4173 hi = 0;
4174 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4175 }
4176 else
4177 {
4178 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4179 lo = (unsigned HOST_WIDE_INT) -1;
4180 }
4181
4182 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4183 {
4184 if (TYPE_UNSIGNED (etype))
4185 {
4186 tree signed_etype = signed_type_for (etype);
4187 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4188 etype
4189 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4190 else
4191 etype = signed_etype;
4192 exp = fold_convert_loc (loc, etype, exp);
4193 }
4194 return fold_build2_loc (loc, GT_EXPR, type, exp,
4195 build_int_cst (etype, 0));
4196 }
4197 }
4198
4199 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4200 This requires wrap-around arithmetic for the type of the expression.
4201 First make sure that arithmetic in this type is valid, then make sure
4202 that it wraps around. */
4203 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4204 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4205 TYPE_UNSIGNED (etype));
4206
4207 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4208 {
4209 tree utype, minv, maxv;
4210
4211 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4212 for the type in question, as we rely on this here. */
4213 utype = unsigned_type_for (etype);
4214 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4215 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4216 integer_one_node, 1);
4217 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4218
4219 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4220 minv, 1, maxv, 1)))
4221 etype = utype;
4222 else
4223 return 0;
4224 }
4225
4226 high = fold_convert_loc (loc, etype, high);
4227 low = fold_convert_loc (loc, etype, low);
4228 exp = fold_convert_loc (loc, etype, exp);
4229
4230 value = const_binop (MINUS_EXPR, high, low);
4231
4232
4233 if (POINTER_TYPE_P (etype))
4234 {
4235 if (value != 0 && !TREE_OVERFLOW (value))
4236 {
4237 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4238 return build_range_check (loc, type,
4239 fold_build_pointer_plus_loc (loc, exp, low),
4240 1, build_int_cst (etype, 0), value);
4241 }
4242 return 0;
4243 }
4244
4245 if (value != 0 && !TREE_OVERFLOW (value))
4246 return build_range_check (loc, type,
4247 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4248 1, build_int_cst (etype, 0), value);
4249
4250 return 0;
4251 }
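
/* For example, the check "C in + [-5, 5]" for signed char C is built
   as (unsigned char) (C + 5) <= 10: adding -LOW shifts the range to
   start at zero, after which a single unsigned comparison against
   HIGH - LOW suffices.  */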
4252 \f
4253 /* Return the predecessor of VAL in its type, handling the infinite case. */
4254
4255 static tree
4256 range_predecessor (tree val)
4257 {
4258 tree type = TREE_TYPE (val);
4259
4260 if (INTEGRAL_TYPE_P (type)
4261 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4262 return 0;
4263 else
4264 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4265 }
4266
4267 /* Return the successor of VAL in its type, handling the infinite case. */
4268
4269 static tree
4270 range_successor (tree val)
4271 {
4272 tree type = TREE_TYPE (val);
4273
4274 if (INTEGRAL_TYPE_P (type)
4275 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4276 return 0;
4277 else
4278 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4279 }
4280
4281 /* Given two ranges, see if we can merge them into one. Return 1 if we
4282 can, 0 if we can't. Set the output range into the specified parameters. */
4283
4284 bool
4285 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4286 tree high0, int in1_p, tree low1, tree high1)
4287 {
4288 int no_overlap;
4289 int subset;
4290 int temp;
4291 tree tem;
4292 int in_p;
4293 tree low, high;
4294 int lowequal = ((low0 == 0 && low1 == 0)
4295 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4296 low0, 0, low1, 0)));
4297 int highequal = ((high0 == 0 && high1 == 0)
4298 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4299 high0, 1, high1, 1)));
4300
4301 /* Make range 0 be the range that starts first, or ends last if they
4302 start at the same value. Swap them if it isn't. */
4303 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4304 low0, 0, low1, 0))
4305 || (lowequal
4306 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4307 high1, 1, high0, 1))))
4308 {
4309 temp = in0_p, in0_p = in1_p, in1_p = temp;
4310 tem = low0, low0 = low1, low1 = tem;
4311 tem = high0, high0 = high1, high1 = tem;
4312 }
4313
4314 /* Now flag two cases, whether the ranges are disjoint or whether the
4315 second range is totally subsumed in the first. Note that the tests
4316 below are simplified by the ones above. */
4317 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4318 high0, 1, low1, 0));
4319 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4320 high1, 1, high0, 1));
4321
4322 /* We now have four cases, depending on whether we are including or
4323 excluding the two ranges. */
4324 if (in0_p && in1_p)
4325 {
4326 /* If they don't overlap, the result is false. If the second range
4327 is a subset it is the result. Otherwise, the range is from the start
4328 of the second to the end of the first. */
4329 if (no_overlap)
4330 in_p = 0, low = high = 0;
4331 else if (subset)
4332 in_p = 1, low = low1, high = high1;
4333 else
4334 in_p = 1, low = low1, high = high0;
4335 }
4336
4337 else if (in0_p && ! in1_p)
4338 {
4339 /* If they don't overlap, the result is the first range. If they are
4340 equal, the result is false. If the second range is a subset of the
4341 first, and the ranges begin at the same place, we go from just after
4342 the end of the second range to the end of the first. If the second
4343 range is not a subset of the first, or if it is a subset and both
4344 ranges end at the same place, the range starts at the start of the
4345 first range and ends just before the second range.
4346 Otherwise, we can't describe this as a single range. */
4347 if (no_overlap)
4348 in_p = 1, low = low0, high = high0;
4349 else if (lowequal && highequal)
4350 in_p = 0, low = high = 0;
4351 else if (subset && lowequal)
4352 {
4353 low = range_successor (high1);
4354 high = high0;
4355 in_p = 1;
4356 if (low == 0)
4357 {
4358 /* We are in the weird situation where high0 > high1 but
4359 high1 has no successor. Punt. */
4360 return 0;
4361 }
4362 }
4363 else if (! subset || highequal)
4364 {
4365 low = low0;
4366 high = range_predecessor (low1);
4367 in_p = 1;
4368 if (high == 0)
4369 {
4370 /* low0 < low1 but low1 has no predecessor. Punt. */
4371 return 0;
4372 }
4373 }
4374 else
4375 return 0;
4376 }
4377
4378 else if (! in0_p && in1_p)
4379 {
4380 /* If they don't overlap, the result is the second range. If the second
4381 is a subset of the first, the result is false. Otherwise,
4382 the range starts just after the first range and ends at the
4383 end of the second. */
4384 if (no_overlap)
4385 in_p = 1, low = low1, high = high1;
4386 else if (subset || highequal)
4387 in_p = 0, low = high = 0;
4388 else
4389 {
4390 low = range_successor (high0);
4391 high = high1;
4392 in_p = 1;
4393 if (low == 0)
4394 {
4395 /* high1 > high0 but high0 has no successor. Punt. */
4396 return 0;
4397 }
4398 }
4399 }
4400
4401 else
4402 {
4403 /* The case where we are excluding both ranges. Here the complex case
4404 is if they don't overlap. In that case, the only time we have a
4405 range is if they are adjacent. If the second is a subset of the
4406 first, the result is the first. Otherwise, the range to exclude
4407 starts at the beginning of the first range and ends at the end of the
4408 second. */
4409 if (no_overlap)
4410 {
4411 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4412 range_successor (high0),
4413 1, low1, 0)))
4414 in_p = 0, low = low0, high = high1;
4415 else
4416 {
4417 /* Canonicalize - [min, x] into - [-, x]. */
4418 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4419 switch (TREE_CODE (TREE_TYPE (low0)))
4420 {
4421 case ENUMERAL_TYPE:
4422 if (TYPE_PRECISION (TREE_TYPE (low0))
4423 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4424 break;
4425 /* FALLTHROUGH */
4426 case INTEGER_TYPE:
4427 if (tree_int_cst_equal (low0,
4428 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4429 low0 = 0;
4430 break;
4431 case POINTER_TYPE:
4432 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4433 && integer_zerop (low0))
4434 low0 = 0;
4435 break;
4436 default:
4437 break;
4438 }
4439
4440 /* Canonicalize - [x, max] into - [x, -]. */
4441 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4442 switch (TREE_CODE (TREE_TYPE (high1)))
4443 {
4444 case ENUMERAL_TYPE:
4445 if (TYPE_PRECISION (TREE_TYPE (high1))
4446 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4447 break;
4448 /* FALLTHROUGH */
4449 case INTEGER_TYPE:
4450 if (tree_int_cst_equal (high1,
4451 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4452 high1 = 0;
4453 break;
4454 case POINTER_TYPE:
4455 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4456 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4457 high1, 1,
4458 integer_one_node, 1)))
4459 high1 = 0;
4460 break;
4461 default:
4462 break;
4463 }
4464
4465 /* The ranges might also be adjacent between the maximum and
4466 minimum values of the given type. For
4467 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4468 return + [x + 1, y - 1]. */
4469 if (low0 == 0 && high1 == 0)
4470 {
4471 low = range_successor (high0);
4472 high = range_predecessor (low1);
4473 if (low == 0 || high == 0)
4474 return 0;
4475
4476 in_p = 1;
4477 }
4478 else
4479 return 0;
4480 }
4481 }
4482 else if (subset)
4483 in_p = 0, low = low0, high = high0;
4484 else
4485 in_p = 0, low = low0, high = high1;
4486 }
4487
4488 *pin_p = in_p, *plow = low, *phigh = high;
4489 return 1;
4490 }
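
/* For example, merging + [2, 9] and + [4, 5] (both "in") yields the
   intersection + [4, 5]; merging + [2, 5] and + [4, 9] also yields
   + [4, 5], the range running from the later start to the earlier
   end.  */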
4491 \f
4492
4493 /* Subroutine of fold, looking inside expressions of the form
4494 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4495 of the COND_EXPR. This function is being used also to optimize
4496 A op B ? C : A, by reversing the comparison first.
4497
4498 Return a folded expression whose code is not a COND_EXPR
4499 anymore, or NULL_TREE if no folding opportunity is found. */
4500
4501 static tree
4502 fold_cond_expr_with_comparison (location_t loc, tree type,
4503 tree arg0, tree arg1, tree arg2)
4504 {
4505 enum tree_code comp_code = TREE_CODE (arg0);
4506 tree arg00 = TREE_OPERAND (arg0, 0);
4507 tree arg01 = TREE_OPERAND (arg0, 1);
4508 tree arg1_type = TREE_TYPE (arg1);
4509 tree tem;
4510
4511 STRIP_NOPS (arg1);
4512 STRIP_NOPS (arg2);
4513
4514 /* If we have A op 0 ? A : -A, consider applying the following
4515 transformations:
4516
4517 A == 0? A : -A same as -A
4518 A != 0? A : -A same as A
4519 A >= 0? A : -A same as abs (A)
4520 A > 0? A : -A same as abs (A)
4521 A <= 0? A : -A same as -abs (A)
4522 A < 0? A : -A same as -abs (A)
4523
4524 None of these transformations work for modes with signed
4525 zeros. If A is +/-0, the first two transformations will
4526 change the sign of the result (from +0 to -0, or vice
4527 versa). The last four will fix the sign of the result,
4528 even though the original expressions could be positive or
4529 negative, depending on the sign of A.
4530
4531 Note that all these transformations are correct if A is
4532 NaN, since the two alternatives (A and -A) are also NaNs. */
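/* For instance, if A is -0.0 then "A == 0 ? A : -A" evaluates to
   -0.0, while the replacement -A evaluates to +0.0; hence the
   HONOR_SIGNED_ZEROS guard below.  */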
4533 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4534 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4535 ? real_zerop (arg01)
4536 : integer_zerop (arg01))
4537 && ((TREE_CODE (arg2) == NEGATE_EXPR
4538 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4539 /* In the case that A is of the form X-Y, '-A' (arg2) may
4540 have already been folded to Y-X; check for that. */
4541 || (TREE_CODE (arg1) == MINUS_EXPR
4542 && TREE_CODE (arg2) == MINUS_EXPR
4543 && operand_equal_p (TREE_OPERAND (arg1, 0),
4544 TREE_OPERAND (arg2, 1), 0)
4545 && operand_equal_p (TREE_OPERAND (arg1, 1),
4546 TREE_OPERAND (arg2, 0), 0))))
4547 switch (comp_code)
4548 {
4549 case EQ_EXPR:
4550 case UNEQ_EXPR:
4551 tem = fold_convert_loc (loc, arg1_type, arg1);
4552 return pedantic_non_lvalue_loc (loc,
4553 fold_convert_loc (loc, type,
4554 negate_expr (tem)));
4555 case NE_EXPR:
4556 case LTGT_EXPR:
4557 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4558 case UNGE_EXPR:
4559 case UNGT_EXPR:
4560 if (flag_trapping_math)
4561 break;
4562 /* Fall through. */
4563 case GE_EXPR:
4564 case GT_EXPR:
4565 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4566 arg1 = fold_convert_loc (loc, signed_type_for
4567 (TREE_TYPE (arg1)), arg1);
4568 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4569 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4570 case UNLE_EXPR:
4571 case UNLT_EXPR:
4572 if (flag_trapping_math)
4573 break;
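/* Fall through. */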
4574 case LE_EXPR:
4575 case LT_EXPR:
4576 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4577 arg1 = fold_convert_loc (loc, signed_type_for
4578 (TREE_TYPE (arg1)), arg1);
4579 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4580 return negate_expr (fold_convert_loc (loc, type, tem));
4581 default:
4582 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4583 break;
4584 }
4585
4586 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4587 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4588 both transformations are correct when A is NaN: A != 0
4589 is then true, and A == 0 is false. */
4590
4591 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4592 && integer_zerop (arg01) && integer_zerop (arg2))
4593 {
4594 if (comp_code == NE_EXPR)
4595 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4596 else if (comp_code == EQ_EXPR)
4597 return build_int_cst (type, 0);
4598 }
4599
4600 /* Try some transformations of A op B ? A : B.
4601
4602 A == B? A : B same as B
4603 A != B? A : B same as A
4604 A >= B? A : B same as max (A, B)
4605 A > B? A : B same as max (B, A)
4606 A <= B? A : B same as min (A, B)
4607 A < B? A : B same as min (B, A)
4608
4609 As above, these transformations don't work in the presence
4610 of signed zeros. For example, if A and B are zeros of
4611 opposite sign, the first two transformations will change
4612 the sign of the result. In the last four, the original
4613 expressions give different results for (A=+0, B=-0) and
4614 (A=-0, B=+0), but the transformed expressions do not.
4615
4616 The first two transformations are correct if either A or B
4617 is a NaN. In the first transformation, the condition will
4618 be false, and B will indeed be chosen. In the case of the
4619 second transformation, the condition A != B will be true,
4620 and A will be chosen.
4621
4622 The conversions to max() and min() are not correct if B is
4623 a number and A is not. The conditions in the original
4624 expressions will be false, so all four give B. The min()
4625 and max() versions would give a NaN instead. */
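/* For instance, with A = NaN and B = 1.0, "A < B ? A : B" picks B
   because the comparison is false, whereas MIN_EXPR (B, A) could
   yield the NaN.  */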
4626 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4627 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4628 /* Avoid these transformations if the COND_EXPR may be used
4629 as an lvalue in the C++ front-end. PR c++/19199. */
4630 && (in_gimple_form
4631 || (strcmp (lang_hooks.name, "GNU C++") != 0
4632 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4633 || ! maybe_lvalue_p (arg1)
4634 || ! maybe_lvalue_p (arg2)))
4635 {
4636 tree comp_op0 = arg00;
4637 tree comp_op1 = arg01;
4638 tree comp_type = TREE_TYPE (comp_op0);
4639
4640 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4641 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4642 {
4643 comp_type = type;
4644 comp_op0 = arg1;
4645 comp_op1 = arg2;
4646 }
4647
4648 switch (comp_code)
4649 {
4650 case EQ_EXPR:
4651 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4652 case NE_EXPR:
4653 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4654 case LE_EXPR:
4655 case LT_EXPR:
4656 case UNLE_EXPR:
4657 case UNLT_EXPR:
4658 /* In C++ a ?: expression can be an lvalue, so put the
4659 operand which will be used if they are equal first
4660 so that we can convert this back to the
4661 corresponding COND_EXPR. */
4662 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4663 {
4664 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4665 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4666 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4667 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4668 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4669 comp_op1, comp_op0);
4670 return pedantic_non_lvalue_loc (loc,
4671 fold_convert_loc (loc, type, tem));
4672 }
4673 break;
4674 case GE_EXPR:
4675 case GT_EXPR:
4676 case UNGE_EXPR:
4677 case UNGT_EXPR:
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4679 {
4680 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4681 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4682 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4683 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4684 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4685 comp_op1, comp_op0);
4686 return pedantic_non_lvalue_loc (loc,
4687 fold_convert_loc (loc, type, tem));
4688 }
4689 break;
4690 case UNEQ_EXPR:
4691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4692 return pedantic_non_lvalue_loc (loc,
4693 fold_convert_loc (loc, type, arg2));
4694 break;
4695 case LTGT_EXPR:
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4697 return pedantic_non_lvalue_loc (loc,
4698 fold_convert_loc (loc, type, arg1));
4699 break;
4700 default:
4701 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4702 break;
4703 }
4704 }
4705
4706 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4707 we might still be able to simplify this. For example,
4708 if C1 is one less or one more than C2, this might have started
4709 out as a MIN or MAX and been transformed by this function.
4710 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4711
4712 if (INTEGRAL_TYPE_P (type)
4713 && TREE_CODE (arg01) == INTEGER_CST
4714 && TREE_CODE (arg2) == INTEGER_CST)
4715 switch (comp_code)
4716 {
4717 case EQ_EXPR:
4718 if (TREE_CODE (arg1) == INTEGER_CST)
4719 break;
4720 /* We can replace A with C1 in this case. */
4721 arg1 = fold_convert_loc (loc, type, arg01);
4722 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4723
4724 case LT_EXPR:
4725 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4726 MIN_EXPR, to preserve the signedness of the comparison. */
4727 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4728 OEP_ONLY_CONST)
4729 && operand_equal_p (arg01,
4730 const_binop (PLUS_EXPR, arg2,
4731 build_int_cst (type, 1)),
4732 OEP_ONLY_CONST))
4733 {
4734 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4735 fold_convert_loc (loc, TREE_TYPE (arg00),
4736 arg2));
4737 return pedantic_non_lvalue_loc (loc,
4738 fold_convert_loc (loc, type, tem));
4739 }
4740 break;
4741
4742 case LE_EXPR:
4743 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4744 as above. */
4745 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4746 OEP_ONLY_CONST)
4747 && operand_equal_p (arg01,
4748 const_binop (MINUS_EXPR, arg2,
4749 build_int_cst (type, 1)),
4750 OEP_ONLY_CONST))
4751 {
4752 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4753 fold_convert_loc (loc, TREE_TYPE (arg00),
4754 arg2));
4755 return pedantic_non_lvalue_loc (loc,
4756 fold_convert_loc (loc, type, tem));
4757 }
4758 break;
4759
4760 case GT_EXPR:
4761 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4762 MAX_EXPR, to preserve the signedness of the comparison. */
4763 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4764 OEP_ONLY_CONST)
4765 && operand_equal_p (arg01,
4766 const_binop (MINUS_EXPR, arg2,
4767 build_int_cst (type, 1)),
4768 OEP_ONLY_CONST))
4769 {
4770 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4771 fold_convert_loc (loc, TREE_TYPE (arg00),
4772 arg2));
4773 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4774 }
4775 break;
4776
4777 case GE_EXPR:
4778 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4779 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4780 OEP_ONLY_CONST)
4781 && operand_equal_p (arg01,
4782 const_binop (PLUS_EXPR, arg2,
4783 build_int_cst (type, 1)),
4784 OEP_ONLY_CONST))
4785 {
4786 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4787 fold_convert_loc (loc, TREE_TYPE (arg00),
4788 arg2));
4789 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4790 }
4791 break;
4792 case NE_EXPR:
4793 break;
4794 default:
4795 gcc_unreachable ();
4796 }
4797
4798 return NULL_TREE;
4799 }
4800
4801
4802 \f
4803 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4804 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4805 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4806 false) >= 2)
4807 #endif
4808
4809 /* EXP is some logical combination of boolean tests. See if we can
4810 merge it into some range test. Return the new tree if so. */
4811
4812 static tree
4813 fold_range_test (location_t loc, enum tree_code code, tree type,
4814 tree op0, tree op1)
4815 {
4816 int or_op = (code == TRUTH_ORIF_EXPR
4817 || code == TRUTH_OR_EXPR);
4818 int in0_p, in1_p, in_p;
4819 tree low0, low1, low, high0, high1, high;
4820 bool strict_overflow_p = false;
4821 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4822 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4823 tree tem;
4824 const char * const warnmsg = G_("assuming signed overflow does not occur "
4825 "when simplifying range test");
4826
4827 /* If this is an OR operation, invert both sides; we will invert
4828 again at the end. */
4829 if (or_op)
4830 in0_p = ! in0_p, in1_p = ! in1_p;
4831
4832 /* If both expressions are the same, if we can merge the ranges, and we
4833 can build the range test, return it or it inverted. If one of the
4834 ranges is always true or always false, consider it to be the same
4835 expression as the other. */
4836 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4837 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4838 in1_p, low1, high1)
4839 && 0 != (tem = (build_range_check (loc, type,
4840 lhs != 0 ? lhs
4841 : rhs != 0 ? rhs : integer_zero_node,
4842 in_p, low, high))))
4843 {
4844 if (strict_overflow_p)
4845 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4846 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4847 }
4848
4849 /* On machines where the branch cost is expensive, if this is a
4850 short-circuited branch and the underlying object on both sides
4851 is the same, make a non-short-circuit operation. */
4852 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4853 && lhs != 0 && rhs != 0
4854 && (code == TRUTH_ANDIF_EXPR
4855 || code == TRUTH_ORIF_EXPR)
4856 && operand_equal_p (lhs, rhs, 0))
4857 {
4858 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4859 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4860 which cases we can't do this. */
4861 if (simple_operand_p (lhs))
4862 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4863 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4864 type, op0, op1);
4865
4866 else if (!lang_hooks.decls.global_bindings_p ()
4867 && !CONTAINS_PLACEHOLDER_P (lhs))
4868 {
4869 tree common = save_expr (lhs);
4870
4871 if (0 != (lhs = build_range_check (loc, type, common,
4872 or_op ? ! in0_p : in0_p,
4873 low0, high0))
4874 && (0 != (rhs = build_range_check (loc, type, common,
4875 or_op ? ! in1_p : in1_p,
4876 low1, high1))))
4877 {
4878 if (strict_overflow_p)
4879 fold_overflow_warning (warnmsg,
4880 WARN_STRICT_OVERFLOW_COMPARISON);
4881 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4882 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4883 type, lhs, rhs);
4884 }
4885 }
4886 }
4887
4888 return 0;
4889 }
4890 \f
4891 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4892 bit value. Arrange things so the extra bits will be set to zero if and
4893 only if C is signed-extended to its full width. If MASK is nonzero,
4894 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4895
4896 static tree
4897 unextend (tree c, int p, int unsignedp, tree mask)
4898 {
4899 tree type = TREE_TYPE (c);
4900 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4901 tree temp;
4902
4903 if (p == modesize || unsignedp)
4904 return c;
4905
4906 /* We work by getting just the sign bit into the low-order bit, then
4907 into the high-order bit, then sign-extend. We then XOR that value
4908 with C. */
4909 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4910 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4911
4912 /* We must use a signed type in order to get an arithmetic right shift.
4913 However, we must also avoid introducing accidental overflows, so that
4914 a subsequent call to integer_zerop will work. Hence we must
4915 do the type conversion here. At this point, the constant is either
4916 zero or one, and the conversion to a signed type can never overflow.
4917 We could get an overflow if this conversion is done anywhere else. */
4918 if (TYPE_UNSIGNED (type))
4919 temp = fold_convert (signed_type_for (type), temp);
4920
4921 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4922 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4923 if (mask != 0)
4924 temp = const_binop (BIT_AND_EXPR, temp,
4925 fold_convert (TREE_TYPE (c), mask));
4926 /* If necessary, convert the type back to match the type of C. */
4927 if (TYPE_UNSIGNED (type))
4928 temp = fold_convert (type, temp);
4929
4930 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4931 }
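
/* A minimal sketch of the same bit trick on host ints -- illustrative
   only, assuming two's complement, an arithmetic right shift on
   signed int, MODESIZE == 32, 0 < P < 32, and no MASK step.  */

static inline int
unextend_example (int c, int p)
{
  int temp = (c >> (p - 1)) & 1;     /* Sign bit of the P-bit field.  */
  temp = (temp << 31) >> (31 - p);   /* Replicate into bits P..31.  */
  return c ^ temp;                   /* Extra bits are now zero iff C
                                        was sign-extended.  */
}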
4932 \f
4933 /* For an expression that has the form
4934 (A && B) || ~B
4935 or
4936 (A || B) && ~B,
4937 we can drop one of the inner expressions and simplify to
4938 A || ~B
4939 or
4940 A && ~B
4941 LOC is the location of the resulting expression. OP is the inner
4942 logical operation; the left-hand side in the examples above, while CMPOP
4943 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4944 removing a condition that guards another, as in
4945 (A != NULL && A->...) || A == NULL
4946 which we must not transform. If RHS_ONLY is true, only eliminate the
4947 right-most operand of the inner logical operation. */
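
/* For example, with integer operands and RHS_ONLY false,
     (a > 0 && b > 0) || b <= 0
   simplifies to
     a > 0 || b <= 0
   since b > 0 is exactly the inverse of the guard b <= 0.  */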
4948
4949 static tree
4950 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4951 bool rhs_only)
4952 {
4953 tree type = TREE_TYPE (cmpop);
4954 enum tree_code code = TREE_CODE (cmpop);
4955 enum tree_code truthop_code = TREE_CODE (op);
4956 tree lhs = TREE_OPERAND (op, 0);
4957 tree rhs = TREE_OPERAND (op, 1);
4958 tree orig_lhs = lhs, orig_rhs = rhs;
4959 enum tree_code rhs_code = TREE_CODE (rhs);
4960 enum tree_code lhs_code = TREE_CODE (lhs);
4961 enum tree_code inv_code;
4962
4963 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4964 return NULL_TREE;
4965
4966 if (TREE_CODE_CLASS (code) != tcc_comparison)
4967 return NULL_TREE;
4968
4969 if (rhs_code == truthop_code)
4970 {
4971 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4972 if (newrhs != NULL_TREE)
4973 {
4974 rhs = newrhs;
4975 rhs_code = TREE_CODE (rhs);
4976 }
4977 }
4978 if (lhs_code == truthop_code && !rhs_only)
4979 {
4980 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
4981 if (newlhs != NULL_TREE)
4982 {
4983 lhs = newlhs;
4984 lhs_code = TREE_CODE (lhs);
4985 }
4986 }
4987
4988 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
4989 if (inv_code == rhs_code
4990 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
4991 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
4992 return lhs;
4993 if (!rhs_only && inv_code == lhs_code
4994 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
4995 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
4996 return rhs;
4997 if (rhs != orig_rhs || lhs != orig_lhs)
4998 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
4999 lhs, rhs);
5000 return NULL_TREE;
5001 }
5002
5003 /* Find ways of folding logical expressions of LHS and RHS:
5004 Try to merge two comparisons to the same innermost item.
5005 Look for range tests like "ch >= '0' && ch <= '9'".
5006 Look for combinations of simple terms on machines with expensive branches
5007 and evaluate the RHS unconditionally.
5008
5009 For example, if we have p->a == 2 && p->b == 4 and we can make an
5010 object large enough to span both A and B, we can do this with a comparison
5011 against the object ANDed with the a mask.
5012
5013 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5014 operations to do this with one comparison.
5015
5016 We check for both normal comparisons and the BIT_AND_EXPRs made by
5017 this function and the one above.
5018
5019 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5020 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5021
5022 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5023 two operands.
5024
5025 We return the simplified tree or 0 if no optimization is possible. */
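
/* For example, on a little-endian target where the two fields end up
   adjacent in one addressable unit,
     struct S { unsigned a : 8; unsigned b : 8; } *p;
   the test p->a == 2 && p->b == 4 may be folded into a single 16-bit
   load compared against (4 << 8) | 2.  Illustrative layout only; the
   actual mode, masks and constants come from get_best_mode and
   decode_field_reference.  */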
5026
5027 static tree
5028 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5029 tree lhs, tree rhs)
5030 {
5031 /* If this is the "or" of two comparisons, we can do something if
5032 the comparisons are NE_EXPR. If this is the "and", we can do something
5033 if the comparisons are EQ_EXPR. I.e.,
5034 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5035
5036 WANTED_CODE is this operation code. For single bit fields, we can
5037 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5038 comparison for one-bit fields. */
5039
5040 enum tree_code wanted_code;
5041 enum tree_code lcode, rcode;
5042 tree ll_arg, lr_arg, rl_arg, rr_arg;
5043 tree ll_inner, lr_inner, rl_inner, rr_inner;
5044 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5045 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5046 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5047 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5048 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5049 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5050 enum machine_mode lnmode, rnmode;
5051 tree ll_mask, lr_mask, rl_mask, rr_mask;
5052 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5053 tree l_const, r_const;
5054 tree lntype, rntype, result;
5055 HOST_WIDE_INT first_bit, end_bit;
5056 int volatilep;
5057 tree orig_lhs = lhs, orig_rhs = rhs;
5058 enum tree_code orig_code = code;
5059
5060 /* Start by getting the comparison codes. Fail if anything is volatile.
5061 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5062 it were surrounded with a NE_EXPR. */
5063
5064 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5065 return 0;
5066
5067 lcode = TREE_CODE (lhs);
5068 rcode = TREE_CODE (rhs);
5069
5070 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5071 {
5072 lhs = build2 (NE_EXPR, truth_type, lhs,
5073 build_int_cst (TREE_TYPE (lhs), 0));
5074 lcode = NE_EXPR;
5075 }
5076
5077 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5078 {
5079 rhs = build2 (NE_EXPR, truth_type, rhs,
5080 build_int_cst (TREE_TYPE (rhs), 0));
5081 rcode = NE_EXPR;
5082 }
5083
5084 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5085 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5086 return 0;
5087
5088 ll_arg = TREE_OPERAND (lhs, 0);
5089 lr_arg = TREE_OPERAND (lhs, 1);
5090 rl_arg = TREE_OPERAND (rhs, 0);
5091 rr_arg = TREE_OPERAND (rhs, 1);
5092
5093 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5094 if (simple_operand_p (ll_arg)
5095 && simple_operand_p (lr_arg))
5096 {
5097 if (operand_equal_p (ll_arg, rl_arg, 0)
5098 && operand_equal_p (lr_arg, rr_arg, 0))
5099 {
5100 result = combine_comparisons (loc, code, lcode, rcode,
5101 truth_type, ll_arg, lr_arg);
5102 if (result)
5103 return result;
5104 }
5105 else if (operand_equal_p (ll_arg, rr_arg, 0)
5106 && operand_equal_p (lr_arg, rl_arg, 0))
5107 {
5108 result = combine_comparisons (loc, code, lcode,
5109 swap_tree_comparison (rcode),
5110 truth_type, ll_arg, lr_arg);
5111 if (result)
5112 return result;
5113 }
5114 }
5115
5116 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5117 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5118
5119 /* If the RHS can be evaluated unconditionally and its operands are
5120 simple, it wins to evaluate the RHS unconditionally on machines
5121 with expensive branches. In this case, this isn't a comparison
5122 that can be merged. Avoid doing this if the RHS is a floating-point
5123 comparison since those can trap. */
5124
5125 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5126 false) >= 2
5127 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5128 && simple_operand_p (rl_arg)
5129 && simple_operand_p (rr_arg))
5130 {
5131 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5132 if (code == TRUTH_OR_EXPR
5133 && lcode == NE_EXPR && integer_zerop (lr_arg)
5134 && rcode == NE_EXPR && integer_zerop (rr_arg)
5135 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5136 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5137 return build2_loc (loc, NE_EXPR, truth_type,
5138 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5139 ll_arg, rl_arg),
5140 build_int_cst (TREE_TYPE (ll_arg), 0));
5141
5142 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5143 if (code == TRUTH_AND_EXPR
5144 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5145 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5146 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5147 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5148 return build2_loc (loc, EQ_EXPR, truth_type,
5149 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5150 ll_arg, rl_arg),
5151 build_int_cst (TREE_TYPE (ll_arg), 0));
5152
5153 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5154 {
5155 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5156 return build2_loc (loc, code, truth_type, lhs, rhs);
5157 return NULL_TREE;
5158 }
5159 }
5160
5161 /* See if the comparisons can be merged. Then get all the parameters for
5162 each side. */
5163
5164 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5165 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5166 return 0;
5167
5168 volatilep = 0;
5169 ll_inner = decode_field_reference (loc, ll_arg,
5170 &ll_bitsize, &ll_bitpos, &ll_mode,
5171 &ll_unsignedp, &volatilep, &ll_mask,
5172 &ll_and_mask);
5173 lr_inner = decode_field_reference (loc, lr_arg,
5174 &lr_bitsize, &lr_bitpos, &lr_mode,
5175 &lr_unsignedp, &volatilep, &lr_mask,
5176 &lr_and_mask);
5177 rl_inner = decode_field_reference (loc, rl_arg,
5178 &rl_bitsize, &rl_bitpos, &rl_mode,
5179 &rl_unsignedp, &volatilep, &rl_mask,
5180 &rl_and_mask);
5181 rr_inner = decode_field_reference (loc, rr_arg,
5182 &rr_bitsize, &rr_bitpos, &rr_mode,
5183 &rr_unsignedp, &volatilep, &rr_mask,
5184 &rr_and_mask);
5185
5186 /* The inner operation on the lhs of each comparison must be the
5187 same if we are to be able to do anything.
5188 Then see if we have constants. If not, the same must be true for
5189 the rhs's. */
5190 if (volatilep || ll_inner == 0 || rl_inner == 0
5191 || ! operand_equal_p (ll_inner, rl_inner, 0))
5192 return 0;
5193
5194 if (TREE_CODE (lr_arg) == INTEGER_CST
5195 && TREE_CODE (rr_arg) == INTEGER_CST)
5196 l_const = lr_arg, r_const = rr_arg;
5197 else if (lr_inner == 0 || rr_inner == 0
5198 || ! operand_equal_p (lr_inner, rr_inner, 0))
5199 return 0;
5200 else
5201 l_const = r_const = 0;
5202
5203 /* If either comparison code is not correct for our logical operation,
5204 fail. However, we can convert a one-bit comparison against zero into
5205 the opposite comparison against that bit being set in the field. */
5206
5207 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5208 if (lcode != wanted_code)
5209 {
5210 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5211 {
5212 /* Make the left operand unsigned, since we are only interested
5213 in the value of one bit. Otherwise we are doing the wrong
5214 thing below. */
5215 ll_unsignedp = 1;
5216 l_const = ll_mask;
5217 }
5218 else
5219 return 0;
5220 }
5221
5222 /* This is analogous to the code for l_const above. */
5223 if (rcode != wanted_code)
5224 {
5225 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5226 {
5227 rl_unsignedp = 1;
5228 r_const = rl_mask;
5229 }
5230 else
5231 return 0;
5232 }
5233
5234 /* See if we can find a mode that contains both fields being compared on
5235 the left. If we can't, fail. Otherwise, update all constants and masks
5236 to be relative to a field of that size. */
5237 first_bit = MIN (ll_bitpos, rl_bitpos);
5238 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5239 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5240 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5241 volatilep);
5242 if (lnmode == VOIDmode)
5243 return 0;
5244
5245 lnbitsize = GET_MODE_BITSIZE (lnmode);
5246 lnbitpos = first_bit & ~ (lnbitsize - 1);
5247 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5248 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5249
5250 if (BYTES_BIG_ENDIAN)
5251 {
5252 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5253 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5254 }
5255
5256 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5257 size_int (xll_bitpos));
5258 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5259 size_int (xrl_bitpos));
5260
5261 if (l_const)
5262 {
5263 l_const = fold_convert_loc (loc, lntype, l_const);
5264 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5265 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5266 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5267 fold_build1_loc (loc, BIT_NOT_EXPR,
5268 lntype, ll_mask))))
5269 {
5270 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5271
5272 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5273 }
5274 }
5275 if (r_const)
5276 {
5277 r_const = fold_convert_loc (loc, lntype, r_const);
5278 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5279 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5280 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5281 fold_build1_loc (loc, BIT_NOT_EXPR,
5282 lntype, rl_mask))))
5283 {
5284 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5285
5286 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5287 }
5288 }
5289
5290 /* If the right sides are not constant, do the same for them. Also,
5291 disallow this optimization if a size or signedness mismatch occurs
5292 between the left and right sides. */
5293 if (l_const == 0)
5294 {
5295 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5296 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5297 /* Make sure the two fields on the right
5298 correspond to the left without being swapped. */
5299 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5300 return 0;
5301
5302 first_bit = MIN (lr_bitpos, rr_bitpos);
5303 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5304 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5305 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5306 volatilep);
5307 if (rnmode == VOIDmode)
5308 return 0;
5309
5310 rnbitsize = GET_MODE_BITSIZE (rnmode);
5311 rnbitpos = first_bit & ~ (rnbitsize - 1);
5312 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5313 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5314
5315 if (BYTES_BIG_ENDIAN)
5316 {
5317 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5318 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5319 }
5320
5321 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5322 rntype, lr_mask),
5323 size_int (xlr_bitpos));
5324 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5325 rntype, rr_mask),
5326 size_int (xrr_bitpos));
5327
5328 /* Make a mask that corresponds to both fields being compared.
5329 Do this for both items being compared. If the operands are the
5330 same size and the bits being compared are in the same position
5331 then we can do this by masking both and comparing the masked
5332 results. */
5333 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5334 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5335 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5336 {
5337 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5338 ll_unsignedp || rl_unsignedp);
5339 if (! all_ones_mask_p (ll_mask, lnbitsize))
5340 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5341
5342 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5343 lr_unsignedp || rr_unsignedp);
5344 if (! all_ones_mask_p (lr_mask, rnbitsize))
5345 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5346
5347 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5348 }
5349
5350 /* There is one more simplification we can try: if both pairs of
5351 fields being compared are adjacent, we may be able to make a wider
5352 field containing them both.
5353
5354 Note that we still must mask the lhs/rhs expressions. Furthermore,
5355 the mask must be shifted to account for the shift done by
5356 make_bit_field_ref. */
5357 if ((ll_bitsize + ll_bitpos == rl_bitpos
5358 && lr_bitsize + lr_bitpos == rr_bitpos)
5359 || (ll_bitpos == rl_bitpos + rl_bitsize
5360 && lr_bitpos == rr_bitpos + rr_bitsize))
5361 {
5362 tree type;
5363
5364 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5365 ll_bitsize + rl_bitsize,
5366 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5367 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5368 lr_bitsize + rr_bitsize,
5369 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5370
5371 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5372 size_int (MIN (xll_bitpos, xrl_bitpos)));
5373 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5374 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5375
5376 /* Convert to the smaller type before masking out unwanted bits. */
5377 type = lntype;
5378 if (lntype != rntype)
5379 {
5380 if (lnbitsize > rnbitsize)
5381 {
5382 lhs = fold_convert_loc (loc, rntype, lhs);
5383 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5384 type = rntype;
5385 }
5386 else if (lnbitsize < rnbitsize)
5387 {
5388 rhs = fold_convert_loc (loc, lntype, rhs);
5389 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5390 type = lntype;
5391 }
5392 }
5393
5394 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5395 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5396
5397 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5398 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5399
5400 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5401 }
5402
5403 return 0;
5404 }
5405
5406 /* Handle the case of comparisons with constants. If there is something in
5407 common between the masks, those bits of the constants must be the same.
5408 If not, the condition is always false. Test for this to avoid generating
5409 incorrect code below. */
5410 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5411 if (! integer_zerop (result)
5412 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5413 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5414 {
5415 if (wanted_code == NE_EXPR)
5416 {
5417 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5418 return constant_boolean_node (true, truth_type);
5419 }
5420 else
5421 {
5422 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5423 return constant_boolean_node (false, truth_type);
5424 }
5425 }
5426
5427 /* Construct the expression we will return. First get the component
5428 reference we will make. Unless the mask is all ones for the width of
5429 that field, perform the mask operation. Then compare with the
5430 merged constant. */
5431 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5432 ll_unsignedp || rl_unsignedp);
5433
5434 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5435 if (! all_ones_mask_p (ll_mask, lnbitsize))
5436 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5437
5438 return build2_loc (loc, wanted_code, truth_type, result,
5439 const_binop (BIT_IOR_EXPR, l_const, r_const));
5440 }
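
/* Editorial illustration (not part of GCC): a minimal sketch of the
   effect of the fold above, assuming an 8-bit byte holding two 4-bit
   fields.  Testing the low and high halves separately,
   (byte & 0x0f) == 3 && (byte >> 4) == 5, merges into one comparison
   of the whole byte against the combined constant.  */
static int
example_merged_field_compare (unsigned char byte)
{
  return byte == ((5 << 4) | 3);
}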
5441 \f
5442 /* Optimize a comparison (OP0 CODE OP1) where OP0 is a MIN_EXPR or
5443 MAX_EXPR and OP1 is a constant. */
5444
5445 static tree
5446 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5447 tree op0, tree op1)
5448 {
5449 tree arg0 = op0;
5450 enum tree_code op_code;
5451 tree comp_const;
5452 tree minmax_const;
5453 int consts_equal, consts_lt;
5454 tree inner;
5455
5456 STRIP_SIGN_NOPS (arg0);
5457
5458 op_code = TREE_CODE (arg0);
5459 minmax_const = TREE_OPERAND (arg0, 1);
5460 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5461 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5462 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5463 inner = TREE_OPERAND (arg0, 0);
5464
5465 /* If something does not permit us to optimize, return the original tree. */
5466 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5467 || TREE_CODE (comp_const) != INTEGER_CST
5468 || TREE_OVERFLOW (comp_const)
5469 || TREE_CODE (minmax_const) != INTEGER_CST
5470 || TREE_OVERFLOW (minmax_const))
5471 return NULL_TREE;
5472
5473 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5474 and GT_EXPR, doing the rest with recursive calls using logical
5475 simplifications. */
5476 switch (code)
5477 {
5478 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5479 {
5480 tree tem
5481 = optimize_minmax_comparison (loc,
5482 invert_tree_comparison (code, false),
5483 type, op0, op1);
5484 if (tem)
5485 return invert_truthvalue_loc (loc, tem);
5486 return NULL_TREE;
5487 }
5488
5489 case GE_EXPR:
5490 return
5491 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5492 optimize_minmax_comparison
5493 (loc, EQ_EXPR, type, arg0, comp_const),
5494 optimize_minmax_comparison
5495 (loc, GT_EXPR, type, arg0, comp_const));
5496
5497 case EQ_EXPR:
5498 if (op_code == MAX_EXPR && consts_equal)
5499 /* MAX (X, 0) == 0 -> X <= 0 */
5500 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5501
5502 else if (op_code == MAX_EXPR && consts_lt)
5503 /* MAX (X, 0) == 5 -> X == 5 */
5504 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5505
5506 else if (op_code == MAX_EXPR)
5507 /* MAX (X, 0) == -1 -> false */
5508 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5509
5510 else if (consts_equal)
5511 /* MIN (X, 0) == 0 -> X >= 0 */
5512 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5513
5514 else if (consts_lt)
5515 /* MIN (X, 0) == 5 -> false */
5516 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5517
5518 else
5519 /* MIN (X, 0) == -1 -> X == -1 */
5520 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5521
5522 case GT_EXPR:
5523 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5524 /* MAX (X, 0) > 0 -> X > 0
5525 MAX (X, 0) > 5 -> X > 5 */
5526 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5527
5528 else if (op_code == MAX_EXPR)
5529 /* MAX (X, 0) > -1 -> true */
5530 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5531
5532 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5533 /* MIN (X, 0) > 0 -> false
5534 MIN (X, 0) > 5 -> false */
5535 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5536
5537 else
5538 /* MIN (X, 0) > -1 -> X > -1 */
5539 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5540
5541 default:
5542 return NULL_TREE;
5543 }
5544 }
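
/* Editorial illustration (not part of GCC): two of the identities
   above, written out at the source level.  Both equivalences hold for
   every int x, so this sketch always returns 1.  */
static int
example_minmax_folds (int x)
{
  /* MAX (x, 0) == 0  folds to  x <= 0.  */
  int max_eq = ((x > 0 ? x : 0) == 0) == (x <= 0);
  /* MIN (x, 0) > -1  folds to  x > -1.  */
  int min_gt = ((x < 0 ? x : 0) > -1) == (x > -1);
  return max_eq && min_gt;
}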
5545 \f
5546 /* T is an integer expression that is being multiplied by, divided by, or
5547 taken modulo a constant C (CODE says which operation, and what kind of
5548 divide or modulus). See if we can eliminate that operation by folding it with
5549 other operations already in T. WIDE_TYPE, if non-null, is a type that
5550 should be used for the computation if wider than our type.
5551
5552 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5553 (X * 2) + (Y * 4). We must, however, be assured that either the original
5554 expression would not overflow or that overflow is undefined for the type
5555 in the language in question.
5556
5557 If we return a non-null expression, it is an equivalent form of the
5558 original computation, but need not be in the original type.
5559
5560 We set *STRICT_OVERFLOW_P to true if the return value depends on
5561 signed overflow being undefined. Otherwise we do not change
5562 *STRICT_OVERFLOW_P. */
5563
5564 static tree
5565 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5566 bool *strict_overflow_p)
5567 {
5568 /* To avoid exponential search depth, refuse to allow recursion past
5569 three levels. Beyond that (1) it's highly unlikely that we'll find
5570 something interesting and (2) we've probably processed it before
5571 when we built the inner expression. */
5572
5573 static int depth;
5574 tree ret;
5575
5576 if (depth > 3)
5577 return NULL;
5578
5579 depth++;
5580 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5581 depth--;
5582
5583 return ret;
5584 }
5585
5586 static tree
5587 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5588 bool *strict_overflow_p)
5589 {
5590 tree type = TREE_TYPE (t);
5591 enum tree_code tcode = TREE_CODE (t);
5592 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5593 > GET_MODE_SIZE (TYPE_MODE (type)))
5594 ? wide_type : type);
5595 tree t1, t2;
5596 int same_p = tcode == code;
5597 tree op0 = NULL_TREE, op1 = NULL_TREE;
5598 bool sub_strict_overflow_p;
5599
5600 /* Don't deal with constants of zero here; they confuse the code below. */
5601 if (integer_zerop (c))
5602 return NULL_TREE;
5603
5604 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5605 op0 = TREE_OPERAND (t, 0);
5606
5607 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5608 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5609
5610 /* Note that we need not handle conditional operations here since fold
5611 already handles those cases. So just do arithmetic here. */
5612 switch (tcode)
5613 {
5614 case INTEGER_CST:
5615 /* For a constant, we can always simplify if we are a multiply
5616 or (for divide and modulus) if it is a multiple of our constant. */
5617 if (code == MULT_EXPR
5618 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5619 return const_binop (code, fold_convert (ctype, t),
5620 fold_convert (ctype, c));
5621 break;
5622
5623 CASE_CONVERT: case NON_LVALUE_EXPR:
5624 /* If op0 is an expression ... */
5625 if ((COMPARISON_CLASS_P (op0)
5626 || UNARY_CLASS_P (op0)
5627 || BINARY_CLASS_P (op0)
5628 || VL_EXP_CLASS_P (op0)
5629 || EXPRESSION_CLASS_P (op0))
5630 /* ... and has wrapping overflow, and its type is smaller
5631 than ctype, then we cannot pass through as widening. */
5632 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5633 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5634 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5635 && (TYPE_PRECISION (ctype)
5636 > TYPE_PRECISION (TREE_TYPE (op0))))
5637 /* ... or this is a truncation (t is narrower than op0),
5638 then we cannot pass through this narrowing. */
5639 || (TYPE_PRECISION (type)
5640 < TYPE_PRECISION (TREE_TYPE (op0)))
5641 /* ... or signedness changes for division or modulus,
5642 then we cannot pass through this conversion. */
5643 || (code != MULT_EXPR
5644 && (TYPE_UNSIGNED (ctype)
5645 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5646 /* ... or has undefined overflow while the converted to
5647 type has not, we cannot do the operation in the inner type
5648 as that would introduce undefined overflow. */
5649 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5650 && !TYPE_OVERFLOW_UNDEFINED (type))))
5651 break;
5652
5653 /* Pass the constant down and see if we can make a simplification. If
5654 we can, replace this expression with the inner simplification for
5655 possible later conversion to our or some other type. */
5656 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5657 && TREE_CODE (t2) == INTEGER_CST
5658 && !TREE_OVERFLOW (t2)
5659 && (0 != (t1 = extract_muldiv (op0, t2, code,
5660 code == MULT_EXPR
5661 ? ctype : NULL_TREE,
5662 strict_overflow_p))))
5663 return t1;
5664 break;
5665
5666 case ABS_EXPR:
5667 /* If widening the type changes it from signed to unsigned, then we
5668 must avoid building ABS_EXPR itself as unsigned. */
5669 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5670 {
5671 tree cstype = (*signed_type_for) (ctype);
5672 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5673 != 0)
5674 {
5675 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5676 return fold_convert (ctype, t1);
5677 }
5678 break;
5679 }
5680 /* If the constant is negative, we cannot simplify this. */
5681 if (tree_int_cst_sgn (c) == -1)
5682 break;
5683 /* FALLTHROUGH */
5684 case NEGATE_EXPR:
5685 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5686 != 0)
5687 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5688 break;
5689
5690 case MIN_EXPR: case MAX_EXPR:
5691 /* If widening the type changes the signedness, then we can't perform
5692 this optimization as that changes the result. */
5693 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5694 break;
5695
5696 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5697 sub_strict_overflow_p = false;
5698 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5699 &sub_strict_overflow_p)) != 0
5700 && (t2 = extract_muldiv (op1, c, code, wide_type,
5701 &sub_strict_overflow_p)) != 0)
5702 {
5703 if (tree_int_cst_sgn (c) < 0)
5704 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5705 if (sub_strict_overflow_p)
5706 *strict_overflow_p = true;
5707 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5708 fold_convert (ctype, t2));
5709 }
5710 break;
5711
5712 case LSHIFT_EXPR: case RSHIFT_EXPR:
5713 /* If the second operand is constant, this is a multiplication
5714 or floor division by a power of two, so we can treat it that
5715 way unless the multiplier or divisor overflows. Signed
5716 left-shift overflow is implementation-defined rather than
5717 undefined in C90, so do not convert signed left shift into
5718 multiplication. */
5719 if (TREE_CODE (op1) == INTEGER_CST
5720 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5721 /* const_binop may not detect overflow correctly,
5722 so check for it explicitly here. */
5723 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5724 && TREE_INT_CST_HIGH (op1) == 0
5725 && 0 != (t1 = fold_convert (ctype,
5726 const_binop (LSHIFT_EXPR,
5727 size_one_node,
5728 op1)))
5729 && !TREE_OVERFLOW (t1))
5730 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5731 ? MULT_EXPR : FLOOR_DIV_EXPR,
5732 ctype,
5733 fold_convert (ctype, op0),
5734 t1),
5735 c, code, wide_type, strict_overflow_p);
5736 break;
5737
5738 case PLUS_EXPR: case MINUS_EXPR:
5739 /* See if we can eliminate the operation on both sides. If we can, we
5740 can return a new PLUS or MINUS. If we can't, the only remaining
5741 cases where we can do anything are if the second operand is a
5742 constant. */
5743 sub_strict_overflow_p = false;
5744 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5745 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5746 if (t1 != 0 && t2 != 0
5747 && (code == MULT_EXPR
5748 /* If not multiplication, we can only do this if both operands
5749 are divisible by c. */
5750 || (multiple_of_p (ctype, op0, c)
5751 && multiple_of_p (ctype, op1, c))))
5752 {
5753 if (sub_strict_overflow_p)
5754 *strict_overflow_p = true;
5755 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5756 fold_convert (ctype, t2));
5757 }
5758
5759 /* If this was a subtraction, negate OP1 and set it to be an addition.
5760 This simplifies the logic below. */
5761 if (tcode == MINUS_EXPR)
5762 {
5763 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5764 /* If OP1 was not easily negatable, the constant may be OP0. */
5765 if (TREE_CODE (op0) == INTEGER_CST)
5766 {
5767 tree tem = op0;
5768 op0 = op1;
5769 op1 = tem;
5770 tem = t1;
5771 t1 = t2;
5772 t2 = tem;
5773 }
5774 }
5775
5776 if (TREE_CODE (op1) != INTEGER_CST)
5777 break;
5778
5779 /* If either OP1 or C are negative, this optimization is not safe for
5780 some of the division and remainder types while for others we need
5781 to change the code. */
5782 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5783 {
5784 if (code == CEIL_DIV_EXPR)
5785 code = FLOOR_DIV_EXPR;
5786 else if (code == FLOOR_DIV_EXPR)
5787 code = CEIL_DIV_EXPR;
5788 else if (code != MULT_EXPR
5789 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5790 break;
5791 }
5792
5793 /* If it's a multiply or a division/modulus operation of a multiple
5794 of our constant, do the operation and verify it doesn't overflow. */
5795 if (code == MULT_EXPR
5796 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5797 {
5798 op1 = const_binop (code, fold_convert (ctype, op1),
5799 fold_convert (ctype, c));
5800 /* We allow the constant to overflow with wrapping semantics. */
5801 if (op1 == 0
5802 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5803 break;
5804 }
5805 else
5806 break;
5807
5808 /* If we have an unsigned type that is not a sizetype, we cannot widen
5809 the operation since it will change the result if the original
5810 computation overflowed. */
5811 if (TYPE_UNSIGNED (ctype)
5812 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5813 && ctype != type)
5814 break;
5815
5816 /* If we were able to eliminate our operation from the first side,
5817 apply our operation to the second side and reform the PLUS. */
5818 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5819 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5820
5821 /* The last case is if we are a multiply. In that case, we can
5822 apply the distributive law to commute the multiply and addition
5823 if the multiplication of the constants doesn't overflow. */
5824 if (code == MULT_EXPR)
5825 return fold_build2 (tcode, ctype,
5826 fold_build2 (code, ctype,
5827 fold_convert (ctype, op0),
5828 fold_convert (ctype, c)),
5829 op1);
5830
5831 break;
5832
5833 case MULT_EXPR:
5834 /* We have a special case here if we are doing something like
5835 (C * 8) % 4 since we know that's zero. */
5836 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5837 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5838 /* If the multiplication can overflow we cannot optimize this.
5839 ??? Until we can properly mark individual operations as
5840 not overflowing we need to treat sizetype specially here, as
5841 stor-layout relies on this optimization to make
5842 DECL_FIELD_BIT_OFFSET always a constant. */
5843 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5844 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5845 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5846 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5847 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5848 {
5849 *strict_overflow_p = true;
5850 return omit_one_operand (type, integer_zero_node, op0);
5851 }
5852
5853 /* ... fall through ... */
5854
5855 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5856 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5857 /* If we can extract our operation from the LHS, do so and return a
5858 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5859 do something only if the second operand is a constant. */
5860 if (same_p
5861 && (t1 = extract_muldiv (op0, c, code, wide_type,
5862 strict_overflow_p)) != 0)
5863 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5864 fold_convert (ctype, op1));
5865 else if (tcode == MULT_EXPR && code == MULT_EXPR
5866 && (t1 = extract_muldiv (op1, c, code, wide_type,
5867 strict_overflow_p)) != 0)
5868 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5869 fold_convert (ctype, t1));
5870 else if (TREE_CODE (op1) != INTEGER_CST)
5871 return 0;
5872
5873 /* If these are the same operation types, we can associate them
5874 assuming no overflow. */
5875 if (tcode == code)
5876 {
5877 double_int mul;
5878 int overflow_p;
5879 mul = double_int_mul_with_sign
5880 (double_int_ext
5881 (tree_to_double_int (op1),
5882 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5883 double_int_ext
5884 (tree_to_double_int (c),
5885 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5886 false, &overflow_p);
5887 overflow_p = (((!TYPE_UNSIGNED (ctype)
5888 || (TREE_CODE (ctype) == INTEGER_TYPE
5889 && TYPE_IS_SIZETYPE (ctype)))
5890 && overflow_p)
5891 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5892 if (!double_int_fits_to_tree_p (ctype, mul)
5893 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5894 || !TYPE_UNSIGNED (ctype)
5895 || (TREE_CODE (ctype) == INTEGER_TYPE
5896 && TYPE_IS_SIZETYPE (ctype))))
5897 overflow_p = 1;
5898 if (!overflow_p)
5899 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5900 double_int_to_tree (ctype, mul));
5901 }
5902
5903 /* If these operations "cancel" each other, we have the main
5904 optimizations of this pass, which occur when either constant is a
5905 multiple of the other, in which case we replace this with an
5906 operation of either CODE or TCODE.
5907
5908 If we have an unsigned type that is not a sizetype, we cannot do
5909 this since it will change the result if the original computation
5910 overflowed. */
5911 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5912 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5913 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5914 || (tcode == MULT_EXPR
5915 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5916 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5917 && code != MULT_EXPR)))
5918 {
5919 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5920 {
5921 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5922 *strict_overflow_p = true;
5923 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5924 fold_convert (ctype,
5925 const_binop (TRUNC_DIV_EXPR,
5926 op1, c)));
5927 }
5928 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5929 {
5930 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5931 *strict_overflow_p = true;
5932 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5933 fold_convert (ctype,
5934 const_binop (TRUNC_DIV_EXPR,
5935 c, op1)));
5936 }
5937 }
5938 break;
5939
5940 default:
5941 break;
5942 }
5943
5944 return 0;
5945 }
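
/* Editorial illustration (not part of GCC): the running example from
   the comment above, assuming the original sum cannot overflow (or
   that overflow is undefined for the type, as for signed long).  */
static long
example_extract_muldiv (long x, long y)
{
  /* (x * 8 + y * 16) / 4  folds to  x * 2 + y * 4: the division is
     eliminated by dividing each multiplier instead.  */
  return x * 2 + y * 4;
}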
5946 \f
5947 /* Return a node which has the indicated constant VALUE (either 0 or
5948 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5949 and is of the indicated TYPE. */
5950
5951 tree
5952 constant_boolean_node (bool value, tree type)
5953 {
5954 if (type == integer_type_node)
5955 return value ? integer_one_node : integer_zero_node;
5956 else if (type == boolean_type_node)
5957 return value ? boolean_true_node : boolean_false_node;
5958 else if (TREE_CODE (type) == VECTOR_TYPE)
5959 return build_vector_from_val (type,
5960 build_int_cst (TREE_TYPE (type),
5961 value ? -1 : 0));
5962 else
5963 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5964 }
5965
5966
5967 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5968 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5969 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5970 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5971 COND is the first argument to CODE; otherwise (as in the example
5972 given here), it is the second argument. TYPE is the type of the
5973 original expression. Return NULL_TREE if no simplification is
5974 possible. */
5975
5976 static tree
5977 fold_binary_op_with_conditional_arg (location_t loc,
5978 enum tree_code code,
5979 tree type, tree op0, tree op1,
5980 tree cond, tree arg, int cond_first_p)
5981 {
5982 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5983 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5984 tree test, true_value, false_value;
5985 tree lhs = NULL_TREE;
5986 tree rhs = NULL_TREE;
5987
5988 if (TREE_CODE (cond) == COND_EXPR)
5989 {
5990 test = TREE_OPERAND (cond, 0);
5991 true_value = TREE_OPERAND (cond, 1);
5992 false_value = TREE_OPERAND (cond, 2);
5993 /* If this operand is a throw expression (it has void type), then
5994 it does not make sense to try to perform a logical or arithmetic operation
5995 involving it. */
5996 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5997 lhs = true_value;
5998 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5999 rhs = false_value;
6000 }
6001 else
6002 {
6003 tree testtype = TREE_TYPE (cond);
6004 test = cond;
6005 true_value = constant_boolean_node (true, testtype);
6006 false_value = constant_boolean_node (false, testtype);
6007 }
6008
6009 /* This transformation is only worthwhile if we don't have to wrap ARG
6010 in a SAVE_EXPR and the operation can be simplified on at least one
6011 of the branches once it is pushed inside the COND_EXPR. */
6012 if (!TREE_CONSTANT (arg)
6013 && (TREE_SIDE_EFFECTS (arg)
6014 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6015 return NULL_TREE;
6016
6017 arg = fold_convert_loc (loc, arg_type, arg);
6018 if (lhs == 0)
6019 {
6020 true_value = fold_convert_loc (loc, cond_type, true_value);
6021 if (cond_first_p)
6022 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6023 else
6024 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6025 }
6026 if (rhs == 0)
6027 {
6028 false_value = fold_convert_loc (loc, cond_type, false_value);
6029 if (cond_first_p)
6030 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6031 else
6032 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6033 }
6034
6035 /* Check that we have simplified at least one of the branches. */
6036 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6037 return NULL_TREE;
6038
6039 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6040 }
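
/* Editorial illustration (not part of GCC): the transformation
   documented above, for the `a + (x < y)' form.  Since (x < y) is 0
   or 1, pushing the addition into the arms lets each arm fold to a
   constant-adjusted value.  */
static int
example_cond_arg_fold (int a, int x, int y)
{
  /* a + (x < y)  becomes  (x < y) ? (a + 1) : (a + 0).  */
  return x < y ? a + 1 : a;
}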
6041
6042 \f
6043 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6044
6045 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6046 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6047 ADDEND is the same as X.
6048
6049 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6050 and finite. The problematic cases are when X is zero, and its mode
6051 has signed zeros. In the case of rounding towards -infinity,
6052 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6053 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6054
6055 bool
6056 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6057 {
6058 if (!real_zerop (addend))
6059 return false;
6060
6061 /* Don't allow the fold with -fsignaling-nans. */
6062 if (HONOR_SNANS (TYPE_MODE (type)))
6063 return false;
6064
6065 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6066 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6067 return true;
6068
6069 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6070 if (TREE_CODE (addend) == REAL_CST
6071 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6072 negate = !negate;
6073
6074 /* The mode has signed zeros, and we have to honor their sign.
6075 In this situation, there is only one case we can return true for.
6076 X - 0 is the same as X unless rounding towards -infinity is
6077 supported. */
6078 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6079 }
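
/* Editorial illustration (not part of GCC): why NEGATE matters above.
   In the default rounding mode -0.0 + 0.0 is +0.0, so X + 0.0 is not
   an identity for X = -0.0; X - 0.0 does preserve the sign of zero
   and only fails under rounding toward -infinity.  */
static double
example_zero_addition (double x)
{
  return x - 0.0;   /* Foldable to x in the default rounding mode.  */
}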
6080
6081 /* Subroutine of fold() that checks comparisons of built-in math
6082 functions against real constants.
6083
6084 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6085 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6086 is the type of the result and ARG0 and ARG1 are the operands of the
6087 comparison. ARG1 must be a TREE_REAL_CST.
6088
6089 The function returns the constant folded tree if a simplification
6090 can be made, and NULL_TREE otherwise. */
6091
6092 static tree
6093 fold_mathfn_compare (location_t loc,
6094 enum built_in_function fcode, enum tree_code code,
6095 tree type, tree arg0, tree arg1)
6096 {
6097 REAL_VALUE_TYPE c;
6098
6099 if (BUILTIN_SQRT_P (fcode))
6100 {
6101 tree arg = CALL_EXPR_ARG (arg0, 0);
6102 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6103
6104 c = TREE_REAL_CST (arg1);
6105 if (REAL_VALUE_NEGATIVE (c))
6106 {
6107 /* sqrt(x) < y is always false, if y is negative. */
6108 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6109 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6110
6111 /* sqrt(x) > y is always true, if y is negative and we
6112 don't care about NaNs, i.e. negative values of x. */
6113 if (code == NE_EXPR || !HONOR_NANS (mode))
6114 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6115
6116 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6117 return fold_build2_loc (loc, GE_EXPR, type, arg,
6118 build_real (TREE_TYPE (arg), dconst0));
6119 }
6120 else if (code == GT_EXPR || code == GE_EXPR)
6121 {
6122 REAL_VALUE_TYPE c2;
6123
6124 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6125 real_convert (&c2, mode, &c2);
6126
6127 if (REAL_VALUE_ISINF (c2))
6128 {
6129 /* sqrt(x) > y is x == +Inf, when y is very large. */
6130 if (HONOR_INFINITIES (mode))
6131 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6132 build_real (TREE_TYPE (arg), c2));
6133
6134 /* sqrt(x) > y is always false, when y is very large
6135 and we don't care about infinities. */
6136 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6137 }
6138
6139 /* sqrt(x) > c is the same as x > c*c. */
6140 return fold_build2_loc (loc, code, type, arg,
6141 build_real (TREE_TYPE (arg), c2));
6142 }
6143 else if (code == LT_EXPR || code == LE_EXPR)
6144 {
6145 REAL_VALUE_TYPE c2;
6146
6147 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6148 real_convert (&c2, mode, &c2);
6149
6150 if (REAL_VALUE_ISINF (c2))
6151 {
6152 /* sqrt(x) < y is always true, when y is a very large
6153 value and we don't care about NaNs or Infinities. */
6154 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6155 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6156
6157 /* sqrt(x) < y is x != +Inf when y is very large and we
6158 don't care about NaNs. */
6159 if (! HONOR_NANS (mode))
6160 return fold_build2_loc (loc, NE_EXPR, type, arg,
6161 build_real (TREE_TYPE (arg), c2));
6162
6163 /* sqrt(x) < y is x >= 0 when y is very large and we
6164 don't care about Infinities. */
6165 if (! HONOR_INFINITIES (mode))
6166 return fold_build2_loc (loc, GE_EXPR, type, arg,
6167 build_real (TREE_TYPE (arg), dconst0));
6168
6169 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6170 arg = save_expr (arg);
6171 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6172 fold_build2_loc (loc, GE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg),
6174 dconst0)),
6175 fold_build2_loc (loc, NE_EXPR, type, arg,
6176 build_real (TREE_TYPE (arg),
6177 c2)));
6178 }
6179
6180 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6181 if (! HONOR_NANS (mode))
6182 return fold_build2_loc (loc, code, type, arg,
6183 build_real (TREE_TYPE (arg), c2));
6184
6185 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6186 arg = save_expr (arg);
6187 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6188 fold_build2_loc (loc, GE_EXPR, type, arg,
6189 build_real (TREE_TYPE (arg),
6190 dconst0)),
6191 fold_build2_loc (loc, code, type, arg,
6192 build_real (TREE_TYPE (arg),
6193 c2)));
6194 }
6195 }
6196
6197 return NULL_TREE;
6198 }
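
/* Editorial illustration (not part of GCC): the central identity used
   above, valid when c >= 0, c*c does not overflow to infinity, and
   NaNs need not be honored; the code above handles the negative-c and
   infinite-c*c cases separately.  */
static int
example_sqrt_compare (double x, double c)
{
  /* sqrt(x) > c  folds to  x > c*c under these assumptions.  */
  return x > c * c;
}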
6199
6200 /* Subroutine of fold() that optimizes comparisons against Infinities,
6201 either +Inf or -Inf.
6202
6203 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6204 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6205 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6206
6207 The function returns the constant folded tree if a simplification
6208 can be made, and NULL_TREE otherwise. */
6209
6210 static tree
6211 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6212 tree arg0, tree arg1)
6213 {
6214 enum machine_mode mode;
6215 REAL_VALUE_TYPE max;
6216 tree temp;
6217 bool neg;
6218
6219 mode = TYPE_MODE (TREE_TYPE (arg0));
6220
6221 /* For negative infinity swap the sense of the comparison. */
6222 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6223 if (neg)
6224 code = swap_tree_comparison (code);
6225
6226 switch (code)
6227 {
6228 case GT_EXPR:
6229 /* x > +Inf is always false, if we ignore sNaNs. */
6230 if (HONOR_SNANS (mode))
6231 return NULL_TREE;
6232 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6233
6234 case LE_EXPR:
6235 /* x <= +Inf is always true, if we don't care about NaNs. */
6236 if (! HONOR_NANS (mode))
6237 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6238
6239 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6240 arg0 = save_expr (arg0);
6241 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6242
6243 case EQ_EXPR:
6244 case GE_EXPR:
6245 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6246 real_maxval (&max, neg, mode);
6247 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6248 arg0, build_real (TREE_TYPE (arg0), max));
6249
6250 case LT_EXPR:
6251 /* x < +Inf is always equal to x <= DBL_MAX. */
6252 real_maxval (&max, neg, mode);
6253 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6254 arg0, build_real (TREE_TYPE (arg0), max));
6255
6256 case NE_EXPR:
6257 /* x != +Inf is always equal to !(x > DBL_MAX). */
6258 real_maxval (&max, neg, mode);
6259 if (! HONOR_NANS (mode))
6260 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6261 arg0, build_real (TREE_TYPE (arg0), max));
6262
6263 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6264 arg0, build_real (TREE_TYPE (arg0), max));
6265 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6266
6267 default:
6268 break;
6269 }
6270
6271 return NULL_TREE;
6272 }
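
/* Editorial illustration (not part of GCC): the LE_EXPR case above.
   x <= +Inf is false only when x is NaN, and x == x is exactly the
   same predicate, so the infinite bound drops out entirely.  */
static int
example_le_inf (double x)
{
  return x == x;   /* Equivalent to x <= +Inf when NaNs are honored.  */
}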
6273
6274 /* Subroutine of fold() that optimizes comparisons of a division by
6275 a nonzero integer constant against an integer constant, i.e.
6276 X/C1 op C2.
6277
6278 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6279 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6280 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6281
6282 The function returns the constant folded tree if a simplification
6283 can be made, and NULL_TREE otherwise. */
6284
6285 static tree
6286 fold_div_compare (location_t loc,
6287 enum tree_code code, tree type, tree arg0, tree arg1)
6288 {
6289 tree prod, tmp, hi, lo;
6290 tree arg00 = TREE_OPERAND (arg0, 0);
6291 tree arg01 = TREE_OPERAND (arg0, 1);
6292 double_int val;
6293 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6294 bool neg_overflow;
6295 int overflow;
6296
6297 /* We have to do this the hard way to detect unsigned overflow.
6298 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6299 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6300 TREE_INT_CST_HIGH (arg01),
6301 TREE_INT_CST_LOW (arg1),
6302 TREE_INT_CST_HIGH (arg1),
6303 &val.low, &val.high, unsigned_p);
6304 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6305 neg_overflow = false;
6306
6307 if (unsigned_p)
6308 {
6309 tmp = int_const_binop (MINUS_EXPR, arg01,
6310 build_int_cst (TREE_TYPE (arg01), 1));
6311 lo = prod;
6312
6313 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6314 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6315 TREE_INT_CST_HIGH (prod),
6316 TREE_INT_CST_LOW (tmp),
6317 TREE_INT_CST_HIGH (tmp),
6318 &val.low, &val.high, unsigned_p);
6319 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6320 -1, overflow | TREE_OVERFLOW (prod));
6321 }
6322 else if (tree_int_cst_sgn (arg01) >= 0)
6323 {
6324 tmp = int_const_binop (MINUS_EXPR, arg01,
6325 build_int_cst (TREE_TYPE (arg01), 1));
6326 switch (tree_int_cst_sgn (arg1))
6327 {
6328 case -1:
6329 neg_overflow = true;
6330 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6331 hi = prod;
6332 break;
6333
6334 case 0:
6335 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6336 hi = tmp;
6337 break;
6338
6339 case 1:
6340 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6341 lo = prod;
6342 break;
6343
6344 default:
6345 gcc_unreachable ();
6346 }
6347 }
6348 else
6349 {
6350 /* A negative divisor reverses the relational operators. */
6351 code = swap_tree_comparison (code);
6352
6353 tmp = int_const_binop (PLUS_EXPR, arg01,
6354 build_int_cst (TREE_TYPE (arg01), 1));
6355 switch (tree_int_cst_sgn (arg1))
6356 {
6357 case -1:
6358 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6359 lo = prod;
6360 break;
6361
6362 case 0:
6363 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6364 lo = tmp;
6365 break;
6366
6367 case 1:
6368 neg_overflow = true;
6369 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6370 hi = prod;
6371 break;
6372
6373 default:
6374 gcc_unreachable ();
6375 }
6376 }
6377
6378 switch (code)
6379 {
6380 case EQ_EXPR:
6381 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6382 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6383 if (TREE_OVERFLOW (hi))
6384 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6385 if (TREE_OVERFLOW (lo))
6386 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6387 return build_range_check (loc, type, arg00, 1, lo, hi);
6388
6389 case NE_EXPR:
6390 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6391 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6392 if (TREE_OVERFLOW (hi))
6393 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6394 if (TREE_OVERFLOW (lo))
6395 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6396 return build_range_check (loc, type, arg00, 0, lo, hi);
6397
6398 case LT_EXPR:
6399 if (TREE_OVERFLOW (lo))
6400 {
6401 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6402 return omit_one_operand_loc (loc, type, tmp, arg00);
6403 }
6404 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6405
6406 case LE_EXPR:
6407 if (TREE_OVERFLOW (hi))
6408 {
6409 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6410 return omit_one_operand_loc (loc, type, tmp, arg00);
6411 }
6412 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6413
6414 case GT_EXPR:
6415 if (TREE_OVERFLOW (hi))
6416 {
6417 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6418 return omit_one_operand_loc (loc, type, tmp, arg00);
6419 }
6420 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6421
6422 case GE_EXPR:
6423 if (TREE_OVERFLOW (lo))
6424 {
6425 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6426 return omit_one_operand_loc (loc, type, tmp, arg00);
6427 }
6428 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6429
6430 default:
6431 break;
6432 }
6433
6434 return NULL_TREE;
6435 }
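
/* Editorial illustration (not part of GCC): the range check produced
   above for a concrete unsigned case.  With lo = 2*3 and
   hi = lo + (3 - 1), x / 3 == 2 holds exactly when 6 <= x <= 8.  */
static int
example_div_compare (unsigned x)
{
  return x >= 6 && x <= 8;   /* Folded form of x / 3 == 2.  */
}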
6436
6437
6438 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6439 equality/inequality test, then return a simplified form of the test
6440 using a sign test. Otherwise return NULL. TYPE is the desired
6441 result type. */
6442
6443 static tree
6444 fold_single_bit_test_into_sign_test (location_t loc,
6445 enum tree_code code, tree arg0, tree arg1,
6446 tree result_type)
6447 {
6448 /* If this is testing a single bit, we can optimize the test. */
6449 if ((code == NE_EXPR || code == EQ_EXPR)
6450 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6451 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6452 {
6453 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6454 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6455 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6456
6457 if (arg00 != NULL_TREE
6458 /* This is only a win if casting to a signed type is cheap,
6459 i.e. when arg00's type is not a partial mode. */
6460 && TYPE_PRECISION (TREE_TYPE (arg00))
6461 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6462 {
6463 tree stype = signed_type_for (TREE_TYPE (arg00));
6464 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6465 result_type,
6466 fold_convert_loc (loc, stype, arg00),
6467 build_int_cst (stype, 0));
6468 }
6469 }
6470
6471 return NULL_TREE;
6472 }
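
/* Editorial illustration (not part of GCC): the sign-bit special case
   above, assuming a 32-bit two's-complement int.  */
static int
example_sign_bit_test (int a)
{
  /* (a & 0x80000000) != 0  folds to  a < 0.  */
  return a < 0;
}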
6473
6474 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6475 equality/inequality test, then return a simplified form of
6476 the test using shifts and logical operations. Otherwise return
6477 NULL. TYPE is the desired result type. */
6478
6479 tree
6480 fold_single_bit_test (location_t loc, enum tree_code code,
6481 tree arg0, tree arg1, tree result_type)
6482 {
6483 /* If this is testing a single bit, we can optimize the test. */
6484 if ((code == NE_EXPR || code == EQ_EXPR)
6485 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6486 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6487 {
6488 tree inner = TREE_OPERAND (arg0, 0);
6489 tree type = TREE_TYPE (arg0);
6490 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6491 enum machine_mode operand_mode = TYPE_MODE (type);
6492 int ops_unsigned;
6493 tree signed_type, unsigned_type, intermediate_type;
6494 tree tem, one;
6495
6496 /* First, see if we can fold the single bit test into a sign-bit
6497 test. */
6498 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6499 result_type);
6500 if (tem)
6501 return tem;
6502
6503 /* Otherwise we have (A & C) != 0 where C is a single bit,
6504 convert that into ((A >> C2) & 1), where C2 = log2(C).
6505 Similarly for (A & C) == 0. */
6506
6507 /* If INNER is a right shift of a constant and it plus BITNUM does
6508 not overflow, adjust BITNUM and INNER. */
6509 if (TREE_CODE (inner) == RSHIFT_EXPR
6510 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6511 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6512 && bitnum < TYPE_PRECISION (type)
6513 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6514 bitnum - TYPE_PRECISION (type)))
6515 {
6516 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6517 inner = TREE_OPERAND (inner, 0);
6518 }
6519
6520 /* If we are going to be able to omit the AND below, we must do our
6521 operations as unsigned. If we must use the AND, we have a choice.
6522 Normally unsigned is faster, but for some machines signed is. */
6523 #ifdef LOAD_EXTEND_OP
6524 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6525 && !flag_syntax_only) ? 0 : 1;
6526 #else
6527 ops_unsigned = 1;
6528 #endif
6529
6530 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6531 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6532 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6533 inner = fold_convert_loc (loc, intermediate_type, inner);
6534
6535 if (bitnum != 0)
6536 inner = build2 (RSHIFT_EXPR, intermediate_type,
6537 inner, size_int (bitnum));
6538
6539 one = build_int_cst (intermediate_type, 1);
6540
6541 if (code == EQ_EXPR)
6542 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6543
6544 /* Put the AND last so it can combine with more things. */
6545 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6546
6547 /* Make sure to return the proper type. */
6548 inner = fold_convert_loc (loc, result_type, inner);
6549
6550 return inner;
6551 }
6552 return NULL_TREE;
6553 }
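
/* Editorial illustration (not part of GCC): the shift form produced
   above for a non-sign bit, with C = 8 and therefore C2 = log2(C) = 3.  */
static unsigned
example_single_bit_test (unsigned a)
{
  /* (a & 8) != 0  folds to  (a >> 3) & 1.  */
  return (a >> 3) & 1;
}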
6554
6555 /* Check whether we are allowed to reorder operands arg0 and arg1,
6556 such that the evaluation of arg1 occurs before arg0. */
6557
6558 static bool
6559 reorder_operands_p (const_tree arg0, const_tree arg1)
6560 {
6561 if (! flag_evaluation_order)
6562 return true;
6563 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6564 return true;
6565 return ! TREE_SIDE_EFFECTS (arg0)
6566 && ! TREE_SIDE_EFFECTS (arg1);
6567 }
6568
6569 /* Test whether it is preferable to swap two operands, ARG0 and
6570 ARG1, for example because ARG0 is an integer constant and ARG1
6571 isn't. If REORDER is true, only recommend swapping if we can
6572 evaluate the operands in reverse order. */
6573
6574 bool
6575 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6576 {
6577 STRIP_SIGN_NOPS (arg0);
6578 STRIP_SIGN_NOPS (arg1);
6579
6580 if (TREE_CODE (arg1) == INTEGER_CST)
6581 return 0;
6582 if (TREE_CODE (arg0) == INTEGER_CST)
6583 return 1;
6584
6585 if (TREE_CODE (arg1) == REAL_CST)
6586 return 0;
6587 if (TREE_CODE (arg0) == REAL_CST)
6588 return 1;
6589
6590 if (TREE_CODE (arg1) == FIXED_CST)
6591 return 0;
6592 if (TREE_CODE (arg0) == FIXED_CST)
6593 return 1;
6594
6595 if (TREE_CODE (arg1) == COMPLEX_CST)
6596 return 0;
6597 if (TREE_CODE (arg0) == COMPLEX_CST)
6598 return 1;
6599
6600 if (TREE_CONSTANT (arg1))
6601 return 0;
6602 if (TREE_CONSTANT (arg0))
6603 return 1;
6604
6605 if (optimize_function_for_size_p (cfun))
6606 return 0;
6607
6608 if (reorder && flag_evaluation_order
6609 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6610 return 0;
6611
6612 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6613 for commutative and comparison operators. Ensuring a canonical
6614 form allows the optimizers to find additional redundancies without
6615 having to explicitly check for both orderings. */
6616 if (TREE_CODE (arg0) == SSA_NAME
6617 && TREE_CODE (arg1) == SSA_NAME
6618 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6619 return 1;
6620
6621 /* Put SSA_NAMEs last. */
6622 if (TREE_CODE (arg1) == SSA_NAME)
6623 return 0;
6624 if (TREE_CODE (arg0) == SSA_NAME)
6625 return 1;
6626
6627 /* Put variables last. */
6628 if (DECL_P (arg1))
6629 return 0;
6630 if (DECL_P (arg0))
6631 return 1;
6632
6633 return 0;
6634 }
6635
6636 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6637 ARG0 is extended to a wider type. */
6638
6639 static tree
6640 fold_widened_comparison (location_t loc, enum tree_code code,
6641 tree type, tree arg0, tree arg1)
6642 {
6643 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6644 tree arg1_unw;
6645 tree shorter_type, outer_type;
6646 tree min, max;
6647 bool above, below;
6648
6649 if (arg0_unw == arg0)
6650 return NULL_TREE;
6651 shorter_type = TREE_TYPE (arg0_unw);
6652
6653 #ifdef HAVE_canonicalize_funcptr_for_compare
6654 /* Disable this optimization if we're casting a function pointer
6655 type on targets that require function pointer canonicalization. */
6656 if (HAVE_canonicalize_funcptr_for_compare
6657 && TREE_CODE (shorter_type) == POINTER_TYPE
6658 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6659 return NULL_TREE;
6660 #endif
6661
6662 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6663 return NULL_TREE;
6664
6665 arg1_unw = get_unwidened (arg1, NULL_TREE);
6666
6667 /* If possible, express the comparison in the shorter mode. */
6668 if ((code == EQ_EXPR || code == NE_EXPR
6669 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6670 && (TREE_TYPE (arg1_unw) == shorter_type
6671 || ((TYPE_PRECISION (shorter_type)
6672 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6673 && (TYPE_UNSIGNED (shorter_type)
6674 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6675 || (TREE_CODE (arg1_unw) == INTEGER_CST
6676 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6677 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6678 && int_fits_type_p (arg1_unw, shorter_type))))
6679 return fold_build2_loc (loc, code, type, arg0_unw,
6680 fold_convert_loc (loc, shorter_type, arg1_unw));
6681
6682 if (TREE_CODE (arg1_unw) != INTEGER_CST
6683 || TREE_CODE (shorter_type) != INTEGER_TYPE
6684 || !int_fits_type_p (arg1_unw, shorter_type))
6685 return NULL_TREE;
6686
6687 /* If we are comparing with an integer that does not fit into the range
6688 of the shorter type, the result is known. */
6689 outer_type = TREE_TYPE (arg1_unw);
6690 min = lower_bound_in_type (outer_type, shorter_type);
6691 max = upper_bound_in_type (outer_type, shorter_type);
6692
6693 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6694 max, arg1_unw));
6695 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6696 arg1_unw, min));
6697
6698 switch (code)
6699 {
6700 case EQ_EXPR:
6701 if (above || below)
6702 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6703 break;
6704
6705 case NE_EXPR:
6706 if (above || below)
6707 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6708 break;
6709
6710 case LT_EXPR:
6711 case LE_EXPR:
6712 if (above)
6713 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6714 else if (below)
6715 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6716 break;

6717 case GT_EXPR:
6718 case GE_EXPR:
6719 if (above)
6720 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6721 else if (below)
6722 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6723 break;

6724 default:
6725 break;
6726 }
6727
6728 return NULL_TREE;
6729 }
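
/* Editorial illustration (not part of GCC): the known-result case
   above.  300 does not fit in unsigned char, so the widened equality
   compares against a value outside the shorter type's range and folds
   to constant false.  */
static int
example_widened_compare (unsigned char c)
{
  return (int) c == 300;   /* Folds to 0 for every c.  */
}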
6730
6731 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6732 ARG0 just the signedness is changed. */
6733
6734 static tree
6735 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6736 tree arg0, tree arg1)
6737 {
6738 tree arg0_inner;
6739 tree inner_type, outer_type;
6740
6741 if (!CONVERT_EXPR_P (arg0))
6742 return NULL_TREE;
6743
6744 outer_type = TREE_TYPE (arg0);
6745 arg0_inner = TREE_OPERAND (arg0, 0);
6746 inner_type = TREE_TYPE (arg0_inner);
6747
6748 #ifdef HAVE_canonicalize_funcptr_for_compare
6749 /* Disable this optimization if we're casting a function pointer
6750 type on targets that require function pointer canonicalization. */
6751 if (HAVE_canonicalize_funcptr_for_compare
6752 && TREE_CODE (inner_type) == POINTER_TYPE
6753 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6754 return NULL_TREE;
6755 #endif
6756
6757 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6758 return NULL_TREE;
6759
6760 if (TREE_CODE (arg1) != INTEGER_CST
6761 && !(CONVERT_EXPR_P (arg1)
6762 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6763 return NULL_TREE;
6764
6765 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6766 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6767 && code != NE_EXPR
6768 && code != EQ_EXPR)
6769 return NULL_TREE;
6770
6771 if (TREE_CODE (arg1) == INTEGER_CST)
6772 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6773 0, TREE_OVERFLOW (arg1));
6774 else
6775 arg1 = fold_convert_loc (loc, inner_type, arg1);
6776
6777 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6778 }
6779
6780 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6781 step of the array. Reconstructs s and delta in the case of s *
6782 delta being an integer constant (and thus already folded). ADDR is
6783 the address. OP1 is the multiplicative expression. If the
6784 function succeeds, the new address expression is returned.
6785 Otherwise NULL_TREE is returned. LOC is the location of the
6786 resulting expression. */
6787
6788 static tree
6789 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6790 {
6791 tree s, delta, step;
6792 tree ref = TREE_OPERAND (addr, 0), pref;
6793 tree ret, pos;
6794 tree itype;
6795 bool mdim = false;
6796
6797 /* Strip the nops that might be added when converting op1 to sizetype. */
6798 STRIP_NOPS (op1);
6799
6800 /* Canonicalize op1 into a possibly non-constant delta
6801 and an INTEGER_CST s. */
6802 if (TREE_CODE (op1) == MULT_EXPR)
6803 {
6804 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6805
6806 STRIP_NOPS (arg0);
6807 STRIP_NOPS (arg1);
6808
6809 if (TREE_CODE (arg0) == INTEGER_CST)
6810 {
6811 s = arg0;
6812 delta = arg1;
6813 }
6814 else if (TREE_CODE (arg1) == INTEGER_CST)
6815 {
6816 s = arg1;
6817 delta = arg0;
6818 }
6819 else
6820 return NULL_TREE;
6821 }
6822 else if (TREE_CODE (op1) == INTEGER_CST)
6823 {
6824 delta = op1;
6825 s = NULL_TREE;
6826 }
6827 else
6828 {
6829 /* Treat op1 as delta * 1. */
6830 delta = op1;
6831 s = integer_one_node;
6832 }
6833
6834 for (;; ref = TREE_OPERAND (ref, 0))
6835 {
6836 if (TREE_CODE (ref) == ARRAY_REF)
6837 {
6838 tree domain;
6839
6840 /* Remember if this was a multi-dimensional array. */
6841 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6842 mdim = true;
6843
6844 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6845 if (! domain)
6846 continue;
6847 itype = TREE_TYPE (domain);
6848
6849 step = array_ref_element_size (ref);
6850 if (TREE_CODE (step) != INTEGER_CST)
6851 continue;
6852
6853 if (s)
6854 {
6855 if (! tree_int_cst_equal (step, s))
6856 continue;
6857 }
6858 else
6859 {
6860 /* Check whether delta is a multiple of step. */
6861 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6862 if (! tmp)
6863 continue;
6864 delta = tmp;
6865 }
6866
6867 /* Only fold here if we can verify we do not overflow one
6868 dimension of a multi-dimensional array. */
6869 if (mdim)
6870 {
6871 tree tmp;
6872
6873 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6874 || !TYPE_MAX_VALUE (domain)
6875 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6876 continue;
6877
6878 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6879 fold_convert_loc (loc, itype,
6880 TREE_OPERAND (ref, 1)),
6881 fold_convert_loc (loc, itype, delta));
6882 if (!tmp
6883 || TREE_CODE (tmp) != INTEGER_CST
6884 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6885 continue;
6886 }
6887
6888 break;
6889 }
6890 else if (TREE_CODE (ref) == COMPONENT_REF
6891 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6892 {
6893 tree domain;
6894
6895 /* Remember if this was a multi-dimensional array. */
6896 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6897 mdim = true;
6898
6899 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6900 if (! domain)
6901 continue;
6902 itype = TREE_TYPE (domain);
6903
6904 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6905 if (TREE_CODE (step) != INTEGER_CST)
6906 continue;
6907
6908 if (s)
6909 {
6910 if (! tree_int_cst_equal (step, s))
6911 continue;
6912 }
6913 else
6914 {
6915 /* See if delta is a multiple of step. */
6916 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6917 if (! tmp)
6918 continue;
6919 delta = tmp;
6920 }
6921
6922 /* Only fold here if we can verify we do not overflow one
6923 dimension of a multi-dimensional array. */
6924 if (mdim)
6925 {
6926 tree tmp;
6927
6928 if (!TYPE_MIN_VALUE (domain)
6929 || !TYPE_MAX_VALUE (domain)
6930 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6931 continue;
6932
6933 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6934 fold_convert_loc (loc, itype,
6935 TYPE_MIN_VALUE (domain)),
6936 fold_convert_loc (loc, itype, delta));
6937 if (TREE_CODE (tmp) != INTEGER_CST
6938 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6939 continue;
6940 }
6941
6942 break;
6943 }
6944 else
6945 mdim = false;
6946
6947 if (!handled_component_p (ref))
6948 return NULL_TREE;
6949 }
6950
6951 /* We found a suitable array reference. Copy everything up to it,
6952 and replace the index. */
6953
6954 pref = TREE_OPERAND (addr, 0);
6955 ret = copy_node (pref);
6956 SET_EXPR_LOCATION (ret, loc);
6957 pos = ret;
6958
6959 while (pref != ref)
6960 {
6961 pref = TREE_OPERAND (pref, 0);
6962 TREE_OPERAND (pos, 0) = copy_node (pref);
6963 pos = TREE_OPERAND (pos, 0);
6964 }
6965
6966 if (TREE_CODE (ref) == ARRAY_REF)
6967 {
6968 TREE_OPERAND (pos, 1)
6969 = fold_build2_loc (loc, PLUS_EXPR, itype,
6970 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6971 fold_convert_loc (loc, itype, delta));
6972 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6973 }
6974 else if (TREE_CODE (ref) == COMPONENT_REF)
6975 {
6976 gcc_assert (ret == pos);
6977 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6978 fold_build2_loc
6979 (loc, PLUS_EXPR, itype,
6980 fold_convert_loc (loc, itype,
6981 TYPE_MIN_VALUE
6982 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6983 fold_convert_loc (loc, itype, delta)),
6984 NULL_TREE, NULL_TREE);
6985 return build_fold_addr_expr_loc (loc, ret);
6986 }
6987 else
6988 gcc_unreachable ();
6989 }
6990
6991
6992 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6993 means A >= Y && A != MAX, but in this case we know that
6994 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
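/* For instance, i < n && i + 1 > j folds to i < n && i >= j,
   since the first clause already guarantees i != MAX. */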
6995
6996 static tree
6997 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6998 {
6999 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7000
7001 if (TREE_CODE (bound) == LT_EXPR)
7002 a = TREE_OPERAND (bound, 0);
7003 else if (TREE_CODE (bound) == GT_EXPR)
7004 a = TREE_OPERAND (bound, 1);
7005 else
7006 return NULL_TREE;
7007
7008 typea = TREE_TYPE (a);
7009 if (!INTEGRAL_TYPE_P (typea)
7010 && !POINTER_TYPE_P (typea))
7011 return NULL_TREE;
7012
7013 if (TREE_CODE (ineq) == LT_EXPR)
7014 {
7015 a1 = TREE_OPERAND (ineq, 1);
7016 y = TREE_OPERAND (ineq, 0);
7017 }
7018 else if (TREE_CODE (ineq) == GT_EXPR)
7019 {
7020 a1 = TREE_OPERAND (ineq, 0);
7021 y = TREE_OPERAND (ineq, 1);
7022 }
7023 else
7024 return NULL_TREE;
7025
7026 if (TREE_TYPE (a1) != typea)
7027 return NULL_TREE;
7028
7029 if (POINTER_TYPE_P (typea))
7030 {
7031 /* Convert the pointers to integers before taking the difference. */
7032 tree ta = fold_convert_loc (loc, ssizetype, a);
7033 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7034 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7035 }
7036 else
7037 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7038
7039 if (!diff || !integer_onep (diff))
7040 return NULL_TREE;
7041
7042 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7043 }
7044
7045 /* Fold a sum or difference of at least one multiplication.
7046 Returns the folded tree or NULL if no simplification could be made. */
7047
7048 static tree
7049 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7050 tree arg0, tree arg1)
7051 {
7052 tree arg00, arg01, arg10, arg11;
7053 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7054
7055 /* (A * C) +- (B * C) -> (A+-B) * C.
7056 (A * C) +- A -> A * (C+-1).
7057 We are most concerned about the case where C is a constant,
7058 but other combinations show up during loop reduction. Since
7059 it is not difficult, try all four possibilities. */
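/* For example, a * c + b * c becomes (a + b) * c, and x * 8 + x
   becomes x * 9 by treating the lone x as x * 1. */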
7060
7061 if (TREE_CODE (arg0) == MULT_EXPR)
7062 {
7063 arg00 = TREE_OPERAND (arg0, 0);
7064 arg01 = TREE_OPERAND (arg0, 1);
7065 }
7066 else if (TREE_CODE (arg0) == INTEGER_CST)
7067 {
7068 arg00 = build_one_cst (type);
7069 arg01 = arg0;
7070 }
7071 else
7072 {
7073 /* We cannot generate constant 1 for fract. */
7074 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7075 return NULL_TREE;
7076 arg00 = arg0;
7077 arg01 = build_one_cst (type);
7078 }
7079 if (TREE_CODE (arg1) == MULT_EXPR)
7080 {
7081 arg10 = TREE_OPERAND (arg1, 0);
7082 arg11 = TREE_OPERAND (arg1, 1);
7083 }
7084 else if (TREE_CODE (arg1) == INTEGER_CST)
7085 {
7086 arg10 = build_one_cst (type);
7087 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7088 the purpose of this transformation. */
7089 if (TREE_INT_CST_HIGH (arg1) == -1
7090 && negate_expr_p (arg1)
7091 && code == PLUS_EXPR)
7092 {
7093 arg11 = negate_expr (arg1);
7094 code = MINUS_EXPR;
7095 }
7096 else
7097 arg11 = arg1;
7098 }
7099 else
7100 {
7101 /* We cannot generate constant 1 for fract. */
7102 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7103 return NULL_TREE;
7104 arg10 = arg1;
7105 arg11 = build_one_cst (type);
7106 }
7107 same = NULL_TREE;
7108
7109 if (operand_equal_p (arg01, arg11, 0))
7110 same = arg01, alt0 = arg00, alt1 = arg10;
7111 else if (operand_equal_p (arg00, arg10, 0))
7112 same = arg00, alt0 = arg01, alt1 = arg11;
7113 else if (operand_equal_p (arg00, arg11, 0))
7114 same = arg00, alt0 = arg01, alt1 = arg10;
7115 else if (operand_equal_p (arg01, arg10, 0))
7116 same = arg01, alt0 = arg00, alt1 = arg11;
7117
7118 /* No identical multiplicands; see if we can find a common
7119 power-of-two factor in non-power-of-two multiplies. This
7120 can help in multi-dimensional array access. */
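/* For example, i * 12 + j * 4 becomes (i * 3 + j) * 4. */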
7121 else if (host_integerp (arg01, 0)
7122 && host_integerp (arg11, 0))
7123 {
7124 HOST_WIDE_INT int01, int11, tmp;
7125 bool swap = false;
7126 tree maybe_same;
7127 int01 = TREE_INT_CST_LOW (arg01);
7128 int11 = TREE_INT_CST_LOW (arg11);
7129
7130 /* Move min of absolute values to int11. */
7131 if (absu_hwi (int01) < absu_hwi (int11))
7132 {
7133 tmp = int01, int01 = int11, int11 = tmp;
7134 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7135 maybe_same = arg01;
7136 swap = true;
7137 }
7138 else
7139 maybe_same = arg11;
7140
7141 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7142 /* The remainder should not be a constant, otherwise we
7143 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7144 increases the number of multiplications necessary. */
7145 && TREE_CODE (arg10) != INTEGER_CST)
7146 {
7147 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7148 build_int_cst (TREE_TYPE (arg00),
7149 int01 / int11));
7150 alt1 = arg10;
7151 same = maybe_same;
7152 if (swap)
7153 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7154 }
7155 }
7156
7157 if (same)
7158 return fold_build2_loc (loc, MULT_EXPR, type,
7159 fold_build2_loc (loc, code, type,
7160 fold_convert_loc (loc, type, alt0),
7161 fold_convert_loc (loc, type, alt1)),
7162 fold_convert_loc (loc, type, same));
7163
7164 return NULL_TREE;
7165 }
7166
7167 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7168 specified by EXPR into the buffer PTR of length LEN bytes.
7169 Return the number of bytes placed in the buffer, or zero
7170 upon failure. */
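/* The bytes are emitted in the target's memory order, so e.g. on a
   little-endian target the least significant byte is stored first. */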
7171
7172 static int
7173 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7174 {
7175 tree type = TREE_TYPE (expr);
7176 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7177 int byte, offset, word, words;
7178 unsigned char value;
7179
7180 if (total_bytes > len)
7181 return 0;
7182 words = total_bytes / UNITS_PER_WORD;
7183
7184 for (byte = 0; byte < total_bytes; byte++)
7185 {
7186 int bitpos = byte * BITS_PER_UNIT;
7187 if (bitpos < HOST_BITS_PER_WIDE_INT)
7188 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7189 else
7190 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7191 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7192
7193 if (total_bytes > UNITS_PER_WORD)
7194 {
7195 word = byte / UNITS_PER_WORD;
7196 if (WORDS_BIG_ENDIAN)
7197 word = (words - 1) - word;
7198 offset = word * UNITS_PER_WORD;
7199 if (BYTES_BIG_ENDIAN)
7200 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7201 else
7202 offset += byte % UNITS_PER_WORD;
7203 }
7204 else
7205 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7206 ptr[offset] = value;
7207 }
7208 return total_bytes;
7209 }
7210
7211
7212 /* Subroutine of native_encode_expr. Encode the REAL_CST
7213 specified by EXPR into the buffer PTR of length LEN bytes.
7214 Return the number of bytes placed in the buffer, or zero
7215 upon failure. */
7216
7217 static int
7218 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7219 {
7220 tree type = TREE_TYPE (expr);
7221 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7222 int byte, offset, word, words, bitpos;
7223 unsigned char value;
7224
7225 /* There are always 32 bits in each long, no matter the size of
7226 the host's long. We handle floating point representations with
7227 up to 192 bits. */
7228 long tmp[6];
7229
7230 if (total_bytes > len)
7231 return 0;
7232 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7233
7234 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7235
7236 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7237 bitpos += BITS_PER_UNIT)
7238 {
7239 byte = (bitpos / BITS_PER_UNIT) & 3;
7240 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7241
7242 if (UNITS_PER_WORD < 4)
7243 {
7244 word = byte / UNITS_PER_WORD;
7245 if (WORDS_BIG_ENDIAN)
7246 word = (words - 1) - word;
7247 offset = word * UNITS_PER_WORD;
7248 if (BYTES_BIG_ENDIAN)
7249 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7250 else
7251 offset += byte % UNITS_PER_WORD;
7252 }
7253 else
7254 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7255 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7256 }
7257 return total_bytes;
7258 }
7259
7260 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7261 specified by EXPR into the buffer PTR of length LEN bytes.
7262 Return the number of bytes placed in the buffer, or zero
7263 upon failure. */
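/* The encoding is simply the real part followed immediately by the
   imaginary part. */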
7264
7265 static int
7266 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7267 {
7268 int rsize, isize;
7269 tree part;
7270
7271 part = TREE_REALPART (expr);
7272 rsize = native_encode_expr (part, ptr, len);
7273 if (rsize == 0)
7274 return 0;
7275 part = TREE_IMAGPART (expr);
7276 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7277 if (isize != rsize)
7278 return 0;
7279 return rsize + isize;
7280 }
7281
7282
7283 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7284 specified by EXPR into the buffer PTR of length LEN bytes.
7285 Return the number of bytes placed in the buffer, or zero
7286 upon failure. */
7287
7288 static int
7289 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7290 {
7291 int i, size, offset, count;
7292 tree itype, elem, elements;
7293
7294 offset = 0;
7295 elements = TREE_VECTOR_CST_ELTS (expr);
7296 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7297 itype = TREE_TYPE (TREE_TYPE (expr));
7298 size = GET_MODE_SIZE (TYPE_MODE (itype));
7299 for (i = 0; i < count; i++)
7300 {
7301 if (elements)
7302 {
7303 elem = TREE_VALUE (elements);
7304 elements = TREE_CHAIN (elements);
7305 }
7306 else
7307 elem = NULL_TREE;
7308
7309 if (elem)
7310 {
7311 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7312 return 0;
7313 }
7314 else
7315 {
7316 if (offset + size > len)
7317 return 0;
7318 memset (ptr+offset, 0, size);
7319 }
7320 offset += size;
7321 }
7322 return offset;
7323 }
7324
7325
7326 /* Subroutine of native_encode_expr. Encode the STRING_CST
7327 specified by EXPR into the buffer PTR of length LEN bytes.
7328 Return the number of bytes placed in the buffer, or zero
7329 upon failure. */
7330
7331 static int
7332 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7333 {
7334 tree type = TREE_TYPE (expr);
7335 HOST_WIDE_INT total_bytes;
7336
7337 if (TREE_CODE (type) != ARRAY_TYPE
7338 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7339 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7340 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7341 return 0;
7342 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7343 if (total_bytes > len)
7344 return 0;
7345 if (TREE_STRING_LENGTH (expr) < total_bytes)
7346 {
7347 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7348 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7349 total_bytes - TREE_STRING_LENGTH (expr));
7350 }
7351 else
7352 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7353 return total_bytes;
7354 }
7355
7356
7357 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7358 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7359 buffer PTR of length LEN bytes. Return the number of bytes
7360 placed in the buffer, or zero upon failure. */
7361
7362 int
7363 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7364 {
7365 switch (TREE_CODE (expr))
7366 {
7367 case INTEGER_CST:
7368 return native_encode_int (expr, ptr, len);
7369
7370 case REAL_CST:
7371 return native_encode_real (expr, ptr, len);
7372
7373 case COMPLEX_CST:
7374 return native_encode_complex (expr, ptr, len);
7375
7376 case VECTOR_CST:
7377 return native_encode_vector (expr, ptr, len);
7378
7379 case STRING_CST:
7380 return native_encode_string (expr, ptr, len);
7381
7382 default:
7383 return 0;
7384 }
7385 }
7386
7387
7388 /* Subroutine of native_interpret_expr. Interpret the contents of
7389 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7390 If the buffer cannot be interpreted, return NULL_TREE. */
7391
7392 static tree
7393 native_interpret_int (tree type, const unsigned char *ptr, int len)
7394 {
7395 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7396 int byte, offset, word, words;
7397 unsigned char value;
7398 double_int result;
7399
7400 if (total_bytes > len)
7401 return NULL_TREE;
7402 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7403 return NULL_TREE;
7404
7405 result = double_int_zero;
7406 words = total_bytes / UNITS_PER_WORD;
7407
7408 for (byte = 0; byte < total_bytes; byte++)
7409 {
7410 int bitpos = byte * BITS_PER_UNIT;
7411 if (total_bytes > UNITS_PER_WORD)
7412 {
7413 word = byte / UNITS_PER_WORD;
7414 if (WORDS_BIG_ENDIAN)
7415 word = (words - 1) - word;
7416 offset = word * UNITS_PER_WORD;
7417 if (BYTES_BIG_ENDIAN)
7418 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7419 else
7420 offset += byte % UNITS_PER_WORD;
7421 }
7422 else
7423 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7424 value = ptr[offset];
7425
7426 if (bitpos < HOST_BITS_PER_WIDE_INT)
7427 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7428 else
7429 result.high |= (unsigned HOST_WIDE_INT) value
7430 << (bitpos - HOST_BITS_PER_WIDE_INT);
7431 }
7432
7433 return double_int_to_tree (type, result);
7434 }
7435
7436
7437 /* Subroutine of native_interpret_expr. Interpret the contents of
7438 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7439 If the buffer cannot be interpreted, return NULL_TREE. */
7440
7441 static tree
7442 native_interpret_real (tree type, const unsigned char *ptr, int len)
7443 {
7444 enum machine_mode mode = TYPE_MODE (type);
7445 int total_bytes = GET_MODE_SIZE (mode);
7446 int byte, offset, word, words, bitpos;
7447 unsigned char value;
7448 /* There are always 32 bits in each long, no matter the size of
7449 the host's long. We handle floating point representations with
7450 up to 192 bits. */
7451 REAL_VALUE_TYPE r;
7452 long tmp[6];
7453
7454 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7455 if (total_bytes > len || total_bytes > 24)
7456 return NULL_TREE;
7457 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7458
7459 memset (tmp, 0, sizeof (tmp));
7460 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7461 bitpos += BITS_PER_UNIT)
7462 {
7463 byte = (bitpos / BITS_PER_UNIT) & 3;
7464 if (UNITS_PER_WORD < 4)
7465 {
7466 word = byte / UNITS_PER_WORD;
7467 if (WORDS_BIG_ENDIAN)
7468 word = (words - 1) - word;
7469 offset = word * UNITS_PER_WORD;
7470 if (BYTES_BIG_ENDIAN)
7471 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7472 else
7473 offset += byte % UNITS_PER_WORD;
7474 }
7475 else
7476 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7477 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7478
7479 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7480 }
7481
7482 real_from_target (&r, tmp, mode);
7483 return build_real (type, r);
7484 }
7485
7486
7487 /* Subroutine of native_interpret_expr. Interpret the contents of
7488 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7489 If the buffer cannot be interpreted, return NULL_TREE. */
7490
7491 static tree
7492 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7493 {
7494 tree etype, rpart, ipart;
7495 int size;
7496
7497 etype = TREE_TYPE (type);
7498 size = GET_MODE_SIZE (TYPE_MODE (etype));
7499 if (size * 2 > len)
7500 return NULL_TREE;
7501 rpart = native_interpret_expr (etype, ptr, size);
7502 if (!rpart)
7503 return NULL_TREE;
7504 ipart = native_interpret_expr (etype, ptr+size, size);
7505 if (!ipart)
7506 return NULL_TREE;
7507 return build_complex (type, rpart, ipart);
7508 }
7509
7510
7511 /* Subroutine of native_interpret_expr. Interpret the contents of
7512 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7513 If the buffer cannot be interpreted, return NULL_TREE. */
7514
7515 static tree
7516 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7517 {
7518 tree etype, elem, elements;
7519 int i, size, count;
7520
7521 etype = TREE_TYPE (type);
7522 size = GET_MODE_SIZE (TYPE_MODE (etype));
7523 count = TYPE_VECTOR_SUBPARTS (type);
7524 if (size * count > len)
7525 return NULL_TREE;
7526
7527 elements = NULL_TREE;
7528 for (i = count - 1; i >= 0; i--)
7529 {
7530 elem = native_interpret_expr (etype, ptr+(i*size), size);
7531 if (!elem)
7532 return NULL_TREE;
7533 elements = tree_cons (NULL_TREE, elem, elements);
7534 }
7535 return build_vector (type, elements);
7536 }
7537
7538
7539 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7540 the buffer PTR of length LEN as a constant of type TYPE. For
7541 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7542 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7543 return NULL_TREE. */
7544
7545 tree
7546 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7547 {
7548 switch (TREE_CODE (type))
7549 {
7550 case INTEGER_TYPE:
7551 case ENUMERAL_TYPE:
7552 case BOOLEAN_TYPE:
7553 return native_interpret_int (type, ptr, len);
7554
7555 case REAL_TYPE:
7556 return native_interpret_real (type, ptr, len);
7557
7558 case COMPLEX_TYPE:
7559 return native_interpret_complex (type, ptr, len);
7560
7561 case VECTOR_TYPE:
7562 return native_interpret_vector (type, ptr, len);
7563
7564 default:
7565 return NULL_TREE;
7566 }
7567 }
7568
7569
7570 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7571 TYPE at compile-time. If we're unable to perform the conversion
7572 return NULL_TREE. */
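/* For instance, on a target with 32-bit int and IEEE single-precision
   float, VIEW_CONVERT_EXPR<int>(1.0f) folds to 1065353216 (0x3f800000),
   the bit pattern of 1.0f reinterpreted as an integer. */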
7573
7574 static tree
7575 fold_view_convert_expr (tree type, tree expr)
7576 {
7577 /* We support up to 512-bit values (for V8DFmode). */
7578 unsigned char buffer[64];
7579 int len;
7580
7581 /* Check that the host and target are sane. */
7582 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7583 return NULL_TREE;
7584
7585 len = native_encode_expr (expr, buffer, sizeof (buffer));
7586 if (len == 0)
7587 return NULL_TREE;
7588
7589 return native_interpret_expr (type, buffer, len);
7590 }
7591
7592 /* Build an expression for the address of T. Folds away INDIRECT_REF
7593 to avoid confusing the gimplify process. */
7594
7595 tree
7596 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7597 {
7598 /* The size of the object is not relevant when talking about its address. */
7599 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7600 t = TREE_OPERAND (t, 0);
7601
7602 if (TREE_CODE (t) == INDIRECT_REF)
7603 {
7604 t = TREE_OPERAND (t, 0);
7605
7606 if (TREE_TYPE (t) != ptrtype)
7607 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7608 }
7609 else if (TREE_CODE (t) == MEM_REF
7610 && integer_zerop (TREE_OPERAND (t, 1)))
7611 return TREE_OPERAND (t, 0);
7612 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7613 {
7614 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7615
7616 if (TREE_TYPE (t) != ptrtype)
7617 t = fold_convert_loc (loc, ptrtype, t);
7618 }
7619 else
7620 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7621
7622 return t;
7623 }
7624
7625 /* Build an expression for the address of T. */
7626
7627 tree
7628 build_fold_addr_expr_loc (location_t loc, tree t)
7629 {
7630 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7631
7632 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7633 }
7634
7635 /* Fold a unary expression of code CODE and type TYPE with operand
7636 OP0. Return the folded expression if folding is successful.
7637 Otherwise, return NULL_TREE. */
7638
7639 tree
7640 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7641 {
7642 tree tem;
7643 tree arg0;
7644 enum tree_code_class kind = TREE_CODE_CLASS (code);
7645
7646 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7647 && TREE_CODE_LENGTH (code) == 1);
7648
7649 arg0 = op0;
7650 if (arg0)
7651 {
7652 if (CONVERT_EXPR_CODE_P (code)
7653 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7654 {
7655 /* Don't use STRIP_NOPS, because signedness of argument type
7656 matters. */
7657 STRIP_SIGN_NOPS (arg0);
7658 }
7659 else
7660 {
7661 /* Strip any conversions that don't change the mode. This
7662 is safe for every expression, except for a comparison
7663 expression because its signedness is derived from its
7664 operands.
7665
7666 Note that this is done as an internal manipulation within
7667 the constant folder, in order to find the simplest
7668 representation of the arguments so that their form can be
7669 studied. In any case, the appropriate type conversions
7670 should be put back in the tree that will get out of the
7671 constant folder. */
7672 STRIP_NOPS (arg0);
7673 }
7674 }
7675
7676 if (TREE_CODE_CLASS (code) == tcc_unary)
7677 {
7678 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7679 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7680 fold_build1_loc (loc, code, type,
7681 fold_convert_loc (loc, TREE_TYPE (op0),
7682 TREE_OPERAND (arg0, 1))));
7683 else if (TREE_CODE (arg0) == COND_EXPR)
7684 {
7685 tree arg01 = TREE_OPERAND (arg0, 1);
7686 tree arg02 = TREE_OPERAND (arg0, 2);
7687 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7688 arg01 = fold_build1_loc (loc, code, type,
7689 fold_convert_loc (loc,
7690 TREE_TYPE (op0), arg01));
7691 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7692 arg02 = fold_build1_loc (loc, code, type,
7693 fold_convert_loc (loc,
7694 TREE_TYPE (op0), arg02));
7695 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7696 arg01, arg02);
7697
7698 /* If this was a conversion, and all we did was to move it
7699 inside the COND_EXPR, bring it back out. But leave it if
7700 it is a conversion from integer to integer and the
7701 result precision is no wider than a word since such a
7702 conversion is cheap and may be optimized away by combine,
7703 while it couldn't if it were outside the COND_EXPR. Then return
7704 so we don't get into an infinite recursion loop taking the
7705 conversion out and then back in. */
7706
7707 if ((CONVERT_EXPR_CODE_P (code)
7708 || code == NON_LVALUE_EXPR)
7709 && TREE_CODE (tem) == COND_EXPR
7710 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7711 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7712 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7713 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7714 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7715 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7716 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7717 && (INTEGRAL_TYPE_P
7718 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7719 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7720 || flag_syntax_only))
7721 tem = build1_loc (loc, code, type,
7722 build3 (COND_EXPR,
7723 TREE_TYPE (TREE_OPERAND
7724 (TREE_OPERAND (tem, 1), 0)),
7725 TREE_OPERAND (tem, 0),
7726 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7727 TREE_OPERAND (TREE_OPERAND (tem, 2),
7728 0)));
7729 return tem;
7730 }
7731 }
7732
7733 switch (code)
7734 {
7735 case PAREN_EXPR:
7736 /* Re-association barriers around constants and other re-association
7737 barriers can be removed. */
7738 if (CONSTANT_CLASS_P (op0)
7739 || TREE_CODE (op0) == PAREN_EXPR)
7740 return fold_convert_loc (loc, type, op0);
7741 return NULL_TREE;
7742
7743 CASE_CONVERT:
7744 case FLOAT_EXPR:
7745 case FIX_TRUNC_EXPR:
7746 if (TREE_TYPE (op0) == type)
7747 return op0;
7748
7749 if (COMPARISON_CLASS_P (op0))
7750 {
7751 /* If we have (type) (a CMP b) and type is an integral type, return
7752 a new expression involving the new type. Canonicalize
7753 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
7754 non-integral type.
7755 Do not fold the result, as that would not simplify further;
7756 folding again only results in recursion. */
7757 if (TREE_CODE (type) == BOOLEAN_TYPE)
7758 return build2_loc (loc, TREE_CODE (op0), type,
7759 TREE_OPERAND (op0, 0),
7760 TREE_OPERAND (op0, 1));
7761 else if (!INTEGRAL_TYPE_P (type))
7762 return build3_loc (loc, COND_EXPR, type, op0,
7763 constant_boolean_node (true, type),
7764 constant_boolean_node (false, type));
7765 }
7766
7767 /* Handle cases of two conversions in a row. */
7768 if (CONVERT_EXPR_P (op0))
7769 {
7770 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7771 tree inter_type = TREE_TYPE (op0);
7772 int inside_int = INTEGRAL_TYPE_P (inside_type);
7773 int inside_ptr = POINTER_TYPE_P (inside_type);
7774 int inside_float = FLOAT_TYPE_P (inside_type);
7775 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7776 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7777 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7778 int inter_int = INTEGRAL_TYPE_P (inter_type);
7779 int inter_ptr = POINTER_TYPE_P (inter_type);
7780 int inter_float = FLOAT_TYPE_P (inter_type);
7781 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7782 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7783 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7784 int final_int = INTEGRAL_TYPE_P (type);
7785 int final_ptr = POINTER_TYPE_P (type);
7786 int final_float = FLOAT_TYPE_P (type);
7787 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7788 unsigned int final_prec = TYPE_PRECISION (type);
7789 int final_unsignedp = TYPE_UNSIGNED (type);
7790
7791 /* In addition to the cases of two conversions in a row
7792 handled below, if we are converting something to its own
7793 type via an object of identical or wider precision, neither
7794 conversion is needed. */
7795 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7796 && (((inter_int || inter_ptr) && final_int)
7797 || (inter_float && final_float))
7798 && inter_prec >= final_prec)
7799 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7800
7801 /* Likewise, if the intermediate and initial types are either both
7802 float or both integer, we don't need the middle conversion if the
7803 former is wider than the latter and doesn't change the signedness
7804 (for integers). Avoid this if the final type is a pointer since
7805 then we sometimes need the middle conversion. Likewise if the
7806 final type has a precision not equal to the size of its mode. */
7807 if (((inter_int && inside_int)
7808 || (inter_float && inside_float)
7809 || (inter_vec && inside_vec))
7810 && inter_prec >= inside_prec
7811 && (inter_float || inter_vec
7812 || inter_unsignedp == inside_unsignedp)
7813 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7814 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7815 && ! final_ptr
7816 && (! final_vec || inter_prec == inside_prec))
7817 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7818
7819 /* If we have a sign-extension of a zero-extended value, we can
7820 replace that by a single zero-extension. */
7821 if (inside_int && inter_int && final_int
7822 && inside_prec < inter_prec && inter_prec < final_prec
7823 && inside_unsignedp && !inter_unsignedp)
7824 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7825
7826 /* Two conversions in a row are not needed unless:
7827 - some conversion is floating-point (overstrict for now), or
7828 - some conversion is a vector (overstrict for now), or
7829 - the intermediate type is narrower than both initial and
7830 final, or
7831 - the intermediate type and innermost type differ in signedness,
7832 and the outermost type is wider than the intermediate, or
7833 - the initial type is a pointer type and the precisions of the
7834 intermediate and final types differ, or
7835 - the final type is a pointer type and the precisions of the
7836 initial and intermediate types differ. */
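/* For example, (short) (long) x with x of type int reduces to
   (short) x: the intermediate widening cannot change the bits
   that survive the final truncation. */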
7837 if (! inside_float && ! inter_float && ! final_float
7838 && ! inside_vec && ! inter_vec && ! final_vec
7839 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7840 && ! (inside_int && inter_int
7841 && inter_unsignedp != inside_unsignedp
7842 && inter_prec < final_prec)
7843 && ((inter_unsignedp && inter_prec > inside_prec)
7844 == (final_unsignedp && final_prec > inter_prec))
7845 && ! (inside_ptr && inter_prec != final_prec)
7846 && ! (final_ptr && inside_prec != inter_prec)
7847 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7848 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7849 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7850 }
7851
7852 /* Handle (T *)&A.B.C for A being of type T and B and C
7853 living at offset zero. This occurs frequently in
7854 C++ upcasting and then accessing the base. */
7855 if (TREE_CODE (op0) == ADDR_EXPR
7856 && POINTER_TYPE_P (type)
7857 && handled_component_p (TREE_OPERAND (op0, 0)))
7858 {
7859 HOST_WIDE_INT bitsize, bitpos;
7860 tree offset;
7861 enum machine_mode mode;
7862 int unsignedp, volatilep;
7863 tree base = TREE_OPERAND (op0, 0);
7864 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7865 &mode, &unsignedp, &volatilep, false);
7866 /* If the reference was to a (constant) zero offset, we can use
7867 the address of the base if it has the same base type
7868 as the result type and the pointer type is unqualified. */
7869 if (! offset && bitpos == 0
7870 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7871 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7872 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7873 return fold_convert_loc (loc, type,
7874 build_fold_addr_expr_loc (loc, base));
7875 }
7876
7877 if (TREE_CODE (op0) == MODIFY_EXPR
7878 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7879 /* Detect assigning a bitfield. */
7880 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7881 && DECL_BIT_FIELD
7882 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7883 {
7884 /* Don't leave an assignment inside a conversion
7885 unless assigning a bitfield. */
7886 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7887 /* First do the assignment, then return converted constant. */
7888 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7889 TREE_NO_WARNING (tem) = 1;
7890 TREE_USED (tem) = 1;
7891 return tem;
7892 }
7893
7894 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7895 constant (if x has signed type, the sign bit cannot be set
7896 in c). This folds extension into the BIT_AND_EXPR.
7897 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7898 very likely don't have maximal range for their precision and this
7899 transformation effectively doesn't preserve non-maximal ranges. */
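/* For example, (long) (x & 0xff) with unsigned int x becomes
   (long) x & 0xff. */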
7900 if (TREE_CODE (type) == INTEGER_TYPE
7901 && TREE_CODE (op0) == BIT_AND_EXPR
7902 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7903 {
7904 tree and_expr = op0;
7905 tree and0 = TREE_OPERAND (and_expr, 0);
7906 tree and1 = TREE_OPERAND (and_expr, 1);
7907 int change = 0;
7908
7909 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7910 || (TYPE_PRECISION (type)
7911 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7912 change = 1;
7913 else if (TYPE_PRECISION (TREE_TYPE (and1))
7914 <= HOST_BITS_PER_WIDE_INT
7915 && host_integerp (and1, 1))
7916 {
7917 unsigned HOST_WIDE_INT cst;
7918
7919 cst = tree_low_cst (and1, 1);
7920 cst &= (HOST_WIDE_INT) -1
7921 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7922 change = (cst == 0);
7923 #ifdef LOAD_EXTEND_OP
7924 if (change
7925 && !flag_syntax_only
7926 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7927 == ZERO_EXTEND))
7928 {
7929 tree uns = unsigned_type_for (TREE_TYPE (and0));
7930 and0 = fold_convert_loc (loc, uns, and0);
7931 and1 = fold_convert_loc (loc, uns, and1);
7932 }
7933 #endif
7934 }
7935 if (change)
7936 {
7937 tem = force_fit_type_double (type, tree_to_double_int (and1),
7938 0, TREE_OVERFLOW (and1));
7939 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7940 fold_convert_loc (loc, type, and0), tem);
7941 }
7942 }
7943
7944 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7945 when one of the new casts will fold away. Conservatively we assume
7946 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7947 if (POINTER_TYPE_P (type)
7948 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7949 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7950 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7951 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7952 {
7953 tree arg00 = TREE_OPERAND (arg0, 0);
7954 tree arg01 = TREE_OPERAND (arg0, 1);
7955
7956 return fold_build_pointer_plus_loc
7957 (loc, fold_convert_loc (loc, type, arg00), arg01);
7958 }
7959
7960 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7961 of the same precision, and X is an integer type not narrower than
7962 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7963 if (INTEGRAL_TYPE_P (type)
7964 && TREE_CODE (op0) == BIT_NOT_EXPR
7965 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7966 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7967 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7968 {
7969 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7970 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7971 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7972 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7973 fold_convert_loc (loc, type, tem));
7974 }
7975
7976 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7977 type of X and Y (integer types only). */
7978 if (INTEGRAL_TYPE_P (type)
7979 && TREE_CODE (op0) == MULT_EXPR
7980 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7981 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7982 {
7983 /* Be careful not to introduce new overflows. */
7984 tree mult_type;
7985 if (TYPE_OVERFLOW_WRAPS (type))
7986 mult_type = type;
7987 else
7988 mult_type = unsigned_type_for (type);
7989
7990 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7991 {
7992 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7993 fold_convert_loc (loc, mult_type,
7994 TREE_OPERAND (op0, 0)),
7995 fold_convert_loc (loc, mult_type,
7996 TREE_OPERAND (op0, 1)));
7997 return fold_convert_loc (loc, type, tem);
7998 }
7999 }
8000
8001 tem = fold_convert_const (code, type, op0);
8002 return tem ? tem : NULL_TREE;
8003
8004 case ADDR_SPACE_CONVERT_EXPR:
8005 if (integer_zerop (arg0))
8006 return fold_convert_const (code, type, arg0);
8007 return NULL_TREE;
8008
8009 case FIXED_CONVERT_EXPR:
8010 tem = fold_convert_const (code, type, arg0);
8011 return tem ? tem : NULL_TREE;
8012
8013 case VIEW_CONVERT_EXPR:
8014 if (TREE_TYPE (op0) == type)
8015 return op0;
8016 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8017 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8018 type, TREE_OPERAND (op0, 0));
8019 if (TREE_CODE (op0) == MEM_REF)
8020 return fold_build2_loc (loc, MEM_REF, type,
8021 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8022
8023 /* For integral conversions with the same precision or pointer
8024 conversions, use a NOP_EXPR instead. */
8025 if ((INTEGRAL_TYPE_P (type)
8026 || POINTER_TYPE_P (type))
8027 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8028 || POINTER_TYPE_P (TREE_TYPE (op0)))
8029 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8030 return fold_convert_loc (loc, type, op0);
8031
8032 /* Strip inner integral conversions that do not change the precision. */
8033 if (CONVERT_EXPR_P (op0)
8034 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8035 || POINTER_TYPE_P (TREE_TYPE (op0)))
8036 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8037 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8038 && (TYPE_PRECISION (TREE_TYPE (op0))
8039 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8040 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8041 type, TREE_OPERAND (op0, 0));
8042
8043 return fold_view_convert_expr (type, op0);
8044
8045 case NEGATE_EXPR:
8046 tem = fold_negate_expr (loc, arg0);
8047 if (tem)
8048 return fold_convert_loc (loc, type, tem);
8049 return NULL_TREE;
8050
8051 case ABS_EXPR:
8052 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8053 return fold_abs_const (arg0, type);
8054 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8055 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8056 /* Convert fabs((double)float) into (double)fabsf(float). */
8057 else if (TREE_CODE (arg0) == NOP_EXPR
8058 && TREE_CODE (type) == REAL_TYPE)
8059 {
8060 tree targ0 = strip_float_extensions (arg0);
8061 if (targ0 != arg0)
8062 return fold_convert_loc (loc, type,
8063 fold_build1_loc (loc, ABS_EXPR,
8064 TREE_TYPE (targ0),
8065 targ0));
8066 }
8067 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8068 else if (TREE_CODE (arg0) == ABS_EXPR)
8069 return arg0;
8070 else if (tree_expr_nonnegative_p (arg0))
8071 return arg0;
8072
8073 /* Strip sign ops from argument. */
8074 if (TREE_CODE (type) == REAL_TYPE)
8075 {
8076 tem = fold_strip_sign_ops (arg0);
8077 if (tem)
8078 return fold_build1_loc (loc, ABS_EXPR, type,
8079 fold_convert_loc (loc, type, tem));
8080 }
8081 return NULL_TREE;
8082
8083 case CONJ_EXPR:
8084 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8085 return fold_convert_loc (loc, type, arg0);
8086 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8087 {
8088 tree itype = TREE_TYPE (type);
8089 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8090 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8091 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8092 negate_expr (ipart));
8093 }
8094 if (TREE_CODE (arg0) == COMPLEX_CST)
8095 {
8096 tree itype = TREE_TYPE (type);
8097 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8098 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8099 return build_complex (type, rpart, negate_expr (ipart));
8100 }
8101 if (TREE_CODE (arg0) == CONJ_EXPR)
8102 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8103 return NULL_TREE;
8104
8105 case BIT_NOT_EXPR:
8106 if (TREE_CODE (arg0) == INTEGER_CST)
8107 return fold_not_const (arg0, type);
8108 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8109 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8110 /* Convert ~ (-A) to A - 1. */
8111 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8112 return fold_build2_loc (loc, MINUS_EXPR, type,
8113 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8114 build_int_cst (type, 1));
8115 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8116 else if (INTEGRAL_TYPE_P (type)
8117 && ((TREE_CODE (arg0) == MINUS_EXPR
8118 && integer_onep (TREE_OPERAND (arg0, 1)))
8119 || (TREE_CODE (arg0) == PLUS_EXPR
8120 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8121 return fold_build1_loc (loc, NEGATE_EXPR, type,
8122 fold_convert_loc (loc, type,
8123 TREE_OPERAND (arg0, 0)));
8124 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8125 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8126 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8127 fold_convert_loc (loc, type,
8128 TREE_OPERAND (arg0, 0)))))
8129 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8130 fold_convert_loc (loc, type,
8131 TREE_OPERAND (arg0, 1)));
8132 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8133 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8134 fold_convert_loc (loc, type,
8135 TREE_OPERAND (arg0, 1)))))
8136 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8137 fold_convert_loc (loc, type,
8138 TREE_OPERAND (arg0, 0)), tem);
8139 /* Perform BIT_NOT_EXPR on each element individually. */
8140 else if (TREE_CODE (arg0) == VECTOR_CST)
8141 {
8142 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8143 int count = TYPE_VECTOR_SUBPARTS (type), i;
8144
8145 for (i = 0; i < count; i++)
8146 {
8147 if (elements)
8148 {
8149 elem = TREE_VALUE (elements);
8150 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8151 if (elem == NULL_TREE)
8152 break;
8153 elements = TREE_CHAIN (elements);
8154 }
8155 else
8156 elem = build_int_cst (TREE_TYPE (type), -1);
8157 list = tree_cons (NULL_TREE, elem, list);
8158 }
8159 if (i == count)
8160 return build_vector (type, nreverse (list));
8161 }
8162
8163 return NULL_TREE;
8164
8165 case TRUTH_NOT_EXPR:
8166 /* The argument to invert_truthvalue must have Boolean type. */
8167 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8168 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8169
8170 /* Note that the operand of this must be an int
8171 and its values must be 0 or 1.
8172 ("true" is a fixed value perhaps depending on the language,
8173 but we don't handle values other than 1 correctly yet.) */
8174 tem = fold_truth_not_expr (loc, arg0);
8175 if (!tem)
8176 return NULL_TREE;
8177 return fold_convert_loc (loc, type, tem);
8178
8179 case REALPART_EXPR:
8180 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8181 return fold_convert_loc (loc, type, arg0);
8182 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8183 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8184 TREE_OPERAND (arg0, 1));
8185 if (TREE_CODE (arg0) == COMPLEX_CST)
8186 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8187 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8188 {
8189 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8190 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8191 fold_build1_loc (loc, REALPART_EXPR, itype,
8192 TREE_OPERAND (arg0, 0)),
8193 fold_build1_loc (loc, REALPART_EXPR, itype,
8194 TREE_OPERAND (arg0, 1)));
8195 return fold_convert_loc (loc, type, tem);
8196 }
8197 if (TREE_CODE (arg0) == CONJ_EXPR)
8198 {
8199 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8200 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8201 TREE_OPERAND (arg0, 0));
8202 return fold_convert_loc (loc, type, tem);
8203 }
8204 if (TREE_CODE (arg0) == CALL_EXPR)
8205 {
8206 tree fn = get_callee_fndecl (arg0);
8207 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8208 switch (DECL_FUNCTION_CODE (fn))
8209 {
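/* cexpi (x) = cos (x) + i sin (x), so the real part of
   cexpi (x) folds to cos (x). */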
8210 CASE_FLT_FN (BUILT_IN_CEXPI):
8211 fn = mathfn_built_in (type, BUILT_IN_COS);
8212 if (fn)
8213 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8214 break;
8215
8216 default:
8217 break;
8218 }
8219 }
8220 return NULL_TREE;
8221
8222 case IMAGPART_EXPR:
8223 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8224 return build_zero_cst (type);
8225 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8226 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8227 TREE_OPERAND (arg0, 0));
8228 if (TREE_CODE (arg0) == COMPLEX_CST)
8229 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8230 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8231 {
8232 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8233 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8234 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8235 TREE_OPERAND (arg0, 0)),
8236 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8237 TREE_OPERAND (arg0, 1)));
8238 return fold_convert_loc (loc, type, tem);
8239 }
8240 if (TREE_CODE (arg0) == CONJ_EXPR)
8241 {
8242 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8243 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8244 return fold_convert_loc (loc, type, negate_expr (tem));
8245 }
8246 if (TREE_CODE (arg0) == CALL_EXPR)
8247 {
8248 tree fn = get_callee_fndecl (arg0);
8249 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8250 switch (DECL_FUNCTION_CODE (fn))
8251 {
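/* cexpi (x) = cos (x) + i sin (x), so the imaginary part of
   cexpi (x) folds to sin (x). */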
8252 CASE_FLT_FN (BUILT_IN_CEXPI):
8253 fn = mathfn_built_in (type, BUILT_IN_SIN);
8254 if (fn)
8255 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8256 break;
8257
8258 default:
8259 break;
8260 }
8261 }
8262 return NULL_TREE;
8263
8264 case INDIRECT_REF:
8265 /* Fold *&X to X if X is an lvalue. */
8266 if (TREE_CODE (op0) == ADDR_EXPR)
8267 {
8268 tree op00 = TREE_OPERAND (op0, 0);
8269 if ((TREE_CODE (op00) == VAR_DECL
8270 || TREE_CODE (op00) == PARM_DECL
8271 || TREE_CODE (op00) == RESULT_DECL)
8272 && !TREE_READONLY (op00))
8273 return op00;
8274 }
8275 return NULL_TREE;
8276
8277 default:
8278 return NULL_TREE;
8279 } /* switch (code) */
8280 }
8281
8282
8283 /* If the operation was a conversion, do _not_ mark a resulting constant
8284 with TREE_OVERFLOW if the original constant was not. These conversions
8285 have implementation defined behavior and retaining the TREE_OVERFLOW
8286 flag here would confuse later passes such as VRP. */
8287 tree
8288 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8289 tree type, tree op0)
8290 {
8291 tree res = fold_unary_loc (loc, code, type, op0);
8292 if (res
8293 && TREE_CODE (res) == INTEGER_CST
8294 && TREE_CODE (op0) == INTEGER_CST
8295 && CONVERT_EXPR_CODE_P (code))
8296 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8297
8298 return res;
8299 }
8300
8301 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8302 operands OP0 and OP1. LOC is the location of the resulting expression.
8303 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8304 Return the folded expression if folding is successful. Otherwise,
8305 return NULL_TREE. */
8306 static tree
8307 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8308 tree arg0, tree arg1, tree op0, tree op1)
8309 {
8310 tree tem;
8311
8312 /* We only do these simplifications if we are optimizing. */
8313 if (!optimize)
8314 return NULL_TREE;
8315
8316 /* Check for things like (A || B) && (A || C). We can convert this
8317 to A || (B && C). Note that either operator can be any of the four
8318 truth and/or operations and the transformation will still be
8319 valid. Also note that we only care about order for the
8320 ANDIF and ORIF operators. If B contains side effects, this
8321 might change the truth-value of A. */
8322 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8323 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8324 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8325 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8326 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8327 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8328 {
8329 tree a00 = TREE_OPERAND (arg0, 0);
8330 tree a01 = TREE_OPERAND (arg0, 1);
8331 tree a10 = TREE_OPERAND (arg1, 0);
8332 tree a11 = TREE_OPERAND (arg1, 1);
8333 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8334 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8335 && (code == TRUTH_AND_EXPR
8336 || code == TRUTH_OR_EXPR));
8337
8338 if (operand_equal_p (a00, a10, 0))
8339 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8340 fold_build2_loc (loc, code, type, a01, a11));
8341 else if (commutative && operand_equal_p (a00, a11, 0))
8342 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8343 fold_build2_loc (loc, code, type, a01, a10));
8344 else if (commutative && operand_equal_p (a01, a10, 0))
8345 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8346 fold_build2_loc (loc, code, type, a00, a11));
8347
8348 /* This case is tricky because we must either have commutative
8349 operators or else A10 must not have side-effects. */
8350
8351 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8352 && operand_equal_p (a01, a11, 0))
8353 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8354 fold_build2_loc (loc, code, type, a00, a10),
8355 a01);
8356 }
8357
8358 /* See if we can build a range comparison. */
8359 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8360 return tem;
8361
8362 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8363 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8364 {
8365 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8366 if (tem)
8367 return fold_build2_loc (loc, code, type, tem, arg1);
8368 }
8369
8370 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8371 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8372 {
8373 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8374 if (tem)
8375 return fold_build2_loc (loc, code, type, arg0, tem);
8376 }
8377
8378 /* Check for the possibility of merging component references. If our
8379 lhs is another similar operation, try to merge its rhs with our
8380 rhs. Then try to merge our lhs and rhs. */
8381 if (TREE_CODE (arg0) == code
8382 && 0 != (tem = fold_truthop (loc, code, type,
8383 TREE_OPERAND (arg0, 1), arg1)))
8384 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8385
8386 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
8387 return tem;
8388
8389 return NULL_TREE;
8390 }
8391
8392 /* Fold a binary expression of code CODE and type TYPE with operands
8393 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8394 Return the folded expression if folding is successful. Otherwise,
8395 return NULL_TREE. */
8396
8397 static tree
8398 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8399 {
8400 enum tree_code compl_code;
8401
8402 if (code == MIN_EXPR)
8403 compl_code = MAX_EXPR;
8404 else if (code == MAX_EXPR)
8405 compl_code = MIN_EXPR;
8406 else
8407 gcc_unreachable ();
8408
8409 /* MIN (MAX (a, b), b) == b. */
8410 if (TREE_CODE (op0) == compl_code
8411 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8412 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8413
8414 /* MIN (MAX (b, a), b) == b. */
8415 if (TREE_CODE (op0) == compl_code
8416 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8417 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8418 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8419
8420 /* MIN (a, MAX (a, b)) == a. */
8421 if (TREE_CODE (op1) == compl_code
8422 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8423 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8424 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8425
8426 /* MIN (a, MAX (b, a)) == a. */
8427 if (TREE_CODE (op1) == compl_code
8428 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8429 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8430 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8431
8432 return NULL_TREE;
8433 }
8434
8435 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8436 by changing CODE to reduce the magnitude of constants involved in
8437 ARG0 of the comparison.
8438 Returns a canonicalized comparison tree if a simplification was
8439 possible, otherwise returns NULL_TREE.
8440 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8441 valid if signed overflow is undefined. */
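/* For example, a - 2 >= b becomes a - 1 > b, and the sole constant in
   2 <= b is reduced and swapped to give b > 1. */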
8442
8443 static tree
8444 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8445 tree arg0, tree arg1,
8446 bool *strict_overflow_p)
8447 {
8448 enum tree_code code0 = TREE_CODE (arg0);
8449 tree t, cst0 = NULL_TREE;
8450 int sgn0;
8451 bool swap = false;
8452
8453 /* Match A +- CST code arg1 and CST code arg1. We can change the
8454 first form only if overflow is undefined. */
8455 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8456 /* In principle pointers also have undefined overflow behavior,
8457 but that causes problems elsewhere. */
8458 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8459 && (code0 == MINUS_EXPR
8460 || code0 == PLUS_EXPR)
8461 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8462 || code0 == INTEGER_CST))
8463 return NULL_TREE;
8464
8465 /* Identify the constant in arg0 and its sign. */
8466 if (code0 == INTEGER_CST)
8467 cst0 = arg0;
8468 else
8469 cst0 = TREE_OPERAND (arg0, 1);
8470 sgn0 = tree_int_cst_sgn (cst0);
8471
8472 /* Overflowed constants and zero will cause problems. */
8473 if (integer_zerop (cst0)
8474 || TREE_OVERFLOW (cst0))
8475 return NULL_TREE;
8476
8477 /* See if we can reduce the magnitude of the constant in
8478 arg0 by changing the comparison code. */
8479 if (code0 == INTEGER_CST)
8480 {
8481 /* CST <= arg1 -> CST-1 < arg1. */
8482 if (code == LE_EXPR && sgn0 == 1)
8483 code = LT_EXPR;
8484 /* -CST < arg1 -> -CST-1 <= arg1. */
8485 else if (code == LT_EXPR && sgn0 == -1)
8486 code = LE_EXPR;
8487 /* CST > arg1 -> CST-1 >= arg1. */
8488 else if (code == GT_EXPR && sgn0 == 1)
8489 code = GE_EXPR;
8490 /* -CST >= arg1 -> -CST-1 > arg1. */
8491 else if (code == GE_EXPR && sgn0 == -1)
8492 code = GT_EXPR;
8493 else
8494 return NULL_TREE;
8495 /* arg1 code' CST' might be more canonical. */
8496 swap = true;
8497 }
8498 else
8499 {
8500 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8501 if (code == LT_EXPR
8502 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8503 code = LE_EXPR;
8504 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8505 else if (code == GT_EXPR
8506 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8507 code = GE_EXPR;
8508 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8509 else if (code == LE_EXPR
8510 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8511 code = LT_EXPR;
8512 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8513 else if (code == GE_EXPR
8514 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8515 code = GT_EXPR;
8516 else
8517 return NULL_TREE;
8518 *strict_overflow_p = true;
8519 }
8520
8521 /* Now build the constant reduced in magnitude. But not if that
8522 would produce one outside of its type's range. */
8523 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8524 && ((sgn0 == 1
8525 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8526 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8527 || (sgn0 == -1
8528 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8529 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8530 /* We cannot swap the comparison here as that would cause us to
8531 endlessly recurse. */
8532 return NULL_TREE;
8533
8534 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8535 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8536 if (code0 != INTEGER_CST)
8537 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8538 t = fold_convert (TREE_TYPE (arg1), t);
8539
8540 /* If swapping might yield a more canonical form, do so. */
8541 if (swap)
8542 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8543 else
8544 return fold_build2_loc (loc, code, type, t, arg1);
8545 }
8546
8547 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE further,
8548 exploiting undefined signed overflow. Try to decrease the magnitude of
8549 constants involved by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR
8550 or vice versa, and put sole constants at the second argument position.
8551 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8552
8553 static tree
8554 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8555 tree arg0, tree arg1)
8556 {
8557 tree t;
8558 bool strict_overflow_p;
8559 const char * const warnmsg = G_("assuming signed overflow does not occur "
8560 "when reducing constant in comparison");
8561
8562 /* Try canonicalization by simplifying arg0. */
8563 strict_overflow_p = false;
8564 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8565 &strict_overflow_p);
8566 if (t)
8567 {
8568 if (strict_overflow_p)
8569 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8570 return t;
8571 }
8572
8573 /* Try canonicalization by simplifying arg1 using the swapped
8574 comparison. */
8575 code = swap_tree_comparison (code);
8576 strict_overflow_p = false;
8577 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8578 &strict_overflow_p);
8579 if (t && strict_overflow_p)
8580 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8581 return t;
8582 }
8583
8584 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8585 space. This is used to avoid issuing overflow warnings for
8586 expressions like &p->x, which cannot wrap. */
8587
8588 static bool
8589 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8590 {
8591 unsigned HOST_WIDE_INT offset_low, total_low;
8592 HOST_WIDE_INT size, offset_high, total_high;
8593
8594 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8595 return true;
8596
8597 if (bitpos < 0)
8598 return true;
8599
8600 if (offset == NULL_TREE)
8601 {
8602 offset_low = 0;
8603 offset_high = 0;
8604 }
8605 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8606 return true;
8607 else
8608 {
8609 offset_low = TREE_INT_CST_LOW (offset);
8610 offset_high = TREE_INT_CST_HIGH (offset);
8611 }
8612
8613 if (add_double_with_sign (offset_low, offset_high,
8614 bitpos / BITS_PER_UNIT, 0,
8615 &total_low, &total_high,
8616 true))
8617 return true;
8618
8619 if (total_high != 0)
8620 return true;
8621
8622 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8623 if (size <= 0)
8624 return true;
8625
8626 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8627 array. */
8628 if (TREE_CODE (base) == ADDR_EXPR)
8629 {
8630 HOST_WIDE_INT base_size;
8631
8632 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8633 if (base_size > 0 && size < base_size)
8634 size = base_size;
8635 }
8636
8637 return total_low > (unsigned HOST_WIDE_INT) size;
8638 }
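
/* For example, for &p->x with *p a 16-byte structure and the
   field x at byte offset 12, the total offset of 12 does not
   exceed the size of the pointed-to object, so the address
   cannot wrap and no overflow warning is needed.  */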
8639
8640 /* Subroutine of fold_binary. This routine performs all of the
8641 transformations that are common to the equality/inequality
8642 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8643 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8644 fold_binary should call fold_binary. Fold a comparison with
8645 tree code CODE and type TYPE with operands OP0 and OP1. Return
8646 the folded comparison or NULL_TREE. */
8647
8648 static tree
8649 fold_comparison (location_t loc, enum tree_code code, tree type,
8650 tree op0, tree op1)
8651 {
8652 tree arg0, arg1, tem;
8653
8654 arg0 = op0;
8655 arg1 = op1;
8656
8657 STRIP_SIGN_NOPS (arg0);
8658 STRIP_SIGN_NOPS (arg1);
8659
8660 tem = fold_relational_const (code, type, arg0, arg1);
8661 if (tem != NULL_TREE)
8662 return tem;
8663
8664 /* If one arg is a real or integer constant, put it last. */
8665 if (tree_swap_operands_p (arg0, arg1, true))
8666 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8667
8668 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8669 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8670 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8671 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8672 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8673 && (TREE_CODE (arg1) == INTEGER_CST
8674 && !TREE_OVERFLOW (arg1)))
8675 {
8676 tree const1 = TREE_OPERAND (arg0, 1);
8677 tree const2 = arg1;
8678 tree variable = TREE_OPERAND (arg0, 0);
8679 tree lhs;
8680 int lhs_add;
8681 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8682
8683 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8684 TREE_TYPE (arg1), const2, const1);
8685
8686 /* If the constant operation overflowed this can be
8687 simplified as a comparison against INT_MAX/INT_MIN. */
8688 if (TREE_CODE (lhs) == INTEGER_CST
8689 && TREE_OVERFLOW (lhs))
8690 {
8691 int const1_sgn = tree_int_cst_sgn (const1);
8692 enum tree_code code2 = code;
8693
8694 /* Get the sign of the constant on the lhs if the
8695 operation were VARIABLE + CONST1. */
8696 if (TREE_CODE (arg0) == MINUS_EXPR)
8697 const1_sgn = -const1_sgn;
8698
8699 /* The sign of the constant determines if we overflowed
8700 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8701 Canonicalize to the INT_MIN overflow by swapping the comparison
8702 if necessary. */
8703 if (const1_sgn == -1)
8704 code2 = swap_tree_comparison (code);
8705
8706 /* We now can look at the canonicalized case
8707 VARIABLE + 1 CODE2 INT_MIN
8708 and decide on the result. */
8709 if (code2 == LT_EXPR
8710 || code2 == LE_EXPR
8711 || code2 == EQ_EXPR)
8712 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8713 else if (code2 == NE_EXPR
8714 || code2 == GE_EXPR
8715 || code2 == GT_EXPR)
8716 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8717 }
8718
8719 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8720 && (TREE_CODE (lhs) != INTEGER_CST
8721 || !TREE_OVERFLOW (lhs)))
8722 {
8723 if (code != EQ_EXPR && code != NE_EXPR)
8724 fold_overflow_warning ("assuming signed overflow does not occur "
8725 "when changing X +- C1 cmp C2 to "
8726 "X cmp C1 +- C2",
8727 WARN_STRICT_OVERFLOW_COMPARISON);
8728 return fold_build2_loc (loc, code, type, variable, lhs);
8729 }
8730 }
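
/* As an example of the transformation above, x + 20 < 30 becomes
   x < 10 for signed x. If computing C2 +- C1 overflows, the
   comparison degenerates to a constant: x - 1 > INT_MAX is always
   false, since it canonicalizes to a comparison against the
   overflowed INT_MIN.  */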
8731
8732 /* For comparisons of pointers we can decompose it to a compile time
8733 comparison of the base objects and the offsets into the object.
8734 This requires at least one operand being an ADDR_EXPR or a
8735 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8736 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8737 && (TREE_CODE (arg0) == ADDR_EXPR
8738 || TREE_CODE (arg1) == ADDR_EXPR
8739 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8740 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8741 {
8742 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8743 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8744 enum machine_mode mode;
8745 int volatilep, unsignedp;
8746 bool indirect_base0 = false, indirect_base1 = false;
8747
8748 /* Get base and offset for the access. Strip ADDR_EXPR for
8749 get_inner_reference, but put it back by stripping INDIRECT_REF
8750 off the base object if possible. indirect_baseN will be true
8751 if baseN is not an address but refers to the object itself. */
8752 base0 = arg0;
8753 if (TREE_CODE (arg0) == ADDR_EXPR)
8754 {
8755 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8756 &bitsize, &bitpos0, &offset0, &mode,
8757 &unsignedp, &volatilep, false);
8758 if (TREE_CODE (base0) == INDIRECT_REF)
8759 base0 = TREE_OPERAND (base0, 0);
8760 else
8761 indirect_base0 = true;
8762 }
8763 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8764 {
8765 base0 = TREE_OPERAND (arg0, 0);
8766 STRIP_SIGN_NOPS (base0);
8767 if (TREE_CODE (base0) == ADDR_EXPR)
8768 {
8769 base0 = TREE_OPERAND (base0, 0);
8770 indirect_base0 = true;
8771 }
8772 offset0 = TREE_OPERAND (arg0, 1);
8773 }
8774
8775 base1 = arg1;
8776 if (TREE_CODE (arg1) == ADDR_EXPR)
8777 {
8778 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8779 &bitsize, &bitpos1, &offset1, &mode,
8780 &unsignedp, &volatilep, false);
8781 if (TREE_CODE (base1) == INDIRECT_REF)
8782 base1 = TREE_OPERAND (base1, 0);
8783 else
8784 indirect_base1 = true;
8785 }
8786 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8787 {
8788 base1 = TREE_OPERAND (arg1, 0);
8789 STRIP_SIGN_NOPS (base1);
8790 if (TREE_CODE (base1) == ADDR_EXPR)
8791 {
8792 base1 = TREE_OPERAND (base1, 0);
8793 indirect_base1 = true;
8794 }
8795 offset1 = TREE_OPERAND (arg1, 1);
8796 }
8797
8798 /* A local variable can never be pointed to by
8799 the default SSA name of an incoming parameter. */
8800 if ((TREE_CODE (arg0) == ADDR_EXPR
8801 && indirect_base0
8802 && TREE_CODE (base0) == VAR_DECL
8803 && auto_var_in_fn_p (base0, current_function_decl)
8804 && !indirect_base1
8805 && TREE_CODE (base1) == SSA_NAME
8806 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8807 && SSA_NAME_IS_DEFAULT_DEF (base1))
8808 || (TREE_CODE (arg1) == ADDR_EXPR
8809 && indirect_base1
8810 && TREE_CODE (base1) == VAR_DECL
8811 && auto_var_in_fn_p (base1, current_function_decl)
8812 && !indirect_base0
8813 && TREE_CODE (base0) == SSA_NAME
8814 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8815 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8816 {
8817 if (code == NE_EXPR)
8818 return constant_boolean_node (1, type);
8819 else if (code == EQ_EXPR)
8820 return constant_boolean_node (0, type);
8821 }
8822 /* If we have equivalent bases we might be able to simplify. */
8823 else if (indirect_base0 == indirect_base1
8824 && operand_equal_p (base0, base1, 0))
8825 {
8826 /* We can fold this expression to a constant if the non-constant
8827 offset parts are equal. */
8828 if ((offset0 == offset1
8829 || (offset0 && offset1
8830 && operand_equal_p (offset0, offset1, 0)))
8831 && (code == EQ_EXPR
8832 || code == NE_EXPR
8833 || (indirect_base0 && DECL_P (base0))
8834 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8836 {
8837 if (code != EQ_EXPR
8838 && code != NE_EXPR
8839 && bitpos0 != bitpos1
8840 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8841 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8842 fold_overflow_warning (("assuming pointer wraparound does not "
8843 "occur when comparing P +- C1 with "
8844 "P +- C2"),
8845 WARN_STRICT_OVERFLOW_CONDITIONAL);
8846
8847 switch (code)
8848 {
8849 case EQ_EXPR:
8850 return constant_boolean_node (bitpos0 == bitpos1, type);
8851 case NE_EXPR:
8852 return constant_boolean_node (bitpos0 != bitpos1, type);
8853 case LT_EXPR:
8854 return constant_boolean_node (bitpos0 < bitpos1, type);
8855 case LE_EXPR:
8856 return constant_boolean_node (bitpos0 <= bitpos1, type);
8857 case GE_EXPR:
8858 return constant_boolean_node (bitpos0 >= bitpos1, type);
8859 case GT_EXPR:
8860 return constant_boolean_node (bitpos0 > bitpos1, type);
8861 default:;
8862 }
8863 }
8864 /* We can simplify the comparison to a comparison of the variable
8865 offset parts if the constant offset parts are equal.
8866 Be careful to use signed size type here because otherwise we
8867 mess with array offsets in the wrong way. This is possible
8868 because pointer arithmetic is restricted to remain within an
8869 object, and overflow on pointer differences is undefined per
8870 C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8871 else if (bitpos0 == bitpos1
8872 && ((code == EQ_EXPR || code == NE_EXPR)
8873 || (indirect_base0 && DECL_P (base0))
8874 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8875 {
8876 /* By converting to signed size type we cover middle-end pointer
8877 arithmetic which operates on unsigned pointer types of size
8878 type size and ARRAY_REF offsets which are properly sign or
8879 zero extended from their type in case it is narrower than
8880 size type. */
8881 if (offset0 == NULL_TREE)
8882 offset0 = build_int_cst (ssizetype, 0);
8883 else
8884 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8885 if (offset1 == NULL_TREE)
8886 offset1 = build_int_cst (ssizetype, 0);
8887 else
8888 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8889
8890 if (code != EQ_EXPR
8891 && code != NE_EXPR
8892 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8893 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8894 fold_overflow_warning (("assuming pointer wraparound does not "
8895 "occur when comparing P +- C1 with "
8896 "P +- C2"),
8897 WARN_STRICT_OVERFLOW_COMPARISON);
8898
8899 return fold_build2_loc (loc, code, type, offset0, offset1);
8900 }
8901 }
8902 /* For non-equal bases we can simplify if they are addresses
8903 of local binding decls or constants. */
8904 else if (indirect_base0 && indirect_base1
8905 /* We know that !operand_equal_p (base0, base1, 0)
8906 because the if condition was false. But make
8907 sure two decls are not the same. */
8908 && base0 != base1
8909 && TREE_CODE (arg0) == ADDR_EXPR
8910 && TREE_CODE (arg1) == ADDR_EXPR
8911 && (((TREE_CODE (base0) == VAR_DECL
8912 || TREE_CODE (base0) == PARM_DECL)
8913 && (targetm.binds_local_p (base0)
8914 || CONSTANT_CLASS_P (base1)))
8915 || CONSTANT_CLASS_P (base0))
8916 && (((TREE_CODE (base1) == VAR_DECL
8917 || TREE_CODE (base1) == PARM_DECL)
8918 && (targetm.binds_local_p (base1)
8919 || CONSTANT_CLASS_P (base0)))
8920 || CONSTANT_CLASS_P (base1)))
8921 {
8922 if (code == EQ_EXPR)
8923 return omit_two_operands_loc (loc, type, boolean_false_node,
8924 arg0, arg1);
8925 else if (code == NE_EXPR)
8926 return omit_two_operands_loc (loc, type, boolean_true_node,
8927 arg0, arg1);
8928 }
8929 /* For equal offsets we can simplify to a comparison of the
8930 base addresses. */
8931 else if (bitpos0 == bitpos1
8932 && (indirect_base0
8933 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8934 && (indirect_base1
8935 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8936 && ((offset0 == offset1)
8937 || (offset0 && offset1
8938 && operand_equal_p (offset0, offset1, 0))))
8939 {
8940 if (indirect_base0)
8941 base0 = build_fold_addr_expr_loc (loc, base0);
8942 if (indirect_base1)
8943 base1 = build_fold_addr_expr_loc (loc, base1);
8944 return fold_build2_loc (loc, code, type, base0, base1);
8945 }
8946 }
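
/* For example, given struct { int a; int b; } s; the comparison
   &s.a == &s.b decomposes to the common base s with two distinct
   bit positions and folds to false, while &x[1] > &x[0] compares
   the constant offsets within the declared object and folds to
   true.  */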
8947
8948 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8949 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8950 the resulting offset is smaller in absolute value than the
8951 original one. */
8952 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8953 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8954 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8955 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8956 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8957 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8958 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8959 {
8960 tree const1 = TREE_OPERAND (arg0, 1);
8961 tree const2 = TREE_OPERAND (arg1, 1);
8962 tree variable1 = TREE_OPERAND (arg0, 0);
8963 tree variable2 = TREE_OPERAND (arg1, 0);
8964 tree cst;
8965 const char * const warnmsg = G_("assuming signed overflow does not "
8966 "occur when combining constants around "
8967 "a comparison");
8968
8969 /* Put the constant on the side where it doesn't overflow and is
8970 of lower absolute value than before. */
8971 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8972 ? MINUS_EXPR : PLUS_EXPR,
8973 const2, const1);
8974 if (!TREE_OVERFLOW (cst)
8975 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8976 {
8977 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8978 return fold_build2_loc (loc, code, type,
8979 variable1,
8980 fold_build2_loc (loc,
8981 TREE_CODE (arg1), TREE_TYPE (arg1),
8982 variable2, cst));
8983 }
8984
8985 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8986 ? MINUS_EXPR : PLUS_EXPR,
8987 const1, const2);
8988 if (!TREE_OVERFLOW (cst)
8989 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8990 {
8991 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8992 return fold_build2_loc (loc, code, type,
8993 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8994 variable1, cst),
8995 variable2);
8996 }
8997 }
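
/* For example, for signed x and y this folds
   x + 100 < y + 10 into x < y - 90, combining the two constants
   into a single offset of smaller absolute value.  */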
8998
8999 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9000 signed arithmetic case. That form is created by the compiler
9001 often enough for folding it to be of value. One example is in
9002 computing loop trip counts after Operator Strength Reduction. */
9003 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9004 && TREE_CODE (arg0) == MULT_EXPR
9005 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9006 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9007 && integer_zerop (arg1))
9008 {
9009 tree const1 = TREE_OPERAND (arg0, 1);
9010 tree const2 = arg1; /* zero */
9011 tree variable1 = TREE_OPERAND (arg0, 0);
9012 enum tree_code cmp_code = code;
9013
9014 /* Handle unfolded multiplication by zero. */
9015 if (integer_zerop (const1))
9016 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9017
9018 fold_overflow_warning (("assuming signed overflow does not occur when "
9019 "eliminating multiplication in comparison "
9020 "with zero"),
9021 WARN_STRICT_OVERFLOW_COMPARISON);
9022
9023 /* If const1 is negative we swap the sense of the comparison. */
9024 if (tree_int_cst_sgn (const1) < 0)
9025 cmp_code = swap_tree_comparison (cmp_code);
9026
9027 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9028 }
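
/* For example, x * 4 > 0 becomes x > 0 for signed x, while
   x * -2 > 0 becomes x < 0 because the negative multiplier
   inverts the sense of the comparison.  */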
9029
9030 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9031 if (tem)
9032 return tem;
9033
9034 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9035 {
9036 tree targ0 = strip_float_extensions (arg0);
9037 tree targ1 = strip_float_extensions (arg1);
9038 tree newtype = TREE_TYPE (targ0);
9039
9040 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9041 newtype = TREE_TYPE (targ1);
9042
9043 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9044 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9045 return fold_build2_loc (loc, code, type,
9046 fold_convert_loc (loc, newtype, targ0),
9047 fold_convert_loc (loc, newtype, targ1));
9048
9049 /* (-a) CMP (-b) -> b CMP a */
9050 if (TREE_CODE (arg0) == NEGATE_EXPR
9051 && TREE_CODE (arg1) == NEGATE_EXPR)
9052 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9053 TREE_OPERAND (arg0, 0));
9054
9055 if (TREE_CODE (arg1) == REAL_CST)
9056 {
9057 REAL_VALUE_TYPE cst;
9058 cst = TREE_REAL_CST (arg1);
9059
9060 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9061 if (TREE_CODE (arg0) == NEGATE_EXPR)
9062 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9063 TREE_OPERAND (arg0, 0),
9064 build_real (TREE_TYPE (arg1),
9065 real_value_negate (&cst)));
9066
9067 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9068 /* a CMP (-0) -> a CMP 0 */
9069 if (REAL_VALUE_MINUS_ZERO (cst))
9070 return fold_build2_loc (loc, code, type, arg0,
9071 build_real (TREE_TYPE (arg1), dconst0));
9072
9073 /* x != NaN is always true, other ops are always false. */
9074 if (REAL_VALUE_ISNAN (cst)
9075 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9076 {
9077 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9078 return omit_one_operand_loc (loc, type, tem, arg0);
9079 }
9080
9081 /* Fold comparisons against infinity. */
9082 if (REAL_VALUE_ISINF (cst)
9083 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9084 {
9085 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9086 if (tem != NULL_TREE)
9087 return tem;
9088 }
9089 }
9090
9091 /* If this is a comparison of a real constant with a PLUS_EXPR
9092 or a MINUS_EXPR of a real constant, we can convert it into a
9093 comparison with a revised real constant as long as no overflow
9094 occurs when unsafe_math_optimizations are enabled. */
9095 if (flag_unsafe_math_optimizations
9096 && TREE_CODE (arg1) == REAL_CST
9097 && (TREE_CODE (arg0) == PLUS_EXPR
9098 || TREE_CODE (arg0) == MINUS_EXPR)
9099 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9100 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9101 ? MINUS_EXPR : PLUS_EXPR,
9102 arg1, TREE_OPERAND (arg0, 1)))
9103 && !TREE_OVERFLOW (tem))
9104 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9105
9106 /* Likewise, we can simplify a comparison of a real constant with
9107 a MINUS_EXPR whose first operand is also a real constant, i.e.
9108 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9109 floating-point types only if -fassociative-math is set. */
9110 if (flag_associative_math
9111 && TREE_CODE (arg1) == REAL_CST
9112 && TREE_CODE (arg0) == MINUS_EXPR
9113 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9114 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9115 arg1))
9116 && !TREE_OVERFLOW (tem))
9117 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9118 TREE_OPERAND (arg0, 1), tem);
9119
9120 /* Fold comparisons against built-in math functions. */
9121 if (TREE_CODE (arg1) == REAL_CST
9122 && flag_unsafe_math_optimizations
9123 && ! flag_errno_math)
9124 {
9125 enum built_in_function fcode = builtin_mathfn_code (arg0);
9126
9127 if (fcode != END_BUILTINS)
9128 {
9129 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9130 if (tem != NULL_TREE)
9131 return tem;
9132 }
9133 }
9134 }
9135
9136 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9137 && CONVERT_EXPR_P (arg0))
9138 {
9139 /* If we are widening one operand of an integer comparison,
9140 see if the other operand is similarly being widened. Perhaps we
9141 can do the comparison in the narrower type. */
9142 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9143 if (tem)
9144 return tem;
9145
9146 /* Or if we are changing signedness. */
9147 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9148 if (tem)
9149 return tem;
9150 }
9151
9152 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9153 constant, we can simplify it. */
9154 if (TREE_CODE (arg1) == INTEGER_CST
9155 && (TREE_CODE (arg0) == MIN_EXPR
9156 || TREE_CODE (arg0) == MAX_EXPR)
9157 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9158 {
9159 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9160 if (tem)
9161 return tem;
9162 }
9163
9164 /* Simplify comparison of something with itself. (For IEEE
9165 floating-point, we can only do some of these simplifications.) */
9166 if (operand_equal_p (arg0, arg1, 0))
9167 {
9168 switch (code)
9169 {
9170 case EQ_EXPR:
9171 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9172 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9173 return constant_boolean_node (1, type);
9174 break;
9175
9176 case GE_EXPR:
9177 case LE_EXPR:
9178 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9179 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9180 return constant_boolean_node (1, type);
9181 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9182
9183 case NE_EXPR:
9184 /* For NE, we can only do this simplification if the type is
9185 integer or we don't honor IEEE floating point NaNs. */
9186 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9187 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9188 break;
9189 /* ... fall through ... */
9190 case GT_EXPR:
9191 case LT_EXPR:
9192 return constant_boolean_node (0, type);
9193 default:
9194 gcc_unreachable ();
9195 }
9196 }
9197
9198 /* If we are comparing an expression that just has comparisons
9199 of two integer values, arithmetic expressions of those comparisons,
9200 and constants, we can simplify it. There are only three cases
9201 to check: the two values can either be equal, the first can be
9202 greater, or the second can be greater. Fold the expression for
9203 those three values. Since each value must be 0 or 1, we have
9204 eight possibilities, each of which corresponds to the constant 0
9205 or 1 or one of the six possible comparisons.
9206
9207 This handles common cases like (a > b) == 0 but also handles
9208 expressions like ((x > y) - (y > x)) > 0, which supposedly
9209 occur in macroized code. */
9210
9211 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9212 {
9213 tree cval1 = 0, cval2 = 0;
9214 int save_p = 0;
9215
9216 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9217 /* Don't handle degenerate cases here; they should already
9218 have been handled anyway. */
9219 && cval1 != 0 && cval2 != 0
9220 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9221 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9222 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9223 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9224 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9225 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9226 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9227 {
9228 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9229 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9230
9231 /* We can't just pass T to eval_subst in case cval1 or cval2
9232 was the same as ARG1. */
9233
9234 tree high_result
9235 = fold_build2_loc (loc, code, type,
9236 eval_subst (loc, arg0, cval1, maxval,
9237 cval2, minval),
9238 arg1);
9239 tree equal_result
9240 = fold_build2_loc (loc, code, type,
9241 eval_subst (loc, arg0, cval1, maxval,
9242 cval2, maxval),
9243 arg1);
9244 tree low_result
9245 = fold_build2_loc (loc, code, type,
9246 eval_subst (loc, arg0, cval1, minval,
9247 cval2, maxval),
9248 arg1);
9249
9250 /* All three of these results should be 0 or 1. Confirm they are.
9251 Then use those values to select the proper code to use. */
9252
9253 if (TREE_CODE (high_result) == INTEGER_CST
9254 && TREE_CODE (equal_result) == INTEGER_CST
9255 && TREE_CODE (low_result) == INTEGER_CST)
9256 {
9257 /* Make a 3-bit mask with the high-order bit being the
9258 value for `>', the next for '=', and the low for '<'. */
9259 switch ((integer_onep (high_result) * 4)
9260 + (integer_onep (equal_result) * 2)
9261 + integer_onep (low_result))
9262 {
9263 case 0:
9264 /* Always false. */
9265 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9266 case 1:
9267 code = LT_EXPR;
9268 break;
9269 case 2:
9270 code = EQ_EXPR;
9271 break;
9272 case 3:
9273 code = LE_EXPR;
9274 break;
9275 case 4:
9276 code = GT_EXPR;
9277 break;
9278 case 5:
9279 code = NE_EXPR;
9280 break;
9281 case 6:
9282 code = GE_EXPR;
9283 break;
9284 case 7:
9285 /* Always true. */
9286 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9287 }
9288
9289 if (save_p)
9290 {
9291 tem = save_expr (build2 (code, type, cval1, cval2));
9292 SET_EXPR_LOCATION (tem, loc);
9293 return tem;
9294 }
9295 return fold_build2_loc (loc, code, type, cval1, cval2);
9296 }
9297 }
9298 }
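
/* For example, for (a > b) == 0 the three substituted
   comparisons evaluate to 0, 1 and 1 for the greater, equal
   and less cases, giving the mask 3 (binary 011), which
   selects LE_EXPR: the expression folds to a <= b.  */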
9299
9300 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9301 into a single range test. */
9302 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9303 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9304 && TREE_CODE (arg1) == INTEGER_CST
9305 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9306 && !integer_zerop (TREE_OPERAND (arg0, 1))
9307 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9308 && !TREE_OVERFLOW (arg1))
9309 {
9310 tem = fold_div_compare (loc, code, type, arg0, arg1);
9311 if (tem != NULL_TREE)
9312 return tem;
9313 }
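
/* For example, x / 4 == 2 holds exactly for x in [8, 11],
   so fold_div_compare can replace the division by a single
   range test on x.  */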
9314
9315 /* Fold ~X op ~Y as Y op X. */
9316 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9317 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9318 {
9319 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9320 return fold_build2_loc (loc, code, type,
9321 fold_convert_loc (loc, cmp_type,
9322 TREE_OPERAND (arg1, 0)),
9323 TREE_OPERAND (arg0, 0));
9324 }
9325
9326 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9327 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9328 && TREE_CODE (arg1) == INTEGER_CST)
9329 {
9330 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9331 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9332 TREE_OPERAND (arg0, 0),
9333 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9334 fold_convert_loc (loc, cmp_type, arg1)));
9335 }
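
/* For example, ~x < ~y becomes y < x, and ~x == 5 becomes
   x == ~5, i.e. x == -6 in a two's complement type.  */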
9336
9337 return NULL_TREE;
9338 }
9339
9340
9341 /* Subroutine of fold_binary. Optimize complex multiplications of the
9342 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9343 argument EXPR represents the expression "z" of type TYPE. */
9344
9345 static tree
9346 fold_mult_zconjz (location_t loc, tree type, tree expr)
9347 {
9348 tree itype = TREE_TYPE (type);
9349 tree rpart, ipart, tem;
9350
9351 if (TREE_CODE (expr) == COMPLEX_EXPR)
9352 {
9353 rpart = TREE_OPERAND (expr, 0);
9354 ipart = TREE_OPERAND (expr, 1);
9355 }
9356 else if (TREE_CODE (expr) == COMPLEX_CST)
9357 {
9358 rpart = TREE_REALPART (expr);
9359 ipart = TREE_IMAGPART (expr);
9360 }
9361 else
9362 {
9363 expr = save_expr (expr);
9364 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9365 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9366 }
9367
9368 rpart = save_expr (rpart);
9369 ipart = save_expr (ipart);
9370 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9371 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9372 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9373 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9374 build_zero_cst (itype));
9375 }
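
/* For z = a + b*i this computes (a + b*i) * (a - b*i)
   = a*a + b*b + 0*i, so the result is built as the complex
   value (a*a + b*b, 0).  */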
9376
9377
9378 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9379 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9380 guarantees that P and N have the same least significant log2(M) bits.
9381 N is not otherwise constrained. In particular, N is not normalized to
9382 0 <= N < M as is common. In general, the precise value of P is unknown.
9383 M is chosen as large as possible such that constant N can be determined.
9384
9385 Returns M and sets *RESIDUE to N.
9386
9387 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9388 account. This is not always possible due to PR 35705.
9389 */
9390
9391 static unsigned HOST_WIDE_INT
9392 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9393 bool allow_func_align)
9394 {
9395 enum tree_code code;
9396
9397 *residue = 0;
9398
9399 code = TREE_CODE (expr);
9400 if (code == ADDR_EXPR)
9401 {
9402 unsigned int bitalign;
9403 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9404 *residue /= BITS_PER_UNIT;
9405 return bitalign / BITS_PER_UNIT;
9406 }
9407 else if (code == POINTER_PLUS_EXPR)
9408 {
9409 tree op0, op1;
9410 unsigned HOST_WIDE_INT modulus;
9411 enum tree_code inner_code;
9412
9413 op0 = TREE_OPERAND (expr, 0);
9414 STRIP_NOPS (op0);
9415 modulus = get_pointer_modulus_and_residue (op0, residue,
9416 allow_func_align);
9417
9418 op1 = TREE_OPERAND (expr, 1);
9419 STRIP_NOPS (op1);
9420 inner_code = TREE_CODE (op1);
9421 if (inner_code == INTEGER_CST)
9422 {
9423 *residue += TREE_INT_CST_LOW (op1);
9424 return modulus;
9425 }
9426 else if (inner_code == MULT_EXPR)
9427 {
9428 op1 = TREE_OPERAND (op1, 1);
9429 if (TREE_CODE (op1) == INTEGER_CST)
9430 {
9431 unsigned HOST_WIDE_INT align;
9432
9433 /* Compute the greatest power-of-2 divisor of op1. */
9434 align = TREE_INT_CST_LOW (op1);
9435 align &= -align;
9436
9437 /* If align is non-zero and less than *modulus, replace
9438 *modulus with align. If align is 0, then either op1 is 0
9439 or the greatest power-of-2 divisor of op1 doesn't fit in an
9440 unsigned HOST_WIDE_INT. In either case, no additional
9441 constraint is imposed. */
9442 if (align)
9443 modulus = MIN (modulus, align);
9444
9445 return modulus;
9446 }
9447 }
9448 }
9449
9450 /* If we get here, we were unable to determine anything useful about the
9451 expression. */
9452 return 1;
9453 }
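
/* For example, for &buf +p i*4 +p 2 with buf known to be 8-byte
   aligned, the recursion yields a modulus of 4 and a residue of 2:
   the pointer value is congruent to 2 modulo 4.  */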
9454
9455
9456 /* Fold a binary expression of code CODE and type TYPE with operands
9457 OP0 and OP1. LOC is the location of the resulting expression.
9458 Return the folded expression if folding is successful. Otherwise,
9459 return NULL_TREE. */
9460
9461 tree
9462 fold_binary_loc (location_t loc,
9463 enum tree_code code, tree type, tree op0, tree op1)
9464 {
9465 enum tree_code_class kind = TREE_CODE_CLASS (code);
9466 tree arg0, arg1, tem;
9467 tree t1 = NULL_TREE;
9468 bool strict_overflow_p;
9469
9470 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9471 && TREE_CODE_LENGTH (code) == 2
9472 && op0 != NULL_TREE
9473 && op1 != NULL_TREE);
9474
9475 arg0 = op0;
9476 arg1 = op1;
9477
9478 /* Strip any conversions that don't change the mode. This is
9479 safe for every expression, except for a comparison expression
9480 because its signedness is derived from its operands. So, in
9481 the latter case, only strip conversions that don't change the
9482 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9483 preserved.
9484
9485 Note that this is done as an internal manipulation within the
9486 constant folder, in order to find the simplest representation
9487 of the arguments so that their form can be studied. In any
9488 cases, the appropriate type conversions should be put back in
9489 the tree that will get out of the constant folder. */
9490
9491 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9492 {
9493 STRIP_SIGN_NOPS (arg0);
9494 STRIP_SIGN_NOPS (arg1);
9495 }
9496 else
9497 {
9498 STRIP_NOPS (arg0);
9499 STRIP_NOPS (arg1);
9500 }
9501
9502 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9503 constant but we can't do arithmetic on them. */
9504 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9505 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9506 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9507 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9508 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9509 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9510 {
9511 if (kind == tcc_binary)
9512 {
9513 /* Make sure type and arg0 have the same saturating flag. */
9514 gcc_assert (TYPE_SATURATING (type)
9515 == TYPE_SATURATING (TREE_TYPE (arg0)));
9516 tem = const_binop (code, arg0, arg1);
9517 }
9518 else if (kind == tcc_comparison)
9519 tem = fold_relational_const (code, type, arg0, arg1);
9520 else
9521 tem = NULL_TREE;
9522
9523 if (tem != NULL_TREE)
9524 {
9525 if (TREE_TYPE (tem) != type)
9526 tem = fold_convert_loc (loc, type, tem);
9527 return tem;
9528 }
9529 }
9530
9531 /* If this is a commutative operation, and ARG0 is a constant, move it
9532 to ARG1 to reduce the number of tests below. */
9533 if (commutative_tree_code (code)
9534 && tree_swap_operands_p (arg0, arg1, true))
9535 return fold_build2_loc (loc, code, type, op1, op0);
9536
9537 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9538
9539 First check for cases where an arithmetic operation is applied to a
9540 compound, conditional, or comparison operation. Push the arithmetic
9541 operation inside the compound or conditional to see if any folding
9542 can then be done. Convert comparison to conditional for this purpose.
9543 This also optimizes non-constant cases that used to be done in
9544 expand_expr.
9545
9546 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9547 where one of the operands is a comparison and the other is a comparison, a
9548 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9549 code below would make the expression more complex. Change it to a
9550 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9551 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9552
9553 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9554 || code == EQ_EXPR || code == NE_EXPR)
9555 && ((truth_value_p (TREE_CODE (arg0))
9556 && (truth_value_p (TREE_CODE (arg1))
9557 || (TREE_CODE (arg1) == BIT_AND_EXPR
9558 && integer_onep (TREE_OPERAND (arg1, 1)))))
9559 || (truth_value_p (TREE_CODE (arg1))
9560 && (truth_value_p (TREE_CODE (arg0))
9561 || (TREE_CODE (arg0) == BIT_AND_EXPR
9562 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9563 {
9564 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9565 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9566 : TRUTH_XOR_EXPR,
9567 boolean_type_node,
9568 fold_convert_loc (loc, boolean_type_node, arg0),
9569 fold_convert_loc (loc, boolean_type_node, arg1));
9570
9571 if (code == EQ_EXPR)
9572 tem = invert_truthvalue_loc (loc, tem);
9573
9574 return fold_convert_loc (loc, type, tem);
9575 }
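
/* For example, (a < b) & (c < d) becomes the TRUTH_AND_EXPR of
   the two comparisons, and (a < b) == (c < d) becomes the
   inversion of their TRUTH_XOR_EXPR.  */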
9576
9577 if (TREE_CODE_CLASS (code) == tcc_binary
9578 || TREE_CODE_CLASS (code) == tcc_comparison)
9579 {
9580 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9581 {
9582 tem = fold_build2_loc (loc, code, type,
9583 fold_convert_loc (loc, TREE_TYPE (op0),
9584 TREE_OPERAND (arg0, 1)), op1);
9585 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9586 tem);
9587 }
9588 if (TREE_CODE (arg1) == COMPOUND_EXPR
9589 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9590 {
9591 tem = fold_build2_loc (loc, code, type, op0,
9592 fold_convert_loc (loc, TREE_TYPE (op1),
9593 TREE_OPERAND (arg1, 1)));
9594 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9595 tem);
9596 }
9597
9598 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9599 {
9600 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9601 arg0, arg1,
9602 /*cond_first_p=*/1);
9603 if (tem != NULL_TREE)
9604 return tem;
9605 }
9606
9607 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9608 {
9609 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9610 arg1, arg0,
9611 /*cond_first_p=*/0);
9612 if (tem != NULL_TREE)
9613 return tem;
9614 }
9615 }
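
/* For example, with a COMPOUND_EXPR operand the arithmetic is
   pushed inside: (tmp = f (), x) + 1 is rewritten as
   (tmp = f (), x + 1), which may expose further folding
   of x + 1.  */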
9616
9617 switch (code)
9618 {
9619 case MEM_REF:
9620 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9621 if (TREE_CODE (arg0) == ADDR_EXPR
9622 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9623 {
9624 tree iref = TREE_OPERAND (arg0, 0);
9625 return fold_build2 (MEM_REF, type,
9626 TREE_OPERAND (iref, 0),
9627 int_const_binop (PLUS_EXPR, arg1,
9628 TREE_OPERAND (iref, 1)));
9629 }
9630
9631 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9632 if (TREE_CODE (arg0) == ADDR_EXPR
9633 && handled_component_p (TREE_OPERAND (arg0, 0)))
9634 {
9635 tree base;
9636 HOST_WIDE_INT coffset;
9637 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9638 &coffset);
9639 if (!base)
9640 return NULL_TREE;
9641 return fold_build2 (MEM_REF, type,
9642 build_fold_addr_expr (base),
9643 int_const_binop (PLUS_EXPR, arg1,
9644 size_int (coffset)));
9645 }
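
/* For example, MEM[&MEM[p, 4], 8] becomes MEM[p, 12], and
   MEM[&a.b, 4] becomes MEM[&a, o + 4] where o is the constant
   byte offset of the field b within a.  */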
9646
9647 return NULL_TREE;
9648
9649 case POINTER_PLUS_EXPR:
9650 /* 0 +p index -> (type)index */
9651 if (integer_zerop (arg0))
9652 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9653
9654 /* PTR +p 0 -> PTR */
9655 if (integer_zerop (arg1))
9656 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9657
9658 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9659 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9660 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9661 return fold_convert_loc (loc, type,
9662 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9663 fold_convert_loc (loc, sizetype,
9664 arg1),
9665 fold_convert_loc (loc, sizetype,
9666 arg0)));
9667
9668 /* (PTR +p B) +p A -> PTR +p (B + A) */
9669 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9670 {
9671 tree inner;
9672 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9673 tree arg00 = TREE_OPERAND (arg0, 0);
9674 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9675 arg01, fold_convert_loc (loc, sizetype, arg1));
9676 return fold_convert_loc (loc, type,
9677 fold_build_pointer_plus_loc (loc,
9678 arg00, inner));
9679 }
9680
9681 /* PTR_CST +p CST -> CST1, i.e. fold the two constants into one. */
9682 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9683 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9684 fold_convert_loc (loc, type, arg1));
9685
9686 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9687 of the array. The loop optimizer sometimes produces this type of
9688 expression. */
9689 if (TREE_CODE (arg0) == ADDR_EXPR)
9690 {
9691 tem = try_move_mult_to_index (loc, arg0,
9692 fold_convert_loc (loc, sizetype, arg1));
9693 if (tem)
9694 return fold_convert_loc (loc, type, tem);
9695 }
9696
9697 return NULL_TREE;
9698
9699 case PLUS_EXPR:
9700 /* A + (-B) -> A - B */
9701 if (TREE_CODE (arg1) == NEGATE_EXPR)
9702 return fold_build2_loc (loc, MINUS_EXPR, type,
9703 fold_convert_loc (loc, type, arg0),
9704 fold_convert_loc (loc, type,
9705 TREE_OPERAND (arg1, 0)));
9706 /* (-A) + B -> B - A */
9707 if (TREE_CODE (arg0) == NEGATE_EXPR
9708 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9709 return fold_build2_loc (loc, MINUS_EXPR, type,
9710 fold_convert_loc (loc, type, arg1),
9711 fold_convert_loc (loc, type,
9712 TREE_OPERAND (arg0, 0)));
9713
9714 if (INTEGRAL_TYPE_P (type))
9715 {
9716 /* Convert ~A + 1 to -A. */
9717 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9718 && integer_onep (arg1))
9719 return fold_build1_loc (loc, NEGATE_EXPR, type,
9720 fold_convert_loc (loc, type,
9721 TREE_OPERAND (arg0, 0)));
9722
9723 /* ~X + X is -1. */
9724 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9725 && !TYPE_OVERFLOW_TRAPS (type))
9726 {
9727 tree tem = TREE_OPERAND (arg0, 0);
9728
9729 STRIP_NOPS (tem);
9730 if (operand_equal_p (tem, arg1, 0))
9731 {
9732 t1 = build_int_cst_type (type, -1);
9733 return omit_one_operand_loc (loc, type, t1, arg1);
9734 }
9735 }
9736
9737 /* X + ~X is -1. */
9738 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9739 && !TYPE_OVERFLOW_TRAPS (type))
9740 {
9741 tree tem = TREE_OPERAND (arg1, 0);
9742
9743 STRIP_NOPS (tem);
9744 if (operand_equal_p (arg0, tem, 0))
9745 {
9746 t1 = build_int_cst_type (type, -1);
9747 return omit_one_operand_loc (loc, type, t1, arg0);
9748 }
9749 }
9750
9751 /* X + (X / CST) * -CST is X % CST. */
9752 if (TREE_CODE (arg1) == MULT_EXPR
9753 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9754 && operand_equal_p (arg0,
9755 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9756 {
9757 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9758 tree cst1 = TREE_OPERAND (arg1, 1);
9759 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9760 cst1, cst0);
9761 if (sum && integer_zerop (sum))
9762 return fold_convert_loc (loc, type,
9763 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9764 TREE_TYPE (arg0), arg0,
9765 cst0));
9766 }
9767 }
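
/* As an example of the last fold above, x + (x / 16) * -16
   becomes x % 16, since the multiplier and the divisor
   sum to zero.  */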
9768
9769 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9770 same or one. Make sure type is not saturating.
9771 fold_plusminus_mult_expr will re-associate. */
9772 if ((TREE_CODE (arg0) == MULT_EXPR
9773 || TREE_CODE (arg1) == MULT_EXPR)
9774 && !TYPE_SATURATING (type)
9775 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9776 {
9777 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9778 if (tem)
9779 return tem;
9780 }
9781
9782 if (! FLOAT_TYPE_P (type))
9783 {
9784 if (integer_zerop (arg1))
9785 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9786
9787 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9788 with a constant, and the two constants have no bits in common,
9789 we should treat this as a BIT_IOR_EXPR since this may produce more
9790 simplifications. */
9791 if (TREE_CODE (arg0) == BIT_AND_EXPR
9792 && TREE_CODE (arg1) == BIT_AND_EXPR
9793 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9794 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9795 && integer_zerop (const_binop (BIT_AND_EXPR,
9796 TREE_OPERAND (arg0, 1),
9797 TREE_OPERAND (arg1, 1))))
9798 {
9799 code = BIT_IOR_EXPR;
9800 goto bit_ior;
9801 }
9802
9803 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9804 (plus (plus (mult) (mult)) (foo)) so that we can
9805 take advantage of the factoring cases below. */
9806 if (TYPE_OVERFLOW_WRAPS (type)
9807 && (((TREE_CODE (arg0) == PLUS_EXPR
9808 || TREE_CODE (arg0) == MINUS_EXPR)
9809 && TREE_CODE (arg1) == MULT_EXPR)
9810 || ((TREE_CODE (arg1) == PLUS_EXPR
9811 || TREE_CODE (arg1) == MINUS_EXPR)
9812 && TREE_CODE (arg0) == MULT_EXPR)))
9813 {
9814 tree parg0, parg1, parg, marg;
9815 enum tree_code pcode;
9816
9817 if (TREE_CODE (arg1) == MULT_EXPR)
9818 parg = arg0, marg = arg1;
9819 else
9820 parg = arg1, marg = arg0;
9821 pcode = TREE_CODE (parg);
9822 parg0 = TREE_OPERAND (parg, 0);
9823 parg1 = TREE_OPERAND (parg, 1);
9824 STRIP_NOPS (parg0);
9825 STRIP_NOPS (parg1);
9826
9827 if (TREE_CODE (parg0) == MULT_EXPR
9828 && TREE_CODE (parg1) != MULT_EXPR)
9829 return fold_build2_loc (loc, pcode, type,
9830 fold_build2_loc (loc, PLUS_EXPR, type,
9831 fold_convert_loc (loc, type,
9832 parg0),
9833 fold_convert_loc (loc, type,
9834 marg)),
9835 fold_convert_loc (loc, type, parg1));
9836 if (TREE_CODE (parg0) != MULT_EXPR
9837 && TREE_CODE (parg1) == MULT_EXPR)
9838 return
9839 fold_build2_loc (loc, PLUS_EXPR, type,
9840 fold_convert_loc (loc, type, parg0),
9841 fold_build2_loc (loc, pcode, type,
9842 fold_convert_loc (loc, type, marg),
9843 fold_convert_loc (loc, type,
9844 parg1)));
9845 }
9846 }
9847 else
9848 {
9849 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9850 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9851 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9852
9853 /* Likewise if the operands are reversed. */
9854 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9855 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9856
9857 /* Convert X + -C into X - C. */
9858 if (TREE_CODE (arg1) == REAL_CST
9859 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9860 {
9861 tem = fold_negate_const (arg1, type);
9862 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9863 return fold_build2_loc (loc, MINUS_EXPR, type,
9864 fold_convert_loc (loc, type, arg0),
9865 fold_convert_loc (loc, type, tem));
9866 }
9867
9868 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9869 to __complex__ ( x, y ). This is not the same for SNaNs or
9870 if signed zeros are involved. */
9871 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9872 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9873 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9874 {
9875 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9876 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9877 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9878 bool arg0rz = false, arg0iz = false;
9879 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9880 || (arg0i && (arg0iz = real_zerop (arg0i))))
9881 {
9882 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9883 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9884 if (arg0rz && arg1i && real_zerop (arg1i))
9885 {
9886 tree rp = arg1r ? arg1r
9887 : build1 (REALPART_EXPR, rtype, arg1);
9888 tree ip = arg0i ? arg0i
9889 : build1 (IMAGPART_EXPR, rtype, arg0);
9890 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9891 }
9892 else if (arg0iz && arg1r && real_zerop (arg1r))
9893 {
9894 tree rp = arg0r ? arg0r
9895 : build1 (REALPART_EXPR, rtype, arg0);
9896 tree ip = arg1i ? arg1i
9897 : build1 (IMAGPART_EXPR, rtype, arg1);
9898 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9899 }
9900 }
9901 }
9902
9903 if (flag_unsafe_math_optimizations
9904 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9905 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9906 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9907 return tem;
9908
9909 /* Convert x+x into x*2.0. */
9910 if (operand_equal_p (arg0, arg1, 0)
9911 && SCALAR_FLOAT_TYPE_P (type))
9912 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9913 build_real (type, dconst2));
9914
9915 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9916 We associate floats only if the user has specified
9917 -fassociative-math. */
9918 if (flag_associative_math
9919 && TREE_CODE (arg1) == PLUS_EXPR
9920 && TREE_CODE (arg0) != MULT_EXPR)
9921 {
9922 tree tree10 = TREE_OPERAND (arg1, 0);
9923 tree tree11 = TREE_OPERAND (arg1, 1);
9924 if (TREE_CODE (tree11) == MULT_EXPR
9925 && TREE_CODE (tree10) == MULT_EXPR)
9926 {
9927 tree tree0;
9928 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9929 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9930 }
9931 }
9932 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9933 We associate floats only if the user has specified
9934 -fassociative-math. */
9935 if (flag_associative_math
9936 && TREE_CODE (arg0) == PLUS_EXPR
9937 && TREE_CODE (arg1) != MULT_EXPR)
9938 {
9939 tree tree00 = TREE_OPERAND (arg0, 0);
9940 tree tree01 = TREE_OPERAND (arg0, 1);
9941 if (TREE_CODE (tree01) == MULT_EXPR
9942 && TREE_CODE (tree00) == MULT_EXPR)
9943 {
9944 tree tree0;
9945 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9946 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9947 }
9948 }
9949 }
9950
9951 bit_rotate:
9952 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9953 is a rotate of A by C1 bits. */
9954 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9955 is a rotate of A by B bits. */
9956 {
9957 enum tree_code code0, code1;
9958 tree rtype;
9959 code0 = TREE_CODE (arg0);
9960 code1 = TREE_CODE (arg1);
9961 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9962 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9963 && operand_equal_p (TREE_OPERAND (arg0, 0),
9964 TREE_OPERAND (arg1, 0), 0)
9965 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9966 TYPE_UNSIGNED (rtype))
9967 /* Only create rotates in complete modes. Other cases are not
9968 expanded properly. */
9969 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9970 {
9971 tree tree01, tree11;
9972 enum tree_code code01, code11;
9973
9974 tree01 = TREE_OPERAND (arg0, 1);
9975 tree11 = TREE_OPERAND (arg1, 1);
9976 STRIP_NOPS (tree01);
9977 STRIP_NOPS (tree11);
9978 code01 = TREE_CODE (tree01);
9979 code11 = TREE_CODE (tree11);
9980 if (code01 == INTEGER_CST
9981 && code11 == INTEGER_CST
9982 && TREE_INT_CST_HIGH (tree01) == 0
9983 && TREE_INT_CST_HIGH (tree11) == 0
9984 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9985 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9986 {
9987 tem = build2_loc (loc, LROTATE_EXPR,
9988 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9989 TREE_OPERAND (arg0, 0),
9990 code0 == LSHIFT_EXPR ? tree01 : tree11);
9991 return fold_convert_loc (loc, type, tem);
9992 }
9993 else if (code11 == MINUS_EXPR)
9994 {
9995 tree tree110, tree111;
9996 tree110 = TREE_OPERAND (tree11, 0);
9997 tree111 = TREE_OPERAND (tree11, 1);
9998 STRIP_NOPS (tree110);
9999 STRIP_NOPS (tree111);
10000 if (TREE_CODE (tree110) == INTEGER_CST
10001 && 0 == compare_tree_int (tree110,
10002 TYPE_PRECISION
10003 (TREE_TYPE (TREE_OPERAND
10004 (arg0, 0))))
10005 && operand_equal_p (tree01, tree111, 0))
10006 return
10007 fold_convert_loc (loc, type,
10008 build2 ((code0 == LSHIFT_EXPR
10009 ? LROTATE_EXPR
10010 : RROTATE_EXPR),
10011 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10012 TREE_OPERAND (arg0, 0), tree01));
10013 }
10014 else if (code01 == MINUS_EXPR)
10015 {
10016 tree tree010, tree011;
10017 tree010 = TREE_OPERAND (tree01, 0);
10018 tree011 = TREE_OPERAND (tree01, 1);
10019 STRIP_NOPS (tree010);
10020 STRIP_NOPS (tree011);
10021 if (TREE_CODE (tree010) == INTEGER_CST
10022 && 0 == compare_tree_int (tree010,
10023 TYPE_PRECISION
10024 (TREE_TYPE (TREE_OPERAND
10025 (arg0, 0))))
10026 && operand_equal_p (tree11, tree011, 0))
10027 return fold_convert_loc
10028 (loc, type,
10029 build2 ((code0 != LSHIFT_EXPR
10030 ? LROTATE_EXPR
10031 : RROTATE_EXPR),
10032 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10033 TREE_OPERAND (arg0, 0), tree11));
10034 }
10035 }
10036 }
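
/* For example, for 32-bit unsigned x, (x << 3) + (x >> 29)
   matches the constant form and becomes x rotated left by 3,
   while (x << b) + (x >> (32 - b)) matches the MINUS_EXPR form
   and likewise becomes a left rotate by b.  */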
10037
10038 associate:
10039 /* In most languages, we can't associate operations on floats through
10040 parentheses. Rather than remember where the parentheses were, we
10041 don't associate floats at all, unless the user has specified
10042 -fassociative-math.
10043 And, we need to make sure type is not saturating. */
10044
10045 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10046 && !TYPE_SATURATING (type))
10047 {
10048 tree var0, con0, lit0, minus_lit0;
10049 tree var1, con1, lit1, minus_lit1;
10050 bool ok = true;
10051
10052 /* Split both trees into variables, constants, and literals. Then
10053 associate each group together, the constants with literals,
10054 then the result with variables. This increases the chances of
10055 literals being recombined later and of generating relocatable
10056 expressions for the sum of a constant and literal. */
10057 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10058 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10059 code == MINUS_EXPR);
10060
10061 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10062 if (code == MINUS_EXPR)
10063 code = PLUS_EXPR;
10064
10065 /* With undefined overflow we can only associate constants with one
10066 variable, and constants whose association doesn't overflow. */
10067 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10068 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10069 {
10070 if (var0 && var1)
10071 {
10072 tree tmp0 = var0;
10073 tree tmp1 = var1;
10074
10075 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10076 tmp0 = TREE_OPERAND (tmp0, 0);
10077 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10078 tmp1 = TREE_OPERAND (tmp1, 0);
10079 /* The only case we can still associate with two variables
10080 is if they are the same, modulo negation. */
10081 if (!operand_equal_p (tmp0, tmp1, 0))
10082 ok = false;
10083 }
10084
10085 if (ok && lit0 && lit1)
10086 {
10087 tree tmp0 = fold_convert (type, lit0);
10088 tree tmp1 = fold_convert (type, lit1);
10089
10090 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10091 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10092 ok = false;
10093 }
10094 }
10095
10096 /* Only do something if we found more than two objects. Otherwise,
10097 nothing has changed and we risk infinite recursion. */
10098 if (ok
10099 && (2 < ((var0 != 0) + (var1 != 0)
10100 + (con0 != 0) + (con1 != 0)
10101 + (lit0 != 0) + (lit1 != 0)
10102 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10103 {
10104 var0 = associate_trees (loc, var0, var1, code, type);
10105 con0 = associate_trees (loc, con0, con1, code, type);
10106 lit0 = associate_trees (loc, lit0, lit1, code, type);
10107 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10108
10109 /* Preserve the MINUS_EXPR if the negative part of the literal is
10110 greater than the positive part. Otherwise, the multiplicative
10111 folding code (i.e. extract_muldiv) may be fooled in case
10112 unsigned constants are subtracted, as in the following
10113 example: ((X*2 + 4) - 8U)/2. */
10114 if (minus_lit0 && lit0)
10115 {
10116 if (TREE_CODE (lit0) == INTEGER_CST
10117 && TREE_CODE (minus_lit0) == INTEGER_CST
10118 && tree_int_cst_lt (lit0, minus_lit0))
10119 {
10120 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10121 MINUS_EXPR, type);
10122 lit0 = 0;
10123 }
10124 else
10125 {
10126 lit0 = associate_trees (loc, lit0, minus_lit0,
10127 MINUS_EXPR, type);
10128 minus_lit0 = 0;
10129 }
10130 }
10131 if (minus_lit0)
10132 {
10133 if (con0 == 0)
10134 return
10135 fold_convert_loc (loc, type,
10136 associate_trees (loc, var0, minus_lit0,
10137 MINUS_EXPR, type));
10138 else
10139 {
10140 con0 = associate_trees (loc, con0, minus_lit0,
10141 MINUS_EXPR, type);
10142 return
10143 fold_convert_loc (loc, type,
10144 associate_trees (loc, var0, con0,
10145 PLUS_EXPR, type));
10146 }
10147 }
10148
10149 con0 = associate_trees (loc, con0, lit0, code, type);
10150 return
10151 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10152 code, type));
10153 }
10154 }
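
/* For example, for unsigned x and y, (x + 3) + (y + 5) splits
   into the variables x, y and the literals 3, 5; reassociation
   then yields (x + y) + 8 with the literals combined.  */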
10155
10156 return NULL_TREE;
10157
10158 case MINUS_EXPR:
10159 /* Pointer simplifications for subtraction, simple reassociations. */
10160 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10161 {
10162 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10163 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10164 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10165 {
10166 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10167 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10168 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10169 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10170 return fold_build2_loc (loc, PLUS_EXPR, type,
10171 fold_build2_loc (loc, MINUS_EXPR, type,
10172 arg00, arg10),
10173 fold_build2_loc (loc, MINUS_EXPR, type,
10174 arg01, arg11));
10175 }
10176 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10177 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10178 {
10179 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10180 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10181 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10182 fold_convert_loc (loc, type, arg1));
10183 if (tmp)
10184 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10185 }
10186 }
10187 /* A - (-B) -> A + B */
10188 if (TREE_CODE (arg1) == NEGATE_EXPR)
10189 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10190 fold_convert_loc (loc, type,
10191 TREE_OPERAND (arg1, 0)));
10192 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10193 if (TREE_CODE (arg0) == NEGATE_EXPR
10194 && (FLOAT_TYPE_P (type)
10195 || INTEGRAL_TYPE_P (type))
10196 && negate_expr_p (arg1)
10197 && reorder_operands_p (arg0, arg1))
10198 return fold_build2_loc (loc, MINUS_EXPR, type,
10199 fold_convert_loc (loc, type,
10200 negate_expr (arg1)),
10201 fold_convert_loc (loc, type,
10202 TREE_OPERAND (arg0, 0)));
10203 /* Convert -A - 1 to ~A. */
10204 if (INTEGRAL_TYPE_P (type)
10205 && TREE_CODE (arg0) == NEGATE_EXPR
10206 && integer_onep (arg1)
10207 && !TYPE_OVERFLOW_TRAPS (type))
10208 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10209 fold_convert_loc (loc, type,
10210 TREE_OPERAND (arg0, 0)));
10211
10212 /* Convert -1 - A to ~A. */
10213 if (INTEGRAL_TYPE_P (type)
10214 && integer_all_onesp (arg0))
10215 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
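      /* Illustrative annotation (added sketch): both folds rest on the
	 two's-complement identity -x == ~x + 1, hence -x - 1 == ~x and
	 -1 - x == ~x.  The !TYPE_OVERFLOW_TRAPS guard on the first form
	 keeps the fold from discarding a negation that could trap for
	 x == INT_MIN.  */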
10216
10217
10218 /* X - (X / CST) * CST is X % CST. */
10219 if (INTEGRAL_TYPE_P (type)
10220 && TREE_CODE (arg1) == MULT_EXPR
10221 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10222 && operand_equal_p (arg0,
10223 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10224 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10225 TREE_OPERAND (arg1, 1), 0))
10226 return
10227 fold_convert_loc (loc, type,
10228 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10229 arg0, TREE_OPERAND (arg1, 1)));
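      /* Illustrative annotation (added sketch): this rewrites, e.g.,
	 x - (x / 16) * 16 as x % 16; the identity holds for truncating
	 division regardless of the sign of x.  */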
10230
10231 if (! FLOAT_TYPE_P (type))
10232 {
10233 if (integer_zerop (arg0))
10234 return negate_expr (fold_convert_loc (loc, type, arg1));
10235 if (integer_zerop (arg1))
10236 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10237
10238 /* Fold A - (A & B) into ~B & A. */
10239 if (!TREE_SIDE_EFFECTS (arg0)
10240 && TREE_CODE (arg1) == BIT_AND_EXPR)
10241 {
10242 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10243 {
10244 tree arg10 = fold_convert_loc (loc, type,
10245 TREE_OPERAND (arg1, 0));
10246 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10247 fold_build1_loc (loc, BIT_NOT_EXPR,
10248 type, arg10),
10249 fold_convert_loc (loc, type, arg0));
10250 }
10251 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10252 {
10253 tree arg11 = fold_convert_loc (loc,
10254 type, TREE_OPERAND (arg1, 1));
10255 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10256 fold_build1_loc (loc, BIT_NOT_EXPR,
10257 type, arg11),
10258 fold_convert_loc (loc, type, arg0));
10259 }
10260 }
10261
10262 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10263 any power of 2 minus 1. */
10264 if (TREE_CODE (arg0) == BIT_AND_EXPR
10265 && TREE_CODE (arg1) == BIT_AND_EXPR
10266 && operand_equal_p (TREE_OPERAND (arg0, 0),
10267 TREE_OPERAND (arg1, 0), 0))
10268 {
10269 tree mask0 = TREE_OPERAND (arg0, 1);
10270 tree mask1 = TREE_OPERAND (arg1, 1);
10271 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10272
10273 if (operand_equal_p (tem, mask1, 0))
10274 {
10275 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10276 TREE_OPERAND (arg0, 0), mask1);
10277 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10278 }
10279 }
10280 }
10281
10282 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10283 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10284 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10285
10286 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10287 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10288 (-ARG1 + ARG0) reduces to -ARG1. */
10289 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10290 return negate_expr (fold_convert_loc (loc, type, arg1));
10291
10292 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10293 __complex__ ( x, -y ). This is not the same for SNaNs or if
10294 signed zeros are involved. */
10295 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10296 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10297 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10298 {
10299 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10300 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10301 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10302 bool arg0rz = false, arg0iz = false;
10303 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10304 || (arg0i && (arg0iz = real_zerop (arg0i))))
10305 {
10306 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10307 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10308 if (arg0rz && arg1i && real_zerop (arg1i))
10309 {
10310 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10311 arg1r ? arg1r
10312 : build1 (REALPART_EXPR, rtype, arg1));
10313 tree ip = arg0i ? arg0i
10314 : build1 (IMAGPART_EXPR, rtype, arg0);
10315 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10316 }
10317 else if (arg0iz && arg1r && real_zerop (arg1r))
10318 {
10319 tree rp = arg0r ? arg0r
10320 : build1 (REALPART_EXPR, rtype, arg0);
10321 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10322 arg1i ? arg1i
10323 : build1 (IMAGPART_EXPR, rtype, arg1));
10324 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10325 }
10326 }
10327 }
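	  /* Illustrative annotation (added sketch): e.g.
	     __complex__ (x, 0.0) - __complex__ (0.0, y) becomes
	     __complex__ (x, -y).  The signed-zero guard matters because
	     for y == +0.0, 0.0 - y is +0.0 while -y is -0.0.  */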
10328
10329 	  /* Fold &x - &x.  This can happen from &x.foo - &x.
10330 	     This is unsafe for certain floats even in non-IEEE formats.
10331 	     In IEEE, it is unsafe because it gives the wrong result for NaNs.
10332 	     Also note that operand_equal_p is always false if an
10333 	     operand is volatile.  */
10334
10335 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10336 && operand_equal_p (arg0, arg1, 0))
10337 return build_zero_cst (type);
10338
10339 /* A - B -> A + (-B) if B is easily negatable. */
10340 if (negate_expr_p (arg1)
10341 && ((FLOAT_TYPE_P (type)
10342 /* Avoid this transformation if B is a positive REAL_CST. */
10343 && (TREE_CODE (arg1) != REAL_CST
10344 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10345 || INTEGRAL_TYPE_P (type)))
10346 return fold_build2_loc (loc, PLUS_EXPR, type,
10347 fold_convert_loc (loc, type, arg0),
10348 fold_convert_loc (loc, type,
10349 negate_expr (arg1)));
10350
10351 /* Try folding difference of addresses. */
10352 {
10353 HOST_WIDE_INT diff;
10354
10355 if ((TREE_CODE (arg0) == ADDR_EXPR
10356 || TREE_CODE (arg1) == ADDR_EXPR)
10357 && ptr_difference_const (arg0, arg1, &diff))
10358 return build_int_cst_type (type, diff);
10359 }
10360
10361 /* Fold &a[i] - &a[j] to i-j. */
10362 if (TREE_CODE (arg0) == ADDR_EXPR
10363 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10364 && TREE_CODE (arg1) == ADDR_EXPR
10365 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10366 {
10367 tree aref0 = TREE_OPERAND (arg0, 0);
10368 tree aref1 = TREE_OPERAND (arg1, 0);
10369 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10370 TREE_OPERAND (aref1, 0), 0))
10371 {
10372 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10373 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10374 tree esz = array_ref_element_size (aref0);
10375 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10376 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10377 fold_convert_loc (loc, type, esz));
10378
10379 }
10380 }
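	  /* Illustrative annotation (added sketch): the result here is the
	     byte difference, e.g. for double a[] the fold gives
	     (i - j) * 8, assuming 8-byte doubles; any division by the
	     element size in a source-level pointer subtraction is
	     represented separately.  */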
10381
10382 if (FLOAT_TYPE_P (type)
10383 && flag_unsafe_math_optimizations
10384 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10385 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10386 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10387 return tem;
10388
10389 	  /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10390 	     same, or one of them being 1.  Make sure the type is not
10391 	     saturating; fold_plusminus_mult_expr will re-associate.  */
10392 if ((TREE_CODE (arg0) == MULT_EXPR
10393 || TREE_CODE (arg1) == MULT_EXPR)
10394 && !TYPE_SATURATING (type)
10395 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10396 {
10397 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10398 if (tem)
10399 return tem;
10400 }
10401
10402 goto associate;
10403
10404 case MULT_EXPR:
10405 /* (-A) * (-B) -> A * B */
10406 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10407 return fold_build2_loc (loc, MULT_EXPR, type,
10408 fold_convert_loc (loc, type,
10409 TREE_OPERAND (arg0, 0)),
10410 fold_convert_loc (loc, type,
10411 negate_expr (arg1)));
10412 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10413 return fold_build2_loc (loc, MULT_EXPR, type,
10414 fold_convert_loc (loc, type,
10415 negate_expr (arg0)),
10416 fold_convert_loc (loc, type,
10417 TREE_OPERAND (arg1, 0)));
10418
10419 if (! FLOAT_TYPE_P (type))
10420 {
10421 if (integer_zerop (arg1))
10422 return omit_one_operand_loc (loc, type, arg1, arg0);
10423 if (integer_onep (arg1))
10424 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10425 /* Transform x * -1 into -x. Make sure to do the negation
10426 on the original operand with conversions not stripped
10427 because we can only strip non-sign-changing conversions. */
10428 if (integer_all_onesp (arg1))
10429 return fold_convert_loc (loc, type, negate_expr (op0));
10430 /* Transform x * -C into -x * C if x is easily negatable. */
10431 if (TREE_CODE (arg1) == INTEGER_CST
10432 && tree_int_cst_sgn (arg1) == -1
10433 && negate_expr_p (arg0)
10434 && (tem = negate_expr (arg1)) != arg1
10435 && !TREE_OVERFLOW (tem))
10436 return fold_build2_loc (loc, MULT_EXPR, type,
10437 fold_convert_loc (loc, type,
10438 negate_expr (arg0)),
10439 tem);
10440
10441 /* (a * (1 << b)) is (a << b) */
10442 if (TREE_CODE (arg1) == LSHIFT_EXPR
10443 && integer_onep (TREE_OPERAND (arg1, 0)))
10444 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10445 TREE_OPERAND (arg1, 1));
10446 if (TREE_CODE (arg0) == LSHIFT_EXPR
10447 && integer_onep (TREE_OPERAND (arg0, 0)))
10448 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10449 TREE_OPERAND (arg0, 1));
10450
10451 /* (A + A) * C -> A * 2 * C */
10452 if (TREE_CODE (arg0) == PLUS_EXPR
10453 && TREE_CODE (arg1) == INTEGER_CST
10454 && operand_equal_p (TREE_OPERAND (arg0, 0),
10455 TREE_OPERAND (arg0, 1), 0))
10456 return fold_build2_loc (loc, MULT_EXPR, type,
10457 omit_one_operand_loc (loc, type,
10458 TREE_OPERAND (arg0, 0),
10459 TREE_OPERAND (arg0, 1)),
10460 fold_build2_loc (loc, MULT_EXPR, type,
10461 					      build_int_cst (type, 2), arg1));
10462
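	    /* Illustrative annotation (added sketch): e.g. (x + x) * 3
	       becomes x * (2 * 3), and the inner constant product then
	       folds to give x * 6.  */
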
10463 strict_overflow_p = false;
10464 if (TREE_CODE (arg1) == INTEGER_CST
10465 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10466 &strict_overflow_p)))
10467 {
10468 if (strict_overflow_p)
10469 fold_overflow_warning (("assuming signed overflow does not "
10470 "occur when simplifying "
10471 "multiplication"),
10472 WARN_STRICT_OVERFLOW_MISC);
10473 return fold_convert_loc (loc, type, tem);
10474 }
10475
10476 /* Optimize z * conj(z) for integer complex numbers. */
10477 if (TREE_CODE (arg0) == CONJ_EXPR
10478 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10479 return fold_mult_zconjz (loc, type, arg1);
10480 if (TREE_CODE (arg1) == CONJ_EXPR
10481 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10482 return fold_mult_zconjz (loc, type, arg0);
10483 }
10484 else
10485 {
10486 /* Maybe fold x * 0 to 0. The expressions aren't the same
10487 when x is NaN, since x * 0 is also NaN. Nor are they the
10488 same in modes with signed zeros, since multiplying a
10489 negative value by 0 gives -0, not +0. */
10490 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10491 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10492 && real_zerop (arg1))
10493 return omit_one_operand_loc (loc, type, arg1, arg0);
10494 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10495 Likewise for complex arithmetic with signed zeros. */
10496 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10497 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10498 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10499 && real_onep (arg1))
10500 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10501
10502 /* Transform x * -1.0 into -x. */
10503 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10504 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10505 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10506 && real_minus_onep (arg1))
10507 return fold_convert_loc (loc, type, negate_expr (arg0));
10508
10509 	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
10510 	     the result for floating-point types due to rounding, so it is
10511 	     applied only if -fassociative-math was specified.  */
10512 if (flag_associative_math
10513 && TREE_CODE (arg0) == RDIV_EXPR
10514 && TREE_CODE (arg1) == REAL_CST
10515 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10516 {
10517 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10518 arg1);
10519 if (tem)
10520 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10521 TREE_OPERAND (arg0, 1));
10522 }
10523
10524 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10525 if (operand_equal_p (arg0, arg1, 0))
10526 {
10527 tree tem = fold_strip_sign_ops (arg0);
10528 if (tem != NULL_TREE)
10529 {
10530 tem = fold_convert_loc (loc, type, tem);
10531 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10532 }
10533 }
10534
10535 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10536 This is not the same for NaNs or if signed zeros are
10537 involved. */
10538 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10539 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10540 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10541 && TREE_CODE (arg1) == COMPLEX_CST
10542 && real_zerop (TREE_REALPART (arg1)))
10543 {
10544 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10545 if (real_onep (TREE_IMAGPART (arg1)))
10546 return
10547 fold_build2_loc (loc, COMPLEX_EXPR, type,
10548 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10549 rtype, arg0)),
10550 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10551 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10552 return
10553 fold_build2_loc (loc, COMPLEX_EXPR, type,
10554 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10555 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10556 rtype, arg0)));
10557 }
10558
10559 /* Optimize z * conj(z) for floating point complex numbers.
10560 Guarded by flag_unsafe_math_optimizations as non-finite
10561 imaginary components don't produce scalar results. */
10562 if (flag_unsafe_math_optimizations
10563 && TREE_CODE (arg0) == CONJ_EXPR
10564 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10565 return fold_mult_zconjz (loc, type, arg1);
10566 if (flag_unsafe_math_optimizations
10567 && TREE_CODE (arg1) == CONJ_EXPR
10568 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10569 return fold_mult_zconjz (loc, type, arg0);
10570
10571 if (flag_unsafe_math_optimizations)
10572 {
10573 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10574 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10575
10576 /* Optimizations of root(...)*root(...). */
10577 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10578 {
10579 tree rootfn, arg;
10580 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10581 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10582
10583 /* Optimize sqrt(x)*sqrt(x) as x. */
10584 if (BUILTIN_SQRT_P (fcode0)
10585 && operand_equal_p (arg00, arg10, 0)
10586 && ! HONOR_SNANS (TYPE_MODE (type)))
10587 return arg00;
10588
10589 /* Optimize root(x)*root(y) as root(x*y). */
10590 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10591 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10592 return build_call_expr_loc (loc, rootfn, 1, arg);
10593 }
10594
10595 /* Optimize expN(x)*expN(y) as expN(x+y). */
10596 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10597 {
10598 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10599 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10600 CALL_EXPR_ARG (arg0, 0),
10601 CALL_EXPR_ARG (arg1, 0));
10602 return build_call_expr_loc (loc, expfn, 1, arg);
10603 }
10604
10605 /* Optimizations of pow(...)*pow(...). */
10606 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10607 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10608 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10609 {
10610 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10611 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10612 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10613 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10614
10615 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10616 if (operand_equal_p (arg01, arg11, 0))
10617 {
10618 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10619 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10620 arg00, arg10);
10621 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10622 }
10623
10624 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10625 if (operand_equal_p (arg00, arg10, 0))
10626 {
10627 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10628 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10629 arg01, arg11);
10630 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10631 }
10632 }
10633
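	      /* Illustrative annotation (added sketch): under
		 -funsafe-math-optimizations these rules turn, e.g.,
		 sqrt (x) * sqrt (y) into sqrt (x * y),
		 pow (x, y) * pow (z, y) into pow (x * z, y), and
		 pow (x, y) * pow (x, z) into pow (x, y + z); none of
		 these preserve IEEE semantics for all inputs.  */
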
10634 /* Optimize tan(x)*cos(x) as sin(x). */
10635 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10636 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10637 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10638 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10639 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10640 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10641 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10642 CALL_EXPR_ARG (arg1, 0), 0))
10643 {
10644 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10645
10646 if (sinfn != NULL_TREE)
10647 return build_call_expr_loc (loc, sinfn, 1,
10648 CALL_EXPR_ARG (arg0, 0));
10649 }
10650
10651 /* Optimize x*pow(x,c) as pow(x,c+1). */
10652 if (fcode1 == BUILT_IN_POW
10653 || fcode1 == BUILT_IN_POWF
10654 || fcode1 == BUILT_IN_POWL)
10655 {
10656 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10657 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10658 if (TREE_CODE (arg11) == REAL_CST
10659 && !TREE_OVERFLOW (arg11)
10660 && operand_equal_p (arg0, arg10, 0))
10661 {
10662 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10663 REAL_VALUE_TYPE c;
10664 tree arg;
10665
10666 c = TREE_REAL_CST (arg11);
10667 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10668 arg = build_real (type, c);
10669 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10670 }
10671 }
10672
10673 /* Optimize pow(x,c)*x as pow(x,c+1). */
10674 if (fcode0 == BUILT_IN_POW
10675 || fcode0 == BUILT_IN_POWF
10676 || fcode0 == BUILT_IN_POWL)
10677 {
10678 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10679 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10680 if (TREE_CODE (arg01) == REAL_CST
10681 && !TREE_OVERFLOW (arg01)
10682 && operand_equal_p (arg1, arg00, 0))
10683 {
10684 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10685 REAL_VALUE_TYPE c;
10686 tree arg;
10687
10688 c = TREE_REAL_CST (arg01);
10689 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10690 arg = build_real (type, c);
10691 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10692 }
10693 }
10694
10695 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10696 if (!in_gimple_form
10697 && optimize_function_for_speed_p (cfun)
10698 && operand_equal_p (arg0, arg1, 0))
10699 {
10700 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10701
10702 if (powfn)
10703 {
10704 tree arg = build_real (type, dconst2);
10705 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10706 }
10707 }
10708 }
10709 }
10710 goto associate;
10711
10712 case BIT_IOR_EXPR:
10713 bit_ior:
10714 if (integer_all_onesp (arg1))
10715 return omit_one_operand_loc (loc, type, arg1, arg0);
10716 if (integer_zerop (arg1))
10717 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10718 if (operand_equal_p (arg0, arg1, 0))
10719 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10720
10721 /* ~X | X is -1. */
10722 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10723 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10724 {
10725 t1 = build_zero_cst (type);
10726 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10727 return omit_one_operand_loc (loc, type, t1, arg1);
10728 }
10729
10730 /* X | ~X is -1. */
10731 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10732 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10733 {
10734 t1 = build_zero_cst (type);
10735 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10736 return omit_one_operand_loc (loc, type, t1, arg0);
10737 }
10738
10739 /* Canonicalize (X & C1) | C2. */
10740 if (TREE_CODE (arg0) == BIT_AND_EXPR
10741 && TREE_CODE (arg1) == INTEGER_CST
10742 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10743 {
10744 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10745 int width = TYPE_PRECISION (type), w;
10746 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10747 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10748 hi2 = TREE_INT_CST_HIGH (arg1);
10749 lo2 = TREE_INT_CST_LOW (arg1);
10750
10751 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10752 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10753 return omit_one_operand_loc (loc, type, arg1,
10754 TREE_OPERAND (arg0, 0));
10755
10756 if (width > HOST_BITS_PER_WIDE_INT)
10757 {
10758 mhi = (unsigned HOST_WIDE_INT) -1
10759 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10760 mlo = -1;
10761 }
10762 else
10763 {
10764 mhi = 0;
10765 mlo = (unsigned HOST_WIDE_INT) -1
10766 >> (HOST_BITS_PER_WIDE_INT - width);
10767 }
10768
10769 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10770 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10771 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10772 TREE_OPERAND (arg0, 0), arg1);
10773
10774 	      /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10775 		 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10776 		 integer mode, which would allow further optimizations.  */
10777 hi1 &= mhi;
10778 lo1 &= mlo;
10779 hi2 &= mhi;
10780 lo2 &= mlo;
10781 hi3 = hi1 & ~hi2;
10782 lo3 = lo1 & ~lo2;
10783 for (w = BITS_PER_UNIT;
10784 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10785 w <<= 1)
10786 {
10787 unsigned HOST_WIDE_INT mask
10788 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10789 if (((lo1 | lo2) & mask) == mask
10790 && (lo1 & ~mask) == 0 && hi1 == 0)
10791 {
10792 hi3 = 0;
10793 lo3 = mask;
10794 break;
10795 }
10796 }
10797 if (hi3 != hi1 || lo3 != lo1)
10798 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10799 fold_build2_loc (loc, BIT_AND_EXPR, type,
10800 TREE_OPERAND (arg0, 0),
10801 build_int_cst_wide (type,
10802 lo3, hi3)),
10803 arg1);
10804 }
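	  /* Illustrative annotation (added sketch): e.g.
	     (x & 0x0c) | 0x3c folds to plain 0x3c since (C1 & C2) == C1,
	     and (x & 0x3c) | 0x0c becomes (x & 0x30) | 0x0c since the
	     bits that C2 already sets are dropped from C1.  */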
10805
10806 /* (X & Y) | Y is (X, Y). */
10807 if (TREE_CODE (arg0) == BIT_AND_EXPR
10808 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10809 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10810 /* (X & Y) | X is (Y, X). */
10811 if (TREE_CODE (arg0) == BIT_AND_EXPR
10812 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10813 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10814 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10815 /* X | (X & Y) is (Y, X). */
10816 if (TREE_CODE (arg1) == BIT_AND_EXPR
10817 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10818 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10819 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10820 /* X | (Y & X) is (Y, X). */
10821 if (TREE_CODE (arg1) == BIT_AND_EXPR
10822 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10823 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10824 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10825
10826 /* (X & ~Y) | (~X & Y) is X ^ Y */
10827 if (TREE_CODE (arg0) == BIT_AND_EXPR
10828 && TREE_CODE (arg1) == BIT_AND_EXPR)
10829 {
10830 tree a0, a1, l0, l1, n0, n1;
10831
10832 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10833 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10834
10835 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10836 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10837
10838 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10839 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10840
10841 if ((operand_equal_p (n0, a0, 0)
10842 && operand_equal_p (n1, a1, 0))
10843 || (operand_equal_p (n0, a1, 0)
10844 && operand_equal_p (n1, a0, 0)))
10845 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10846 }
10847
10848 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10849 if (t1 != NULL_TREE)
10850 return t1;
10851
10852 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10853
10854 This results in more efficient code for machines without a NAND
10855 instruction. Combine will canonicalize to the first form
10856 which will allow use of NAND instructions provided by the
10857 backend if they exist. */
10858 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10859 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10860 {
10861 return
10862 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10863 build2 (BIT_AND_EXPR, type,
10864 fold_convert_loc (loc, type,
10865 TREE_OPERAND (arg0, 0)),
10866 fold_convert_loc (loc, type,
10867 TREE_OPERAND (arg1, 0))));
10868 }
10869
10870 /* See if this can be simplified into a rotate first. If that
10871 is unsuccessful continue in the association code. */
10872 goto bit_rotate;
10873
10874 case BIT_XOR_EXPR:
10875 if (integer_zerop (arg1))
10876 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10877 if (integer_all_onesp (arg1))
10878 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10879 if (operand_equal_p (arg0, arg1, 0))
10880 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10881
10882 /* ~X ^ X is -1. */
10883 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10884 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10885 {
10886 t1 = build_zero_cst (type);
10887 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10888 return omit_one_operand_loc (loc, type, t1, arg1);
10889 }
10890
10891 /* X ^ ~X is -1. */
10892 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10893 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10894 {
10895 t1 = build_zero_cst (type);
10896 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10897 return omit_one_operand_loc (loc, type, t1, arg0);
10898 }
10899
10900 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10901 with a constant, and the two constants have no bits in common,
10902 we should treat this as a BIT_IOR_EXPR since this may produce more
10903 simplifications. */
10904 if (TREE_CODE (arg0) == BIT_AND_EXPR
10905 && TREE_CODE (arg1) == BIT_AND_EXPR
10906 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10907 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10908 && integer_zerop (const_binop (BIT_AND_EXPR,
10909 TREE_OPERAND (arg0, 1),
10910 TREE_OPERAND (arg1, 1))))
10911 {
10912 code = BIT_IOR_EXPR;
10913 goto bit_ior;
10914 }
10915
10916 	  /* (X | Y) ^ X -> Y & ~X.  */
10917 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10918 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10919 {
10920 tree t2 = TREE_OPERAND (arg0, 1);
10921 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10922 arg1);
10923 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10924 fold_convert_loc (loc, type, t2),
10925 fold_convert_loc (loc, type, t1));
10926 return t1;
10927 }
10928
10929 	  /* (Y | X) ^ X -> Y & ~X.  */
10930 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10931 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10932 {
10933 tree t2 = TREE_OPERAND (arg0, 0);
10934 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10935 arg1);
10936 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10937 fold_convert_loc (loc, type, t2),
10938 fold_convert_loc (loc, type, t1));
10939 return t1;
10940 }
10941
10942 	  /* X ^ (X | Y) -> Y & ~X.  */
10943 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10944 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10945 {
10946 tree t2 = TREE_OPERAND (arg1, 1);
10947 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10948 arg0);
10949 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10950 fold_convert_loc (loc, type, t2),
10951 fold_convert_loc (loc, type, t1));
10952 return t1;
10953 }
10954
10955 	  /* X ^ (Y | X) -> Y & ~X.  */
10956 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10957 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10958 {
10959 tree t2 = TREE_OPERAND (arg1, 0);
10960 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10961 arg0);
10962 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10963 fold_convert_loc (loc, type, t2),
10964 fold_convert_loc (loc, type, t1));
10965 return t1;
10966 }
10967
10968 /* Convert ~X ^ ~Y to X ^ Y. */
10969 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10970 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10971 return fold_build2_loc (loc, code, type,
10972 fold_convert_loc (loc, type,
10973 TREE_OPERAND (arg0, 0)),
10974 fold_convert_loc (loc, type,
10975 TREE_OPERAND (arg1, 0)));
10976
10977 /* Convert ~X ^ C to X ^ ~C. */
10978 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10979 && TREE_CODE (arg1) == INTEGER_CST)
10980 return fold_build2_loc (loc, code, type,
10981 fold_convert_loc (loc, type,
10982 TREE_OPERAND (arg0, 0)),
10983 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10984
10985 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10986 if (TREE_CODE (arg0) == BIT_AND_EXPR
10987 && integer_onep (TREE_OPERAND (arg0, 1))
10988 && integer_onep (arg1))
10989 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10990 build_int_cst (TREE_TYPE (arg0), 0));
10991
10992 /* Fold (X & Y) ^ Y as ~X & Y. */
10993 if (TREE_CODE (arg0) == BIT_AND_EXPR
10994 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10995 {
10996 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10997 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10998 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10999 fold_convert_loc (loc, type, arg1));
11000 }
11001 /* Fold (X & Y) ^ X as ~Y & X. */
11002 if (TREE_CODE (arg0) == BIT_AND_EXPR
11003 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11004 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11005 {
11006 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11007 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11008 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11009 fold_convert_loc (loc, type, arg1));
11010 }
11011 /* Fold X ^ (X & Y) as X & ~Y. */
11012 if (TREE_CODE (arg1) == BIT_AND_EXPR
11013 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11014 {
11015 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11016 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11017 fold_convert_loc (loc, type, arg0),
11018 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11019 }
11020 /* Fold X ^ (Y & X) as ~Y & X. */
11021 if (TREE_CODE (arg1) == BIT_AND_EXPR
11022 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11023 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11024 {
11025 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11026 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11027 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11028 fold_convert_loc (loc, type, arg0));
11029 }
11030
11031 /* See if this can be simplified into a rotate first. If that
11032 is unsuccessful continue in the association code. */
11033 goto bit_rotate;
11034
11035 case BIT_AND_EXPR:
11036 if (integer_all_onesp (arg1))
11037 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11038 if (integer_zerop (arg1))
11039 return omit_one_operand_loc (loc, type, arg1, arg0);
11040 if (operand_equal_p (arg0, arg1, 0))
11041 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11042
11043 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11044 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11045 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11046 || (TREE_CODE (arg0) == EQ_EXPR
11047 && integer_zerop (TREE_OPERAND (arg0, 1))))
11048 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11049 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11050
11051 	  /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11052 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11053 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11054 || (TREE_CODE (arg1) == EQ_EXPR
11055 && integer_zerop (TREE_OPERAND (arg1, 1))))
11056 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11057 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11058
11059 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11060 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11061 && TREE_CODE (arg1) == INTEGER_CST
11062 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11063 {
11064 tree tmp1 = fold_convert_loc (loc, type, arg1);
11065 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11066 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11067 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11068 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11069 return
11070 fold_convert_loc (loc, type,
11071 fold_build2_loc (loc, BIT_IOR_EXPR,
11072 type, tmp2, tmp3));
11073 }
11074
11075 /* (X | Y) & Y is (X, Y). */
11076 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11077 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11078 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11079 /* (X | Y) & X is (Y, X). */
11080 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11081 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11082 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11083 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11084 /* X & (X | Y) is (Y, X). */
11085 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11086 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11087 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11088 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11089 /* X & (Y | X) is (Y, X). */
11090 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11091 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11092 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11093 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11094
11095 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11096 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11097 && integer_onep (TREE_OPERAND (arg0, 1))
11098 && integer_onep (arg1))
11099 {
11100 tem = TREE_OPERAND (arg0, 0);
11101 return fold_build2_loc (loc, EQ_EXPR, type,
11102 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11103 build_int_cst (TREE_TYPE (tem), 1)),
11104 build_int_cst (TREE_TYPE (tem), 0));
11105 }
11106 /* Fold ~X & 1 as (X & 1) == 0. */
11107 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11108 && integer_onep (arg1))
11109 {
11110 tem = TREE_OPERAND (arg0, 0);
11111 return fold_build2_loc (loc, EQ_EXPR, type,
11112 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11113 build_int_cst (TREE_TYPE (tem), 1)),
11114 build_int_cst (TREE_TYPE (tem), 0));
11115 }
11116 /* Fold !X & 1 as X == 0. */
11117 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11118 && integer_onep (arg1))
11119 {
11120 tem = TREE_OPERAND (arg0, 0);
11121 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11122 build_int_cst (TREE_TYPE (tem), 0));
11123 }
11124
11125 /* Fold (X ^ Y) & Y as ~X & Y. */
11126 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11127 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11128 {
11129 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11130 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11131 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11132 fold_convert_loc (loc, type, arg1));
11133 }
11134 /* Fold (X ^ Y) & X as ~Y & X. */
11135 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11136 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11137 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11138 {
11139 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11140 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11141 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11142 fold_convert_loc (loc, type, arg1));
11143 }
11144 /* Fold X & (X ^ Y) as X & ~Y. */
11145 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11146 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11147 {
11148 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11149 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11150 fold_convert_loc (loc, type, arg0),
11151 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11152 }
11153 /* Fold X & (Y ^ X) as ~Y & X. */
11154 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11155 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11156 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11157 {
11158 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11159 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11160 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11161 fold_convert_loc (loc, type, arg0));
11162 }
11163
11164 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11165 ((A & N) + B) & M -> (A + B) & M
11166 Similarly if (N & M) == 0,
11167 ((A | N) + B) & M -> (A + B) & M
11168 and for - instead of + (or unary - instead of +)
11169 and/or ^ instead of |.
11170 If B is constant and (B & M) == 0, fold into A & M. */
11171 if (host_integerp (arg1, 1))
11172 {
11173 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11174 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11175 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11176 && (TREE_CODE (arg0) == PLUS_EXPR
11177 || TREE_CODE (arg0) == MINUS_EXPR
11178 || TREE_CODE (arg0) == NEGATE_EXPR)
11179 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11180 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11181 {
11182 tree pmop[2];
11183 int which = 0;
11184 unsigned HOST_WIDE_INT cst0;
11185
11186 	      /* Now we know that arg0 is (C + D) or (C - D) or
11187 		 -C and arg1 (M) is (1LL << cst) - 1.
11188 		 Store C into PMOP[0] and D into PMOP[1].  */
11189 pmop[0] = TREE_OPERAND (arg0, 0);
11190 pmop[1] = NULL;
11191 if (TREE_CODE (arg0) != NEGATE_EXPR)
11192 {
11193 pmop[1] = TREE_OPERAND (arg0, 1);
11194 which = 1;
11195 }
11196
11197 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11198 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11199 & cst1) != cst1)
11200 which = -1;
11201
11202 for (; which >= 0; which--)
11203 switch (TREE_CODE (pmop[which]))
11204 {
11205 case BIT_AND_EXPR:
11206 case BIT_IOR_EXPR:
11207 case BIT_XOR_EXPR:
11208 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11209 != INTEGER_CST)
11210 break;
11211 /* tree_low_cst not used, because we don't care about
11212 the upper bits. */
11213 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11214 cst0 &= cst1;
11215 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11216 {
11217 if (cst0 != cst1)
11218 break;
11219 }
11220 else if (cst0 != 0)
11221 break;
11222 /* If C or D is of the form (A & N) where
11223 (N & M) == M, or of the form (A | N) or
11224 (A ^ N) where (N & M) == 0, replace it with A. */
11225 pmop[which] = TREE_OPERAND (pmop[which], 0);
11226 break;
11227 case INTEGER_CST:
11228 /* If C or D is a N where (N & M) == 0, it can be
11229 omitted (assumed 0). */
11230 if ((TREE_CODE (arg0) == PLUS_EXPR
11231 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11232 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11233 pmop[which] = NULL;
11234 break;
11235 default:
11236 break;
11237 }
11238
11239 /* Only build anything new if we optimized one or both arguments
11240 above. */
11241 if (pmop[0] != TREE_OPERAND (arg0, 0)
11242 || (TREE_CODE (arg0) != NEGATE_EXPR
11243 && pmop[1] != TREE_OPERAND (arg0, 1)))
11244 {
11245 tree utype = TREE_TYPE (arg0);
11246 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11247 {
11248 /* Perform the operations in a type that has defined
11249 overflow behavior. */
11250 utype = unsigned_type_for (TREE_TYPE (arg0));
11251 if (pmop[0] != NULL)
11252 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11253 if (pmop[1] != NULL)
11254 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11255 }
11256
11257 if (TREE_CODE (arg0) == NEGATE_EXPR)
11258 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11259 else if (TREE_CODE (arg0) == PLUS_EXPR)
11260 {
11261 if (pmop[0] != NULL && pmop[1] != NULL)
11262 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11263 pmop[0], pmop[1]);
11264 else if (pmop[0] != NULL)
11265 tem = pmop[0];
11266 else if (pmop[1] != NULL)
11267 tem = pmop[1];
11268 else
11269 return build_int_cst (type, 0);
11270 }
11271 else if (pmop[0] == NULL)
11272 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11273 else
11274 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11275 pmop[0], pmop[1]);
11276 /* TEM is now the new binary +, - or unary - replacement. */
11277 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11278 fold_convert_loc (loc, utype, arg1));
11279 return fold_convert_loc (loc, type, tem);
11280 }
11281 }
11282 }
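	  /* Illustrative annotation (added sketch): with M == 0xff, e.g.
	     ((a | 0x100) + b) & 0xff folds to (a + b) & 0xff because
	     (N & M) == 0, and ((a & 0x1ff) + b) & 0xff folds the same way
	     because (N & M) == M; the arithmetic is first rewritten in an
	     unsigned type when overflow does not already wrap.  */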
11283
11284 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11285 if (t1 != NULL_TREE)
11286 return t1;
11287 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11288 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11289 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11290 {
11291 unsigned int prec
11292 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11293
11294 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11295 && (~TREE_INT_CST_LOW (arg1)
11296 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11297 return
11298 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11299 }
11300
11301 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11302
11303 This results in more efficient code for machines without a NOR
11304 instruction. Combine will canonicalize to the first form
11305 which will allow use of NOR instructions provided by the
11306 backend if they exist. */
11307 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11308 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11309 {
11310 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11311 build2 (BIT_IOR_EXPR, type,
11312 fold_convert_loc (loc, type,
11313 TREE_OPERAND (arg0, 0)),
11314 fold_convert_loc (loc, type,
11315 TREE_OPERAND (arg1, 0))));
11316 }
11317
11318 /* If arg0 is derived from the address of an object or function, we may
11319 be able to fold this expression using the object or function's
11320 alignment. */
11321 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11322 {
11323 unsigned HOST_WIDE_INT modulus, residue;
11324 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11325
11326 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11327 integer_onep (arg1));
11328
11329 /* This works because modulus is a power of 2. If this weren't the
11330 case, we'd have to replace it by its greatest power-of-2
11331 divisor: modulus & -modulus. */
11332 if (low < modulus)
11333 return build_int_cst (type, residue & low);
11334 }
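	  /* Illustrative annotation (added sketch): e.g. if arg0 is the
	     address of an object known to be 8-byte aligned, then
	     arg0 & 7 folds to the constant 0 (modulus 8, residue 0).  */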
11335
11336 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11337 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11338 if the new mask might be further optimized. */
11339 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11340 || TREE_CODE (arg0) == RSHIFT_EXPR)
11341 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11342 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11343 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11344 < TYPE_PRECISION (TREE_TYPE (arg0))
11345 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11346 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11347 {
11348 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11349 unsigned HOST_WIDE_INT mask
11350 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11351 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11352 tree shift_type = TREE_TYPE (arg0);
11353
11354 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11355 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11356 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11357 && TYPE_PRECISION (TREE_TYPE (arg0))
11358 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11359 {
11360 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11361 tree arg00 = TREE_OPERAND (arg0, 0);
11362 /* See if more bits can be proven as zero because of
11363 zero extension. */
11364 if (TREE_CODE (arg00) == NOP_EXPR
11365 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11366 {
11367 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11368 if (TYPE_PRECISION (inner_type)
11369 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11370 && TYPE_PRECISION (inner_type) < prec)
11371 {
11372 prec = TYPE_PRECISION (inner_type);
11373 /* See if we can shorten the right shift. */
11374 if (shiftc < prec)
11375 shift_type = inner_type;
11376 }
11377 }
11378 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11379 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11380 zerobits <<= prec - shiftc;
11381 	      /* For an arithmetic shift, if the sign bit could be set, zerobits
11382 		 can actually contain sign bits, so no transformation is
11383 		 possible unless MASK masks them all away.  In that case
11384 		 the shift needs to be converted into a logical shift.  */
11385 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11386 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11387 {
11388 if ((mask & zerobits) == 0)
11389 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11390 else
11391 zerobits = 0;
11392 }
11393 }
11394
11395 /* ((X << 16) & 0xff00) is (X, 0). */
11396 if ((mask & zerobits) == mask)
11397 return omit_one_operand_loc (loc, type,
11398 build_int_cst (type, 0), arg0);
11399
11400 newmask = mask | zerobits;
11401 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11402 {
11403 unsigned int prec;
11404
11405 /* Only do the transformation if NEWMASK is some integer
11406 mode's mask. */
11407 for (prec = BITS_PER_UNIT;
11408 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11409 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11410 break;
11411 if (prec < HOST_BITS_PER_WIDE_INT
11412 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11413 {
11414 tree newmaskt;
11415
11416 if (shift_type != TREE_TYPE (arg0))
11417 {
11418 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11419 fold_convert_loc (loc, shift_type,
11420 TREE_OPERAND (arg0, 0)),
11421 TREE_OPERAND (arg0, 1));
11422 tem = fold_convert_loc (loc, type, tem);
11423 }
11424 else
11425 tem = op0;
11426 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11427 if (!tree_int_cst_equal (newmaskt, arg1))
11428 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11429 }
11430 }
11431 }
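	  /* Illustrative annotation (added sketch): for a 32-bit unsigned
	     x, (x >> 24) & 0xff widens the mask to all-ones because the
	     shift already guarantees the upper 24 bits are zero, and the
	     now-redundant BIT_AND_EXPR can then fold away entirely.  */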
11432
11433 goto associate;
11434
11435 case RDIV_EXPR:
11436 /* Don't touch a floating-point divide by zero unless the mode
11437 of the constant can represent infinity. */
11438 if (TREE_CODE (arg1) == REAL_CST
11439 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11440 && real_zerop (arg1))
11441 return NULL_TREE;
11442
11443 /* Optimize A / A to 1.0 if we don't care about
11444 NaNs or Infinities. Skip the transformation
11445 for non-real operands. */
11446 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11447 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11448 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11449 && operand_equal_p (arg0, arg1, 0))
11450 {
11451 tree r = build_real (TREE_TYPE (arg0), dconst1);
11452
11453 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11454 }
11455
11456 /* The complex version of the above A / A optimization. */
11457 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11458 && operand_equal_p (arg0, arg1, 0))
11459 {
11460 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11461 if (! HONOR_NANS (TYPE_MODE (elem_type))
11462 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11463 {
11464 tree r = build_real (elem_type, dconst1);
11465 /* omit_two_operands will call fold_convert for us. */
11466 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11467 }
11468 }
11469
11470 /* (-A) / (-B) -> A / B */
11471 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11472 return fold_build2_loc (loc, RDIV_EXPR, type,
11473 TREE_OPERAND (arg0, 0),
11474 negate_expr (arg1));
11475 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11476 return fold_build2_loc (loc, RDIV_EXPR, type,
11477 negate_expr (arg0),
11478 TREE_OPERAND (arg1, 0));
11479
11480 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11481 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11482 && real_onep (arg1))
11483 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11484
11485 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11486 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11487 && real_minus_onep (arg1))
11488 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11489 negate_expr (arg0)));
11490
11491 /* If ARG1 is a constant, we can convert this to a multiply by the
11492 reciprocal. This does not have the same rounding properties,
11493 so only do this if -freciprocal-math. We can actually
11494 always safely do it if ARG1 is a power of two, but it's hard to
11495 tell if it is or not in a portable manner. */
11496 if (TREE_CODE (arg1) == REAL_CST)
11497 {
11498 if (flag_reciprocal_math
11499 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11500 arg1)))
11501 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11502 /* Find the reciprocal if optimizing and the result is exact. */
11503 if (optimize)
11504 {
11505 REAL_VALUE_TYPE r;
11506 r = TREE_REAL_CST (arg1);
11507 	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11508 {
11509 tem = build_real (type, r);
11510 return fold_build2_loc (loc, MULT_EXPR, type,
11511 fold_convert_loc (loc, type, arg0), tem);
11512 }
11513 }
11514 }
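	  /* Illustrative annotation (added sketch): with -freciprocal-math,
	     x / 5.0 becomes x * 0.2, while x / 2.0 can become x * 0.5
	     whenever optimizing, since 0.5 is the exactly representable
	     reciprocal found by exact_real_inverse.  */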
11515 /* Convert A/B/C to A/(B*C). */
11516 if (flag_reciprocal_math
11517 && TREE_CODE (arg0) == RDIV_EXPR)
11518 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11519 fold_build2_loc (loc, MULT_EXPR, type,
11520 TREE_OPERAND (arg0, 1), arg1));
11521
11522 /* Convert A/(B/C) to (A/B)*C. */
11523 if (flag_reciprocal_math
11524 && TREE_CODE (arg1) == RDIV_EXPR)
11525 return fold_build2_loc (loc, MULT_EXPR, type,
11526 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11527 TREE_OPERAND (arg1, 0)),
11528 TREE_OPERAND (arg1, 1));
11529
11530 /* Convert C1/(X*C2) into (C1/C2)/X. */
11531 if (flag_reciprocal_math
11532 && TREE_CODE (arg1) == MULT_EXPR
11533 && TREE_CODE (arg0) == REAL_CST
11534 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11535 {
11536 tree tem = const_binop (RDIV_EXPR, arg0,
11537 TREE_OPERAND (arg1, 1));
11538 if (tem)
11539 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11540 TREE_OPERAND (arg1, 0));
11541 }
11542
11543 if (flag_unsafe_math_optimizations)
11544 {
11545 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11546 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11547
11548 /* Optimize sin(x)/cos(x) as tan(x). */
11549 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11550 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11551 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11552 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11553 CALL_EXPR_ARG (arg1, 0), 0))
11554 {
11555 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11556
11557 if (tanfn != NULL_TREE)
11558 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11559 }
11560
11561 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11562 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11563 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11564 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11565 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11566 CALL_EXPR_ARG (arg1, 0), 0))
11567 {
11568 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11569
11570 if (tanfn != NULL_TREE)
11571 {
11572 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11573 CALL_EXPR_ARG (arg0, 0));
11574 return fold_build2_loc (loc, RDIV_EXPR, type,
11575 build_real (type, dconst1), tmp);
11576 }
11577 }
11578
11579 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11580 NaNs or Infinities. */
11581 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11582 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11583 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11584 {
11585 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11586 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11587
11588 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11589 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11590 && operand_equal_p (arg00, arg01, 0))
11591 {
11592 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11593
11594 if (cosfn != NULL_TREE)
11595 return build_call_expr_loc (loc, cosfn, 1, arg00);
11596 }
11597 }
11598
11599 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11600 NaNs or Infinities. */
11601 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11602 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11603 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11604 {
11605 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11606 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11607
11608 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11609 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11610 && operand_equal_p (arg00, arg01, 0))
11611 {
11612 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11613
11614 if (cosfn != NULL_TREE)
11615 {
11616 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11617 return fold_build2_loc (loc, RDIV_EXPR, type,
11618 build_real (type, dconst1),
11619 tmp);
11620 }
11621 }
11622 }
11623
11624 /* Optimize pow(x,c)/x as pow(x,c-1). */
11625 if (fcode0 == BUILT_IN_POW
11626 || fcode0 == BUILT_IN_POWF
11627 || fcode0 == BUILT_IN_POWL)
11628 {
11629 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11630 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11631 if (TREE_CODE (arg01) == REAL_CST
11632 && !TREE_OVERFLOW (arg01)
11633 && operand_equal_p (arg1, arg00, 0))
11634 {
11635 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11636 REAL_VALUE_TYPE c;
11637 tree arg;
11638
11639 c = TREE_REAL_CST (arg01);
11640 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11641 arg = build_real (type, c);
11642 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11643 }
11644 }
11645
11646 /* Optimize a/root(b/c) into a*root(c/b). */
11647 if (BUILTIN_ROOT_P (fcode1))
11648 {
11649 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11650
11651 if (TREE_CODE (rootarg) == RDIV_EXPR)
11652 {
11653 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11654 tree b = TREE_OPERAND (rootarg, 0);
11655 tree c = TREE_OPERAND (rootarg, 1);
11656
11657 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11658
11659 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11660 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11661 }
11662 }
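	      /* Illustrative annotation (added sketch): e.g.
		 a / sqrt (b / c) becomes a * sqrt (c / b), trading the
		 outer division for a multiplication at the cost of
		 strict IEEE conformance.  */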
11663
11664 /* Optimize x/expN(y) into x*expN(-y). */
11665 if (BUILTIN_EXPONENT_P (fcode1))
11666 {
11667 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11668 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11669 arg1 = build_call_expr_loc (loc,
11670 expfn, 1,
11671 fold_convert_loc (loc, type, arg));
11672 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11673 }
11674
11675 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11676 if (fcode1 == BUILT_IN_POW
11677 || fcode1 == BUILT_IN_POWF
11678 || fcode1 == BUILT_IN_POWL)
11679 {
11680 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11681 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11682 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11683 tree neg11 = fold_convert_loc (loc, type,
11684 negate_expr (arg11));
11685 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11686 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11687 }
11688 }
11689 return NULL_TREE;
11690
11691 case TRUNC_DIV_EXPR:
11692 /* Optimize (X & (-A)) / A where A is a power of 2,
11693 	 to X >> log2(A).  */
11694 if (TREE_CODE (arg0) == BIT_AND_EXPR
11695 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11696 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11697 {
11698 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11699 arg1, TREE_OPERAND (arg0, 1));
11700 	  if (sum && integer_zerop (sum))
	    {
11701 unsigned long pow2;
11702
11703 if (TREE_INT_CST_LOW (arg1))
11704 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11705 else
11706 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11707 + HOST_BITS_PER_WIDE_INT;
11708
11709 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11710 TREE_OPERAND (arg0, 0),
11711 build_int_cst (integer_type_node, pow2));
11712 }
11713 }
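      /* Illustrative annotation (added sketch): e.g. (x & -16) / 16
	 becomes x >> 4; the masking guarantees the dividend is an exact
	 multiple of 16, so the truncating division matches an arithmetic
	 right shift even for negative x.  */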
11714
11715 /* Fall thru */
11716
11717 case FLOOR_DIV_EXPR:
11718 /* Simplify A / (B << N) where A and B are positive and B is
11719 a power of 2, to A >> (N + log2(B)). */
11720 strict_overflow_p = false;
11721 if (TREE_CODE (arg1) == LSHIFT_EXPR
11722 && (TYPE_UNSIGNED (type)
11723 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11724 {
11725 tree sval = TREE_OPERAND (arg1, 0);
11726 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11727 {
11728 tree sh_cnt = TREE_OPERAND (arg1, 1);
11729 unsigned long pow2;
11730
11731 if (TREE_INT_CST_LOW (sval))
11732 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11733 else
11734 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11735 + HOST_BITS_PER_WIDE_INT;
11736
11737 if (strict_overflow_p)
11738 fold_overflow_warning (("assuming signed overflow does not "
11739 "occur when simplifying A / (B << N)"),
11740 WARN_STRICT_OVERFLOW_MISC);
11741
11742 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11743 sh_cnt,
11744 build_int_cst (TREE_TYPE (sh_cnt),
11745 pow2));
11746 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11747 fold_convert_loc (loc, type, arg0), sh_cnt);
11748 }
11749 }
11750
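      /* Illustrative annotation (added sketch): e.g. for unsigned x,
	 x / (4 << n) becomes x >> (n + 2); the nonnegativity check above
	 is what makes the shift equivalent to the division for signed
	 operands.  */
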
11751 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11752 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11753 if (INTEGRAL_TYPE_P (type)
11754 && TYPE_UNSIGNED (type)
11755 && code == FLOOR_DIV_EXPR)
11756 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11757
11758 	  /* ... fall through ... */
11759
11760 case ROUND_DIV_EXPR:
11761 case CEIL_DIV_EXPR:
11762 case EXACT_DIV_EXPR:
11763 if (integer_onep (arg1))
11764 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11765 if (integer_zerop (arg1))
11766 return NULL_TREE;
11767 /* X / -1 is -X. */
11768 if (!TYPE_UNSIGNED (type)
11769 && TREE_CODE (arg1) == INTEGER_CST
11770 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11771 && TREE_INT_CST_HIGH (arg1) == -1)
11772 return fold_convert_loc (loc, type, negate_expr (arg0));
11773
11774 /* Convert -A / -B to A / B when the type is signed and overflow is
11775 undefined. */
11776 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11777 && TREE_CODE (arg0) == NEGATE_EXPR
11778 && negate_expr_p (arg1))
11779 {
11780 if (INTEGRAL_TYPE_P (type))
11781 fold_overflow_warning (("assuming signed overflow does not occur "
11782 "when distributing negation across "
11783 "division"),
11784 WARN_STRICT_OVERFLOW_MISC);
11785 return fold_build2_loc (loc, code, type,
11786 fold_convert_loc (loc, type,
11787 TREE_OPERAND (arg0, 0)),
11788 fold_convert_loc (loc, type,
11789 negate_expr (arg1)));
11790 }
11791 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11792 && TREE_CODE (arg1) == NEGATE_EXPR
11793 && negate_expr_p (arg0))
11794 {
11795 if (INTEGRAL_TYPE_P (type))
11796 fold_overflow_warning (("assuming signed overflow does not occur "
11797 "when distributing negation across "
11798 "division"),
11799 WARN_STRICT_OVERFLOW_MISC);
11800 return fold_build2_loc (loc, code, type,
11801 fold_convert_loc (loc, type,
11802 negate_expr (arg0)),
11803 fold_convert_loc (loc, type,
11804 TREE_OPERAND (arg1, 0)));
11805 }
11806
11807 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11808 operation, EXACT_DIV_EXPR.
11809
11810 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11811 	     At one time others generated faster code; it's not clear whether
11812 	     they still do after the last round of changes to the DIV code in expmed.c.  */
11813 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11814 && multiple_of_p (type, arg0, arg1))
11815 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11816
11817 strict_overflow_p = false;
11818 if (TREE_CODE (arg1) == INTEGER_CST
11819 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11820 &strict_overflow_p)))
11821 {
11822 if (strict_overflow_p)
11823 fold_overflow_warning (("assuming signed overflow does not occur "
11824 "when simplifying division"),
11825 WARN_STRICT_OVERFLOW_MISC);
11826 return fold_convert_loc (loc, type, tem);
11827 }
11828
11829 return NULL_TREE;
11830
11831 case CEIL_MOD_EXPR:
11832 case FLOOR_MOD_EXPR:
11833 case ROUND_MOD_EXPR:
11834 case TRUNC_MOD_EXPR:
11835 /* X % 1 is always zero, but be sure to preserve any side
11836 effects in X. */
11837 if (integer_onep (arg1))
11838 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11839
11840 	  /* For X % 0, return X % 0 unchanged so that we get the
11841 	     proper warnings and errors.  */
11842 if (integer_zerop (arg1))
11843 return NULL_TREE;
11844
11845 /* 0 % X is always zero, but be sure to preserve any side
11846 effects in X. Place this after checking for X == 0. */
11847 if (integer_zerop (arg0))
11848 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11849
11850 /* X % -1 is zero. */
11851 if (!TYPE_UNSIGNED (type)
11852 && TREE_CODE (arg1) == INTEGER_CST
11853 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11854 && TREE_INT_CST_HIGH (arg1) == -1)
11855 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11856
11857 /* X % -C is the same as X % C. */
11858 if (code == TRUNC_MOD_EXPR
11859 && !TYPE_UNSIGNED (type)
11860 && TREE_CODE (arg1) == INTEGER_CST
11861 && !TREE_OVERFLOW (arg1)
11862 && TREE_INT_CST_HIGH (arg1) < 0
11863 && !TYPE_OVERFLOW_TRAPS (type)
11864 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11865 && !sign_bit_p (arg1, arg1))
11866 return fold_build2_loc (loc, code, type,
11867 fold_convert_loc (loc, type, arg0),
11868 fold_convert_loc (loc, type,
11869 negate_expr (arg1)));
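 	  /* With truncating division the remainder takes the sign of the
 	     dividend, e.g. 7 % -4 == 7 % 4 == 3 and -7 % -4 == -7 % 4 == -3,
 	     so negating C cannot change the result.  */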
11870
11871 /* X % -Y is the same as X % Y. */
11872 if (code == TRUNC_MOD_EXPR
11873 && !TYPE_UNSIGNED (type)
11874 && TREE_CODE (arg1) == NEGATE_EXPR
11875 && !TYPE_OVERFLOW_TRAPS (type))
11876 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11877 fold_convert_loc (loc, type,
11878 TREE_OPERAND (arg1, 0)));
11879
11880 strict_overflow_p = false;
11881 if (TREE_CODE (arg1) == INTEGER_CST
11882 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11883 &strict_overflow_p)))
11884 {
11885 if (strict_overflow_p)
11886 fold_overflow_warning (("assuming signed overflow does not occur "
11887 "when simplifying modulus"),
11888 WARN_STRICT_OVERFLOW_MISC);
11889 return fold_convert_loc (loc, type, tem);
11890 }
11891
11892 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11893 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11894 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11895 && (TYPE_UNSIGNED (type)
11896 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11897 {
11898 tree c = arg1;
11899 /* Also optimize A % (C << N) where C is a power of 2,
11900 to A & ((C << N) - 1). */
11901 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11902 c = TREE_OPERAND (arg1, 0);
11903
11904 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11905 {
11906 tree mask
11907 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11908 build_int_cst (TREE_TYPE (arg1), 1));
11909 if (strict_overflow_p)
11910 fold_overflow_warning (("assuming signed overflow does not "
11911 "occur when simplifying "
11912 "X % (power of two)"),
11913 WARN_STRICT_OVERFLOW_MISC);
11914 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11915 fold_convert_loc (loc, type, arg0),
11916 fold_convert_loc (loc, type, mask));
11917 }
11918 }
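 	  /* Worked example: for nonnegative x, x % 8 folds to x & 7, and
 	     x % (2 << n) folds to x & ((2 << n) - 1).  */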
11919
11920 return NULL_TREE;
11921
11922 case LROTATE_EXPR:
11923 case RROTATE_EXPR:
11924 if (integer_all_onesp (arg0))
11925 return omit_one_operand_loc (loc, type, arg0, arg1);
11926 goto shift;
11927
11928 case RSHIFT_EXPR:
11929 /* Optimize -1 >> x for arithmetic right shifts. */
11930 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11931 && tree_expr_nonnegative_p (arg1))
11932 return omit_one_operand_loc (loc, type, arg0, arg1);
11933 /* ... fall through ... */
11934
11935 case LSHIFT_EXPR:
11936 shift:
11937 if (integer_zerop (arg1))
11938 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11939 if (integer_zerop (arg0))
11940 return omit_one_operand_loc (loc, type, arg0, arg1);
11941
11942 	  /* Since a negative shift count is not well-defined,
11943 don't try to compute it in the compiler. */
11944 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11945 return NULL_TREE;
11946
11947 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11948 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11949 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11950 && host_integerp (TREE_OPERAND (arg0, 1), false)
11951 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11952 {
11953 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11954 + TREE_INT_CST_LOW (arg1));
11955
11956 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11957 being well defined. */
11958 if (low >= TYPE_PRECISION (type))
11959 {
11960 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11961 low = low % TYPE_PRECISION (type);
11962 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11963 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11964 TREE_OPERAND (arg0, 0));
11965 else
11966 low = TYPE_PRECISION (type) - 1;
11967 }
11968
11969 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11970 build_int_cst (type, low));
11971 }
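 	  /* Worked example: (a >> 2) >> 3 folds to a >> 5.  For rotates the
 	     combined count is reduced modulo the precision, so on a 32-bit
 	     type a rotate by 20 followed by a rotate by 20 becomes a single
 	     rotate by 8.  */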
11972
11973 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11974 into x & ((unsigned)-1 >> c) for unsigned types. */
11975 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11976 || (TYPE_UNSIGNED (type)
11977 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11978 && host_integerp (arg1, false)
11979 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11980 && host_integerp (TREE_OPERAND (arg0, 1), false)
11981 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11982 {
11983 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11984 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11985 tree lshift;
11986 tree arg00;
11987
11988 if (low0 == low1)
11989 {
11990 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11991
11992 lshift = build_int_cst (type, -1);
11993 lshift = int_const_binop (code, lshift, arg1);
11994
11995 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11996 }
11997 }
11998
11999 /* Rewrite an LROTATE_EXPR by a constant into an
12000 RROTATE_EXPR by a new constant. */
12001 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12002 {
12003 tree tem = build_int_cst (TREE_TYPE (arg1),
12004 TYPE_PRECISION (type));
12005 tem = const_binop (MINUS_EXPR, tem, arg1);
12006 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12007 }
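 	  /* Worked example: on a 32-bit type, a left-rotate by 3 is the
 	     same as a right-rotate by 32 - 3 == 29.  */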
12008
12009 /* If we have a rotate of a bit operation with the rotate count and
12010 the second operand of the bit operation both constant,
12011 permute the two operations. */
12012 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12013 && (TREE_CODE (arg0) == BIT_AND_EXPR
12014 || TREE_CODE (arg0) == BIT_IOR_EXPR
12015 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12016 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12017 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12018 fold_build2_loc (loc, code, type,
12019 TREE_OPERAND (arg0, 0), arg1),
12020 fold_build2_loc (loc, code, type,
12021 TREE_OPERAND (arg0, 1), arg1));
12022
12023 /* Two consecutive rotates adding up to the precision of the
12024 type can be ignored. */
12025 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12026 && TREE_CODE (arg0) == RROTATE_EXPR
12027 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12028 && TREE_INT_CST_HIGH (arg1) == 0
12029 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12030 && ((TREE_INT_CST_LOW (arg1)
12031 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12032 == (unsigned int) TYPE_PRECISION (type)))
12033 return TREE_OPERAND (arg0, 0);
12034
12035 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12036 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12037 if the latter can be further optimized. */
12038 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12039 && TREE_CODE (arg0) == BIT_AND_EXPR
12040 && TREE_CODE (arg1) == INTEGER_CST
12041 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12042 {
12043 tree mask = fold_build2_loc (loc, code, type,
12044 fold_convert_loc (loc, type,
12045 TREE_OPERAND (arg0, 1)),
12046 arg1);
12047 tree shift = fold_build2_loc (loc, code, type,
12048 fold_convert_loc (loc, type,
12049 TREE_OPERAND (arg0, 0)),
12050 arg1);
12051 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12052 if (tem)
12053 return tem;
12054 }
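 	  /* Worked example: (x & 0xF0) >> 4 folds to (x >> 4) & 0xF; the
 	     shifted mask may enable further folding of the BIT_AND.  */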
12055
12056 return NULL_TREE;
12057
12058 case MIN_EXPR:
12059 if (operand_equal_p (arg0, arg1, 0))
12060 return omit_one_operand_loc (loc, type, arg0, arg1);
12061 if (INTEGRAL_TYPE_P (type)
12062 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12063 return omit_one_operand_loc (loc, type, arg1, arg0);
12064 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12065 if (tem)
12066 return tem;
12067 goto associate;
12068
12069 case MAX_EXPR:
12070 if (operand_equal_p (arg0, arg1, 0))
12071 return omit_one_operand_loc (loc, type, arg0, arg1);
12072 if (INTEGRAL_TYPE_P (type)
12073 && TYPE_MAX_VALUE (type)
12074 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12075 return omit_one_operand_loc (loc, type, arg1, arg0);
12076 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12077 if (tem)
12078 return tem;
12079 goto associate;
12080
12081 case TRUTH_ANDIF_EXPR:
12082 /* Note that the operands of this must be ints
12083 and their values must be 0 or 1.
12084 ("true" is a fixed value perhaps depending on the language.) */
12085 /* If first arg is constant zero, return it. */
12086 if (integer_zerop (arg0))
12087 return fold_convert_loc (loc, type, arg0);
12088 case TRUTH_AND_EXPR:
12089 /* If either arg is constant true, drop it. */
12090 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12091 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12092 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12093 /* Preserve sequence points. */
12094 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12095 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12096 /* If second arg is constant zero, result is zero, but first arg
12097 must be evaluated. */
12098 if (integer_zerop (arg1))
12099 return omit_one_operand_loc (loc, type, arg1, arg0);
12100 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12101 case will be handled here. */
12102 if (integer_zerop (arg0))
12103 return omit_one_operand_loc (loc, type, arg0, arg1);
12104
12105 /* !X && X is always false. */
12106 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12107 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12108 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12109 /* X && !X is always false. */
12110 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12111 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12112 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12113
12114 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12115 means A >= Y && A != MAX, but in this case we know that
12116 A < X <= MAX. */
12117
12118 if (!TREE_SIDE_EFFECTS (arg0)
12119 && !TREE_SIDE_EFFECTS (arg1))
12120 {
12121 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12122 if (tem && !operand_equal_p (tem, arg0, 0))
12123 return fold_build2_loc (loc, code, type, tem, arg1);
12124
12125 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12126 if (tem && !operand_equal_p (tem, arg1, 0))
12127 return fold_build2_loc (loc, code, type, arg0, tem);
12128 }
12129
12130 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12131 != NULL_TREE)
12132 return tem;
12133
12134 return NULL_TREE;
12135
12136 case TRUTH_ORIF_EXPR:
12137 /* Note that the operands of this must be ints
12138 	 and their values must be 0 or 1.
12139 ("true" is a fixed value perhaps depending on the language.) */
12140 /* If first arg is constant true, return it. */
12141 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12142 return fold_convert_loc (loc, type, arg0);
12143 case TRUTH_OR_EXPR:
12144 /* If either arg is constant zero, drop it. */
12145 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12146 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12147 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12148 /* Preserve sequence points. */
12149 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12150 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12151 /* If second arg is constant true, result is true, but we must
12152 evaluate first arg. */
12153 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12154 return omit_one_operand_loc (loc, type, arg1, arg0);
12155 /* Likewise for first arg, but note this only occurs here for
12156 TRUTH_OR_EXPR. */
12157 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12158 return omit_one_operand_loc (loc, type, arg0, arg1);
12159
12160 /* !X || X is always true. */
12161 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12162 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12163 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12164 /* X || !X is always true. */
12165 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12166 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12167 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12168
12169 /* (X && !Y) || (!X && Y) is X ^ Y */
12170 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12171 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12172 {
12173 tree a0, a1, l0, l1, n0, n1;
12174
12175 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12176 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12177
12178 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12179 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12180
12181 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12182 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12183
12184 if ((operand_equal_p (n0, a0, 0)
12185 && operand_equal_p (n1, a1, 0))
12186 || (operand_equal_p (n0, a1, 0)
12187 && operand_equal_p (n1, a0, 0)))
12188 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12189 }
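 	  /* E.g. (a && !b) || (!a && b) folds to a ^ b; the two forms agree
 	     on all four truth assignments of a and b.  */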
12190
12191 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12192 != NULL_TREE)
12193 return tem;
12194
12195 return NULL_TREE;
12196
12197 case TRUTH_XOR_EXPR:
12198 /* If the second arg is constant zero, drop it. */
12199 if (integer_zerop (arg1))
12200 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12201 /* If the second arg is constant true, this is a logical inversion. */
12202 if (integer_onep (arg1))
12203 {
12204 /* Only call invert_truthvalue if operand is a truth value. */
12205 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12206 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12207 else
12208 tem = invert_truthvalue_loc (loc, arg0);
12209 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12210 }
12211 /* Identical arguments cancel to zero. */
12212 if (operand_equal_p (arg0, arg1, 0))
12213 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12214
12215 /* !X ^ X is always true. */
12216 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12217 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12218 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12219
12220 /* X ^ !X is always true. */
12221 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12222 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12223 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12224
12225 return NULL_TREE;
12226
12227 case EQ_EXPR:
12228 case NE_EXPR:
12229 STRIP_NOPS (arg0);
12230 STRIP_NOPS (arg1);
12231
12232 tem = fold_comparison (loc, code, type, op0, op1);
12233 if (tem != NULL_TREE)
12234 return tem;
12235
12236 /* bool_var != 0 becomes bool_var. */
12237 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12238 && code == NE_EXPR)
12239 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12240
12241 /* bool_var == 1 becomes bool_var. */
12242 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12243 && code == EQ_EXPR)
12244 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12245
12246 /* bool_var != 1 becomes !bool_var. */
12247 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12248 && code == NE_EXPR)
12249 return fold_convert_loc (loc, type,
12250 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12251 TREE_TYPE (arg0), arg0));
12252
12253 /* bool_var == 0 becomes !bool_var. */
12254 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12255 && code == EQ_EXPR)
12256 return fold_convert_loc (loc, type,
12257 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12258 TREE_TYPE (arg0), arg0));
12259
12260 /* !exp != 0 becomes !exp */
12261 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12262 && code == NE_EXPR)
12263 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12264
12265 /* If this is an equality comparison of the address of two non-weak,
12266 	 unaliased symbols, neither of which is extern (since we do not
12267 have access to attributes for externs), then we know the result. */
12268 if (TREE_CODE (arg0) == ADDR_EXPR
12269 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12270 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12271 && ! lookup_attribute ("alias",
12272 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12273 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12274 && TREE_CODE (arg1) == ADDR_EXPR
12275 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12276 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12277 && ! lookup_attribute ("alias",
12278 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12279 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12280 {
12281 /* We know that we're looking at the address of two
12282 non-weak, unaliased, static _DECL nodes.
12283
12284 It is both wasteful and incorrect to call operand_equal_p
12285 to compare the two ADDR_EXPR nodes. It is wasteful in that
12286 all we need to do is test pointer equality for the arguments
12287 to the two ADDR_EXPR nodes. It is incorrect to use
12288 operand_equal_p as that function is NOT equivalent to a
12289 C equality test. It can in fact return false for two
12290 objects which would test as equal using the C equality
12291 operator. */
12292 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12293 return constant_boolean_node (equal
12294 ? code == EQ_EXPR : code != EQ_EXPR,
12295 type);
12296 }
12297
12298 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12299 a MINUS_EXPR of a constant, we can convert it into a comparison with
12300 a revised constant as long as no overflow occurs. */
12301 if (TREE_CODE (arg1) == INTEGER_CST
12302 && (TREE_CODE (arg0) == PLUS_EXPR
12303 || TREE_CODE (arg0) == MINUS_EXPR)
12304 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12305 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12306 ? MINUS_EXPR : PLUS_EXPR,
12307 fold_convert_loc (loc, TREE_TYPE (arg0),
12308 arg1),
12309 TREE_OPERAND (arg0, 1)))
12310 && !TREE_OVERFLOW (tem))
12311 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12312
12313 /* Similarly for a NEGATE_EXPR. */
12314 if (TREE_CODE (arg0) == NEGATE_EXPR
12315 && TREE_CODE (arg1) == INTEGER_CST
12316 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12317 arg1)))
12318 && TREE_CODE (tem) == INTEGER_CST
12319 && !TREE_OVERFLOW (tem))
12320 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12321
12322 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12323 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12324 && TREE_CODE (arg1) == INTEGER_CST
12325 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12326 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12327 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12328 fold_convert_loc (loc,
12329 TREE_TYPE (arg0),
12330 arg1),
12331 TREE_OPERAND (arg0, 1)));
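 	  /* Worked example: (x ^ 5) == 3 folds to x == (5 ^ 3),
 	     i.e. x == 6.  */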
12332
12333 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12334 if ((TREE_CODE (arg0) == PLUS_EXPR
12335 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12336 || TREE_CODE (arg0) == MINUS_EXPR)
12337 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12338 0)),
12339 arg1, 0)
12340 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12341 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12342 {
12343 tree val = TREE_OPERAND (arg0, 1);
12344 return omit_two_operands_loc (loc, type,
12345 fold_build2_loc (loc, code, type,
12346 val,
12347 build_int_cst (TREE_TYPE (val),
12348 0)),
12349 TREE_OPERAND (arg0, 0), arg1);
12350 }
12351
12352 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12353 if (TREE_CODE (arg0) == MINUS_EXPR
12354 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12355 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12356 1)),
12357 arg1, 0)
12358 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12359 {
12360 return omit_two_operands_loc (loc, type,
12361 code == NE_EXPR
12362 ? boolean_true_node : boolean_false_node,
12363 TREE_OPERAND (arg0, 1), arg1);
12364 }
12365
12366 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12367 for !=. Don't do this for ordered comparisons due to overflow. */
12368 if (TREE_CODE (arg0) == MINUS_EXPR
12369 && integer_zerop (arg1))
12370 return fold_build2_loc (loc, code, type,
12371 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12372
12373 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12374 if (TREE_CODE (arg0) == ABS_EXPR
12375 && (integer_zerop (arg1) || real_zerop (arg1)))
12376 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12377
12378 /* If this is an EQ or NE comparison with zero and ARG0 is
12379 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12380 two operations, but the latter can be done in one less insn
12381 on machines that have only two-operand insns or on which a
12382 constant cannot be the first operand. */
12383 if (TREE_CODE (arg0) == BIT_AND_EXPR
12384 && integer_zerop (arg1))
12385 {
12386 tree arg00 = TREE_OPERAND (arg0, 0);
12387 tree arg01 = TREE_OPERAND (arg0, 1);
12388 if (TREE_CODE (arg00) == LSHIFT_EXPR
12389 && integer_onep (TREE_OPERAND (arg00, 0)))
12390 {
12391 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12392 arg01, TREE_OPERAND (arg00, 1));
12393 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12394 build_int_cst (TREE_TYPE (arg0), 1));
12395 return fold_build2_loc (loc, code, type,
12396 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12397 arg1);
12398 }
12399 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12400 && integer_onep (TREE_OPERAND (arg01, 0)))
12401 {
12402 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12403 arg00, TREE_OPERAND (arg01, 1));
12404 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12405 build_int_cst (TREE_TYPE (arg0), 1));
12406 return fold_build2_loc (loc, code, type,
12407 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12408 arg1);
12409 }
12410 }
12411
12412 /* If this is an NE or EQ comparison of zero against the result of a
12413 signed MOD operation whose second operand is a power of 2, make
12414 the MOD operation unsigned since it is simpler and equivalent. */
12415 if (integer_zerop (arg1)
12416 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12417 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12418 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12419 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12420 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12421 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12422 {
12423 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12424 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12425 fold_convert_loc (loc, newtype,
12426 TREE_OPERAND (arg0, 0)),
12427 fold_convert_loc (loc, newtype,
12428 TREE_OPERAND (arg0, 1)));
12429
12430 return fold_build2_loc (loc, code, type, newmod,
12431 fold_convert_loc (loc, newtype, arg1));
12432 }
12433
12434 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12435 C1 is a valid shift constant, and C2 is a power of two, i.e.
12436 a single bit. */
12437 if (TREE_CODE (arg0) == BIT_AND_EXPR
12438 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12439 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12440 == INTEGER_CST
12441 && integer_pow2p (TREE_OPERAND (arg0, 1))
12442 && integer_zerop (arg1))
12443 {
12444 tree itype = TREE_TYPE (arg0);
12445 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12446 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12447
12448 /* Check for a valid shift count. */
12449 if (TREE_INT_CST_HIGH (arg001) == 0
12450 && TREE_INT_CST_LOW (arg001) < prec)
12451 {
12452 tree arg01 = TREE_OPERAND (arg0, 1);
12453 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12454 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12455 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12456 can be rewritten as (X & (C2 << C1)) != 0. */
12457 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12458 {
12459 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12460 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12461 return fold_build2_loc (loc, code, type, tem,
12462 fold_convert_loc (loc, itype, arg1));
12463 }
12464 /* Otherwise, for signed (arithmetic) shifts,
12465 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12466 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12467 else if (!TYPE_UNSIGNED (itype))
12468 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12469 arg000, build_int_cst (itype, 0));
12470 	      /* Otherwise, for unsigned (logical) shifts,
12471 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12472 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12473 else
12474 return omit_one_operand_loc (loc, type,
12475 code == EQ_EXPR ? integer_one_node
12476 : integer_zero_node,
12477 arg000);
12478 }
12479 }
12480
12481 /* If we have (A & C) == C where C is a power of 2, convert this into
12482 (A & C) != 0. Similarly for NE_EXPR. */
12483 if (TREE_CODE (arg0) == BIT_AND_EXPR
12484 && integer_pow2p (TREE_OPERAND (arg0, 1))
12485 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12486 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12487 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12488 integer_zero_node));
12489
12490 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12491 bit, then fold the expression into A < 0 or A >= 0. */
12492 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12493 if (tem)
12494 return tem;
12495
12496 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12497 Similarly for NE_EXPR. */
12498 if (TREE_CODE (arg0) == BIT_AND_EXPR
12499 && TREE_CODE (arg1) == INTEGER_CST
12500 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12501 {
12502 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12503 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12504 TREE_OPERAND (arg0, 1));
12505 tree dandnotc
12506 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12507 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12508 notc);
12509 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12510 if (integer_nonzerop (dandnotc))
12511 return omit_one_operand_loc (loc, type, rslt, arg0);
12512 }
12513
12514 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12515 Similarly for NE_EXPR. */
12516 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12517 && TREE_CODE (arg1) == INTEGER_CST
12518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12519 {
12520 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12521 tree candnotd
12522 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12523 TREE_OPERAND (arg0, 1),
12524 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12525 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12526 if (integer_nonzerop (candnotd))
12527 return omit_one_operand_loc (loc, type, rslt, arg0);
12528 }
12529
12530 /* If this is a comparison of a field, we may be able to simplify it. */
12531 if ((TREE_CODE (arg0) == COMPONENT_REF
12532 || TREE_CODE (arg0) == BIT_FIELD_REF)
12533 /* Handle the constant case even without -O
12534 to make sure the warnings are given. */
12535 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12536 {
12537 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12538 if (t1)
12539 return t1;
12540 }
12541
12542 /* Optimize comparisons of strlen vs zero to a compare of the
12543 first character of the string vs zero. To wit,
12544 strlen(ptr) == 0 => *ptr == 0
12545 strlen(ptr) != 0 => *ptr != 0
12546 Other cases should reduce to one of these two (or a constant)
12547 due to the return value of strlen being unsigned. */
12548 if (TREE_CODE (arg0) == CALL_EXPR
12549 && integer_zerop (arg1))
12550 {
12551 tree fndecl = get_callee_fndecl (arg0);
12552
12553 if (fndecl
12554 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12555 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12556 && call_expr_nargs (arg0) == 1
12557 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12558 {
12559 tree iref = build_fold_indirect_ref_loc (loc,
12560 CALL_EXPR_ARG (arg0, 0));
12561 return fold_build2_loc (loc, code, type, iref,
12562 build_int_cst (TREE_TYPE (iref), 0));
12563 }
12564 }
12565
12566 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12567 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12568 if (TREE_CODE (arg0) == RSHIFT_EXPR
12569 && integer_zerop (arg1)
12570 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12571 {
12572 tree arg00 = TREE_OPERAND (arg0, 0);
12573 tree arg01 = TREE_OPERAND (arg0, 1);
12574 tree itype = TREE_TYPE (arg00);
12575 if (TREE_INT_CST_HIGH (arg01) == 0
12576 && TREE_INT_CST_LOW (arg01)
12577 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12578 {
12579 if (TYPE_UNSIGNED (itype))
12580 {
12581 itype = signed_type_for (itype);
12582 arg00 = fold_convert_loc (loc, itype, arg00);
12583 }
12584 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12585 type, arg00, build_int_cst (itype, 0));
12586 }
12587 }
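 	  /* Worked example: for a 32-bit signed x, (x >> 31) != 0 folds to
 	     x < 0, since the arithmetic shift leaves only copies of the
 	     sign bit.  */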
12588
12589 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12590 if (integer_zerop (arg1)
12591 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12592 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12593 TREE_OPERAND (arg0, 1));
12594
12595 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12596 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12597 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12598 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12599 build_int_cst (TREE_TYPE (arg0), 0));
12600 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12601 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12602 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12603 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12604 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12605 build_int_cst (TREE_TYPE (arg0), 0));
12606
12607 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12608 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12609 && TREE_CODE (arg1) == INTEGER_CST
12610 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12611 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12612 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12613 TREE_OPERAND (arg0, 1), arg1));
12614
12615 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12616 (X & C) == 0 when C is a single bit. */
12617 if (TREE_CODE (arg0) == BIT_AND_EXPR
12618 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12619 && integer_zerop (arg1)
12620 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12621 {
12622 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12623 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12624 TREE_OPERAND (arg0, 1));
12625 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12626 type, tem,
12627 fold_convert_loc (loc, TREE_TYPE (arg0),
12628 arg1));
12629 }
12630
12631 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12632 constant C is a power of two, i.e. a single bit. */
12633 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12634 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12635 && integer_zerop (arg1)
12636 && integer_pow2p (TREE_OPERAND (arg0, 1))
12637 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12638 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12639 {
12640 tree arg00 = TREE_OPERAND (arg0, 0);
12641 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12642 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12643 }
12644
12645 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12646 	 when C is a power of two, i.e. a single bit.  */
12647 if (TREE_CODE (arg0) == BIT_AND_EXPR
12648 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12649 && integer_zerop (arg1)
12650 && integer_pow2p (TREE_OPERAND (arg0, 1))
12651 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12652 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12653 {
12654 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12655 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12656 arg000, TREE_OPERAND (arg0, 1));
12657 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12658 tem, build_int_cst (TREE_TYPE (tem), 0));
12659 }
12660
12661 if (integer_zerop (arg1)
12662 && tree_expr_nonzero_p (arg0))
12663 {
12664 	  tree res = constant_boolean_node (code == NE_EXPR, type);
12665 return omit_one_operand_loc (loc, type, res, arg0);
12666 }
12667
12668 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12669 if (TREE_CODE (arg0) == NEGATE_EXPR
12670 && TREE_CODE (arg1) == NEGATE_EXPR)
12671 return fold_build2_loc (loc, code, type,
12672 TREE_OPERAND (arg0, 0),
12673 fold_convert_loc (loc, TREE_TYPE (arg0),
12674 TREE_OPERAND (arg1, 0)));
12675
12676       /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries.  */
12677 if (TREE_CODE (arg0) == BIT_AND_EXPR
12678 && TREE_CODE (arg1) == BIT_AND_EXPR)
12679 {
12680 tree arg00 = TREE_OPERAND (arg0, 0);
12681 tree arg01 = TREE_OPERAND (arg0, 1);
12682 tree arg10 = TREE_OPERAND (arg1, 0);
12683 tree arg11 = TREE_OPERAND (arg1, 1);
12684 tree itype = TREE_TYPE (arg0);
12685
12686 if (operand_equal_p (arg01, arg11, 0))
12687 return fold_build2_loc (loc, code, type,
12688 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12689 fold_build2_loc (loc,
12690 BIT_XOR_EXPR, itype,
12691 arg00, arg10),
12692 arg01),
12693 build_int_cst (itype, 0));
12694
12695 if (operand_equal_p (arg01, arg10, 0))
12696 return fold_build2_loc (loc, code, type,
12697 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12698 fold_build2_loc (loc,
12699 BIT_XOR_EXPR, itype,
12700 arg00, arg11),
12701 arg01),
12702 build_int_cst (itype, 0));
12703
12704 if (operand_equal_p (arg00, arg11, 0))
12705 return fold_build2_loc (loc, code, type,
12706 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12707 fold_build2_loc (loc,
12708 BIT_XOR_EXPR, itype,
12709 arg01, arg10),
12710 arg00),
12711 build_int_cst (itype, 0));
12712
12713 if (operand_equal_p (arg00, arg10, 0))
12714 return fold_build2_loc (loc, code, type,
12715 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12716 fold_build2_loc (loc,
12717 BIT_XOR_EXPR, itype,
12718 arg01, arg11),
12719 arg00),
12720 build_int_cst (itype, 0));
12721 }
12722
12723 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12724 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12725 {
12726 tree arg00 = TREE_OPERAND (arg0, 0);
12727 tree arg01 = TREE_OPERAND (arg0, 1);
12728 tree arg10 = TREE_OPERAND (arg1, 0);
12729 tree arg11 = TREE_OPERAND (arg1, 1);
12730 tree itype = TREE_TYPE (arg0);
12731
12732 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12733 operand_equal_p guarantees no side-effects so we don't need
12734 to use omit_one_operand on Z. */
12735 if (operand_equal_p (arg01, arg11, 0))
12736 return fold_build2_loc (loc, code, type, arg00,
12737 fold_convert_loc (loc, TREE_TYPE (arg00),
12738 arg10));
12739 if (operand_equal_p (arg01, arg10, 0))
12740 return fold_build2_loc (loc, code, type, arg00,
12741 fold_convert_loc (loc, TREE_TYPE (arg00),
12742 arg11));
12743 if (operand_equal_p (arg00, arg11, 0))
12744 return fold_build2_loc (loc, code, type, arg01,
12745 fold_convert_loc (loc, TREE_TYPE (arg01),
12746 arg10));
12747 if (operand_equal_p (arg00, arg10, 0))
12748 return fold_build2_loc (loc, code, type, arg01,
12749 fold_convert_loc (loc, TREE_TYPE (arg01),
12750 arg11));
12751
12752 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12753 if (TREE_CODE (arg01) == INTEGER_CST
12754 && TREE_CODE (arg11) == INTEGER_CST)
12755 {
12756 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12757 fold_convert_loc (loc, itype, arg11));
12758 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12759 return fold_build2_loc (loc, code, type, tem,
12760 fold_convert_loc (loc, itype, arg10));
12761 }
12762 }
12763
12764 /* Attempt to simplify equality/inequality comparisons of complex
12765 values. Only lower the comparison if the result is known or
12766 can be simplified to a single scalar comparison. */
12767 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12768 || TREE_CODE (arg0) == COMPLEX_CST)
12769 && (TREE_CODE (arg1) == COMPLEX_EXPR
12770 || TREE_CODE (arg1) == COMPLEX_CST))
12771 {
12772 tree real0, imag0, real1, imag1;
12773 tree rcond, icond;
12774
12775 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12776 {
12777 real0 = TREE_OPERAND (arg0, 0);
12778 imag0 = TREE_OPERAND (arg0, 1);
12779 }
12780 else
12781 {
12782 real0 = TREE_REALPART (arg0);
12783 imag0 = TREE_IMAGPART (arg0);
12784 }
12785
12786 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12787 {
12788 real1 = TREE_OPERAND (arg1, 0);
12789 imag1 = TREE_OPERAND (arg1, 1);
12790 }
12791 else
12792 {
12793 real1 = TREE_REALPART (arg1);
12794 imag1 = TREE_IMAGPART (arg1);
12795 }
12796
12797 rcond = fold_binary_loc (loc, code, type, real0, real1);
12798 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12799 {
12800 if (integer_zerop (rcond))
12801 {
12802 if (code == EQ_EXPR)
12803 return omit_two_operands_loc (loc, type, boolean_false_node,
12804 imag0, imag1);
12805 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12806 }
12807 else
12808 {
12809 if (code == NE_EXPR)
12810 return omit_two_operands_loc (loc, type, boolean_true_node,
12811 imag0, imag1);
12812 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12813 }
12814 }
12815
12816 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12817 if (icond && TREE_CODE (icond) == INTEGER_CST)
12818 {
12819 if (integer_zerop (icond))
12820 {
12821 if (code == EQ_EXPR)
12822 return omit_two_operands_loc (loc, type, boolean_false_node,
12823 real0, real1);
12824 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12825 }
12826 else
12827 {
12828 if (code == NE_EXPR)
12829 return omit_two_operands_loc (loc, type, boolean_true_node,
12830 real0, real1);
12831 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12832 }
12833 }
12834 }
12835
12836 return NULL_TREE;
12837
12838 case LT_EXPR:
12839 case GT_EXPR:
12840 case LE_EXPR:
12841 case GE_EXPR:
12842 tem = fold_comparison (loc, code, type, op0, op1);
12843 if (tem != NULL_TREE)
12844 return tem;
12845
12846 /* Transform comparisons of the form X +- C CMP X. */
12847 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12848 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12849 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12850 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12851 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12852 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12853 {
12854 tree arg01 = TREE_OPERAND (arg0, 1);
12855 enum tree_code code0 = TREE_CODE (arg0);
12856 int is_positive;
12857
12858 if (TREE_CODE (arg01) == REAL_CST)
12859 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12860 else
12861 is_positive = tree_int_cst_sgn (arg01);
12862
12863 /* (X - c) > X becomes false. */
12864 if (code == GT_EXPR
12865 && ((code0 == MINUS_EXPR && is_positive >= 0)
12866 || (code0 == PLUS_EXPR && is_positive <= 0)))
12867 {
12868 if (TREE_CODE (arg01) == INTEGER_CST
12869 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12870 fold_overflow_warning (("assuming signed overflow does not "
12871 "occur when assuming that (X - c) > X "
12872 "is always false"),
12873 WARN_STRICT_OVERFLOW_ALL);
12874 return constant_boolean_node (0, type);
12875 }
12876
12877 /* Likewise (X + c) < X becomes false. */
12878 if (code == LT_EXPR
12879 && ((code0 == PLUS_EXPR && is_positive >= 0)
12880 || (code0 == MINUS_EXPR && is_positive <= 0)))
12881 {
12882 if (TREE_CODE (arg01) == INTEGER_CST
12883 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12884 fold_overflow_warning (("assuming signed overflow does not "
12885 "occur when assuming that "
12886 "(X + c) < X is always false"),
12887 WARN_STRICT_OVERFLOW_ALL);
12888 return constant_boolean_node (0, type);
12889 }
12890
12891 /* Convert (X - c) <= X to true. */
12892 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12893 && code == LE_EXPR
12894 && ((code0 == MINUS_EXPR && is_positive >= 0)
12895 || (code0 == PLUS_EXPR && is_positive <= 0)))
12896 {
12897 if (TREE_CODE (arg01) == INTEGER_CST
12898 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12899 fold_overflow_warning (("assuming signed overflow does not "
12900 "occur when assuming that "
12901 "(X - c) <= X is always true"),
12902 WARN_STRICT_OVERFLOW_ALL);
12903 return constant_boolean_node (1, type);
12904 }
12905
12906 /* Convert (X + c) >= X to true. */
12907 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12908 && code == GE_EXPR
12909 && ((code0 == PLUS_EXPR && is_positive >= 0)
12910 || (code0 == MINUS_EXPR && is_positive <= 0)))
12911 {
12912 if (TREE_CODE (arg01) == INTEGER_CST
12913 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12914 fold_overflow_warning (("assuming signed overflow does not "
12915 "occur when assuming that "
12916 "(X + c) >= X is always true"),
12917 WARN_STRICT_OVERFLOW_ALL);
12918 return constant_boolean_node (1, type);
12919 }
12920
12921 if (TREE_CODE (arg01) == INTEGER_CST)
12922 {
12923 /* Convert X + c > X and X - c < X to true for integers. */
12924 if (code == GT_EXPR
12925 && ((code0 == PLUS_EXPR && is_positive > 0)
12926 || (code0 == MINUS_EXPR && is_positive < 0)))
12927 {
12928 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12929 fold_overflow_warning (("assuming signed overflow does "
12930 "not occur when assuming that "
12931 "(X + c) > X is always true"),
12932 WARN_STRICT_OVERFLOW_ALL);
12933 return constant_boolean_node (1, type);
12934 }
12935
12936 if (code == LT_EXPR
12937 && ((code0 == MINUS_EXPR && is_positive > 0)
12938 || (code0 == PLUS_EXPR && is_positive < 0)))
12939 {
12940 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12941 fold_overflow_warning (("assuming signed overflow does "
12942 "not occur when assuming that "
12943 "(X - c) < X is always true"),
12944 WARN_STRICT_OVERFLOW_ALL);
12945 return constant_boolean_node (1, type);
12946 }
12947
12948 /* Convert X + c <= X and X - c >= X to false for integers. */
12949 if (code == LE_EXPR
12950 && ((code0 == PLUS_EXPR && is_positive > 0)
12951 || (code0 == MINUS_EXPR && is_positive < 0)))
12952 {
12953 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12954 fold_overflow_warning (("assuming signed overflow does "
12955 "not occur when assuming that "
12956 "(X + c) <= X is always false"),
12957 WARN_STRICT_OVERFLOW_ALL);
12958 return constant_boolean_node (0, type);
12959 }
12960
12961 if (code == GE_EXPR
12962 && ((code0 == MINUS_EXPR && is_positive > 0)
12963 || (code0 == PLUS_EXPR && is_positive < 0)))
12964 {
12965 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12966 fold_overflow_warning (("assuming signed overflow does "
12967 "not occur when assuming that "
12968 "(X - c) >= X is always false"),
12969 WARN_STRICT_OVERFLOW_ALL);
12970 return constant_boolean_node (0, type);
12971 }
12972 }
12973 }
12974
12975 /* Comparisons with the highest or lowest possible integer of
12976 the specified precision will have known values. */
12977 {
12978 tree arg1_type = TREE_TYPE (arg1);
12979 unsigned int width = TYPE_PRECISION (arg1_type);
12980
12981 if (TREE_CODE (arg1) == INTEGER_CST
12982 && width <= 2 * HOST_BITS_PER_WIDE_INT
12983 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12984 {
12985 HOST_WIDE_INT signed_max_hi;
12986 unsigned HOST_WIDE_INT signed_max_lo;
12987 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12988
12989 if (width <= HOST_BITS_PER_WIDE_INT)
12990 {
12991 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12992 - 1;
12993 signed_max_hi = 0;
12994 max_hi = 0;
12995
12996 if (TYPE_UNSIGNED (arg1_type))
12997 {
12998 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12999 min_lo = 0;
13000 min_hi = 0;
13001 }
13002 else
13003 {
13004 max_lo = signed_max_lo;
13005 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13006 min_hi = -1;
13007 }
13008 }
13009 else
13010 {
13011 width -= HOST_BITS_PER_WIDE_INT;
13012 signed_max_lo = -1;
13013 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13014 - 1;
13015 max_lo = -1;
13016 min_lo = 0;
13017
13018 if (TYPE_UNSIGNED (arg1_type))
13019 {
13020 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13021 min_hi = 0;
13022 }
13023 else
13024 {
13025 max_hi = signed_max_hi;
13026 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13027 }
13028 }
13029
13030 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13031 && TREE_INT_CST_LOW (arg1) == max_lo)
13032 switch (code)
13033 {
13034 case GT_EXPR:
13035 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13036
13037 case GE_EXPR:
13038 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13039
13040 case LE_EXPR:
13041 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13042
13043 case LT_EXPR:
13044 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13045
13046 /* The GE_EXPR and LT_EXPR cases above are not normally
13047 reached because of previous transformations. */
13048
13049 default:
13050 break;
13051 }
13052 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13053 == max_hi
13054 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13055 switch (code)
13056 {
13057 case GT_EXPR:
13058 arg1 = const_binop (PLUS_EXPR, arg1,
13059 build_int_cst (TREE_TYPE (arg1), 1));
13060 return fold_build2_loc (loc, EQ_EXPR, type,
13061 fold_convert_loc (loc,
13062 TREE_TYPE (arg1), arg0),
13063 arg1);
13064 case LE_EXPR:
13065 arg1 = const_binop (PLUS_EXPR, arg1,
13066 build_int_cst (TREE_TYPE (arg1), 1));
13067 return fold_build2_loc (loc, NE_EXPR, type,
13068 fold_convert_loc (loc, TREE_TYPE (arg1),
13069 arg0),
13070 arg1);
13071 default:
13072 break;
13073 }
13074 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13075 == min_hi
13076 && TREE_INT_CST_LOW (arg1) == min_lo)
13077 switch (code)
13078 {
13079 case LT_EXPR:
13080 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13081
13082 case LE_EXPR:
13083 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13084
13085 case GE_EXPR:
13086 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13087
13088 case GT_EXPR:
13089 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13090
13091 default:
13092 break;
13093 }
13094 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13095 == min_hi
13096 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13097 switch (code)
13098 {
13099 case GE_EXPR:
13100 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13101 return fold_build2_loc (loc, NE_EXPR, type,
13102 fold_convert_loc (loc,
13103 TREE_TYPE (arg1), arg0),
13104 arg1);
13105 case LT_EXPR:
13106 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13107 return fold_build2_loc (loc, EQ_EXPR, type,
13108 fold_convert_loc (loc, TREE_TYPE (arg1),
13109 arg0),
13110 arg1);
13111 default:
13112 break;
13113 }
13114
13115 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13116 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13117 && TYPE_UNSIGNED (arg1_type)
13118 /* We will flip the signedness of the comparison operator
13119 associated with the mode of arg1, so the sign bit is
13120 specified by this mode. Check that arg1 is the signed
13121 max associated with this sign bit. */
13122 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13123 /* signed_type does not work on pointer types. */
13124 && INTEGRAL_TYPE_P (arg1_type))
13125 {
13126 /* The following case also applies to X < signed_max+1
13127 		 and X >= signed_max+1 because of previous transformations.  */
13128 if (code == LE_EXPR || code == GT_EXPR)
13129 {
13130 tree st;
13131 st = signed_type_for (TREE_TYPE (arg1));
13132 return fold_build2_loc (loc,
13133 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13134 type, fold_convert_loc (loc, st, arg0),
13135 build_int_cst (st, 0));
13136 }
13137 }
13138 }
13139 }
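 	  /* Worked examples for a 32-bit unsigned x: x > 0xffffffff folds
 	     to false, x <= 0xffffffff to true, x > 0xfffffffe to
 	     x == 0xffffffff, and x < 1 to x == 0.  */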
13140
13141 /* If we are comparing an ABS_EXPR with a constant, we can
13142 convert all the cases into explicit comparisons, but they may
13143 well not be faster than doing the ABS and one comparison.
13144 But ABS (X) <= C is a range comparison, which becomes a subtraction
13145 and a comparison, and is probably faster. */
13146 if (code == LE_EXPR
13147 && TREE_CODE (arg1) == INTEGER_CST
13148 && TREE_CODE (arg0) == ABS_EXPR
13149 && ! TREE_SIDE_EFFECTS (arg0)
13150 && (0 != (tem = negate_expr (arg1)))
13151 && TREE_CODE (tem) == INTEGER_CST
13152 && !TREE_OVERFLOW (tem))
13153 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13154 build2 (GE_EXPR, type,
13155 TREE_OPERAND (arg0, 0), tem),
13156 build2 (LE_EXPR, type,
13157 TREE_OPERAND (arg0, 0), arg1));
13158
13159 /* Convert ABS_EXPR<x> >= 0 to true. */
13160 strict_overflow_p = false;
13161 if (code == GE_EXPR
13162 && (integer_zerop (arg1)
13163 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13164 && real_zerop (arg1)))
13165 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13166 {
13167 if (strict_overflow_p)
13168 fold_overflow_warning (("assuming signed overflow does not occur "
13169 "when simplifying comparison of "
13170 "absolute value and zero"),
13171 WARN_STRICT_OVERFLOW_CONDITIONAL);
13172 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13173 }
13174
13175 /* Convert ABS_EXPR<x> < 0 to false. */
13176 strict_overflow_p = false;
13177 if (code == LT_EXPR
13178 && (integer_zerop (arg1) || real_zerop (arg1))
13179 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13180 {
13181 if (strict_overflow_p)
13182 fold_overflow_warning (("assuming signed overflow does not occur "
13183 "when simplifying comparison of "
13184 "absolute value and zero"),
13185 WARN_STRICT_OVERFLOW_CONDITIONAL);
13186 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13187 }
13188
13189 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13190 and similarly for >= into !=. */
13191 if ((code == LT_EXPR || code == GE_EXPR)
13192 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13193 && TREE_CODE (arg1) == LSHIFT_EXPR
13194 && integer_onep (TREE_OPERAND (arg1, 0)))
13195 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13196 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13197 TREE_OPERAND (arg1, 1)),
13198 build_int_cst (TREE_TYPE (arg0), 0));
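 	  /* x < (1 << y) holds for unsigned x exactly when no bit of x at
 	     position y or above is set, i.e. when x >> y == 0.  */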
13199
13200 if ((code == LT_EXPR || code == GE_EXPR)
13201 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13202 && CONVERT_EXPR_P (arg1)
13203 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13204 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13205 {
13206 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13207 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13208 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13209 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13210 build_int_cst (TREE_TYPE (arg0), 0));
13211 }
13212
13213 return NULL_TREE;
13214
13215 case UNORDERED_EXPR:
13216 case ORDERED_EXPR:
13217 case UNLT_EXPR:
13218 case UNLE_EXPR:
13219 case UNGT_EXPR:
13220 case UNGE_EXPR:
13221 case UNEQ_EXPR:
13222 case LTGT_EXPR:
13223 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13224 {
13225 t1 = fold_relational_const (code, type, arg0, arg1);
13226 if (t1 != NULL_TREE)
13227 return t1;
13228 }
13229
13230 /* If the first operand is NaN, the result is constant. */
13231 if (TREE_CODE (arg0) == REAL_CST
13232 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13233 && (code != LTGT_EXPR || ! flag_trapping_math))
13234 {
13235 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13236 ? integer_zero_node
13237 : integer_one_node;
13238 return omit_one_operand_loc (loc, type, t1, arg1);
13239 }
13240
13241 /* If the second operand is NaN, the result is constant. */
13242 if (TREE_CODE (arg1) == REAL_CST
13243 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13244 && (code != LTGT_EXPR || ! flag_trapping_math))
13245 {
13246 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13247 ? integer_zero_node
13248 : integer_one_node;
13249 return omit_one_operand_loc (loc, type, t1, arg0);
13250 }
13251
13252 /* Simplify unordered comparison of something with itself. */
13253 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13254 && operand_equal_p (arg0, arg1, 0))
13255 return constant_boolean_node (1, type);
13256
13257 if (code == LTGT_EXPR
13258 && !flag_trapping_math
13259 && operand_equal_p (arg0, arg1, 0))
13260 return constant_boolean_node (0, type);
13261
13262 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13263 {
13264 tree targ0 = strip_float_extensions (arg0);
13265 tree targ1 = strip_float_extensions (arg1);
13266 tree newtype = TREE_TYPE (targ0);
13267
13268 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13269 newtype = TREE_TYPE (targ1);
13270
13271 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13272 return fold_build2_loc (loc, code, type,
13273 fold_convert_loc (loc, newtype, targ0),
13274 fold_convert_loc (loc, newtype, targ1));
13275 }
13276
13277 return NULL_TREE;
13278
13279 case COMPOUND_EXPR:
13280 /* When pedantic, a compound expression can be neither an lvalue
13281 nor an integer constant expression. */
13282 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13283 return NULL_TREE;
13284 	  /* Don't let (0, 0) be a null pointer constant.  */
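	  /* E.g., folding (f (), 0) yields a zero wrapped in a NOP_EXPR,
	     which, unlike a bare literal 0, cannot act as a null pointer
	     constant.  */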
13285 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13286 : fold_convert_loc (loc, type, arg1);
13287 return pedantic_non_lvalue_loc (loc, tem);
13288
13289 case COMPLEX_EXPR:
13290 if ((TREE_CODE (arg0) == REAL_CST
13291 && TREE_CODE (arg1) == REAL_CST)
13292 || (TREE_CODE (arg0) == INTEGER_CST
13293 && TREE_CODE (arg1) == INTEGER_CST))
13294 return build_complex (type, arg0, arg1);
13295 if (TREE_CODE (arg0) == REALPART_EXPR
13296 && TREE_CODE (arg1) == IMAGPART_EXPR
13297 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13298 && operand_equal_p (TREE_OPERAND (arg0, 0),
13299 TREE_OPERAND (arg1, 0), 0))
13300 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13301 TREE_OPERAND (arg1, 0));
13302 return NULL_TREE;
13303
13304 case ASSERT_EXPR:
13305 /* An ASSERT_EXPR should never be passed to fold_binary. */
13306 gcc_unreachable ();
13307
13308 default:
13309 return NULL_TREE;
13310 } /* switch (code) */
13311 }
13312
13313 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13314 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13315 of GOTO_EXPR. */
13316
13317 static tree
13318 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13319 {
13320 switch (TREE_CODE (*tp))
13321 {
13322 case LABEL_EXPR:
13323 return *tp;
13324
13325 case GOTO_EXPR:
13326 *walk_subtrees = 0;
13327
13328 /* ... fall through ... */
13329
13330 default:
13331 return NULL_TREE;
13332 }
13333 }
13334
13335 /* Return whether the sub-tree ST contains a label which is accessible from
13336 outside the sub-tree. */
13337
13338 static bool
13339 contains_label_p (tree st)
13340 {
13341 return
13342 	    (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13343 }
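/* If a discarded subtree defined a label that a GOTO_EXPR elsewhere still
   references, deleting the subtree would leave that goto without a target;
   the COND_EXPR folding below therefore checks this before dropping a
   dead arm.  */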
13344
13345 /* Fold a ternary expression of code CODE and type TYPE with operands
13346 OP0, OP1, and OP2. Return the folded expression if folding is
13347 successful. Otherwise, return NULL_TREE. */
13348
13349 tree
13350 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13351 tree op0, tree op1, tree op2)
13352 {
13353 tree tem;
13354 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13355 enum tree_code_class kind = TREE_CODE_CLASS (code);
13356
13357 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13358 && TREE_CODE_LENGTH (code) == 3);
13359
13360 /* Strip any conversions that don't change the mode. This is safe
13361 for every expression, except for a comparison expression because
13362 its signedness is derived from its operands. So, in the latter
13363 case, only strip conversions that don't change the signedness.
13364
13365 Note that this is done as an internal manipulation within the
13366 constant folder, in order to find the simplest representation of
13367      the arguments so that their form can be studied.  In any case,
13368      the appropriate type conversions should be put back in the tree
13369      that comes out of the constant folder.  */
13370 if (op0)
13371 {
13372 arg0 = op0;
13373 STRIP_NOPS (arg0);
13374 }
13375
13376 if (op1)
13377 {
13378 arg1 = op1;
13379 STRIP_NOPS (arg1);
13380 }
13381
13382 if (op2)
13383 {
13384 arg2 = op2;
13385 STRIP_NOPS (arg2);
13386 }
13387
13388 switch (code)
13389 {
13390 case COMPONENT_REF:
13391 if (TREE_CODE (arg0) == CONSTRUCTOR
13392 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13393 {
13394 unsigned HOST_WIDE_INT idx;
13395 tree field, value;
13396 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13397 if (field == arg1)
13398 return value;
13399 }
13400 return NULL_TREE;
13401
13402 case COND_EXPR:
13403 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13404 so all simple results must be passed through pedantic_non_lvalue. */
13405 if (TREE_CODE (arg0) == INTEGER_CST)
13406 {
13407 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13408 tem = integer_zerop (arg0) ? op2 : op1;
13409 /* Only optimize constant conditions when the selected branch
13410 has the same type as the COND_EXPR. This avoids optimizing
13411 away "c ? x : throw", where the throw has a void type.
13412 	     Avoid throwing away an operand that contains a label.  */
13413 if ((!TREE_SIDE_EFFECTS (unused_op)
13414 || !contains_label_p (unused_op))
13415 && (! VOID_TYPE_P (TREE_TYPE (tem))
13416 || VOID_TYPE_P (type)))
13417 return pedantic_non_lvalue_loc (loc, tem);
13418 return NULL_TREE;
13419 }
13420 if (operand_equal_p (arg1, op2, 0))
13421 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13422
13423 /* If we have A op B ? A : C, we may be able to convert this to a
13424 simpler expression, depending on the operation and the values
13425 of B and C. Signed zeros prevent all of these transformations,
13426 for reasons given above each one.
13427
13428 Also try swapping the arguments and inverting the conditional. */
13429 if (COMPARISON_CLASS_P (arg0)
13430 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13431 arg1, TREE_OPERAND (arg0, 1))
13432 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13433 {
13434 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13435 if (tem)
13436 return tem;
13437 }
13438
13439 if (COMPARISON_CLASS_P (arg0)
13440 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13441 op2,
13442 TREE_OPERAND (arg0, 1))
13443 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13444 {
13445 location_t loc0 = expr_location_or (arg0, loc);
13446 tem = fold_truth_not_expr (loc0, arg0);
13447 if (tem && COMPARISON_CLASS_P (tem))
13448 {
13449 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13450 if (tem)
13451 return tem;
13452 }
13453 }
13454
13455 /* If the second operand is simpler than the third, swap them
13456 since that produces better jump optimization results. */
13457 if (truth_value_p (TREE_CODE (arg0))
13458 && tree_swap_operands_p (op1, op2, false))
13459 {
13460 location_t loc0 = expr_location_or (arg0, loc);
13461 /* See if this can be inverted. If it can't, possibly because
13462 it was a floating-point inequality comparison, don't do
13463 anything. */
13464 tem = fold_truth_not_expr (loc0, arg0);
13465 if (tem)
13466 return fold_build3_loc (loc, code, type, tem, op2, op1);
13467 }
13468
13469 /* Convert A ? 1 : 0 to simply A. */
13470 if (integer_onep (op1)
13471 && integer_zerop (op2)
13472 /* If we try to convert OP0 to our type, the
13473 call to fold will try to move the conversion inside
13474 a COND, which will recurse. In that case, the COND_EXPR
13475 is probably the best choice, so leave it alone. */
13476 && type == TREE_TYPE (arg0))
13477 return pedantic_non_lvalue_loc (loc, arg0);
13478
13479 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13480 over COND_EXPR in cases such as floating point comparisons. */
13481 if (integer_zerop (op1)
13482 && integer_onep (op2)
13483 && truth_value_p (TREE_CODE (arg0)))
13484 return pedantic_non_lvalue_loc (loc,
13485 fold_convert_loc (loc, type,
13486 invert_truthvalue_loc (loc,
13487 arg0)));
13488
13489 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13490 if (TREE_CODE (arg0) == LT_EXPR
13491 && integer_zerop (TREE_OPERAND (arg0, 1))
13492 && integer_zerop (op2)
13493 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13494 {
13495 /* sign_bit_p only checks ARG1 bits within A's precision.
13496 	     If <sign bit of A> has a wider type than A, bits outside
13497 	     of A's precision in <sign bit of A> need to be checked.
13498 	     If they are all 0, this optimization must be done
13499 	     in unsigned A's type; if they are all 1, in signed A's type;
13500 	     otherwise this can't be done.  */
13501 if (TYPE_PRECISION (TREE_TYPE (tem))
13502 < TYPE_PRECISION (TREE_TYPE (arg1))
13503 && TYPE_PRECISION (TREE_TYPE (tem))
13504 < TYPE_PRECISION (type))
13505 {
13506 unsigned HOST_WIDE_INT mask_lo;
13507 HOST_WIDE_INT mask_hi;
13508 int inner_width, outer_width;
13509 tree tem_type;
13510
13511 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13512 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13513 if (outer_width > TYPE_PRECISION (type))
13514 outer_width = TYPE_PRECISION (type);
13515
13516 if (outer_width > HOST_BITS_PER_WIDE_INT)
13517 {
13518 mask_hi = ((unsigned HOST_WIDE_INT) -1
13519 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13520 mask_lo = -1;
13521 }
13522 else
13523 {
13524 mask_hi = 0;
13525 mask_lo = ((unsigned HOST_WIDE_INT) -1
13526 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13527 }
13528 if (inner_width > HOST_BITS_PER_WIDE_INT)
13529 {
13530 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13531 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13532 mask_lo = 0;
13533 }
13534 else
13535 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13536 >> (HOST_BITS_PER_WIDE_INT - inner_width));
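	      /* MASK_HI/MASK_LO now cover exactly the bits of ARG1 in
		 positions [inner_width, outer_width).  E.g., with
		 HOST_BITS_PER_WIDE_INT == 32, inner_width == 32 and
		 outer_width == 40, mask_hi keeps bits 32..39 and mask_lo
		 ends up zero.  */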
13537
13538 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13539 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13540 {
13541 tem_type = signed_type_for (TREE_TYPE (tem));
13542 tem = fold_convert_loc (loc, tem_type, tem);
13543 }
13544 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13545 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13546 {
13547 tem_type = unsigned_type_for (TREE_TYPE (tem));
13548 tem = fold_convert_loc (loc, tem_type, tem);
13549 }
13550 else
13551 tem = NULL;
13552 }
13553
13554 if (tem)
13555 return
13556 fold_convert_loc (loc, type,
13557 fold_build2_loc (loc, BIT_AND_EXPR,
13558 TREE_TYPE (tem), tem,
13559 fold_convert_loc (loc,
13560 TREE_TYPE (tem),
13561 arg1)));
13562 }
13563
13564 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13565 already handled above. */
13566 if (TREE_CODE (arg0) == BIT_AND_EXPR
13567 && integer_onep (TREE_OPERAND (arg0, 1))
13568 && integer_zerop (op2)
13569 && integer_pow2p (arg1))
13570 {
13571 tree tem = TREE_OPERAND (arg0, 0);
13572 STRIP_NOPS (tem);
13573 if (TREE_CODE (tem) == RSHIFT_EXPR
13574 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13575 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13576 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13577 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13578 TREE_OPERAND (tem, 0), arg1);
13579 }
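      /* E.g., ((a >> 3) & 1) ? 8 : 0 becomes a & 8: testing bit 3 and
	 then selecting 1 << 3 is the same as masking bit 3 directly.  */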
13580
13581 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13582 is probably obsolete because the first operand should be a
13583 truth value (that's why we have the two cases above), but let's
13584 leave it in until we can confirm this for all front-ends. */
13585 if (integer_zerop (op2)
13586 && TREE_CODE (arg0) == NE_EXPR
13587 && integer_zerop (TREE_OPERAND (arg0, 1))
13588 && integer_pow2p (arg1)
13589 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13590 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13591 arg1, OEP_ONLY_CONST))
13592 return pedantic_non_lvalue_loc (loc,
13593 fold_convert_loc (loc, type,
13594 TREE_OPERAND (arg0, 0)));
13595
13596 /* Convert A ? B : 0 into A && B if A and B are truth values. */
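      /* The short-circuit TRUTH_ANDIF form is safe here: when A is
	 false, the original A ? B : 0 never evaluated B either.  */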
13597 if (integer_zerop (op2)
13598 && truth_value_p (TREE_CODE (arg0))
13599 && truth_value_p (TREE_CODE (arg1)))
13600 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13601 fold_convert_loc (loc, type, arg0),
13602 arg1);
13603
13604 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13605 if (integer_onep (op2)
13606 && truth_value_p (TREE_CODE (arg0))
13607 && truth_value_p (TREE_CODE (arg1)))
13608 {
13609 location_t loc0 = expr_location_or (arg0, loc);
13610 /* Only perform transformation if ARG0 is easily inverted. */
13611 tem = fold_truth_not_expr (loc0, arg0);
13612 if (tem)
13613 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13614 fold_convert_loc (loc, type, tem),
13615 arg1);
13616 }
13617
13618 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13619 if (integer_zerop (arg1)
13620 && truth_value_p (TREE_CODE (arg0))
13621 && truth_value_p (TREE_CODE (op2)))
13622 {
13623 location_t loc0 = expr_location_or (arg0, loc);
13624 /* Only perform transformation if ARG0 is easily inverted. */
13625 tem = fold_truth_not_expr (loc0, arg0);
13626 if (tem)
13627 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13628 fold_convert_loc (loc, type, tem),
13629 op2);
13630 }
13631
13632 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13633 if (integer_onep (arg1)
13634 && truth_value_p (TREE_CODE (arg0))
13635 && truth_value_p (TREE_CODE (op2)))
13636 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13637 fold_convert_loc (loc, type, arg0),
13638 op2);
13639
13640 return NULL_TREE;
13641
13642 case CALL_EXPR:
13643 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13644 of fold_ternary on them. */
13645 gcc_unreachable ();
13646
13647 case BIT_FIELD_REF:
13648 if ((TREE_CODE (arg0) == VECTOR_CST
13649 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13650 && type == TREE_TYPE (TREE_TYPE (arg0)))
13651 {
13652 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13653 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13654
13655 if (width != 0
13656 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13657 && (idx % width) == 0
13658 && (idx = idx / width)
13659 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13660 {
13661 tree elements = NULL_TREE;
13662
13663 if (TREE_CODE (arg0) == VECTOR_CST)
13664 elements = TREE_VECTOR_CST_ELTS (arg0);
13665 else
13666 {
13667 unsigned HOST_WIDE_INT idx;
13668 tree value;
13669
13670 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13671 elements = tree_cons (NULL_TREE, value, elements);
13672 }
13673 while (idx-- > 0 && elements)
13674 elements = TREE_CHAIN (elements);
13675 if (elements)
13676 return TREE_VALUE (elements);
13677 else
13678 return build_zero_cst (type);
13679 }
13680 }
13681
13682       /* A bit-field-ref that references the full argument can be stripped.  */
13683 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13684 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13685 && integer_zerop (op2))
13686 return fold_convert_loc (loc, type, arg0);
13687
13688 return NULL_TREE;
13689
13690 case FMA_EXPR:
13691 /* For integers we can decompose the FMA if possible. */
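      /* E.g., FMA_EXPR <4, 5, x> folds to 20 + x, and FMA_EXPR <a, b, 0>
	 folds to a * b.  */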
13692 if (TREE_CODE (arg0) == INTEGER_CST
13693 && TREE_CODE (arg1) == INTEGER_CST)
13694 return fold_build2_loc (loc, PLUS_EXPR, type,
13695 const_binop (MULT_EXPR, arg0, arg1), arg2);
13696 if (integer_zerop (arg2))
13697 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13698
13699 return fold_fma (loc, type, arg0, arg1, arg2);
13700
13701 default:
13702 return NULL_TREE;
13703 } /* switch (code) */
13704 }
13705
13706 /* Perform constant folding and related simplification of EXPR.
13707 The related simplifications include x*1 => x, x*0 => 0, etc.,
13708 and application of the associative law.
13709 NOP_EXPR conversions may be removed freely (as long as we
13710 are careful not to change the type of the overall expression).
13711 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13712 but we can constant-fold them if they have constant operands. */
13713
13714 #ifdef ENABLE_FOLD_CHECKING
13715 # define fold(x) fold_1 (x)
13716 static tree fold_1 (tree);
13717 static
13718 #endif
13719 tree
13720 fold (tree expr)
13721 {
13722 const tree t = expr;
13723 enum tree_code code = TREE_CODE (t);
13724 enum tree_code_class kind = TREE_CODE_CLASS (code);
13725 tree tem;
13726 location_t loc = EXPR_LOCATION (expr);
13727
13728 /* Return right away if a constant. */
13729 if (kind == tcc_constant)
13730 return t;
13731
13732 /* CALL_EXPR-like objects with variable numbers of operands are
13733 treated specially. */
13734 if (kind == tcc_vl_exp)
13735 {
13736 if (code == CALL_EXPR)
13737 {
13738 tem = fold_call_expr (loc, expr, false);
13739 return tem ? tem : expr;
13740 }
13741 return expr;
13742 }
13743
13744 if (IS_EXPR_CODE_CLASS (kind))
13745 {
13746 tree type = TREE_TYPE (t);
13747 tree op0, op1, op2;
13748
13749 switch (TREE_CODE_LENGTH (code))
13750 {
13751 case 1:
13752 op0 = TREE_OPERAND (t, 0);
13753 tem = fold_unary_loc (loc, code, type, op0);
13754 return tem ? tem : expr;
13755 case 2:
13756 op0 = TREE_OPERAND (t, 0);
13757 op1 = TREE_OPERAND (t, 1);
13758 tem = fold_binary_loc (loc, code, type, op0, op1);
13759 return tem ? tem : expr;
13760 case 3:
13761 op0 = TREE_OPERAND (t, 0);
13762 op1 = TREE_OPERAND (t, 1);
13763 op2 = TREE_OPERAND (t, 2);
13764 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13765 return tem ? tem : expr;
13766 default:
13767 break;
13768 }
13769 }
13770
13771 switch (code)
13772 {
13773 case ARRAY_REF:
13774 {
13775 tree op0 = TREE_OPERAND (t, 0);
13776 tree op1 = TREE_OPERAND (t, 1);
13777
13778 if (TREE_CODE (op1) == INTEGER_CST
13779 && TREE_CODE (op0) == CONSTRUCTOR
13780 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13781 {
13782 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13783 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13784 unsigned HOST_WIDE_INT begin = 0;
13785
13786 /* Find a matching index by means of a binary search. */
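	    /* E.g., in { [0] = 10, [2 ... 5] = 20, [9] = 30 }, looking up
	       index 3 lands on the RANGE_EXPR [2 ... 5] and yields 20.  */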
13787 while (begin != end)
13788 {
13789 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13790 tree index = VEC_index (constructor_elt, elts, middle)->index;
13791
13792 if (TREE_CODE (index) == INTEGER_CST
13793 && tree_int_cst_lt (index, op1))
13794 begin = middle + 1;
13795 else if (TREE_CODE (index) == INTEGER_CST
13796 && tree_int_cst_lt (op1, index))
13797 end = middle;
13798 else if (TREE_CODE (index) == RANGE_EXPR
13799 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13800 begin = middle + 1;
13801 else if (TREE_CODE (index) == RANGE_EXPR
13802 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13803 end = middle;
13804 else
13805 return VEC_index (constructor_elt, elts, middle)->value;
13806 }
13807 }
13808
13809 return t;
13810 }
13811
13812 case CONST_DECL:
13813 return fold (DECL_INITIAL (t));
13814
13815 default:
13816 return t;
13817 } /* switch (code) */
13818 }
13819
13820 #ifdef ENABLE_FOLD_CHECKING
13821 #undef fold
13822
13823 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13824 static void fold_check_failed (const_tree, const_tree);
13825 void print_fold_checksum (const_tree);
13826
13827    /* When configured with --enable-checking=fold, compute a digest of EXPR
13828       before and after the actual fold call, to verify that fold did not
13829       accidentally change the original EXPR.  */
13830
13831 tree
13832 fold (tree expr)
13833 {
13834 tree ret;
13835 struct md5_ctx ctx;
13836 unsigned char checksum_before[16], checksum_after[16];
13837 htab_t ht;
13838
13839 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13840 md5_init_ctx (&ctx);
13841 fold_checksum_tree (expr, &ctx, ht);
13842 md5_finish_ctx (&ctx, checksum_before);
13843 htab_empty (ht);
13844
13845 ret = fold_1 (expr);
13846
13847 md5_init_ctx (&ctx);
13848 fold_checksum_tree (expr, &ctx, ht);
13849 md5_finish_ctx (&ctx, checksum_after);
13850 htab_delete (ht);
13851
13852 if (memcmp (checksum_before, checksum_after, 16))
13853 fold_check_failed (expr, ret);
13854
13855 return ret;
13856 }
13857
13858 void
13859 print_fold_checksum (const_tree expr)
13860 {
13861 struct md5_ctx ctx;
13862 unsigned char checksum[16], cnt;
13863 htab_t ht;
13864
13865 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13866 md5_init_ctx (&ctx);
13867 fold_checksum_tree (expr, &ctx, ht);
13868 md5_finish_ctx (&ctx, checksum);
13869 htab_delete (ht);
13870 for (cnt = 0; cnt < 16; ++cnt)
13871 fprintf (stderr, "%02x", checksum[cnt]);
13872 putc ('\n', stderr);
13873 }
13874
13875 static void
13876 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13877 {
13878 internal_error ("fold check: original tree changed by fold");
13879 }
13880
13881 static void
13882 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13883 {
13884 void **slot;
13885 enum tree_code code;
13886 union tree_node buf;
13887 int i, len;
13888
13889 recursive_label:
13890
13891 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13892 <= sizeof (struct tree_function_decl))
13893 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13894 if (expr == NULL)
13895 return;
13896 slot = (void **) htab_find_slot (ht, expr, INSERT);
13897 if (*slot != NULL)
13898 return;
13899 *slot = CONST_CAST_TREE (expr);
13900 code = TREE_CODE (expr);
13901 if (TREE_CODE_CLASS (code) == tcc_declaration
13902 && DECL_ASSEMBLER_NAME_SET_P (expr))
13903 {
13904 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13905 memcpy ((char *) &buf, expr, tree_size (expr));
13906 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13907 expr = (tree) &buf;
13908 }
13909 else if (TREE_CODE_CLASS (code) == tcc_type
13910 && (TYPE_POINTER_TO (expr)
13911 || TYPE_REFERENCE_TO (expr)
13912 || TYPE_CACHED_VALUES_P (expr)
13913 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13914 || TYPE_NEXT_VARIANT (expr)))
13915 {
13916 /* Allow these fields to be modified. */
13917 tree tmp;
13918 memcpy ((char *) &buf, expr, tree_size (expr));
13919 expr = tmp = (tree) &buf;
13920 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13921 TYPE_POINTER_TO (tmp) = NULL;
13922 TYPE_REFERENCE_TO (tmp) = NULL;
13923 TYPE_NEXT_VARIANT (tmp) = NULL;
13924 if (TYPE_CACHED_VALUES_P (tmp))
13925 {
13926 TYPE_CACHED_VALUES_P (tmp) = 0;
13927 TYPE_CACHED_VALUES (tmp) = NULL;
13928 }
13929 }
13930 md5_process_bytes (expr, tree_size (expr), ctx);
13931 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13932 if (TREE_CODE_CLASS (code) != tcc_type
13933 && TREE_CODE_CLASS (code) != tcc_declaration
13934 && code != TREE_LIST
13935 && code != SSA_NAME
13936 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13937 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13938 switch (TREE_CODE_CLASS (code))
13939 {
13940 case tcc_constant:
13941 switch (code)
13942 {
13943 case STRING_CST:
13944 md5_process_bytes (TREE_STRING_POINTER (expr),
13945 TREE_STRING_LENGTH (expr), ctx);
13946 break;
13947 case COMPLEX_CST:
13948 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13949 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13950 break;
13951 case VECTOR_CST:
13952 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13953 break;
13954 default:
13955 break;
13956 }
13957 break;
13958 case tcc_exceptional:
13959 switch (code)
13960 {
13961 case TREE_LIST:
13962 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13963 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13964 expr = TREE_CHAIN (expr);
13965 goto recursive_label;
13966 break;
13967 case TREE_VEC:
13968 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13969 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13970 break;
13971 default:
13972 break;
13973 }
13974 break;
13975 case tcc_expression:
13976 case tcc_reference:
13977 case tcc_comparison:
13978 case tcc_unary:
13979 case tcc_binary:
13980 case tcc_statement:
13981 case tcc_vl_exp:
13982 len = TREE_OPERAND_LENGTH (expr);
13983 for (i = 0; i < len; ++i)
13984 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13985 break;
13986 case tcc_declaration:
13987 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13988 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13989 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13990 {
13991 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13992 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13993 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13994 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13995 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13996 }
13997 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13998 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13999
14000 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14001 {
14002 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14003 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14004 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14005 }
14006 break;
14007 case tcc_type:
14008 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14009 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14010 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14011 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14012 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14013 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14014 if (INTEGRAL_TYPE_P (expr)
14015 || SCALAR_FLOAT_TYPE_P (expr))
14016 {
14017 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14018 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14019 }
14020 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14021 if (TREE_CODE (expr) == RECORD_TYPE
14022 || TREE_CODE (expr) == UNION_TYPE
14023 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14024 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14025 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14026 break;
14027 default:
14028 break;
14029 }
14030 }
14031
14032 /* Helper function for outputting the checksum of a tree T. When
14033 debugging with gdb, you can "define mynext" to be "next" followed
14034    by "call debug_fold_checksum (op0)", then just trace down until the
14035 outputs differ. */
14036
14037 DEBUG_FUNCTION void
14038 debug_fold_checksum (const_tree t)
14039 {
14040 int i;
14041 unsigned char checksum[16];
14042 struct md5_ctx ctx;
14043 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14044
14045 md5_init_ctx (&ctx);
14046 fold_checksum_tree (t, &ctx, ht);
14047 md5_finish_ctx (&ctx, checksum);
14048 htab_empty (ht);
14049
14050 for (i = 0; i < 16; i++)
14051 fprintf (stderr, "%d ", checksum[i]);
14052
14053 fprintf (stderr, "\n");
14054 }
14055
14056 #endif
14057
14058 /* Fold a unary tree expression with code CODE of type TYPE with an
14059 operand OP0. LOC is the location of the resulting expression.
14060 Return a folded expression if successful. Otherwise, return a tree
14061 expression with code CODE of type TYPE with an operand OP0. */
14062
14063 tree
14064 fold_build1_stat_loc (location_t loc,
14065 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14066 {
14067 tree tem;
14068 #ifdef ENABLE_FOLD_CHECKING
14069 unsigned char checksum_before[16], checksum_after[16];
14070 struct md5_ctx ctx;
14071 htab_t ht;
14072
14073 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14074 md5_init_ctx (&ctx);
14075 fold_checksum_tree (op0, &ctx, ht);
14076 md5_finish_ctx (&ctx, checksum_before);
14077 htab_empty (ht);
14078 #endif
14079
14080 tem = fold_unary_loc (loc, code, type, op0);
14081 if (!tem)
14082 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14083
14084 #ifdef ENABLE_FOLD_CHECKING
14085 md5_init_ctx (&ctx);
14086 fold_checksum_tree (op0, &ctx, ht);
14087 md5_finish_ctx (&ctx, checksum_after);
14088 htab_delete (ht);
14089
14090 if (memcmp (checksum_before, checksum_after, 16))
14091 fold_check_failed (op0, tem);
14092 #endif
14093 return tem;
14094 }
14095
14096 /* Fold a binary tree expression with code CODE of type TYPE with
14097 operands OP0 and OP1. LOC is the location of the resulting
14098 expression. Return a folded expression if successful. Otherwise,
14099 return a tree expression with code CODE of type TYPE with operands
14100 OP0 and OP1. */
14101
14102 tree
14103 fold_build2_stat_loc (location_t loc,
14104 enum tree_code code, tree type, tree op0, tree op1
14105 MEM_STAT_DECL)
14106 {
14107 tree tem;
14108 #ifdef ENABLE_FOLD_CHECKING
14109 unsigned char checksum_before_op0[16],
14110 checksum_before_op1[16],
14111 checksum_after_op0[16],
14112 checksum_after_op1[16];
14113 struct md5_ctx ctx;
14114 htab_t ht;
14115
14116 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14117 md5_init_ctx (&ctx);
14118 fold_checksum_tree (op0, &ctx, ht);
14119 md5_finish_ctx (&ctx, checksum_before_op0);
14120 htab_empty (ht);
14121
14122 md5_init_ctx (&ctx);
14123 fold_checksum_tree (op1, &ctx, ht);
14124 md5_finish_ctx (&ctx, checksum_before_op1);
14125 htab_empty (ht);
14126 #endif
14127
14128 tem = fold_binary_loc (loc, code, type, op0, op1);
14129 if (!tem)
14130 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14131
14132 #ifdef ENABLE_FOLD_CHECKING
14133 md5_init_ctx (&ctx);
14134 fold_checksum_tree (op0, &ctx, ht);
14135 md5_finish_ctx (&ctx, checksum_after_op0);
14136 htab_empty (ht);
14137
14138 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14139 fold_check_failed (op0, tem);
14140
14141 md5_init_ctx (&ctx);
14142 fold_checksum_tree (op1, &ctx, ht);
14143 md5_finish_ctx (&ctx, checksum_after_op1);
14144 htab_delete (ht);
14145
14146 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14147 fold_check_failed (op1, tem);
14148 #endif
14149 return tem;
14150 }
14151
14152 /* Fold a ternary tree expression with code CODE of type TYPE with
14153 operands OP0, OP1, and OP2. Return a folded expression if
14154 successful. Otherwise, return a tree expression with code CODE of
14155 type TYPE with operands OP0, OP1, and OP2. */
14156
14157 tree
14158 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14159 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14160 {
14161 tree tem;
14162 #ifdef ENABLE_FOLD_CHECKING
14163 unsigned char checksum_before_op0[16],
14164 checksum_before_op1[16],
14165 checksum_before_op2[16],
14166 checksum_after_op0[16],
14167 checksum_after_op1[16],
14168 checksum_after_op2[16];
14169 struct md5_ctx ctx;
14170 htab_t ht;
14171
14172 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14173 md5_init_ctx (&ctx);
14174 fold_checksum_tree (op0, &ctx, ht);
14175 md5_finish_ctx (&ctx, checksum_before_op0);
14176 htab_empty (ht);
14177
14178 md5_init_ctx (&ctx);
14179 fold_checksum_tree (op1, &ctx, ht);
14180 md5_finish_ctx (&ctx, checksum_before_op1);
14181 htab_empty (ht);
14182
14183 md5_init_ctx (&ctx);
14184 fold_checksum_tree (op2, &ctx, ht);
14185 md5_finish_ctx (&ctx, checksum_before_op2);
14186 htab_empty (ht);
14187 #endif
14188
14189 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14190 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14191 if (!tem)
14192 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14193
14194 #ifdef ENABLE_FOLD_CHECKING
14195 md5_init_ctx (&ctx);
14196 fold_checksum_tree (op0, &ctx, ht);
14197 md5_finish_ctx (&ctx, checksum_after_op0);
14198 htab_empty (ht);
14199
14200 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14201 fold_check_failed (op0, tem);
14202
14203 md5_init_ctx (&ctx);
14204 fold_checksum_tree (op1, &ctx, ht);
14205 md5_finish_ctx (&ctx, checksum_after_op1);
14206 htab_empty (ht);
14207
14208 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14209 fold_check_failed (op1, tem);
14210
14211 md5_init_ctx (&ctx);
14212 fold_checksum_tree (op2, &ctx, ht);
14213 md5_finish_ctx (&ctx, checksum_after_op2);
14214 htab_delete (ht);
14215
14216 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14217 fold_check_failed (op2, tem);
14218 #endif
14219 return tem;
14220 }
14221
14222 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14223 arguments in ARGARRAY, and a null static chain.
14224 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14225 of type TYPE from the given operands as constructed by build_call_array. */
14226
14227 tree
14228 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14229 int nargs, tree *argarray)
14230 {
14231 tree tem;
14232 #ifdef ENABLE_FOLD_CHECKING
14233 unsigned char checksum_before_fn[16],
14234 checksum_before_arglist[16],
14235 checksum_after_fn[16],
14236 checksum_after_arglist[16];
14237 struct md5_ctx ctx;
14238 htab_t ht;
14239 int i;
14240
14241 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14242 md5_init_ctx (&ctx);
14243 fold_checksum_tree (fn, &ctx, ht);
14244 md5_finish_ctx (&ctx, checksum_before_fn);
14245 htab_empty (ht);
14246
14247 md5_init_ctx (&ctx);
14248 for (i = 0; i < nargs; i++)
14249 fold_checksum_tree (argarray[i], &ctx, ht);
14250 md5_finish_ctx (&ctx, checksum_before_arglist);
14251 htab_empty (ht);
14252 #endif
14253
14254 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14255
14256 #ifdef ENABLE_FOLD_CHECKING
14257 md5_init_ctx (&ctx);
14258 fold_checksum_tree (fn, &ctx, ht);
14259 md5_finish_ctx (&ctx, checksum_after_fn);
14260 htab_empty (ht);
14261
14262 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14263 fold_check_failed (fn, tem);
14264
14265 md5_init_ctx (&ctx);
14266 for (i = 0; i < nargs; i++)
14267 fold_checksum_tree (argarray[i], &ctx, ht);
14268 md5_finish_ctx (&ctx, checksum_after_arglist);
14269 htab_delete (ht);
14270
14271 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14272 fold_check_failed (NULL_TREE, tem);
14273 #endif
14274 return tem;
14275 }
14276
14277 /* Perform constant folding and related simplification of initializer
14278 expression EXPR. These behave identically to "fold_buildN" but ignore
14279 potential run-time traps and exceptions that fold must preserve. */
14280
14281 #define START_FOLD_INIT \
14282 int saved_signaling_nans = flag_signaling_nans;\
14283 int saved_trapping_math = flag_trapping_math;\
14284 int saved_rounding_math = flag_rounding_math;\
14285 int saved_trapv = flag_trapv;\
14286 int saved_folding_initializer = folding_initializer;\
14287 flag_signaling_nans = 0;\
14288 flag_trapping_math = 0;\
14289 flag_rounding_math = 0;\
14290 flag_trapv = 0;\
14291 folding_initializer = 1;
14292
14293 #define END_FOLD_INIT \
14294 flag_signaling_nans = saved_signaling_nans;\
14295 flag_trapping_math = saved_trapping_math;\
14296 flag_rounding_math = saved_rounding_math;\
14297 flag_trapv = saved_trapv;\
14298 folding_initializer = saved_folding_initializer;
14299
14300 tree
14301 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14302 tree type, tree op)
14303 {
14304 tree result;
14305 START_FOLD_INIT;
14306
14307 result = fold_build1_loc (loc, code, type, op);
14308
14309 END_FOLD_INIT;
14310 return result;
14311 }
14312
14313 tree
14314 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14315 tree type, tree op0, tree op1)
14316 {
14317 tree result;
14318 START_FOLD_INIT;
14319
14320 result = fold_build2_loc (loc, code, type, op0, op1);
14321
14322 END_FOLD_INIT;
14323 return result;
14324 }
14325
14326 tree
14327 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14328 tree type, tree op0, tree op1, tree op2)
14329 {
14330 tree result;
14331 START_FOLD_INIT;
14332
14333 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14334
14335 END_FOLD_INIT;
14336 return result;
14337 }
14338
14339 tree
14340 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14341 int nargs, tree *argarray)
14342 {
14343 tree result;
14344 START_FOLD_INIT;
14345
14346 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14347
14348 END_FOLD_INIT;
14349 return result;
14350 }
14351
14352 #undef START_FOLD_INIT
14353 #undef END_FOLD_INIT
14354
14355 /* Determine whether the first argument is a multiple of the second argument.
14356    Return 0 if it is not, or if we cannot easily determine it to be.
14357
14358 An example of the sort of thing we care about (at this point; this routine
14359 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14360 fold cases do now) is discovering that
14361
14362 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14363
14364 is a multiple of
14365
14366 SAVE_EXPR (J * 8)
14367
14368 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14369
14370 This code also handles discovering that
14371
14372 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14373
14374 is a multiple of 8 so we don't have to worry about dealing with a
14375 possible remainder.
14376
14377 Note that we *look* inside a SAVE_EXPR only to determine how it was
14378 calculated; it is not safe for fold to do much of anything else with the
14379 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14380 at run time. For example, the latter example above *cannot* be implemented
14381 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14382 evaluation time of the original SAVE_EXPR is not necessarily the same at
14383 the time the new expression is evaluated. The only optimization of this
14384 sort that would be valid is changing
14385
14386 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14387
14388 divided by 8 to
14389
14390 SAVE_EXPR (I) * SAVE_EXPR (J)
14391
14392 (where the same SAVE_EXPR (J) is used in the original and the
14393 transformed version). */
14394
14395 int
14396 multiple_of_p (tree type, const_tree top, const_tree bottom)
14397 {
14398 if (operand_equal_p (top, bottom, 0))
14399 return 1;
14400
14401 if (TREE_CODE (type) != INTEGER_TYPE)
14402 return 0;
14403
14404 switch (TREE_CODE (top))
14405 {
14406 case BIT_AND_EXPR:
14407       /* A bitwise AND can only clear bits, so if BOTTOM is a power of two
14408 	 and either operand of the AND is a multiple of BOTTOM, TOP is too.  */
14409 if (!integer_pow2p (bottom))
14410 return 0;
14411 /* FALLTHRU */
14412
14413 case MULT_EXPR:
14414 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14415 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14416
14417 case PLUS_EXPR:
14418 case MINUS_EXPR:
14419 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14420 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14421
14422 case LSHIFT_EXPR:
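      /* X << N is a multiple of BOTTOM whenever 1 << N is; e.g.,
	 X << 3 is always a multiple of 8.  */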
14423 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14424 {
14425 tree op1, t1;
14426
14427 op1 = TREE_OPERAND (top, 1);
14428 /* const_binop may not detect overflow correctly,
14429 so check for it explicitly here. */
14430 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14431 > TREE_INT_CST_LOW (op1)
14432 && TREE_INT_CST_HIGH (op1) == 0
14433 && 0 != (t1 = fold_convert (type,
14434 const_binop (LSHIFT_EXPR,
14435 size_one_node,
14436 op1)))
14437 && !TREE_OVERFLOW (t1))
14438 return multiple_of_p (type, t1, bottom);
14439 }
14440 return 0;
14441
14442 case NOP_EXPR:
14443 /* Can't handle conversions from non-integral or wider integral type. */
14444 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14445 || (TYPE_PRECISION (type)
14446 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14447 return 0;
14448
14449       /* ... fall through ... */
14450
14451 case SAVE_EXPR:
14452 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14453
14454 case COND_EXPR:
14455 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14456 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14457
14458 case INTEGER_CST:
14459 if (TREE_CODE (bottom) != INTEGER_CST
14460 || integer_zerop (bottom)
14461 || (TYPE_UNSIGNED (type)
14462 && (tree_int_cst_sgn (top) < 0
14463 || tree_int_cst_sgn (bottom) < 0)))
14464 return 0;
14465 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14466 top, bottom));
14467
14468 default:
14469 return 0;
14470 }
14471 }
14472
14473 /* Return true if CODE or TYPE is known to be non-negative. */
14474
14475 static bool
14476 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14477 {
14478 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14479 && truth_value_p (code))
14480 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14481        have a signed:1 type (where the values are -1 and 0).  */
14482 return true;
14483 return false;
14484 }
14485
14486 /* Return true if (CODE OP0) is known to be non-negative. If the return
14487 value is based on the assumption that signed overflow is undefined,
14488 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14489 *STRICT_OVERFLOW_P. */
14490
14491 bool
14492 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14493 bool *strict_overflow_p)
14494 {
14495 if (TYPE_UNSIGNED (type))
14496 return true;
14497
14498 switch (code)
14499 {
14500 case ABS_EXPR:
14501 /* We can't return 1 if flag_wrapv is set because
14502 ABS_EXPR<INT_MIN> = INT_MIN. */
14503 if (!INTEGRAL_TYPE_P (type))
14504 return true;
14505 if (TYPE_OVERFLOW_UNDEFINED (type))
14506 {
14507 *strict_overflow_p = true;
14508 return true;
14509 }
14510 break;
14511
14512 case NON_LVALUE_EXPR:
14513 case FLOAT_EXPR:
14514 case FIX_TRUNC_EXPR:
14515 return tree_expr_nonnegative_warnv_p (op0,
14516 strict_overflow_p);
14517
14518 case NOP_EXPR:
14519 {
14520 tree inner_type = TREE_TYPE (op0);
14521 tree outer_type = type;
14522
14523 if (TREE_CODE (outer_type) == REAL_TYPE)
14524 {
14525 if (TREE_CODE (inner_type) == REAL_TYPE)
14526 return tree_expr_nonnegative_warnv_p (op0,
14527 strict_overflow_p);
14528 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14529 {
14530 if (TYPE_UNSIGNED (inner_type))
14531 return true;
14532 return tree_expr_nonnegative_warnv_p (op0,
14533 strict_overflow_p);
14534 }
14535 }
14536 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14537 {
14538 if (TREE_CODE (inner_type) == REAL_TYPE)
14539 return tree_expr_nonnegative_warnv_p (op0,
14540 strict_overflow_p);
14541 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14542 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14543 && TYPE_UNSIGNED (inner_type);
14544 }
14545 }
14546 break;
14547
14548 default:
14549 return tree_simple_nonnegative_warnv_p (code, type);
14550 }
14551
14552   /* We don't know the sign of `t', so be conservative and return false.  */
14553 return false;
14554 }
14555
14556 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14557 value is based on the assumption that signed overflow is undefined,
14558 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14559 *STRICT_OVERFLOW_P. */
14560
14561 bool
14562 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14563 tree op1, bool *strict_overflow_p)
14564 {
14565 if (TYPE_UNSIGNED (type))
14566 return true;
14567
14568 switch (code)
14569 {
14570 case POINTER_PLUS_EXPR:
14571 case PLUS_EXPR:
14572 if (FLOAT_TYPE_P (type))
14573 return (tree_expr_nonnegative_warnv_p (op0,
14574 strict_overflow_p)
14575 && tree_expr_nonnegative_warnv_p (op1,
14576 strict_overflow_p));
14577
14578 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14579 both unsigned and at least 2 bits shorter than the result. */
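      /* E.g., (int) (unsigned char) x + (int) (unsigned char) y is at
	 most 255 + 255 == 510, which needs only 9 bits, so in a 32-bit
	 int the sum can neither wrap nor become negative.  */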
14580 if (TREE_CODE (type) == INTEGER_TYPE
14581 && TREE_CODE (op0) == NOP_EXPR
14582 && TREE_CODE (op1) == NOP_EXPR)
14583 {
14584 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14585 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14586 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14587 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14588 {
14589 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14590 TYPE_PRECISION (inner2)) + 1;
14591 return prec < TYPE_PRECISION (type);
14592 }
14593 }
14594 break;
14595
14596 case MULT_EXPR:
14597 if (FLOAT_TYPE_P (type))
14598 {
14599 /* x * x for floating point x is always non-negative. */
14600 if (operand_equal_p (op0, op1, 0))
14601 return true;
14602 return (tree_expr_nonnegative_warnv_p (op0,
14603 strict_overflow_p)
14604 && tree_expr_nonnegative_warnv_p (op1,
14605 strict_overflow_p));
14606 }
14607
14608 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14609      both unsigned and their combined width is less than that of the result.  */
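      /* E.g., (int) (unsigned char) x * (int) (unsigned char) y is at
	 most 255 * 255 == 65025, which needs only 16 bits, so the
	 product fits in a 32-bit int without wrapping.  */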
14610 if (TREE_CODE (type) == INTEGER_TYPE
14611 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14612 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14613 {
14614 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14615 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14616 : TREE_TYPE (op0);
14617 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14618 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14619 : TREE_TYPE (op1);
14620
14621 bool unsigned0 = TYPE_UNSIGNED (inner0);
14622 bool unsigned1 = TYPE_UNSIGNED (inner1);
14623
14624 if (TREE_CODE (op0) == INTEGER_CST)
14625 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14626
14627 if (TREE_CODE (op1) == INTEGER_CST)
14628 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14629
14630 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14631 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14632 {
14633 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14634 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14635 : TYPE_PRECISION (inner0);
14636
14637 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14638 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14639 : TYPE_PRECISION (inner1);
14640
14641 return precision0 + precision1 < TYPE_PRECISION (type);
14642 }
14643 }
14644 return false;
14645
14646 case BIT_AND_EXPR:
14647 case MAX_EXPR:
14648 return (tree_expr_nonnegative_warnv_p (op0,
14649 strict_overflow_p)
14650 || tree_expr_nonnegative_warnv_p (op1,
14651 strict_overflow_p));
14652
14653 case BIT_IOR_EXPR:
14654 case BIT_XOR_EXPR:
14655 case MIN_EXPR:
14656 case RDIV_EXPR:
14657 case TRUNC_DIV_EXPR:
14658 case CEIL_DIV_EXPR:
14659 case FLOOR_DIV_EXPR:
14660 case ROUND_DIV_EXPR:
14661 return (tree_expr_nonnegative_warnv_p (op0,
14662 strict_overflow_p)
14663 && tree_expr_nonnegative_warnv_p (op1,
14664 strict_overflow_p));
14665
14666 case TRUNC_MOD_EXPR:
14667 case CEIL_MOD_EXPR:
14668 case FLOOR_MOD_EXPR:
14669 case ROUND_MOD_EXPR:
14670 return tree_expr_nonnegative_warnv_p (op0,
14671 strict_overflow_p);
14672 default:
14673 return tree_simple_nonnegative_warnv_p (code, type);
14674 }
14675
14676   /* We don't know the sign of `t', so be conservative and return false.  */
14677 return false;
14678 }
14679
14680 /* Return true if T is known to be non-negative. If the return
14681 value is based on the assumption that signed overflow is undefined,
14682 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14683 *STRICT_OVERFLOW_P. */
14684
14685 bool
14686 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14687 {
14688 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14689 return true;
14690
14691 switch (TREE_CODE (t))
14692 {
14693 case INTEGER_CST:
14694 return tree_int_cst_sgn (t) >= 0;
14695
14696 case REAL_CST:
14697 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14698
14699 case FIXED_CST:
14700 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14701
14702 case COND_EXPR:
14703 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14704 strict_overflow_p)
14705 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14706 strict_overflow_p));
14707 default:
14708 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14709 TREE_TYPE (t));
14710 }
14711   /* We don't know the sign of `t', so be conservative and return false.  */
14712 return false;
14713 }
14714
14715 /* Return true if T is known to be non-negative. If the return
14716 value is based on the assumption that signed overflow is undefined,
14717 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14718 *STRICT_OVERFLOW_P. */
14719
14720 bool
14721 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14722 tree arg0, tree arg1, bool *strict_overflow_p)
14723 {
14724 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14725 switch (DECL_FUNCTION_CODE (fndecl))
14726 {
14727 CASE_FLT_FN (BUILT_IN_ACOS):
14728 CASE_FLT_FN (BUILT_IN_ACOSH):
14729 CASE_FLT_FN (BUILT_IN_CABS):
14730 CASE_FLT_FN (BUILT_IN_COSH):
14731 CASE_FLT_FN (BUILT_IN_ERFC):
14732 CASE_FLT_FN (BUILT_IN_EXP):
14733 CASE_FLT_FN (BUILT_IN_EXP10):
14734 CASE_FLT_FN (BUILT_IN_EXP2):
14735 CASE_FLT_FN (BUILT_IN_FABS):
14736 CASE_FLT_FN (BUILT_IN_FDIM):
14737 CASE_FLT_FN (BUILT_IN_HYPOT):
14738 CASE_FLT_FN (BUILT_IN_POW10):
14739 CASE_INT_FN (BUILT_IN_FFS):
14740 CASE_INT_FN (BUILT_IN_PARITY):
14741 CASE_INT_FN (BUILT_IN_POPCOUNT):
14742 case BUILT_IN_BSWAP32:
14743 case BUILT_IN_BSWAP64:
14744 /* Always true. */
14745 return true;
14746
14747 CASE_FLT_FN (BUILT_IN_SQRT):
14748 /* sqrt(-0.0) is -0.0. */
14749 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14750 return true;
14751 return tree_expr_nonnegative_warnv_p (arg0,
14752 strict_overflow_p);
14753
14754 CASE_FLT_FN (BUILT_IN_ASINH):
14755 CASE_FLT_FN (BUILT_IN_ATAN):
14756 CASE_FLT_FN (BUILT_IN_ATANH):
14757 CASE_FLT_FN (BUILT_IN_CBRT):
14758 CASE_FLT_FN (BUILT_IN_CEIL):
14759 CASE_FLT_FN (BUILT_IN_ERF):
14760 CASE_FLT_FN (BUILT_IN_EXPM1):
14761 CASE_FLT_FN (BUILT_IN_FLOOR):
14762 CASE_FLT_FN (BUILT_IN_FMOD):
14763 CASE_FLT_FN (BUILT_IN_FREXP):
14764 CASE_FLT_FN (BUILT_IN_ICEIL):
14765 CASE_FLT_FN (BUILT_IN_IFLOOR):
14766 CASE_FLT_FN (BUILT_IN_IRINT):
14767 CASE_FLT_FN (BUILT_IN_IROUND):
14768 CASE_FLT_FN (BUILT_IN_LCEIL):
14769 CASE_FLT_FN (BUILT_IN_LDEXP):
14770 CASE_FLT_FN (BUILT_IN_LFLOOR):
14771 CASE_FLT_FN (BUILT_IN_LLCEIL):
14772 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14773 CASE_FLT_FN (BUILT_IN_LLRINT):
14774 CASE_FLT_FN (BUILT_IN_LLROUND):
14775 CASE_FLT_FN (BUILT_IN_LRINT):
14776 CASE_FLT_FN (BUILT_IN_LROUND):
14777 CASE_FLT_FN (BUILT_IN_MODF):
14778 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14779 CASE_FLT_FN (BUILT_IN_RINT):
14780 CASE_FLT_FN (BUILT_IN_ROUND):
14781 CASE_FLT_FN (BUILT_IN_SCALB):
14782 CASE_FLT_FN (BUILT_IN_SCALBLN):
14783 CASE_FLT_FN (BUILT_IN_SCALBN):
14784 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14785 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14786 CASE_FLT_FN (BUILT_IN_SINH):
14787 CASE_FLT_FN (BUILT_IN_TANH):
14788 CASE_FLT_FN (BUILT_IN_TRUNC):
14789 /* True if the 1st argument is nonnegative. */
14790 return tree_expr_nonnegative_warnv_p (arg0,
14791 strict_overflow_p);
14792
14793 CASE_FLT_FN (BUILT_IN_FMAX):
14794 /* True if the 1st OR 2nd arguments are nonnegative. */
14795 return (tree_expr_nonnegative_warnv_p (arg0,
14796 strict_overflow_p)
14797 || (tree_expr_nonnegative_warnv_p (arg1,
14798 strict_overflow_p)));
14799
14800 CASE_FLT_FN (BUILT_IN_FMIN):
14801 /* True if the 1st AND 2nd arguments are nonnegative. */
14802 return (tree_expr_nonnegative_warnv_p (arg0,
14803 strict_overflow_p)
14804 && (tree_expr_nonnegative_warnv_p (arg1,
14805 strict_overflow_p)));
14806
14807 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14808 /* True if the 2nd argument is nonnegative. */
14809 return tree_expr_nonnegative_warnv_p (arg1,
14810 strict_overflow_p);
14811
14812 CASE_FLT_FN (BUILT_IN_POWI):
14813 /* True if the 1st argument is nonnegative or the second
14814 argument is an even integer. */
14815 if (TREE_CODE (arg1) == INTEGER_CST
14816 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14817 return true;
14818 return tree_expr_nonnegative_warnv_p (arg0,
14819 strict_overflow_p);
14820
14821 CASE_FLT_FN (BUILT_IN_POW):
14822 /* True if the 1st argument is nonnegative or the second
14823 argument is an even integer valued real. */
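	/* E.g., pow (x, 2.0) is known nonnegative for any x, whereas
	   pow (x, 2.5) is known nonnegative only when x is.  */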
14824 if (TREE_CODE (arg1) == REAL_CST)
14825 {
14826 REAL_VALUE_TYPE c;
14827 HOST_WIDE_INT n;
14828
14829 c = TREE_REAL_CST (arg1);
14830 n = real_to_integer (&c);
14831 if ((n & 1) == 0)
14832 {
14833 REAL_VALUE_TYPE cint;
14834 real_from_integer (&cint, VOIDmode, n,
14835 n < 0 ? -1 : 0, 0);
14836 if (real_identical (&c, &cint))
14837 return true;
14838 }
14839 }
14840 return tree_expr_nonnegative_warnv_p (arg0,
14841 strict_overflow_p);
14842
14843 default:
14844 break;
14845 }
14846 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14847 type);
14848 }
14849
14850 /* Return true if T is known to be non-negative. If the return
14851 value is based on the assumption that signed overflow is undefined,
14852 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14853 *STRICT_OVERFLOW_P. */
14854
14855 bool
14856 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14857 {
14858 enum tree_code code = TREE_CODE (t);
14859 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14860 return true;
14861
14862 switch (code)
14863 {
14864 case TARGET_EXPR:
14865 {
14866 tree temp = TARGET_EXPR_SLOT (t);
14867 t = TARGET_EXPR_INITIAL (t);
14868
14869 /* If the initializer is non-void, then it's a normal expression
14870 that will be assigned to the slot. */
14871 if (!VOID_TYPE_P (t))
14872 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14873
14874 /* Otherwise, the initializer sets the slot in some way. One common
14875 way is an assignment statement at the end of the initializer. */
14876 while (1)
14877 {
14878 if (TREE_CODE (t) == BIND_EXPR)
14879 t = expr_last (BIND_EXPR_BODY (t));
14880 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14881 || TREE_CODE (t) == TRY_CATCH_EXPR)
14882 t = expr_last (TREE_OPERAND (t, 0));
14883 else if (TREE_CODE (t) == STATEMENT_LIST)
14884 t = expr_last (t);
14885 else
14886 break;
14887 }
14888 if (TREE_CODE (t) == MODIFY_EXPR
14889 && TREE_OPERAND (t, 0) == temp)
14890 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14891 strict_overflow_p);
14892
14893 return false;
14894 }
14895
14896 case CALL_EXPR:
14897 {
14898 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14899 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14900
14901 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14902 get_callee_fndecl (t),
14903 arg0,
14904 arg1,
14905 strict_overflow_p);
14906 }
14907 case COMPOUND_EXPR:
14908 case MODIFY_EXPR:
14909 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14910 strict_overflow_p);
14911 case BIND_EXPR:
14912 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14913 strict_overflow_p);
14914 case SAVE_EXPR:
14915 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14916 strict_overflow_p);
14917
14918 default:
14919 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14920 TREE_TYPE (t));
14921 }
14922
14923   /* We don't know the sign of `t', so be conservative and return false.  */
14924 return false;
14925 }
14926
14927 /* Return true if T is known to be non-negative. If the return
14928 value is based on the assumption that signed overflow is undefined,
14929 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14930 *STRICT_OVERFLOW_P. */
14931
14932 bool
14933 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14934 {
14935 enum tree_code code;
14936 if (t == error_mark_node)
14937 return false;
14938
14939 code = TREE_CODE (t);
14940 switch (TREE_CODE_CLASS (code))
14941 {
14942 case tcc_binary:
14943 case tcc_comparison:
14944 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14945 TREE_TYPE (t),
14946 TREE_OPERAND (t, 0),
14947 TREE_OPERAND (t, 1),
14948 strict_overflow_p);
14949
14950 case tcc_unary:
14951 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14952 TREE_TYPE (t),
14953 TREE_OPERAND (t, 0),
14954 strict_overflow_p);
14955
14956 case tcc_constant:
14957 case tcc_declaration:
14958 case tcc_reference:
14959 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14960
14961 default:
14962 break;
14963 }
14964
14965 switch (code)
14966 {
14967 case TRUTH_AND_EXPR:
14968 case TRUTH_OR_EXPR:
14969 case TRUTH_XOR_EXPR:
14970 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14971 TREE_TYPE (t),
14972 TREE_OPERAND (t, 0),
14973 TREE_OPERAND (t, 1),
14974 strict_overflow_p);
14975 case TRUTH_NOT_EXPR:
14976 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14977 TREE_TYPE (t),
14978 TREE_OPERAND (t, 0),
14979 strict_overflow_p);
14980
14981 case COND_EXPR:
14982 case CONSTRUCTOR:
14983 case OBJ_TYPE_REF:
14984 case ASSERT_EXPR:
14985 case ADDR_EXPR:
14986 case WITH_SIZE_EXPR:
14987 case SSA_NAME:
14988 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14989
14990 default:
14991 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14992 }
14993 }
14994
14995 /* Return true if `t' is known to be non-negative. Handle warnings
14996 about undefined signed overflow. */
14997
14998 bool
14999 tree_expr_nonnegative_p (tree t)
15000 {
15001 bool ret, strict_overflow_p;
15002
15003 strict_overflow_p = false;
15004 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15005 if (strict_overflow_p)
15006 fold_overflow_warning (("assuming signed overflow does not occur when "
15007 "determining that expression is always "
15008 "non-negative"),
15009 WARN_STRICT_OVERFLOW_MISC);
15010 return ret;
15011 }
15012
15013
15014 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15015 For floating point we further ensure that T is not denormal.
15016    Similar logic is present in nonzero_address_p in rtlanal.c.
15017
15018 If the return value is based on the assumption that signed overflow
15019 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15020 change *STRICT_OVERFLOW_P. */
15021
15022 bool
15023 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15024 bool *strict_overflow_p)
15025 {
15026 switch (code)
15027 {
15028 case ABS_EXPR:
15029 return tree_expr_nonzero_warnv_p (op0,
15030 strict_overflow_p);
15031
15032 case NOP_EXPR:
15033 {
15034 tree inner_type = TREE_TYPE (op0);
15035 tree outer_type = type;
15036
15037 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15038 && tree_expr_nonzero_warnv_p (op0,
15039 strict_overflow_p));
15040 }
15041 break;
15042
15043 case NON_LVALUE_EXPR:
15044 return tree_expr_nonzero_warnv_p (op0,
15045 strict_overflow_p);
15046
15047 default:
15048 break;
15049 }
15050
15051 return false;
15052 }
15053
15054 /* Return true when the binary expression (CODE OP0 OP1) with result
15055 type TYPE is known to be nonzero. Similar logic is present in
15056 nonzero_address_p in rtlanal.c.
15057
15058 If the return value is based on the assumption that signed overflow
15059 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15060 change *STRICT_OVERFLOW_P. */
15061
15062 bool
15063 tree_binary_nonzero_warnv_p (enum tree_code code,
15064 tree type,
15065 tree op0,
15066 tree op1, bool *strict_overflow_p)
15067 {
15068 bool sub_strict_overflow_p;
15069 switch (code)
15070 {
15071 case POINTER_PLUS_EXPR:
15072 case PLUS_EXPR:
15073 if (TYPE_OVERFLOW_UNDEFINED (type))
15074 {
15075 /* In the presence of negative values it is hard
15076 to say anything definite. */
15077 sub_strict_overflow_p = false;
15078 if (!tree_expr_nonnegative_warnv_p (op0,
15079 &sub_strict_overflow_p)
15080 || !tree_expr_nonnegative_warnv_p (op1,
15081 &sub_strict_overflow_p))
15082 return false;
15083 /* One of the operands must be positive and the other non-negative. */
15084 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15085 overflows, on a twos-complement machine the sum of two
15086 nonnegative numbers can never be zero. */
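/* E.g. with 8-bit operands both in [0, 127] the sum is at most 254,
   so even after wrapping modulo 256 it can only be zero when both
   operands were zero.  */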
15087 return (tree_expr_nonzero_warnv_p (op0,
15088 strict_overflow_p)
15089 || tree_expr_nonzero_warnv_p (op1,
15090 strict_overflow_p));
15091 }
15092 break;
15093
15094 case MULT_EXPR:
15095 if (TYPE_OVERFLOW_UNDEFINED (type))
15096 {
15097 if (tree_expr_nonzero_warnv_p (op0,
15098 strict_overflow_p)
15099 && tree_expr_nonzero_warnv_p (op1,
15100 strict_overflow_p))
15101 {
15102 *strict_overflow_p = true;
15103 return true;
15104 }
15105 }
15106 break;
15107
15108 case MIN_EXPR:
15109 sub_strict_overflow_p = false;
15110 if (tree_expr_nonzero_warnv_p (op0,
15111 &sub_strict_overflow_p)
15112 && tree_expr_nonzero_warnv_p (op1,
15113 &sub_strict_overflow_p))
15114 {
15115 if (sub_strict_overflow_p)
15116 *strict_overflow_p = true;
15117 }
15118 break;
15119
15120 case MAX_EXPR:
15121 sub_strict_overflow_p = false;
15122 if (tree_expr_nonzero_warnv_p (op0,
15123 &sub_strict_overflow_p))
15124 {
15125 if (sub_strict_overflow_p)
15126 *strict_overflow_p = true;
15127
15128 /* When both operands are nonzero, MAX must be too. */
15129 if (tree_expr_nonzero_warnv_p (op1,
15130 strict_overflow_p))
15131 return true;
15132
15133 /* MAX where operand 0 is positive is positive. */
15134 return tree_expr_nonnegative_warnv_p (op0,
15135 strict_overflow_p);
15136 }
15137 /* MAX where operand 1 is positive is positive. */
15138 else if (tree_expr_nonzero_warnv_p (op1,
15139 &sub_strict_overflow_p)
15140 && tree_expr_nonnegative_warnv_p (op1,
15141 &sub_strict_overflow_p))
15142 {
15143 if (sub_strict_overflow_p)
15144 *strict_overflow_p = true;
15145 return true;
15146 }
15147 break;
15148
15149 case BIT_IOR_EXPR:
15150 return (tree_expr_nonzero_warnv_p (op1,
15151 strict_overflow_p)
15152 || tree_expr_nonzero_warnv_p (op0,
15153 strict_overflow_p));
15154
15155 default:
15156 break;
15157 }
15158
15159 return false;
15160 }
15161
15162 /* Return true when T, considered on its own (e.g. a constant,
15163 address or COND_EXPR), is known to be nonzero. Similar logic is
15164 present in nonzero_address_p in rtlanal.c.
15165
15166 If the return value is based on the assumption that signed overflow
15167 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15168 change *STRICT_OVERFLOW_P. */
15169
15170 bool
15171 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15172 {
15173 bool sub_strict_overflow_p;
15174 switch (TREE_CODE (t))
15175 {
15176 case INTEGER_CST:
15177 return !integer_zerop (t);
15178
15179 case ADDR_EXPR:
15180 {
15181 tree base = TREE_OPERAND (t, 0);
15182 if (!DECL_P (base))
15183 base = get_base_address (base);
15184
15185 if (!base)
15186 return false;
15187
15188 /* Weak declarations may link to NULL. Other things may also be NULL,
15189 so protect with -fdelete-null-pointer-checks; variables allocated
15190 on the stack, however, are never NULL. */
15191 if (DECL_P (base)
15192 && (flag_delete_null_pointer_checks
15193 || (DECL_CONTEXT (base)
15194 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15195 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15196 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15197
15198 /* Constants are never weak. */
15199 if (CONSTANT_CLASS_P (base))
15200 return true;
15201
15202 return false;
15203 }
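/* For instance, the address of a stack variable is known nonzero
   unconditionally, the address of a non-weak global only under
   -fdelete-null-pointer-checks, and the address of a weak symbol
   never, since a weak symbol may resolve to NULL.  */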
15204
15205 case COND_EXPR:
15206 sub_strict_overflow_p = false;
15207 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15208 &sub_strict_overflow_p)
15209 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15210 &sub_strict_overflow_p))
15211 {
15212 if (sub_strict_overflow_p)
15213 *strict_overflow_p = true;
15214 return true;
15215 }
15216 break;
15217
15218 default:
15219 break;
15220 }
15221 return false;
15222 }
15223
15224 /* Return true when T is known to be nonzero. Only integral and
15225 pointer types are handled; doing something useful for floating
15226 point would need more work.
15227
15228 If the return value is based on the assumption that signed overflow
15229 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15230 change *STRICT_OVERFLOW_P. */
15231
15232 bool
15233 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15234 {
15235 tree type = TREE_TYPE (t);
15236 enum tree_code code;
15237
15238 /* Doing something useful for floating point would need more work. */
15239 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15240 return false;
15241
15242 code = TREE_CODE (t);
15243 switch (TREE_CODE_CLASS (code))
15244 {
15245 case tcc_unary:
15246 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15247 strict_overflow_p);
15248 case tcc_binary:
15249 case tcc_comparison:
15250 return tree_binary_nonzero_warnv_p (code, type,
15251 TREE_OPERAND (t, 0),
15252 TREE_OPERAND (t, 1),
15253 strict_overflow_p);
15254 case tcc_constant:
15255 case tcc_declaration:
15256 case tcc_reference:
15257 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15258
15259 default:
15260 break;
15261 }
15262
15263 switch (code)
15264 {
15265 case TRUTH_NOT_EXPR:
15266 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15267 strict_overflow_p);
15268
15269 case TRUTH_AND_EXPR:
15270 case TRUTH_OR_EXPR:
15271 case TRUTH_XOR_EXPR:
15272 return tree_binary_nonzero_warnv_p (code, type,
15273 TREE_OPERAND (t, 0),
15274 TREE_OPERAND (t, 1),
15275 strict_overflow_p);
15276
15277 case COND_EXPR:
15278 case CONSTRUCTOR:
15279 case OBJ_TYPE_REF:
15280 case ASSERT_EXPR:
15281 case ADDR_EXPR:
15282 case WITH_SIZE_EXPR:
15283 case SSA_NAME:
15284 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15285
15286 case COMPOUND_EXPR:
15287 case MODIFY_EXPR:
15288 case BIND_EXPR:
15289 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15290 strict_overflow_p);
15291
15292 case SAVE_EXPR:
15293 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15294 strict_overflow_p);
15295
15296 case CALL_EXPR:
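/* alloca is assumed never to return a null pointer.  */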
15297 return alloca_call_p (t);
15298
15299 default:
15300 break;
15301 }
15302 return false;
15303 }
15304
15305 /* Return true when T is known to be nonzero. Handle warnings
15306 about undefined signed overflow. */
15307
15308 bool
15309 tree_expr_nonzero_p (tree t)
15310 {
15311 bool ret, strict_overflow_p;
15312
15313 strict_overflow_p = false;
15314 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15315 if (strict_overflow_p)
15316 fold_overflow_warning (("assuming signed overflow does not occur when "
15317 "determining that expression is always "
15318 "non-zero"),
15319 WARN_STRICT_OVERFLOW_MISC);
15320 return ret;
15321 }
15322
15323 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15324 attempt to fold the expression to a constant without modifying TYPE,
15325 OP0 or OP1.
15326
15327 If the expression can be simplified to a constant, then return
15328 the constant. If the expression cannot be simplified to a
15329 constant, then return NULL_TREE. */
15330
15331 tree
15332 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15333 {
15334 tree tem = fold_binary (code, type, op0, op1);
15335 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15336 }
15337
15338 /* Given the components of a unary expression CODE, TYPE and OP0,
15339 attempt to fold the expression to a constant without modifying
15340 TYPE or OP0.
15341
15342 If the expression can be simplified to a constant, then return
15343 the constant. If the expression cannot be simplified to a
15344 constant, then return NULL_TREE. */
15345
15346 tree
15347 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15348 {
15349 tree tem = fold_unary (code, type, op0);
15350 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15351 }
15352
15353 /* If EXP represents referencing an element in a constant string
15354 (either via pointer arithmetic or array indexing), return the
15355 tree representing the value accessed; otherwise return NULL. */
15356
15357 tree
15358 fold_read_from_constant_string (tree exp)
15359 {
15360 if ((TREE_CODE (exp) == INDIRECT_REF
15361 || TREE_CODE (exp) == ARRAY_REF)
15362 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15363 {
15364 tree exp1 = TREE_OPERAND (exp, 0);
15365 tree index;
15366 tree string;
15367 location_t loc = EXPR_LOCATION (exp);
15368
15369 if (TREE_CODE (exp) == INDIRECT_REF)
15370 string = string_constant (exp1, &index);
15371 else
15372 {
15373 tree low_bound = array_ref_low_bound (exp);
15374 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15375
15376 /* Optimize the special case of a zero lower bound.
15377
15378 We convert the low_bound to sizetype to avoid some problems
15379 with constant folding. (E.g. suppose the lower bound is 1,
15380 and its mode is QI. Without the conversion, (ARRAY
15381 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15382 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15383 if (! integer_zerop (low_bound))
15384 index = size_diffop_loc (loc, index,
15385 fold_convert_loc (loc, sizetype, low_bound));
15386
15387 string = exp1;
15388 }
15389
15390 if (string
15391 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15392 && TREE_CODE (string) == STRING_CST
15393 && TREE_CODE (index) == INTEGER_CST
15394 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15395 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15396 == MODE_INT)
15397 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15398 return build_int_cst_type (TREE_TYPE (exp),
15399 (TREE_STRING_POINTER (string)
15400 [TREE_INT_CST_LOW (index)]));
15401 }
15402 return NULL;
15403 }
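/* For example, the ARRAY_REF "abc"[1] passes all of the checks above
   and is folded to the character constant 'b'.  */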
15404
15405 /* Return the tree for neg (ARG0) when ARG0 is known to be an
15406 integer, real, or fixed-point constant.
15407
15408 TYPE is the type of the result. */
15409
15410 static tree
15411 fold_negate_const (tree arg0, tree type)
15412 {
15413 tree t = NULL_TREE;
15414
15415 switch (TREE_CODE (arg0))
15416 {
15417 case INTEGER_CST:
15418 {
15419 double_int val = tree_to_double_int (arg0);
15420 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15421
15422 t = force_fit_type_double (type, val, 1,
15423 (overflow | TREE_OVERFLOW (arg0))
15424 && !TYPE_UNSIGNED (type));
15425 break;
15426 }
15427
15428 case REAL_CST:
15429 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15430 break;
15431
15432 case FIXED_CST:
15433 {
15434 FIXED_VALUE_TYPE f;
15435 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15436 &(TREE_FIXED_CST (arg0)), NULL,
15437 TYPE_SATURATING (type));
15438 t = build_fixed (type, f);
15439 /* Propagate overflow flags. */
15440 if (overflow_p | TREE_OVERFLOW (arg0))
15441 TREE_OVERFLOW (t) = 1;
15442 break;
15443 }
15444
15445 default:
15446 gcc_unreachable ();
15447 }
15448
15449 return t;
15450 }
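/* E.g. negating the most negative INTEGER_CST of a signed 32-bit
   type wraps back to the same value; the overflow is recorded by
   setting TREE_OVERFLOW on the result.  */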
15451
15452 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15453 an integer constant or real constant.
15454
15455 TYPE is the type of the result. */
15456
15457 tree
15458 fold_abs_const (tree arg0, tree type)
15459 {
15460 tree t = NULL_TREE;
15461
15462 switch (TREE_CODE (arg0))
15463 {
15464 case INTEGER_CST:
15465 {
15466 double_int val = tree_to_double_int (arg0);
15467
15468 /* If the value is unsigned or non-negative, then the absolute value
15469 is the same as the ordinary value. */
15470 if (TYPE_UNSIGNED (type)
15471 || !double_int_negative_p (val))
15472 t = arg0;
15473
15474 /* If the value is negative, then the absolute value is
15475 its negation. */
15476 else
15477 {
15478 int overflow;
15479
15480 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15481 t = force_fit_type_double (type, val, -1,
15482 overflow | TREE_OVERFLOW (arg0));
15483 }
15484 }
15485 break;
15486
15487 case REAL_CST:
15488 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15489 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15490 else
15491 t = arg0;
15492 break;
15493
15494 default:
15495 gcc_unreachable ();
15496 }
15497
15498 return t;
15499 }
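/* E.g. fold_abs_const on the REAL_CST -3.5 yields 3.5, while on the
   most negative INTEGER_CST of a signed type the negation wraps and
   the result carries TREE_OVERFLOW.  */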
15500
15501 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15502 constant. TYPE is the type of the result. */
15503
15504 static tree
15505 fold_not_const (const_tree arg0, tree type)
15506 {
15507 double_int val;
15508
15509 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15510
15511 val = double_int_not (tree_to_double_int (arg0));
15512 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15513 }
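/* E.g. fold_not_const on INTEGER_CST 5 in a signed 32-bit type
   yields -6, since every bit is inverted.  */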
15514
15515 /* Given CODE, a relational operator, the target type, TYPE and two
15516 constant operands OP0 and OP1, return the result of the
15517 relational operation. If the result is not a compile time
15518 constant, then return NULL_TREE. */
15519
15520 static tree
15521 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15522 {
15523 int result, invert;
15524
15525 /* From here on, the only cases we handle are when the result is
15526 known to be a constant. */
15527
15528 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15529 {
15530 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15531 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15532
15533 /* Handle the cases where either operand is a NaN. */
15534 if (real_isnan (c0) || real_isnan (c1))
15535 {
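/* E.g. NaN == NaN folds to false and NaN != NaN folds to true,
   while ordered comparisons such as NaN < NaN fold to false only
   when trapping math is disabled, because the comparison may raise
   an invalid-operand exception.  */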
15536 switch (code)
15537 {
15538 case EQ_EXPR:
15539 case ORDERED_EXPR:
15540 result = 0;
15541 break;
15542
15543 case NE_EXPR:
15544 case UNORDERED_EXPR:
15545 case UNLT_EXPR:
15546 case UNLE_EXPR:
15547 case UNGT_EXPR:
15548 case UNGE_EXPR:
15549 case UNEQ_EXPR:
15550 result = 1;
15551 break;
15552
15553 case LT_EXPR:
15554 case LE_EXPR:
15555 case GT_EXPR:
15556 case GE_EXPR:
15557 case LTGT_EXPR:
15558 if (flag_trapping_math)
15559 return NULL_TREE;
15560 result = 0;
15561 break;
15562
15563 default:
15564 gcc_unreachable ();
15565 }
15566
15567 return constant_boolean_node (result, type);
15568 }
15569
15570 return constant_boolean_node (real_compare (code, c0, c1), type);
15571 }
15572
15573 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15574 {
15575 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15576 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15577 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15578 }
15579
15580 /* Handle equality/inequality of complex constants. */
15581 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15582 {
15583 tree rcond = fold_relational_const (code, type,
15584 TREE_REALPART (op0),
15585 TREE_REALPART (op1));
15586 tree icond = fold_relational_const (code, type,
15587 TREE_IMAGPART (op0),
15588 TREE_IMAGPART (op1));
15589 if (code == EQ_EXPR)
15590 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15591 else if (code == NE_EXPR)
15592 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15593 else
15594 return NULL_TREE;
15595 }
15596
15597 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15598
15599 To compute GT, swap the arguments and do LT.
15600 To compute GE, do LT and invert the result.
15601 To compute LE, swap the arguments, do LT and invert the result.
15602 To compute NE, do EQ and invert the result.
15603
15604 Therefore, the code below must handle only EQ and LT. */
15605
15606 if (code == LE_EXPR || code == GT_EXPR)
15607 {
15608 tree tem = op0;
15609 op0 = op1;
15610 op1 = tem;
15611 code = swap_tree_comparison (code);
15612 }
15613
15614 /* Note that it is safe to invert for real values here because we
15615 have already handled the one case where it matters. */
15616
15617 invert = 0;
15618 if (code == NE_EXPR || code == GE_EXPR)
15619 {
15620 invert = 1;
15621 code = invert_tree_comparison (code, false);
15622 }
15623
15624 /* Compute a result for LT or EQ if args permit;
15625 otherwise return NULL_TREE. */
15626 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15627 {
15628 if (code == EQ_EXPR)
15629 result = tree_int_cst_equal (op0, op1);
15630 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15631 result = INT_CST_LT_UNSIGNED (op0, op1);
15632 else
15633 result = INT_CST_LT (op0, op1);
15634 }
15635 else
15636 return NULL_TREE;
15637
15638 if (invert)
15639 result ^= 1;
15640 return constant_boolean_node (result, type);
15641 }
15642
15643 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15644 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15645 itself. */
15646
15647 tree
15648 fold_build_cleanup_point_expr (tree type, tree expr)
15649 {
15650 /* If the expression does not have side effects then we don't have to wrap
15651 it with a cleanup point expression. */
15652 if (!TREE_SIDE_EFFECTS (expr))
15653 return expr;
15654
15655 /* If the expression is a return, check whether the expression inside
15656 the return, or the right-hand side of the modify expression inside
15657 the return, has no side effects. If either has none, we don't need
15658 to wrap the expression in a cleanup point expression. Note we don't
15659 check the left-hand side of the modify because it should always be a return decl. */
15660 if (TREE_CODE (expr) == RETURN_EXPR)
15661 {
15662 tree op = TREE_OPERAND (expr, 0);
15663 if (!op || !TREE_SIDE_EFFECTS (op))
15664 return expr;
15665 op = TREE_OPERAND (op, 1);
15666 if (!TREE_SIDE_EFFECTS (op))
15667 return expr;
15668 }
15669
15670 return build1 (CLEANUP_POINT_EXPR, type, expr);
15671 }
15672
15673 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15674 of an indirection through OP0, or NULL_TREE if no simplification is
15675 possible. */
15676
15677 tree
15678 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15679 {
15680 tree sub = op0;
15681 tree subtype;
15682
15683 STRIP_NOPS (sub);
15684 subtype = TREE_TYPE (sub);
15685 if (!POINTER_TYPE_P (subtype))
15686 return NULL_TREE;
15687
15688 if (TREE_CODE (sub) == ADDR_EXPR)
15689 {
15690 tree op = TREE_OPERAND (sub, 0);
15691 tree optype = TREE_TYPE (op);
15692 /* *&CONST_DECL -> to the value of the const decl. */
15693 if (TREE_CODE (op) == CONST_DECL)
15694 return DECL_INITIAL (op);
15695 /* *&p => p; make sure to handle *&"str"[cst] here. */
15696 if (type == optype)
15697 {
15698 tree fop = fold_read_from_constant_string (op);
15699 if (fop)
15700 return fop;
15701 else
15702 return op;
15703 }
15704 /* *(foo *)&fooarray => fooarray[0] */
15705 else if (TREE_CODE (optype) == ARRAY_TYPE
15706 && type == TREE_TYPE (optype)
15707 && (!in_gimple_form
15708 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15709 {
15710 tree type_domain = TYPE_DOMAIN (optype);
15711 tree min_val = size_zero_node;
15712 if (type_domain && TYPE_MIN_VALUE (type_domain))
15713 min_val = TYPE_MIN_VALUE (type_domain);
15714 if (in_gimple_form
15715 && TREE_CODE (min_val) != INTEGER_CST)
15716 return NULL_TREE;
15717 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15718 NULL_TREE, NULL_TREE);
15719 }
15720 /* *(foo *)&complexfoo => __real__ complexfoo */
15721 else if (TREE_CODE (optype) == COMPLEX_TYPE
15722 && type == TREE_TYPE (optype))
15723 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15724 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15725 else if (TREE_CODE (optype) == VECTOR_TYPE
15726 && type == TREE_TYPE (optype))
15727 {
15728 tree part_width = TYPE_SIZE (type);
15729 tree index = bitsize_int (0);
15730 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15731 }
15732 }
15733
15734 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15735 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15736 {
15737 tree op00 = TREE_OPERAND (sub, 0);
15738 tree op01 = TREE_OPERAND (sub, 1);
15739
15740 STRIP_NOPS (op00);
15741 if (TREE_CODE (op00) == ADDR_EXPR)
15742 {
15743 tree op00type;
15744 op00 = TREE_OPERAND (op00, 0);
15745 op00type = TREE_TYPE (op00);
15746
15747 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15748 if (TREE_CODE (op00type) == VECTOR_TYPE
15749 && type == TREE_TYPE (op00type))
15750 {
15751 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15752 tree part_width = TYPE_SIZE (type);
15753 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15754 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15755 tree index = bitsize_int (indexi);
15756
15757 if (offset/part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15758 return fold_build3_loc (loc,
15759 BIT_FIELD_REF, type, op00,
15760 part_width, index);
15761
15762 }
15763 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15764 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15765 && type == TREE_TYPE (op00type))
15766 {
15767 tree size = TYPE_SIZE_UNIT (type);
15768 if (tree_int_cst_equal (size, op01))
15769 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15770 }
15771 /* ((foo *)&fooarray)[1] => fooarray[1] */
15772 else if (TREE_CODE (op00type) == ARRAY_TYPE
15773 && type == TREE_TYPE (op00type))
15774 {
15775 tree type_domain = TYPE_DOMAIN (op00type);
15776 tree min_val = size_zero_node;
15777 if (type_domain && TYPE_MIN_VALUE (type_domain))
15778 min_val = TYPE_MIN_VALUE (type_domain);
15779 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15780 TYPE_SIZE_UNIT (type));
15781 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15782 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15783 NULL_TREE, NULL_TREE);
15784 }
15785 }
15786 }
15787
15788 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15789 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15790 && type == TREE_TYPE (TREE_TYPE (subtype))
15791 && (!in_gimple_form
15792 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15793 {
15794 tree type_domain;
15795 tree min_val = size_zero_node;
15796 sub = build_fold_indirect_ref_loc (loc, sub);
15797 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15798 if (type_domain && TYPE_MIN_VALUE (type_domain))
15799 min_val = TYPE_MIN_VALUE (type_domain);
15800 if (in_gimple_form
15801 && TREE_CODE (min_val) != INTEGER_CST)
15802 return NULL_TREE;
15803 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15804 NULL_TREE);
15805 }
15806
15807 return NULL_TREE;
15808 }
15809
15810 /* Builds an expression for an indirection through T, simplifying some
15811 cases. */
15812
15813 tree
15814 build_fold_indirect_ref_loc (location_t loc, tree t)
15815 {
15816 tree type = TREE_TYPE (TREE_TYPE (t));
15817 tree sub = fold_indirect_ref_1 (loc, type, t);
15818
15819 if (sub)
15820 return sub;
15821
15822 return build1_loc (loc, INDIRECT_REF, type, t);
15823 }
15824
15825 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15826
15827 tree
15828 fold_indirect_ref_loc (location_t loc, tree t)
15829 {
15830 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15831
15832 if (sub)
15833 return sub;
15834 else
15835 return t;
15836 }
15837
15838 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15839 whose result is ignored. The type of the returned tree need not be
15840 the same as the original expression. */
15841
15842 tree
15843 fold_ignored_result (tree t)
15844 {
15845 if (!TREE_SIDE_EFFECTS (t))
15846 return integer_zero_node;
15847
15848 for (;;)
15849 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15850 {
15851 case tcc_unary:
15852 t = TREE_OPERAND (t, 0);
15853 break;
15854
15855 case tcc_binary:
15856 case tcc_comparison:
15857 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15858 t = TREE_OPERAND (t, 0);
15859 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15860 t = TREE_OPERAND (t, 1);
15861 else
15862 return t;
15863 break;
15864
15865 case tcc_expression:
15866 switch (TREE_CODE (t))
15867 {
15868 case COMPOUND_EXPR:
15869 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15870 return t;
15871 t = TREE_OPERAND (t, 0);
15872 break;
15873
15874 case COND_EXPR:
15875 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15876 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15877 return t;
15878 t = TREE_OPERAND (t, 0);
15879 break;
15880
15881 default:
15882 return t;
15883 }
15884 break;
15885
15886 default:
15887 return t;
15888 }
15889 }
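/* For example, for the ignored expression "x + f ()" this returns
   "f ()", and for an expression with no side effects at all it
   returns integer_zero_node.  */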
15890
15891 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15892 This can only be applied to objects of a sizetype. */
15893
15894 tree
15895 round_up_loc (location_t loc, tree value, int divisor)
15896 {
15897 tree div = NULL_TREE;
15898
15899 gcc_assert (divisor > 0);
15900 if (divisor == 1)
15901 return value;
15902
15903 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15904 have to do anything. Only do this when we are not given a
15905 constant, because for a constant this check is more expensive
15906 than just doing the rounding. */
15907 if (TREE_CODE (value) != INTEGER_CST)
15908 {
15909 div = build_int_cst (TREE_TYPE (value), divisor);
15910
15911 if (multiple_of_p (TREE_TYPE (value), value, div))
15912 return value;
15913 }
15914
15915 /* If divisor is a power of two, simplify this to bit manipulation. */
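/* DIVISOR & -DIVISOR isolates the lowest set bit, so it equals
   DIVISOR exactly when a single bit is set.  E.g. for VALUE 13 and
   DIVISOR 8, the computation below yields (13 + 7) & -8 == 16.  */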
15916 if (divisor == (divisor & -divisor))
15917 {
15918 if (TREE_CODE (value) == INTEGER_CST)
15919 {
15920 double_int val = tree_to_double_int (value);
15921 bool overflow_p;
15922
15923 if ((val.low & (divisor - 1)) == 0)
15924 return value;
15925
15926 overflow_p = TREE_OVERFLOW (value);
15927 val.low &= ~(divisor - 1);
15928 val.low += divisor;
15929 if (val.low == 0)
15930 {
15931 val.high++;
15932 if (val.high == 0)
15933 overflow_p = true;
15934 }
15935
15936 return force_fit_type_double (TREE_TYPE (value), val,
15937 -1, overflow_p);
15938 }
15939 else
15940 {
15941 tree t;
15942
15943 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15944 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15945 t = build_int_cst (TREE_TYPE (value), -divisor);
15946 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15947 }
15948 }
15949 else
15950 {
15951 if (!div)
15952 div = build_int_cst (TREE_TYPE (value), divisor);
15953 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15954 value = size_binop_loc (loc, MULT_EXPR, value, div);
15955 }
15956
15957 return value;
15958 }
15959
15960 /* Likewise, but round down. */
15961
15962 tree
15963 round_down_loc (location_t loc, tree value, int divisor)
15964 {
15965 tree div = NULL_TREE;
15966
15967 gcc_assert (divisor > 0);
15968 if (divisor == 1)
15969 return value;
15970
15971 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15972 have to do anything. Only do this when we are not given a
15973 constant, because for a constant this check is more expensive
15974 than just doing the rounding. */
15975 if (TREE_CODE (value) != INTEGER_CST)
15976 {
15977 div = build_int_cst (TREE_TYPE (value), divisor);
15978
15979 if (multiple_of_p (TREE_TYPE (value), value, div))
15980 return value;
15981 }
15982
15983 /* If divisor is a power of two, simplify this to bit manipulation. */
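/* E.g. for VALUE 13 and DIVISOR 8, the masking below yields
   13 & -8 == 8.  */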
15984 if (divisor == (divisor & -divisor))
15985 {
15986 tree t;
15987
15988 t = build_int_cst (TREE_TYPE (value), -divisor);
15989 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15990 }
15991 else
15992 {
15993 if (!div)
15994 div = build_int_cst (TREE_TYPE (value), divisor);
15995 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15996 value = size_binop_loc (loc, MULT_EXPR, value, div);
15997 }
15998
15999 return value;
16000 }
16001
16002 /* Returns the pointer to the base of the object addressed by EXP and
16003 extracts the information about the offset of the access, storing it
16004 in PBITPOS and POFFSET. */
16005
16006 static tree
16007 split_address_to_core_and_offset (tree exp,
16008 HOST_WIDE_INT *pbitpos, tree *poffset)
16009 {
16010 tree core;
16011 enum machine_mode mode;
16012 int unsignedp, volatilep;
16013 HOST_WIDE_INT bitsize;
16014 location_t loc = EXPR_LOCATION (exp);
16015
16016 if (TREE_CODE (exp) == ADDR_EXPR)
16017 {
16018 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16019 poffset, &mode, &unsignedp, &volatilep,
16020 false);
16021 core = build_fold_addr_expr_loc (loc, core);
16022 }
16023 else
16024 {
16025 core = exp;
16026 *pbitpos = 0;
16027 *poffset = NULL_TREE;
16028 }
16029
16030 return core;
16031 }
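/* For example, for EXP == &s.b where field B sits four bytes into S,
   the core is &s, *PBITPOS is 32 (assuming 8-bit units) and *POFFSET
   is NULL_TREE.  */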
16032
16033 /* Returns true if addresses of E1 and E2 differ by a constant, false
16034 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16035
16036 bool
16037 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16038 {
16039 tree core1, core2;
16040 HOST_WIDE_INT bitpos1, bitpos2;
16041 tree toffset1, toffset2, tdiff, type;
16042
16043 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16044 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16045
16046 if (bitpos1 % BITS_PER_UNIT != 0
16047 || bitpos2 % BITS_PER_UNIT != 0
16048 || !operand_equal_p (core1, core2, 0))
16049 return false;
16050
16051 if (toffset1 && toffset2)
16052 {
16053 type = TREE_TYPE (toffset1);
16054 if (type != TREE_TYPE (toffset2))
16055 toffset2 = fold_convert (type, toffset2);
16056
16057 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16058 if (!cst_and_fits_in_hwi (tdiff))
16059 return false;
16060
16061 *diff = int_cst_value (tdiff);
16062 }
16063 else if (toffset1 || toffset2)
16064 {
16065 /* If only one of the offsets is non-constant, the difference cannot
16066 be a constant. */
16067 return false;
16068 }
16069 else
16070 *diff = 0;
16071
16072 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16073 return true;
16074 }
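/* For example, given int A[10], the addresses &A[3] and &A[1] share
   the core &A and differ by the constant 2 * sizeof (int) bytes, so
   *DIFF is set to 8 on a target with 32-bit int and true is
   returned.  */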
16075
16076 /* Simplify the floating point expression EXP when the sign of the
16077 result is not significant. Return NULL_TREE if no simplification
16078 is possible. */
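/* For example, given -x * -y both NEGATE_EXPRs are stripped and the
   product is rebuilt as x * y; a call to copysign (x, y) is reduced
   to its first argument.  */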
16079
16080 tree
16081 fold_strip_sign_ops (tree exp)
16082 {
16083 tree arg0, arg1;
16084 location_t loc = EXPR_LOCATION (exp);
16085
16086 switch (TREE_CODE (exp))
16087 {
16088 case ABS_EXPR:
16089 case NEGATE_EXPR:
16090 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16091 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16092
16093 case MULT_EXPR:
16094 case RDIV_EXPR:
16095 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16096 return NULL_TREE;
16097 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16098 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16099 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16100 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16101 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16102 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16103 break;
16104
16105 case COMPOUND_EXPR:
16106 arg0 = TREE_OPERAND (exp, 0);
16107 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16108 if (arg1)
16109 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16110 break;
16111
16112 case COND_EXPR:
16113 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16114 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16115 if (arg0 || arg1)
16116 return fold_build3_loc (loc,
16117 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16118 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16119 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16120 break;
16121
16122 case CALL_EXPR:
16123 {
16124 const enum built_in_function fcode = builtin_mathfn_code (exp);
16125 switch (fcode)
16126 {
16127 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16128 /* Strip copysign function call, return the 1st argument. */
16129 arg0 = CALL_EXPR_ARG (exp, 0);
16130 arg1 = CALL_EXPR_ARG (exp, 1);
16131 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16132
16133 default:
16134 /* Strip sign ops from the argument of "odd" math functions. */
16135 if (negate_mathfn_p (fcode))
16136 {
16137 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16138 if (arg0)
16139 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16140 }
16141 break;
16142 }
16143 }
16144 break;
16145
16146 default:
16147 break;
16148 }
16149 return NULL_TREE;
16150 }