/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
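
/* Editorial illustration (not part of the original sources): the
   encoding gives one bit each to "less than", "equal", "greater than"
   and "unordered", so every compound operator is just the bitwise OR
   of its primitive parts:

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)
     COMPCODE_GE   == (COMPCODE_GT | COMPCODE_EQ)
     COMPCODE_LTGT == (COMPCODE_LT | COMPCODE_GT)
     COMPCODE_NE   == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)

   This is what lets a transformation such as folding
   (a < b) || (a == b) into a <= b be computed as a single bitwise OR
   of the two comparison codes.  */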

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
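
/* A worked example of the macro above (editorial, assuming 8-bit
   two's complement values for brevity; HOST_WIDE_INT behaves the same
   way at its own width).  Let a = 0x7f (+127), b = 0x01 (+1), so
   sum = 0x80 (-128):

     ~(a ^ b) == ~0x7e == 0x81   -- sign bit set: a and b agree in sign.
     (a ^ sum) ==         0xff   -- sign bit set: a and sum disagree.

   Their AND has the sign bit set, so the `< 0' test reports the
   overflow.  */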
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two: this does the
     correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
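
/* Editorial usage sketch for the function above (size_int is assumed
   here only as a convenient way to build sizetype constants):

     div_if_zero_remainder (EXACT_DIV_EXPR, size_int (12), size_int (4))

   yields a constant 3, whereas dividing 12 by 5 leaves remainder 2
   and therefore returns NULL_TREE.  */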
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
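
/* Editorial sketch of the intended calling pattern for the deferral
   machinery above; the wrapper below is hypothetical, not a function
   of this file.  */
#if 0
static tree
fold_quietly_unless_constant (tree expr, gimple stmt)
{
  tree t;

  fold_defer_overflow_warnings ();
  t = fold (expr);
  /* Issue the deferred warning only if the folded result will
     actually be used, i.e. if it turned into a constant.  */
  fold_undefer_overflow_warnings (TREE_CONSTANT (t), stmt, 0);
  return t;
}
#endif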

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
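
/* Editorial example: for a 16-bit signed type the only constant this
   rejects is -32768, whose low bits masked to the type's precision
   are 0x8000, exactly the (unsigned HOST_WIDE_INT) 1 << (prec - 1)
   pattern tested above; every other value, including 0 and 32767,
   may be negated safely.  */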

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
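
/* Editorial example of the decomposition performed above: splitting
   IN = a - 5 with CODE == PLUS_EXPR and NEGATE_P false stores the
   subtracted literal 5 in *MINUS_LITP, leaves *LITP and *CONP null,
   and returns the variable part a; splitting IN = x + 3 stores 3 in
   *LITP and returns x.  */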

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
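
/* Editorial example: adding 1 to the INT_MAX constant of a 32-bit
   signed type wraps the double_int result to INT_MIN, and because the
   type is signed the overflow reported by add_double is propagated
   into TREE_OVERFLOW of the returned constant; with NOTRUNC nonzero
   the raw value is kept via build_int_cst_wide instead of being refit
   to the type's precision.  */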

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, 0);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
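
/* Editorial sketch of the "wide" complex division used above (Smith's
   algorithm), written out for plain doubles; the branch selected by
   integer_nonzerop (compare) corresponds to the |br| < |bi| test
   below.  Assumes fabs from <math.h>; not part of the original
   file.  */
#if 0
static void
complex_div_wide_sketch (double ar, double ai, double br, double bi,
                         double *tr, double *ti)
{
  if (fabs (br) < fabs (bi))
    {
      double ratio = br / bi;
      double div = (br * ratio) + bi;
      *tr = ((ar * ratio) + ai) / div;
      *ti = ((ai * ratio) - ar) / div;
    }
  else
    {
      double ratio = bi / br;
      double div = (bi * ratio) + br;
      *tr = ((ai * ratio) + ar) / div;
      *ti = (ai - (ar * ratio)) / div;
    }
}
#endif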

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
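
/* Editorial example: size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds immediately to the sizetype constant 12 through
   int_const_binop, while non-constant operands simply produce the
   folded PLUS_EXPR built at the end of the function above.  */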

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in a signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
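
/* Editorial example: for sizetype operands 3 and 5 the result type is
   ssizetype, and the final branch above computes -(5 - 3) = -2 rather
   than the unsigned subtraction 3 - 5, which would wrap before the
   conversion to the signed type.  */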
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
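
/* Editorial examples of the saturating semantics above, for a 32-bit
   signed target type: (int) 1.0e10 exceeds TYPE_MAX_VALUE and folds
   to 2147483647 with TREE_OVERFLOW set; (int) -1.0e10 folds to
   -2147483648 likewise; a NaN folds to 0, again with the overflow
   flag set.  */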

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     If the fractional bits are not zero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
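
/* Editorial example of the rounding above: converting the signed
   fixed-point value -2.5 to an integer, the arithmetic right shift by
   FBIT yields -3 (it rounds toward negative infinity); shifting back
   gives -3.0 != -2.5, so fractional bits were lost and 1 is added,
   producing the correctly truncated result -2.  */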

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
1845
1846 /* Convert expression ARG to type TYPE. Used by the middle-end for
1847 simple conversions in preference to calling the front-end's convert. */
1848
1849 tree
1850 fold_convert_loc (location_t loc, tree type, tree arg)
1851 {
1852 tree orig = TREE_TYPE (arg);
1853 tree tem;
1854
1855 if (type == orig)
1856 return arg;
1857
1858 if (TREE_CODE (arg) == ERROR_MARK
1859 || TREE_CODE (type) == ERROR_MARK
1860 || TREE_CODE (orig) == ERROR_MARK)
1861 return error_mark_node;
1862
1863 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1864 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1865
1866 switch (TREE_CODE (type))
1867 {
1868 case POINTER_TYPE:
1869 case REFERENCE_TYPE:
1870 /* Handle conversions between pointers to different address spaces. */
1871 if (POINTER_TYPE_P (orig)
1872 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1873 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1874 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1875 /* fall through */
1876
1877 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1878 case OFFSET_TYPE:
1879 if (TREE_CODE (arg) == INTEGER_CST)
1880 {
1881 tem = fold_convert_const (NOP_EXPR, type, arg);
1882 if (tem != NULL_TREE)
1883 return tem;
1884 }
1885 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1886 || TREE_CODE (orig) == OFFSET_TYPE)
1887 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1888 if (TREE_CODE (orig) == COMPLEX_TYPE)
1889 return fold_convert_loc (loc, type,
1890 fold_build1_loc (loc, REALPART_EXPR,
1891 TREE_TYPE (orig), arg));
1892 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1893 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1894 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1895
1896 case REAL_TYPE:
1897 if (TREE_CODE (arg) == INTEGER_CST)
1898 {
1899 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1900 if (tem != NULL_TREE)
1901 return tem;
1902 }
1903 else if (TREE_CODE (arg) == REAL_CST)
1904 {
1905 tem = fold_convert_const (NOP_EXPR, type, arg);
1906 if (tem != NULL_TREE)
1907 return tem;
1908 }
1909 else if (TREE_CODE (arg) == FIXED_CST)
1910 {
1911 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1912 if (tem != NULL_TREE)
1913 return tem;
1914 }
1915
1916 switch (TREE_CODE (orig))
1917 {
1918 case INTEGER_TYPE:
1919 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1920 case POINTER_TYPE: case REFERENCE_TYPE:
1921 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1922
1923 case REAL_TYPE:
1924 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1925
1926 case FIXED_POINT_TYPE:
1927 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1928
1929 case COMPLEX_TYPE:
1930 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1931 return fold_convert_loc (loc, type, tem);
1932
1933 default:
1934 gcc_unreachable ();
1935 }
1936
1937 case FIXED_POINT_TYPE:
1938 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1939 || TREE_CODE (arg) == REAL_CST)
1940 {
1941 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1942 if (tem != NULL_TREE)
1943 goto fold_convert_exit;
1944 }
1945
1946 switch (TREE_CODE (orig))
1947 {
1948 case FIXED_POINT_TYPE:
1949 case INTEGER_TYPE:
1950 case ENUMERAL_TYPE:
1951 case BOOLEAN_TYPE:
1952 case REAL_TYPE:
1953 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1954
1955 case COMPLEX_TYPE:
1956 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1957 return fold_convert_loc (loc, type, tem);
1958
1959 default:
1960 gcc_unreachable ();
1961 }
1962
1963 case COMPLEX_TYPE:
1964 switch (TREE_CODE (orig))
1965 {
1966 case INTEGER_TYPE:
1967 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1968 case POINTER_TYPE: case REFERENCE_TYPE:
1969 case REAL_TYPE:
1970 case FIXED_POINT_TYPE:
1971 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1972 fold_convert_loc (loc, TREE_TYPE (type), arg),
1973 fold_convert_loc (loc, TREE_TYPE (type),
1974 integer_zero_node));
1975 case COMPLEX_TYPE:
1976 {
1977 tree rpart, ipart;
1978
1979 if (TREE_CODE (arg) == COMPLEX_EXPR)
1980 {
1981 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1982 TREE_OPERAND (arg, 0));
1983 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1984 TREE_OPERAND (arg, 1));
1985 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1986 }
1987
1988 arg = save_expr (arg);
1989 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1990 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1991 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1992 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1993 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1994 }
1995
1996 default:
1997 gcc_unreachable ();
1998 }
1999
2000 case VECTOR_TYPE:
2001 if (integer_zerop (arg))
2002 return build_zero_vector (type);
2003 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2004 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2005 || TREE_CODE (orig) == VECTOR_TYPE);
2006 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2007
2008 case VOID_TYPE:
2009 tem = fold_ignored_result (arg);
2010 if (TREE_CODE (tem) == MODIFY_EXPR)
2011 goto fold_convert_exit;
2012 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2013
2014 default:
2015 gcc_unreachable ();
2016 }
2017 fold_convert_exit:
2018 protected_set_expr_location (tem, loc);
2019 return tem;
2020 }
2021 \f
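/* A sketch of the conversions above with a hypothetical _Complex double
   operand C: converting it to double takes the REAL_TYPE/COMPLEX_TYPE
   arm,

     tree d = fold_convert_loc (loc, double_type_node, c);

   which reduces to fold_convert_loc applied to REALPART_EXPR <c>, simply
   dropping the imaginary part.  Complex-to-complex conversions instead
   wrap C in a SAVE_EXPR so that both parts can be extracted safely.  */
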
2022 /* Return false if expr can be assumed not to be an lvalue, true
2023 otherwise. */
2024
2025 static bool
2026 maybe_lvalue_p (const_tree x)
2027 {
2028 /* We only need to wrap lvalue tree codes. */
2029 switch (TREE_CODE (x))
2030 {
2031 case VAR_DECL:
2032 case PARM_DECL:
2033 case RESULT_DECL:
2034 case LABEL_DECL:
2035 case FUNCTION_DECL:
2036 case SSA_NAME:
2037
2038 case COMPONENT_REF:
2039 case MEM_REF:
2040 case INDIRECT_REF:
2041 case ARRAY_REF:
2042 case ARRAY_RANGE_REF:
2043 case BIT_FIELD_REF:
2044 case OBJ_TYPE_REF:
2045
2046 case REALPART_EXPR:
2047 case IMAGPART_EXPR:
2048 case PREINCREMENT_EXPR:
2049 case PREDECREMENT_EXPR:
2050 case SAVE_EXPR:
2051 case TRY_CATCH_EXPR:
2052 case WITH_CLEANUP_EXPR:
2053 case COMPOUND_EXPR:
2054 case MODIFY_EXPR:
2055 case TARGET_EXPR:
2056 case COND_EXPR:
2057 case BIND_EXPR:
2058 break;
2059
2060 default:
2061 /* Assume the worst for front-end tree codes. */
2062 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2063 break;
2064 return false;
2065 }
2066
2067 return true;
2068 }
2069
2070 /* Return an expr equal to X but certainly not valid as an lvalue. */
2071
2072 tree
2073 non_lvalue_loc (location_t loc, tree x)
2074 {
2075 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2076 us. */
2077 if (in_gimple_form)
2078 return x;
2079
2080 if (! maybe_lvalue_p (x))
2081 return x;
2082 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2083 SET_EXPR_LOCATION (x, loc);
2084 return x;
2085 }
2086
2087 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2088 Zero means allow extended lvalues. */
2089
2090 int pedantic_lvalues;
2091
2092 /* When pedantic, return an expr equal to X but certainly not valid as a
2093 pedantic lvalue. Otherwise, return X. */
2094
2095 static tree
2096 pedantic_non_lvalue_loc (location_t loc, tree x)
2097 {
2098 if (pedantic_lvalues)
2099 return non_lvalue_loc (loc, x);
2100 protected_set_expr_location (x, loc);
2101 return x;
2102 }
2103 \f
2104 /* Given a tree comparison code, return the code that is the logical inverse
2105 of the given code. It is not safe to do this for floating-point
2106 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS as
2107 well: if reversing the comparison is unsafe, return ERROR_MARK. */
2108
2109 enum tree_code
2110 invert_tree_comparison (enum tree_code code, bool honor_nans)
2111 {
2112 if (honor_nans && flag_trapping_math)
2113 return ERROR_MARK;
2114
2115 switch (code)
2116 {
2117 case EQ_EXPR:
2118 return NE_EXPR;
2119 case NE_EXPR:
2120 return EQ_EXPR;
2121 case GT_EXPR:
2122 return honor_nans ? UNLE_EXPR : LE_EXPR;
2123 case GE_EXPR:
2124 return honor_nans ? UNLT_EXPR : LT_EXPR;
2125 case LT_EXPR:
2126 return honor_nans ? UNGE_EXPR : GE_EXPR;
2127 case LE_EXPR:
2128 return honor_nans ? UNGT_EXPR : GT_EXPR;
2129 case LTGT_EXPR:
2130 return UNEQ_EXPR;
2131 case UNEQ_EXPR:
2132 return LTGT_EXPR;
2133 case UNGT_EXPR:
2134 return LE_EXPR;
2135 case UNGE_EXPR:
2136 return LT_EXPR;
2137 case UNLT_EXPR:
2138 return GE_EXPR;
2139 case UNLE_EXPR:
2140 return GT_EXPR;
2141 case ORDERED_EXPR:
2142 return UNORDERED_EXPR;
2143 case UNORDERED_EXPR:
2144 return ORDERED_EXPR;
2145 default:
2146 gcc_unreachable ();
2147 }
2148 }
2149
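/* For example, when NaNs are honored and -ftrapping-math is off, the
   logical inverse of a < b is the unordered form !(a < b):

     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR
     invert_tree_comparison (LT_EXPR, false) == GE_EXPR

   A NaN operand makes a < b and a >= b false simultaneously, so plain
   GE_EXPR would be a wrong inversion under IEEE rules.  */
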
2150 /* Similar, but return the comparison that results if the operands are
2151 swapped. This is safe for floating-point. */
2152
2153 enum tree_code
2154 swap_tree_comparison (enum tree_code code)
2155 {
2156 switch (code)
2157 {
2158 case EQ_EXPR:
2159 case NE_EXPR:
2160 case ORDERED_EXPR:
2161 case UNORDERED_EXPR:
2162 case LTGT_EXPR:
2163 case UNEQ_EXPR:
2164 return code;
2165 case GT_EXPR:
2166 return LT_EXPR;
2167 case GE_EXPR:
2168 return LE_EXPR;
2169 case LT_EXPR:
2170 return GT_EXPR;
2171 case LE_EXPR:
2172 return GE_EXPR;
2173 case UNGT_EXPR:
2174 return UNLT_EXPR;
2175 case UNGE_EXPR:
2176 return UNLE_EXPR;
2177 case UNLT_EXPR:
2178 return UNGT_EXPR;
2179 case UNLE_EXPR:
2180 return UNGE_EXPR;
2181 default:
2182 gcc_unreachable ();
2183 }
2184 }
2185
2186
2187 /* Convert a comparison tree code from an enum tree_code representation
2188 into a compcode bit-based encoding. This function is the inverse of
2189 compcode_to_comparison. */
2190
2191 static enum comparison_code
2192 comparison_to_compcode (enum tree_code code)
2193 {
2194 switch (code)
2195 {
2196 case LT_EXPR:
2197 return COMPCODE_LT;
2198 case EQ_EXPR:
2199 return COMPCODE_EQ;
2200 case LE_EXPR:
2201 return COMPCODE_LE;
2202 case GT_EXPR:
2203 return COMPCODE_GT;
2204 case NE_EXPR:
2205 return COMPCODE_NE;
2206 case GE_EXPR:
2207 return COMPCODE_GE;
2208 case ORDERED_EXPR:
2209 return COMPCODE_ORD;
2210 case UNORDERED_EXPR:
2211 return COMPCODE_UNORD;
2212 case UNLT_EXPR:
2213 return COMPCODE_UNLT;
2214 case UNEQ_EXPR:
2215 return COMPCODE_UNEQ;
2216 case UNLE_EXPR:
2217 return COMPCODE_UNLE;
2218 case UNGT_EXPR:
2219 return COMPCODE_UNGT;
2220 case LTGT_EXPR:
2221 return COMPCODE_LTGT;
2222 case UNGE_EXPR:
2223 return COMPCODE_UNGE;
2224 default:
2225 gcc_unreachable ();
2226 }
2227 }
2228
2229 /* Convert a compcode bit-based encoding of a comparison operator back
2230 to GCC's enum tree_code representation. This function is the
2231 inverse of comparison_to_compcode. */
2232
2233 static enum tree_code
2234 compcode_to_comparison (enum comparison_code code)
2235 {
2236 switch (code)
2237 {
2238 case COMPCODE_LT:
2239 return LT_EXPR;
2240 case COMPCODE_EQ:
2241 return EQ_EXPR;
2242 case COMPCODE_LE:
2243 return LE_EXPR;
2244 case COMPCODE_GT:
2245 return GT_EXPR;
2246 case COMPCODE_NE:
2247 return NE_EXPR;
2248 case COMPCODE_GE:
2249 return GE_EXPR;
2250 case COMPCODE_ORD:
2251 return ORDERED_EXPR;
2252 case COMPCODE_UNORD:
2253 return UNORDERED_EXPR;
2254 case COMPCODE_UNLT:
2255 return UNLT_EXPR;
2256 case COMPCODE_UNEQ:
2257 return UNEQ_EXPR;
2258 case COMPCODE_UNLE:
2259 return UNLE_EXPR;
2260 case COMPCODE_UNGT:
2261 return UNGT_EXPR;
2262 case COMPCODE_LTGT:
2263 return LTGT_EXPR;
2264 case COMPCODE_UNGE:
2265 return UNGE_EXPR;
2266 default:
2267 gcc_unreachable ();
2268 }
2269 }
2270
2271 /* Return a tree for the comparison which is the combination of
2272 doing the AND or OR (depending on CODE) of the two operations LCODE
2273 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2274 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2275 if this makes the transformation invalid. */
2276
2277 tree
2278 combine_comparisons (location_t loc,
2279 enum tree_code code, enum tree_code lcode,
2280 enum tree_code rcode, tree truth_type,
2281 tree ll_arg, tree lr_arg)
2282 {
2283 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2284 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2285 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2286 int compcode;
2287
2288 switch (code)
2289 {
2290 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2291 compcode = lcompcode & rcompcode;
2292 break;
2293
2294 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2295 compcode = lcompcode | rcompcode;
2296 break;
2297
2298 default:
2299 return NULL_TREE;
2300 }
2301
2302 if (!honor_nans)
2303 {
2304 /* Eliminate unordered comparisons, as well as LTGT and ORD
2305 which are not used unless the mode has NaNs. */
2306 compcode &= ~COMPCODE_UNORD;
2307 if (compcode == COMPCODE_LTGT)
2308 compcode = COMPCODE_NE;
2309 else if (compcode == COMPCODE_ORD)
2310 compcode = COMPCODE_TRUE;
2311 }
2312 else if (flag_trapping_math)
2313 {
2314 /* Check that the original operation and the optimized ones will trap
2315 under the same condition. */
2316 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2317 && (lcompcode != COMPCODE_EQ)
2318 && (lcompcode != COMPCODE_ORD);
2319 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2320 && (rcompcode != COMPCODE_EQ)
2321 && (rcompcode != COMPCODE_ORD);
2322 bool trap = (compcode & COMPCODE_UNORD) == 0
2323 && (compcode != COMPCODE_EQ)
2324 && (compcode != COMPCODE_ORD);
2325
2326 /* In a short-circuited boolean expression the LHS might be
2327 such that the RHS, if evaluated, will never trap. For
2328 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2329 if neither x nor y is NaN. (This is a mixed blessing: for
2330 example, the expression above will never trap, hence
2331 optimizing it to x < y would be invalid). */
2332 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2333 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2334 rtrap = false;
2335
2336 /* If the comparison was short-circuited, and only the RHS
2337 trapped, we may now generate a spurious trap. */
2338 if (rtrap && !ltrap
2339 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2340 return NULL_TREE;
2341
2342 /* If we changed the conditions that cause a trap, we lose. */
2343 if ((ltrap || rtrap) != trap)
2344 return NULL_TREE;
2345 }
2346
2347 if (compcode == COMPCODE_TRUE)
2348 return constant_boolean_node (true, truth_type);
2349 else if (compcode == COMPCODE_FALSE)
2350 return constant_boolean_node (false, truth_type);
2351 else
2352 {
2353 enum tree_code tcode;
2354
2355 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2356 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2357 }
2358 }
2359 \f
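/* A worked instance of the bit encoding, with hypothetical operands X
   and Y: COMPCODE_LT is 1 and COMPCODE_EQ is 2, so ORing them yields 3,
   which is COMPCODE_LE.  Hence

     combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                          boolean_type_node, x, y)

   folds (x < y) || (x == y) into x <= y, provided the NaN and trap
   checks above do not veto the transformation.  */
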
2360 /* Return nonzero if two operands (typically of the same tree node)
2361 are necessarily equal. If either argument has side-effects this
2362 function returns zero. FLAGS modifies behavior as follows:
2363
2364 If OEP_ONLY_CONST is set, only return nonzero for constants.
2365 This function tests whether the operands are indistinguishable;
2366 it does not test whether they are equal using C's == operation.
2367 The distinction is important for IEEE floating point, because
2368 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2369 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2370
2371 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2372 even though it may hold multiple values during a function.
2373 This is because a GCC tree node guarantees that nothing else is
2374 executed between the evaluation of its "operands" (which may often
2375 be evaluated in arbitrary order). Hence if the operands themselves
2376 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2377 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2378 unset means assuming isochronic (or instantaneous) tree equivalence.
2379 Unless comparing arbitrary expression trees, such as from different
2380 statements, this flag can usually be left unset.
2381
2382 If OEP_PURE_SAME is set, then pure functions with identical arguments
2383 are considered the same. It is used when the caller has other ways
2384 to ensure that global memory is unchanged in between. */
2385
2386 int
2387 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2388 {
2389 /* If either is ERROR_MARK, they aren't equal. */
2390 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2391 || TREE_TYPE (arg0) == error_mark_node
2392 || TREE_TYPE (arg1) == error_mark_node)
2393 return 0;
2394
2395 /* Similarly, if either does not have a type (like a released SSA name),
2396 they aren't equal. */
2397 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2398 return 0;
2399
2400 /* Check equality of integer constants before bailing out due to
2401 precision differences. */
2402 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2403 return tree_int_cst_equal (arg0, arg1);
2404
2405 /* If both types don't have the same signedness, then we can't consider
2406 them equal. We must check this before the STRIP_NOPS calls
2407 because they may change the signedness of the arguments. As pointers
2408 strictly don't have a signedness, require either two pointers or
2409 two non-pointers as well. */
2410 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2411 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2412 return 0;
2413
2414 /* We cannot consider pointers to different address space equal. */
2415 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2416 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2417 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2418 return 0;
2419
2420 /* If both types don't have the same precision, then it is not safe
2421 to strip NOPs. */
2422 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2423 return 0;
2424
2425 STRIP_NOPS (arg0);
2426 STRIP_NOPS (arg1);
2427
2428 /* In case both args are comparisons but with different comparison
2429 code, try to swap the comparison operands of one arg to produce
2430 a match and compare that variant. */
2431 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2432 && COMPARISON_CLASS_P (arg0)
2433 && COMPARISON_CLASS_P (arg1))
2434 {
2435 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2436
2437 if (TREE_CODE (arg0) == swap_code)
2438 return operand_equal_p (TREE_OPERAND (arg0, 0),
2439 TREE_OPERAND (arg1, 1), flags)
2440 && operand_equal_p (TREE_OPERAND (arg0, 1),
2441 TREE_OPERAND (arg1, 0), flags);
2442 }
2443
2444 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2445 /* This is needed for conversions and for COMPONENT_REF.
2446 Might as well play it safe and always test this. */
2447 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2448 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2449 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2450 return 0;
2451
2452 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2453 We don't care about side effects in that case because the SAVE_EXPR
2454 takes care of that for us. In all other cases, two expressions are
2455 equal if they have no side effects. If we have two identical
2456 expressions with side effects that should be treated the same due
2457 to the only side effects being identical SAVE_EXPR's, that will
2458 be detected in the recursive calls below. */
2459 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2460 && (TREE_CODE (arg0) == SAVE_EXPR
2461 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2462 return 1;
2463
2464 /* Next handle constant cases, those for which we can return 1 even
2465 if ONLY_CONST is set. */
2466 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2467 switch (TREE_CODE (arg0))
2468 {
2469 case INTEGER_CST:
2470 return tree_int_cst_equal (arg0, arg1);
2471
2472 case FIXED_CST:
2473 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2474 TREE_FIXED_CST (arg1));
2475
2476 case REAL_CST:
2477 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2478 TREE_REAL_CST (arg1)))
2479 return 1;
2480
2481
2482 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2483 {
2484 /* If we do not distinguish between signed and unsigned zero,
2485 consider them equal. */
2486 if (real_zerop (arg0) && real_zerop (arg1))
2487 return 1;
2488 }
2489 return 0;
2490
2491 case VECTOR_CST:
2492 {
2493 tree v1, v2;
2494
2495 v1 = TREE_VECTOR_CST_ELTS (arg0);
2496 v2 = TREE_VECTOR_CST_ELTS (arg1);
2497 while (v1 && v2)
2498 {
2499 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2500 flags))
2501 return 0;
2502 v1 = TREE_CHAIN (v1);
2503 v2 = TREE_CHAIN (v2);
2504 }
2505
2506 return v1 == v2;
2507 }
2508
2509 case COMPLEX_CST:
2510 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2511 flags)
2512 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2513 flags));
2514
2515 case STRING_CST:
2516 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2517 && ! memcmp (TREE_STRING_POINTER (arg0),
2518 TREE_STRING_POINTER (arg1),
2519 TREE_STRING_LENGTH (arg0)));
2520
2521 case ADDR_EXPR:
2522 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2523 0);
2524 default:
2525 break;
2526 }
2527
2528 if (flags & OEP_ONLY_CONST)
2529 return 0;
2530
2531 /* Define macros to test an operand from arg0 and arg1 for equality and a
2532 variant that allows null and views null as being different from any
2533 non-null value. In the latter case, if either is null, then both
2534 must be; otherwise, do the normal comparison. */
2535 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2536 TREE_OPERAND (arg1, N), flags)
2537
2538 #define OP_SAME_WITH_NULL(N) \
2539 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2540 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2541
2542 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2543 {
2544 case tcc_unary:
2545 /* Two conversions are equal only if signedness and modes match. */
2546 switch (TREE_CODE (arg0))
2547 {
2548 CASE_CONVERT:
2549 case FIX_TRUNC_EXPR:
2550 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2551 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2552 return 0;
2553 break;
2554 default:
2555 break;
2556 }
2557
2558 return OP_SAME (0);
2559
2560
2561 case tcc_comparison:
2562 case tcc_binary:
2563 if (OP_SAME (0) && OP_SAME (1))
2564 return 1;
2565
2566 /* For commutative ops, allow the other order. */
2567 return (commutative_tree_code (TREE_CODE (arg0))
2568 && operand_equal_p (TREE_OPERAND (arg0, 0),
2569 TREE_OPERAND (arg1, 1), flags)
2570 && operand_equal_p (TREE_OPERAND (arg0, 1),
2571 TREE_OPERAND (arg1, 0), flags));
2572
2573 case tcc_reference:
2574 /* If either of the pointer (or reference) expressions we are
2575 dereferencing contains a side effect, these cannot be equal. */
2576 if (TREE_SIDE_EFFECTS (arg0)
2577 || TREE_SIDE_EFFECTS (arg1))
2578 return 0;
2579
2580 switch (TREE_CODE (arg0))
2581 {
2582 case INDIRECT_REF:
2583 case REALPART_EXPR:
2584 case IMAGPART_EXPR:
2585 return OP_SAME (0);
2586
2587 case MEM_REF:
2588 /* Require equal access sizes, and similar pointer types.
2589 We can have incomplete types for array references of
2590 variable-sized arrays from the Fortran frontend
2591 though. */
2592 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2593 || (TYPE_SIZE (TREE_TYPE (arg0))
2594 && TYPE_SIZE (TREE_TYPE (arg1))
2595 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2596 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2597 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2598 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2599 && OP_SAME (0) && OP_SAME (1));
2600
2601 case ARRAY_REF:
2602 case ARRAY_RANGE_REF:
2603 /* Operands 2 and 3 may be null.
2604 Compare the array index by value first if it is constant, as we
2605 may have different types but the same value here. */
2606 return (OP_SAME (0)
2607 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2608 TREE_OPERAND (arg1, 1))
2609 || OP_SAME (1))
2610 && OP_SAME_WITH_NULL (2)
2611 && OP_SAME_WITH_NULL (3));
2612
2613 case COMPONENT_REF:
2614 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2615 may be NULL when we're called to compare MEM_EXPRs. */
2616 return OP_SAME_WITH_NULL (0)
2617 && OP_SAME (1)
2618 && OP_SAME_WITH_NULL (2);
2619
2620 case BIT_FIELD_REF:
2621 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2622
2623 default:
2624 return 0;
2625 }
2626
2627 case tcc_expression:
2628 switch (TREE_CODE (arg0))
2629 {
2630 case ADDR_EXPR:
2631 case TRUTH_NOT_EXPR:
2632 return OP_SAME (0);
2633
2634 case TRUTH_ANDIF_EXPR:
2635 case TRUTH_ORIF_EXPR:
2636 return OP_SAME (0) && OP_SAME (1);
2637
2638 case FMA_EXPR:
2639 case WIDEN_MULT_PLUS_EXPR:
2640 case WIDEN_MULT_MINUS_EXPR:
2641 if (!OP_SAME (2))
2642 return 0;
2643 /* The multiplication operands are commutative. */
2644 /* FALLTHRU */
2645
2646 case TRUTH_AND_EXPR:
2647 case TRUTH_OR_EXPR:
2648 case TRUTH_XOR_EXPR:
2649 if (OP_SAME (0) && OP_SAME (1))
2650 return 1;
2651
2652 /* Otherwise take into account this is a commutative operation. */
2653 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2654 TREE_OPERAND (arg1, 1), flags)
2655 && operand_equal_p (TREE_OPERAND (arg0, 1),
2656 TREE_OPERAND (arg1, 0), flags));
2657
2658 case COND_EXPR:
2659 case VEC_COND_EXPR:
2660 case DOT_PROD_EXPR:
2661 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2662
2663 default:
2664 return 0;
2665 }
2666
2667 case tcc_vl_exp:
2668 switch (TREE_CODE (arg0))
2669 {
2670 case CALL_EXPR:
2671 /* If the CALL_EXPRs call different functions, then they
2672 clearly cannot be equal. */
2673 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2674 flags))
2675 return 0;
2676
2677 {
2678 unsigned int cef = call_expr_flags (arg0);
2679 if (flags & OEP_PURE_SAME)
2680 cef &= ECF_CONST | ECF_PURE;
2681 else
2682 cef &= ECF_CONST;
2683 if (!cef)
2684 return 0;
2685 }
2686
2687 /* Now see if all the arguments are the same. */
2688 {
2689 const_call_expr_arg_iterator iter0, iter1;
2690 const_tree a0, a1;
2691 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2692 a1 = first_const_call_expr_arg (arg1, &iter1);
2693 a0 && a1;
2694 a0 = next_const_call_expr_arg (&iter0),
2695 a1 = next_const_call_expr_arg (&iter1))
2696 if (! operand_equal_p (a0, a1, flags))
2697 return 0;
2698
2699 /* If we get here and both argument lists are exhausted
2700 then the CALL_EXPRs are equal. */
2701 return ! (a0 || a1);
2702 }
2703 default:
2704 return 0;
2705 }
2706
2707 case tcc_declaration:
2708 /* Consider __builtin_sqrt equal to sqrt. */
2709 return (TREE_CODE (arg0) == FUNCTION_DECL
2710 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2711 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2712 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2713
2714 default:
2715 return 0;
2716 }
2717
2718 #undef OP_SAME
2719 #undef OP_SAME_WITH_NULL
2720 }
2721 \f
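/* Typical uses, with hypothetical trees A and B:

     operand_equal_p (a, b, 0)               structural equality
     operand_equal_p (a, b, OEP_ONLY_CONST)  equal constants only

   Two distinct INTEGER_CST nodes both holding 7 compare equal in either
   mode, whereas two identical calls to a non-const, non-pure function
   never do, since the results may differ between the two calls.  */
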
2722 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2723 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2724
2725 When in doubt, return 0. */
2726
2727 static int
2728 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2729 {
2730 int unsignedp1, unsignedpo;
2731 tree primarg0, primarg1, primother;
2732 unsigned int correct_width;
2733
2734 if (operand_equal_p (arg0, arg1, 0))
2735 return 1;
2736
2737 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2738 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2739 return 0;
2740
2741 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2742 and see if the inner values are the same. This removes any
2743 signedness comparison, which doesn't matter here. */
2744 primarg0 = arg0, primarg1 = arg1;
2745 STRIP_NOPS (primarg0);
2746 STRIP_NOPS (primarg1);
2747 if (operand_equal_p (primarg0, primarg1, 0))
2748 return 1;
2749
2750 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2751 actual comparison operand, ARG0.
2752
2753 First throw away any conversions to wider types
2754 already present in the operands. */
2755
2756 primarg1 = get_narrower (arg1, &unsignedp1);
2757 primother = get_narrower (other, &unsignedpo);
2758
2759 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2760 if (unsignedp1 == unsignedpo
2761 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2762 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2763 {
2764 tree type = TREE_TYPE (arg0);
2765
2766 /* Make sure shorter operand is extended the right way
2767 to match the longer operand. */
2768 primarg1 = fold_convert (signed_or_unsigned_type_for
2769 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2770
2771 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2772 return 1;
2773 }
2774
2775 return 0;
2776 }
2777 \f
2778 /* See if ARG is an expression that is either a comparison or is performing
2779 arithmetic on comparisons. The comparisons must only be comparing
2780 two different values, which will be stored in *CVAL1 and *CVAL2; if
2781 they are nonzero it means that some operands have already been found.
2782 No variables may be used anywhere else in the expression except in the
2783 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2784 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2785
2786 If this is true, return 1. Otherwise, return zero. */
2787
2788 static int
2789 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2790 {
2791 enum tree_code code = TREE_CODE (arg);
2792 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2793
2794 /* We can handle some of the tcc_expression cases here. */
2795 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2796 tclass = tcc_unary;
2797 else if (tclass == tcc_expression
2798 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2799 || code == COMPOUND_EXPR))
2800 tclass = tcc_binary;
2801
2802 else if (tclass == tcc_expression && code == SAVE_EXPR
2803 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2804 {
2805 /* If we've already found a CVAL1 or CVAL2, this expression is
2806 too complex to handle. */
2807 if (*cval1 || *cval2)
2808 return 0;
2809
2810 tclass = tcc_unary;
2811 *save_p = 1;
2812 }
2813
2814 switch (tclass)
2815 {
2816 case tcc_unary:
2817 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2818
2819 case tcc_binary:
2820 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2821 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2822 cval1, cval2, save_p));
2823
2824 case tcc_constant:
2825 return 1;
2826
2827 case tcc_expression:
2828 if (code == COND_EXPR)
2829 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2830 cval1, cval2, save_p)
2831 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2832 cval1, cval2, save_p)
2833 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2834 cval1, cval2, save_p));
2835 return 0;
2836
2837 case tcc_comparison:
2838 /* First see if we can handle the first operand, then the second. For
2839 the second operand, we know *CVAL1 can't be zero. It must be that
2840 one side of the comparison is each of the values; test for the
2841 case where this isn't true by failing if the two operands
2842 are the same. */
2843
2844 if (operand_equal_p (TREE_OPERAND (arg, 0),
2845 TREE_OPERAND (arg, 1), 0))
2846 return 0;
2847
2848 if (*cval1 == 0)
2849 *cval1 = TREE_OPERAND (arg, 0);
2850 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2851 ;
2852 else if (*cval2 == 0)
2853 *cval2 = TREE_OPERAND (arg, 0);
2854 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2855 ;
2856 else
2857 return 0;
2858
2859 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2860 ;
2861 else if (*cval2 == 0)
2862 *cval2 = TREE_OPERAND (arg, 1);
2863 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2864 ;
2865 else
2866 return 0;
2867
2868 return 1;
2869
2870 default:
2871 return 0;
2872 }
2873 }
2874 \f
2875 /* ARG is a tree that is known to contain just arithmetic operations and
2876 comparisons. Evaluate the operations in the tree substituting NEW0 for
2877 any occurrence of OLD0 as an operand of a comparison and likewise for
2878 NEW1 and OLD1. */
2879
2880 static tree
2881 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2882 tree old1, tree new1)
2883 {
2884 tree type = TREE_TYPE (arg);
2885 enum tree_code code = TREE_CODE (arg);
2886 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2887
2888 /* We can handle some of the tcc_expression cases here. */
2889 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2890 tclass = tcc_unary;
2891 else if (tclass == tcc_expression
2892 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2893 tclass = tcc_binary;
2894
2895 switch (tclass)
2896 {
2897 case tcc_unary:
2898 return fold_build1_loc (loc, code, type,
2899 eval_subst (loc, TREE_OPERAND (arg, 0),
2900 old0, new0, old1, new1));
2901
2902 case tcc_binary:
2903 return fold_build2_loc (loc, code, type,
2904 eval_subst (loc, TREE_OPERAND (arg, 0),
2905 old0, new0, old1, new1),
2906 eval_subst (loc, TREE_OPERAND (arg, 1),
2907 old0, new0, old1, new1));
2908
2909 case tcc_expression:
2910 switch (code)
2911 {
2912 case SAVE_EXPR:
2913 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2914 old1, new1);
2915
2916 case COMPOUND_EXPR:
2917 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2918 old1, new1);
2919
2920 case COND_EXPR:
2921 return fold_build3_loc (loc, code, type,
2922 eval_subst (loc, TREE_OPERAND (arg, 0),
2923 old0, new0, old1, new1),
2924 eval_subst (loc, TREE_OPERAND (arg, 1),
2925 old0, new0, old1, new1),
2926 eval_subst (loc, TREE_OPERAND (arg, 2),
2927 old0, new0, old1, new1));
2928 default:
2929 break;
2930 }
2931 /* Fall through - ??? */
2932
2933 case tcc_comparison:
2934 {
2935 tree arg0 = TREE_OPERAND (arg, 0);
2936 tree arg1 = TREE_OPERAND (arg, 1);
2937
2938 /* We need to check both for exact equality and tree equality. The
2939 former will be true if the operand has a side-effect. In that
2940 case, we know the operand occurred exactly once. */
2941
2942 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2943 arg0 = new0;
2944 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2945 arg0 = new1;
2946
2947 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2948 arg1 = new0;
2949 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2950 arg1 = new1;
2951
2952 return fold_build2_loc (loc, code, type, arg0, arg1);
2953 }
2954
2955 default:
2956 return arg;
2957 }
2958 }
2959 \f
2960 /* Return a tree for the case when the result of an expression is RESULT
2961 converted to TYPE and OMITTED was previously an operand of the expression
2962 but is now not needed (e.g., we folded OMITTED * 0).
2963
2964 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2965 the conversion of RESULT to TYPE. */
2966
2967 tree
2968 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2969 {
2970 tree t = fold_convert_loc (loc, type, result);
2971
2972 /* If the resulting operand is an empty statement, just return the omitted
2973 statement cast to void. */
2974 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2975 {
2976 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
2977 goto omit_one_operand_exit;
2978 }
2979
2980 if (TREE_SIDE_EFFECTS (omitted))
2981 {
2982 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2983 goto omit_one_operand_exit;
2984 }
2985
2986 return non_lvalue_loc (loc, t);
2987
2988 omit_one_operand_exit:
2989 protected_set_expr_location (t, loc);
2990 return t;
2991 }
2992
2993 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2994
2995 static tree
2996 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2997 tree omitted)
2998 {
2999 tree t = fold_convert_loc (loc, type, result);
3000
3001 /* If the resulting operand is an empty statement, just return the omitted
3002 statement cast to void. */
3003 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3004 {
3005 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3006 goto pedantic_omit_one_operand_exit;
3007 }
3008
3009 if (TREE_SIDE_EFFECTS (omitted))
3010 {
3011 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3012 goto pedantic_omit_one_operand_exit;
3013 }
3014
3015 return pedantic_non_lvalue_loc (loc, t);
3016
3017 pedantic_omit_one_operand_exit:
3018 protected_set_expr_location (t, loc);
3019 return t;
3020 }
3021
3022 /* Return a tree for the case when the result of an expression is RESULT
3023 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3024 of the expression but are now not needed.
3025
3026 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3027 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3028 evaluated before OMITTED2. Otherwise, if neither has side effects,
3029 just do the conversion of RESULT to TYPE. */
3030
3031 tree
3032 omit_two_operands_loc (location_t loc, tree type, tree result,
3033 tree omitted1, tree omitted2)
3034 {
3035 tree t = fold_convert_loc (loc, type, result);
3036
3037 if (TREE_SIDE_EFFECTS (omitted2))
3038 {
3039 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3040 SET_EXPR_LOCATION (t, loc);
3041 }
3042 if (TREE_SIDE_EFFECTS (omitted1))
3043 {
3044 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3045 SET_EXPR_LOCATION (t, loc);
3046 }
3047
3048 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3049 }
3050
3051 \f
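/* For example, folding X * 0 when X has side effects must still
   evaluate X.  With a hypothetical call expression X of type int,

     omit_one_operand_loc (loc, integer_type_node, integer_zero_node, x)

   yields the COMPOUND_EXPR (x, 0), while a side-effect-free X folds
   down to a plain 0 wrapped as a non-lvalue.  */
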
3052 /* Return a simplified tree node for the truth-negation of ARG. This
3053 never alters ARG itself. We assume that ARG is an operation that
3054 returns a truth value (0 or 1).
3055
3056 FIXME: one would think we would fold the result, but it causes
3057 problems with the dominator optimizer. */
3058
3059 tree
3060 fold_truth_not_expr (location_t loc, tree arg)
3061 {
3062 tree t, type = TREE_TYPE (arg);
3063 enum tree_code code = TREE_CODE (arg);
3064 location_t loc1, loc2;
3065
3066 /* If this is a comparison, we can simply invert it, except for
3067 floating-point non-equality comparisons, in which case we just
3068 enclose a TRUTH_NOT_EXPR around what we have. */
3069
3070 if (TREE_CODE_CLASS (code) == tcc_comparison)
3071 {
3072 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3073 if (FLOAT_TYPE_P (op_type)
3074 && flag_trapping_math
3075 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3076 && code != NE_EXPR && code != EQ_EXPR)
3077 return NULL_TREE;
3078
3079 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3080 if (code == ERROR_MARK)
3081 return NULL_TREE;
3082
3083 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3084 SET_EXPR_LOCATION (t, loc);
3085 return t;
3086 }
3087
3088 switch (code)
3089 {
3090 case INTEGER_CST:
3091 return constant_boolean_node (integer_zerop (arg), type);
3092
3093 case TRUTH_AND_EXPR:
3094 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3095 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3096 if (loc1 == UNKNOWN_LOCATION)
3097 loc1 = loc;
3098 if (loc2 == UNKNOWN_LOCATION)
3099 loc2 = loc;
3100 t = build2 (TRUTH_OR_EXPR, type,
3101 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3102 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3103 break;
3104
3105 case TRUTH_OR_EXPR:
3106 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3107 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3108 if (loc1 == UNKNOWN_LOCATION)
3109 loc1 = loc;
3110 if (loc2 == UNKNOWN_LOCATION)
3111 loc2 = loc;
3112 t = build2 (TRUTH_AND_EXPR, type,
3113 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3114 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3115 break;
3116
3117 case TRUTH_XOR_EXPR:
3118 /* Here we can invert either operand. We invert the first operand
3119 unless the second operand is a TRUTH_NOT_EXPR in which case our
3120 result is the XOR of the first operand with the inside of the
3121 negation of the second operand. */
3122
3123 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3124 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3125 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3126 else
3127 t = build2 (TRUTH_XOR_EXPR, type,
3128 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3129 TREE_OPERAND (arg, 1));
3130 break;
3131
3132 case TRUTH_ANDIF_EXPR:
3133 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3134 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3135 if (loc1 == UNKNOWN_LOCATION)
3136 loc1 = loc;
3137 if (loc2 == UNKNOWN_LOCATION)
3138 loc2 = loc;
3139 t = build2 (TRUTH_ORIF_EXPR, type,
3140 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3141 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3142 break;
3143
3144 case TRUTH_ORIF_EXPR:
3145 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3146 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3147 if (loc1 == UNKNOWN_LOCATION)
3148 loc1 = loc;
3149 if (loc2 == UNKNOWN_LOCATION)
3150 loc2 = loc;
3151 t = build2 (TRUTH_ANDIF_EXPR, type,
3152 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3153 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3154 break;
3155
3156 case TRUTH_NOT_EXPR:
3157 return TREE_OPERAND (arg, 0);
3158
3159 case COND_EXPR:
3160 {
3161 tree arg1 = TREE_OPERAND (arg, 1);
3162 tree arg2 = TREE_OPERAND (arg, 2);
3163
3164 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3165 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3166 if (loc1 == UNKNOWN_LOCATION)
3167 loc1 = loc;
3168 if (loc2 == UNKNOWN_LOCATION)
3169 loc2 = loc;
3170
3171 /* A COND_EXPR may have a throw as one operand, which
3172 then has void type. Just leave void operands
3173 as they are. */
3174 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3175 VOID_TYPE_P (TREE_TYPE (arg1))
3176 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3177 VOID_TYPE_P (TREE_TYPE (arg2))
3178 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3179 break;
3180 }
3181
3182 case COMPOUND_EXPR:
3183 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3184 if (loc1 == UNKNOWN_LOCATION)
3185 loc1 = loc;
3186 t = build2 (COMPOUND_EXPR, type,
3187 TREE_OPERAND (arg, 0),
3188 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3189 break;
3190
3191 case NON_LVALUE_EXPR:
3192 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3193 if (loc1 == UNKNOWN_LOCATION)
3194 loc1 = loc;
3195 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3196
3197 CASE_CONVERT:
3198 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3199 {
3200 t = build1 (TRUTH_NOT_EXPR, type, arg);
3201 break;
3202 }
3203
3204 /* ... fall through ... */
3205
3206 case FLOAT_EXPR:
3207 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3208 if (loc1 == UNKNOWN_LOCATION)
3209 loc1 = loc;
3210 t = build1 (TREE_CODE (arg), type,
3211 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3212 break;
3213
3214 case BIT_AND_EXPR:
3215 if (!integer_onep (TREE_OPERAND (arg, 1)))
3216 return NULL_TREE;
3217 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3218 break;
3219
3220 case SAVE_EXPR:
3221 t = build1 (TRUTH_NOT_EXPR, type, arg);
3222 break;
3223
3224 case CLEANUP_POINT_EXPR:
3225 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3226 if (loc1 == UNKNOWN_LOCATION)
3227 loc1 = loc;
3228 t = build1 (CLEANUP_POINT_EXPR, type,
3229 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3230 break;
3231
3232 default:
3233 t = NULL_TREE;
3234 break;
3235 }
3236
3237 if (t)
3238 SET_EXPR_LOCATION (t, loc);
3239
3240 return t;
3241 }
3242
3243 /* Return a simplified tree node for the truth-negation of ARG. This
3244 never alters ARG itself. We assume that ARG is an operation that
3245 returns a truth value (0 or 1).
3246
3247 FIXME: one would think we would fold the result, but it causes
3248 problems with the dominator optimizer. */
3249
3250 tree
3251 invert_truthvalue_loc (location_t loc, tree arg)
3252 {
3253 tree tem;
3254
3255 if (TREE_CODE (arg) == ERROR_MARK)
3256 return arg;
3257
3258 tem = fold_truth_not_expr (loc, arg);
3259 if (!tem)
3260 {
3261 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3262 SET_EXPR_LOCATION (tem, loc);
3263 }
3264
3265 return tem;
3266 }
3267
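/* For instance, with hypothetical boolean operands A and B,

     invert_truthvalue_loc (loc, build2 (TRUTH_AND_EXPR,
                                         boolean_type_node, a, b))

   produces !a || !b through the TRUTH_AND_EXPR arm of
   fold_truth_not_expr, and falls back to wrapping the argument in a
   TRUTH_NOT_EXPR whenever fold_truth_not_expr returns NULL_TREE.  */
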
3268 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3269 operands are another bit-wise operation with a common input. If so,
3270 distribute the bit operations to save an operation and possibly two if
3271 constants are involved. For example, convert
3272 (A | B) & (A | C) into A | (B & C)
3273 Further simplification will occur if B and C are constants.
3274
3275 If this optimization cannot be done, 0 will be returned. */
3276
3277 static tree
3278 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3279 tree arg0, tree arg1)
3280 {
3281 tree common;
3282 tree left, right;
3283
3284 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3285 || TREE_CODE (arg0) == code
3286 || (TREE_CODE (arg0) != BIT_AND_EXPR
3287 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3288 return 0;
3289
3290 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3291 {
3292 common = TREE_OPERAND (arg0, 0);
3293 left = TREE_OPERAND (arg0, 1);
3294 right = TREE_OPERAND (arg1, 1);
3295 }
3296 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3297 {
3298 common = TREE_OPERAND (arg0, 0);
3299 left = TREE_OPERAND (arg0, 1);
3300 right = TREE_OPERAND (arg1, 0);
3301 }
3302 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3303 {
3304 common = TREE_OPERAND (arg0, 1);
3305 left = TREE_OPERAND (arg0, 0);
3306 right = TREE_OPERAND (arg1, 1);
3307 }
3308 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3309 {
3310 common = TREE_OPERAND (arg0, 1);
3311 left = TREE_OPERAND (arg0, 0);
3312 right = TREE_OPERAND (arg1, 0);
3313 }
3314 else
3315 return 0;
3316
3317 common = fold_convert_loc (loc, type, common);
3318 left = fold_convert_loc (loc, type, left);
3319 right = fold_convert_loc (loc, type, right);
3320 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3321 fold_build2_loc (loc, code, type, left, right));
3322 }
3323
3324 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3325 with code CODE. This optimization is unsafe. */
3326 static tree
3327 distribute_real_division (location_t loc, enum tree_code code, tree type,
3328 tree arg0, tree arg1)
3329 {
3330 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3331 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3332
3333 /* (A / C) +- (B / C) -> (A +- B) / C. */
3334 if (mul0 == mul1
3335 && operand_equal_p (TREE_OPERAND (arg0, 1),
3336 TREE_OPERAND (arg1, 1), 0))
3337 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3338 fold_build2_loc (loc, code, type,
3339 TREE_OPERAND (arg0, 0),
3340 TREE_OPERAND (arg1, 0)),
3341 TREE_OPERAND (arg0, 1));
3342
3343 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3344 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3345 TREE_OPERAND (arg1, 0), 0)
3346 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3347 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3348 {
3349 REAL_VALUE_TYPE r0, r1;
3350 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3351 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3352 if (!mul0)
3353 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3354 if (!mul1)
3355 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3356 real_arithmetic (&r0, code, &r0, &r1);
3357 return fold_build2_loc (loc, MULT_EXPR, type,
3358 TREE_OPERAND (arg0, 0),
3359 build_real (type, r0));
3360 }
3361
3362 return NULL_TREE;
3363 }
3364 \f
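/* Concretely: a/5.0 + b/5.0 becomes (a + b) / 5.0 by the first pattern,
   and a/2.0 + a/4.0 becomes a * 0.75 by the second, because 1/2 + 1/4
   is evaluated exactly in REAL_VALUE_TYPE arithmetic.  Both rewrites
   may change rounding, which is why callers must guard them with the
   unsafe-math flags.  */
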
3365 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3366 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3367
3368 static tree
3369 make_bit_field_ref (location_t loc, tree inner, tree type,
3370 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3371 {
3372 tree result, bftype;
3373
3374 if (bitpos == 0)
3375 {
3376 tree size = TYPE_SIZE (TREE_TYPE (inner));
3377 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3378 || POINTER_TYPE_P (TREE_TYPE (inner)))
3379 && host_integerp (size, 0)
3380 && tree_low_cst (size, 0) == bitsize)
3381 return fold_convert_loc (loc, type, inner);
3382 }
3383
3384 bftype = type;
3385 if (TYPE_PRECISION (bftype) != bitsize
3386 || TYPE_UNSIGNED (bftype) == !unsignedp)
3387 bftype = build_nonstandard_integer_type (bitsize, 0);
3388
3389 result = build3 (BIT_FIELD_REF, bftype, inner,
3390 size_int (bitsize), bitsize_int (bitpos));
3391 SET_EXPR_LOCATION (result, loc);
3392
3393 if (bftype != type)
3394 result = fold_convert_loc (loc, type, result);
3395
3396 return result;
3397 }
3398
3399 /* Optimize a bit-field compare.
3400
3401 There are two cases: First is a compare against a constant and the
3402 second is a comparison of two items where the fields are at the same
3403 bit position relative to the start of a chunk (byte, halfword, word)
3404 large enough to contain it. In these cases we can avoid the shift
3405 implicit in bitfield extractions.
3406
3407 For constants, we emit a compare of the shifted constant with the
3408 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3409 compared. For two fields at the same position, we do the ANDs with the
3410 similar mask and compare the result of the ANDs.
3411
3412 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3413 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3414 are the left and right operands of the comparison, respectively.
3415
3416 If the optimization described above can be done, we return the resulting
3417 tree. Otherwise we return zero. */
3418
3419 static tree
3420 optimize_bit_field_compare (location_t loc, enum tree_code code,
3421 tree compare_type, tree lhs, tree rhs)
3422 {
3423 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3424 tree type = TREE_TYPE (lhs);
3425 tree signed_type, unsigned_type;
3426 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3427 enum machine_mode lmode, rmode, nmode;
3428 int lunsignedp, runsignedp;
3429 int lvolatilep = 0, rvolatilep = 0;
3430 tree linner, rinner = NULL_TREE;
3431 tree mask;
3432 tree offset;
3433
3434 /* Get all the information about the extractions being done. If the bit size
3435 is the same as the size of the underlying object, we aren't doing an
3436 extraction at all and so can do nothing. We also don't want to
3437 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3438 then will no longer be able to replace it. */
3439 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3440 &lunsignedp, &lvolatilep, false);
3441 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3442 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3443 return 0;
3444
3445 if (!const_p)
3446 {
3447 /* If this is not a constant, we can only do something if bit positions,
3448 sizes, and signedness are the same. */
3449 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3450 &runsignedp, &rvolatilep, false);
3451
3452 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3453 || lunsignedp != runsignedp || offset != 0
3454 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3455 return 0;
3456 }
3457
3458 /* See if we can find a mode to refer to this field. We should be able to,
3459 but fail if we can't. */
3460 if (lvolatilep
3461 && GET_MODE_BITSIZE (lmode) > 0
3462 && flag_strict_volatile_bitfields > 0)
3463 nmode = lmode;
3464 else
3465 nmode = get_best_mode (lbitsize, lbitpos,
3466 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3467 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3468 TYPE_ALIGN (TREE_TYPE (rinner))),
3469 word_mode, lvolatilep || rvolatilep);
3470 if (nmode == VOIDmode)
3471 return 0;
3472
3473 /* Set signed and unsigned types of the precision of this mode for the
3474 shifts below. */
3475 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3476 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3477
3478 /* Compute the bit position and size for the new reference and our offset
3479 within it. If the new reference is the same size as the original, we
3480 won't optimize anything, so return zero. */
3481 nbitsize = GET_MODE_BITSIZE (nmode);
3482 nbitpos = lbitpos & ~ (nbitsize - 1);
3483 lbitpos -= nbitpos;
3484 if (nbitsize == lbitsize)
3485 return 0;
3486
3487 if (BYTES_BIG_ENDIAN)
3488 lbitpos = nbitsize - lbitsize - lbitpos;
3489
3490 /* Make the mask to be used against the extracted field. */
3491 mask = build_int_cst_type (unsigned_type, -1);
3492 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3493 mask = const_binop (RSHIFT_EXPR, mask,
3494 size_int (nbitsize - lbitsize - lbitpos));
3495
3496 if (! const_p)
3497 /* If not comparing with constant, just rework the comparison
3498 and return. */
3499 return fold_build2_loc (loc, code, compare_type,
3500 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3501 make_bit_field_ref (loc, linner,
3502 unsigned_type,
3503 nbitsize, nbitpos,
3504 1),
3505 mask),
3506 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3507 make_bit_field_ref (loc, rinner,
3508 unsigned_type,
3509 nbitsize, nbitpos,
3510 1),
3511 mask));
3512
3513 /* Otherwise, we are handling the constant case. See if the constant is too
3514 big for the field. Warn and return a tree for 0 (false) if so. We do
3515 this not only for its own sake, but to avoid having to test for this
3516 error case below. If we didn't, we might generate wrong code.
3517
3518 For unsigned fields, the constant shifted right by the field length should
3519 be all zero. For signed fields, the high-order bits should agree with
3520 the sign bit. */
3521
3522 if (lunsignedp)
3523 {
3524 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3525 fold_convert_loc (loc,
3526 unsigned_type, rhs),
3527 size_int (lbitsize))))
3528 {
3529 warning (0, "comparison is always %d due to width of bit-field",
3530 code == NE_EXPR);
3531 return constant_boolean_node (code == NE_EXPR, compare_type);
3532 }
3533 }
3534 else
3535 {
3536 tree tem = const_binop (RSHIFT_EXPR,
3537 fold_convert_loc (loc, signed_type, rhs),
3538 size_int (lbitsize - 1));
3539 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3540 {
3541 warning (0, "comparison is always %d due to width of bit-field",
3542 code == NE_EXPR);
3543 return constant_boolean_node (code == NE_EXPR, compare_type);
3544 }
3545 }
3546
3547 /* Single-bit compares should always be against zero. */
3548 if (lbitsize == 1 && ! integer_zerop (rhs))
3549 {
3550 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3551 rhs = build_int_cst (type, 0);
3552 }
3553
3554 /* Make a new bitfield reference, shift the constant over the
3555 appropriate number of bits and mask it with the computed mask
3556 (in case this was a signed field). If we changed it, make a new one. */
3557 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3558 if (lvolatilep)
3559 {
3560 TREE_SIDE_EFFECTS (lhs) = 1;
3561 TREE_THIS_VOLATILE (lhs) = 1;
3562 }
3563
3564 rhs = const_binop (BIT_AND_EXPR,
3565 const_binop (LSHIFT_EXPR,
3566 fold_convert_loc (loc, unsigned_type, rhs),
3567 size_int (lbitpos)),
3568 mask);
3569
3570 lhs = build2 (code, compare_type,
3571 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3572 rhs);
3573 SET_EXPR_LOCATION (lhs, loc);
3574 return lhs;
3575 }
3576 \f
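/* For example, given

     struct s { unsigned f : 3; } x;

   the comparison x.f == 5 is rewritten to mask the containing word and
   compare it against 5 shifted into the field's position, saving the
   extraction shift; x.f == 9 instead triggers the width warning above
   and folds directly to false, since 9 cannot fit in 3 bits.  */
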
3577 /* Subroutine for fold_truthop: decode a field reference.
3578
3579 If EXP is a comparison reference, we return the innermost reference.
3580
3581 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3582 set to the starting bit number.
3583
3584 If the innermost field can be completely contained in a mode-sized
3585 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3586
3587 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3588 otherwise it is not changed.
3589
3590 *PUNSIGNEDP is set to the signedness of the field.
3591
3592 *PMASK is set to the mask used. This is either contained in a
3593 BIT_AND_EXPR or derived from the width of the field.
3594
3595 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3596
3597 Return 0 if this is not a component reference or is one that we can't
3598 do anything with. */
3599
3600 static tree
3601 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3602 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3603 int *punsignedp, int *pvolatilep,
3604 tree *pmask, tree *pand_mask)
3605 {
3606 tree outer_type = 0;
3607 tree and_mask = 0;
3608 tree mask, inner, offset;
3609 tree unsigned_type;
3610 unsigned int precision;
3611
3612 /* All the optimizations using this function assume integer fields.
3613 There are problems with FP fields since the type_for_size call
3614 below can fail for, e.g., XFmode. */
3615 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3616 return 0;
3617
3618 /* We are interested in the bare arrangement of bits, so strip everything
3619 that doesn't affect the machine mode. However, record the type of the
3620 outermost expression if it may matter below. */
3621 if (CONVERT_EXPR_P (exp)
3622 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3623 outer_type = TREE_TYPE (exp);
3624 STRIP_NOPS (exp);
3625
3626 if (TREE_CODE (exp) == BIT_AND_EXPR)
3627 {
3628 and_mask = TREE_OPERAND (exp, 1);
3629 exp = TREE_OPERAND (exp, 0);
3630 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3631 if (TREE_CODE (and_mask) != INTEGER_CST)
3632 return 0;
3633 }
3634
3635 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3636 punsignedp, pvolatilep, false);
3637 if ((inner == exp && and_mask == 0)
3638 || *pbitsize < 0 || offset != 0
3639 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3640 return 0;
3641
3642 /* If the number of bits in the reference is the same as the bitsize of
3643 the outer type, then the outer type gives the signedness. Otherwise
3644 (in case of a small bitfield) the signedness is unchanged. */
3645 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3646 *punsignedp = TYPE_UNSIGNED (outer_type);
3647
3648 /* Compute the mask to access the bitfield. */
3649 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3650 precision = TYPE_PRECISION (unsigned_type);
3651
3652 mask = build_int_cst_type (unsigned_type, -1);
3653
3654 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3655 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3656
3657 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3658 if (and_mask != 0)
3659 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3660 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3661
3662 *pmask = mask;
3663 *pand_mask = and_mask;
3664 return inner;
3665 }
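
/* An illustrative sketch, not part of GCC: the double shift used just
   above to build the field mask, done directly on a 32-bit unsigned
   int.  The function name is hypothetical, and 1 <= SIZE <= 32 is
   assumed so neither shift count reaches the type width.  */
#if 0
static unsigned int
low_ones (unsigned int size)
{
  unsigned int precision = 32;
  /* Shift all-ones left and then logically right by PRECISION - SIZE;
     what survives is exactly SIZE one bits in the low-order positions,
     e.g. low_ones (3) == 0x7.  */
  return (~0u << (precision - size)) >> (precision - size);
}
#endif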
3666
3667 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3668 bit positions. */
3669
3670 static int
3671 all_ones_mask_p (const_tree mask, int size)
3672 {
3673 tree type = TREE_TYPE (mask);
3674 unsigned int precision = TYPE_PRECISION (type);
3675 tree tmask;
3676
3677 tmask = build_int_cst_type (signed_type_for (type), -1);
3678
3679 return
3680 tree_int_cst_equal (mask,
3681 const_binop (RSHIFT_EXPR,
3682 const_binop (LSHIFT_EXPR, tmask,
3683 size_int (precision - size)),
3684 size_int (precision - size)));
3685 }
3686
3687 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3688 represents the sign bit of EXP's type. If EXP represents a sign
3689 or zero extension, also test VAL against the unextended type.
3690 The return value is the (sub)expression whose sign bit is VAL,
3691 or NULL_TREE otherwise. */
3692
3693 static tree
3694 sign_bit_p (tree exp, const_tree val)
3695 {
3696 unsigned HOST_WIDE_INT mask_lo, lo;
3697 HOST_WIDE_INT mask_hi, hi;
3698 int width;
3699 tree t;
3700
3701 /* Tree EXP must have an integral type. */
3702 t = TREE_TYPE (exp);
3703 if (! INTEGRAL_TYPE_P (t))
3704 return NULL_TREE;
3705
3706 /* Tree VAL must be an integer constant. */
3707 if (TREE_CODE (val) != INTEGER_CST
3708 || TREE_OVERFLOW (val))
3709 return NULL_TREE;
3710
3711 width = TYPE_PRECISION (t);
3712 if (width > HOST_BITS_PER_WIDE_INT)
3713 {
3714 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3715 lo = 0;
3716
3717 mask_hi = ((unsigned HOST_WIDE_INT) -1
3718 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3719 mask_lo = -1;
3720 }
3721 else
3722 {
3723 hi = 0;
3724 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3725
3726 mask_hi = 0;
3727 mask_lo = ((unsigned HOST_WIDE_INT) -1
3728 >> (HOST_BITS_PER_WIDE_INT - width));
3729 }
3730
3731 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3732 treat VAL as if it were unsigned. */
3733 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3734 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3735 return exp;
3736
3737 /* Handle extension from a narrower type. */
3738 if (TREE_CODE (exp) == NOP_EXPR
3739 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3740 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3741
3742 return NULL_TREE;
3743 }
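
/* An illustrative sketch, not part of GCC: the kind of fold that
   sign_bit_p enables, checked in plain C.  A 32-bit two's-complement
   int is assumed; the function name is hypothetical.  */
#if 0
static int
sign_bit_test_example (int x)
{
  /* Testing the sign bit with a mask is equivalent to an ordinary
     signed comparison against zero.  */
  return ((x & (1u << 31)) != 0) == (x < 0);
}
#endif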
3744
3745 /* Subroutine for fold_truthop: determine if an operand is simple enough
3746 to be evaluated unconditionally. */
3747
3748 static int
3749 simple_operand_p (const_tree exp)
3750 {
3751 /* Strip any conversions that don't change the machine mode. */
3752 STRIP_NOPS (exp);
3753
3754 return (CONSTANT_CLASS_P (exp)
3755 || TREE_CODE (exp) == SSA_NAME
3756 || (DECL_P (exp)
3757 && ! TREE_ADDRESSABLE (exp)
3758 && ! TREE_THIS_VOLATILE (exp)
3759 && ! DECL_NONLOCAL (exp)
3760 /* Don't regard global variables as simple. They may be
3761 allocated in ways unknown to the compiler (shared memory,
3762 #pragma weak, etc). */
3763 && ! TREE_PUBLIC (exp)
3764 && ! DECL_EXTERNAL (exp)
3765 /* Loading a static variable is unduly expensive, but global
3766 registers aren't expensive. */
3767 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3768 }
3769 \f
3770 /* The following functions are subroutines to fold_range_test and allow it to
3771 try to change a logical combination of comparisons into a range test.
3772
3773 For example, both
3774 X == 2 || X == 3 || X == 4 || X == 5
3775 and
3776 X >= 2 && X <= 5
3777 are converted to
3778 (unsigned) (X - 2) <= 3
3779
3780 We describe each set of comparisons as being either inside or outside
3781 a range, using a variable named like IN_P, and then describe the
3782 range with a lower and upper bound. If one of the bounds is omitted,
3783 it represents either the highest or lowest value of the type.
3784
3785 In the comments below, we represent a range by two numbers in brackets
3786 preceded by a "+" to designate being inside that range, or a "-" to
3787 designate being outside that range, so the condition can be inverted by
3788 flipping the prefix. An omitted bound is represented by a "-". For
3789 example, "- [-, 10]" means being outside the range starting at the lowest
3790 possible value and ending at 10, in other words, being greater than 10.
3791 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3792 always false.
3793
3794 We set up things so that the missing bounds are handled in a consistent
3795 manner so neither a missing bound nor "true" and "false" need to be
3796 handled using a special case. */
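
/* An illustrative sketch, not part of GCC: the transformation
   described above, spelled out in plain C.  The function name is
   hypothetical; unsigned wrap-around is relied upon, and signed
   overflow at INT_MIN is ignored.  */
#if 0
static int
in_range_2_to_5 (int x)
{
  /* Subtracting 2 shifts the range [2, 5] down to [0, 3]; the
     unsigned comparison then also rejects values below 2, since
     they wrap around to very large unsigned numbers.  */
  int by_equality = (x == 2 || x == 3 || x == 4 || x == 5);
  int by_bounds = (x >= 2 && x <= 5);
  int folded = ((unsigned int) (x - 2) <= 3);
  return by_equality == by_bounds && by_bounds == folded;
}
#endif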
3797
3798 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3799 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3800 and UPPER1_P are nonzero if the respective argument is an upper bound
3801 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3802 must be specified for a comparison. ARG1 will be converted to ARG0's
3803 type if both are specified. */
3804
3805 static tree
3806 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3807 tree arg1, int upper1_p)
3808 {
3809 tree tem;
3810 int result;
3811 int sgn0, sgn1;
3812
3813 /* If neither arg represents infinity, do the normal operation.
3814 Else, if not a comparison, return infinity. Else handle the special
3815 comparison rules. Note that most of the cases below won't occur, but
3816 are handled for consistency. */
3817
3818 if (arg0 != 0 && arg1 != 0)
3819 {
3820 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3821 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3822 STRIP_NOPS (tem);
3823 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3824 }
3825
3826 if (TREE_CODE_CLASS (code) != tcc_comparison)
3827 return 0;
3828
3829   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3830      for neither.  In real mathematics we cannot assume that open-ended
3831      ranges are equal.  But this is computer arithmetic, where numbers
3832      are finite, so we can stand in for any unbounded bound with a value
3833      Z greater than any representable number.  This permits us to treat
3834      unbounded ranges as equal.  */
3835 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3836 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3837 switch (code)
3838 {
3839 case EQ_EXPR:
3840 result = sgn0 == sgn1;
3841 break;
3842 case NE_EXPR:
3843 result = sgn0 != sgn1;
3844 break;
3845 case LT_EXPR:
3846 result = sgn0 < sgn1;
3847 break;
3848 case LE_EXPR:
3849 result = sgn0 <= sgn1;
3850 break;
3851 case GT_EXPR:
3852 result = sgn0 > sgn1;
3853 break;
3854 case GE_EXPR:
3855 result = sgn0 >= sgn1;
3856 break;
3857 default:
3858 gcc_unreachable ();
3859 }
3860
3861 return constant_boolean_node (result, type);
3862 }
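
/* An illustrative sketch, not part of GCC: the SGN encoding used
   above, in plain C.  The function name is hypothetical.  */
#if 0
static int
range_binop_sgn_sketch (void)
{
  /* A missing lower bound behaves like -1 (minus infinity), a missing
     upper bound like +1 (plus infinity), and any finite value like 0.
     Two missing upper bounds therefore compare equal: conceptually,
     +infinity <= +infinity holds.  */
  int sgn0 = 1, sgn1 = 1;
  return sgn0 <= sgn1;
}
#endif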
3863 \f
3864 /* Given EXP, a logical expression, set the range it is testing into
3865 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3866 actually being tested. *PLOW and *PHIGH will be made of the same
3867 type as the returned expression. If EXP is not a comparison, we
3868 will most likely not be returning a useful value and range. Set
3869 *STRICT_OVERFLOW_P to true if the return value is only valid
3870 because signed overflow is undefined; otherwise, do not change
3871 *STRICT_OVERFLOW_P. */
3872
3873 tree
3874 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3875 bool *strict_overflow_p)
3876 {
3877 enum tree_code code;
3878 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3879 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3880 int in_p, n_in_p;
3881 tree low, high, n_low, n_high;
3882 location_t loc = EXPR_LOCATION (exp);
3883
3884 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3885 and see if we can refine the range. Some of the cases below may not
3886 happen, but it doesn't seem worth worrying about this. We "continue"
3887 the outer loop when we've changed something; otherwise we "break"
3888 the switch, which will "break" the while. */
3889
3890 in_p = 0;
3891 low = high = build_int_cst (TREE_TYPE (exp), 0);
3892
3893 while (1)
3894 {
3895 code = TREE_CODE (exp);
3896 exp_type = TREE_TYPE (exp);
3897
3898 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3899 {
3900 if (TREE_OPERAND_LENGTH (exp) > 0)
3901 arg0 = TREE_OPERAND (exp, 0);
3902 if (TREE_CODE_CLASS (code) == tcc_comparison
3903 || TREE_CODE_CLASS (code) == tcc_unary
3904 || TREE_CODE_CLASS (code) == tcc_binary)
3905 arg0_type = TREE_TYPE (arg0);
3906 if (TREE_CODE_CLASS (code) == tcc_binary
3907 || TREE_CODE_CLASS (code) == tcc_comparison
3908 || (TREE_CODE_CLASS (code) == tcc_expression
3909 && TREE_OPERAND_LENGTH (exp) > 1))
3910 arg1 = TREE_OPERAND (exp, 1);
3911 }
3912
3913 switch (code)
3914 {
3915 case TRUTH_NOT_EXPR:
3916 in_p = ! in_p, exp = arg0;
3917 continue;
3918
3919 case EQ_EXPR: case NE_EXPR:
3920 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3921 /* We can only do something if the range is testing for zero
3922 and if the second operand is an integer constant. Note that
3923	     saying something is "in" the range we make is done by
3924	     complementing IN_P, since IN_P is set for the initial case of
3925	     being not equal to zero; "out" is leaving it alone.  */
3926 if (low == 0 || high == 0
3927 || ! integer_zerop (low) || ! integer_zerop (high)
3928 || TREE_CODE (arg1) != INTEGER_CST)
3929 break;
3930
3931 switch (code)
3932 {
3933 case NE_EXPR: /* - [c, c] */
3934 low = high = arg1;
3935 break;
3936 case EQ_EXPR: /* + [c, c] */
3937 in_p = ! in_p, low = high = arg1;
3938 break;
3939 case GT_EXPR: /* - [-, c] */
3940 low = 0, high = arg1;
3941 break;
3942 case GE_EXPR: /* + [c, -] */
3943 in_p = ! in_p, low = arg1, high = 0;
3944 break;
3945 case LT_EXPR: /* - [c, -] */
3946 low = arg1, high = 0;
3947 break;
3948 case LE_EXPR: /* + [-, c] */
3949 in_p = ! in_p, low = 0, high = arg1;
3950 break;
3951 default:
3952 gcc_unreachable ();
3953 }
3954
3955 /* If this is an unsigned comparison, we also know that EXP is
3956 greater than or equal to zero. We base the range tests we make
3957 on that fact, so we record it here so we can parse existing
3958 range tests. We test arg0_type since often the return type
3959 of, e.g. EQ_EXPR, is boolean. */
3960 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3961 {
3962 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3963 in_p, low, high, 1,
3964 build_int_cst (arg0_type, 0),
3965 NULL_TREE))
3966 break;
3967
3968 in_p = n_in_p, low = n_low, high = n_high;
3969
3970 /* If the high bound is missing, but we have a nonzero low
3971 bound, reverse the range so it goes from zero to the low bound
3972 minus 1. */
3973 if (high == 0 && low && ! integer_zerop (low))
3974 {
3975 in_p = ! in_p;
3976 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3977 integer_one_node, 0);
3978 low = build_int_cst (arg0_type, 0);
3979 }
3980 }
3981
3982 exp = arg0;
3983 continue;
3984
3985 case NEGATE_EXPR:
3986 /* (-x) IN [a,b] -> x in [-b, -a] */
3987 n_low = range_binop (MINUS_EXPR, exp_type,
3988 build_int_cst (exp_type, 0),
3989 0, high, 1);
3990 n_high = range_binop (MINUS_EXPR, exp_type,
3991 build_int_cst (exp_type, 0),
3992 0, low, 0);
3993 if (n_high != 0 && TREE_OVERFLOW (n_high))
3994 break;
3995 goto normalize;
3996
3997 case BIT_NOT_EXPR:
3998 /* ~ X -> -X - 1 */
3999 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4000 build_int_cst (exp_type, 1));
4001 SET_EXPR_LOCATION (exp, loc);
4002 continue;
4003
4004 case PLUS_EXPR: case MINUS_EXPR:
4005 if (TREE_CODE (arg1) != INTEGER_CST)
4006 break;
4007
4008 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4009 move a constant to the other side. */
4010 if (!TYPE_UNSIGNED (arg0_type)
4011 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4012 break;
4013
4014 /* If EXP is signed, any overflow in the computation is undefined,
4015 so we don't worry about it so long as our computations on
4016 the bounds don't overflow. For unsigned, overflow is defined
4017 and this is exactly the right thing. */
4018 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4019 arg0_type, low, 0, arg1, 0);
4020 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4021 arg0_type, high, 1, arg1, 0);
4022 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4023 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4024 break;
4025
4026 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4027 *strict_overflow_p = true;
4028
4029 normalize:
4030 /* Check for an unsigned range which has wrapped around the maximum
4031 value thus making n_high < n_low, and normalize it. */
4032 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4033 {
4034 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4035 integer_one_node, 0);
4036 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4037 integer_one_node, 0);
4038
4039 /* If the range is of the form +/- [ x+1, x ], we won't
4040 be able to normalize it. But then, it represents the
4041 whole range or the empty set, so make it
4042 +/- [ -, - ]. */
4043 if (tree_int_cst_equal (n_low, low)
4044 && tree_int_cst_equal (n_high, high))
4045 low = high = 0;
4046 else
4047 in_p = ! in_p;
4048 }
4049 else
4050 low = n_low, high = n_high;
4051
4052 exp = arg0;
4053 continue;
4054
4055 CASE_CONVERT: case NON_LVALUE_EXPR:
4056 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4057 break;
4058
4059 if (! INTEGRAL_TYPE_P (arg0_type)
4060 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4061 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4062 break;
4063
4064 n_low = low, n_high = high;
4065
4066 if (n_low != 0)
4067 n_low = fold_convert_loc (loc, arg0_type, n_low);
4068
4069 if (n_high != 0)
4070 n_high = fold_convert_loc (loc, arg0_type, n_high);
4071
4072
4073	  /* If we're converting ARG0, which has an unsigned type, to EXP's
4074	     signed type, we will be doing the comparison as unsigned.  The
4075	     tests above have already verified that LOW and HIGH
4076	     are both positive.
4077
4078	     So we have to ensure that we will handle large unsigned
4079	     values the same way that the current signed bounds treat
4080	     negative values.  */
4081
4082 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4083 {
4084 tree high_positive;
4085 tree equiv_type;
4086 /* For fixed-point modes, we need to pass the saturating flag
4087 as the 2nd parameter. */
4088 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4089 equiv_type = lang_hooks.types.type_for_mode
4090 (TYPE_MODE (arg0_type),
4091 TYPE_SATURATING (arg0_type));
4092 else
4093 equiv_type = lang_hooks.types.type_for_mode
4094 (TYPE_MODE (arg0_type), 1);
4095
4096 /* A range without an upper bound is, naturally, unbounded.
4097 Since convert would have cropped a very large value, use
4098 the max value for the destination type. */
4099 high_positive
4100 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4101 : TYPE_MAX_VALUE (arg0_type);
4102
4103 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4104 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4105 fold_convert_loc (loc, arg0_type,
4106 high_positive),
4107 build_int_cst (arg0_type, 1));
4108
4109 /* If the low bound is specified, "and" the range with the
4110 range for which the original unsigned value will be
4111 positive. */
4112 if (low != 0)
4113 {
4114 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4115 1, n_low, n_high, 1,
4116 fold_convert_loc (loc, arg0_type,
4117 integer_zero_node),
4118 high_positive))
4119 break;
4120
4121 in_p = (n_in_p == in_p);
4122 }
4123 else
4124 {
4125 /* Otherwise, "or" the range with the range of the input
4126 that will be interpreted as negative. */
4127 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4128 0, n_low, n_high, 1,
4129 fold_convert_loc (loc, arg0_type,
4130 integer_zero_node),
4131 high_positive))
4132 break;
4133
4134 in_p = (in_p != n_in_p);
4135 }
4136 }
4137
4138 exp = arg0;
4139 low = n_low, high = n_high;
4140 continue;
4141
4142 default:
4143 break;
4144 }
4145
4146 break;
4147 }
4148
4149 /* If EXP is a constant, we can evaluate whether this is true or false. */
4150 if (TREE_CODE (exp) == INTEGER_CST)
4151 {
4152 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4153 exp, 0, low, 0))
4154 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4155 exp, 1, high, 1)));
4156 low = high = 0;
4157 exp = 0;
4158 }
4159
4160 *pin_p = in_p, *plow = low, *phigh = high;
4161 return exp;
4162 }
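
/* An illustrative sketch, not part of GCC: two identities make_range
   relies on above, checked in plain C.  Two's complement is assumed
   and overflow is ignored; the function name is hypothetical.  */
#if 0
static int
make_range_identities (int x)
{
  /* The BIT_NOT_EXPR case rewrites ~X as -X - 1.  */
  int not_ok = (~x == -x - 1);
  /* The NEGATE_EXPR case flips a range: -X in [a, b] iff
     X in [-b, -a]; here with a = -5, b = 3.  */
  int neg_ok = ((-x >= -5 && -x <= 3) == (x >= -3 && x <= 5));
  return not_ok && neg_ok;
}
#endif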
4163 \f
4164 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4165 type, TYPE, return an expression to test if EXP is in (or out of, depending
4166 on IN_P) the range. Return 0 if the test couldn't be created. */
4167
4168 tree
4169 build_range_check (location_t loc, tree type, tree exp, int in_p,
4170 tree low, tree high)
4171 {
4172 tree etype = TREE_TYPE (exp), value;
4173
4174 #ifdef HAVE_canonicalize_funcptr_for_compare
4175 /* Disable this optimization for function pointer expressions
4176 on targets that require function pointer canonicalization. */
4177 if (HAVE_canonicalize_funcptr_for_compare
4178 && TREE_CODE (etype) == POINTER_TYPE
4179 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4180 return NULL_TREE;
4181 #endif
4182
4183 if (! in_p)
4184 {
4185 value = build_range_check (loc, type, exp, 1, low, high);
4186 if (value != 0)
4187 return invert_truthvalue_loc (loc, value);
4188
4189 return 0;
4190 }
4191
4192 if (low == 0 && high == 0)
4193 return build_int_cst (type, 1);
4194
4195 if (low == 0)
4196 return fold_build2_loc (loc, LE_EXPR, type, exp,
4197 fold_convert_loc (loc, etype, high));
4198
4199 if (high == 0)
4200 return fold_build2_loc (loc, GE_EXPR, type, exp,
4201 fold_convert_loc (loc, etype, low));
4202
4203 if (operand_equal_p (low, high, 0))
4204 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4205 fold_convert_loc (loc, etype, low));
4206
4207 if (integer_zerop (low))
4208 {
4209 if (! TYPE_UNSIGNED (etype))
4210 {
4211 etype = unsigned_type_for (etype);
4212 high = fold_convert_loc (loc, etype, high);
4213 exp = fold_convert_loc (loc, etype, exp);
4214 }
4215 return build_range_check (loc, type, exp, 1, 0, high);
4216 }
4217
4218 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4219 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4220 {
4221 unsigned HOST_WIDE_INT lo;
4222 HOST_WIDE_INT hi;
4223 int prec;
4224
4225 prec = TYPE_PRECISION (etype);
4226 if (prec <= HOST_BITS_PER_WIDE_INT)
4227 {
4228 hi = 0;
4229 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4230 }
4231 else
4232 {
4233 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4234 lo = (unsigned HOST_WIDE_INT) -1;
4235 }
4236
4237 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4238 {
4239 if (TYPE_UNSIGNED (etype))
4240 {
4241 tree signed_etype = signed_type_for (etype);
4242 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4243 etype
4244 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4245 else
4246 etype = signed_etype;
4247 exp = fold_convert_loc (loc, etype, exp);
4248 }
4249 return fold_build2_loc (loc, GT_EXPR, type, exp,
4250 build_int_cst (etype, 0));
4251 }
4252 }
4253
4254 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4255      This requires wrap-around arithmetic for the type of the expression.
4256      First make sure that arithmetic in this type is valid, then make sure
4257 that it wraps around. */
4258 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4259 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4260 TYPE_UNSIGNED (etype));
4261
4262 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4263 {
4264 tree utype, minv, maxv;
4265
4266 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4267 for the type in question, as we rely on this here. */
4268 utype = unsigned_type_for (etype);
4269 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4270 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4271 integer_one_node, 1);
4272 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4273
4274 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4275 minv, 1, maxv, 1)))
4276 etype = utype;
4277 else
4278 return 0;
4279 }
4280
4281 high = fold_convert_loc (loc, etype, high);
4282 low = fold_convert_loc (loc, etype, low);
4283 exp = fold_convert_loc (loc, etype, exp);
4284
4285 value = const_binop (MINUS_EXPR, high, low);
4286
4287
4288 if (POINTER_TYPE_P (etype))
4289 {
4290 if (value != 0 && !TREE_OVERFLOW (value))
4291 {
4292 low = fold_convert_loc (loc, sizetype, low);
4293 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
4294 return build_range_check (loc, type,
4295 fold_build2_loc (loc, POINTER_PLUS_EXPR,
4296 etype, exp, low),
4297 1, build_int_cst (etype, 0), value);
4298 }
4299 return 0;
4300 }
4301
4302 if (value != 0 && !TREE_OVERFLOW (value))
4303 return build_range_check (loc, type,
4304 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4305 1, build_int_cst (etype, 0), value);
4306
4307 return 0;
4308 }
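
/* An illustrative sketch, not part of GCC: the "(c>=1) && (c<=127)
   into (signed char)c > 0" special case above, checked in plain C.
   An 8-bit two's-complement char is assumed; the function name is
   hypothetical.  */
#if 0
static int
signed_char_range_example (unsigned char c)
{
  /* 0 fails both forms, 1..127 pass both, and 128..255 fail both,
     because the cast makes them negative.  */
  return ((c >= 1 && c <= 127) == ((signed char) c > 0));
}
#endif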
4309 \f
4310 /* Return the predecessor of VAL in its type, handling the infinite case. */
4311
4312 static tree
4313 range_predecessor (tree val)
4314 {
4315 tree type = TREE_TYPE (val);
4316
4317 if (INTEGRAL_TYPE_P (type)
4318 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4319 return 0;
4320 else
4321 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4322 }
4323
4324 /* Return the successor of VAL in its type, handling the infinite case. */
4325
4326 static tree
4327 range_successor (tree val)
4328 {
4329 tree type = TREE_TYPE (val);
4330
4331 if (INTEGRAL_TYPE_P (type)
4332 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4333 return 0;
4334 else
4335 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4336 }
4337
4338 /* Given two ranges, see if we can merge them into one. Return 1 if we
4339 can, 0 if we can't. Set the output range into the specified parameters. */
4340
4341 bool
4342 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4343 tree high0, int in1_p, tree low1, tree high1)
4344 {
4345 int no_overlap;
4346 int subset;
4347 int temp;
4348 tree tem;
4349 int in_p;
4350 tree low, high;
4351 int lowequal = ((low0 == 0 && low1 == 0)
4352 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4353 low0, 0, low1, 0)));
4354 int highequal = ((high0 == 0 && high1 == 0)
4355 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4356 high0, 1, high1, 1)));
4357
4358 /* Make range 0 be the range that starts first, or ends last if they
4359 start at the same value. Swap them if it isn't. */
4360 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4361 low0, 0, low1, 0))
4362 || (lowequal
4363 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4364 high1, 1, high0, 1))))
4365 {
4366 temp = in0_p, in0_p = in1_p, in1_p = temp;
4367 tem = low0, low0 = low1, low1 = tem;
4368 tem = high0, high0 = high1, high1 = tem;
4369 }
4370
4371 /* Now flag two cases, whether the ranges are disjoint or whether the
4372 second range is totally subsumed in the first. Note that the tests
4373 below are simplified by the ones above. */
4374 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4375 high0, 1, low1, 0));
4376 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4377 high1, 1, high0, 1));
4378
4379 /* We now have four cases, depending on whether we are including or
4380 excluding the two ranges. */
4381 if (in0_p && in1_p)
4382 {
4383 /* If they don't overlap, the result is false. If the second range
4384 is a subset it is the result. Otherwise, the range is from the start
4385 of the second to the end of the first. */
4386 if (no_overlap)
4387 in_p = 0, low = high = 0;
4388 else if (subset)
4389 in_p = 1, low = low1, high = high1;
4390 else
4391 in_p = 1, low = low1, high = high0;
4392 }
4393
4394 else if (in0_p && ! in1_p)
4395 {
4396 /* If they don't overlap, the result is the first range. If they are
4397 equal, the result is false. If the second range is a subset of the
4398 first, and the ranges begin at the same place, we go from just after
4399 the end of the second range to the end of the first. If the second
4400 range is not a subset of the first, or if it is a subset and both
4401 ranges end at the same place, the range starts at the start of the
4402 first range and ends just before the second range.
4403 Otherwise, we can't describe this as a single range. */
4404 if (no_overlap)
4405 in_p = 1, low = low0, high = high0;
4406 else if (lowequal && highequal)
4407 in_p = 0, low = high = 0;
4408 else if (subset && lowequal)
4409 {
4410 low = range_successor (high1);
4411 high = high0;
4412 in_p = 1;
4413 if (low == 0)
4414 {
4415 /* We are in the weird situation where high0 > high1 but
4416 high1 has no successor. Punt. */
4417 return 0;
4418 }
4419 }
4420 else if (! subset || highequal)
4421 {
4422 low = low0;
4423 high = range_predecessor (low1);
4424 in_p = 1;
4425 if (high == 0)
4426 {
4427 /* low0 < low1 but low1 has no predecessor. Punt. */
4428 return 0;
4429 }
4430 }
4431 else
4432 return 0;
4433 }
4434
4435 else if (! in0_p && in1_p)
4436 {
4437 /* If they don't overlap, the result is the second range. If the second
4438 is a subset of the first, the result is false. Otherwise,
4439 the range starts just after the first range and ends at the
4440 end of the second. */
4441 if (no_overlap)
4442 in_p = 1, low = low1, high = high1;
4443 else if (subset || highequal)
4444 in_p = 0, low = high = 0;
4445 else
4446 {
4447 low = range_successor (high0);
4448 high = high1;
4449 in_p = 1;
4450 if (low == 0)
4451 {
4452 /* high1 > high0 but high0 has no successor. Punt. */
4453 return 0;
4454 }
4455 }
4456 }
4457
4458 else
4459 {
4460 /* The case where we are excluding both ranges. Here the complex case
4461 is if they don't overlap. In that case, the only time we have a
4462 range is if they are adjacent. If the second is a subset of the
4463 first, the result is the first. Otherwise, the range to exclude
4464 starts at the beginning of the first range and ends at the end of the
4465 second. */
4466 if (no_overlap)
4467 {
4468 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4469 range_successor (high0),
4470 1, low1, 0)))
4471 in_p = 0, low = low0, high = high1;
4472 else
4473 {
4474 /* Canonicalize - [min, x] into - [-, x]. */
4475 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4476 switch (TREE_CODE (TREE_TYPE (low0)))
4477 {
4478 case ENUMERAL_TYPE:
4479 if (TYPE_PRECISION (TREE_TYPE (low0))
4480 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4481 break;
4482 /* FALLTHROUGH */
4483 case INTEGER_TYPE:
4484 if (tree_int_cst_equal (low0,
4485 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4486 low0 = 0;
4487 break;
4488 case POINTER_TYPE:
4489 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4490 && integer_zerop (low0))
4491 low0 = 0;
4492 break;
4493 default:
4494 break;
4495 }
4496
4497 /* Canonicalize - [x, max] into - [x, -]. */
4498 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4499 switch (TREE_CODE (TREE_TYPE (high1)))
4500 {
4501 case ENUMERAL_TYPE:
4502 if (TYPE_PRECISION (TREE_TYPE (high1))
4503 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4504 break;
4505 /* FALLTHROUGH */
4506 case INTEGER_TYPE:
4507 if (tree_int_cst_equal (high1,
4508 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4509 high1 = 0;
4510 break;
4511 case POINTER_TYPE:
4512 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4513 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4514 high1, 1,
4515 integer_one_node, 1)))
4516 high1 = 0;
4517 break;
4518 default:
4519 break;
4520 }
4521
4522	      /* The ranges might also be adjacent between the maximum and
4523 minimum values of the given type. For
4524 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4525 return + [x + 1, y - 1]. */
4526 if (low0 == 0 && high1 == 0)
4527 {
4528 low = range_successor (high0);
4529 high = range_predecessor (low1);
4530 if (low == 0 || high == 0)
4531 return 0;
4532
4533 in_p = 1;
4534 }
4535 else
4536 return 0;
4537 }
4538 }
4539 else if (subset)
4540 in_p = 0, low = low0, high = high0;
4541 else
4542 in_p = 0, low = low0, high = high1;
4543 }
4544
4545 *pin_p = in_p, *plow = low, *phigh = high;
4546 return 1;
4547 }
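
/* An illustrative sketch, not part of GCC: what merging two "inside"
   ranges means, in plain C.  The function name is hypothetical.  */
#if 0
static int
merge_ranges_example (int x)
{
  /* ANDing two inside tests intersects the ranges:
     + [2, 9] combined with + [5, 20] gives + [5, 9].  */
  return ((x >= 2 && x <= 9) && (x >= 5 && x <= 20))
	 == (x >= 5 && x <= 9);
}
#endif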
4548 \f
4549
4550 /* Subroutine of fold, looking inside expressions of the form
4551 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4552    of the COND_EXPR.  This function is also used to optimize
4553 A op B ? C : A, by reversing the comparison first.
4554
4555 Return a folded expression whose code is not a COND_EXPR
4556 anymore, or NULL_TREE if no folding opportunity is found. */
4557
4558 static tree
4559 fold_cond_expr_with_comparison (location_t loc, tree type,
4560 tree arg0, tree arg1, tree arg2)
4561 {
4562 enum tree_code comp_code = TREE_CODE (arg0);
4563 tree arg00 = TREE_OPERAND (arg0, 0);
4564 tree arg01 = TREE_OPERAND (arg0, 1);
4565 tree arg1_type = TREE_TYPE (arg1);
4566 tree tem;
4567
4568 STRIP_NOPS (arg1);
4569 STRIP_NOPS (arg2);
4570
4571 /* If we have A op 0 ? A : -A, consider applying the following
4572 transformations:
4573
4574 A == 0? A : -A same as -A
4575 A != 0? A : -A same as A
4576 A >= 0? A : -A same as abs (A)
4577 A > 0? A : -A same as abs (A)
4578 A <= 0? A : -A same as -abs (A)
4579 A < 0? A : -A same as -abs (A)
4580
4581 None of these transformations work for modes with signed
4582 zeros. If A is +/-0, the first two transformations will
4583 change the sign of the result (from +0 to -0, or vice
4584 versa). The last four will fix the sign of the result,
4585 even though the original expressions could be positive or
4586 negative, depending on the sign of A.
4587
4588 Note that all these transformations are correct if A is
4589 NaN, since the two alternatives (A and -A) are also NaNs. */
4590 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4591 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4592 ? real_zerop (arg01)
4593 : integer_zerop (arg01))
4594 && ((TREE_CODE (arg2) == NEGATE_EXPR
4595 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4596 /* In the case that A is of the form X-Y, '-A' (arg2) may
4597 have already been folded to Y-X, check for that. */
4598 || (TREE_CODE (arg1) == MINUS_EXPR
4599 && TREE_CODE (arg2) == MINUS_EXPR
4600 && operand_equal_p (TREE_OPERAND (arg1, 0),
4601 TREE_OPERAND (arg2, 1), 0)
4602 && operand_equal_p (TREE_OPERAND (arg1, 1),
4603 TREE_OPERAND (arg2, 0), 0))))
4604 switch (comp_code)
4605 {
4606 case EQ_EXPR:
4607 case UNEQ_EXPR:
4608 tem = fold_convert_loc (loc, arg1_type, arg1);
4609 return pedantic_non_lvalue_loc (loc,
4610 fold_convert_loc (loc, type,
4611 negate_expr (tem)));
4612 case NE_EXPR:
4613 case LTGT_EXPR:
4614 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4615 case UNGE_EXPR:
4616 case UNGT_EXPR:
4617 if (flag_trapping_math)
4618 break;
4619 /* Fall through. */
4620 case GE_EXPR:
4621 case GT_EXPR:
4622 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4623 arg1 = fold_convert_loc (loc, signed_type_for
4624 (TREE_TYPE (arg1)), arg1);
4625 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4626 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4627 case UNLE_EXPR:
4628 case UNLT_EXPR:
4629 if (flag_trapping_math)
4630 break;
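	/* Fall through.  */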
4631 case LE_EXPR:
4632 case LT_EXPR:
4633 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4634 arg1 = fold_convert_loc (loc, signed_type_for
4635 (TREE_TYPE (arg1)), arg1);
4636 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4637 return negate_expr (fold_convert_loc (loc, type, tem));
4638 default:
4639 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4640 break;
4641 }
4642
4643 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4644 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4645 both transformations are correct when A is NaN: A != 0
4646 is then true, and A == 0 is false. */
4647
4648 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4649 && integer_zerop (arg01) && integer_zerop (arg2))
4650 {
4651 if (comp_code == NE_EXPR)
4652 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4653 else if (comp_code == EQ_EXPR)
4654 return build_int_cst (type, 0);
4655 }
4656
4657 /* Try some transformations of A op B ? A : B.
4658
4659 A == B? A : B same as B
4660 A != B? A : B same as A
4661 A >= B? A : B same as max (A, B)
4662 A > B? A : B same as max (B, A)
4663 A <= B? A : B same as min (A, B)
4664 A < B? A : B same as min (B, A)
4665
4666 As above, these transformations don't work in the presence
4667 of signed zeros. For example, if A and B are zeros of
4668 opposite sign, the first two transformations will change
4669 the sign of the result. In the last four, the original
4670 expressions give different results for (A=+0, B=-0) and
4671 (A=-0, B=+0), but the transformed expressions do not.
4672
4673 The first two transformations are correct if either A or B
4674 is a NaN. In the first transformation, the condition will
4675 be false, and B will indeed be chosen. In the case of the
4676 second transformation, the condition A != B will be true,
4677 and A will be chosen.
4678
4679 The conversions to max() and min() are not correct if B is
4680 a number and A is not. The conditions in the original
4681 expressions will be false, so all four give B. The min()
4682 and max() versions would give a NaN instead. */
4683 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4684 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4685 /* Avoid these transformations if the COND_EXPR may be used
4686 as an lvalue in the C++ front-end. PR c++/19199. */
4687 && (in_gimple_form
4688 || (strcmp (lang_hooks.name, "GNU C++") != 0
4689 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4690 || ! maybe_lvalue_p (arg1)
4691 || ! maybe_lvalue_p (arg2)))
4692 {
4693 tree comp_op0 = arg00;
4694 tree comp_op1 = arg01;
4695 tree comp_type = TREE_TYPE (comp_op0);
4696
4697 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4698 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4699 {
4700 comp_type = type;
4701 comp_op0 = arg1;
4702 comp_op1 = arg2;
4703 }
4704
4705 switch (comp_code)
4706 {
4707 case EQ_EXPR:
4708 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4709 case NE_EXPR:
4710 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4711 case LE_EXPR:
4712 case LT_EXPR:
4713 case UNLE_EXPR:
4714 case UNLT_EXPR:
4715 /* In C++ a ?: expression can be an lvalue, so put the
4716 operand which will be used if they are equal first
4717 so that we can convert this back to the
4718 corresponding COND_EXPR. */
4719 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4720 {
4721 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4722 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4723 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4724 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4725 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4726 comp_op1, comp_op0);
4727 return pedantic_non_lvalue_loc (loc,
4728 fold_convert_loc (loc, type, tem));
4729 }
4730 break;
4731 case GE_EXPR:
4732 case GT_EXPR:
4733 case UNGE_EXPR:
4734 case UNGT_EXPR:
4735 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4736 {
4737 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4738 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4739 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4740 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4741 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4742 comp_op1, comp_op0);
4743 return pedantic_non_lvalue_loc (loc,
4744 fold_convert_loc (loc, type, tem));
4745 }
4746 break;
4747 case UNEQ_EXPR:
4748 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4749 return pedantic_non_lvalue_loc (loc,
4750 fold_convert_loc (loc, type, arg2));
4751 break;
4752 case LTGT_EXPR:
4753 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4754 return pedantic_non_lvalue_loc (loc,
4755 fold_convert_loc (loc, type, arg1));
4756 break;
4757 default:
4758 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4759 break;
4760 }
4761 }
4762
4763 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4764 we might still be able to simplify this. For example,
4765 if C1 is one less or one more than C2, this might have started
4766 out as a MIN or MAX and been transformed by this function.
4767 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4768
4769 if (INTEGRAL_TYPE_P (type)
4770 && TREE_CODE (arg01) == INTEGER_CST
4771 && TREE_CODE (arg2) == INTEGER_CST)
4772 switch (comp_code)
4773 {
4774 case EQ_EXPR:
4775 if (TREE_CODE (arg1) == INTEGER_CST)
4776 break;
4777 /* We can replace A with C1 in this case. */
4778 arg1 = fold_convert_loc (loc, type, arg01);
4779 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4780
4781 case LT_EXPR:
4782 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4783 MIN_EXPR, to preserve the signedness of the comparison. */
4784 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4785 OEP_ONLY_CONST)
4786 && operand_equal_p (arg01,
4787 const_binop (PLUS_EXPR, arg2,
4788 build_int_cst (type, 1)),
4789 OEP_ONLY_CONST))
4790 {
4791 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4792 fold_convert_loc (loc, TREE_TYPE (arg00),
4793 arg2));
4794 return pedantic_non_lvalue_loc (loc,
4795 fold_convert_loc (loc, type, tem));
4796 }
4797 break;
4798
4799 case LE_EXPR:
4800 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4801 as above. */
4802 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4803 OEP_ONLY_CONST)
4804 && operand_equal_p (arg01,
4805 const_binop (MINUS_EXPR, arg2,
4806 build_int_cst (type, 1)),
4807 OEP_ONLY_CONST))
4808 {
4809 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4810 fold_convert_loc (loc, TREE_TYPE (arg00),
4811 arg2));
4812 return pedantic_non_lvalue_loc (loc,
4813 fold_convert_loc (loc, type, tem));
4814 }
4815 break;
4816
4817 case GT_EXPR:
4818 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4819 MAX_EXPR, to preserve the signedness of the comparison. */
4820 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4821 OEP_ONLY_CONST)
4822 && operand_equal_p (arg01,
4823 const_binop (MINUS_EXPR, arg2,
4824 build_int_cst (type, 1)),
4825 OEP_ONLY_CONST))
4826 {
4827 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4828 fold_convert_loc (loc, TREE_TYPE (arg00),
4829 arg2));
4830 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4831 }
4832 break;
4833
4834 case GE_EXPR:
4835 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4836 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4837 OEP_ONLY_CONST)
4838 && operand_equal_p (arg01,
4839 const_binop (PLUS_EXPR, arg2,
4840 build_int_cst (type, 1)),
4841 OEP_ONLY_CONST))
4842 {
4843 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4844 fold_convert_loc (loc, TREE_TYPE (arg00),
4845 arg2));
4846 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4847 }
4848 break;
4849 case NE_EXPR:
4850 break;
4851 default:
4852 gcc_unreachable ();
4853 }
4854
4855 return NULL_TREE;
4856 }
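
/* An illustrative sketch, not part of GCC: two of the A op B ? A : B
   transformations above, checked in plain C for integers, where there
   are no signed zeros or NaNs to worry about.  The function name is
   hypothetical.  */
#if 0
static int
cond_expr_identities (int a, int b)
{
  /* A >= 0 ? A : -A is abs (A), and A >= B ? A : B equals
     A > B ? A : B, since the two differ only when A == B, where
     both pick the same value.  */
  int abs_ok = ((a >= 0 ? a : -a) == (a < 0 ? -a : a));
  int max_ok = ((a >= b ? a : b) == (a > b ? a : b));
  return abs_ok && max_ok;
}
#endif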
4857
4858
4859 \f
4860 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4861 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4862 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4863 false) >= 2)
4864 #endif
4865
4866 /* EXP is some logical combination of boolean tests. See if we can
4867 merge it into some range test. Return the new tree if so. */
4868
4869 static tree
4870 fold_range_test (location_t loc, enum tree_code code, tree type,
4871 tree op0, tree op1)
4872 {
4873 int or_op = (code == TRUTH_ORIF_EXPR
4874 || code == TRUTH_OR_EXPR);
4875 int in0_p, in1_p, in_p;
4876 tree low0, low1, low, high0, high1, high;
4877 bool strict_overflow_p = false;
4878 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4879 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4880 tree tem;
4881 const char * const warnmsg = G_("assuming signed overflow does not occur "
4882 "when simplifying range test");
4883
4884 /* If this is an OR operation, invert both sides; we will invert
4885 again at the end. */
4886 if (or_op)
4887 in0_p = ! in0_p, in1_p = ! in1_p;
4888
4889 /* If both expressions are the same, if we can merge the ranges, and we
4890 can build the range test, return it or it inverted. If one of the
4891 ranges is always true or always false, consider it to be the same
4892 expression as the other. */
4893 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4894 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4895 in1_p, low1, high1)
4896 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
4897 lhs != 0 ? lhs
4898 : rhs != 0 ? rhs : integer_zero_node,
4899 in_p, low, high))))
4900 {
4901 if (strict_overflow_p)
4902 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4903 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4904 }
4905
4906 /* On machines where the branch cost is expensive, if this is a
4907 short-circuited branch and the underlying object on both sides
4908 is the same, make a non-short-circuit operation. */
4909 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4910 && lhs != 0 && rhs != 0
4911 && (code == TRUTH_ANDIF_EXPR
4912 || code == TRUTH_ORIF_EXPR)
4913 && operand_equal_p (lhs, rhs, 0))
4914 {
4915 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4916 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4917 which cases we can't do this. */
4918 if (simple_operand_p (lhs))
4919 {
4920 tem = build2 (code == TRUTH_ANDIF_EXPR
4921 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4922 type, op0, op1);
4923 SET_EXPR_LOCATION (tem, loc);
4924 return tem;
4925 }
4926
4927 else if (lang_hooks.decls.global_bindings_p () == 0
4928 && ! CONTAINS_PLACEHOLDER_P (lhs))
4929 {
4930 tree common = save_expr (lhs);
4931
4932 if (0 != (lhs = build_range_check (loc, type, common,
4933 or_op ? ! in0_p : in0_p,
4934 low0, high0))
4935 && (0 != (rhs = build_range_check (loc, type, common,
4936 or_op ? ! in1_p : in1_p,
4937 low1, high1))))
4938 {
4939 if (strict_overflow_p)
4940 fold_overflow_warning (warnmsg,
4941 WARN_STRICT_OVERFLOW_COMPARISON);
4942 tem = build2 (code == TRUTH_ANDIF_EXPR
4943 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4944 type, lhs, rhs);
4945 SET_EXPR_LOCATION (tem, loc);
4946 return tem;
4947 }
4948 }
4949 }
4950
4951 return 0;
4952 }
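
/* An illustrative sketch, not part of GCC: the non-short-circuit
   rewrite above, in plain C.  The function name is hypothetical.  */
#if 0
static int
non_short_circuit_example (int x)
{
  /* When both operands test the same object and have no side effects,
     TRUTH_ANDIF can be evaluated as a plain TRUTH_AND, trading a
     branch for a bitwise AND of the two 0/1 results.  */
  return ((x >= 'a' && x <= 'z') == ((x >= 'a') & (x <= 'z')));
}
#endif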
4953 \f
4954 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4955 bit value. Arrange things so the extra bits will be set to zero if and
4956    only if C is sign-extended to its full width.  If MASK is nonzero,
4957 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4958
4959 static tree
4960 unextend (tree c, int p, int unsignedp, tree mask)
4961 {
4962 tree type = TREE_TYPE (c);
4963 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4964 tree temp;
4965
4966 if (p == modesize || unsignedp)
4967 return c;
4968
4969 /* We work by getting just the sign bit into the low-order bit, then
4970 into the high-order bit, then sign-extend. We then XOR that value
4971 with C. */
4972 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4973 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4974
4975 /* We must use a signed type in order to get an arithmetic right shift.
4976 However, we must also avoid introducing accidental overflows, so that
4977 a subsequent call to integer_zerop will work. Hence we must
4978 do the type conversion here. At this point, the constant is either
4979 zero or one, and the conversion to a signed type can never overflow.
4980 We could get an overflow if this conversion is done anywhere else. */
4981 if (TYPE_UNSIGNED (type))
4982 temp = fold_convert (signed_type_for (type), temp);
4983
4984 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4985 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4986 if (mask != 0)
4987 temp = const_binop (BIT_AND_EXPR, temp,
4988 fold_convert (TREE_TYPE (c), mask));
4989 /* If necessary, convert the type back to match the type of C. */
4990 if (TYPE_UNSIGNED (type))
4991 temp = fold_convert (type, temp);
4992
4993 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4994 }
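
/* An illustrative sketch, not part of GCC: textbook sign extension of
   a P-bit field via the sign-bit XOR trick, close in spirit to the
   shifting done above.  The function name is hypothetical, and
   1 <= P < 32 is assumed so the shifts stay in range.  */
#if 0
static int
sign_extend_p_bits (unsigned int c, int p)
{
  unsigned int sign = 1u << (p - 1);	    /* sign bit of the field */
  unsigned int field = c & ((sign << 1) - 1);   /* low P bits of C */
  /* XORing with the sign bit and subtracting it back propagates the
     sign into the high-order bits.  */
  return (int) (field ^ sign) - (int) sign;
}
#endif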
4995 \f
4996 /* For an expression that has the form
4997 (A && B) || ~B
4998 or
4999 (A || B) && ~B,
5000 we can drop one of the inner expressions and simplify to
5001 A || ~B
5002 or
5003 A && ~B
5004 LOC is the location of the resulting expression. OP is the inner
5005 logical operation; the left-hand side in the examples above, while CMPOP
5006 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5007 removing a condition that guards another, as in
5008 (A != NULL && A->...) || A == NULL
5009 which we must not transform. If RHS_ONLY is true, only eliminate the
5010 right-most operand of the inner logical operation. */
5011
5012 static tree
5013 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5014 bool rhs_only)
5015 {
5016 tree type = TREE_TYPE (cmpop);
5017 enum tree_code code = TREE_CODE (cmpop);
5018 enum tree_code truthop_code = TREE_CODE (op);
5019 tree lhs = TREE_OPERAND (op, 0);
5020 tree rhs = TREE_OPERAND (op, 1);
5021 tree orig_lhs = lhs, orig_rhs = rhs;
5022 enum tree_code rhs_code = TREE_CODE (rhs);
5023 enum tree_code lhs_code = TREE_CODE (lhs);
5024 enum tree_code inv_code;
5025
5026 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5027 return NULL_TREE;
5028
5029 if (TREE_CODE_CLASS (code) != tcc_comparison)
5030 return NULL_TREE;
5031
5032 if (rhs_code == truthop_code)
5033 {
5034 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5035 if (newrhs != NULL_TREE)
5036 {
5037 rhs = newrhs;
5038 rhs_code = TREE_CODE (rhs);
5039 }
5040 }
5041 if (lhs_code == truthop_code && !rhs_only)
5042 {
5043 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5044 if (newlhs != NULL_TREE)
5045 {
5046 lhs = newlhs;
5047 lhs_code = TREE_CODE (lhs);
5048 }
5049 }
5050
5051 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5052 if (inv_code == rhs_code
5053 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5054 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5055 return lhs;
5056 if (!rhs_only && inv_code == lhs_code
5057 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5058 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5059 return rhs;
5060 if (rhs != orig_rhs || lhs != orig_lhs)
5061 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5062 lhs, rhs);
5063 return NULL_TREE;
5064 }
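
/* An illustrative sketch, not part of GCC: the simplification this
   function performs, as a truth-table identity in plain C.  The
   function name is hypothetical.  */
#if 0
static int
opposite_arm_example (int a, int b)
{
  /* Whenever B is false the right-hand arm fires, and whenever B is
     true the inner B is redundant, so (A && B) || !B is A || !B.  */
  return (((a && b) || !b) == (a || !b));
}
#endif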
5065
5066 /* Find ways of folding logical expressions of LHS and RHS:
5067 Try to merge two comparisons to the same innermost item.
5068 Look for range tests like "ch >= '0' && ch <= '9'".
5069 Look for combinations of simple terms on machines with expensive branches
5070 and evaluate the RHS unconditionally.
5071
5072 For example, if we have p->a == 2 && p->b == 4 and we can make an
5073 object large enough to span both A and B, we can do this with a comparison
5074 against the object ANDed with the a mask.
5075
5076 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5077 operations to do this with one comparison.
5078
5079    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5080    function and the one above.
5081
5082 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5083 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5084
5085 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5086 two operands.
5087
5088 We return the simplified tree or 0 if no optimization is possible. */
5089
5090 static tree
5091 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5092 tree lhs, tree rhs)
5093 {
5094 /* If this is the "or" of two comparisons, we can do something if
5095 the comparisons are NE_EXPR. If this is the "and", we can do something
5096 if the comparisons are EQ_EXPR. I.e.,
5097 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5098
5099 WANTED_CODE is this operation code. For single bit fields, we can
5100 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5101 comparison for one-bit fields. */
5102
5103 enum tree_code wanted_code;
5104 enum tree_code lcode, rcode;
5105 tree ll_arg, lr_arg, rl_arg, rr_arg;
5106 tree ll_inner, lr_inner, rl_inner, rr_inner;
5107 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5108 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5109 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5110 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5111 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5112 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5113 enum machine_mode lnmode, rnmode;
5114 tree ll_mask, lr_mask, rl_mask, rr_mask;
5115 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5116 tree l_const, r_const;
5117 tree lntype, rntype, result;
5118 HOST_WIDE_INT first_bit, end_bit;
5119 int volatilep;
5120 tree orig_lhs = lhs, orig_rhs = rhs;
5121 enum tree_code orig_code = code;
5122
5123 /* Start by getting the comparison codes. Fail if anything is volatile.
5124 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5125 it were surrounded with a NE_EXPR. */
5126
5127 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5128 return 0;
5129
5130 lcode = TREE_CODE (lhs);
5131 rcode = TREE_CODE (rhs);
5132
5133 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5134 {
5135 lhs = build2 (NE_EXPR, truth_type, lhs,
5136 build_int_cst (TREE_TYPE (lhs), 0));
5137 lcode = NE_EXPR;
5138 }
5139
5140 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5141 {
5142 rhs = build2 (NE_EXPR, truth_type, rhs,
5143 build_int_cst (TREE_TYPE (rhs), 0));
5144 rcode = NE_EXPR;
5145 }
5146
5147 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5148 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5149 return 0;
5150
5151 ll_arg = TREE_OPERAND (lhs, 0);
5152 lr_arg = TREE_OPERAND (lhs, 1);
5153 rl_arg = TREE_OPERAND (rhs, 0);
5154 rr_arg = TREE_OPERAND (rhs, 1);
5155
5156 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5157 if (simple_operand_p (ll_arg)
5158 && simple_operand_p (lr_arg))
5159 {
5160 tree result;
5161 if (operand_equal_p (ll_arg, rl_arg, 0)
5162 && operand_equal_p (lr_arg, rr_arg, 0))
5163 {
5164 result = combine_comparisons (loc, code, lcode, rcode,
5165 truth_type, ll_arg, lr_arg);
5166 if (result)
5167 return result;
5168 }
5169 else if (operand_equal_p (ll_arg, rr_arg, 0)
5170 && operand_equal_p (lr_arg, rl_arg, 0))
5171 {
5172 result = combine_comparisons (loc, code, lcode,
5173 swap_tree_comparison (rcode),
5174 truth_type, ll_arg, lr_arg);
5175 if (result)
5176 return result;
5177 }
5178 }
5179
5180 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5181 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5182
5183 /* If the RHS can be evaluated unconditionally and its operands are
5184 simple, it wins to evaluate the RHS unconditionally on machines
5185 with expensive branches. In this case, this isn't a comparison
5186 that can be merged. Avoid doing this if the RHS is a floating-point
5187 comparison since those can trap. */
5188
5189 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5190 false) >= 2
5191 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5192 && simple_operand_p (rl_arg)
5193 && simple_operand_p (rr_arg))
5194 {
5195 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5196 if (code == TRUTH_OR_EXPR
5197 && lcode == NE_EXPR && integer_zerop (lr_arg)
5198 && rcode == NE_EXPR && integer_zerop (rr_arg)
5199 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5200 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5201 {
5202 result = build2 (NE_EXPR, truth_type,
5203 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5204 ll_arg, rl_arg),
5205 build_int_cst (TREE_TYPE (ll_arg), 0));
5206 goto fold_truthop_exit;
5207 }
5208
5209 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5210 if (code == TRUTH_AND_EXPR
5211 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5212 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5213 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5214 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5215 {
5216 result = build2 (EQ_EXPR, truth_type,
5217 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5218 ll_arg, rl_arg),
5219 build_int_cst (TREE_TYPE (ll_arg), 0));
5220 goto fold_truthop_exit;
5221 }
5222
5223 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5224 {
5225 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5226 {
5227 result = build2 (code, truth_type, lhs, rhs);
5228 goto fold_truthop_exit;
5229 }
5230 return NULL_TREE;
5231 }
5232 }
5233
5234 /* See if the comparisons can be merged. Then get all the parameters for
5235 each side. */
5236
5237 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5238 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5239 return 0;
5240
5241 volatilep = 0;
5242 ll_inner = decode_field_reference (loc, ll_arg,
5243 &ll_bitsize, &ll_bitpos, &ll_mode,
5244 &ll_unsignedp, &volatilep, &ll_mask,
5245 &ll_and_mask);
5246 lr_inner = decode_field_reference (loc, lr_arg,
5247 &lr_bitsize, &lr_bitpos, &lr_mode,
5248 &lr_unsignedp, &volatilep, &lr_mask,
5249 &lr_and_mask);
5250 rl_inner = decode_field_reference (loc, rl_arg,
5251 &rl_bitsize, &rl_bitpos, &rl_mode,
5252 &rl_unsignedp, &volatilep, &rl_mask,
5253 &rl_and_mask);
5254 rr_inner = decode_field_reference (loc, rr_arg,
5255 &rr_bitsize, &rr_bitpos, &rr_mode,
5256 &rr_unsignedp, &volatilep, &rr_mask,
5257 &rr_and_mask);
5258
5259   /* The inner operation on the lhs of each comparison must be the
5260      same if we are to be able to do anything.
5261 Then see if we have constants. If not, the same must be true for
5262 the rhs's. */
5263 if (volatilep || ll_inner == 0 || rl_inner == 0
5264 || ! operand_equal_p (ll_inner, rl_inner, 0))
5265 return 0;
5266
5267 if (TREE_CODE (lr_arg) == INTEGER_CST
5268 && TREE_CODE (rr_arg) == INTEGER_CST)
5269 l_const = lr_arg, r_const = rr_arg;
5270 else if (lr_inner == 0 || rr_inner == 0
5271 || ! operand_equal_p (lr_inner, rr_inner, 0))
5272 return 0;
5273 else
5274 l_const = r_const = 0;
5275
5276 /* If either comparison code is not correct for our logical operation,
5277 fail. However, we can convert a one-bit comparison against zero into
5278 the opposite comparison against that bit being set in the field. */
5279
5280 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5281 if (lcode != wanted_code)
5282 {
5283 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5284 {
5285 /* Make the left operand unsigned, since we are only interested
5286 in the value of one bit. Otherwise we are doing the wrong
5287 thing below. */
5288 ll_unsignedp = 1;
5289 l_const = ll_mask;
5290 }
5291 else
5292 return 0;
5293 }
5294
5295 /* This is analogous to the code for l_const above. */
5296 if (rcode != wanted_code)
5297 {
5298 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5299 {
5300 rl_unsignedp = 1;
5301 r_const = rl_mask;
5302 }
5303 else
5304 return 0;
5305 }
5306
5307 /* See if we can find a mode that contains both fields being compared on
5308 the left. If we can't, fail. Otherwise, update all constants and masks
5309 to be relative to a field of that size. */
5310 first_bit = MIN (ll_bitpos, rl_bitpos);
5311 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5312 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5313 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5314 volatilep);
5315 if (lnmode == VOIDmode)
5316 return 0;
5317
5318 lnbitsize = GET_MODE_BITSIZE (lnmode);
5319 lnbitpos = first_bit & ~ (lnbitsize - 1);
5320 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5321 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5322
5323 if (BYTES_BIG_ENDIAN)
5324 {
5325 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5326 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5327 }
5328
5329 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5330 size_int (xll_bitpos));
5331 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5332 size_int (xrl_bitpos));
5333
5334 if (l_const)
5335 {
5336 l_const = fold_convert_loc (loc, lntype, l_const);
5337 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5338 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5339 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5340 fold_build1_loc (loc, BIT_NOT_EXPR,
5341 lntype, ll_mask))))
5342 {
5343 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5344
5345 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5346 }
5347 }
5348 if (r_const)
5349 {
5350 r_const = fold_convert_loc (loc, lntype, r_const);
5351 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5352 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5353 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5354 fold_build1_loc (loc, BIT_NOT_EXPR,
5355 lntype, rl_mask))))
5356 {
5357 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5358
5359 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5360 }
5361 }
5362
5363 /* If the right sides are not constant, do the same for them. Also,
5364 disallow this optimization if a size or signedness mismatch occurs
5365 between the left and right sides. */
5366 if (l_const == 0)
5367 {
5368 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5369 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5370 /* Make sure the two fields on the right
5371 correspond to the left without being swapped. */
5372 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5373 return 0;
5374
5375 first_bit = MIN (lr_bitpos, rr_bitpos);
5376 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5377 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5378 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5379 volatilep);
5380 if (rnmode == VOIDmode)
5381 return 0;
5382
5383 rnbitsize = GET_MODE_BITSIZE (rnmode);
5384 rnbitpos = first_bit & ~ (rnbitsize - 1);
5385 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5386 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5387
5388 if (BYTES_BIG_ENDIAN)
5389 {
5390 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5391 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5392 }
5393
5394 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5395 rntype, lr_mask),
5396 size_int (xlr_bitpos));
5397 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5398 rntype, rr_mask),
5399 size_int (xrr_bitpos));
5400
5401 /* Make a mask that corresponds to both fields being compared.
5402 Do this for both items being compared. If the operands are the
5403 same size and the bits being compared are in the same position
5404 then we can do this by masking both and comparing the masked
5405 results. */
5406 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5407 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5408 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5409 {
5410 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5411 ll_unsignedp || rl_unsignedp);
5412 if (! all_ones_mask_p (ll_mask, lnbitsize))
5413 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5414
5415 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5416 lr_unsignedp || rr_unsignedp);
5417 if (! all_ones_mask_p (lr_mask, rnbitsize))
5418 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5419
5420 result = build2 (wanted_code, truth_type, lhs, rhs);
5421 goto fold_truthop_exit;
5422 }
5423
5424 /* There is still another way we can do something: If both pairs of
5425 fields being compared are adjacent, we may be able to make a wider
5426 field containing them both.
5427
5428 Note that we still must mask the lhs/rhs expressions. Furthermore,
5429 the mask must be shifted to account for the shift done by
5430 make_bit_field_ref. */
5431 if ((ll_bitsize + ll_bitpos == rl_bitpos
5432 && lr_bitsize + lr_bitpos == rr_bitpos)
5433 || (ll_bitpos == rl_bitpos + rl_bitsize
5434 && lr_bitpos == rr_bitpos + rr_bitsize))
5435 {
5436 tree type;
5437
5438 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5439 ll_bitsize + rl_bitsize,
5440 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5441 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5442 lr_bitsize + rr_bitsize,
5443 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5444
5445 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5446 size_int (MIN (xll_bitpos, xrl_bitpos)));
5447 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5448 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5449
5450 /* Convert to the smaller type before masking out unwanted bits. */
5451 type = lntype;
5452 if (lntype != rntype)
5453 {
5454 if (lnbitsize > rnbitsize)
5455 {
5456 lhs = fold_convert_loc (loc, rntype, lhs);
5457 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5458 type = rntype;
5459 }
5460 else if (lnbitsize < rnbitsize)
5461 {
5462 rhs = fold_convert_loc (loc, lntype, rhs);
5463 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5464 type = lntype;
5465 }
5466 }
5467
5468 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5469 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5470
5471 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5472 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5473
5474 result = build2 (wanted_code, truth_type, lhs, rhs);
5475 goto fold_truthop_exit;
5476 }
5477
5478 return 0;
5479 }
5480
5481 /* Handle the case of comparisons with constants. If there is something in
5482 common between the masks, those bits of the constants must be the same.
5483 If not, the condition is always false. Test for this to avoid generating
5484 incorrect code below. */
5485 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5486 if (! integer_zerop (result)
5487 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5488 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5489 {
5490 if (wanted_code == NE_EXPR)
5491 {
5492 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5493 return constant_boolean_node (true, truth_type);
5494 }
5495 else
5496 {
5497 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5498 return constant_boolean_node (false, truth_type);
5499 }
5500 }
5501
5502 /* Construct the expression we will return. First get the component
5503 reference we will make. Unless the mask is all ones the width of
5504 that field, perform the mask operation. Then compare with the
5505 merged constant. */
5506 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5507 ll_unsignedp || rl_unsignedp);
5508
5509 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5510 if (! all_ones_mask_p (ll_mask, lnbitsize))
5511 {
5512 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5513 SET_EXPR_LOCATION (result, loc);
5514 }
5515
5516 result = build2 (wanted_code, truth_type, result,
5517 const_binop (BIT_IOR_EXPR, l_const, r_const));
5518
5519 fold_truthop_exit:
5520 SET_EXPR_LOCATION (result, loc);
5521 return result;
5522 }
5523 \f
5524 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5525 constant. */
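/* Illustration (not in the original comment): MAX (x, 4) > 3 is always
   true and MIN (x, 4) > 5 is always false; both fold through the
   GT_EXPR cases below. */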
5526
5527 static tree
5528 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5529 tree op0, tree op1)
5530 {
5531 tree arg0 = op0;
5532 enum tree_code op_code;
5533 tree comp_const;
5534 tree minmax_const;
5535 int consts_equal, consts_lt;
5536 tree inner;
5537
5538 STRIP_SIGN_NOPS (arg0);
5539
5540 op_code = TREE_CODE (arg0);
5541 minmax_const = TREE_OPERAND (arg0, 1);
5542 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5543 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5544 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5545 inner = TREE_OPERAND (arg0, 0);
5546
5547 /* If something does not permit us to optimize, return NULL_TREE. */
5548 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5549 || TREE_CODE (comp_const) != INTEGER_CST
5550 || TREE_OVERFLOW (comp_const)
5551 || TREE_CODE (minmax_const) != INTEGER_CST
5552 || TREE_OVERFLOW (minmax_const))
5553 return NULL_TREE;
5554
5555 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5556 and GT_EXPR, doing the rest with recursive calls using logical
5557 simplifications. */
5558 switch (code)
5559 {
5560 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5561 {
5562 tree tem
5563 = optimize_minmax_comparison (loc,
5564 invert_tree_comparison (code, false),
5565 type, op0, op1);
5566 if (tem)
5567 return invert_truthvalue_loc (loc, tem);
5568 return NULL_TREE;
5569 }
5570
5571 case GE_EXPR:
5572 return
5573 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5574 optimize_minmax_comparison
5575 (loc, EQ_EXPR, type, arg0, comp_const),
5576 optimize_minmax_comparison
5577 (loc, GT_EXPR, type, arg0, comp_const));
5578
5579 case EQ_EXPR:
5580 if (op_code == MAX_EXPR && consts_equal)
5581 /* MAX (X, 0) == 0 -> X <= 0 */
5582 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5583
5584 else if (op_code == MAX_EXPR && consts_lt)
5585 /* MAX (X, 0) == 5 -> X == 5 */
5586 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5587
5588 else if (op_code == MAX_EXPR)
5589 /* MAX (X, 0) == -1 -> false */
5590 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5591
5592 else if (consts_equal)
5593 /* MIN (X, 0) == 0 -> X >= 0 */
5594 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5595
5596 else if (consts_lt)
5597 /* MIN (X, 0) == 5 -> false */
5598 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5599
5600 else
5601 /* MIN (X, 0) == -1 -> X == -1 */
5602 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5603
5604 case GT_EXPR:
5605 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5606 /* MAX (X, 0) > 0 -> X > 0
5607 MAX (X, 0) > 5 -> X > 5 */
5608 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5609
5610 else if (op_code == MAX_EXPR)
5611 /* MAX (X, 0) > -1 -> true */
5612 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5613
5614 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5615 /* MIN (X, 0) > 0 -> false
5616 MIN (X, 0) > 5 -> false */
5617 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5618
5619 else
5620 /* MIN (X, 0) > -1 -> X > -1 */
5621 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5622
5623 default:
5624 return NULL_TREE;
5625 }
5626 }
5627 \f
5628 /* T is an integer expression that is being multiplied, divided, or taken a
5629 modulus (CODE says which and what kind of divide or modulus) by a
5630 constant C. See if we can eliminate that operation by folding it with
5631 other operations already in T. WIDE_TYPE, if non-null, is a type that
5632 should be used for the computation if wider than our type.
5633
5634 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5635 (X * 2) + (Y * 4). We must, however, be assured that either the original
5636 expression would not overflow or that overflow is undefined for the type
5637 in the language in question.
5638
5639 If we return a non-null expression, it is an equivalent form of the
5640 original computation, but need not be in the original type.
5641
5642 We set *STRICT_OVERFLOW_P to true if the return value depends on
5643 signed overflow being undefined. Otherwise we do not change
5644 *STRICT_OVERFLOW_P. */
5645
5646 static tree
5647 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5648 bool *strict_overflow_p)
5649 {
5650 /* To avoid exponential search depth, refuse to allow recursion past
5651 three levels. Beyond that (1) it's highly unlikely that we'll find
5652 something interesting and (2) we've probably processed it before
5653 when we built the inner expression. */
5654
5655 static int depth;
5656 tree ret;
5657
5658 if (depth > 3)
5659 return NULL;
5660
5661 depth++;
5662 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5663 depth--;
5664
5665 return ret;
5666 }
5667
5668 static tree
5669 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5670 bool *strict_overflow_p)
5671 {
5672 tree type = TREE_TYPE (t);
5673 enum tree_code tcode = TREE_CODE (t);
5674 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5675 > GET_MODE_SIZE (TYPE_MODE (type)))
5676 ? wide_type : type);
5677 tree t1, t2;
5678 int same_p = tcode == code;
5679 tree op0 = NULL_TREE, op1 = NULL_TREE;
5680 bool sub_strict_overflow_p;
5681
5682 /* Don't deal with constants of zero here; they confuse the code below. */
5683 if (integer_zerop (c))
5684 return NULL_TREE;
5685
5686 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5687 op0 = TREE_OPERAND (t, 0);
5688
5689 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5690 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5691
5692 /* Note that we need not handle conditional operations here since fold
5693 already handles those cases. So just do arithmetic here. */
5694 switch (tcode)
5695 {
5696 case INTEGER_CST:
5697 /* For a constant, we can always simplify if we are a multiply
5698 or (for divide and modulus) if it is a multiple of our constant. */
5699 if (code == MULT_EXPR
5700 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5701 return const_binop (code, fold_convert (ctype, t),
5702 fold_convert (ctype, c));
5703 break;
5704
5705 CASE_CONVERT: case NON_LVALUE_EXPR:
5706 /* If op0 is an expression ... */
5707 if ((COMPARISON_CLASS_P (op0)
5708 || UNARY_CLASS_P (op0)
5709 || BINARY_CLASS_P (op0)
5710 || VL_EXP_CLASS_P (op0)
5711 || EXPRESSION_CLASS_P (op0))
5712 /* ... and has wrapping overflow, and its type is smaller
5713 than ctype, then we cannot pass through as widening. */
5714 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5715 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5716 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5717 && (TYPE_PRECISION (ctype)
5718 > TYPE_PRECISION (TREE_TYPE (op0))))
5719 /* ... or this is a truncation (t is narrower than op0),
5720 then we cannot pass through this narrowing. */
5721 || (TYPE_PRECISION (type)
5722 < TYPE_PRECISION (TREE_TYPE (op0)))
5723 /* ... or signedness changes for division or modulus,
5724 then we cannot pass through this conversion. */
5725 || (code != MULT_EXPR
5726 && (TYPE_UNSIGNED (ctype)
5727 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5728 /* ... or has undefined overflow while the converted to
5729 type has not, we cannot do the operation in the inner type
5730 as that would introduce undefined overflow. */
5731 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5732 && !TYPE_OVERFLOW_UNDEFINED (type))))
5733 break;
5734
5735 /* Pass the constant down and see if we can make a simplification. If
5736 we can, replace this expression with the inner simplification for
5737 possible later conversion to our or some other type. */
5738 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5739 && TREE_CODE (t2) == INTEGER_CST
5740 && !TREE_OVERFLOW (t2)
5741 && (0 != (t1 = extract_muldiv (op0, t2, code,
5742 code == MULT_EXPR
5743 ? ctype : NULL_TREE,
5744 strict_overflow_p))))
5745 return t1;
5746 break;
5747
5748 case ABS_EXPR:
5749 /* If widening the type changes it from signed to unsigned, then we
5750 must avoid building ABS_EXPR itself as unsigned. */
5751 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5752 {
5753 tree cstype = (*signed_type_for) (ctype);
5754 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5755 != 0)
5756 {
5757 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5758 return fold_convert (ctype, t1);
5759 }
5760 break;
5761 }
5762 /* If the constant is negative, we cannot simplify this. */
5763 if (tree_int_cst_sgn (c) == -1)
5764 break;
5765 /* FALLTHROUGH */
5766 case NEGATE_EXPR:
5767 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5768 != 0)
5769 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5770 break;
5771
5772 case MIN_EXPR: case MAX_EXPR:
5773 /* If widening the type changes the signedness, then we can't perform
5774 this optimization as that changes the result. */
5775 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5776 break;
5777
5778 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5779 sub_strict_overflow_p = false;
5780 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5781 &sub_strict_overflow_p)) != 0
5782 && (t2 = extract_muldiv (op1, c, code, wide_type,
5783 &sub_strict_overflow_p)) != 0)
5784 {
5785 if (tree_int_cst_sgn (c) < 0)
5786 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5787 if (sub_strict_overflow_p)
5788 *strict_overflow_p = true;
5789 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5790 fold_convert (ctype, t2));
5791 }
5792 break;
5793
5794 case LSHIFT_EXPR: case RSHIFT_EXPR:
5795 /* If the second operand is constant, this is a multiplication
5796 or floor division by a power of two, so we can treat it that
5797 way unless the multiplier or divisor overflows. Signed
5798 left-shift overflow is implementation-defined rather than
5799 undefined in C90, so do not convert signed left shift into
5800 multiplication. */
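/* Illustration: for unsigned X, "X << 3" is recursed into as X * 8
   and "X >> 3" as floor division of X by 8. */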
5801 if (TREE_CODE (op1) == INTEGER_CST
5802 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5803 /* const_binop may not detect overflow correctly,
5804 so check for it explicitly here. */
5805 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5806 && TREE_INT_CST_HIGH (op1) == 0
5807 && 0 != (t1 = fold_convert (ctype,
5808 const_binop (LSHIFT_EXPR,
5809 size_one_node,
5810 op1)))
5811 && !TREE_OVERFLOW (t1))
5812 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5813 ? MULT_EXPR : FLOOR_DIV_EXPR,
5814 ctype,
5815 fold_convert (ctype, op0),
5816 t1),
5817 c, code, wide_type, strict_overflow_p);
5818 break;
5819
5820 case PLUS_EXPR: case MINUS_EXPR:
5821 /* See if we can eliminate the operation on both sides. If we can, we
5822 can return a new PLUS or MINUS. If we can't, the only remaining
5823 cases where we can do anything are if the second operand is a
5824 constant. */
5825 sub_strict_overflow_p = false;
5826 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5827 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5828 if (t1 != 0 && t2 != 0
5829 && (code == MULT_EXPR
5830 /* If not multiplication, we can only do this if both operands
5831 are divisible by c. */
5832 || (multiple_of_p (ctype, op0, c)
5833 && multiple_of_p (ctype, op1, c))))
5834 {
5835 if (sub_strict_overflow_p)
5836 *strict_overflow_p = true;
5837 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5838 fold_convert (ctype, t2));
5839 }
5840
5841 /* If this was a subtraction, negate OP1 and set it to be an addition.
5842 This simplifies the logic below. */
5843 if (tcode == MINUS_EXPR)
5844 {
5845 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5846 /* If OP1 was not easily negatable, the constant may be OP0. */
5847 if (TREE_CODE (op0) == INTEGER_CST)
5848 {
5849 tree tem = op0;
5850 op0 = op1;
5851 op1 = tem;
5852 tem = t1;
5853 t1 = t2;
5854 t2 = tem;
5855 }
5856 }
5857
5858 if (TREE_CODE (op1) != INTEGER_CST)
5859 break;
5860
5861 /* If either OP1 or C are negative, this optimization is not safe for
5862 some of the division and remainder types, while for others we need
5863 to change the code. */
5864 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5865 {
5866 if (code == CEIL_DIV_EXPR)
5867 code = FLOOR_DIV_EXPR;
5868 else if (code == FLOOR_DIV_EXPR)
5869 code = CEIL_DIV_EXPR;
5870 else if (code != MULT_EXPR
5871 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5872 break;
5873 }
5874
5875 /* If it's a multiply or a division/modulus operation of a multiple
5876 of our constant, do the operation and verify it doesn't overflow. */
5877 if (code == MULT_EXPR
5878 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5879 {
5880 op1 = const_binop (code, fold_convert (ctype, op1),
5881 fold_convert (ctype, c));
5882 /* We allow the constant to overflow with wrapping semantics. */
5883 if (op1 == 0
5884 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5885 break;
5886 }
5887 else
5888 break;
5889
5890 /* If we have an unsigned type that is not a sizetype, we cannot widen
5891 the operation since it will change the result if the original
5892 computation overflowed. */
5893 if (TYPE_UNSIGNED (ctype)
5894 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5895 && ctype != type)
5896 break;
5897
5898 /* If we were able to eliminate our operation from the first side,
5899 apply our operation to the second side and reform the PLUS. */
5900 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5901 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5902
5903 /* The last case is if we are a multiply. In that case, we can
5904 apply the distributive law to commute the multiply and addition
5905 if the multiplication of the constants doesn't overflow. */
5906 if (code == MULT_EXPR)
5907 return fold_build2 (tcode, ctype,
5908 fold_build2 (code, ctype,
5909 fold_convert (ctype, op0),
5910 fold_convert (ctype, c)),
5911 op1);
5912
5913 break;
5914
5915 case MULT_EXPR:
5916 /* We have a special case here if we are doing something like
5917 (C * 8) % 4 since we know that's zero. */
5918 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5919 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5920 /* If the multiplication can overflow we cannot optimize this.
5921 ??? Until we can properly mark individual operations as
5922 not overflowing we need to treat sizetype special here as
5923 stor-layout relies on this opimization to make
5924 DECL_FIELD_BIT_OFFSET always a constant. */
5925 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5926 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5927 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5928 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5929 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5930 {
5931 *strict_overflow_p = true;
5932 return omit_one_operand (type, integer_zero_node, op0);
5933 }
5934
5935 /* ... fall through ... */
5936
5937 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5938 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5939 /* If we can extract our operation from the LHS, do so and return a
5940 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5941 do something only if the second operand is a constant. */
5942 if (same_p
5943 && (t1 = extract_muldiv (op0, c, code, wide_type,
5944 strict_overflow_p)) != 0)
5945 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5946 fold_convert (ctype, op1));
5947 else if (tcode == MULT_EXPR && code == MULT_EXPR
5948 && (t1 = extract_muldiv (op1, c, code, wide_type,
5949 strict_overflow_p)) != 0)
5950 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5951 fold_convert (ctype, t1));
5952 else if (TREE_CODE (op1) != INTEGER_CST)
5953 return 0;
5954
5955 /* If these are the same operation types, we can associate them
5956 assuming no overflow. */
5957 if (tcode == code
5958 && 0 != (t1 = int_const_binop (MULT_EXPR,
5959 fold_convert (ctype, op1),
5960 fold_convert (ctype, c), 1))
5961 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
5962 (TYPE_UNSIGNED (ctype)
5963 && tcode != MULT_EXPR) ? -1 : 1,
5964 TREE_OVERFLOW (t1)))
5965 && !TREE_OVERFLOW (t1))
5966 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5967
5968 /* If these operations "cancel" each other, we have the main
5969 optimizations of this pass, which occur when either constant is a
5970 multiple of the other, in which case we replace this with either an
5971 operation of CODE or TCODE.
5972
5973 If we have an unsigned type that is not a sizetype, we cannot do
5974 this since it will change the result if the original computation
5975 overflowed. */
5976 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5977 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5978 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5979 || (tcode == MULT_EXPR
5980 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5981 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5982 && code != MULT_EXPR)))
5983 {
5984 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5985 {
5986 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5987 *strict_overflow_p = true;
5988 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5989 fold_convert (ctype,
5990 const_binop (TRUNC_DIV_EXPR,
5991 op1, c)));
5992 }
5993 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5994 {
5995 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5996 *strict_overflow_p = true;
5997 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5998 fold_convert (ctype,
5999 const_binop (TRUNC_DIV_EXPR,
6000 c, op1)));
6001 }
6002 }
6003 break;
6004
6005 default:
6006 break;
6007 }
6008
6009 return 0;
6010 }
6011 \f
6012 /* Return a node which has the indicated constant VALUE (either 0 or
6013 1), and is of the indicated TYPE. */
6014
6015 tree
6016 constant_boolean_node (int value, tree type)
6017 {
6018 if (type == integer_type_node)
6019 return value ? integer_one_node : integer_zero_node;
6020 else if (type == boolean_type_node)
6021 return value ? boolean_true_node : boolean_false_node;
6022 else
6023 return build_int_cst (type, value);
6024 }
6025
6026
6027 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6028 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6029 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6030 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6031 COND is the first argument to CODE; otherwise (as in the example
6032 given here), it is the second argument. TYPE is the type of the
6033 original expression. Return NULL_TREE if no simplification is
6034 possible. */
6035
6036 static tree
6037 fold_binary_op_with_conditional_arg (location_t loc,
6038 enum tree_code code,
6039 tree type, tree op0, tree op1,
6040 tree cond, tree arg, int cond_first_p)
6041 {
6042 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6043 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6044 tree test, true_value, false_value;
6045 tree lhs = NULL_TREE;
6046 tree rhs = NULL_TREE;
6047
6048 if (TREE_CODE (cond) == COND_EXPR)
6049 {
6050 test = TREE_OPERAND (cond, 0);
6051 true_value = TREE_OPERAND (cond, 1);
6052 false_value = TREE_OPERAND (cond, 2);
6053 /* If this operand throws an exception, then it does not make
6054 sense to try to perform a logical or arithmetic operation
6055 involving it. */
6056 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6057 lhs = true_value;
6058 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6059 rhs = false_value;
6060 }
6061 else
6062 {
6063 tree testtype = TREE_TYPE (cond);
6064 test = cond;
6065 true_value = constant_boolean_node (true, testtype);
6066 false_value = constant_boolean_node (false, testtype);
6067 }
6068
6069 /* This transformation is only worthwhile if we don't have to wrap ARG
6070 in a SAVE_EXPR and the operation can be simplified on at least one
6071 of the branches once it's pushed inside the COND_EXPR. */
6072 if (!TREE_CONSTANT (arg)
6073 && (TREE_SIDE_EFFECTS (arg)
6074 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6075 return NULL_TREE;
6076
6077 arg = fold_convert_loc (loc, arg_type, arg);
6078 if (lhs == 0)
6079 {
6080 true_value = fold_convert_loc (loc, cond_type, true_value);
6081 if (cond_first_p)
6082 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6083 else
6084 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6085 }
6086 if (rhs == 0)
6087 {
6088 false_value = fold_convert_loc (loc, cond_type, false_value);
6089 if (cond_first_p)
6090 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6091 else
6092 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6093 }
6094
6095 /* Check that we have simplified at least one of the branches. */
6096 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6097 return NULL_TREE;
6098
6099 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6100 }
6101
6102 \f
6103 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6104
6105 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6106 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6107 ADDEND is the same as X.
6108
6109 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6110 and finite. The problematic cases are when X is zero, and its mode
6111 has signed zeros. In the case of rounding towards -infinity,
6112 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6113 modes, X + 0 is not the same as X because -0 + 0 is 0. */
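/* Illustration: under the default round-to-nearest mode, -0.0 + 0.0
   yields +0.0, so folding X + 0.0 to X could change the sign of a zero
   result. +0.0 - 0.0 yields +0.0 in every mode except rounding towards
   -infinity, where it yields -0.0; hence X - 0.0 is foldable only when
   sign-dependent rounding need not be honored. */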
6114
6115 bool
6116 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6117 {
6118 if (!real_zerop (addend))
6119 return false;
6120
6121 /* Don't allow the fold with -fsignaling-nans. */
6122 if (HONOR_SNANS (TYPE_MODE (type)))
6123 return false;
6124
6125 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6126 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6127 return true;
6128
6129 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6130 if (TREE_CODE (addend) == REAL_CST
6131 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6132 negate = !negate;
6133
6134 /* The mode has signed zeros, and we have to honor their sign.
6135 In this situation, there is only one case we can return true for.
6136 X - 0 is the same as X unless rounding towards -infinity is
6137 supported. */
6138 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6139 }
6140
6141 /* Subroutine of fold() that checks comparisons of built-in math
6142 functions against real constants.
6143
6144 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6145 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6146 is the type of the result and ARG0 and ARG1 are the operands of the
6147 comparison. ARG1 must be a TREE_REAL_CST.
6148
6149 The function returns the constant folded tree if a simplification
6150 can be made, and NULL_TREE otherwise. */
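/* Illustration: when NaNs are honored, sqrt(x) < 3.0 folds to
   x >= 0.0 && x < 9.0 (with x wrapped in a save_expr), while under
   -ffinite-math-only it folds directly to x < 9.0. */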
6151
6152 static tree
6153 fold_mathfn_compare (location_t loc,
6154 enum built_in_function fcode, enum tree_code code,
6155 tree type, tree arg0, tree arg1)
6156 {
6157 REAL_VALUE_TYPE c;
6158
6159 if (BUILTIN_SQRT_P (fcode))
6160 {
6161 tree arg = CALL_EXPR_ARG (arg0, 0);
6162 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6163
6164 c = TREE_REAL_CST (arg1);
6165 if (REAL_VALUE_NEGATIVE (c))
6166 {
6167 /* sqrt(x) < y is always false, if y is negative. */
6168 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6169 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6170
6171 /* sqrt(x) > y is always true, if y is negative and we
6172 don't care about NaNs, i.e. negative values of x. */
6173 if (code == NE_EXPR || !HONOR_NANS (mode))
6174 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6175
6176 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6177 return fold_build2_loc (loc, GE_EXPR, type, arg,
6178 build_real (TREE_TYPE (arg), dconst0));
6179 }
6180 else if (code == GT_EXPR || code == GE_EXPR)
6181 {
6182 REAL_VALUE_TYPE c2;
6183
6184 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6185 real_convert (&c2, mode, &c2);
6186
6187 if (REAL_VALUE_ISINF (c2))
6188 {
6189 /* sqrt(x) > y is x == +Inf, when y is very large. */
6190 if (HONOR_INFINITIES (mode))
6191 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6192 build_real (TREE_TYPE (arg), c2));
6193
6194 /* sqrt(x) > y is always false, when y is very large
6195 and we don't care about infinities. */
6196 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6197 }
6198
6199 /* sqrt(x) > c is the same as x > c*c. */
6200 return fold_build2_loc (loc, code, type, arg,
6201 build_real (TREE_TYPE (arg), c2));
6202 }
6203 else if (code == LT_EXPR || code == LE_EXPR)
6204 {
6205 REAL_VALUE_TYPE c2;
6206
6207 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6208 real_convert (&c2, mode, &c2);
6209
6210 if (REAL_VALUE_ISINF (c2))
6211 {
6212 /* sqrt(x) < y is always true, when y is a very large
6213 value and we don't care about NaNs or Infinities. */
6214 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6215 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6216
6217 /* sqrt(x) < y is x != +Inf when y is very large and we
6218 don't care about NaNs. */
6219 if (! HONOR_NANS (mode))
6220 return fold_build2_loc (loc, NE_EXPR, type, arg,
6221 build_real (TREE_TYPE (arg), c2));
6222
6223 /* sqrt(x) < y is x >= 0 when y is very large and we
6224 don't care about Infinities. */
6225 if (! HONOR_INFINITIES (mode))
6226 return fold_build2_loc (loc, GE_EXPR, type, arg,
6227 build_real (TREE_TYPE (arg), dconst0));
6228
6229 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6230 if (lang_hooks.decls.global_bindings_p () != 0
6231 || CONTAINS_PLACEHOLDER_P (arg))
6232 return NULL_TREE;
6233
6234 arg = save_expr (arg);
6235 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6236 fold_build2_loc (loc, GE_EXPR, type, arg,
6237 build_real (TREE_TYPE (arg),
6238 dconst0)),
6239 fold_build2_loc (loc, NE_EXPR, type, arg,
6240 build_real (TREE_TYPE (arg),
6241 c2)));
6242 }
6243
6244 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6245 if (! HONOR_NANS (mode))
6246 return fold_build2_loc (loc, code, type, arg,
6247 build_real (TREE_TYPE (arg), c2));
6248
6249 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6250 if (lang_hooks.decls.global_bindings_p () == 0
6251 && ! CONTAINS_PLACEHOLDER_P (arg))
6252 {
6253 arg = save_expr (arg);
6254 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6255 fold_build2_loc (loc, GE_EXPR, type, arg,
6256 build_real (TREE_TYPE (arg),
6257 dconst0)),
6258 fold_build2_loc (loc, code, type, arg,
6259 build_real (TREE_TYPE (arg),
6260 c2)));
6261 }
6262 }
6263 }
6264
6265 return NULL_TREE;
6266 }
6267
6268 /* Subroutine of fold() that optimizes comparisons against Infinities,
6269 either +Inf or -Inf.
6270
6271 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6272 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6273 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6274
6275 The function returns the constant folded tree if a simplification
6276 can be made, and NULL_TREE otherwise. */
6277
6278 static tree
6279 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6280 tree arg0, tree arg1)
6281 {
6282 enum machine_mode mode;
6283 REAL_VALUE_TYPE max;
6284 tree temp;
6285 bool neg;
6286
6287 mode = TYPE_MODE (TREE_TYPE (arg0));
6288
6289 /* For negative infinity swap the sense of the comparison. */
6290 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6291 if (neg)
6292 code = swap_tree_comparison (code);
6293
6294 switch (code)
6295 {
6296 case GT_EXPR:
6297 /* x > +Inf is always false, if we ignore sNaNs. */
6298 if (HONOR_SNANS (mode))
6299 return NULL_TREE;
6300 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6301
6302 case LE_EXPR:
6303 /* x <= +Inf is always true, if we don't care about NaNs. */
6304 if (! HONOR_NANS (mode))
6305 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6306
6307 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6308 if (lang_hooks.decls.global_bindings_p () == 0
6309 && ! CONTAINS_PLACEHOLDER_P (arg0))
6310 {
6311 arg0 = save_expr (arg0);
6312 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6313 }
6314 break;
6315
6316 case EQ_EXPR:
6317 case GE_EXPR:
6318 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6319 real_maxval (&max, neg, mode);
6320 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6321 arg0, build_real (TREE_TYPE (arg0), max));
6322
6323 case LT_EXPR:
6324 /* x < +Inf is always equal to x <= DBL_MAX. */
6325 real_maxval (&max, neg, mode);
6326 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6327 arg0, build_real (TREE_TYPE (arg0), max));
6328
6329 case NE_EXPR:
6330 /* x != +Inf is always equal to !(x > DBL_MAX). */
6331 real_maxval (&max, neg, mode);
6332 if (! HONOR_NANS (mode))
6333 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6334 arg0, build_real (TREE_TYPE (arg0), max));
6335
6336 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6337 arg0, build_real (TREE_TYPE (arg0), max));
6338 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6339
6340 default:
6341 break;
6342 }
6343
6344 return NULL_TREE;
6345 }
6346
6347 /* Subroutine of fold() that optimizes comparisons of a division by
6348 a nonzero integer constant against an integer constant, i.e.
6349 X/C1 op C2.
6350
6351 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6352 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6353 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6354
6355 The function returns the constant folded tree if a simplification
6356 can be made, and NULL_TREE otherwise. */
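/* Illustration: for signed X, X/3 == 2 gives prod = 6 and tmp = 2,
   so lo = 6, hi = 8, and the comparison folds to the range check
   6 <= X && X <= 8. */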
6357
6358 static tree
6359 fold_div_compare (location_t loc,
6360 enum tree_code code, tree type, tree arg0, tree arg1)
6361 {
6362 tree prod, tmp, hi, lo;
6363 tree arg00 = TREE_OPERAND (arg0, 0);
6364 tree arg01 = TREE_OPERAND (arg0, 1);
6365 double_int val;
6366 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6367 bool neg_overflow;
6368 int overflow;
6369
6370 /* We have to do this the hard way to detect unsigned overflow.
6371 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6372 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6373 TREE_INT_CST_HIGH (arg01),
6374 TREE_INT_CST_LOW (arg1),
6375 TREE_INT_CST_HIGH (arg1),
6376 &val.low, &val.high, unsigned_p);
6377 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6378 neg_overflow = false;
6379
6380 if (unsigned_p)
6381 {
6382 tmp = int_const_binop (MINUS_EXPR, arg01,
6383 build_int_cst (TREE_TYPE (arg01), 1), 0);
6384 lo = prod;
6385
6386 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6387 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6388 TREE_INT_CST_HIGH (prod),
6389 TREE_INT_CST_LOW (tmp),
6390 TREE_INT_CST_HIGH (tmp),
6391 &val.low, &val.high, unsigned_p);
6392 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6393 -1, overflow | TREE_OVERFLOW (prod));
6394 }
6395 else if (tree_int_cst_sgn (arg01) >= 0)
6396 {
6397 tmp = int_const_binop (MINUS_EXPR, arg01,
6398 build_int_cst (TREE_TYPE (arg01), 1), 0);
6399 switch (tree_int_cst_sgn (arg1))
6400 {
6401 case -1:
6402 neg_overflow = true;
6403 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6404 hi = prod;
6405 break;
6406
6407 case 0:
6408 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6409 hi = tmp;
6410 break;
6411
6412 case 1:
6413 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6414 lo = prod;
6415 break;
6416
6417 default:
6418 gcc_unreachable ();
6419 }
6420 }
6421 else
6422 {
6423 /* A negative divisor reverses the relational operators. */
6424 code = swap_tree_comparison (code);
6425
6426 tmp = int_const_binop (PLUS_EXPR, arg01,
6427 build_int_cst (TREE_TYPE (arg01), 1), 0);
6428 switch (tree_int_cst_sgn (arg1))
6429 {
6430 case -1:
6431 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6432 lo = prod;
6433 break;
6434
6435 case 0:
6436 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6437 lo = tmp;
6438 break;
6439
6440 case 1:
6441 neg_overflow = true;
6442 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6443 hi = prod;
6444 break;
6445
6446 default:
6447 gcc_unreachable ();
6448 }
6449 }
6450
6451 switch (code)
6452 {
6453 case EQ_EXPR:
6454 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6455 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6456 if (TREE_OVERFLOW (hi))
6457 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6458 if (TREE_OVERFLOW (lo))
6459 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6460 return build_range_check (loc, type, arg00, 1, lo, hi);
6461
6462 case NE_EXPR:
6463 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6464 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6465 if (TREE_OVERFLOW (hi))
6466 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6467 if (TREE_OVERFLOW (lo))
6468 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6469 return build_range_check (loc, type, arg00, 0, lo, hi);
6470
6471 case LT_EXPR:
6472 if (TREE_OVERFLOW (lo))
6473 {
6474 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6475 return omit_one_operand_loc (loc, type, tmp, arg00);
6476 }
6477 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6478
6479 case LE_EXPR:
6480 if (TREE_OVERFLOW (hi))
6481 {
6482 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6483 return omit_one_operand_loc (loc, type, tmp, arg00);
6484 }
6485 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6486
6487 case GT_EXPR:
6488 if (TREE_OVERFLOW (hi))
6489 {
6490 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6491 return omit_one_operand_loc (loc, type, tmp, arg00);
6492 }
6493 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6494
6495 case GE_EXPR:
6496 if (TREE_OVERFLOW (lo))
6497 {
6498 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6499 return omit_one_operand_loc (loc, type, tmp, arg00);
6500 }
6501 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6502
6503 default:
6504 break;
6505 }
6506
6507 return NULL_TREE;
6508 }
6509
6510
6511 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6512 equality/inequality test, then return a simplified form of the test
6513 using a sign test. Otherwise return NULL. TYPE is the desired
6514 result type. */
6515
6516 static tree
6517 fold_single_bit_test_into_sign_test (location_t loc,
6518 enum tree_code code, tree arg0, tree arg1,
6519 tree result_type)
6520 {
6521 /* If this is testing a single bit, we can optimize the test. */
6522 if ((code == NE_EXPR || code == EQ_EXPR)
6523 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6524 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6525 {
6526 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6527 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
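/* Illustration: for a 32-bit signed A, (A & 0x80000000) != 0
   folds to A < 0 and (A & 0x80000000) == 0 folds to A >= 0. */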
6528 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6529
6530 if (arg00 != NULL_TREE
6531 /* This is only a win if casting to a signed type is cheap,
6532 i.e. when arg00's type is not a partial mode. */
6533 && TYPE_PRECISION (TREE_TYPE (arg00))
6534 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6535 {
6536 tree stype = signed_type_for (TREE_TYPE (arg00));
6537 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6538 result_type,
6539 fold_convert_loc (loc, stype, arg00),
6540 build_int_cst (stype, 0));
6541 }
6542 }
6543
6544 return NULL_TREE;
6545 }
6546
6547 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6548 equality/inequality test, then return a simplified form of
6549 the test using shifts and logical operations. Otherwise return
6550 NULL. TYPE is the desired result type. */
6551
6552 tree
6553 fold_single_bit_test (location_t loc, enum tree_code code,
6554 tree arg0, tree arg1, tree result_type)
6555 {
6556 /* If this is testing a single bit, we can optimize the test. */
6557 if ((code == NE_EXPR || code == EQ_EXPR)
6558 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6559 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6560 {
6561 tree inner = TREE_OPERAND (arg0, 0);
6562 tree type = TREE_TYPE (arg0);
6563 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6564 enum machine_mode operand_mode = TYPE_MODE (type);
6565 int ops_unsigned;
6566 tree signed_type, unsigned_type, intermediate_type;
6567 tree tem, one;
6568
6569 /* First, see if we can fold the single bit test into a sign-bit
6570 test. */
6571 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6572 result_type);
6573 if (tem)
6574 return tem;
6575
6576 /* Otherwise we have (A & C) != 0 where C is a single bit,
6577 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6578 Similarly for (A & C) == 0. */
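/* Illustration: with C == 8, (A & 8) != 0 becomes ((A >> 3) & 1)
   and (A & 8) == 0 becomes (((A >> 3) ^ 1) & 1). */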
6579
6580 /* If INNER is a right shift by a constant and it plus BITNUM does
6581 not overflow, adjust BITNUM and INNER. */
6582 if (TREE_CODE (inner) == RSHIFT_EXPR
6583 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6584 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6585 && bitnum < TYPE_PRECISION (type)
6586 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6587 bitnum - TYPE_PRECISION (type)))
6588 {
6589 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6590 inner = TREE_OPERAND (inner, 0);
6591 }
6592
6593 /* If we are going to be able to omit the AND below, we must do our
6594 operations as unsigned. If we must use the AND, we have a choice.
6595 Normally unsigned is faster, but for some machines signed is. */
6596 #ifdef LOAD_EXTEND_OP
6597 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6598 && !flag_syntax_only) ? 0 : 1;
6599 #else
6600 ops_unsigned = 1;
6601 #endif
6602
6603 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6604 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6605 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6606 inner = fold_convert_loc (loc, intermediate_type, inner);
6607
6608 if (bitnum != 0)
6609 inner = build2 (RSHIFT_EXPR, intermediate_type,
6610 inner, size_int (bitnum));
6611
6612 one = build_int_cst (intermediate_type, 1);
6613
6614 if (code == EQ_EXPR)
6615 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6616
6617 /* Put the AND last so it can combine with more things. */
6618 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6619
6620 /* Make sure to return the proper type. */
6621 inner = fold_convert_loc (loc, result_type, inner);
6622
6623 return inner;
6624 }
6625 return NULL_TREE;
6626 }
6627
6628 /* Check whether we are allowed to reorder operands arg0 and arg1,
6629 such that the evaluation of arg1 occurs before arg0. */
6630
6631 static bool
6632 reorder_operands_p (const_tree arg0, const_tree arg1)
6633 {
6634 if (! flag_evaluation_order)
6635 return true;
6636 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6637 return true;
6638 return ! TREE_SIDE_EFFECTS (arg0)
6639 && ! TREE_SIDE_EFFECTS (arg1);
6640 }
6641
6642 /* Test whether it is preferable to swap two operands, ARG0 and
6643 ARG1, for example because ARG0 is an integer constant and ARG1
6644 isn't. If REORDER is true, only recommend swapping if we can
6645 evaluate the operands in reverse order. */
6646
6647 bool
6648 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6649 {
6650 STRIP_SIGN_NOPS (arg0);
6651 STRIP_SIGN_NOPS (arg1);
6652
6653 if (TREE_CODE (arg1) == INTEGER_CST)
6654 return 0;
6655 if (TREE_CODE (arg0) == INTEGER_CST)
6656 return 1;
6657
6658 if (TREE_CODE (arg1) == REAL_CST)
6659 return 0;
6660 if (TREE_CODE (arg0) == REAL_CST)
6661 return 1;
6662
6663 if (TREE_CODE (arg1) == FIXED_CST)
6664 return 0;
6665 if (TREE_CODE (arg0) == FIXED_CST)
6666 return 1;
6667
6668 if (TREE_CODE (arg1) == COMPLEX_CST)
6669 return 0;
6670 if (TREE_CODE (arg0) == COMPLEX_CST)
6671 return 1;
6672
6673 if (TREE_CONSTANT (arg1))
6674 return 0;
6675 if (TREE_CONSTANT (arg0))
6676 return 1;
6677
6678 if (optimize_function_for_size_p (cfun))
6679 return 0;
6680
6681 if (reorder && flag_evaluation_order
6682 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6683 return 0;
6684
6685 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6686 for commutative and comparison operators. Ensuring a canonical
6687 form allows the optimizers to find additional redundancies without
6688 having to explicitly check for both orderings. */
6689 if (TREE_CODE (arg0) == SSA_NAME
6690 && TREE_CODE (arg1) == SSA_NAME
6691 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6692 return 1;
6693
6694 /* Put SSA_NAMEs last. */
6695 if (TREE_CODE (arg1) == SSA_NAME)
6696 return 0;
6697 if (TREE_CODE (arg0) == SSA_NAME)
6698 return 1;
6699
6700 /* Put variables last. */
6701 if (DECL_P (arg1))
6702 return 0;
6703 if (DECL_P (arg0))
6704 return 1;
6705
6706 return 0;
6707 }
6708
6709 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6710 ARG0 is extended to a wider type. */
6711
6712 static tree
6713 fold_widened_comparison (location_t loc, enum tree_code code,
6714 tree type, tree arg0, tree arg1)
6715 {
6716 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6717 tree arg1_unw;
6718 tree shorter_type, outer_type;
6719 tree min, max;
6720 bool above, below;
6721
6722 if (arg0_unw == arg0)
6723 return NULL_TREE;
6724 shorter_type = TREE_TYPE (arg0_unw);
6725
6726 #ifdef HAVE_canonicalize_funcptr_for_compare
6727 /* Disable this optimization if we're casting a function pointer
6728 type on targets that require function pointer canonicalization. */
6729 if (HAVE_canonicalize_funcptr_for_compare
6730 && TREE_CODE (shorter_type) == POINTER_TYPE
6731 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6732 return NULL_TREE;
6733 #endif
6734
6735 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6736 return NULL_TREE;
6737
6738 arg1_unw = get_unwidened (arg1, NULL_TREE);
6739
6740 /* If possible, express the comparison in the shorter mode. */
6741 if ((code == EQ_EXPR || code == NE_EXPR
6742 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6743 && (TREE_TYPE (arg1_unw) == shorter_type
6744 || ((TYPE_PRECISION (shorter_type)
6745 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6746 && (TYPE_UNSIGNED (shorter_type)
6747 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6748 || (TREE_CODE (arg1_unw) == INTEGER_CST
6749 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6750 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6751 && int_fits_type_p (arg1_unw, shorter_type))))
6752 return fold_build2_loc (loc, code, type, arg0_unw,
6753 fold_convert_loc (loc, shorter_type, arg1_unw));
6754
6755 if (TREE_CODE (arg1_unw) != INTEGER_CST
6756 || TREE_CODE (shorter_type) != INTEGER_TYPE
6757 || !int_fits_type_p (arg1_unw, shorter_type))
6758 return NULL_TREE;
6759
6760 /* If we are comparing with an integer that does not fit into the range
6761 of the shorter type, the result is known. */
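/* Illustration: for "short s", (int) s == 70000 folds to false and
   (int) s < 70000 folds to true, since 70000 is above the range of
   short. */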
6762 outer_type = TREE_TYPE (arg1_unw);
6763 min = lower_bound_in_type (outer_type, shorter_type);
6764 max = upper_bound_in_type (outer_type, shorter_type);
6765
6766 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6767 max, arg1_unw));
6768 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6769 arg1_unw, min));
6770
6771 switch (code)
6772 {
6773 case EQ_EXPR:
6774 if (above || below)
6775 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6776 break;
6777
6778 case NE_EXPR:
6779 if (above || below)
6780 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6781 break;
6782
6783 case LT_EXPR:
6784 case LE_EXPR:
6785 if (above)
6786 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6787 else if (below)
6788 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6789 break;

6790 case GT_EXPR:
6791 case GE_EXPR:
6792 if (above)
6793 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6794 else if (below)
6795 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6796 break;

6797 default:
6798 break;
6799 }
6800
6801 return NULL_TREE;
6802 }
6803
6804 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6805 ARG0 just the signedness is changed. */
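/* Illustration: for "unsigned int u", (int) u == 5 folds to
   u == 5u, since only the signedness of the operand changes and
   equality is insensitive to it. */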
6806
6807 static tree
6808 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6809 tree arg0, tree arg1)
6810 {
6811 tree arg0_inner;
6812 tree inner_type, outer_type;
6813
6814 if (!CONVERT_EXPR_P (arg0))
6815 return NULL_TREE;
6816
6817 outer_type = TREE_TYPE (arg0);
6818 arg0_inner = TREE_OPERAND (arg0, 0);
6819 inner_type = TREE_TYPE (arg0_inner);
6820
6821 #ifdef HAVE_canonicalize_funcptr_for_compare
6822 /* Disable this optimization if we're casting a function pointer
6823 type on targets that require function pointer canonicalization. */
6824 if (HAVE_canonicalize_funcptr_for_compare
6825 && TREE_CODE (inner_type) == POINTER_TYPE
6826 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6827 return NULL_TREE;
6828 #endif
6829
6830 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6831 return NULL_TREE;
6832
6833 if (TREE_CODE (arg1) != INTEGER_CST
6834 && !(CONVERT_EXPR_P (arg1)
6835 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6836 return NULL_TREE;
6837
6838 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6839 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6840 && code != NE_EXPR
6841 && code != EQ_EXPR)
6842 return NULL_TREE;
6843
6844 if (TREE_CODE (arg1) == INTEGER_CST)
6845 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6846 0, TREE_OVERFLOW (arg1));
6847 else
6848 arg1 = fold_convert_loc (loc, inner_type, arg1);
6849
6850 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6851 }
6852
6853 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6854 step of the array. Reconstructs s and delta in the case of s *
6855 delta being an integer constant (and thus already folded). ADDR is
6856 the address. OP1 is the multiplicative expression. If the
6857 function succeeds, the new address expression is returned.
6858 Otherwise NULL_TREE is returned. LOC is the location of the
6859 resulting expression. */
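/* Illustration: for "int a[10]" with 4-byte int, &a[2] p+ 12 is
   rewritten as &a[5], since 12 is 3 times the array step. */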
6860
6861 static tree
6862 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6863 {
6864 tree s, delta, step;
6865 tree ref = TREE_OPERAND (addr, 0), pref;
6866 tree ret, pos;
6867 tree itype;
6868 bool mdim = false;
6869
6870 /* Strip the nops that might be added when converting op1 to sizetype. */
6871 STRIP_NOPS (op1);
6872
6873 /* Canonicalize op1 into a possibly non-constant delta
6874 and an INTEGER_CST s. */
6875 if (TREE_CODE (op1) == MULT_EXPR)
6876 {
6877 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6878
6879 STRIP_NOPS (arg0);
6880 STRIP_NOPS (arg1);
6881
6882 if (TREE_CODE (arg0) == INTEGER_CST)
6883 {
6884 s = arg0;
6885 delta = arg1;
6886 }
6887 else if (TREE_CODE (arg1) == INTEGER_CST)
6888 {
6889 s = arg1;
6890 delta = arg0;
6891 }
6892 else
6893 return NULL_TREE;
6894 }
6895 else if (TREE_CODE (op1) == INTEGER_CST)
6896 {
6897 delta = op1;
6898 s = NULL_TREE;
6899 }
6900 else
6901 {
6902 /* Treat op1 as delta * 1. */
6903 delta = op1;
6904 s = integer_one_node;
6905 }
6906
6907 for (;; ref = TREE_OPERAND (ref, 0))
6908 {
6909 if (TREE_CODE (ref) == ARRAY_REF)
6910 {
6911 tree domain;
6912
6913 /* Remember if this was a multi-dimensional array. */
6914 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6915 mdim = true;
6916
6917 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6918 if (! domain)
6919 continue;
6920 itype = TREE_TYPE (domain);
6921
6922 step = array_ref_element_size (ref);
6923 if (TREE_CODE (step) != INTEGER_CST)
6924 continue;
6925
6926 if (s)
6927 {
6928 if (! tree_int_cst_equal (step, s))
6929 continue;
6930 }
6931 else
6932 {
6933 /* Check whether delta is a multiple of step. */
6934 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6935 if (! tmp)
6936 continue;
6937 delta = tmp;
6938 }
6939
6940 /* Only fold here if we can verify we do not overflow one
6941 dimension of a multi-dimensional array. */
6942 if (mdim)
6943 {
6944 tree tmp;
6945
6946 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6947 || !TYPE_MAX_VALUE (domain)
6948 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6949 continue;
6950
6951 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6952 fold_convert_loc (loc, itype,
6953 TREE_OPERAND (ref, 1)),
6954 fold_convert_loc (loc, itype, delta));
6955 if (!tmp
6956 || TREE_CODE (tmp) != INTEGER_CST
6957 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6958 continue;
6959 }
6960
6961 break;
6962 }
6963 else
6964 mdim = false;
6965
6966 if (!handled_component_p (ref))
6967 return NULL_TREE;
6968 }
6969
6970 /* We found a suitable array reference. So copy everything up to it,
6971 and replace the index. */
6972
6973 pref = TREE_OPERAND (addr, 0);
6974 ret = copy_node (pref);
6975 SET_EXPR_LOCATION (ret, loc);
6976 pos = ret;
6977
6978 while (pref != ref)
6979 {
6980 pref = TREE_OPERAND (pref, 0);
6981 TREE_OPERAND (pos, 0) = copy_node (pref);
6982 pos = TREE_OPERAND (pos, 0);
6983 }
6984
6985 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
6986 fold_convert_loc (loc, itype,
6987 TREE_OPERAND (pos, 1)),
6988 fold_convert_loc (loc, itype, delta));
6989
6990 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6991 }
6992
6993
6994 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6995 means A >= Y && A != MAX, but in this case we know that
6996 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6997
6998 static tree
6999 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7000 {
7001 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7002
7003 if (TREE_CODE (bound) == LT_EXPR)
7004 a = TREE_OPERAND (bound, 0);
7005 else if (TREE_CODE (bound) == GT_EXPR)
7006 a = TREE_OPERAND (bound, 1);
7007 else
7008 return NULL_TREE;
7009
7010 typea = TREE_TYPE (a);
7011 if (!INTEGRAL_TYPE_P (typea)
7012 && !POINTER_TYPE_P (typea))
7013 return NULL_TREE;
7014
7015 if (TREE_CODE (ineq) == LT_EXPR)
7016 {
7017 a1 = TREE_OPERAND (ineq, 1);
7018 y = TREE_OPERAND (ineq, 0);
7019 }
7020 else if (TREE_CODE (ineq) == GT_EXPR)
7021 {
7022 a1 = TREE_OPERAND (ineq, 0);
7023 y = TREE_OPERAND (ineq, 1);
7024 }
7025 else
7026 return NULL_TREE;
7027
7028 if (TREE_TYPE (a1) != typea)
7029 return NULL_TREE;
7030
7031 if (POINTER_TYPE_P (typea))
7032 {
7033 /* Convert the pointers to integers before taking the difference. */
7034 tree ta = fold_convert_loc (loc, ssizetype, a);
7035 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7036 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7037 }
7038 else
7039 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7040
7041 if (!diff || !integer_onep (diff))
7042 return NULL_TREE;
7043
7044 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7045 }
7046
7047 /* Fold a sum or difference of at least one multiplication.
7048 Returns the folded tree or NULL if no simplification could be made. */
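/* For instance (illustrative, assuming integer operands): x*4 + y*4
   folds to (x + y) * 4, z*3 + z folds to z * 4, and x*12 + y*4
   folds to (x*3 + y) * 4 via the common power-of-two factor case
   handled below. */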
7049
7050 static tree
7051 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7052 tree arg0, tree arg1)
7053 {
7054 tree arg00, arg01, arg10, arg11;
7055 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7056
7057 /* (A * C) +- (B * C) -> (A+-B) * C.
7058 (A * C) +- A -> A * (C+-1).
7059 We are most concerned about the case where C is a constant,
7060 but other combinations show up during loop reduction. Since
7061 it is not difficult, try all four possibilities. */
7062
7063 if (TREE_CODE (arg0) == MULT_EXPR)
7064 {
7065 arg00 = TREE_OPERAND (arg0, 0);
7066 arg01 = TREE_OPERAND (arg0, 1);
7067 }
7068 else if (TREE_CODE (arg0) == INTEGER_CST)
7069 {
7070 arg00 = build_one_cst (type);
7071 arg01 = arg0;
7072 }
7073 else
7074 {
7075 /* We cannot generate constant 1 for fract. */
7076 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7077 return NULL_TREE;
7078 arg00 = arg0;
7079 arg01 = build_one_cst (type);
7080 }
7081 if (TREE_CODE (arg1) == MULT_EXPR)
7082 {
7083 arg10 = TREE_OPERAND (arg1, 0);
7084 arg11 = TREE_OPERAND (arg1, 1);
7085 }
7086 else if (TREE_CODE (arg1) == INTEGER_CST)
7087 {
7088 arg10 = build_one_cst (type);
7089 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7090 the purpose of this canonicalization. */
7091 if (TREE_INT_CST_HIGH (arg1) == -1
7092 && negate_expr_p (arg1)
7093 && code == PLUS_EXPR)
7094 {
7095 arg11 = negate_expr (arg1);
7096 code = MINUS_EXPR;
7097 }
7098 else
7099 arg11 = arg1;
7100 }
7101 else
7102 {
7103 /* We cannot generate constant 1 for fract. */
7104 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7105 return NULL_TREE;
7106 arg10 = arg1;
7107 arg11 = build_one_cst (type);
7108 }
7109 same = NULL_TREE;
7110
7111 if (operand_equal_p (arg01, arg11, 0))
7112 same = arg01, alt0 = arg00, alt1 = arg10;
7113 else if (operand_equal_p (arg00, arg10, 0))
7114 same = arg00, alt0 = arg01, alt1 = arg11;
7115 else if (operand_equal_p (arg00, arg11, 0))
7116 same = arg00, alt0 = arg01, alt1 = arg10;
7117 else if (operand_equal_p (arg01, arg10, 0))
7118 same = arg01, alt0 = arg00, alt1 = arg11;
7119
7120 /* No identical multiplicands; see if we can find a common
7121 power-of-two factor in non-power-of-two multiplies. This
7122 can help in multi-dimensional array access. */
7123 else if (host_integerp (arg01, 0)
7124 && host_integerp (arg11, 0))
7125 {
7126 HOST_WIDE_INT int01, int11, tmp;
7127 bool swap = false;
7128 tree maybe_same;
7129 int01 = TREE_INT_CST_LOW (arg01);
7130 int11 = TREE_INT_CST_LOW (arg11);
7131
7132 /* Move min of absolute values to int11. */
7133 if ((int01 >= 0 ? int01 : -int01)
7134 < (int11 >= 0 ? int11 : -int11))
7135 {
7136 tmp = int01, int01 = int11, int11 = tmp;
7137 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7138 maybe_same = arg01;
7139 swap = true;
7140 }
7141 else
7142 maybe_same = arg11;
7143
7144 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7145 /* The remainder should not be a constant, otherwise we
7146 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7147 increase the number of multiplications necessary. */
7148 && TREE_CODE (arg10) != INTEGER_CST)
7149 {
7150 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7151 build_int_cst (TREE_TYPE (arg00),
7152 int01 / int11));
7153 alt1 = arg10;
7154 same = maybe_same;
7155 if (swap)
7156 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7157 }
7158 }
7159
7160 if (same)
7161 return fold_build2_loc (loc, MULT_EXPR, type,
7162 fold_build2_loc (loc, code, type,
7163 fold_convert_loc (loc, type, alt0),
7164 fold_convert_loc (loc, type, alt1)),
7165 fold_convert_loc (loc, type, same));
7166
7167 return NULL_TREE;
7168 }
7169
7170 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7171 specified by EXPR into the buffer PTR of length LEN bytes.
7172 Return the number of bytes placed in the buffer, or zero
7173 upon failure. */
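/* For example, encoding the 32-bit INTEGER_CST 0x01020304 yields the
   bytes 04 03 02 01 on a little-endian target and 01 02 03 04 on a
   big-endian one (assuming 8-bit bytes). */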
7174
7175 static int
7176 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7177 {
7178 tree type = TREE_TYPE (expr);
7179 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7180 int byte, offset, word, words;
7181 unsigned char value;
7182
7183 if (total_bytes > len)
7184 return 0;
7185 words = total_bytes / UNITS_PER_WORD;
7186
7187 for (byte = 0; byte < total_bytes; byte++)
7188 {
7189 int bitpos = byte * BITS_PER_UNIT;
7190 if (bitpos < HOST_BITS_PER_WIDE_INT)
7191 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7192 else
7193 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7194 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7195
7196 if (total_bytes > UNITS_PER_WORD)
7197 {
7198 word = byte / UNITS_PER_WORD;
7199 if (WORDS_BIG_ENDIAN)
7200 word = (words - 1) - word;
7201 offset = word * UNITS_PER_WORD;
7202 if (BYTES_BIG_ENDIAN)
7203 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7204 else
7205 offset += byte % UNITS_PER_WORD;
7206 }
7207 else
7208 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7209 ptr[offset] = value;
7210 }
7211 return total_bytes;
7212 }
7213
7214
7215 /* Subroutine of native_encode_expr. Encode the REAL_CST
7216 specified by EXPR into the buffer PTR of length LEN bytes.
7217 Return the number of bytes placed in the buffer, or zero
7218 upon failure. */
7219
7220 static int
7221 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7222 {
7223 tree type = TREE_TYPE (expr);
7224 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7225 int byte, offset, word, words, bitpos;
7226 unsigned char value;
7227
7228 /* There are always 32 bits in each long, no matter the size of
7229 the host's long. We handle floating point representations with
7230 up to 192 bits. */
7231 long tmp[6];
7232
7233 if (total_bytes > len)
7234 return 0;
7235 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7236
7237 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7238
7239 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7240 bitpos += BITS_PER_UNIT)
7241 {
7242 byte = (bitpos / BITS_PER_UNIT) & 3;
7243 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7244
7245 if (UNITS_PER_WORD < 4)
7246 {
7247 word = byte / UNITS_PER_WORD;
7248 if (WORDS_BIG_ENDIAN)
7249 word = (words - 1) - word;
7250 offset = word * UNITS_PER_WORD;
7251 if (BYTES_BIG_ENDIAN)
7252 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7253 else
7254 offset += byte % UNITS_PER_WORD;
7255 }
7256 else
7257 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7258 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7259 }
7260 return total_bytes;
7261 }
7262
7263 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7264 specified by EXPR into the buffer PTR of length LEN bytes.
7265 Return the number of bytes placed in the buffer, or zero
7266 upon failure. */
7267
7268 static int
7269 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7270 {
7271 int rsize, isize;
7272 tree part;
7273
7274 part = TREE_REALPART (expr);
7275 rsize = native_encode_expr (part, ptr, len);
7276 if (rsize == 0)
7277 return 0;
7278 part = TREE_IMAGPART (expr);
7279 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7280 if (isize != rsize)
7281 return 0;
7282 return rsize + isize;
7283 }
7284
7285
7286 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7287 specified by EXPR into the buffer PTR of length LEN bytes.
7288 Return the number of bytes placed in the buffer, or zero
7289 upon failure. */
7290
7291 static int
7292 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7293 {
7294 int i, size, offset, count;
7295 tree itype, elem, elements;
7296
7297 offset = 0;
7298 elements = TREE_VECTOR_CST_ELTS (expr);
7299 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7300 itype = TREE_TYPE (TREE_TYPE (expr));
7301 size = GET_MODE_SIZE (TYPE_MODE (itype));
7302 for (i = 0; i < count; i++)
7303 {
7304 if (elements)
7305 {
7306 elem = TREE_VALUE (elements);
7307 elements = TREE_CHAIN (elements);
7308 }
7309 else
7310 elem = NULL_TREE;
7311
7312 if (elem)
7313 {
7314 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7315 return 0;
7316 }
7317 else
7318 {
7319 if (offset + size > len)
7320 return 0;
7321 memset (ptr+offset, 0, size);
7322 }
7323 offset += size;
7324 }
7325 return offset;
7326 }
7327
7328
7329 /* Subroutine of native_encode_expr. Encode the STRING_CST
7330 specified by EXPR into the buffer PTR of length LEN bytes.
7331 Return the number of bytes placed in the buffer, or zero
7332 upon failure. */
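/* For example, encoding a STRING_CST "hi" whose type is char[4]
   yields 'h', 'i', 0, 0: the tail of the buffer is zero-padded. */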
7333
7334 static int
7335 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7336 {
7337 tree type = TREE_TYPE (expr);
7338 HOST_WIDE_INT total_bytes;
7339
7340 if (TREE_CODE (type) != ARRAY_TYPE
7341 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7342 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7343 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7344 return 0;
7345 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7346 if (total_bytes > len)
7347 return 0;
7348 if (TREE_STRING_LENGTH (expr) < total_bytes)
7349 {
7350 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7351 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7352 total_bytes - TREE_STRING_LENGTH (expr));
7353 }
7354 else
7355 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7356 return total_bytes;
7357 }
7358
7359
7360 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7361 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7362 buffer PTR of length LEN bytes. Return the number of bytes
7363 placed in the buffer, or zero upon failure. */
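/* A minimal usage sketch of the encode/interpret pair (the buffer
   size and the destination type here are illustrative only):

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof buf);
     if (len != 0)
       tem = native_interpret_expr (type, buf, len);

   This is the round-trip that fold_view_convert_expr below relies
   on. */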
7364
7365 int
7366 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7367 {
7368 switch (TREE_CODE (expr))
7369 {
7370 case INTEGER_CST:
7371 return native_encode_int (expr, ptr, len);
7372
7373 case REAL_CST:
7374 return native_encode_real (expr, ptr, len);
7375
7376 case COMPLEX_CST:
7377 return native_encode_complex (expr, ptr, len);
7378
7379 case VECTOR_CST:
7380 return native_encode_vector (expr, ptr, len);
7381
7382 case STRING_CST:
7383 return native_encode_string (expr, ptr, len);
7384
7385 default:
7386 return 0;
7387 }
7388 }
7389
7390
7391 /* Subroutine of native_interpret_expr. Interpret the contents of
7392 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7393 If the buffer cannot be interpreted, return NULL_TREE. */
7394
7395 static tree
7396 native_interpret_int (tree type, const unsigned char *ptr, int len)
7397 {
7398 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7399 int byte, offset, word, words;
7400 unsigned char value;
7401 double_int result;
7402
7403 if (total_bytes > len)
7404 return NULL_TREE;
7405 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7406 return NULL_TREE;
7407
7408 result = double_int_zero;
7409 words = total_bytes / UNITS_PER_WORD;
7410
7411 for (byte = 0; byte < total_bytes; byte++)
7412 {
7413 int bitpos = byte * BITS_PER_UNIT;
7414 if (total_bytes > UNITS_PER_WORD)
7415 {
7416 word = byte / UNITS_PER_WORD;
7417 if (WORDS_BIG_ENDIAN)
7418 word = (words - 1) - word;
7419 offset = word * UNITS_PER_WORD;
7420 if (BYTES_BIG_ENDIAN)
7421 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7422 else
7423 offset += byte % UNITS_PER_WORD;
7424 }
7425 else
7426 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7427 value = ptr[offset];
7428
7429 if (bitpos < HOST_BITS_PER_WIDE_INT)
7430 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7431 else
7432 result.high |= (unsigned HOST_WIDE_INT) value
7433 << (bitpos - HOST_BITS_PER_WIDE_INT);
7434 }
7435
7436 return double_int_to_tree (type, result);
7437 }
7438
7439
7440 /* Subroutine of native_interpret_expr. Interpret the contents of
7441 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7442 If the buffer cannot be interpreted, return NULL_TREE. */
7443
7444 static tree
7445 native_interpret_real (tree type, const unsigned char *ptr, int len)
7446 {
7447 enum machine_mode mode = TYPE_MODE (type);
7448 int total_bytes = GET_MODE_SIZE (mode);
7449 int byte, offset, word, words, bitpos;
7450 unsigned char value;
7451 /* There are always 32 bits in each long, no matter the size of
7452 the host's long. We handle floating point representations with
7453 up to 192 bits. */
7454 REAL_VALUE_TYPE r;
7455 long tmp[6];
7456
7458 if (total_bytes > len || total_bytes > 24)
7459 return NULL_TREE;
7460 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7461
7462 memset (tmp, 0, sizeof (tmp));
7463 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7464 bitpos += BITS_PER_UNIT)
7465 {
7466 byte = (bitpos / BITS_PER_UNIT) & 3;
7467 if (UNITS_PER_WORD < 4)
7468 {
7469 word = byte / UNITS_PER_WORD;
7470 if (WORDS_BIG_ENDIAN)
7471 word = (words - 1) - word;
7472 offset = word * UNITS_PER_WORD;
7473 if (BYTES_BIG_ENDIAN)
7474 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7475 else
7476 offset += byte % UNITS_PER_WORD;
7477 }
7478 else
7479 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7480 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7481
7482 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7483 }
7484
7485 real_from_target (&r, tmp, mode);
7486 return build_real (type, r);
7487 }
7488
7489
7490 /* Subroutine of native_interpret_expr. Interpret the contents of
7491 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7492 If the buffer cannot be interpreted, return NULL_TREE. */
7493
7494 static tree
7495 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7496 {
7497 tree etype, rpart, ipart;
7498 int size;
7499
7500 etype = TREE_TYPE (type);
7501 size = GET_MODE_SIZE (TYPE_MODE (etype));
7502 if (size * 2 > len)
7503 return NULL_TREE;
7504 rpart = native_interpret_expr (etype, ptr, size);
7505 if (!rpart)
7506 return NULL_TREE;
7507 ipart = native_interpret_expr (etype, ptr+size, size);
7508 if (!ipart)
7509 return NULL_TREE;
7510 return build_complex (type, rpart, ipart);
7511 }
7512
7513
7514 /* Subroutine of native_interpret_expr. Interpret the contents of
7515 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7516 If the buffer cannot be interpreted, return NULL_TREE. */
7517
7518 static tree
7519 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7520 {
7521 tree etype, elem, elements;
7522 int i, size, count;
7523
7524 etype = TREE_TYPE (type);
7525 size = GET_MODE_SIZE (TYPE_MODE (etype));
7526 count = TYPE_VECTOR_SUBPARTS (type);
7527 if (size * count > len)
7528 return NULL_TREE;
7529
7530 elements = NULL_TREE;
7531 for (i = count - 1; i >= 0; i--)
7532 {
7533 elem = native_interpret_expr (etype, ptr+(i*size), size);
7534 if (!elem)
7535 return NULL_TREE;
7536 elements = tree_cons (NULL_TREE, elem, elements);
7537 }
7538 return build_vector (type, elements);
7539 }
7540
7541
7542 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7543 the buffer PTR of length LEN as a constant of type TYPE. For
7544 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7545 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7546 return NULL_TREE. */
7547
7548 tree
7549 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7550 {
7551 switch (TREE_CODE (type))
7552 {
7553 case INTEGER_TYPE:
7554 case ENUMERAL_TYPE:
7555 case BOOLEAN_TYPE:
7556 return native_interpret_int (type, ptr, len);
7557
7558 case REAL_TYPE:
7559 return native_interpret_real (type, ptr, len);
7560
7561 case COMPLEX_TYPE:
7562 return native_interpret_complex (type, ptr, len);
7563
7564 case VECTOR_TYPE:
7565 return native_interpret_vector (type, ptr, len);
7566
7567 default:
7568 return NULL_TREE;
7569 }
7570 }
7571
7572
7573 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7574 TYPE at compile-time. If we're unable to perform the conversion
7575 return NULL_TREE. */
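/* For example, on a target with 32-bit int and IEEE single-precision
   float (an illustrative assumption), VIEW_CONVERT_EXPR<int>(1.0f)
   folds to 1065353216, i.e. 0x3f800000, the bit pattern of 1.0f. */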
7576
7577 static tree
7578 fold_view_convert_expr (tree type, tree expr)
7579 {
7580 /* We support up to 512-bit values (for V8DFmode). */
7581 unsigned char buffer[64];
7582 int len;
7583
7584 /* Check that the host and target are sane. */
7585 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7586 return NULL_TREE;
7587
7588 len = native_encode_expr (expr, buffer, sizeof (buffer));
7589 if (len == 0)
7590 return NULL_TREE;
7591
7592 return native_interpret_expr (type, buffer, len);
7593 }
7594
7595 /* Build an expression for the address of T. Folds away INDIRECT_REF
7596 to avoid confusing the gimplify process. */
7597
7598 tree
7599 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7600 {
7601 /* The size of the object is not relevant when talking about its address. */
7602 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7603 t = TREE_OPERAND (t, 0);
7604
7605 if (TREE_CODE (t) == INDIRECT_REF)
7606 {
7607 t = TREE_OPERAND (t, 0);
7608
7609 if (TREE_TYPE (t) != ptrtype)
7610 {
7611 t = build1 (NOP_EXPR, ptrtype, t);
7612 SET_EXPR_LOCATION (t, loc);
7613 }
7614 }
7615 else if (TREE_CODE (t) == MEM_REF
7616 && integer_zerop (TREE_OPERAND (t, 1)))
7617 return TREE_OPERAND (t, 0);
7618 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7619 {
7620 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7621
7622 if (TREE_TYPE (t) != ptrtype)
7623 t = fold_convert_loc (loc, ptrtype, t);
7624 }
7625 else
7626 {
7627 t = build1 (ADDR_EXPR, ptrtype, t);
7628 SET_EXPR_LOCATION (t, loc);
7629 }
7630
7631 return t;
7632 }
7633
7634 /* Build an expression for the address of T. */
7635
7636 tree
7637 build_fold_addr_expr_loc (location_t loc, tree t)
7638 {
7639 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7640
7641 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7642 }
7643
7644 /* Fold a unary expression of code CODE and type TYPE with operand
7645 OP0. Return the folded expression if folding is successful.
7646 Otherwise, return NULL_TREE. */
7647
7648 tree
7649 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7650 {
7651 tree tem;
7652 tree arg0;
7653 enum tree_code_class kind = TREE_CODE_CLASS (code);
7654
7655 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7656 && TREE_CODE_LENGTH (code) == 1);
7657
7658 arg0 = op0;
7659 if (arg0)
7660 {
7661 if (CONVERT_EXPR_CODE_P (code)
7662 || code == FLOAT_EXPR || code == ABS_EXPR)
7663 {
7664 /* Don't use STRIP_NOPS, because signedness of argument type
7665 matters. */
7666 STRIP_SIGN_NOPS (arg0);
7667 }
7668 else
7669 {
7670 /* Strip any conversions that don't change the mode. This
7671 is safe for every expression, except for a comparison
7672 expression because its signedness is derived from its
7673 operands.
7674
7675 Note that this is done as an internal manipulation within
7676 the constant folder, in order to find the simplest
7677 representation of the arguments so that their form can be
7678 studied. In any case, the appropriate type conversions
7679 should be put back in the tree that will get out of the
7680 constant folder. */
7681 STRIP_NOPS (arg0);
7682 }
7683 }
7684
7685 if (TREE_CODE_CLASS (code) == tcc_unary)
7686 {
7687 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7688 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7689 fold_build1_loc (loc, code, type,
7690 fold_convert_loc (loc, TREE_TYPE (op0),
7691 TREE_OPERAND (arg0, 1))));
7692 else if (TREE_CODE (arg0) == COND_EXPR)
7693 {
7694 tree arg01 = TREE_OPERAND (arg0, 1);
7695 tree arg02 = TREE_OPERAND (arg0, 2);
7696 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7697 arg01 = fold_build1_loc (loc, code, type,
7698 fold_convert_loc (loc,
7699 TREE_TYPE (op0), arg01));
7700 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7701 arg02 = fold_build1_loc (loc, code, type,
7702 fold_convert_loc (loc,
7703 TREE_TYPE (op0), arg02));
7704 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7705 arg01, arg02);
7706
7707 /* If this was a conversion, and all we did was to move it
7708 inside the COND_EXPR, bring it back out. But leave it if
7709 it is a conversion from integer to integer and the
7710 result precision is no wider than a word since such a
7711 conversion is cheap and may be optimized away by combine,
7712 while it couldn't if it were outside the COND_EXPR. Then return
7713 so we don't get into an infinite recursion loop taking the
7714 conversion out and then back in. */
7715
7716 if ((CONVERT_EXPR_CODE_P (code)
7717 || code == NON_LVALUE_EXPR)
7718 && TREE_CODE (tem) == COND_EXPR
7719 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7720 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7721 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7722 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7723 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7724 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7725 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7726 && (INTEGRAL_TYPE_P
7727 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7728 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7729 || flag_syntax_only))
7730 {
7731 tem = build1 (code, type,
7732 build3 (COND_EXPR,
7733 TREE_TYPE (TREE_OPERAND
7734 (TREE_OPERAND (tem, 1), 0)),
7735 TREE_OPERAND (tem, 0),
7736 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7737 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7738 SET_EXPR_LOCATION (tem, loc);
7739 }
7740 return tem;
7741 }
7742 else if (COMPARISON_CLASS_P (arg0))
7743 {
7744 if (TREE_CODE (type) == BOOLEAN_TYPE)
7745 {
7746 arg0 = copy_node (arg0);
7747 TREE_TYPE (arg0) = type;
7748 return arg0;
7749 }
7750 else if (TREE_CODE (type) != INTEGER_TYPE)
7751 return fold_build3_loc (loc, COND_EXPR, type, arg0,
7752 fold_build1_loc (loc, code, type,
7753 integer_one_node),
7754 fold_build1_loc (loc, code, type,
7755 integer_zero_node));
7756 }
7757 }
7758
7759 switch (code)
7760 {
7761 case PAREN_EXPR:
7762 /* Re-association barriers around constants and other re-association
7763 barriers can be removed. */
7764 if (CONSTANT_CLASS_P (op0)
7765 || TREE_CODE (op0) == PAREN_EXPR)
7766 return fold_convert_loc (loc, type, op0);
7767 return NULL_TREE;
7768
7769 CASE_CONVERT:
7770 case FLOAT_EXPR:
7771 case FIX_TRUNC_EXPR:
7772 if (TREE_TYPE (op0) == type)
7773 return op0;
7774
7775 /* If we have (type) (a CMP b) and type is an integral type, return
7776 new expression involving the new type. */
7777 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7778 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7779 TREE_OPERAND (op0, 1));
7780
7781 /* Handle cases of two conversions in a row. */
7782 if (CONVERT_EXPR_P (op0))
7783 {
7784 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7785 tree inter_type = TREE_TYPE (op0);
7786 int inside_int = INTEGRAL_TYPE_P (inside_type);
7787 int inside_ptr = POINTER_TYPE_P (inside_type);
7788 int inside_float = FLOAT_TYPE_P (inside_type);
7789 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7790 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7791 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7792 int inter_int = INTEGRAL_TYPE_P (inter_type);
7793 int inter_ptr = POINTER_TYPE_P (inter_type);
7794 int inter_float = FLOAT_TYPE_P (inter_type);
7795 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7796 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7797 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7798 int final_int = INTEGRAL_TYPE_P (type);
7799 int final_ptr = POINTER_TYPE_P (type);
7800 int final_float = FLOAT_TYPE_P (type);
7801 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7802 unsigned int final_prec = TYPE_PRECISION (type);
7803 int final_unsignedp = TYPE_UNSIGNED (type);
7804
7805 /* In addition to the cases of two conversions in a row
7806 handled below, if we are converting something to its own
7807 type via an object of identical or wider precision, neither
7808 conversion is needed. */
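/* E.g. (int)(long)x with x of type int needs neither conversion,
   assuming long is at least as wide as int. */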
7809 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7810 && (((inter_int || inter_ptr) && final_int)
7811 || (inter_float && final_float))
7812 && inter_prec >= final_prec)
7813 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7814
7815 /* Likewise, if the intermediate and initial types are either both
7816 float or both integer, we don't need the middle conversion if the
7817 former is wider than the latter and doesn't change the signedness
7818 (for integers). Avoid this if the final type is a pointer since
7819 then we sometimes need the middle conversion. Likewise if the
7820 final type has a precision not equal to the size of its mode. */
7821 if (((inter_int && inside_int)
7822 || (inter_float && inside_float)
7823 || (inter_vec && inside_vec))
7824 && inter_prec >= inside_prec
7825 && (inter_float || inter_vec
7826 || inter_unsignedp == inside_unsignedp)
7827 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7828 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7829 && ! final_ptr
7830 && (! final_vec || inter_prec == inside_prec))
7831 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7832
7833 /* If we have a sign-extension of a zero-extended value, we can
7834 replace that by a single zero-extension. */
7835 if (inside_int && inter_int && final_int
7836 && inside_prec < inter_prec && inter_prec < final_prec
7837 && inside_unsignedp && !inter_unsignedp)
7838 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7839
7840 /* Two conversions in a row are not needed unless:
7841 - some conversion is floating-point (overstrict for now), or
7842 - some conversion is a vector (overstrict for now), or
7843 - the intermediate type is narrower than both initial and
7844 final, or
7845 - the intermediate type and innermost type differ in signedness,
7846 and the outermost type is wider than the intermediate, or
7847 - the initial type is a pointer type and the precisions of the
7848 intermediate and final types differ, or
7849 - the final type is a pointer type and the precisions of the
7850 initial and intermediate types differ. */
7851 if (! inside_float && ! inter_float && ! final_float
7852 && ! inside_vec && ! inter_vec && ! final_vec
7853 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7854 && ! (inside_int && inter_int
7855 && inter_unsignedp != inside_unsignedp
7856 && inter_prec < final_prec)
7857 && ((inter_unsignedp && inter_prec > inside_prec)
7858 == (final_unsignedp && final_prec > inter_prec))
7859 && ! (inside_ptr && inter_prec != final_prec)
7860 && ! (final_ptr && inside_prec != inter_prec)
7861 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7862 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7863 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7864 }
7865
7866 /* Handle (T *)&A.B.C for A being of type T and B and C
7867 living at offset zero. This occurs frequently in
7868 C++ upcasting and then accessing the base. */
7869 if (TREE_CODE (op0) == ADDR_EXPR
7870 && POINTER_TYPE_P (type)
7871 && handled_component_p (TREE_OPERAND (op0, 0)))
7872 {
7873 HOST_WIDE_INT bitsize, bitpos;
7874 tree offset;
7875 enum machine_mode mode;
7876 int unsignedp, volatilep;
7877 tree base = TREE_OPERAND (op0, 0);
7878 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7879 &mode, &unsignedp, &volatilep, false);
7880 /* If the reference was to a (constant) zero offset, we can use
7881 the address of the base if it has the same base type
7882 as the result type and the pointer type is unqualified. */
7883 if (! offset && bitpos == 0
7884 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7885 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7886 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7887 return fold_convert_loc (loc, type,
7888 build_fold_addr_expr_loc (loc, base));
7889 }
7890
7891 if (TREE_CODE (op0) == MODIFY_EXPR
7892 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7893 /* Detect assigning a bitfield. */
7894 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7895 && DECL_BIT_FIELD
7896 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7897 {
7898 /* Don't leave an assignment inside a conversion
7899 unless assigning a bitfield. */
7900 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7901 /* First do the assignment, then return the converted constant. */
7902 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7903 TREE_NO_WARNING (tem) = 1;
7904 TREE_USED (tem) = 1;
7905 SET_EXPR_LOCATION (tem, loc);
7906 return tem;
7907 }
7908
7909 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7910 constants (if x has signed type, the sign bit cannot be set
7911 in c). This folds extension into the BIT_AND_EXPR.
7912 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7913 very likely don't have maximal range for their precision and this
7914 transformation effectively doesn't preserve non-maximal ranges. */
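/* E.g. (unsigned int)(x & 0xff) with x a 32-bit signed int folds to
   (unsigned int) x & 0xff, since the sign bit cannot be set in the
   mask (the type sizes here are an illustrative assumption). */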
7915 if (TREE_CODE (type) == INTEGER_TYPE
7916 && TREE_CODE (op0) == BIT_AND_EXPR
7917 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7918 {
7919 tree and_expr = op0;
7920 tree and0 = TREE_OPERAND (and_expr, 0);
7921 tree and1 = TREE_OPERAND (and_expr, 1);
7922 int change = 0;
7923
7924 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7925 || (TYPE_PRECISION (type)
7926 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7927 change = 1;
7928 else if (TYPE_PRECISION (TREE_TYPE (and1))
7929 <= HOST_BITS_PER_WIDE_INT
7930 && host_integerp (and1, 1))
7931 {
7932 unsigned HOST_WIDE_INT cst;
7933
7934 cst = tree_low_cst (and1, 1);
7935 cst &= (HOST_WIDE_INT) -1
7936 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7937 change = (cst == 0);
7938 #ifdef LOAD_EXTEND_OP
7939 if (change
7940 && !flag_syntax_only
7941 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7942 == ZERO_EXTEND))
7943 {
7944 tree uns = unsigned_type_for (TREE_TYPE (and0));
7945 and0 = fold_convert_loc (loc, uns, and0);
7946 and1 = fold_convert_loc (loc, uns, and1);
7947 }
7948 #endif
7949 }
7950 if (change)
7951 {
7952 tem = force_fit_type_double (type, tree_to_double_int (and1),
7953 0, TREE_OVERFLOW (and1));
7954 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7955 fold_convert_loc (loc, type, and0), tem);
7956 }
7957 }
7958
7959 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7960 when one of the new casts will fold away. Conservatively we assume
7961 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7962 if (POINTER_TYPE_P (type)
7963 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7964 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7965 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7966 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7967 {
7968 tree arg00 = TREE_OPERAND (arg0, 0);
7969 tree arg01 = TREE_OPERAND (arg0, 1);
7970
7971 return fold_build2_loc (loc,
7972 TREE_CODE (arg0), type,
7973 fold_convert_loc (loc, type, arg00),
7974 fold_convert_loc (loc, sizetype, arg01));
7975 }
7976
7977 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7978 of the same precision, and the type of X is an integer type not
7979 narrower than T1 or T2, i.e. the cast (T2)X isn't an extension. */
7980 if (INTEGRAL_TYPE_P (type)
7981 && TREE_CODE (op0) == BIT_NOT_EXPR
7982 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7983 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7984 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7985 {
7986 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7987 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7988 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7989 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7990 fold_convert_loc (loc, type, tem));
7991 }
7992
7993 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7994 type of X and Y (integer types only). */
7995 if (INTEGRAL_TYPE_P (type)
7996 && TREE_CODE (op0) == MULT_EXPR
7997 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7998 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7999 {
8000 /* Be careful not to introduce new overflows. */
8001 tree mult_type;
8002 if (TYPE_OVERFLOW_WRAPS (type))
8003 mult_type = type;
8004 else
8005 mult_type = unsigned_type_for (type);
8006
8007 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8008 {
8009 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8010 fold_convert_loc (loc, mult_type,
8011 TREE_OPERAND (op0, 0)),
8012 fold_convert_loc (loc, mult_type,
8013 TREE_OPERAND (op0, 1)));
8014 return fold_convert_loc (loc, type, tem);
8015 }
8016 }
8017
8018 tem = fold_convert_const (code, type, op0);
8019 return tem ? tem : NULL_TREE;
8020
8021 case ADDR_SPACE_CONVERT_EXPR:
8022 if (integer_zerop (arg0))
8023 return fold_convert_const (code, type, arg0);
8024 return NULL_TREE;
8025
8026 case FIXED_CONVERT_EXPR:
8027 tem = fold_convert_const (code, type, arg0);
8028 return tem ? tem : NULL_TREE;
8029
8030 case VIEW_CONVERT_EXPR:
8031 if (TREE_TYPE (op0) == type)
8032 return op0;
8033 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8034 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8035 type, TREE_OPERAND (op0, 0));
8036 if (TREE_CODE (op0) == MEM_REF)
8037 return fold_build2_loc (loc, MEM_REF, type,
8038 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8039
8040 /* For integral conversions with the same precision or pointer
8041 conversions use a NOP_EXPR instead. */
8042 if ((INTEGRAL_TYPE_P (type)
8043 || POINTER_TYPE_P (type))
8044 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8045 || POINTER_TYPE_P (TREE_TYPE (op0)))
8046 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8047 return fold_convert_loc (loc, type, op0);
8048
8049 /* Strip inner integral conversions that do not change the precision. */
8050 if (CONVERT_EXPR_P (op0)
8051 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8052 || POINTER_TYPE_P (TREE_TYPE (op0)))
8053 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8054 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8055 && (TYPE_PRECISION (TREE_TYPE (op0))
8056 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8057 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8058 type, TREE_OPERAND (op0, 0));
8059
8060 return fold_view_convert_expr (type, op0);
8061
8062 case NEGATE_EXPR:
8063 tem = fold_negate_expr (loc, arg0);
8064 if (tem)
8065 return fold_convert_loc (loc, type, tem);
8066 return NULL_TREE;
8067
8068 case ABS_EXPR:
8069 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8070 return fold_abs_const (arg0, type);
8071 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8072 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8073 /* Convert fabs((double)float) into (double)fabsf(float). */
8074 else if (TREE_CODE (arg0) == NOP_EXPR
8075 && TREE_CODE (type) == REAL_TYPE)
8076 {
8077 tree targ0 = strip_float_extensions (arg0);
8078 if (targ0 != arg0)
8079 return fold_convert_loc (loc, type,
8080 fold_build1_loc (loc, ABS_EXPR,
8081 TREE_TYPE (targ0),
8082 targ0));
8083 }
8084 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8085 else if (TREE_CODE (arg0) == ABS_EXPR)
8086 return arg0;
8087 else if (tree_expr_nonnegative_p (arg0))
8088 return arg0;
8089
8090 /* Strip sign ops from argument. */
8091 if (TREE_CODE (type) == REAL_TYPE)
8092 {
8093 tem = fold_strip_sign_ops (arg0);
8094 if (tem)
8095 return fold_build1_loc (loc, ABS_EXPR, type,
8096 fold_convert_loc (loc, type, tem));
8097 }
8098 return NULL_TREE;
8099
8100 case CONJ_EXPR:
8101 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8102 return fold_convert_loc (loc, type, arg0);
8103 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8104 {
8105 tree itype = TREE_TYPE (type);
8106 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8107 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8108 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8109 negate_expr (ipart));
8110 }
8111 if (TREE_CODE (arg0) == COMPLEX_CST)
8112 {
8113 tree itype = TREE_TYPE (type);
8114 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8115 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8116 return build_complex (type, rpart, negate_expr (ipart));
8117 }
8118 if (TREE_CODE (arg0) == CONJ_EXPR)
8119 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8120 return NULL_TREE;
8121
8122 case BIT_NOT_EXPR:
8123 if (TREE_CODE (arg0) == INTEGER_CST)
8124 return fold_not_const (arg0, type);
8125 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8126 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8127 /* Convert ~ (-A) to A - 1. */
8128 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8129 return fold_build2_loc (loc, MINUS_EXPR, type,
8130 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8131 build_int_cst (type, 1));
8132 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8133 else if (INTEGRAL_TYPE_P (type)
8134 && ((TREE_CODE (arg0) == MINUS_EXPR
8135 && integer_onep (TREE_OPERAND (arg0, 1)))
8136 || (TREE_CODE (arg0) == PLUS_EXPR
8137 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8138 return fold_build1_loc (loc, NEGATE_EXPR, type,
8139 fold_convert_loc (loc, type,
8140 TREE_OPERAND (arg0, 0)));
8141 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8142 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8143 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8144 fold_convert_loc (loc, type,
8145 TREE_OPERAND (arg0, 0)))))
8146 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8147 fold_convert_loc (loc, type,
8148 TREE_OPERAND (arg0, 1)));
8149 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8150 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8151 fold_convert_loc (loc, type,
8152 TREE_OPERAND (arg0, 1)))))
8153 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8154 fold_convert_loc (loc, type,
8155 TREE_OPERAND (arg0, 0)), tem);
8156 /* Perform BIT_NOT_EXPR on each element individually. */
8157 else if (TREE_CODE (arg0) == VECTOR_CST)
8158 {
8159 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8160 int count = TYPE_VECTOR_SUBPARTS (type), i;
8161
8162 for (i = 0; i < count; i++)
8163 {
8164 if (elements)
8165 {
8166 elem = TREE_VALUE (elements);
8167 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8168 if (elem == NULL_TREE)
8169 break;
8170 elements = TREE_CHAIN (elements);
8171 }
8172 else
8173 elem = build_int_cst (TREE_TYPE (type), -1);
8174 list = tree_cons (NULL_TREE, elem, list);
8175 }
8176 if (i == count)
8177 return build_vector (type, nreverse (list));
8178 }
8179
8180 return NULL_TREE;
8181
8182 case TRUTH_NOT_EXPR:
8183 /* The argument to invert_truthvalue must have Boolean type. */
8184 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8185 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8186
8187 /* Note that the operand of this must be an int
8188 and its values must be 0 or 1.
8189 ("true" is a fixed value perhaps depending on the language,
8190 but we don't handle values other than 1 correctly yet.) */
8191 tem = fold_truth_not_expr (loc, arg0);
8192 if (!tem)
8193 return NULL_TREE;
8194 return fold_convert_loc (loc, type, tem);
8195
8196 case REALPART_EXPR:
8197 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8198 return fold_convert_loc (loc, type, arg0);
8199 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8200 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8201 TREE_OPERAND (arg0, 1));
8202 if (TREE_CODE (arg0) == COMPLEX_CST)
8203 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8204 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8205 {
8206 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8207 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8208 fold_build1_loc (loc, REALPART_EXPR, itype,
8209 TREE_OPERAND (arg0, 0)),
8210 fold_build1_loc (loc, REALPART_EXPR, itype,
8211 TREE_OPERAND (arg0, 1)));
8212 return fold_convert_loc (loc, type, tem);
8213 }
8214 if (TREE_CODE (arg0) == CONJ_EXPR)
8215 {
8216 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8217 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8218 TREE_OPERAND (arg0, 0));
8219 return fold_convert_loc (loc, type, tem);
8220 }
8221 if (TREE_CODE (arg0) == CALL_EXPR)
8222 {
8223 tree fn = get_callee_fndecl (arg0);
8224 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8225 switch (DECL_FUNCTION_CODE (fn))
8226 {
8227 CASE_FLT_FN (BUILT_IN_CEXPI):
8228 fn = mathfn_built_in (type, BUILT_IN_COS);
8229 if (fn)
8230 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8231 break;
8232
8233 default:
8234 break;
8235 }
8236 }
8237 return NULL_TREE;
8238
8239 case IMAGPART_EXPR:
8240 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8241 return build_zero_cst (type);
8242 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8243 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8244 TREE_OPERAND (arg0, 0));
8245 if (TREE_CODE (arg0) == COMPLEX_CST)
8246 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8247 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8248 {
8249 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8250 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8251 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8252 TREE_OPERAND (arg0, 0)),
8253 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8254 TREE_OPERAND (arg0, 1)));
8255 return fold_convert_loc (loc, type, tem);
8256 }
8257 if (TREE_CODE (arg0) == CONJ_EXPR)
8258 {
8259 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8260 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8261 return fold_convert_loc (loc, type, negate_expr (tem));
8262 }
8263 if (TREE_CODE (arg0) == CALL_EXPR)
8264 {
8265 tree fn = get_callee_fndecl (arg0);
8266 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8267 switch (DECL_FUNCTION_CODE (fn))
8268 {
8269 CASE_FLT_FN (BUILT_IN_CEXPI):
8270 fn = mathfn_built_in (type, BUILT_IN_SIN);
8271 if (fn)
8272 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8273 break;
8274
8275 default:
8276 break;
8277 }
8278 }
8279 return NULL_TREE;
8280
8281 case INDIRECT_REF:
8282 /* Fold *&X to X if X is an lvalue. */
8283 if (TREE_CODE (op0) == ADDR_EXPR)
8284 {
8285 tree op00 = TREE_OPERAND (op0, 0);
8286 if ((TREE_CODE (op00) == VAR_DECL
8287 || TREE_CODE (op00) == PARM_DECL
8288 || TREE_CODE (op00) == RESULT_DECL)
8289 && !TREE_READONLY (op00))
8290 return op00;
8291 }
8292 return NULL_TREE;
8293
8294 default:
8295 return NULL_TREE;
8296 } /* switch (code) */
8297 }
8298
8299
8300 /* If the operation was a conversion do _not_ mark a resulting constant
8301 with TREE_OVERFLOW if the original constant was not. These conversions
8302 have implementation defined behavior and retaining the TREE_OVERFLOW
8303 flag here would confuse later passes such as VRP. */
8304 tree
8305 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8306 tree type, tree op0)
8307 {
8308 tree res = fold_unary_loc (loc, code, type, op0);
8309 if (res
8310 && TREE_CODE (res) == INTEGER_CST
8311 && TREE_CODE (op0) == INTEGER_CST
8312 && CONVERT_EXPR_CODE_P (code))
8313 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8314
8315 return res;
8316 }
8317
8318 /* Fold a binary expression of code CODE and type TYPE with operands
8319 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8320 Return the folded expression if folding is successful. Otherwise,
8321 return NULL_TREE. */
8322
8323 static tree
8324 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8325 {
8326 enum tree_code compl_code;
8327
8328 if (code == MIN_EXPR)
8329 compl_code = MAX_EXPR;
8330 else if (code == MAX_EXPR)
8331 compl_code = MIN_EXPR;
8332 else
8333 gcc_unreachable ();
8334
8335 /* MIN (MAX (a, b), b) == b. */
8336 if (TREE_CODE (op0) == compl_code
8337 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8338 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8339
8340 /* MIN (MAX (b, a), b) == b. */
8341 if (TREE_CODE (op0) == compl_code
8342 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8343 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8344 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8345
8346 /* MIN (a, MAX (a, b)) == a. */
8347 if (TREE_CODE (op1) == compl_code
8348 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8349 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8350 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8351
8352 /* MIN (a, MAX (b, a)) == a. */
8353 if (TREE_CODE (op1) == compl_code
8354 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8355 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8356 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8357
8358 return NULL_TREE;
8359 }
8360
8361 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8362 by changing CODE to reduce the magnitude of constants involved in
8363 ARG0 of the comparison.
8364 Returns a canonicalized comparison tree if a simplification was
8365 possible, otherwise returns NULL_TREE.
8366 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8367 valid if signed overflow is undefined. */
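/* For example, assuming signed overflow is undefined, X + 1 <= Y is
   canonicalized to X < Y, and the constant-only form 10 <= Y is
   canonicalized to Y > 9. */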
8368
8369 static tree
8370 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8371 tree arg0, tree arg1,
8372 bool *strict_overflow_p)
8373 {
8374 enum tree_code code0 = TREE_CODE (arg0);
8375 tree t, cst0 = NULL_TREE;
8376 int sgn0;
8377 bool swap = false;
8378
8379 /* Match A +- CST code arg1 and CST code arg1. We can change the
8380 first form only if overflow is undefined. */
8381 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8382 /* In principle pointers also have undefined overflow behavior,
8383 but that causes problems elsewhere. */
8384 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8385 && (code0 == MINUS_EXPR
8386 || code0 == PLUS_EXPR)
8387 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8388 || code0 == INTEGER_CST))
8389 return NULL_TREE;
8390
8391 /* Identify the constant in arg0 and its sign. */
8392 if (code0 == INTEGER_CST)
8393 cst0 = arg0;
8394 else
8395 cst0 = TREE_OPERAND (arg0, 1);
8396 sgn0 = tree_int_cst_sgn (cst0);
8397
8398 /* Overflowed constants and zero will cause problems. */
8399 if (integer_zerop (cst0)
8400 || TREE_OVERFLOW (cst0))
8401 return NULL_TREE;
8402
8403 /* See if we can reduce the magnitude of the constant in
8404 arg0 by changing the comparison code. */
8405 if (code0 == INTEGER_CST)
8406 {
8407 /* CST <= arg1 -> CST-1 < arg1. */
8408 if (code == LE_EXPR && sgn0 == 1)
8409 code = LT_EXPR;
8410 /* -CST < arg1 -> -CST-1 <= arg1. */
8411 else if (code == LT_EXPR && sgn0 == -1)
8412 code = LE_EXPR;
8413 /* CST > arg1 -> CST-1 >= arg1. */
8414 else if (code == GT_EXPR && sgn0 == 1)
8415 code = GE_EXPR;
8416 /* -CST >= arg1 -> -CST-1 > arg1. */
8417 else if (code == GE_EXPR && sgn0 == -1)
8418 code = GT_EXPR;
8419 else
8420 return NULL_TREE;
8421 /* arg1 code' CST' might be more canonical. */
8422 swap = true;
8423 }
8424 else
8425 {
8426 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8427 if (code == LT_EXPR
8428 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8429 code = LE_EXPR;
8430 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8431 else if (code == GT_EXPR
8432 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8433 code = GE_EXPR;
8434 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8435 else if (code == LE_EXPR
8436 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8437 code = LT_EXPR;
8438 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8439 else if (code == GE_EXPR
8440 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8441 code = GT_EXPR;
8442 else
8443 return NULL_TREE;
8444 *strict_overflow_p = true;
8445 }
8446
8447 /* Now build the constant reduced in magnitude. But not if that
8448 would produce one outside of its type's range. */
8449 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8450 && ((sgn0 == 1
8451 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8452 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8453 || (sgn0 == -1
8454 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8455 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8456 /* We cannot swap the comparison here as that would cause us to
8457 endlessly recurse. */
8458 return NULL_TREE;
8459
8460 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8461 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8462 if (code0 != INTEGER_CST)
8463 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8464
8465 /* If swapping might yield a more canonical form, do so. */
8466 if (swap)
8467 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8468 else
8469 return fold_build2_loc (loc, code, type, t, arg1);
8470 }
8471
8472 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8473 overflow further. Try to decrease the magnitude of constants involved
8474 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8475 and put sole constants at the second argument position.
8476 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8477
8478 static tree
8479 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8480 tree arg0, tree arg1)
8481 {
8482 tree t;
8483 bool strict_overflow_p;
8484 const char * const warnmsg = G_("assuming signed overflow does not occur "
8485 "when reducing constant in comparison");
8486
8487 /* Try canonicalization by simplifying arg0. */
8488 strict_overflow_p = false;
8489 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8490 &strict_overflow_p);
8491 if (t)
8492 {
8493 if (strict_overflow_p)
8494 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8495 return t;
8496 }
8497
8498 /* Try canonicalization by simplifying arg1 using the swapped
8499 comparison. */
8500 code = swap_tree_comparison (code);
8501 strict_overflow_p = false;
8502 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8503 &strict_overflow_p);
8504 if (t && strict_overflow_p)
8505 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8506 return t;
8507 }
8508
8509 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8510 space. This is used to avoid issuing overflow warnings for
8511 expressions like &p->x which cannot wrap. */
8512
8513 static bool
8514 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8515 {
8516 unsigned HOST_WIDE_INT offset_low, total_low;
8517 HOST_WIDE_INT size, offset_high, total_high;
8518
8519 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8520 return true;
8521
8522 if (bitpos < 0)
8523 return true;
8524
8525 if (offset == NULL_TREE)
8526 {
8527 offset_low = 0;
8528 offset_high = 0;
8529 }
8530 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8531 return true;
8532 else
8533 {
8534 offset_low = TREE_INT_CST_LOW (offset);
8535 offset_high = TREE_INT_CST_HIGH (offset);
8536 }
8537
8538 if (add_double_with_sign (offset_low, offset_high,
8539 bitpos / BITS_PER_UNIT, 0,
8540 &total_low, &total_high,
8541 true))
8542 return true;
8543
8544 if (total_high != 0)
8545 return true;
8546
8547 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8548 if (size <= 0)
8549 return true;
8550
8551 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8552 array. */
8553 if (TREE_CODE (base) == ADDR_EXPR)
8554 {
8555 HOST_WIDE_INT base_size;
8556
8557 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8558 if (base_size > 0 && size < base_size)
8559 size = base_size;
8560 }
8561
8562 return total_low > (unsigned HOST_WIDE_INT) size;
8563 }
8564
8565 /* Subroutine of fold_binary. This routine performs all of the
8566 transformations that are common to the equality/inequality
8567 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8568 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8569 fold_binary should go through fold_binary instead. Fold a comparison with
8570 tree code CODE and type TYPE with operands OP0 and OP1. Return
8571 the folded comparison or NULL_TREE. */
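/* For example, X + 5 < 10 with signed X folds to X < 5 (emitting a
   strict-overflow warning), and a comparison such as &a.b == &a.c
   can often be decided at compile time from the base objects and
   field offsets handled below. */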
8572
8573 static tree
8574 fold_comparison (location_t loc, enum tree_code code, tree type,
8575 tree op0, tree op1)
8576 {
8577 tree arg0, arg1, tem;
8578
8579 arg0 = op0;
8580 arg1 = op1;
8581
8582 STRIP_SIGN_NOPS (arg0);
8583 STRIP_SIGN_NOPS (arg1);
8584
8585 tem = fold_relational_const (code, type, arg0, arg1);
8586 if (tem != NULL_TREE)
8587 return tem;
8588
8589 /* If one arg is a real or integer constant, put it last. */
8590 if (tree_swap_operands_p (arg0, arg1, true))
8591 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8592
8593 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8594 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8595 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8596 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8597 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8598 && (TREE_CODE (arg1) == INTEGER_CST
8599 && !TREE_OVERFLOW (arg1)))
8600 {
8601 tree const1 = TREE_OPERAND (arg0, 1);
8602 tree const2 = arg1;
8603 tree variable = TREE_OPERAND (arg0, 0);
8604 tree lhs;
8605 int lhs_add;
8606 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8607
8608 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8609 TREE_TYPE (arg1), const2, const1);
8610
8611 /* If the constant operation overflowed this can be
8612 simplified as a comparison against INT_MAX/INT_MIN. */
8613 if (TREE_CODE (lhs) == INTEGER_CST
8614 && TREE_OVERFLOW (lhs))
8615 {
8616 int const1_sgn = tree_int_cst_sgn (const1);
8617 enum tree_code code2 = code;
8618
8619 /* Get the sign of the constant on the lhs if the
8620 operation were VARIABLE + CONST1. */
8621 if (TREE_CODE (arg0) == MINUS_EXPR)
8622 const1_sgn = -const1_sgn;
8623
8624 /* The sign of the constant determines if we overflowed
8625 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8626 Canonicalize to the INT_MIN overflow by swapping the comparison
8627 if necessary. */
8628 if (const1_sgn == -1)
8629 code2 = swap_tree_comparison (code);
8630
8631 /* We now can look at the canonicalized case
8632 VARIABLE + 1 CODE2 INT_MIN
8633 and decide on the result. */
8634 if (code2 == LT_EXPR
8635 || code2 == LE_EXPR
8636 || code2 == EQ_EXPR)
8637 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8638 else if (code2 == NE_EXPR
8639 || code2 == GE_EXPR
8640 || code2 == GT_EXPR)
8641 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8642 }
8643
8644 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8645 && (TREE_CODE (lhs) != INTEGER_CST
8646 || !TREE_OVERFLOW (lhs)))
8647 {
8648 fold_overflow_warning ("assuming signed overflow does not occur "
8649 "when changing X +- C1 cmp C2 to "
8650 				 "X cmp C2 -+ C1",
8651 WARN_STRICT_OVERFLOW_COMPARISON);
8652 return fold_build2_loc (loc, code, type, variable, lhs);
8653 }
8654 }
8655
8656   /* For comparisons of pointers we can decompose them into a compile time
8657 comparison of the base objects and the offsets into the object.
8658 This requires at least one operand being an ADDR_EXPR or a
8659 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
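  /* E.g. "&s.f == &s.g" can be decided by comparing the bit positions
     of the fields f and g within s (a sketch of the idea).  */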
8660 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8661 && (TREE_CODE (arg0) == ADDR_EXPR
8662 || TREE_CODE (arg1) == ADDR_EXPR
8663 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8664 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8665 {
8666 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8667 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8668 enum machine_mode mode;
8669 int volatilep, unsignedp;
8670 bool indirect_base0 = false, indirect_base1 = false;
8671
8672 /* Get base and offset for the access. Strip ADDR_EXPR for
8673 get_inner_reference, but put it back by stripping INDIRECT_REF
8674 off the base object if possible. indirect_baseN will be true
8675 if baseN is not an address but refers to the object itself. */
8676 base0 = arg0;
8677 if (TREE_CODE (arg0) == ADDR_EXPR)
8678 {
8679 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8680 &bitsize, &bitpos0, &offset0, &mode,
8681 &unsignedp, &volatilep, false);
8682 if (TREE_CODE (base0) == INDIRECT_REF)
8683 base0 = TREE_OPERAND (base0, 0);
8684 else
8685 indirect_base0 = true;
8686 }
8687 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8688 {
8689 base0 = TREE_OPERAND (arg0, 0);
8690 STRIP_SIGN_NOPS (base0);
8691 if (TREE_CODE (base0) == ADDR_EXPR)
8692 {
8693 base0 = TREE_OPERAND (base0, 0);
8694 indirect_base0 = true;
8695 }
8696 offset0 = TREE_OPERAND (arg0, 1);
8697 }
8698
8699 base1 = arg1;
8700 if (TREE_CODE (arg1) == ADDR_EXPR)
8701 {
8702 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8703 &bitsize, &bitpos1, &offset1, &mode,
8704 &unsignedp, &volatilep, false);
8705 if (TREE_CODE (base1) == INDIRECT_REF)
8706 base1 = TREE_OPERAND (base1, 0);
8707 else
8708 indirect_base1 = true;
8709 }
8710 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8711 {
8712 base1 = TREE_OPERAND (arg1, 0);
8713 STRIP_SIGN_NOPS (base1);
8714 if (TREE_CODE (base1) == ADDR_EXPR)
8715 {
8716 base1 = TREE_OPERAND (base1, 0);
8717 indirect_base1 = true;
8718 }
8719 offset1 = TREE_OPERAND (arg1, 1);
8720 }
8721
8722 /* A local variable can never be pointed to by
8723 the default SSA name of an incoming parameter. */
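      /* E.g. in "int f (int *p) { int x; return p == &x; }" the incoming
	 P cannot point to the local X, so the equality folds to false
	 (a sketch).  */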
8724 if ((TREE_CODE (arg0) == ADDR_EXPR
8725 && indirect_base0
8726 && TREE_CODE (base0) == VAR_DECL
8727 && auto_var_in_fn_p (base0, current_function_decl)
8728 && !indirect_base1
8729 && TREE_CODE (base1) == SSA_NAME
8730 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8731 && SSA_NAME_IS_DEFAULT_DEF (base1))
8732 || (TREE_CODE (arg1) == ADDR_EXPR
8733 && indirect_base1
8734 && TREE_CODE (base1) == VAR_DECL
8735 && auto_var_in_fn_p (base1, current_function_decl)
8736 && !indirect_base0
8737 && TREE_CODE (base0) == SSA_NAME
8738 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8739 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8740 {
8741 if (code == NE_EXPR)
8742 return constant_boolean_node (1, type);
8743 else if (code == EQ_EXPR)
8744 return constant_boolean_node (0, type);
8745 }
8746 /* If we have equivalent bases we might be able to simplify. */
8747 else if (indirect_base0 == indirect_base1
8748 && operand_equal_p (base0, base1, 0))
8749 {
8750 /* We can fold this expression to a constant if the non-constant
8751 offset parts are equal. */
8752 if ((offset0 == offset1
8753 || (offset0 && offset1
8754 && operand_equal_p (offset0, offset1, 0)))
8755 && (code == EQ_EXPR
8756 || code == NE_EXPR
8757 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8759 {
8760 if (code != EQ_EXPR
8761 && code != NE_EXPR
8762 && bitpos0 != bitpos1
8763 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8764 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8765 fold_overflow_warning (("assuming pointer wraparound does not "
8766 "occur when comparing P +- C1 with "
8767 "P +- C2"),
8768 WARN_STRICT_OVERFLOW_CONDITIONAL);
8769
8770 switch (code)
8771 {
8772 case EQ_EXPR:
8773 return constant_boolean_node (bitpos0 == bitpos1, type);
8774 case NE_EXPR:
8775 return constant_boolean_node (bitpos0 != bitpos1, type);
8776 case LT_EXPR:
8777 return constant_boolean_node (bitpos0 < bitpos1, type);
8778 case LE_EXPR:
8779 return constant_boolean_node (bitpos0 <= bitpos1, type);
8780 case GE_EXPR:
8781 return constant_boolean_node (bitpos0 >= bitpos1, type);
8782 case GT_EXPR:
8783 return constant_boolean_node (bitpos0 > bitpos1, type);
8784 default:;
8785 }
8786 }
8787 /* We can simplify the comparison to a comparison of the variable
8788 offset parts if the constant offset parts are equal.
8789 Be careful to use signed size type here because otherwise we
8790 mess with array offsets in the wrong way. This is possible
8791 	 because pointer arithmetic is restricted to remain within an
8792 object and overflow on pointer differences is undefined as of
8793 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8794 else if (bitpos0 == bitpos1
8795 && ((code == EQ_EXPR || code == NE_EXPR)
8796 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8797 {
8798 	  /* By converting to the signed size type we cover middle-end pointer
8799 	     arithmetic, which operates on unsigned pointer types of size-type
8800 	     width, and ARRAY_REF offsets, which are properly sign- or
8801 	     zero-extended from their type when it is narrower than the
8802 	     size type.  */
8803 if (offset0 == NULL_TREE)
8804 offset0 = build_int_cst (ssizetype, 0);
8805 else
8806 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8807 if (offset1 == NULL_TREE)
8808 offset1 = build_int_cst (ssizetype, 0);
8809 else
8810 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8811
8812 if (code != EQ_EXPR
8813 && code != NE_EXPR
8814 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8815 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8816 fold_overflow_warning (("assuming pointer wraparound does not "
8817 "occur when comparing P +- C1 with "
8818 "P +- C2"),
8819 WARN_STRICT_OVERFLOW_COMPARISON);
8820
8821 return fold_build2_loc (loc, code, type, offset0, offset1);
8822 }
8823 }
8824 /* For non-equal bases we can simplify if they are addresses
8825 of local binding decls or constants. */
8826 else if (indirect_base0 && indirect_base1
8827 /* We know that !operand_equal_p (base0, base1, 0)
8828 because the if condition was false. But make
8829 	      sure the two decls are not the same.  */
8830 && base0 != base1
8831 && TREE_CODE (arg0) == ADDR_EXPR
8832 && TREE_CODE (arg1) == ADDR_EXPR
8833 && (((TREE_CODE (base0) == VAR_DECL
8834 || TREE_CODE (base0) == PARM_DECL)
8835 && (targetm.binds_local_p (base0)
8836 || CONSTANT_CLASS_P (base1)))
8837 || CONSTANT_CLASS_P (base0))
8838 && (((TREE_CODE (base1) == VAR_DECL
8839 || TREE_CODE (base1) == PARM_DECL)
8840 && (targetm.binds_local_p (base1)
8841 || CONSTANT_CLASS_P (base0)))
8842 || CONSTANT_CLASS_P (base1)))
8843 {
8844 if (code == EQ_EXPR)
8845 return omit_two_operands_loc (loc, type, boolean_false_node,
8846 arg0, arg1);
8847 else if (code == NE_EXPR)
8848 return omit_two_operands_loc (loc, type, boolean_true_node,
8849 arg0, arg1);
8850 }
8851 /* For equal offsets we can simplify to a comparison of the
8852 base addresses. */
8853 else if (bitpos0 == bitpos1
8854 && (indirect_base0
8855 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8856 && (indirect_base1
8857 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8858 && ((offset0 == offset1)
8859 || (offset0 && offset1
8860 && operand_equal_p (offset0, offset1, 0))))
8861 {
8862 if (indirect_base0)
8863 base0 = build_fold_addr_expr_loc (loc, base0);
8864 if (indirect_base1)
8865 base1 = build_fold_addr_expr_loc (loc, base1);
8866 return fold_build2_loc (loc, code, type, base0, base1);
8867 }
8868 }
8869
8870 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8871 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8872 the resulting offset is smaller in absolute value than the
8873 original one. */
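  /* E.g. "X + 2 < Y + 5" can become "X < Y + 3"; the new constant 3
     is smaller in absolute value than 5, so no new overflow can be
     introduced (a sketch).  */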
8874 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8875 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8876 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8877 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8878 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8879 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8880 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8881 {
8882 tree const1 = TREE_OPERAND (arg0, 1);
8883 tree const2 = TREE_OPERAND (arg1, 1);
8884 tree variable1 = TREE_OPERAND (arg0, 0);
8885 tree variable2 = TREE_OPERAND (arg1, 0);
8886 tree cst;
8887 const char * const warnmsg = G_("assuming signed overflow does not "
8888 "occur when combining constants around "
8889 "a comparison");
8890
8891 /* Put the constant on the side where it doesn't overflow and is
8892 of lower absolute value than before. */
8893 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8894 ? MINUS_EXPR : PLUS_EXPR,
8895 const2, const1, 0);
8896 if (!TREE_OVERFLOW (cst)
8897 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8898 {
8899 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8900 return fold_build2_loc (loc, code, type,
8901 variable1,
8902 fold_build2_loc (loc,
8903 TREE_CODE (arg1), TREE_TYPE (arg1),
8904 variable2, cst));
8905 }
8906
8907 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8908 ? MINUS_EXPR : PLUS_EXPR,
8909 const1, const2, 0);
8910 if (!TREE_OVERFLOW (cst)
8911 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8912 {
8913 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8914 return fold_build2_loc (loc, code, type,
8915 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
8916 variable1, cst),
8917 variable2);
8918 }
8919 }
8920
8921 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8922 signed arithmetic case. That form is created by the compiler
8923 often enough for folding it to be of value. One example is in
8924 computing loop trip counts after Operator Strength Reduction. */
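  /* E.g. "X * 4 > 0" becomes "X > 0", while "X * -2 > 0" becomes
     "X < 0" because the comparison is swapped for a negative constant
     (a sketch).  */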
8925 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8926 && TREE_CODE (arg0) == MULT_EXPR
8927 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8928 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8929 && integer_zerop (arg1))
8930 {
8931 tree const1 = TREE_OPERAND (arg0, 1);
8932 tree const2 = arg1; /* zero */
8933 tree variable1 = TREE_OPERAND (arg0, 0);
8934 enum tree_code cmp_code = code;
8935
8936 /* Handle unfolded multiplication by zero. */
8937 if (integer_zerop (const1))
8938 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8939
8940 fold_overflow_warning (("assuming signed overflow does not occur when "
8941 "eliminating multiplication in comparison "
8942 "with zero"),
8943 WARN_STRICT_OVERFLOW_COMPARISON);
8944
8945 /* If const1 is negative we swap the sense of the comparison. */
8946 if (tree_int_cst_sgn (const1) < 0)
8947 cmp_code = swap_tree_comparison (cmp_code);
8948
8949 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
8950 }
8951
8952 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
8953 if (tem)
8954 return tem;
8955
8956 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8957 {
8958 tree targ0 = strip_float_extensions (arg0);
8959 tree targ1 = strip_float_extensions (arg1);
8960 tree newtype = TREE_TYPE (targ0);
8961
8962 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8963 newtype = TREE_TYPE (targ1);
8964
8965 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8966 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8967 return fold_build2_loc (loc, code, type,
8968 fold_convert_loc (loc, newtype, targ0),
8969 fold_convert_loc (loc, newtype, targ1));
8970
8971 /* (-a) CMP (-b) -> b CMP a */
8972 if (TREE_CODE (arg0) == NEGATE_EXPR
8973 && TREE_CODE (arg1) == NEGATE_EXPR)
8974 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
8975 TREE_OPERAND (arg0, 0));
8976
8977 if (TREE_CODE (arg1) == REAL_CST)
8978 {
8979 REAL_VALUE_TYPE cst;
8980 cst = TREE_REAL_CST (arg1);
8981
8982 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8983 if (TREE_CODE (arg0) == NEGATE_EXPR)
8984 return fold_build2_loc (loc, swap_tree_comparison (code), type,
8985 TREE_OPERAND (arg0, 0),
8986 build_real (TREE_TYPE (arg1),
8987 real_value_negate (&cst)));
8988
8989 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8990 /* a CMP (-0) -> a CMP 0 */
8991 if (REAL_VALUE_MINUS_ZERO (cst))
8992 return fold_build2_loc (loc, code, type, arg0,
8993 build_real (TREE_TYPE (arg1), dconst0));
8994
8995 /* x != NaN is always true, other ops are always false. */
8996 if (REAL_VALUE_ISNAN (cst)
8997 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8998 {
8999 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9000 return omit_one_operand_loc (loc, type, tem, arg0);
9001 }
9002
9003 /* Fold comparisons against infinity. */
9004 if (REAL_VALUE_ISINF (cst)
9005 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9006 {
9007 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9008 if (tem != NULL_TREE)
9009 return tem;
9010 }
9011 }
9012
9013 /* If this is a comparison of a real constant with a PLUS_EXPR
9014 or a MINUS_EXPR of a real constant, we can convert it into a
9015 	 comparison with a revised real constant, provided that unsafe
9016 	 math optimizations are enabled and no overflow occurs.  */
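      /* E.g. "x + 1.5 < 3.0" can become "x < 1.5" under
	 -funsafe-math-optimizations (a sketch).  */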
9017 if (flag_unsafe_math_optimizations
9018 && TREE_CODE (arg1) == REAL_CST
9019 && (TREE_CODE (arg0) == PLUS_EXPR
9020 || TREE_CODE (arg0) == MINUS_EXPR)
9021 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9022 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9023 ? MINUS_EXPR : PLUS_EXPR,
9024 arg1, TREE_OPERAND (arg0, 1)))
9025 && !TREE_OVERFLOW (tem))
9026 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9027
9028 /* Likewise, we can simplify a comparison of a real constant with
9029 a MINUS_EXPR whose first operand is also a real constant, i.e.
9030 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9031 floating-point types only if -fassociative-math is set. */
9032 if (flag_associative_math
9033 && TREE_CODE (arg1) == REAL_CST
9034 && TREE_CODE (arg0) == MINUS_EXPR
9035 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9036 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9037 arg1))
9038 && !TREE_OVERFLOW (tem))
9039 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9040 TREE_OPERAND (arg0, 1), tem);
9041
9042 /* Fold comparisons against built-in math functions. */
9043 if (TREE_CODE (arg1) == REAL_CST
9044 && flag_unsafe_math_optimizations
9045 && ! flag_errno_math)
9046 {
9047 enum built_in_function fcode = builtin_mathfn_code (arg0);
9048
9049 if (fcode != END_BUILTINS)
9050 {
9051 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9052 if (tem != NULL_TREE)
9053 return tem;
9054 }
9055 }
9056 }
9057
9058 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9059 && CONVERT_EXPR_P (arg0))
9060 {
9061 /* If we are widening one operand of an integer comparison,
9062 see if the other operand is similarly being widened. Perhaps we
9063 can do the comparison in the narrower type. */
9064 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9065 if (tem)
9066 return tem;
9067
9068 /* Or if we are changing signedness. */
9069 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9070 if (tem)
9071 return tem;
9072 }
9073
9074 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9075 constant, we can simplify it. */
9076 if (TREE_CODE (arg1) == INTEGER_CST
9077 && (TREE_CODE (arg0) == MIN_EXPR
9078 || TREE_CODE (arg0) == MAX_EXPR)
9079 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9080 {
9081 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9082 if (tem)
9083 return tem;
9084 }
9085
9086 /* Simplify comparison of something with itself. (For IEEE
9087 floating-point, we can only do some of these simplifications.) */
9088 if (operand_equal_p (arg0, arg1, 0))
9089 {
9090 switch (code)
9091 {
9092 case EQ_EXPR:
9093 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9094 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9095 return constant_boolean_node (1, type);
9096 break;
9097
9098 case GE_EXPR:
9099 case LE_EXPR:
9100 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9101 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9102 return constant_boolean_node (1, type);
9103 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9104
9105 case NE_EXPR:
9106 	  /* For NE, we can only do this simplification if the operands are
9107 	     integral or we don't honor IEEE floating point NaNs.  */
9108 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9109 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9110 break;
9111 /* ... fall through ... */
9112 case GT_EXPR:
9113 case LT_EXPR:
9114 return constant_boolean_node (0, type);
9115 default:
9116 gcc_unreachable ();
9117 }
9118 }
9119
9120 /* If we are comparing an expression that just has comparisons
9121 of two integer values, arithmetic expressions of those comparisons,
9122 and constants, we can simplify it. There are only three cases
9123 to check: the two values can either be equal, the first can be
9124 greater, or the second can be greater. Fold the expression for
9125 those three values. Since each value must be 0 or 1, we have
9126 eight possibilities, each of which corresponds to the constant 0
9127 or 1 or one of the six possible comparisons.
9128
9129 This handles common cases like (a > b) == 0 but also handles
9130 expressions like ((x > y) - (y > x)) > 0, which supposedly
9131 occur in macroized code. */
9132
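  /* As a worked sketch: for "((x > y) - (y > x)) > 0" the three trial
     evaluations below take the values 1, 0 and 0 (up to operand order),
     giving a 3-bit mask that selects the single comparison "x > y".  */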
9133 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9134 {
9135 tree cval1 = 0, cval2 = 0;
9136 int save_p = 0;
9137
9138 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9139 /* Don't handle degenerate cases here; they should already
9140 have been handled anyway. */
9141 && cval1 != 0 && cval2 != 0
9142 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9143 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9144 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9145 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9146 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9147 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9148 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9149 {
9150 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9151 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9152
9153 /* We can't just pass T to eval_subst in case cval1 or cval2
9154 was the same as ARG1. */
9155
9156 tree high_result
9157 = fold_build2_loc (loc, code, type,
9158 eval_subst (loc, arg0, cval1, maxval,
9159 cval2, minval),
9160 arg1);
9161 tree equal_result
9162 = fold_build2_loc (loc, code, type,
9163 eval_subst (loc, arg0, cval1, maxval,
9164 cval2, maxval),
9165 arg1);
9166 tree low_result
9167 = fold_build2_loc (loc, code, type,
9168 eval_subst (loc, arg0, cval1, minval,
9169 cval2, maxval),
9170 arg1);
9171
9172 /* All three of these results should be 0 or 1. Confirm they are.
9173 Then use those values to select the proper code to use. */
9174
9175 if (TREE_CODE (high_result) == INTEGER_CST
9176 && TREE_CODE (equal_result) == INTEGER_CST
9177 && TREE_CODE (low_result) == INTEGER_CST)
9178 {
9179 /* Make a 3-bit mask with the high-order bit being the
9180 value for `>', the next for '=', and the low for '<'. */
9181 switch ((integer_onep (high_result) * 4)
9182 + (integer_onep (equal_result) * 2)
9183 + integer_onep (low_result))
9184 {
9185 case 0:
9186 /* Always false. */
9187 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9188 case 1:
9189 code = LT_EXPR;
9190 break;
9191 case 2:
9192 code = EQ_EXPR;
9193 break;
9194 case 3:
9195 code = LE_EXPR;
9196 break;
9197 case 4:
9198 code = GT_EXPR;
9199 break;
9200 case 5:
9201 code = NE_EXPR;
9202 break;
9203 case 6:
9204 code = GE_EXPR;
9205 break;
9206 case 7:
9207 /* Always true. */
9208 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9209 }
9210
9211 if (save_p)
9212 {
9213 tem = save_expr (build2 (code, type, cval1, cval2));
9214 SET_EXPR_LOCATION (tem, loc);
9215 return tem;
9216 }
9217 return fold_build2_loc (loc, code, type, cval1, cval2);
9218 }
9219 }
9220 }
9221
9222 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9223 into a single range test. */
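  /* E.g. with truncating division, "x / 10 == 4" holds exactly for x
     in [40, 49], so it can become a single range test (a sketch).  */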
9224 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9225 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9226 && TREE_CODE (arg1) == INTEGER_CST
9227 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9228 && !integer_zerop (TREE_OPERAND (arg0, 1))
9229 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9230 && !TREE_OVERFLOW (arg1))
9231 {
9232 tem = fold_div_compare (loc, code, type, arg0, arg1);
9233 if (tem != NULL_TREE)
9234 return tem;
9235 }
9236
9237 /* Fold ~X op ~Y as Y op X. */
9238 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9239 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9240 {
9241 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9242 return fold_build2_loc (loc, code, type,
9243 fold_convert_loc (loc, cmp_type,
9244 TREE_OPERAND (arg1, 0)),
9245 TREE_OPERAND (arg0, 0));
9246 }
9247
9248 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9249 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9250 && TREE_CODE (arg1) == INTEGER_CST)
9251 {
9252 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9253 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9254 TREE_OPERAND (arg0, 0),
9255 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9256 fold_convert_loc (loc, cmp_type, arg1)));
9257 }
9258
9259 return NULL_TREE;
9260 }
9261
9262
9263 /* Subroutine of fold_binary. Optimize complex multiplications of the
9264 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9265 argument EXPR represents the expression "z" of type TYPE. */
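/* For z == a + b*i we have z * conj(z) == (a + b*i) * (a - b*i)
   == a*a + b*b, so the code below builds the complex value
   (a*a + b*b, 0) (a worked sketch of the identity).  */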
9266
9267 static tree
9268 fold_mult_zconjz (location_t loc, tree type, tree expr)
9269 {
9270 tree itype = TREE_TYPE (type);
9271 tree rpart, ipart, tem;
9272
9273 if (TREE_CODE (expr) == COMPLEX_EXPR)
9274 {
9275 rpart = TREE_OPERAND (expr, 0);
9276 ipart = TREE_OPERAND (expr, 1);
9277 }
9278 else if (TREE_CODE (expr) == COMPLEX_CST)
9279 {
9280 rpart = TREE_REALPART (expr);
9281 ipart = TREE_IMAGPART (expr);
9282 }
9283 else
9284 {
9285 expr = save_expr (expr);
9286 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9287 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9288 }
9289
9290 rpart = save_expr (rpart);
9291 ipart = save_expr (ipart);
9292 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9293 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9294 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9295 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9296 build_zero_cst (itype));
9297 }
9298
9299
9300 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9301 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9302 guarantees that P and N have the same least significant log2(M) bits.
9303 N is not otherwise constrained. In particular, N is not normalized to
9304 0 <= N < M as is common. In general, the precise value of P is unknown.
9305 M is chosen as large as possible such that constant N can be determined.
9306
9307 Returns M and sets *RESIDUE to N.
9308
9309 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9310 account. This is not always possible due to PR 35705.
9311 */
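/* As a sketch: for EXPR == &a + 12, where "a" is an 8-byte aligned
   array, this returns M == 8 and sets *RESIDUE to 12, i.e. the
   pointer value is congruent to 4 modulo 8.  */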
9312
9313 static unsigned HOST_WIDE_INT
9314 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9315 bool allow_func_align)
9316 {
9317 enum tree_code code;
9318
9319 *residue = 0;
9320
9321 code = TREE_CODE (expr);
9322 if (code == ADDR_EXPR)
9323 {
9324 expr = TREE_OPERAND (expr, 0);
9325 if (handled_component_p (expr))
9326 {
9327 HOST_WIDE_INT bitsize, bitpos;
9328 tree offset;
9329 enum machine_mode mode;
9330 int unsignedp, volatilep;
9331
9332 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9333 &mode, &unsignedp, &volatilep, false);
9334 *residue = bitpos / BITS_PER_UNIT;
9335 if (offset)
9336 {
9337 if (TREE_CODE (offset) == INTEGER_CST)
9338 *residue += TREE_INT_CST_LOW (offset);
9339 else
9340 /* We don't handle more complicated offset expressions. */
9341 return 1;
9342 }
9343 }
9344
9345 if (DECL_P (expr)
9346 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9347 return DECL_ALIGN_UNIT (expr);
9348 }
9349 else if (code == POINTER_PLUS_EXPR)
9350 {
9351 tree op0, op1;
9352 unsigned HOST_WIDE_INT modulus;
9353 enum tree_code inner_code;
9354
9355 op0 = TREE_OPERAND (expr, 0);
9356 STRIP_NOPS (op0);
9357 modulus = get_pointer_modulus_and_residue (op0, residue,
9358 allow_func_align);
9359
9360 op1 = TREE_OPERAND (expr, 1);
9361 STRIP_NOPS (op1);
9362 inner_code = TREE_CODE (op1);
9363 if (inner_code == INTEGER_CST)
9364 {
9365 *residue += TREE_INT_CST_LOW (op1);
9366 return modulus;
9367 }
9368 else if (inner_code == MULT_EXPR)
9369 {
9370 op1 = TREE_OPERAND (op1, 1);
9371 if (TREE_CODE (op1) == INTEGER_CST)
9372 {
9373 unsigned HOST_WIDE_INT align;
9374
9375 /* Compute the greatest power-of-2 divisor of op1. */
9376 align = TREE_INT_CST_LOW (op1);
9377 align &= -align;
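	      /* E.g. op1 == 24 (binary 11000) yields align == 8: in two's
		 complement, op1 & -op1 isolates the lowest set bit of op1
		 (a sketch).  */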
9378
9379 	      /* If align is non-zero and less than modulus, replace
9380 		 modulus with align.  If align is 0, then either op1 is 0
9381 or the greatest power-of-2 divisor of op1 doesn't fit in an
9382 unsigned HOST_WIDE_INT. In either case, no additional
9383 constraint is imposed. */
9384 if (align)
9385 modulus = MIN (modulus, align);
9386
9387 return modulus;
9388 }
9389 }
9390 }
9391
9392 /* If we get here, we were unable to determine anything useful about the
9393 expression. */
9394 return 1;
9395 }
9396
9397
9398 /* Fold a binary expression of code CODE and type TYPE with operands
9399 OP0 and OP1. LOC is the location of the resulting expression.
9400 Return the folded expression if folding is successful. Otherwise,
9401 return NULL_TREE. */
9402
9403 tree
9404 fold_binary_loc (location_t loc,
9405 enum tree_code code, tree type, tree op0, tree op1)
9406 {
9407 enum tree_code_class kind = TREE_CODE_CLASS (code);
9408 tree arg0, arg1, tem;
9409 tree t1 = NULL_TREE;
9410 bool strict_overflow_p;
9411
9412 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9413 && TREE_CODE_LENGTH (code) == 2
9414 && op0 != NULL_TREE
9415 && op1 != NULL_TREE);
9416
9417 arg0 = op0;
9418 arg1 = op1;
9419
9420 /* Strip any conversions that don't change the mode. This is
9421 safe for every expression, except for a comparison expression
9422 because its signedness is derived from its operands. So, in
9423 the latter case, only strip conversions that don't change the
9424      signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
9425      arguments preserved.
9426
9427 Note that this is done as an internal manipulation within the
9428 constant folder, in order to find the simplest representation
9429      of the arguments so that their form can be studied.  In any
9430      case, the appropriate type conversions should be put back in
9431 the tree that will get out of the constant folder. */
9432
9433 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9434 {
9435 STRIP_SIGN_NOPS (arg0);
9436 STRIP_SIGN_NOPS (arg1);
9437 }
9438 else
9439 {
9440 STRIP_NOPS (arg0);
9441 STRIP_NOPS (arg1);
9442 }
9443
9444 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9445 constant but we can't do arithmetic on them. */
9446 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9447 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9448 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9449 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9450 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9451 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9452 {
9453 if (kind == tcc_binary)
9454 {
9455 /* Make sure type and arg0 have the same saturating flag. */
9456 gcc_assert (TYPE_SATURATING (type)
9457 == TYPE_SATURATING (TREE_TYPE (arg0)));
9458 tem = const_binop (code, arg0, arg1);
9459 }
9460 else if (kind == tcc_comparison)
9461 tem = fold_relational_const (code, type, arg0, arg1);
9462 else
9463 tem = NULL_TREE;
9464
9465 if (tem != NULL_TREE)
9466 {
9467 if (TREE_TYPE (tem) != type)
9468 tem = fold_convert_loc (loc, type, tem);
9469 return tem;
9470 }
9471 }
9472
9473 /* If this is a commutative operation, and ARG0 is a constant, move it
9474 to ARG1 to reduce the number of tests below. */
9475 if (commutative_tree_code (code)
9476 && tree_swap_operands_p (arg0, arg1, true))
9477 return fold_build2_loc (loc, code, type, op1, op0);
9478
9479 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9480
9481 First check for cases where an arithmetic operation is applied to a
9482 compound, conditional, or comparison operation. Push the arithmetic
9483 operation inside the compound or conditional to see if any folding
9484 can then be done. Convert comparison to conditional for this purpose.
9485      This also optimizes non-constant cases that used to be done in
9486 expand_expr.
9487
9488      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9489      where one of the operands is a comparison and the other is a comparison, a
9490 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9491 code below would make the expression more complex. Change it to a
9492 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9493 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
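  /* E.g. "(a < b) & (c < d)" becomes a TRUTH_AND_EXPR, and
     "(a < b) == (c < d)" becomes the inversion of a TRUTH_XOR_EXPR
     (a sketch).  */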
9494
9495 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9496 || code == EQ_EXPR || code == NE_EXPR)
9497 && ((truth_value_p (TREE_CODE (arg0))
9498 && (truth_value_p (TREE_CODE (arg1))
9499 || (TREE_CODE (arg1) == BIT_AND_EXPR
9500 && integer_onep (TREE_OPERAND (arg1, 1)))))
9501 || (truth_value_p (TREE_CODE (arg1))
9502 && (truth_value_p (TREE_CODE (arg0))
9503 || (TREE_CODE (arg0) == BIT_AND_EXPR
9504 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9505 {
9506 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9507 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9508 : TRUTH_XOR_EXPR,
9509 boolean_type_node,
9510 fold_convert_loc (loc, boolean_type_node, arg0),
9511 fold_convert_loc (loc, boolean_type_node, arg1));
9512
9513 if (code == EQ_EXPR)
9514 tem = invert_truthvalue_loc (loc, tem);
9515
9516 return fold_convert_loc (loc, type, tem);
9517 }
9518
9519 if (TREE_CODE_CLASS (code) == tcc_binary
9520 || TREE_CODE_CLASS (code) == tcc_comparison)
9521 {
9522 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9523 {
9524 tem = fold_build2_loc (loc, code, type,
9525 fold_convert_loc (loc, TREE_TYPE (op0),
9526 TREE_OPERAND (arg0, 1)), op1);
9527 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
9528 goto fold_binary_exit;
9529 }
9530 if (TREE_CODE (arg1) == COMPOUND_EXPR
9531 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9532 {
9533 tem = fold_build2_loc (loc, code, type, op0,
9534 fold_convert_loc (loc, TREE_TYPE (op1),
9535 TREE_OPERAND (arg1, 1)));
9536 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
9537 goto fold_binary_exit;
9538 }
9539
9540 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9541 {
9542 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9543 arg0, arg1,
9544 /*cond_first_p=*/1);
9545 if (tem != NULL_TREE)
9546 return tem;
9547 }
9548
9549 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9550 {
9551 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9552 arg1, arg0,
9553 /*cond_first_p=*/0);
9554 if (tem != NULL_TREE)
9555 return tem;
9556 }
9557 }
9558
9559 switch (code)
9560 {
9561 case MEM_REF:
9562 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9563 if (TREE_CODE (arg0) == ADDR_EXPR
9564 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9565 {
9566 tree iref = TREE_OPERAND (arg0, 0);
9567 return fold_build2 (MEM_REF, type,
9568 TREE_OPERAND (iref, 0),
9569 int_const_binop (PLUS_EXPR, arg1,
9570 TREE_OPERAND (iref, 1), 0));
9571 }
9572
9573 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9574 if (TREE_CODE (arg0) == ADDR_EXPR
9575 && handled_component_p (TREE_OPERAND (arg0, 0)))
9576 {
9577 tree base;
9578 HOST_WIDE_INT coffset;
9579 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9580 &coffset);
9581 if (!base)
9582 return NULL_TREE;
9583 return fold_build2 (MEM_REF, type,
9584 build_fold_addr_expr (base),
9585 int_const_binop (PLUS_EXPR, arg1,
9586 size_int (coffset), 0));
9587 }
9588
9589 return NULL_TREE;
9590
9591 case POINTER_PLUS_EXPR:
9592 /* 0 +p index -> (type)index */
9593 if (integer_zerop (arg0))
9594 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9595
9596 /* PTR +p 0 -> PTR */
9597 if (integer_zerop (arg1))
9598 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9599
9600 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9601 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9602 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9603 return fold_convert_loc (loc, type,
9604 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9605 fold_convert_loc (loc, sizetype,
9606 arg1),
9607 fold_convert_loc (loc, sizetype,
9608 arg0)));
9609
9610 /* index +p PTR -> PTR +p index */
9611 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9612 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9613 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
9614 fold_convert_loc (loc, type, arg1),
9615 fold_convert_loc (loc, sizetype, arg0));
9616
9617 /* (PTR +p B) +p A -> PTR +p (B + A) */
9618 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9619 {
9620 tree inner;
9621 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9622 tree arg00 = TREE_OPERAND (arg0, 0);
9623 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9624 arg01, fold_convert_loc (loc, sizetype, arg1));
9625 return fold_convert_loc (loc, type,
9626 fold_build2_loc (loc, POINTER_PLUS_EXPR,
9627 TREE_TYPE (arg00),
9628 arg00, inner));
9629 }
9630
9631 /* PTR_CST +p CST -> CST1 */
9632 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9633 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9634 fold_convert_loc (loc, type, arg1));
9635
9636       /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9637 	 of the array.  The loop optimizer sometimes produces this type of
9638 	 expression.  */
9639 if (TREE_CODE (arg0) == ADDR_EXPR)
9640 {
9641 tem = try_move_mult_to_index (loc, arg0,
9642 fold_convert_loc (loc, sizetype, arg1));
9643 if (tem)
9644 return fold_convert_loc (loc, type, tem);
9645 }
9646
9647 return NULL_TREE;
9648
9649 case PLUS_EXPR:
9650 /* A + (-B) -> A - B */
9651 if (TREE_CODE (arg1) == NEGATE_EXPR)
9652 return fold_build2_loc (loc, MINUS_EXPR, type,
9653 fold_convert_loc (loc, type, arg0),
9654 fold_convert_loc (loc, type,
9655 TREE_OPERAND (arg1, 0)));
9656 /* (-A) + B -> B - A */
9657 if (TREE_CODE (arg0) == NEGATE_EXPR
9658 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9659 return fold_build2_loc (loc, MINUS_EXPR, type,
9660 fold_convert_loc (loc, type, arg1),
9661 fold_convert_loc (loc, type,
9662 TREE_OPERAND (arg0, 0)));
9663
9664 if (INTEGRAL_TYPE_P (type))
9665 {
9666 /* Convert ~A + 1 to -A. */
9667 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9668 && integer_onep (arg1))
9669 return fold_build1_loc (loc, NEGATE_EXPR, type,
9670 fold_convert_loc (loc, type,
9671 TREE_OPERAND (arg0, 0)));
9672
9673 /* ~X + X is -1. */
9674 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9675 && !TYPE_OVERFLOW_TRAPS (type))
9676 {
9677 tree tem = TREE_OPERAND (arg0, 0);
9678
9679 STRIP_NOPS (tem);
9680 if (operand_equal_p (tem, arg1, 0))
9681 {
9682 t1 = build_int_cst_type (type, -1);
9683 return omit_one_operand_loc (loc, type, t1, arg1);
9684 }
9685 }
9686
9687 /* X + ~X is -1. */
9688 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9689 && !TYPE_OVERFLOW_TRAPS (type))
9690 {
9691 tree tem = TREE_OPERAND (arg1, 0);
9692
9693 STRIP_NOPS (tem);
9694 if (operand_equal_p (arg0, tem, 0))
9695 {
9696 t1 = build_int_cst_type (type, -1);
9697 return omit_one_operand_loc (loc, type, t1, arg0);
9698 }
9699 }
9700
9701 /* X + (X / CST) * -CST is X % CST. */
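	  /* E.g. "x + (x / 16) * -16" is "x % 16"; the code below checks
	     that the two constants really cancel (a sketch).  */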
9702 if (TREE_CODE (arg1) == MULT_EXPR
9703 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9704 && operand_equal_p (arg0,
9705 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9706 {
9707 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9708 tree cst1 = TREE_OPERAND (arg1, 1);
9709 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9710 cst1, cst0);
9711 if (sum && integer_zerop (sum))
9712 return fold_convert_loc (loc, type,
9713 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9714 TREE_TYPE (arg0), arg0,
9715 cst0));
9716 }
9717 }
9718
9719 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9720 same or one. Make sure type is not saturating.
9721 fold_plusminus_mult_expr will re-associate. */
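      /* E.g. "x*4 + x*2" can become "x*6" and "a*3 + b*3" can become
	 "(a + b)*3" (a sketch of what fold_plusminus_mult_expr does).  */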
9722 if ((TREE_CODE (arg0) == MULT_EXPR
9723 || TREE_CODE (arg1) == MULT_EXPR)
9724 && !TYPE_SATURATING (type)
9725 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9726 {
9727 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9728 if (tem)
9729 return tem;
9730 }
9731
9732 if (! FLOAT_TYPE_P (type))
9733 {
9734 if (integer_zerop (arg1))
9735 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9736
9737 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9738 with a constant, and the two constants have no bits in common,
9739 we should treat this as a BIT_IOR_EXPR since this may produce more
9740 simplifications. */
9741 if (TREE_CODE (arg0) == BIT_AND_EXPR
9742 && TREE_CODE (arg1) == BIT_AND_EXPR
9743 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9744 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9745 && integer_zerop (const_binop (BIT_AND_EXPR,
9746 TREE_OPERAND (arg0, 1),
9747 TREE_OPERAND (arg1, 1))))
9748 {
9749 code = BIT_IOR_EXPR;
9750 goto bit_ior;
9751 }
9752
9753 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9754 (plus (plus (mult) (mult)) (foo)) so that we can
9755 take advantage of the factoring cases below. */
9756 if (((TREE_CODE (arg0) == PLUS_EXPR
9757 || TREE_CODE (arg0) == MINUS_EXPR)
9758 && TREE_CODE (arg1) == MULT_EXPR)
9759 || ((TREE_CODE (arg1) == PLUS_EXPR
9760 || TREE_CODE (arg1) == MINUS_EXPR)
9761 && TREE_CODE (arg0) == MULT_EXPR))
9762 {
9763 tree parg0, parg1, parg, marg;
9764 enum tree_code pcode;
9765
9766 if (TREE_CODE (arg1) == MULT_EXPR)
9767 parg = arg0, marg = arg1;
9768 else
9769 parg = arg1, marg = arg0;
9770 pcode = TREE_CODE (parg);
9771 parg0 = TREE_OPERAND (parg, 0);
9772 parg1 = TREE_OPERAND (parg, 1);
9773 STRIP_NOPS (parg0);
9774 STRIP_NOPS (parg1);
9775
9776 if (TREE_CODE (parg0) == MULT_EXPR
9777 && TREE_CODE (parg1) != MULT_EXPR)
9778 return fold_build2_loc (loc, pcode, type,
9779 fold_build2_loc (loc, PLUS_EXPR, type,
9780 fold_convert_loc (loc, type,
9781 parg0),
9782 fold_convert_loc (loc, type,
9783 marg)),
9784 fold_convert_loc (loc, type, parg1));
9785 if (TREE_CODE (parg0) != MULT_EXPR
9786 && TREE_CODE (parg1) == MULT_EXPR)
9787 return
9788 fold_build2_loc (loc, PLUS_EXPR, type,
9789 fold_convert_loc (loc, type, parg0),
9790 fold_build2_loc (loc, pcode, type,
9791 fold_convert_loc (loc, type, marg),
9792 fold_convert_loc (loc, type,
9793 parg1)));
9794 }
9795 }
9796 else
9797 {
9798 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9799 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9800 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9801
9802 /* Likewise if the operands are reversed. */
9803 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9804 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9805
9806 /* Convert X + -C into X - C. */
9807 if (TREE_CODE (arg1) == REAL_CST
9808 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9809 {
9810 tem = fold_negate_const (arg1, type);
9811 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9812 return fold_build2_loc (loc, MINUS_EXPR, type,
9813 fold_convert_loc (loc, type, arg0),
9814 fold_convert_loc (loc, type, tem));
9815 }
9816
9817 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9818 to __complex__ ( x, y ). This is not the same for SNaNs or
9819 if signed zeros are involved. */
9820 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9821 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9822 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9823 {
9824 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9825 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9826 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9827 bool arg0rz = false, arg0iz = false;
9828 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9829 || (arg0i && (arg0iz = real_zerop (arg0i))))
9830 {
9831 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9832 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9833 if (arg0rz && arg1i && real_zerop (arg1i))
9834 {
9835 tree rp = arg1r ? arg1r
9836 : build1 (REALPART_EXPR, rtype, arg1);
9837 tree ip = arg0i ? arg0i
9838 : build1 (IMAGPART_EXPR, rtype, arg0);
9839 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9840 }
9841 else if (arg0iz && arg1r && real_zerop (arg1r))
9842 {
9843 tree rp = arg0r ? arg0r
9844 : build1 (REALPART_EXPR, rtype, arg0);
9845 tree ip = arg1i ? arg1i
9846 : build1 (IMAGPART_EXPR, rtype, arg1);
9847 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9848 }
9849 }
9850 }
9851
9852 if (flag_unsafe_math_optimizations
9853 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9854 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9855 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9856 return tem;
9857
9858 /* Convert x+x into x*2.0. */
9859 if (operand_equal_p (arg0, arg1, 0)
9860 && SCALAR_FLOAT_TYPE_P (type))
9861 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
9862 build_real (type, dconst2));
9863
9864 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9865 We associate floats only if the user has specified
9866 -fassociative-math. */
9867 if (flag_associative_math
9868 && TREE_CODE (arg1) == PLUS_EXPR
9869 && TREE_CODE (arg0) != MULT_EXPR)
9870 {
9871 tree tree10 = TREE_OPERAND (arg1, 0);
9872 tree tree11 = TREE_OPERAND (arg1, 1);
9873 if (TREE_CODE (tree11) == MULT_EXPR
9874 && TREE_CODE (tree10) == MULT_EXPR)
9875 {
9876 tree tree0;
9877 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9878 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9879 }
9880 }
9881 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9882 We associate floats only if the user has specified
9883 -fassociative-math. */
9884 if (flag_associative_math
9885 && TREE_CODE (arg0) == PLUS_EXPR
9886 && TREE_CODE (arg1) != MULT_EXPR)
9887 {
9888 tree tree00 = TREE_OPERAND (arg0, 0);
9889 tree tree01 = TREE_OPERAND (arg0, 1);
9890 if (TREE_CODE (tree01) == MULT_EXPR
9891 && TREE_CODE (tree00) == MULT_EXPR)
9892 {
9893 tree tree0;
9894 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9895 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9896 }
9897 }
9898 }
9899
9900 bit_rotate:
9901       /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is
9902 	 unsigned and C1+C2 is the size of A.  */
9903       /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is
9904 	 unsigned and Z is the size of A.  */
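      /* E.g. for a 32-bit unsigned A, "(A << 3) + (A >> 29)" and
	 "(A << B) + (A >> (32 - B))" both fold to left rotates of A
	 (a sketch).  */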
9905 {
9906 enum tree_code code0, code1;
9907 tree rtype;
9908 code0 = TREE_CODE (arg0);
9909 code1 = TREE_CODE (arg1);
9910 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9911 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9912 && operand_equal_p (TREE_OPERAND (arg0, 0),
9913 TREE_OPERAND (arg1, 0), 0)
9914 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9915 TYPE_UNSIGNED (rtype))
9916 /* Only create rotates in complete modes. Other cases are not
9917 expanded properly. */
9918 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9919 {
9920 tree tree01, tree11;
9921 enum tree_code code01, code11;
9922
9923 tree01 = TREE_OPERAND (arg0, 1);
9924 tree11 = TREE_OPERAND (arg1, 1);
9925 STRIP_NOPS (tree01);
9926 STRIP_NOPS (tree11);
9927 code01 = TREE_CODE (tree01);
9928 code11 = TREE_CODE (tree11);
9929 if (code01 == INTEGER_CST
9930 && code11 == INTEGER_CST
9931 && TREE_INT_CST_HIGH (tree01) == 0
9932 && TREE_INT_CST_HIGH (tree11) == 0
9933 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9934 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9935 {
9936 tem = build2 (LROTATE_EXPR,
9937 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9938 TREE_OPERAND (arg0, 0),
9939 code0 == LSHIFT_EXPR
9940 ? tree01 : tree11);
9941 SET_EXPR_LOCATION (tem, loc);
9942 return fold_convert_loc (loc, type, tem);
9943 }
9944 else if (code11 == MINUS_EXPR)
9945 {
9946 tree tree110, tree111;
9947 tree110 = TREE_OPERAND (tree11, 0);
9948 tree111 = TREE_OPERAND (tree11, 1);
9949 STRIP_NOPS (tree110);
9950 STRIP_NOPS (tree111);
9951 if (TREE_CODE (tree110) == INTEGER_CST
9952 && 0 == compare_tree_int (tree110,
9953 TYPE_PRECISION
9954 (TREE_TYPE (TREE_OPERAND
9955 (arg0, 0))))
9956 && operand_equal_p (tree01, tree111, 0))
9957 return
9958 fold_convert_loc (loc, type,
9959 build2 ((code0 == LSHIFT_EXPR
9960 ? LROTATE_EXPR
9961 : RROTATE_EXPR),
9962 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9963 TREE_OPERAND (arg0, 0), tree01));
9964 }
9965 else if (code01 == MINUS_EXPR)
9966 {
9967 tree tree010, tree011;
9968 tree010 = TREE_OPERAND (tree01, 0);
9969 tree011 = TREE_OPERAND (tree01, 1);
9970 STRIP_NOPS (tree010);
9971 STRIP_NOPS (tree011);
9972 if (TREE_CODE (tree010) == INTEGER_CST
9973 && 0 == compare_tree_int (tree010,
9974 TYPE_PRECISION
9975 (TREE_TYPE (TREE_OPERAND
9976 (arg0, 0))))
9977 && operand_equal_p (tree11, tree011, 0))
9978 return fold_convert_loc
9979 (loc, type,
9980 build2 ((code0 != LSHIFT_EXPR
9981 ? LROTATE_EXPR
9982 : RROTATE_EXPR),
9983 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9984 TREE_OPERAND (arg0, 0), tree11));
9985 }
9986 }
9987 }
9988
9989 associate:
9990       /* In most languages, we can't associate operations on floats through
9991 parentheses. Rather than remember where the parentheses were, we
9992 don't associate floats at all, unless the user has specified
9993 -fassociative-math.
9994 And, we need to make sure type is not saturating. */
9995
9996 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9997 && !TYPE_SATURATING (type))
9998 {
9999 tree var0, con0, lit0, minus_lit0;
10000 tree var1, con1, lit1, minus_lit1;
10001 bool ok = true;
10002
10003 /* Split both trees into variables, constants, and literals. Then
10004 associate each group together, the constants with literals,
10005 then the result with variables. This increases the chances of
10006 literals being recombined later and of generating relocatable
10007 expressions for the sum of a constant and literal. */
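	  /* E.g. "(x + 1) + (y + 2)" splits into the variables x, y and
	     the literals 1, 2, which recombine below as "(x + y) + 3"
	     (a sketch).  */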
10008 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10009 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10010 code == MINUS_EXPR);
10011
10012 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10013 if (code == MINUS_EXPR)
10014 code = PLUS_EXPR;
10015
10016 /* With undefined overflow we can only associate constants with one
10017 variable, and constants whose association doesn't overflow. */
10018 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10019 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10020 {
10021 if (var0 && var1)
10022 {
10023 tree tmp0 = var0;
10024 tree tmp1 = var1;
10025
10026 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10027 tmp0 = TREE_OPERAND (tmp0, 0);
10028 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10029 tmp1 = TREE_OPERAND (tmp1, 0);
10030 /* The only case we can still associate with two variables
10031 is if they are the same, modulo negation. */
10032 if (!operand_equal_p (tmp0, tmp1, 0))
10033 ok = false;
10034 }
10035
10036 if (ok && lit0 && lit1)
10037 {
10038 tree tmp0 = fold_convert (type, lit0);
10039 tree tmp1 = fold_convert (type, lit1);
10040
10041 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10042 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10043 ok = false;
10044 }
10045 }
10046
10047 /* Only do something if we found more than two objects. Otherwise,
10048 nothing has changed and we risk infinite recursion. */
10049 if (ok
10050 && (2 < ((var0 != 0) + (var1 != 0)
10051 + (con0 != 0) + (con1 != 0)
10052 + (lit0 != 0) + (lit1 != 0)
10053 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10054 {
10055 var0 = associate_trees (loc, var0, var1, code, type);
10056 con0 = associate_trees (loc, con0, con1, code, type);
10057 lit0 = associate_trees (loc, lit0, lit1, code, type);
10058 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10059
10060 /* Preserve the MINUS_EXPR if the negative part of the literal is
10061 greater than the positive part. Otherwise, the multiplicative
10062 	     folding code (i.e. extract_muldiv) may be fooled when
10063 unsigned constants are subtracted, like in the following
10064 example: ((X*2 + 4) - 8U)/2. */
10065 if (minus_lit0 && lit0)
10066 {
10067 if (TREE_CODE (lit0) == INTEGER_CST
10068 && TREE_CODE (minus_lit0) == INTEGER_CST
10069 && tree_int_cst_lt (lit0, minus_lit0))
10070 {
10071 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10072 MINUS_EXPR, type);
10073 lit0 = 0;
10074 }
10075 else
10076 {
10077 lit0 = associate_trees (loc, lit0, minus_lit0,
10078 MINUS_EXPR, type);
10079 minus_lit0 = 0;
10080 }
10081 }
10082 if (minus_lit0)
10083 {
10084 if (con0 == 0)
10085 return
10086 fold_convert_loc (loc, type,
10087 associate_trees (loc, var0, minus_lit0,
10088 MINUS_EXPR, type));
10089 else
10090 {
10091 con0 = associate_trees (loc, con0, minus_lit0,
10092 MINUS_EXPR, type);
10093 return
10094 fold_convert_loc (loc, type,
10095 associate_trees (loc, var0, con0,
10096 PLUS_EXPR, type));
10097 }
10098 }
10099
10100 con0 = associate_trees (loc, con0, lit0, code, type);
10101 return
10102 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10103 code, type));
10104 }
10105 }
10106
10107 return NULL_TREE;
10108
10109 case MINUS_EXPR:
10110 /* Pointer simplifications for subtraction, simple reassociations. */
10111 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10112 {
10113 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10114 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10115 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10116 {
10117 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10118 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10119 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10120 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10121 return fold_build2_loc (loc, PLUS_EXPR, type,
10122 fold_build2_loc (loc, MINUS_EXPR, type,
10123 arg00, arg10),
10124 fold_build2_loc (loc, MINUS_EXPR, type,
10125 arg01, arg11));
10126 }
10127 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10128 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10129 {
10130 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10131 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10132 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10133 fold_convert_loc (loc, type, arg1));
10134 if (tmp)
10135 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10136 }
10137 }
10138 /* A - (-B) -> A + B */
10139 if (TREE_CODE (arg1) == NEGATE_EXPR)
10140 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10141 fold_convert_loc (loc, type,
10142 TREE_OPERAND (arg1, 0)));
10143 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10144 if (TREE_CODE (arg0) == NEGATE_EXPR
10145 && (FLOAT_TYPE_P (type)
10146 || INTEGRAL_TYPE_P (type))
10147 && negate_expr_p (arg1)
10148 && reorder_operands_p (arg0, arg1))
10149 return fold_build2_loc (loc, MINUS_EXPR, type,
10150 fold_convert_loc (loc, type,
10151 negate_expr (arg1)),
10152 fold_convert_loc (loc, type,
10153 TREE_OPERAND (arg0, 0)));
10154 /* Convert -A - 1 to ~A. */
10155 if (INTEGRAL_TYPE_P (type)
10156 && TREE_CODE (arg0) == NEGATE_EXPR
10157 && integer_onep (arg1)
10158 && !TYPE_OVERFLOW_TRAPS (type))
10159 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10160 fold_convert_loc (loc, type,
10161 TREE_OPERAND (arg0, 0)));
10162
10163 /* Convert -1 - A to ~A. */
10164 if (INTEGRAL_TYPE_P (type)
10165 && integer_all_onesp (arg0))
10166 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10167
10168
10169 /* X - (X / CST) * CST is X % CST. */
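      /* E.g. "x - (x / 8) * 8" folds to "x % 8" (a sketch).  */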
10170 if (INTEGRAL_TYPE_P (type)
10171 && TREE_CODE (arg1) == MULT_EXPR
10172 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10173 && operand_equal_p (arg0,
10174 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10175 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10176 TREE_OPERAND (arg1, 1), 0))
10177 return
10178 fold_convert_loc (loc, type,
10179 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10180 arg0, TREE_OPERAND (arg1, 1)));
10181
10182 if (! FLOAT_TYPE_P (type))
10183 {
10184 if (integer_zerop (arg0))
10185 return negate_expr (fold_convert_loc (loc, type, arg1));
10186 if (integer_zerop (arg1))
10187 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10188
10189 /* Fold A - (A & B) into ~B & A. */
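	  /* E.g. "a - (a & 0xff)" becomes "~0xff & a", clearing the low
	     byte (a sketch).  */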
10190 if (!TREE_SIDE_EFFECTS (arg0)
10191 && TREE_CODE (arg1) == BIT_AND_EXPR)
10192 {
10193 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10194 {
10195 tree arg10 = fold_convert_loc (loc, type,
10196 TREE_OPERAND (arg1, 0));
10197 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10198 fold_build1_loc (loc, BIT_NOT_EXPR,
10199 type, arg10),
10200 fold_convert_loc (loc, type, arg0));
10201 }
10202 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10203 {
10204 tree arg11 = fold_convert_loc (loc,
10205 type, TREE_OPERAND (arg1, 1));
10206 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10207 fold_build1_loc (loc, BIT_NOT_EXPR,
10208 type, arg11),
10209 fold_convert_loc (loc, type, arg0));
10210 }
10211 }
10212
10213 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10214 any power of 2 minus 1. */
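	  /* E.g. with B == 7, "(a & ~7) - (a & 7)" becomes
	     "(a ^ 7) - 7"; for a == 10 both sides equal 6
	     (a worked sketch).  */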
10215 if (TREE_CODE (arg0) == BIT_AND_EXPR
10216 && TREE_CODE (arg1) == BIT_AND_EXPR
10217 && operand_equal_p (TREE_OPERAND (arg0, 0),
10218 TREE_OPERAND (arg1, 0), 0))
10219 {
10220 tree mask0 = TREE_OPERAND (arg0, 1);
10221 tree mask1 = TREE_OPERAND (arg1, 1);
10222 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10223
10224 if (operand_equal_p (tem, mask1, 0))
10225 {
10226 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10227 TREE_OPERAND (arg0, 0), mask1);
10228 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10229 }
10230 }
10231 }
10232
10233 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10234 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10235 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10236
10237 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10238 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10239 (-ARG1 + ARG0) reduces to -ARG1. */
10240 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10241 return negate_expr (fold_convert_loc (loc, type, arg1));
10242
10243 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10244 __complex__ ( x, -y ). This is not the same for SNaNs or if
10245 signed zeros are involved. */
10246 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10247 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10248 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10249 {
10250 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10251 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10252 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10253 bool arg0rz = false, arg0iz = false;
10254 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10255 || (arg0i && (arg0iz = real_zerop (arg0i))))
10256 {
10257 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10258 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10259 if (arg0rz && arg1i && real_zerop (arg1i))
10260 {
10261 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10262 arg1r ? arg1r
10263 : build1 (REALPART_EXPR, rtype, arg1));
10264 tree ip = arg0i ? arg0i
10265 : build1 (IMAGPART_EXPR, rtype, arg0);
10266 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10267 }
10268 else if (arg0iz && arg1r && real_zerop (arg1r))
10269 {
10270 tree rp = arg0r ? arg0r
10271 : build1 (REALPART_EXPR, rtype, arg0);
10272 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10273 arg1i ? arg1i
10274 : build1 (IMAGPART_EXPR, rtype, arg1));
10275 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10276 }
10277 }
10278 }
10279
10280 /* Fold &x - &x. This can happen from &x.foo - &x.
10281 This is unsafe for certain floats even in non-IEEE formats.
10282 	 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10283 Also note that operand_equal_p is always false if an operand
10284 is volatile. */
10285
10286 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10287 && operand_equal_p (arg0, arg1, 0))
10288 return build_zero_cst (type);
10289
10290 /* A - B -> A + (-B) if B is easily negatable. */
10291 if (negate_expr_p (arg1)
10292 && ((FLOAT_TYPE_P (type)
10293 /* Avoid this transformation if B is a positive REAL_CST. */
10294 && (TREE_CODE (arg1) != REAL_CST
10295 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10296 || INTEGRAL_TYPE_P (type)))
10297 return fold_build2_loc (loc, PLUS_EXPR, type,
10298 fold_convert_loc (loc, type, arg0),
10299 fold_convert_loc (loc, type,
10300 negate_expr (arg1)));
10301
10302 /* Try folding difference of addresses. */
10303 {
10304 HOST_WIDE_INT diff;
10305
10306 if ((TREE_CODE (arg0) == ADDR_EXPR
10307 || TREE_CODE (arg1) == ADDR_EXPR)
10308 && ptr_difference_const (arg0, arg1, &diff))
10309 return build_int_cst_type (type, diff);
10310 }
10311
10312 	  /* Fold &a[i] - &a[j] to (i-j) * sizeof (a[0]), the byte offset
	     between the two elements.  */
10313 if (TREE_CODE (arg0) == ADDR_EXPR
10314 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10315 && TREE_CODE (arg1) == ADDR_EXPR
10316 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10317 {
10318 tree aref0 = TREE_OPERAND (arg0, 0);
10319 tree aref1 = TREE_OPERAND (arg1, 0);
10320 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10321 TREE_OPERAND (aref1, 0), 0))
10322 {
10323 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10324 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10325 tree esz = array_ref_element_size (aref0);
10326 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10327 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10328 fold_convert_loc (loc, type, esz));
10329
10330 }
10331 }
10332
10333 if (FLOAT_TYPE_P (type)
10334 && flag_unsafe_math_optimizations
10335 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10336 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10337 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10338 return tem;
10339
10340 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10341 same or one. Make sure type is not saturating.
10342 fold_plusminus_mult_expr will re-associate. */
10343 if ((TREE_CODE (arg0) == MULT_EXPR
10344 || TREE_CODE (arg1) == MULT_EXPR)
10345 && !TYPE_SATURATING (type)
10346 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10347 {
10348 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10349 if (tem)
10350 return tem;
10351 }
10352
10353 goto associate;
10354
10355 case MULT_EXPR:
10356 /* (-A) * (-B) -> A * B */
10357 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10358 return fold_build2_loc (loc, MULT_EXPR, type,
10359 fold_convert_loc (loc, type,
10360 TREE_OPERAND (arg0, 0)),
10361 fold_convert_loc (loc, type,
10362 negate_expr (arg1)));
10363 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10364 return fold_build2_loc (loc, MULT_EXPR, type,
10365 fold_convert_loc (loc, type,
10366 negate_expr (arg0)),
10367 fold_convert_loc (loc, type,
10368 TREE_OPERAND (arg1, 0)));
10369
10370 if (! FLOAT_TYPE_P (type))
10371 {
10372 if (integer_zerop (arg1))
10373 return omit_one_operand_loc (loc, type, arg1, arg0);
10374 if (integer_onep (arg1))
10375 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10376 /* Transform x * -1 into -x. Make sure to do the negation
10377 on the original operand with conversions not stripped
10378 because we can only strip non-sign-changing conversions. */
10379 if (integer_all_onesp (arg1))
10380 return fold_convert_loc (loc, type, negate_expr (op0));
10381 /* Transform x * -C into -x * C if x is easily negatable. */
10382 if (TREE_CODE (arg1) == INTEGER_CST
10383 && tree_int_cst_sgn (arg1) == -1
10384 && negate_expr_p (arg0)
10385 && (tem = negate_expr (arg1)) != arg1
10386 && !TREE_OVERFLOW (tem))
10387 return fold_build2_loc (loc, MULT_EXPR, type,
10388 fold_convert_loc (loc, type,
10389 negate_expr (arg0)),
10390 tem);
10391
10392 /* (a * (1 << b)) is (a << b) */
10393 if (TREE_CODE (arg1) == LSHIFT_EXPR
10394 && integer_onep (TREE_OPERAND (arg1, 0)))
10395 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10396 TREE_OPERAND (arg1, 1));
10397 if (TREE_CODE (arg0) == LSHIFT_EXPR
10398 && integer_onep (TREE_OPERAND (arg0, 0)))
10399 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10400 TREE_OPERAND (arg0, 1));
10401
10402 /* (A + A) * C -> A * 2 * C */
10403 if (TREE_CODE (arg0) == PLUS_EXPR
10404 && TREE_CODE (arg1) == INTEGER_CST
10405 && operand_equal_p (TREE_OPERAND (arg0, 0),
10406 TREE_OPERAND (arg0, 1), 0))
10407 return fold_build2_loc (loc, MULT_EXPR, type,
10408 omit_one_operand_loc (loc, type,
10409 TREE_OPERAND (arg0, 0),
10410 TREE_OPERAND (arg0, 1)),
10411 fold_build2_loc (loc, MULT_EXPR, type,
10412 					     build_int_cst (type, 2), arg1));
10413
10414 strict_overflow_p = false;
10415 if (TREE_CODE (arg1) == INTEGER_CST
10416 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10417 &strict_overflow_p)))
10418 {
10419 if (strict_overflow_p)
10420 fold_overflow_warning (("assuming signed overflow does not "
10421 "occur when simplifying "
10422 "multiplication"),
10423 WARN_STRICT_OVERFLOW_MISC);
10424 return fold_convert_loc (loc, type, tem);
10425 }
10426
10427 /* Optimize z * conj(z) for integer complex numbers. */
10428 if (TREE_CODE (arg0) == CONJ_EXPR
10429 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10430 return fold_mult_zconjz (loc, type, arg1);
10431 if (TREE_CODE (arg1) == CONJ_EXPR
10432 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10433 return fold_mult_zconjz (loc, type, arg0);
10434 }
10435 else
10436 {
10437 /* Maybe fold x * 0 to 0. The expressions aren't the same
10438 when x is NaN, since x * 0 is also NaN. Nor are they the
10439 same in modes with signed zeros, since multiplying a
10440 negative value by 0 gives -0, not +0. */
10441 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10442 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10443 && real_zerop (arg1))
10444 return omit_one_operand_loc (loc, type, arg1, arg0);
10445 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10446 Likewise for complex arithmetic with signed zeros. */
10447 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10448 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10449 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10450 && real_onep (arg1))
10451 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10452
10453 /* Transform x * -1.0 into -x. */
10454 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10455 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10456 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10457 && real_minus_onep (arg1))
10458 return fold_convert_loc (loc, type, negate_expr (arg0));
10459
10460 	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
10461 	     the result for floating point types due to rounding, so it is
10462 	     applied only if -fassociative-math was specified.  */
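	  /* For example, (6.0 / X) * 0.5 becomes 3.0 / X.  */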
10463 if (flag_associative_math
10464 && TREE_CODE (arg0) == RDIV_EXPR
10465 && TREE_CODE (arg1) == REAL_CST
10466 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10467 {
10468 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10469 arg1);
10470 if (tem)
10471 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10472 TREE_OPERAND (arg0, 1));
10473 }
10474
10475 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10476 if (operand_equal_p (arg0, arg1, 0))
10477 {
10478 tree tem = fold_strip_sign_ops (arg0);
10479 if (tem != NULL_TREE)
10480 {
10481 tem = fold_convert_loc (loc, type, tem);
10482 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10483 }
10484 }
10485
10486 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10487 This is not the same for NaNs or if signed zeros are
10488 involved. */
10489 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10490 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10491 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10492 && TREE_CODE (arg1) == COMPLEX_CST
10493 && real_zerop (TREE_REALPART (arg1)))
10494 {
10495 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10496 if (real_onep (TREE_IMAGPART (arg1)))
10497 return
10498 fold_build2_loc (loc, COMPLEX_EXPR, type,
10499 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10500 rtype, arg0)),
10501 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10502 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10503 return
10504 fold_build2_loc (loc, COMPLEX_EXPR, type,
10505 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10506 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10507 rtype, arg0)));
10508 }
10509
10510 /* Optimize z * conj(z) for floating point complex numbers.
10511 Guarded by flag_unsafe_math_optimizations as non-finite
10512 imaginary components don't produce scalar results. */
10513 if (flag_unsafe_math_optimizations
10514 && TREE_CODE (arg0) == CONJ_EXPR
10515 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10516 return fold_mult_zconjz (loc, type, arg1);
10517 if (flag_unsafe_math_optimizations
10518 && TREE_CODE (arg1) == CONJ_EXPR
10519 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10520 return fold_mult_zconjz (loc, type, arg0);
10521
10522 if (flag_unsafe_math_optimizations)
10523 {
10524 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10525 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10526
10527 /* Optimizations of root(...)*root(...). */
10528 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10529 {
10530 tree rootfn, arg;
10531 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10532 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10533
10534 /* Optimize sqrt(x)*sqrt(x) as x. */
10535 if (BUILTIN_SQRT_P (fcode0)
10536 && operand_equal_p (arg00, arg10, 0)
10537 && ! HONOR_SNANS (TYPE_MODE (type)))
10538 return arg00;
10539
10540 /* Optimize root(x)*root(y) as root(x*y). */
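		  /* For example, sqrt(x)*sqrt(y) becomes sqrt(x*y).  This is
		     only safe under -funsafe-math-optimizations; e.g. it
		     yields 1.0 instead of NaN when x and y are both
		     negative.  */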
10541 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10542 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10543 return build_call_expr_loc (loc, rootfn, 1, arg);
10544 }
10545
10546 /* Optimize expN(x)*expN(y) as expN(x+y). */
10547 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10548 {
10549 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10550 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10551 CALL_EXPR_ARG (arg0, 0),
10552 CALL_EXPR_ARG (arg1, 0));
10553 return build_call_expr_loc (loc, expfn, 1, arg);
10554 }
10555
10556 /* Optimizations of pow(...)*pow(...). */
10557 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10558 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10559 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10560 {
10561 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10562 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10563 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10564 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10565
10566 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10567 if (operand_equal_p (arg01, arg11, 0))
10568 {
10569 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10570 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10571 arg00, arg10);
10572 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10573 }
10574
10575 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10576 if (operand_equal_p (arg00, arg10, 0))
10577 {
10578 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10579 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10580 arg01, arg11);
10581 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10582 }
10583 }
10584
10585 /* Optimize tan(x)*cos(x) as sin(x). */
10586 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10587 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10588 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10589 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10590 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10591 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10592 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10593 CALL_EXPR_ARG (arg1, 0), 0))
10594 {
10595 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10596
10597 if (sinfn != NULL_TREE)
10598 return build_call_expr_loc (loc, sinfn, 1,
10599 CALL_EXPR_ARG (arg0, 0));
10600 }
10601
10602 /* Optimize x*pow(x,c) as pow(x,c+1). */
10603 if (fcode1 == BUILT_IN_POW
10604 || fcode1 == BUILT_IN_POWF
10605 || fcode1 == BUILT_IN_POWL)
10606 {
10607 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10608 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10609 if (TREE_CODE (arg11) == REAL_CST
10610 && !TREE_OVERFLOW (arg11)
10611 && operand_equal_p (arg0, arg10, 0))
10612 {
10613 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10614 REAL_VALUE_TYPE c;
10615 tree arg;
10616
10617 c = TREE_REAL_CST (arg11);
10618 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10619 arg = build_real (type, c);
10620 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10621 }
10622 }
10623
10624 /* Optimize pow(x,c)*x as pow(x,c+1). */
10625 if (fcode0 == BUILT_IN_POW
10626 || fcode0 == BUILT_IN_POWF
10627 || fcode0 == BUILT_IN_POWL)
10628 {
10629 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10630 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10631 if (TREE_CODE (arg01) == REAL_CST
10632 && !TREE_OVERFLOW (arg01)
10633 && operand_equal_p (arg1, arg00, 0))
10634 {
10635 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10636 REAL_VALUE_TYPE c;
10637 tree arg;
10638
10639 c = TREE_REAL_CST (arg01);
10640 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10641 arg = build_real (type, c);
10642 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10643 }
10644 }
10645
10646 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10647 if (optimize_function_for_speed_p (cfun)
10648 && operand_equal_p (arg0, arg1, 0))
10649 {
10650 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10651
10652 if (powfn)
10653 {
10654 tree arg = build_real (type, dconst2);
10655 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10656 }
10657 }
10658 }
10659 }
10660 goto associate;
10661
10662 case BIT_IOR_EXPR:
10663 bit_ior:
10664 if (integer_all_onesp (arg1))
10665 return omit_one_operand_loc (loc, type, arg1, arg0);
10666 if (integer_zerop (arg1))
10667 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10668 if (operand_equal_p (arg0, arg1, 0))
10669 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10670
10671 /* ~X | X is -1. */
10672 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10673 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10674 {
10675 t1 = build_zero_cst (type);
10676 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10677 return omit_one_operand_loc (loc, type, t1, arg1);
10678 }
10679
10680 /* X | ~X is -1. */
10681 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10682 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10683 {
10684 t1 = build_zero_cst (type);
10685 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10686 return omit_one_operand_loc (loc, type, t1, arg0);
10687 }
10688
10689 /* Canonicalize (X & C1) | C2. */
10690 if (TREE_CODE (arg0) == BIT_AND_EXPR
10691 && TREE_CODE (arg1) == INTEGER_CST
10692 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10693 {
10694 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10695 int width = TYPE_PRECISION (type), w;
10696 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10697 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10698 hi2 = TREE_INT_CST_HIGH (arg1);
10699 lo2 = TREE_INT_CST_LOW (arg1);
10700
10701 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
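	    /* For example, (X & 0x0f) | 0x3f == 0x3f, since every bit
	       C1 lets through is forced on by C2.  */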
10702 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10703 return omit_one_operand_loc (loc, type, arg1,
10704 TREE_OPERAND (arg0, 0));
10705
10706 if (width > HOST_BITS_PER_WIDE_INT)
10707 {
10708 mhi = (unsigned HOST_WIDE_INT) -1
10709 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10710 mlo = -1;
10711 }
10712 else
10713 {
10714 mhi = 0;
10715 mlo = (unsigned HOST_WIDE_INT) -1
10716 >> (HOST_BITS_PER_WIDE_INT - width);
10717 }
10718
10719 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
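	    /* For example, in an 8-bit type (X & 0xf0) | 0x0f == X | 0x0f,
	       since every bit C1 clears is set by C2.  */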
10720 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10721 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10722 TREE_OPERAND (arg0, 0), arg1);
10723
10724 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10725 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10726 mode which allows further optimizations. */
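	    /* For example, in a 32-bit type (X & 0x1e) | 0x03 becomes
	       (X & 0x1c) | 0x03, whereas (X & 0xf0) | 0x0f becomes
	       (X & 0xff) | 0x0f because 0xf0 | 0x0f == 0xff is the 8-bit
	       mode's mask.  */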
10727 hi1 &= mhi;
10728 lo1 &= mlo;
10729 hi2 &= mhi;
10730 lo2 &= mlo;
10731 hi3 = hi1 & ~hi2;
10732 lo3 = lo1 & ~lo2;
10733 for (w = BITS_PER_UNIT;
10734 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10735 w <<= 1)
10736 {
10737 unsigned HOST_WIDE_INT mask
10738 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10739 if (((lo1 | lo2) & mask) == mask
10740 && (lo1 & ~mask) == 0 && hi1 == 0)
10741 {
10742 hi3 = 0;
10743 lo3 = mask;
10744 break;
10745 }
10746 }
10747 if (hi3 != hi1 || lo3 != lo1)
10748 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10749 fold_build2_loc (loc, BIT_AND_EXPR, type,
10750 TREE_OPERAND (arg0, 0),
10751 build_int_cst_wide (type,
10752 lo3, hi3)),
10753 arg1);
10754 }
10755
10756 /* (X & Y) | Y is (X, Y). */
10757 if (TREE_CODE (arg0) == BIT_AND_EXPR
10758 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10759 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
10760 /* (X & Y) | X is (Y, X). */
10761 if (TREE_CODE (arg0) == BIT_AND_EXPR
10762 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10763 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10764 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
10765 /* X | (X & Y) is (Y, X). */
10766 if (TREE_CODE (arg1) == BIT_AND_EXPR
10767 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10768 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10769 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
10770 /* X | (Y & X) is (Y, X). */
10771 if (TREE_CODE (arg1) == BIT_AND_EXPR
10772 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10773 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10774 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
10775
10776 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10777 if (t1 != NULL_TREE)
10778 return t1;
10779
10780 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10781
10782 This results in more efficient code for machines without a NAND
10783 instruction. Combine will canonicalize to the first form
10784 which will allow use of NAND instructions provided by the
10785 backend if they exist. */
10786 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10787 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10788 {
10789 return
10790 fold_build1_loc (loc, BIT_NOT_EXPR, type,
10791 build2 (BIT_AND_EXPR, type,
10792 fold_convert_loc (loc, type,
10793 TREE_OPERAND (arg0, 0)),
10794 fold_convert_loc (loc, type,
10795 TREE_OPERAND (arg1, 0))));
10796 }
10797
10798 /* See if this can be simplified into a rotate first. If that
10799 	 is unsuccessful, continue in the association code.  */
10800 goto bit_rotate;
10801
10802 case BIT_XOR_EXPR:
10803 if (integer_zerop (arg1))
10804 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10805 if (integer_all_onesp (arg1))
10806 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
10807 if (operand_equal_p (arg0, arg1, 0))
10808 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10809
10810 /* ~X ^ X is -1. */
10811 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10812 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10813 {
10814 t1 = build_zero_cst (type);
10815 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10816 return omit_one_operand_loc (loc, type, t1, arg1);
10817 }
10818
10819 /* X ^ ~X is -1. */
10820 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10821 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10822 {
10823 t1 = build_zero_cst (type);
10824 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10825 return omit_one_operand_loc (loc, type, t1, arg0);
10826 }
10827
10828 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10829 with a constant, and the two constants have no bits in common,
10830 we should treat this as a BIT_IOR_EXPR since this may produce more
10831 simplifications. */
10832 if (TREE_CODE (arg0) == BIT_AND_EXPR
10833 && TREE_CODE (arg1) == BIT_AND_EXPR
10834 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10835 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10836 && integer_zerop (const_binop (BIT_AND_EXPR,
10837 TREE_OPERAND (arg0, 1),
10838 TREE_OPERAND (arg1, 1))))
10839 {
10840 code = BIT_IOR_EXPR;
10841 goto bit_ior;
10842 }
10843
10844 	  /* (X | Y) ^ X -> Y & ~X.  */
10845 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10846 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10847 {
10848 tree t2 = TREE_OPERAND (arg0, 1);
10849 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10850 arg1);
10851 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10852 fold_convert_loc (loc, type, t2),
10853 fold_convert_loc (loc, type, t1));
10854 return t1;
10855 }
10856
10857 	  /* (Y | X) ^ X -> Y & ~X.  */
10858 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10859 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10860 {
10861 tree t2 = TREE_OPERAND (arg0, 0);
10862 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
10863 arg1);
10864 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10865 fold_convert_loc (loc, type, t2),
10866 fold_convert_loc (loc, type, t1));
10867 return t1;
10868 }
10869
10870 	  /* X ^ (X | Y) -> Y & ~X.  */
10871 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10872 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10873 {
10874 tree t2 = TREE_OPERAND (arg1, 1);
10875 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10876 arg0);
10877 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10878 fold_convert_loc (loc, type, t2),
10879 fold_convert_loc (loc, type, t1));
10880 return t1;
10881 }
10882
10883 	  /* X ^ (Y | X) -> Y & ~X.  */
10884 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10885 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10886 {
10887 tree t2 = TREE_OPERAND (arg1, 0);
10888 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
10889 arg0);
10890 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
10891 fold_convert_loc (loc, type, t2),
10892 fold_convert_loc (loc, type, t1));
10893 return t1;
10894 }
10895
10896 /* Convert ~X ^ ~Y to X ^ Y. */
10897 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10898 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10899 return fold_build2_loc (loc, code, type,
10900 fold_convert_loc (loc, type,
10901 TREE_OPERAND (arg0, 0)),
10902 fold_convert_loc (loc, type,
10903 TREE_OPERAND (arg1, 0)));
10904
10905 /* Convert ~X ^ C to X ^ ~C. */
10906 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10907 && TREE_CODE (arg1) == INTEGER_CST)
10908 return fold_build2_loc (loc, code, type,
10909 fold_convert_loc (loc, type,
10910 TREE_OPERAND (arg0, 0)),
10911 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
10912
10913 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10914 if (TREE_CODE (arg0) == BIT_AND_EXPR
10915 && integer_onep (TREE_OPERAND (arg0, 1))
10916 && integer_onep (arg1))
10917 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10918 build_int_cst (TREE_TYPE (arg0), 0));
10919
10920 /* Fold (X & Y) ^ Y as ~X & Y. */
10921 if (TREE_CODE (arg0) == BIT_AND_EXPR
10922 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10923 {
10924 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10925 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10926 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10927 fold_convert_loc (loc, type, arg1));
10928 }
10929 /* Fold (X & Y) ^ X as ~Y & X. */
10930 if (TREE_CODE (arg0) == BIT_AND_EXPR
10931 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10932 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10933 {
10934 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10935 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10936 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10937 fold_convert_loc (loc, type, arg1));
10938 }
10939 /* Fold X ^ (X & Y) as X & ~Y. */
10940 if (TREE_CODE (arg1) == BIT_AND_EXPR
10941 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10942 {
10943 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10944 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10945 fold_convert_loc (loc, type, arg0),
10946 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10947 }
10948 /* Fold X ^ (Y & X) as ~Y & X. */
10949 if (TREE_CODE (arg1) == BIT_AND_EXPR
10950 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10951 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10952 {
10953 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10954 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10955 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10956 fold_convert_loc (loc, type, arg0));
10957 }
10958
10959 /* See if this can be simplified into a rotate first. If that
10960 	 is unsuccessful, continue in the association code.  */
10961 goto bit_rotate;
10962
10963 case BIT_AND_EXPR:
10964 if (integer_all_onesp (arg1))
10965 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10966 if (integer_zerop (arg1))
10967 return omit_one_operand_loc (loc, type, arg1, arg0);
10968 if (operand_equal_p (arg0, arg1, 0))
10969 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10970
10971 /* ~X & X is always zero. */
10972 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10973 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10974 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10975
10976 /* X & ~X is always zero. */
10977 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10978 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10979 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10980
10981 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10982 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10983 && TREE_CODE (arg1) == INTEGER_CST
10984 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10985 {
10986 tree tmp1 = fold_convert_loc (loc, type, arg1);
10987 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10988 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10989 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
10990 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
10991 return
10992 fold_convert_loc (loc, type,
10993 fold_build2_loc (loc, BIT_IOR_EXPR,
10994 type, tmp2, tmp3));
10995 }
10996
10997 /* (X | Y) & Y is (X, Y). */
10998 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10999 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11000 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11001 /* (X | Y) & X is (Y, X). */
11002 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11003 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11004 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11005 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11006 /* X & (X | Y) is (Y, X). */
11007 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11008 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11009 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11010 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11011 /* X & (Y | X) is (Y, X). */
11012 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11013 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11014 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11015 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11016
11017 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11018 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11019 && integer_onep (TREE_OPERAND (arg0, 1))
11020 && integer_onep (arg1))
11021 {
11022 tem = TREE_OPERAND (arg0, 0);
11023 return fold_build2_loc (loc, EQ_EXPR, type,
11024 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11025 build_int_cst (TREE_TYPE (tem), 1)),
11026 build_int_cst (TREE_TYPE (tem), 0));
11027 }
11028 /* Fold ~X & 1 as (X & 1) == 0. */
11029 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11030 && integer_onep (arg1))
11031 {
11032 tem = TREE_OPERAND (arg0, 0);
11033 return fold_build2_loc (loc, EQ_EXPR, type,
11034 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11035 build_int_cst (TREE_TYPE (tem), 1)),
11036 build_int_cst (TREE_TYPE (tem), 0));
11037 }
11038
11039 /* Fold (X ^ Y) & Y as ~X & Y. */
11040 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11041 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11042 {
11043 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11044 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11045 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11046 fold_convert_loc (loc, type, arg1));
11047 }
11048 /* Fold (X ^ Y) & X as ~Y & X. */
11049 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11050 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11051 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11052 {
11053 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11054 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11055 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11056 fold_convert_loc (loc, type, arg1));
11057 }
11058 /* Fold X & (X ^ Y) as X & ~Y. */
11059 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11060 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11061 {
11062 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11063 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11064 fold_convert_loc (loc, type, arg0),
11065 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11066 }
11067 /* Fold X & (Y ^ X) as ~Y & X. */
11068 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11069 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11070 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11071 {
11072 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11073 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11074 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11075 fold_convert_loc (loc, type, arg0));
11076 }
11077
11078 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11079 ((A & N) + B) & M -> (A + B) & M
11080 Similarly if (N & M) == 0,
11081 ((A | N) + B) & M -> (A + B) & M
11082 and for - instead of + (or unary - instead of +)
11083 and/or ^ instead of |.
11084 If B is constant and (B & M) == 0, fold into A & M. */
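      /* For example, with M == 0x0f both ((A & 0xff) + B) & 0x0f and
	 ((A | 0xf0) + B) & 0x0f fold to (A + B) & 0x0f, since
	 (0xff & 0x0f) == 0x0f and (0xf0 & 0x0f) == 0.  */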
11085 if (host_integerp (arg1, 1))
11086 {
11087 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11088 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11089 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11090 && (TREE_CODE (arg0) == PLUS_EXPR
11091 || TREE_CODE (arg0) == MINUS_EXPR
11092 || TREE_CODE (arg0) == NEGATE_EXPR)
11093 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11094 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11095 {
11096 tree pmop[2];
11097 int which = 0;
11098 unsigned HOST_WIDE_INT cst0;
11099
11100 /* Now we know that arg0 is (C + D) or (C - D) or
11101 		 -C and arg1 (M) equals (1LL << cst) - 1.
11102 Store C into PMOP[0] and D into PMOP[1]. */
11103 pmop[0] = TREE_OPERAND (arg0, 0);
11104 pmop[1] = NULL;
11105 if (TREE_CODE (arg0) != NEGATE_EXPR)
11106 {
11107 pmop[1] = TREE_OPERAND (arg0, 1);
11108 which = 1;
11109 }
11110
11111 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11112 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11113 & cst1) != cst1)
11114 which = -1;
11115
11116 for (; which >= 0; which--)
11117 switch (TREE_CODE (pmop[which]))
11118 {
11119 case BIT_AND_EXPR:
11120 case BIT_IOR_EXPR:
11121 case BIT_XOR_EXPR:
11122 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11123 != INTEGER_CST)
11124 break;
11125 /* tree_low_cst not used, because we don't care about
11126 the upper bits. */
11127 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11128 cst0 &= cst1;
11129 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11130 {
11131 if (cst0 != cst1)
11132 break;
11133 }
11134 else if (cst0 != 0)
11135 break;
11136 /* If C or D is of the form (A & N) where
11137 (N & M) == M, or of the form (A | N) or
11138 (A ^ N) where (N & M) == 0, replace it with A. */
11139 pmop[which] = TREE_OPERAND (pmop[which], 0);
11140 break;
11141 case INTEGER_CST:
11142 /* If C or D is a N where (N & M) == 0, it can be
11143 omitted (assumed 0). */
11144 if ((TREE_CODE (arg0) == PLUS_EXPR
11145 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11146 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11147 pmop[which] = NULL;
11148 break;
11149 default:
11150 break;
11151 }
11152
11153 /* Only build anything new if we optimized one or both arguments
11154 above. */
11155 if (pmop[0] != TREE_OPERAND (arg0, 0)
11156 || (TREE_CODE (arg0) != NEGATE_EXPR
11157 && pmop[1] != TREE_OPERAND (arg0, 1)))
11158 {
11159 tree utype = TREE_TYPE (arg0);
11160 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11161 {
11162 /* Perform the operations in a type that has defined
11163 overflow behavior. */
11164 utype = unsigned_type_for (TREE_TYPE (arg0));
11165 if (pmop[0] != NULL)
11166 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11167 if (pmop[1] != NULL)
11168 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11169 }
11170
11171 if (TREE_CODE (arg0) == NEGATE_EXPR)
11172 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11173 else if (TREE_CODE (arg0) == PLUS_EXPR)
11174 {
11175 if (pmop[0] != NULL && pmop[1] != NULL)
11176 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11177 pmop[0], pmop[1]);
11178 else if (pmop[0] != NULL)
11179 tem = pmop[0];
11180 else if (pmop[1] != NULL)
11181 tem = pmop[1];
11182 else
11183 return build_int_cst (type, 0);
11184 }
11185 else if (pmop[0] == NULL)
11186 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11187 else
11188 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11189 pmop[0], pmop[1]);
11190 /* TEM is now the new binary +, - or unary - replacement. */
11191 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11192 fold_convert_loc (loc, utype, arg1));
11193 return fold_convert_loc (loc, type, tem);
11194 }
11195 }
11196 }
11197
11198 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11199 if (t1 != NULL_TREE)
11200 return t1;
11201 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11202 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11203 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11204 {
11205 unsigned int prec
11206 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11207
11208 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11209 && (~TREE_INT_CST_LOW (arg1)
11210 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11211 return
11212 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11213 }
11214
11215 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11216
11217 This results in more efficient code for machines without a NOR
11218 instruction. Combine will canonicalize to the first form
11219 which will allow use of NOR instructions provided by the
11220 backend if they exist. */
11221 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11222 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11223 {
11224 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11225 build2 (BIT_IOR_EXPR, type,
11226 fold_convert_loc (loc, type,
11227 TREE_OPERAND (arg0, 0)),
11228 fold_convert_loc (loc, type,
11229 TREE_OPERAND (arg1, 0))));
11230 }
11231
11232 /* If arg0 is derived from the address of an object or function, we may
11233 be able to fold this expression using the object or function's
11234 alignment. */
11235 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11236 {
11237 unsigned HOST_WIDE_INT modulus, residue;
11238 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11239
11240 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11241 integer_onep (arg1));
11242
11243 /* This works because modulus is a power of 2. If this weren't the
11244 case, we'd have to replace it by its greatest power-of-2
11245 divisor: modulus & -modulus. */
11246 if (low < modulus)
11247 return build_int_cst (type, residue & low);
11248 }
11249
11250 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11251 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11252 if the new mask might be further optimized. */
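      /* For example, in a 32-bit type (X << 2) & 0xfc becomes
	 (X << 2) & 0xff: the low two bits are zero anyway, and 0xff is
	 the 8-bit mode's mask, which later folds can exploit.  */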
11253 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11254 || TREE_CODE (arg0) == RSHIFT_EXPR)
11255 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11256 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11257 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11258 < TYPE_PRECISION (TREE_TYPE (arg0))
11259 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11260 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11261 {
11262 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11263 unsigned HOST_WIDE_INT mask
11264 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11265 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11266 tree shift_type = TREE_TYPE (arg0);
11267
11268 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11269 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11270 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11271 && TYPE_PRECISION (TREE_TYPE (arg0))
11272 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11273 {
11274 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11275 tree arg00 = TREE_OPERAND (arg0, 0);
11276 /* See if more bits can be proven as zero because of
11277 zero extension. */
11278 if (TREE_CODE (arg00) == NOP_EXPR
11279 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11280 {
11281 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11282 if (TYPE_PRECISION (inner_type)
11283 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11284 && TYPE_PRECISION (inner_type) < prec)
11285 {
11286 prec = TYPE_PRECISION (inner_type);
11287 /* See if we can shorten the right shift. */
11288 if (shiftc < prec)
11289 shift_type = inner_type;
11290 }
11291 }
11292 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11293 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11294 zerobits <<= prec - shiftc;
11295 	      /* For an arithmetic shift, if the sign bit could be set,
11296 		 zerobits may actually contain sign bits, so no transformation
11297 		 is possible unless MASK masks them all away.  In that case
11298 		 the shift needs to be converted into a logical shift.  */
11299 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11300 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11301 {
11302 if ((mask & zerobits) == 0)
11303 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11304 else
11305 zerobits = 0;
11306 }
11307 }
11308
11309 /* ((X << 16) & 0xff00) is (X, 0). */
11310 if ((mask & zerobits) == mask)
11311 return omit_one_operand_loc (loc, type,
11312 build_int_cst (type, 0), arg0);
11313
11314 newmask = mask | zerobits;
11315 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11316 {
11317 unsigned int prec;
11318
11319 /* Only do the transformation if NEWMASK is some integer
11320 mode's mask. */
11321 for (prec = BITS_PER_UNIT;
11322 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11323 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11324 break;
11325 if (prec < HOST_BITS_PER_WIDE_INT
11326 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11327 {
11328 tree newmaskt;
11329
11330 if (shift_type != TREE_TYPE (arg0))
11331 {
11332 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11333 fold_convert_loc (loc, shift_type,
11334 TREE_OPERAND (arg0, 0)),
11335 TREE_OPERAND (arg0, 1));
11336 tem = fold_convert_loc (loc, type, tem);
11337 }
11338 else
11339 tem = op0;
11340 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11341 if (!tree_int_cst_equal (newmaskt, arg1))
11342 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11343 }
11344 }
11345 }
11346
11347 goto associate;
11348
11349 case RDIV_EXPR:
11350 /* Don't touch a floating-point divide by zero unless the mode
11351 of the constant can represent infinity. */
11352 if (TREE_CODE (arg1) == REAL_CST
11353 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11354 && real_zerop (arg1))
11355 return NULL_TREE;
11356
11357 /* Optimize A / A to 1.0 if we don't care about
11358 NaNs or Infinities. Skip the transformation
11359 for non-real operands. */
11360 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11361 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11362 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11363 && operand_equal_p (arg0, arg1, 0))
11364 {
11365 tree r = build_real (TREE_TYPE (arg0), dconst1);
11366
11367 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11368 }
11369
11370 /* The complex version of the above A / A optimization. */
11371 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11372 && operand_equal_p (arg0, arg1, 0))
11373 {
11374 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11375 if (! HONOR_NANS (TYPE_MODE (elem_type))
11376 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11377 {
11378 tree r = build_real (elem_type, dconst1);
11379 /* omit_two_operands will call fold_convert for us. */
11380 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11381 }
11382 }
11383
11384 /* (-A) / (-B) -> A / B */
11385 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11386 return fold_build2_loc (loc, RDIV_EXPR, type,
11387 TREE_OPERAND (arg0, 0),
11388 negate_expr (arg1));
11389 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11390 return fold_build2_loc (loc, RDIV_EXPR, type,
11391 negate_expr (arg0),
11392 TREE_OPERAND (arg1, 0));
11393
11394 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11395 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11396 && real_onep (arg1))
11397 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11398
11399 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11400 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11401 && real_minus_onep (arg1))
11402 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11403 negate_expr (arg0)));
11404
11405 /* If ARG1 is a constant, we can convert this to a multiply by the
11406 reciprocal. This does not have the same rounding properties,
11407 so only do this if -freciprocal-math. We can actually
11408 always safely do it if ARG1 is a power of two, but it's hard to
11409 tell if it is or not in a portable manner. */
11410 if (TREE_CODE (arg1) == REAL_CST)
11411 {
11412 if (flag_reciprocal_math
11413 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11414 arg1)))
11415 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11416 /* Find the reciprocal if optimizing and the result is exact. */
11417 if (optimize)
11418 {
11419 REAL_VALUE_TYPE r;
11420 r = TREE_REAL_CST (arg1);
11421 	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11422 {
11423 tem = build_real (type, r);
11424 return fold_build2_loc (loc, MULT_EXPR, type,
11425 fold_convert_loc (loc, type, arg0), tem);
11426 }
11427 }
11428 }
11429 /* Convert A/B/C to A/(B*C). */
11430 if (flag_reciprocal_math
11431 && TREE_CODE (arg0) == RDIV_EXPR)
11432 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11433 fold_build2_loc (loc, MULT_EXPR, type,
11434 TREE_OPERAND (arg0, 1), arg1));
11435
11436 /* Convert A/(B/C) to (A/B)*C. */
11437 if (flag_reciprocal_math
11438 && TREE_CODE (arg1) == RDIV_EXPR)
11439 return fold_build2_loc (loc, MULT_EXPR, type,
11440 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11441 TREE_OPERAND (arg1, 0)),
11442 TREE_OPERAND (arg1, 1));
11443
11444 /* Convert C1/(X*C2) into (C1/C2)/X. */
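      /* For example, 6.0 / (X * 2.0) becomes 3.0 / X.  */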
11445 if (flag_reciprocal_math
11446 && TREE_CODE (arg1) == MULT_EXPR
11447 && TREE_CODE (arg0) == REAL_CST
11448 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11449 {
11450 tree tem = const_binop (RDIV_EXPR, arg0,
11451 TREE_OPERAND (arg1, 1));
11452 if (tem)
11453 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11454 TREE_OPERAND (arg1, 0));
11455 }
11456
11457 if (flag_unsafe_math_optimizations)
11458 {
11459 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11460 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11461
11462 /* Optimize sin(x)/cos(x) as tan(x). */
11463 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11464 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11465 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11466 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11467 CALL_EXPR_ARG (arg1, 0), 0))
11468 {
11469 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11470
11471 if (tanfn != NULL_TREE)
11472 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11473 }
11474
11475 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11476 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11477 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11478 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11479 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11480 CALL_EXPR_ARG (arg1, 0), 0))
11481 {
11482 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11483
11484 if (tanfn != NULL_TREE)
11485 {
11486 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11487 CALL_EXPR_ARG (arg0, 0));
11488 return fold_build2_loc (loc, RDIV_EXPR, type,
11489 build_real (type, dconst1), tmp);
11490 }
11491 }
11492
11493 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11494 NaNs or Infinities. */
11495 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11496 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11497 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11498 {
11499 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11500 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11501
11502 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11503 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11504 && operand_equal_p (arg00, arg01, 0))
11505 {
11506 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11507
11508 if (cosfn != NULL_TREE)
11509 return build_call_expr_loc (loc, cosfn, 1, arg00);
11510 }
11511 }
11512
11513 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11514 NaNs or Infinities. */
11515 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11516 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11517 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11518 {
11519 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11520 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11521
11522 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11523 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11524 && operand_equal_p (arg00, arg01, 0))
11525 {
11526 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11527
11528 if (cosfn != NULL_TREE)
11529 {
11530 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11531 return fold_build2_loc (loc, RDIV_EXPR, type,
11532 build_real (type, dconst1),
11533 tmp);
11534 }
11535 }
11536 }
11537
11538 /* Optimize pow(x,c)/x as pow(x,c-1). */
11539 if (fcode0 == BUILT_IN_POW
11540 || fcode0 == BUILT_IN_POWF
11541 || fcode0 == BUILT_IN_POWL)
11542 {
11543 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11544 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11545 if (TREE_CODE (arg01) == REAL_CST
11546 && !TREE_OVERFLOW (arg01)
11547 && operand_equal_p (arg1, arg00, 0))
11548 {
11549 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11550 REAL_VALUE_TYPE c;
11551 tree arg;
11552
11553 c = TREE_REAL_CST (arg01);
11554 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11555 arg = build_real (type, c);
11556 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11557 }
11558 }
11559
11560 /* Optimize a/root(b/c) into a*root(c/b). */
11561 if (BUILTIN_ROOT_P (fcode1))
11562 {
11563 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11564
11565 if (TREE_CODE (rootarg) == RDIV_EXPR)
11566 {
11567 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11568 tree b = TREE_OPERAND (rootarg, 0);
11569 tree c = TREE_OPERAND (rootarg, 1);
11570
11571 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11572
11573 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11574 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11575 }
11576 }
11577
11578 /* Optimize x/expN(y) into x*expN(-y). */
11579 if (BUILTIN_EXPONENT_P (fcode1))
11580 {
11581 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11582 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11583 arg1 = build_call_expr_loc (loc,
11584 expfn, 1,
11585 fold_convert_loc (loc, type, arg));
11586 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11587 }
11588
11589 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11590 if (fcode1 == BUILT_IN_POW
11591 || fcode1 == BUILT_IN_POWF
11592 || fcode1 == BUILT_IN_POWL)
11593 {
11594 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11595 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11596 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11597 tree neg11 = fold_convert_loc (loc, type,
11598 negate_expr (arg11));
11599 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11600 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11601 }
11602 }
11603 return NULL_TREE;
11604
11605 case TRUNC_DIV_EXPR:
11606 /* Optimize (X & (-A)) / A where A is a power of 2,
11607 	 to X >> log2(A).  */
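      /* For example, (X & -8) / 8 becomes X >> 3; the mask makes the
	 division exact, so the arithmetic shift is safe even for
	 negative X.  */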
11608 if (TREE_CODE (arg0) == BIT_AND_EXPR
11609 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11610 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11611 {
11612 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11613 arg1, TREE_OPERAND (arg0, 1));
11614 	      if (sum && integer_zerop (sum))
		{
11615 unsigned long pow2;
11616
11617 if (TREE_INT_CST_LOW (arg1))
11618 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11619 else
11620 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11621 + HOST_BITS_PER_WIDE_INT;
11622
11623 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11624 TREE_OPERAND (arg0, 0),
11625 build_int_cst (NULL_TREE, pow2));
11626 		}
11627 }
11628
11629       /* Fall through.  */
11630
11631 case FLOOR_DIV_EXPR:
11632 /* Simplify A / (B << N) where A and B are positive and B is
11633 a power of 2, to A >> (N + log2(B)). */
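      /* For example, A / (4 << N) becomes A >> (N + 2) when A is
	 known to be nonnegative.  */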
11634 strict_overflow_p = false;
11635 if (TREE_CODE (arg1) == LSHIFT_EXPR
11636 && (TYPE_UNSIGNED (type)
11637 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11638 {
11639 tree sval = TREE_OPERAND (arg1, 0);
11640 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11641 {
11642 tree sh_cnt = TREE_OPERAND (arg1, 1);
11643 unsigned long pow2;
11644
11645 if (TREE_INT_CST_LOW (sval))
11646 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11647 else
11648 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11649 + HOST_BITS_PER_WIDE_INT;
11650
11651 if (strict_overflow_p)
11652 fold_overflow_warning (("assuming signed overflow does not "
11653 "occur when simplifying A / (B << N)"),
11654 WARN_STRICT_OVERFLOW_MISC);
11655
11656 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11657 sh_cnt, build_int_cst (NULL_TREE, pow2));
11658 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11659 fold_convert_loc (loc, type, arg0), sh_cnt);
11660 }
11661 }
11662
11663 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11664 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11665 if (INTEGRAL_TYPE_P (type)
11666 && TYPE_UNSIGNED (type)
11667 && code == FLOOR_DIV_EXPR)
11668 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11669
11670       /* Fall through.  */
11671
11672 case ROUND_DIV_EXPR:
11673 case CEIL_DIV_EXPR:
11674 case EXACT_DIV_EXPR:
11675 if (integer_onep (arg1))
11676 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11677 if (integer_zerop (arg1))
11678 return NULL_TREE;
11679 /* X / -1 is -X. */
11680 if (!TYPE_UNSIGNED (type)
11681 && TREE_CODE (arg1) == INTEGER_CST
11682 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11683 && TREE_INT_CST_HIGH (arg1) == -1)
11684 return fold_convert_loc (loc, type, negate_expr (arg0));
11685
11686 /* Convert -A / -B to A / B when the type is signed and overflow is
11687 undefined. */
11688 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11689 && TREE_CODE (arg0) == NEGATE_EXPR
11690 && negate_expr_p (arg1))
11691 {
11692 if (INTEGRAL_TYPE_P (type))
11693 fold_overflow_warning (("assuming signed overflow does not occur "
11694 "when distributing negation across "
11695 "division"),
11696 WARN_STRICT_OVERFLOW_MISC);
11697 return fold_build2_loc (loc, code, type,
11698 fold_convert_loc (loc, type,
11699 TREE_OPERAND (arg0, 0)),
11700 fold_convert_loc (loc, type,
11701 negate_expr (arg1)));
11702 }
11703 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11704 && TREE_CODE (arg1) == NEGATE_EXPR
11705 && negate_expr_p (arg0))
11706 {
11707 if (INTEGRAL_TYPE_P (type))
11708 fold_overflow_warning (("assuming signed overflow does not occur "
11709 "when distributing negation across "
11710 "division"),
11711 WARN_STRICT_OVERFLOW_MISC);
11712 return fold_build2_loc (loc, code, type,
11713 fold_convert_loc (loc, type,
11714 negate_expr (arg0)),
11715 fold_convert_loc (loc, type,
11716 TREE_OPERAND (arg1, 0)));
11717 }
11718
11719 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11720 operation, EXACT_DIV_EXPR.
11721
11722 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11723 At one time others generated faster code, it's not clear if they do
11724 	 after the last round of changes to the DIV code in expmed.c.  */
11725 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11726 && multiple_of_p (type, arg0, arg1))
11727 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11728
11729 strict_overflow_p = false;
11730 if (TREE_CODE (arg1) == INTEGER_CST
11731 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11732 &strict_overflow_p)))
11733 {
11734 if (strict_overflow_p)
11735 fold_overflow_warning (("assuming signed overflow does not occur "
11736 "when simplifying division"),
11737 WARN_STRICT_OVERFLOW_MISC);
11738 return fold_convert_loc (loc, type, tem);
11739 }
11740
11741 return NULL_TREE;
11742
11743 case CEIL_MOD_EXPR:
11744 case FLOOR_MOD_EXPR:
11745 case ROUND_MOD_EXPR:
11746 case TRUNC_MOD_EXPR:
11747 /* X % 1 is always zero, but be sure to preserve any side
11748 effects in X. */
11749 if (integer_onep (arg1))
11750 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11751
11752       /* For X % 0, return the expression unchanged so that we get the
11753 proper warnings and errors. */
11754 if (integer_zerop (arg1))
11755 return NULL_TREE;
11756
11757 /* 0 % X is always zero, but be sure to preserve any side
11758 effects in X. Place this after checking for X == 0. */
11759 if (integer_zerop (arg0))
11760 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11761
11762 /* X % -1 is zero. */
11763 if (!TYPE_UNSIGNED (type)
11764 && TREE_CODE (arg1) == INTEGER_CST
11765 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11766 && TREE_INT_CST_HIGH (arg1) == -1)
11767 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11768
11769 /* X % -C is the same as X % C. */
11770 if (code == TRUNC_MOD_EXPR
11771 && !TYPE_UNSIGNED (type)
11772 && TREE_CODE (arg1) == INTEGER_CST
11773 && !TREE_OVERFLOW (arg1)
11774 && TREE_INT_CST_HIGH (arg1) < 0
11775 && !TYPE_OVERFLOW_TRAPS (type)
11776 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11777 && !sign_bit_p (arg1, arg1))
11778 return fold_build2_loc (loc, code, type,
11779 fold_convert_loc (loc, type, arg0),
11780 fold_convert_loc (loc, type,
11781 negate_expr (arg1)));
11782
11783 /* X % -Y is the same as X % Y. */
11784 if (code == TRUNC_MOD_EXPR
11785 && !TYPE_UNSIGNED (type)
11786 && TREE_CODE (arg1) == NEGATE_EXPR
11787 && !TYPE_OVERFLOW_TRAPS (type))
11788 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11789 fold_convert_loc (loc, type,
11790 TREE_OPERAND (arg1, 0)));
11791
11792 strict_overflow_p = false;
11793 if (TREE_CODE (arg1) == INTEGER_CST
11794 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11795 &strict_overflow_p)))
11796 {
11797 if (strict_overflow_p)
11798 fold_overflow_warning (("assuming signed overflow does not occur "
11799 "when simplifying modulus"),
11800 WARN_STRICT_OVERFLOW_MISC);
11801 return fold_convert_loc (loc, type, tem);
11802 }
11803
11804 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11805 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
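      /* For example, X % 16 becomes X & 15, and A % (2 << N) becomes
	 A & ((2 << N) - 1).  */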
11806 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11807 && (TYPE_UNSIGNED (type)
11808 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11809 {
11810 tree c = arg1;
11811 /* Also optimize A % (C << N) where C is a power of 2,
11812 to A & ((C << N) - 1). */
11813 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11814 c = TREE_OPERAND (arg1, 0);
11815
11816 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11817 {
11818 tree mask
11819 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11820 build_int_cst (TREE_TYPE (arg1), 1));
11821 if (strict_overflow_p)
11822 fold_overflow_warning (("assuming signed overflow does not "
11823 "occur when simplifying "
11824 "X % (power of two)"),
11825 WARN_STRICT_OVERFLOW_MISC);
11826 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11827 fold_convert_loc (loc, type, arg0),
11828 fold_convert_loc (loc, type, mask));
11829 }
11830 }
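	      /* Illustrative sketch (hypothetical unsigned x, n):
	           x % 8          =>   x & 7
	           x % (4U << n)  =>   x & ((4U << n) - 1)
	         valid because the divisor is a (possibly shifted) power of two
	         and the dividend is known to be non-negative.  */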
11831
11832 return NULL_TREE;
11833
11834 case LROTATE_EXPR:
11835 case RROTATE_EXPR:
11836 if (integer_all_onesp (arg0))
11837 return omit_one_operand_loc (loc, type, arg0, arg1);
11838 goto shift;
11839
11840 case RSHIFT_EXPR:
11841 /* Optimize -1 >> x for arithmetic right shifts. */
11842 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11843 && tree_expr_nonnegative_p (arg1))
11844 return omit_one_operand_loc (loc, type, arg0, arg1);
11845 /* ... fall through ... */
11846
11847 case LSHIFT_EXPR:
11848 shift:
11849 if (integer_zerop (arg1))
11850 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11851 if (integer_zerop (arg0))
11852 return omit_one_operand_loc (loc, type, arg0, arg1);
11853
11854 	      /* Since a negative shift count is not well-defined,
11855 		 don't try to compute it in the compiler. */
11856 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11857 return NULL_TREE;
11858
11859 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11860 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11861 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11862 && host_integerp (TREE_OPERAND (arg0, 1), false)
11863 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11864 {
11865 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11866 + TREE_INT_CST_LOW (arg1));
11867
11868 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11869 being well defined. */
11870 if (low >= TYPE_PRECISION (type))
11871 {
11872 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11873 low = low % TYPE_PRECISION (type);
11874 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11875 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
11876 TREE_OPERAND (arg0, 0));
11877 else
11878 low = TYPE_PRECISION (type) - 1;
11879 }
11880
11881 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11882 build_int_cst (type, low));
11883 }
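	      /* Illustrative sketch (hypothetical 32-bit x):
	           (x << 3) << 5    =>   x << 8
	           (x << 20) << 20  =>   0   (total count 40 >= 32 for an
	                                      unsigned type or left shift)
	         For rotates, an over-wide total is reduced modulo 32 instead.  */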
11884
11885 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11886 into x & ((unsigned)-1 >> c) for unsigned types. */
11887 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11888 || (TYPE_UNSIGNED (type)
11889 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11890 && host_integerp (arg1, false)
11891 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11892 && host_integerp (TREE_OPERAND (arg0, 1), false)
11893 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11894 {
11895 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11896 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11897 tree lshift;
11898 tree arg00;
11899
11900 if (low0 == low1)
11901 {
11902 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11903
11904 lshift = build_int_cst (type, -1);
11905 lshift = int_const_binop (code, lshift, arg1, 0);
11906
11907 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11908 }
11909 }
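	      /* Illustrative sketch (hypothetical unsigned 32-bit x):
	           (x >> 4) << 4   =>   x & (-1 << 4)            (x & 0xfffffff0)
	           (x << 4) >> 4   =>   x & ((unsigned) -1 >> 4) (x & 0x0fffffff)  */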
11910
11911 /* Rewrite an LROTATE_EXPR by a constant into an
11912 RROTATE_EXPR by a new constant. */
11913 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11914 {
11915 tree tem = build_int_cst (TREE_TYPE (arg1),
11916 TYPE_PRECISION (type));
11917 tem = const_binop (MINUS_EXPR, tem, arg1);
11918 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
11919 }
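	      /* Illustrative sketch (32-bit type assumed): a rotate left by 5
	         becomes a rotate right by 32 - 5 == 27.  */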
11920
11921 /* If we have a rotate of a bit operation with the rotate count and
11922 the second operand of the bit operation both constant,
11923 permute the two operations. */
11924 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11925 && (TREE_CODE (arg0) == BIT_AND_EXPR
11926 || TREE_CODE (arg0) == BIT_IOR_EXPR
11927 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11928 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11929 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11930 fold_build2_loc (loc, code, type,
11931 TREE_OPERAND (arg0, 0), arg1),
11932 fold_build2_loc (loc, code, type,
11933 TREE_OPERAND (arg0, 1), arg1));
11934
11935 /* Two consecutive rotates adding up to the precision of the
11936 type can be ignored. */
11937 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11938 && TREE_CODE (arg0) == RROTATE_EXPR
11939 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11940 && TREE_INT_CST_HIGH (arg1) == 0
11941 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11942 && ((TREE_INT_CST_LOW (arg1)
11943 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11944 == (unsigned int) TYPE_PRECISION (type)))
11945 return TREE_OPERAND (arg0, 0);
11946
11947 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11948 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11949 if the latter can be further optimized. */
11950 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11951 && TREE_CODE (arg0) == BIT_AND_EXPR
11952 && TREE_CODE (arg1) == INTEGER_CST
11953 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11954 {
11955 tree mask = fold_build2_loc (loc, code, type,
11956 fold_convert_loc (loc, type,
11957 TREE_OPERAND (arg0, 1)),
11958 arg1);
11959 tree shift = fold_build2_loc (loc, code, type,
11960 fold_convert_loc (loc, type,
11961 TREE_OPERAND (arg0, 0)),
11962 arg1);
11963 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11964 if (tem)
11965 return tem;
11966 }
11967
11968 return NULL_TREE;
11969
11970 case MIN_EXPR:
11971 if (operand_equal_p (arg0, arg1, 0))
11972 return omit_one_operand_loc (loc, type, arg0, arg1);
11973 if (INTEGRAL_TYPE_P (type)
11974 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11975 return omit_one_operand_loc (loc, type, arg1, arg0);
11976 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11977 if (tem)
11978 return tem;
11979 goto associate;
11980
11981 case MAX_EXPR:
11982 if (operand_equal_p (arg0, arg1, 0))
11983 return omit_one_operand_loc (loc, type, arg0, arg1);
11984 if (INTEGRAL_TYPE_P (type)
11985 && TYPE_MAX_VALUE (type)
11986 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11987 return omit_one_operand_loc (loc, type, arg1, arg0);
11988 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11989 if (tem)
11990 return tem;
11991 goto associate;
11992
11993 case TRUTH_ANDIF_EXPR:
11994 /* Note that the operands of this must be ints
11995 and their values must be 0 or 1.
11996 ("true" is a fixed value perhaps depending on the language.) */
11997 /* If first arg is constant zero, return it. */
11998 if (integer_zerop (arg0))
11999 return fold_convert_loc (loc, type, arg0);
12000 case TRUTH_AND_EXPR:
12001 /* If either arg is constant true, drop it. */
12002 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12003 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12004 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12005 /* Preserve sequence points. */
12006 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12007 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12008 /* If second arg is constant zero, result is zero, but first arg
12009 must be evaluated. */
12010 if (integer_zerop (arg1))
12011 return omit_one_operand_loc (loc, type, arg1, arg0);
12012 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12013 case will be handled here. */
12014 if (integer_zerop (arg0))
12015 return omit_one_operand_loc (loc, type, arg0, arg1);
12016
12017 /* !X && X is always false. */
12018 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12019 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12020 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12021 /* X && !X is always false. */
12022 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12023 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12024 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12025
12026 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12027 means A >= Y && A != MAX, but in this case we know that
12028 A < X <= MAX. */
12029
12030 if (!TREE_SIDE_EFFECTS (arg0)
12031 && !TREE_SIDE_EFFECTS (arg1))
12032 {
12033 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12034 if (tem && !operand_equal_p (tem, arg0, 0))
12035 return fold_build2_loc (loc, code, type, tem, arg1);
12036
12037 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12038 if (tem && !operand_equal_p (tem, arg1, 0))
12039 return fold_build2_loc (loc, code, type, arg0, tem);
12040 }
12041
12042 truth_andor:
12043 /* We only do these simplifications if we are optimizing. */
12044 if (!optimize)
12045 return NULL_TREE;
12046
12047 /* Check for things like (A || B) && (A || C). We can convert this
12048 to A || (B && C). Note that either operator can be any of the four
12049 truth and/or operations and the transformation will still be
12050 valid. Also note that we only care about order for the
12051 ANDIF and ORIF operators. If B contains side effects, this
12052 might change the truth-value of A. */
12053 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12054 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12055 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12056 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12057 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12058 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12059 {
12060 tree a00 = TREE_OPERAND (arg0, 0);
12061 tree a01 = TREE_OPERAND (arg0, 1);
12062 tree a10 = TREE_OPERAND (arg1, 0);
12063 tree a11 = TREE_OPERAND (arg1, 1);
12064 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12065 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12066 && (code == TRUTH_AND_EXPR
12067 || code == TRUTH_OR_EXPR));
12068
12069 if (operand_equal_p (a00, a10, 0))
12070 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12071 fold_build2_loc (loc, code, type, a01, a11));
12072 else if (commutative && operand_equal_p (a00, a11, 0))
12073 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12074 fold_build2_loc (loc, code, type, a01, a10));
12075 else if (commutative && operand_equal_p (a01, a10, 0))
12076 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12077 fold_build2_loc (loc, code, type, a00, a11));
12078
12079 	  /* This case is tricky because we must either have commutative
12080 	     operators or else A10 must not have side-effects. */
12081
12082 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12083 && operand_equal_p (a01, a11, 0))
12084 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12085 fold_build2_loc (loc, code, type, a00, a10),
12086 a01);
12087 }
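	      /* Illustrative sketch (hypothetical a, b, c, no side effects):
	           (a || b) && (a || c)   =>   a || (b && c)
	           (a && b) || (a && c)   =>   a && (b || c)  */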
12088
12089 /* See if we can build a range comparison. */
12090 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12091 return tem;
12092
12093 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
12094 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
12095 {
12096 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
12097 if (tem)
12098 return fold_build2_loc (loc, code, type, tem, arg1);
12099 }
12100
12101 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
12102 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
12103 {
12104 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
12105 if (tem)
12106 return fold_build2_loc (loc, code, type, arg0, tem);
12107 }
12108
12109 /* Check for the possibility of merging component references. If our
12110 lhs is another similar operation, try to merge its rhs with our
12111 rhs. Then try to merge our lhs and rhs. */
12112 if (TREE_CODE (arg0) == code
12113 && 0 != (tem = fold_truthop (loc, code, type,
12114 TREE_OPERAND (arg0, 1), arg1)))
12115 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12116
12117 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12118 return tem;
12119
12120 return NULL_TREE;
12121
12122 case TRUTH_ORIF_EXPR:
12123 /* Note that the operands of this must be ints
12124 and their values must be 0 or true.
12125 ("true" is a fixed value perhaps depending on the language.) */
12126 /* If first arg is constant true, return it. */
12127 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12128 return fold_convert_loc (loc, type, arg0);
12129 case TRUTH_OR_EXPR:
12130 /* If either arg is constant zero, drop it. */
12131 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12132 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12133 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12134 /* Preserve sequence points. */
12135 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12136 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12137 /* If second arg is constant true, result is true, but we must
12138 evaluate first arg. */
12139 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12140 return omit_one_operand_loc (loc, type, arg1, arg0);
12141 /* Likewise for first arg, but note this only occurs here for
12142 TRUTH_OR_EXPR. */
12143 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12144 return omit_one_operand_loc (loc, type, arg0, arg1);
12145
12146 /* !X || X is always true. */
12147 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12148 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12149 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12150 /* X || !X is always true. */
12151 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12152 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12153 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12154
12155 goto truth_andor;
12156
12157 case TRUTH_XOR_EXPR:
12158 /* If the second arg is constant zero, drop it. */
12159 if (integer_zerop (arg1))
12160 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12161 /* If the second arg is constant true, this is a logical inversion. */
12162 if (integer_onep (arg1))
12163 {
12164 /* Only call invert_truthvalue if operand is a truth value. */
12165 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12166 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12167 else
12168 tem = invert_truthvalue_loc (loc, arg0);
12169 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12170 }
12171 /* Identical arguments cancel to zero. */
12172 if (operand_equal_p (arg0, arg1, 0))
12173 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12174
12175 /* !X ^ X is always true. */
12176 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12177 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12178 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12179
12180 /* X ^ !X is always true. */
12181 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12182 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12183 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12184
12185 return NULL_TREE;
12186
12187 case EQ_EXPR:
12188 case NE_EXPR:
12189 tem = fold_comparison (loc, code, type, op0, op1);
12190 if (tem != NULL_TREE)
12191 return tem;
12192
12193 /* bool_var != 0 becomes bool_var. */
12194 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12195 && code == NE_EXPR)
12196 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12197
12198 /* bool_var == 1 becomes bool_var. */
12199 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12200 && code == EQ_EXPR)
12201 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12202
12203 /* bool_var != 1 becomes !bool_var. */
12204 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12205 && code == NE_EXPR)
12206 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12207 fold_convert_loc (loc, type, arg0));
12208
12209 /* bool_var == 0 becomes !bool_var. */
12210 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12211 && code == EQ_EXPR)
12212 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12213 fold_convert_loc (loc, type, arg0));
12214
12215 /* !exp != 0 becomes !exp */
12216 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12217 && code == NE_EXPR)
12218 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12219
12220 /* If this is an equality comparison of the address of two non-weak,
12221 unaliased symbols neither of which are extern (since we do not
12222 have access to attributes for externs), then we know the result. */
12223 if (TREE_CODE (arg0) == ADDR_EXPR
12224 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12225 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12226 && ! lookup_attribute ("alias",
12227 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12228 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12229 && TREE_CODE (arg1) == ADDR_EXPR
12230 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12231 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12232 && ! lookup_attribute ("alias",
12233 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12234 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12235 {
12236 /* We know that we're looking at the address of two
12237 non-weak, unaliased, static _DECL nodes.
12238
12239 It is both wasteful and incorrect to call operand_equal_p
12240 to compare the two ADDR_EXPR nodes. It is wasteful in that
12241 all we need to do is test pointer equality for the arguments
12242 to the two ADDR_EXPR nodes. It is incorrect to use
12243 operand_equal_p as that function is NOT equivalent to a
12244 C equality test. It can in fact return false for two
12245 objects which would test as equal using the C equality
12246 operator. */
12247 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12248 return constant_boolean_node (equal
12249 ? code == EQ_EXPR : code != EQ_EXPR,
12250 type);
12251 }
12252
12253 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12254 a MINUS_EXPR of a constant, we can convert it into a comparison with
12255 a revised constant as long as no overflow occurs. */
12256 if (TREE_CODE (arg1) == INTEGER_CST
12257 && (TREE_CODE (arg0) == PLUS_EXPR
12258 || TREE_CODE (arg0) == MINUS_EXPR)
12259 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12260 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12261 ? MINUS_EXPR : PLUS_EXPR,
12262 fold_convert_loc (loc, TREE_TYPE (arg0),
12263 arg1),
12264 TREE_OPERAND (arg0, 1)))
12265 && !TREE_OVERFLOW (tem))
12266 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12267
12268 /* Similarly for a NEGATE_EXPR. */
12269 if (TREE_CODE (arg0) == NEGATE_EXPR
12270 && TREE_CODE (arg1) == INTEGER_CST
12271 && 0 != (tem = negate_expr (arg1))
12272 && TREE_CODE (tem) == INTEGER_CST
12273 && !TREE_OVERFLOW (tem))
12274 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12275
12276 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12277 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12278 && TREE_CODE (arg1) == INTEGER_CST
12279 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12280 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12281 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12282 fold_convert_loc (loc,
12283 TREE_TYPE (arg0),
12284 arg1),
12285 TREE_OPERAND (arg0, 1)));
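	      /* Illustrative sketch (hypothetical x):
	           (x ^ 3) == 5   =>   x == (3 ^ 5)   i.e. x == 6  */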
12286
12287 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12288 if ((TREE_CODE (arg0) == PLUS_EXPR
12289 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12290 || TREE_CODE (arg0) == MINUS_EXPR)
12291 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12292 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12293 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12294 {
12295 tree val = TREE_OPERAND (arg0, 1);
12296 return omit_two_operands_loc (loc, type,
12297 fold_build2_loc (loc, code, type,
12298 val,
12299 build_int_cst (TREE_TYPE (val),
12300 0)),
12301 TREE_OPERAND (arg0, 0), arg1);
12302 }
12303
12304 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12305 if (TREE_CODE (arg0) == MINUS_EXPR
12306 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12307 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12308 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12309 {
12310 return omit_two_operands_loc (loc, type,
12311 code == NE_EXPR
12312 ? boolean_true_node : boolean_false_node,
12313 TREE_OPERAND (arg0, 1), arg1);
12314 }
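	      /* Illustrative sketch (hypothetical 32-bit x): (7 - x) == x would
	         need 2*x == 7 (mod 2^32), which is impossible because the left
	         side is even and the right side odd; so == folds to false and
	         != folds to true, keeping x only for its side effects.  */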
12315
12316 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12317 for !=. Don't do this for ordered comparisons due to overflow. */
12318 if (TREE_CODE (arg0) == MINUS_EXPR
12319 && integer_zerop (arg1))
12320 return fold_build2_loc (loc, code, type,
12321 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12322
12323 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12324 if (TREE_CODE (arg0) == ABS_EXPR
12325 && (integer_zerop (arg1) || real_zerop (arg1)))
12326 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12327
12328 /* If this is an EQ or NE comparison with zero and ARG0 is
12329 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12330 two operations, but the latter can be done in one less insn
12331 on machines that have only two-operand insns or on which a
12332 constant cannot be the first operand. */
12333 if (TREE_CODE (arg0) == BIT_AND_EXPR
12334 && integer_zerop (arg1))
12335 {
12336 tree arg00 = TREE_OPERAND (arg0, 0);
12337 tree arg01 = TREE_OPERAND (arg0, 1);
12338 if (TREE_CODE (arg00) == LSHIFT_EXPR
12339 && integer_onep (TREE_OPERAND (arg00, 0)))
12340 {
12341 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12342 arg01, TREE_OPERAND (arg00, 1));
12343 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12344 build_int_cst (TREE_TYPE (arg0), 1));
12345 return fold_build2_loc (loc, code, type,
12346 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12347 arg1);
12348 }
12349 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12350 && integer_onep (TREE_OPERAND (arg01, 0)))
12351 {
12352 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12353 arg00, TREE_OPERAND (arg01, 1));
12354 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12355 build_int_cst (TREE_TYPE (arg0), 1));
12356 return fold_build2_loc (loc, code, type,
12357 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12358 arg1);
12359 }
12360 }
12361
12362 /* If this is an NE or EQ comparison of zero against the result of a
12363 signed MOD operation whose second operand is a power of 2, make
12364 the MOD operation unsigned since it is simpler and equivalent. */
12365 if (integer_zerop (arg1)
12366 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12367 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12368 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12369 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12370 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12371 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12372 {
12373 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12374 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12375 fold_convert_loc (loc, newtype,
12376 TREE_OPERAND (arg0, 0)),
12377 fold_convert_loc (loc, newtype,
12378 TREE_OPERAND (arg0, 1)));
12379
12380 return fold_build2_loc (loc, code, type, newmod,
12381 fold_convert_loc (loc, newtype, arg1));
12382 }
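	      /* Illustrative sketch (hypothetical signed int x):
	           x % 4 == 0   =>   (unsigned int) x % 4U == 0
	         The bits that decide the comparison against zero are the same
	         either way, and the unsigned form is simpler.  */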
12383
12384 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12385 C1 is a valid shift constant, and C2 is a power of two, i.e.
12386 a single bit. */
12387 if (TREE_CODE (arg0) == BIT_AND_EXPR
12388 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12389 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12390 == INTEGER_CST
12391 && integer_pow2p (TREE_OPERAND (arg0, 1))
12392 && integer_zerop (arg1))
12393 {
12394 tree itype = TREE_TYPE (arg0);
12395 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12396 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12397
12398 /* Check for a valid shift count. */
12399 if (TREE_INT_CST_HIGH (arg001) == 0
12400 && TREE_INT_CST_LOW (arg001) < prec)
12401 {
12402 tree arg01 = TREE_OPERAND (arg0, 1);
12403 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12404 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12405 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12406 can be rewritten as (X & (C2 << C1)) != 0. */
12407 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12408 {
12409 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12410 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12411 return fold_build2_loc (loc, code, type, tem, arg1);
12412 }
12413 /* Otherwise, for signed (arithmetic) shifts,
12414 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12415 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12416 else if (!TYPE_UNSIGNED (itype))
12417 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12418 arg000, build_int_cst (itype, 0));
12419 	      /* Otherwise, for unsigned (logical) shifts,
12420 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12421 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12422 else
12423 return omit_one_operand_loc (loc, type,
12424 code == EQ_EXPR ? integer_one_node
12425 : integer_zero_node,
12426 arg000);
12427 }
12428 }
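	      /* Illustrative sketch (hypothetical x):
	           ((x >> 3) & 4) != 0   =>   (x & (4 << 3)) != 0, i.e. (x & 32) != 0
	         When C2 << C1 would overflow, a signed x degenerates to the
	         sign test x < 0 (resp. x >= 0).  */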
12429
12430 /* If this is an NE comparison of zero with an AND of one, remove the
12431 comparison since the AND will give the correct value. */
12432 if (code == NE_EXPR
12433 && integer_zerop (arg1)
12434 && TREE_CODE (arg0) == BIT_AND_EXPR
12435 && integer_onep (TREE_OPERAND (arg0, 1)))
12436 return fold_convert_loc (loc, type, arg0);
12437
12438 /* If we have (A & C) == C where C is a power of 2, convert this into
12439 (A & C) != 0. Similarly for NE_EXPR. */
12440 if (TREE_CODE (arg0) == BIT_AND_EXPR
12441 && integer_pow2p (TREE_OPERAND (arg0, 1))
12442 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12443 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12444 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12445 integer_zero_node));
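	      /* Illustrative sketch (hypothetical x):
	           (x & 8) == 8   =>   (x & 8) != 0
	         valid because the masked value can only be 0 or 8.  */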
12446
12447 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12448 bit, then fold the expression into A < 0 or A >= 0. */
12449 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12450 if (tem)
12451 return tem;
12452
12453 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12454 Similarly for NE_EXPR. */
12455 if (TREE_CODE (arg0) == BIT_AND_EXPR
12456 && TREE_CODE (arg1) == INTEGER_CST
12457 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12458 {
12459 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12460 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12461 TREE_OPERAND (arg0, 1));
12462 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12463 arg1, notc);
12464 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12465 if (integer_nonzerop (dandnotc))
12466 return omit_one_operand_loc (loc, type, rslt, arg0);
12467 }
12468
12469 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12470 Similarly for NE_EXPR. */
12471 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12472 && TREE_CODE (arg1) == INTEGER_CST
12473 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12474 {
12475 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12476 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12477 TREE_OPERAND (arg0, 1), notd);
12478 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12479 if (integer_nonzerop (candnotd))
12480 return omit_one_operand_loc (loc, type, rslt, arg0);
12481 }
12482
12483 /* If this is a comparison of a field, we may be able to simplify it. */
12484 if ((TREE_CODE (arg0) == COMPONENT_REF
12485 || TREE_CODE (arg0) == BIT_FIELD_REF)
12486 /* Handle the constant case even without -O
12487 to make sure the warnings are given. */
12488 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12489 {
12490 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12491 if (t1)
12492 return t1;
12493 }
12494
12495 /* Optimize comparisons of strlen vs zero to a compare of the
12496 first character of the string vs zero. To wit,
12497 strlen(ptr) == 0 => *ptr == 0
12498 strlen(ptr) != 0 => *ptr != 0
12499 Other cases should reduce to one of these two (or a constant)
12500 due to the return value of strlen being unsigned. */
12501 if (TREE_CODE (arg0) == CALL_EXPR
12502 && integer_zerop (arg1))
12503 {
12504 tree fndecl = get_callee_fndecl (arg0);
12505
12506 if (fndecl
12507 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12508 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12509 && call_expr_nargs (arg0) == 1
12510 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12511 {
12512 tree iref = build_fold_indirect_ref_loc (loc,
12513 CALL_EXPR_ARG (arg0, 0));
12514 return fold_build2_loc (loc, code, type, iref,
12515 build_int_cst (TREE_TYPE (iref), 0));
12516 }
12517 }
12518
12519 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12520 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12521 if (TREE_CODE (arg0) == RSHIFT_EXPR
12522 && integer_zerop (arg1)
12523 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12524 {
12525 tree arg00 = TREE_OPERAND (arg0, 0);
12526 tree arg01 = TREE_OPERAND (arg0, 1);
12527 tree itype = TREE_TYPE (arg00);
12528 if (TREE_INT_CST_HIGH (arg01) == 0
12529 && TREE_INT_CST_LOW (arg01)
12530 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12531 {
12532 if (TYPE_UNSIGNED (itype))
12533 {
12534 itype = signed_type_for (itype);
12535 arg00 = fold_convert_loc (loc, itype, arg00);
12536 }
12537 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12538 type, arg00, build_int_cst (itype, 0));
12539 }
12540 }
12541
12542 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12543 if (integer_zerop (arg1)
12544 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12545 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12546 TREE_OPERAND (arg0, 1));
12547
12548 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12549 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12550 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12551 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12552 build_int_cst (TREE_TYPE (arg1), 0));
12553 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12554 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12555 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12556 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12557 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12558 build_int_cst (TREE_TYPE (arg1), 0));
12559
12560 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12561 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12562 && TREE_CODE (arg1) == INTEGER_CST
12563 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12564 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12565 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12566 TREE_OPERAND (arg0, 1), arg1));
12567
12568 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12569 (X & C) == 0 when C is a single bit. */
12570 if (TREE_CODE (arg0) == BIT_AND_EXPR
12571 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12572 && integer_zerop (arg1)
12573 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12574 {
12575 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12576 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12577 TREE_OPERAND (arg0, 1));
12578 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12579 type, tem, arg1);
12580 }
12581
12582 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12583 constant C is a power of two, i.e. a single bit. */
12584 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12585 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12586 && integer_zerop (arg1)
12587 && integer_pow2p (TREE_OPERAND (arg0, 1))
12588 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12589 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12590 {
12591 tree arg00 = TREE_OPERAND (arg0, 0);
12592 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12593 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12594 }
12595
12596 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12597 	 when C is a power of two, i.e. a single bit. */
12598 if (TREE_CODE (arg0) == BIT_AND_EXPR
12599 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12600 && integer_zerop (arg1)
12601 && integer_pow2p (TREE_OPERAND (arg0, 1))
12602 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12603 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12604 {
12605 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12606 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12607 arg000, TREE_OPERAND (arg0, 1));
12608 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12609 tem, build_int_cst (TREE_TYPE (tem), 0));
12610 }
12611
12612 if (integer_zerop (arg1)
12613 && tree_expr_nonzero_p (arg0))
12614 {
12615 	  tree res = constant_boolean_node (code == NE_EXPR, type);
12616 return omit_one_operand_loc (loc, type, res, arg0);
12617 }
12618
12619 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12620 if (TREE_CODE (arg0) == NEGATE_EXPR
12621 && TREE_CODE (arg1) == NEGATE_EXPR)
12622 return fold_build2_loc (loc, code, type,
12623 TREE_OPERAND (arg0, 0),
12624 TREE_OPERAND (arg1, 0));
12625
12626       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12627 if (TREE_CODE (arg0) == BIT_AND_EXPR
12628 && TREE_CODE (arg1) == BIT_AND_EXPR)
12629 {
12630 tree arg00 = TREE_OPERAND (arg0, 0);
12631 tree arg01 = TREE_OPERAND (arg0, 1);
12632 tree arg10 = TREE_OPERAND (arg1, 0);
12633 tree arg11 = TREE_OPERAND (arg1, 1);
12634 tree itype = TREE_TYPE (arg0);
12635
12636 if (operand_equal_p (arg01, arg11, 0))
12637 return fold_build2_loc (loc, code, type,
12638 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12639 fold_build2_loc (loc,
12640 BIT_XOR_EXPR, itype,
12641 arg00, arg10),
12642 arg01),
12643 build_int_cst (itype, 0));
12644
12645 if (operand_equal_p (arg01, arg10, 0))
12646 return fold_build2_loc (loc, code, type,
12647 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12648 fold_build2_loc (loc,
12649 BIT_XOR_EXPR, itype,
12650 arg00, arg11),
12651 arg01),
12652 build_int_cst (itype, 0));
12653
12654 if (operand_equal_p (arg00, arg11, 0))
12655 return fold_build2_loc (loc, code, type,
12656 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12657 fold_build2_loc (loc,
12658 BIT_XOR_EXPR, itype,
12659 arg01, arg10),
12660 arg00),
12661 build_int_cst (itype, 0));
12662
12663 if (operand_equal_p (arg00, arg10, 0))
12664 return fold_build2_loc (loc, code, type,
12665 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12666 fold_build2_loc (loc,
12667 BIT_XOR_EXPR, itype,
12668 arg01, arg11),
12669 arg00),
12670 build_int_cst (itype, 0));
12671 }
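	      /* Illustrative sketch (hypothetical x, y):
	           (x & 7) == (y & 7)   =>   ((x ^ y) & 7) == 0  */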
12672
12673 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12674 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12675 {
12676 tree arg00 = TREE_OPERAND (arg0, 0);
12677 tree arg01 = TREE_OPERAND (arg0, 1);
12678 tree arg10 = TREE_OPERAND (arg1, 0);
12679 tree arg11 = TREE_OPERAND (arg1, 1);
12680 tree itype = TREE_TYPE (arg0);
12681
12682 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12683 operand_equal_p guarantees no side-effects so we don't need
12684 to use omit_one_operand on Z. */
12685 if (operand_equal_p (arg01, arg11, 0))
12686 return fold_build2_loc (loc, code, type, arg00, arg10);
12687 if (operand_equal_p (arg01, arg10, 0))
12688 return fold_build2_loc (loc, code, type, arg00, arg11);
12689 if (operand_equal_p (arg00, arg11, 0))
12690 return fold_build2_loc (loc, code, type, arg01, arg10);
12691 if (operand_equal_p (arg00, arg10, 0))
12692 return fold_build2_loc (loc, code, type, arg01, arg11);
12693
12694 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12695 if (TREE_CODE (arg01) == INTEGER_CST
12696 && TREE_CODE (arg11) == INTEGER_CST)
12697 return fold_build2_loc (loc, code, type,
12698 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
12699 fold_build2_loc (loc,
12700 BIT_XOR_EXPR, itype,
12701 arg01, arg11)),
12702 arg10);
12703 }
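	      /* Illustrative sketch (hypothetical x, y):
	           (x ^ 3) == (y ^ 5)   =>   (x ^ (3 ^ 5)) == y, i.e. (x ^ 6) == y  */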
12704
12705 /* Attempt to simplify equality/inequality comparisons of complex
12706 values. Only lower the comparison if the result is known or
12707 can be simplified to a single scalar comparison. */
12708 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12709 || TREE_CODE (arg0) == COMPLEX_CST)
12710 && (TREE_CODE (arg1) == COMPLEX_EXPR
12711 || TREE_CODE (arg1) == COMPLEX_CST))
12712 {
12713 tree real0, imag0, real1, imag1;
12714 tree rcond, icond;
12715
12716 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12717 {
12718 real0 = TREE_OPERAND (arg0, 0);
12719 imag0 = TREE_OPERAND (arg0, 1);
12720 }
12721 else
12722 {
12723 real0 = TREE_REALPART (arg0);
12724 imag0 = TREE_IMAGPART (arg0);
12725 }
12726
12727 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12728 {
12729 real1 = TREE_OPERAND (arg1, 0);
12730 imag1 = TREE_OPERAND (arg1, 1);
12731 }
12732 else
12733 {
12734 real1 = TREE_REALPART (arg1);
12735 imag1 = TREE_IMAGPART (arg1);
12736 }
12737
12738 rcond = fold_binary_loc (loc, code, type, real0, real1);
12739 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12740 {
12741 if (integer_zerop (rcond))
12742 {
12743 if (code == EQ_EXPR)
12744 return omit_two_operands_loc (loc, type, boolean_false_node,
12745 imag0, imag1);
12746 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12747 }
12748 else
12749 {
12750 if (code == NE_EXPR)
12751 return omit_two_operands_loc (loc, type, boolean_true_node,
12752 imag0, imag1);
12753 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12754 }
12755 }
12756
12757 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12758 if (icond && TREE_CODE (icond) == INTEGER_CST)
12759 {
12760 if (integer_zerop (icond))
12761 {
12762 if (code == EQ_EXPR)
12763 return omit_two_operands_loc (loc, type, boolean_false_node,
12764 real0, real1);
12765 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12766 }
12767 else
12768 {
12769 if (code == NE_EXPR)
12770 return omit_two_operands_loc (loc, type, boolean_true_node,
12771 real0, real1);
12772 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12773 }
12774 }
12775 }
12776
12777 return NULL_TREE;
12778
12779 case LT_EXPR:
12780 case GT_EXPR:
12781 case LE_EXPR:
12782 case GE_EXPR:
12783 tem = fold_comparison (loc, code, type, op0, op1);
12784 if (tem != NULL_TREE)
12785 return tem;
12786
12787 /* Transform comparisons of the form X +- C CMP X. */
12788 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12789 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12790 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12791 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12792 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12793 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12794 {
12795 tree arg01 = TREE_OPERAND (arg0, 1);
12796 enum tree_code code0 = TREE_CODE (arg0);
12797 int is_positive;
12798
12799 if (TREE_CODE (arg01) == REAL_CST)
12800 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12801 else
12802 is_positive = tree_int_cst_sgn (arg01);
12803
12804 /* (X - c) > X becomes false. */
12805 if (code == GT_EXPR
12806 && ((code0 == MINUS_EXPR && is_positive >= 0)
12807 || (code0 == PLUS_EXPR && is_positive <= 0)))
12808 {
12809 if (TREE_CODE (arg01) == INTEGER_CST
12810 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12811 fold_overflow_warning (("assuming signed overflow does not "
12812 "occur when assuming that (X - c) > X "
12813 "is always false"),
12814 WARN_STRICT_OVERFLOW_ALL);
12815 return constant_boolean_node (0, type);
12816 }
12817
12818 /* Likewise (X + c) < X becomes false. */
12819 if (code == LT_EXPR
12820 && ((code0 == PLUS_EXPR && is_positive >= 0)
12821 || (code0 == MINUS_EXPR && is_positive <= 0)))
12822 {
12823 if (TREE_CODE (arg01) == INTEGER_CST
12824 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12825 fold_overflow_warning (("assuming signed overflow does not "
12826 "occur when assuming that "
12827 "(X + c) < X is always false"),
12828 WARN_STRICT_OVERFLOW_ALL);
12829 return constant_boolean_node (0, type);
12830 }
12831
12832 /* Convert (X - c) <= X to true. */
12833 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12834 && code == LE_EXPR
12835 && ((code0 == MINUS_EXPR && is_positive >= 0)
12836 || (code0 == PLUS_EXPR && is_positive <= 0)))
12837 {
12838 if (TREE_CODE (arg01) == INTEGER_CST
12839 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12840 fold_overflow_warning (("assuming signed overflow does not "
12841 "occur when assuming that "
12842 "(X - c) <= X is always true"),
12843 WARN_STRICT_OVERFLOW_ALL);
12844 return constant_boolean_node (1, type);
12845 }
12846
12847 /* Convert (X + c) >= X to true. */
12848 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12849 && code == GE_EXPR
12850 && ((code0 == PLUS_EXPR && is_positive >= 0)
12851 || (code0 == MINUS_EXPR && is_positive <= 0)))
12852 {
12853 if (TREE_CODE (arg01) == INTEGER_CST
12854 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12855 fold_overflow_warning (("assuming signed overflow does not "
12856 "occur when assuming that "
12857 "(X + c) >= X is always true"),
12858 WARN_STRICT_OVERFLOW_ALL);
12859 return constant_boolean_node (1, type);
12860 }
12861
12862 if (TREE_CODE (arg01) == INTEGER_CST)
12863 {
12864 /* Convert X + c > X and X - c < X to true for integers. */
12865 if (code == GT_EXPR
12866 && ((code0 == PLUS_EXPR && is_positive > 0)
12867 || (code0 == MINUS_EXPR && is_positive < 0)))
12868 {
12869 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12870 fold_overflow_warning (("assuming signed overflow does "
12871 "not occur when assuming that "
12872 "(X + c) > X is always true"),
12873 WARN_STRICT_OVERFLOW_ALL);
12874 return constant_boolean_node (1, type);
12875 }
12876
12877 if (code == LT_EXPR
12878 && ((code0 == MINUS_EXPR && is_positive > 0)
12879 || (code0 == PLUS_EXPR && is_positive < 0)))
12880 {
12881 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12882 fold_overflow_warning (("assuming signed overflow does "
12883 "not occur when assuming that "
12884 "(X - c) < X is always true"),
12885 WARN_STRICT_OVERFLOW_ALL);
12886 return constant_boolean_node (1, type);
12887 }
12888
12889 /* Convert X + c <= X and X - c >= X to false for integers. */
12890 if (code == LE_EXPR
12891 && ((code0 == PLUS_EXPR && is_positive > 0)
12892 || (code0 == MINUS_EXPR && is_positive < 0)))
12893 {
12894 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12895 fold_overflow_warning (("assuming signed overflow does "
12896 "not occur when assuming that "
12897 "(X + c) <= X is always false"),
12898 WARN_STRICT_OVERFLOW_ALL);
12899 return constant_boolean_node (0, type);
12900 }
12901
12902 if (code == GE_EXPR
12903 && ((code0 == MINUS_EXPR && is_positive > 0)
12904 || (code0 == PLUS_EXPR && is_positive < 0)))
12905 {
12906 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12907 fold_overflow_warning (("assuming signed overflow does "
12908 "not occur when assuming that "
12909 "(X - c) >= X is always false"),
12910 WARN_STRICT_OVERFLOW_ALL);
12911 return constant_boolean_node (0, type);
12912 }
12913 }
12914 }
12915
12916 /* Comparisons with the highest or lowest possible integer of
12917 the specified precision will have known values. */
12918 {
12919 tree arg1_type = TREE_TYPE (arg1);
12920 unsigned int width = TYPE_PRECISION (arg1_type);
12921
12922 if (TREE_CODE (arg1) == INTEGER_CST
12923 && width <= 2 * HOST_BITS_PER_WIDE_INT
12924 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12925 {
12926 HOST_WIDE_INT signed_max_hi;
12927 unsigned HOST_WIDE_INT signed_max_lo;
12928 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12929
12930 if (width <= HOST_BITS_PER_WIDE_INT)
12931 {
12932 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12933 - 1;
12934 signed_max_hi = 0;
12935 max_hi = 0;
12936
12937 if (TYPE_UNSIGNED (arg1_type))
12938 {
12939 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12940 min_lo = 0;
12941 min_hi = 0;
12942 }
12943 else
12944 {
12945 max_lo = signed_max_lo;
12946 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12947 min_hi = -1;
12948 }
12949 }
12950 else
12951 {
12952 width -= HOST_BITS_PER_WIDE_INT;
12953 signed_max_lo = -1;
12954 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12955 - 1;
12956 max_lo = -1;
12957 min_lo = 0;
12958
12959 if (TYPE_UNSIGNED (arg1_type))
12960 {
12961 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12962 min_hi = 0;
12963 }
12964 else
12965 {
12966 max_hi = signed_max_hi;
12967 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12968 }
12969 }
12970
12971 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12972 && TREE_INT_CST_LOW (arg1) == max_lo)
12973 switch (code)
12974 {
12975 case GT_EXPR:
12976 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12977
12978 case GE_EXPR:
12979 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12980
12981 case LE_EXPR:
12982 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12983
12984 case LT_EXPR:
12985 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12986
12987 /* The GE_EXPR and LT_EXPR cases above are not normally
12988 reached because of previous transformations. */
12989
12990 default:
12991 break;
12992 }
12993 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12994 == max_hi
12995 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12996 switch (code)
12997 {
12998 case GT_EXPR:
12999 arg1 = const_binop (PLUS_EXPR, arg1,
13000 build_int_cst (TREE_TYPE (arg1), 1));
13001 return fold_build2_loc (loc, EQ_EXPR, type,
13002 fold_convert_loc (loc,
13003 TREE_TYPE (arg1), arg0),
13004 arg1);
13005 case LE_EXPR:
13006 arg1 = const_binop (PLUS_EXPR, arg1,
13007 build_int_cst (TREE_TYPE (arg1), 1));
13008 return fold_build2_loc (loc, NE_EXPR, type,
13009 fold_convert_loc (loc, TREE_TYPE (arg1),
13010 arg0),
13011 arg1);
13012 default:
13013 break;
13014 }
13015 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13016 == min_hi
13017 && TREE_INT_CST_LOW (arg1) == min_lo)
13018 switch (code)
13019 {
13020 case LT_EXPR:
13021 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13022
13023 case LE_EXPR:
13024 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13025
13026 case GE_EXPR:
13027 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13028
13029 case GT_EXPR:
13030 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13031
13032 default:
13033 break;
13034 }
13035 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13036 == min_hi
13037 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13038 switch (code)
13039 {
13040 case GE_EXPR:
13041 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13042 return fold_build2_loc (loc, NE_EXPR, type,
13043 fold_convert_loc (loc,
13044 TREE_TYPE (arg1), arg0),
13045 arg1);
13046 case LT_EXPR:
13047 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13048 return fold_build2_loc (loc, EQ_EXPR, type,
13049 fold_convert_loc (loc, TREE_TYPE (arg1),
13050 arg0),
13051 arg1);
13052 default:
13053 break;
13054 }
13055
13056 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13057 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13058 && TYPE_UNSIGNED (arg1_type)
13059 /* We will flip the signedness of the comparison operator
13060 associated with the mode of arg1, so the sign bit is
13061 specified by this mode. Check that arg1 is the signed
13062 max associated with this sign bit. */
13063 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13064 /* signed_type does not work on pointer types. */
13065 && INTEGRAL_TYPE_P (arg1_type))
13066 {
13067 /* The following case also applies to X < signed_max+1
13068 		   and X >= signed_max+1 because of previous transformations. */
13069 if (code == LE_EXPR || code == GT_EXPR)
13070 {
13071 tree st;
13072 st = signed_type_for (TREE_TYPE (arg1));
13073 return fold_build2_loc (loc,
13074 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13075 type, fold_convert_loc (loc, st, arg0),
13076 build_int_cst (st, 0));
13077 }
13078 }
13079 }
13080 }
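	      /* Illustrative sketches of the bound tests above (hypothetical
	         32-bit x):
	           signed:    x >  INT_MAX      =>  false
	                      x >= INT_MAX      =>  x == INT_MAX
	                      x <  INT_MIN      =>  false
	                      x <= INT_MIN      =>  x == INT_MIN
	           unsigned:  x >  0x7fffffffU  =>  (int) x < 0  */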
13081
13082 /* If we are comparing an ABS_EXPR with a constant, we can
13083 convert all the cases into explicit comparisons, but they may
13084 well not be faster than doing the ABS and one comparison.
13085 But ABS (X) <= C is a range comparison, which becomes a subtraction
13086 and a comparison, and is probably faster. */
13087 if (code == LE_EXPR
13088 && TREE_CODE (arg1) == INTEGER_CST
13089 && TREE_CODE (arg0) == ABS_EXPR
13090 && ! TREE_SIDE_EFFECTS (arg0)
13091 && (0 != (tem = negate_expr (arg1)))
13092 && TREE_CODE (tem) == INTEGER_CST
13093 && !TREE_OVERFLOW (tem))
13094 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13095 build2 (GE_EXPR, type,
13096 TREE_OPERAND (arg0, 0), tem),
13097 build2 (LE_EXPR, type,
13098 TREE_OPERAND (arg0, 0), arg1));
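	      /* Illustrative sketch (hypothetical x without side effects):
	           abs (x) <= 5   =>   x >= -5 && x <= 5  */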
13099
13100 /* Convert ABS_EXPR<x> >= 0 to true. */
13101 strict_overflow_p = false;
13102 if (code == GE_EXPR
13103 && (integer_zerop (arg1)
13104 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13105 && real_zerop (arg1)))
13106 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13107 {
13108 if (strict_overflow_p)
13109 fold_overflow_warning (("assuming signed overflow does not occur "
13110 "when simplifying comparison of "
13111 "absolute value and zero"),
13112 WARN_STRICT_OVERFLOW_CONDITIONAL);
13113 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13114 }
13115
13116 /* Convert ABS_EXPR<x> < 0 to false. */
13117 strict_overflow_p = false;
13118 if (code == LT_EXPR
13119 && (integer_zerop (arg1) || real_zerop (arg1))
13120 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13121 {
13122 if (strict_overflow_p)
13123 fold_overflow_warning (("assuming signed overflow does not occur "
13124 "when simplifying comparison of "
13125 "absolute value and zero"),
13126 WARN_STRICT_OVERFLOW_CONDITIONAL);
13127 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13128 }
13129
13130 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13131 and similarly for >= into !=. */
13132 if ((code == LT_EXPR || code == GE_EXPR)
13133 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13134 && TREE_CODE (arg1) == LSHIFT_EXPR
13135 && integer_onep (TREE_OPERAND (arg1, 0)))
13136 {
13137 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13138 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13139 TREE_OPERAND (arg1, 1)),
13140 build_int_cst (TREE_TYPE (arg0), 0));
13141 goto fold_binary_exit;
13142 }
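	      /* Illustrative sketch (hypothetical unsigned x, y):
	           x <  (1U << y)   =>   (x >> y) == 0
	           x >= (1U << y)   =>   (x >> y) != 0  */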
13143
13144 if ((code == LT_EXPR || code == GE_EXPR)
13145 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13146 && CONVERT_EXPR_P (arg1)
13147 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13148 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13149 {
13150 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13151 fold_convert_loc (loc, TREE_TYPE (arg0),
13152 build2 (RSHIFT_EXPR,
13153 TREE_TYPE (arg0), arg0,
13154 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13155 1))),
13156 build_int_cst (TREE_TYPE (arg0), 0));
13157 goto fold_binary_exit;
13158 }
13159
13160 return NULL_TREE;
13161
13162 case UNORDERED_EXPR:
13163 case ORDERED_EXPR:
13164 case UNLT_EXPR:
13165 case UNLE_EXPR:
13166 case UNGT_EXPR:
13167 case UNGE_EXPR:
13168 case UNEQ_EXPR:
13169 case LTGT_EXPR:
13170 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13171 {
13172 t1 = fold_relational_const (code, type, arg0, arg1);
13173 if (t1 != NULL_TREE)
13174 return t1;
13175 }
13176
13177 /* If the first operand is NaN, the result is constant. */
13178 if (TREE_CODE (arg0) == REAL_CST
13179 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13180 && (code != LTGT_EXPR || ! flag_trapping_math))
13181 {
13182 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13183 ? integer_zero_node
13184 : integer_one_node;
13185 return omit_one_operand_loc (loc, type, t1, arg1);
13186 }
13187
13188 /* If the second operand is NaN, the result is constant. */
13189 if (TREE_CODE (arg1) == REAL_CST
13190 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13191 && (code != LTGT_EXPR || ! flag_trapping_math))
13192 {
13193 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13194 ? integer_zero_node
13195 : integer_one_node;
13196 return omit_one_operand_loc (loc, type, t1, arg0);
13197 }
13198
13199 /* Simplify unordered comparison of something with itself. */
13200 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13201 && operand_equal_p (arg0, arg1, 0))
13202 return constant_boolean_node (1, type);
13203
13204 if (code == LTGT_EXPR
13205 && !flag_trapping_math
13206 && operand_equal_p (arg0, arg1, 0))
13207 return constant_boolean_node (0, type);
13208
13209 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13210 {
13211 tree targ0 = strip_float_extensions (arg0);
13212 tree targ1 = strip_float_extensions (arg1);
13213 tree newtype = TREE_TYPE (targ0);
13214
13215 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13216 newtype = TREE_TYPE (targ1);
13217
13218 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13219 return fold_build2_loc (loc, code, type,
13220 fold_convert_loc (loc, newtype, targ0),
13221 fold_convert_loc (loc, newtype, targ1));
13222 }
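	      /* Illustrative sketch (hypothetical floats f, g):
	           (double) f < (double) g   =>   f < g
	         The widened comparison is exact, so comparing in the narrower
	         common type gives the same result.  */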
13223
13224 return NULL_TREE;
13225
13226 case COMPOUND_EXPR:
13227 /* When pedantic, a compound expression can be neither an lvalue
13228 nor an integer constant expression. */
13229 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13230 return NULL_TREE;
13231       /* Don't let (0, 0) be a null pointer constant. */
13232 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13233 : fold_convert_loc (loc, type, arg1);
13234 return pedantic_non_lvalue_loc (loc, tem);
13235
13236 case COMPLEX_EXPR:
13237 if ((TREE_CODE (arg0) == REAL_CST
13238 && TREE_CODE (arg1) == REAL_CST)
13239 || (TREE_CODE (arg0) == INTEGER_CST
13240 && TREE_CODE (arg1) == INTEGER_CST))
13241 return build_complex (type, arg0, arg1);
13242 return NULL_TREE;
13243
13244 case ASSERT_EXPR:
13245 /* An ASSERT_EXPR should never be passed to fold_binary. */
13246 gcc_unreachable ();
13247
13248 default:
13249 return NULL_TREE;
13250 } /* switch (code) */
13251 fold_binary_exit:
13252 protected_set_expr_location (tem, loc);
13253 return tem;
13254 }
13255
13256 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13257 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13258 of GOTO_EXPR. */
13259
13260 static tree
13261 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13262 {
13263 switch (TREE_CODE (*tp))
13264 {
13265 case LABEL_EXPR:
13266 return *tp;
13267
13268 case GOTO_EXPR:
13269 *walk_subtrees = 0;
13270
13271 /* ... fall through ... */
13272
13273 default:
13274 return NULL_TREE;
13275 }
13276 }
13277
13278 /* Return whether the sub-tree ST contains a label which is accessible from
13279 outside the sub-tree. */
13280
13281 static bool
13282 contains_label_p (tree st)
13283 {
13284 return
13285     (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13286 }
13287
13288 /* Fold a ternary expression of code CODE and type TYPE with operands
13289 OP0, OP1, and OP2. Return the folded expression if folding is
13290 successful. Otherwise, return NULL_TREE. */
13291
13292 tree
13293 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13294 tree op0, tree op1, tree op2)
13295 {
13296 tree tem;
13297 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13298 enum tree_code_class kind = TREE_CODE_CLASS (code);
13299
13300 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13301 && TREE_CODE_LENGTH (code) == 3);
13302
13303 /* Strip any conversions that don't change the mode. This is safe
13304 for every expression, except for a comparison expression because
13305 its signedness is derived from its operands. So, in the latter
13306 case, only strip conversions that don't change the signedness.
13307
13308 Note that this is done as an internal manipulation within the
13309 constant folder, in order to find the simplest representation of
13310      the arguments so that their form can be studied. In any case,
13311 the appropriate type conversions should be put back in the tree
13312 that will get out of the constant folder. */
13313 if (op0)
13314 {
13315 arg0 = op0;
13316 STRIP_NOPS (arg0);
13317 }
13318
13319 if (op1)
13320 {
13321 arg1 = op1;
13322 STRIP_NOPS (arg1);
13323 }
13324
13325 if (op2)
13326 {
13327 arg2 = op2;
13328 STRIP_NOPS (arg2);
13329 }
13330
13331 switch (code)
13332 {
13333 case COMPONENT_REF:
13334 if (TREE_CODE (arg0) == CONSTRUCTOR
13335 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13336 {
13337 unsigned HOST_WIDE_INT idx;
13338 tree field, value;
13339 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13340 if (field == arg1)
13341 return value;
13342 }
13343 return NULL_TREE;
13344
13345 case COND_EXPR:
13346 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13347 so all simple results must be passed through pedantic_non_lvalue. */
13348 if (TREE_CODE (arg0) == INTEGER_CST)
13349 {
13350 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13351 tem = integer_zerop (arg0) ? op2 : op1;
13352 /* Only optimize constant conditions when the selected branch
13353 has the same type as the COND_EXPR. This avoids optimizing
13354 away "c ? x : throw", where the throw has a void type.
13355 	     Also avoid throwing away an operand that contains a label.  */
13356 if ((!TREE_SIDE_EFFECTS (unused_op)
13357 || !contains_label_p (unused_op))
13358 && (! VOID_TYPE_P (TREE_TYPE (tem))
13359 || VOID_TYPE_P (type)))
13360 return pedantic_non_lvalue_loc (loc, tem);
13361 return NULL_TREE;
13362 }
13363 if (operand_equal_p (arg1, op2, 0))
13364 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13365
13366 /* If we have A op B ? A : C, we may be able to convert this to a
13367 simpler expression, depending on the operation and the values
13368 of B and C. Signed zeros prevent all of these transformations,
13369 for reasons given above each one.
13370
13371 Also try swapping the arguments and inverting the conditional. */
13372 if (COMPARISON_CLASS_P (arg0)
13373 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13374 arg1, TREE_OPERAND (arg0, 1))
13375 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13376 {
13377 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13378 if (tem)
13379 return tem;
13380 }
13381
13382 if (COMPARISON_CLASS_P (arg0)
13383 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13384 op2,
13385 TREE_OPERAND (arg0, 1))
13386 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13387 {
13388 tem = fold_truth_not_expr (loc, arg0);
13389 if (tem && COMPARISON_CLASS_P (tem))
13390 {
13391 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13392 if (tem)
13393 return tem;
13394 }
13395 }
13396
13397 /* If the second operand is simpler than the third, swap them
13398 since that produces better jump optimization results. */
13399 if (truth_value_p (TREE_CODE (arg0))
13400 && tree_swap_operands_p (op1, op2, false))
13401 {
13402 /* See if this can be inverted. If it can't, possibly because
13403 it was a floating-point inequality comparison, don't do
13404 anything. */
13405 tem = fold_truth_not_expr (loc, arg0);
13406 if (tem)
13407 return fold_build3_loc (loc, code, type, tem, op2, op1);
13408 }
13409
13410 /* Convert A ? 1 : 0 to simply A. */
13411 if (integer_onep (op1)
13412 && integer_zerop (op2)
13413 /* If we try to convert OP0 to our type, the
13414 call to fold will try to move the conversion inside
13415 a COND, which will recurse. In that case, the COND_EXPR
13416 is probably the best choice, so leave it alone. */
13417 && type == TREE_TYPE (arg0))
13418 return pedantic_non_lvalue_loc (loc, arg0);
13419
13420 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13421 over COND_EXPR in cases such as floating point comparisons. */
13422 if (integer_zerop (op1)
13423 && integer_onep (op2)
13424 && truth_value_p (TREE_CODE (arg0)))
13425 return pedantic_non_lvalue_loc (loc,
13426 fold_convert_loc (loc, type,
13427 invert_truthvalue_loc (loc,
13428 arg0)));
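      /* As a concrete sketch of the two conversions above, for a truth
	 value A:

	   a ? 1 : 0   becomes   a
	   a ? 0 : 1   becomes   !a

	 the latter so that, e.g., a floating-point comparison folds to a
	 single inverted test instead of a conditional.  */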
13429
13430 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13431 if (TREE_CODE (arg0) == LT_EXPR
13432 && integer_zerop (TREE_OPERAND (arg0, 1))
13433 && integer_zerop (op2)
13434 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13435 {
13436 /* sign_bit_p only checks ARG1 bits within A's precision.
13437 If <sign bit of A> has wider type than A, bits outside
13438 of A's precision in <sign bit of A> need to be checked.
13439 If they are all 0, this optimization needs to be done
13440 	     in unsigned A's type; if they are all 1, in signed A's type;
13441 	     otherwise this can't be done.  */
13442 if (TYPE_PRECISION (TREE_TYPE (tem))
13443 < TYPE_PRECISION (TREE_TYPE (arg1))
13444 && TYPE_PRECISION (TREE_TYPE (tem))
13445 < TYPE_PRECISION (type))
13446 {
13447 unsigned HOST_WIDE_INT mask_lo;
13448 HOST_WIDE_INT mask_hi;
13449 int inner_width, outer_width;
13450 tree tem_type;
13451
13452 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13453 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13454 if (outer_width > TYPE_PRECISION (type))
13455 outer_width = TYPE_PRECISION (type);
13456
13457 if (outer_width > HOST_BITS_PER_WIDE_INT)
13458 {
13459 mask_hi = ((unsigned HOST_WIDE_INT) -1
13460 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13461 mask_lo = -1;
13462 }
13463 else
13464 {
13465 mask_hi = 0;
13466 mask_lo = ((unsigned HOST_WIDE_INT) -1
13467 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13468 }
13469 if (inner_width > HOST_BITS_PER_WIDE_INT)
13470 {
13471 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13472 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13473 mask_lo = 0;
13474 }
13475 else
13476 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13477 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13478
13479 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13480 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13481 {
13482 tem_type = signed_type_for (TREE_TYPE (tem));
13483 tem = fold_convert_loc (loc, tem_type, tem);
13484 }
13485 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13486 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13487 {
13488 tem_type = unsigned_type_for (TREE_TYPE (tem));
13489 tem = fold_convert_loc (loc, tem_type, tem);
13490 }
13491 else
13492 tem = NULL;
13493 }
13494
13495 if (tem)
13496 return
13497 fold_convert_loc (loc, type,
13498 fold_build2_loc (loc, BIT_AND_EXPR,
13499 TREE_TYPE (tem), tem,
13500 fold_convert_loc (loc,
13501 TREE_TYPE (tem),
13502 arg1)));
13503 }
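	  /* Worked example for the block above (a sketch, not from the
	     sources): for 32-bit int A and a 64-bit constant ARG1,
	     inner_width is 32 and outer_width is 64, so MASK_LO/MASK_HI
	     select ARG1's bits 32..63.  For

	       a < 0 ? 0xffffffff80000000 : 0

	     those bits are all 1, so TEM is folded in signed A's type:
	     (long long) (a & 0x80000000) sign-extends to exactly the two
	     original values.  */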
13504
13505 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13506 already handled above. */
13507 if (TREE_CODE (arg0) == BIT_AND_EXPR
13508 && integer_onep (TREE_OPERAND (arg0, 1))
13509 && integer_zerop (op2)
13510 && integer_pow2p (arg1))
13511 {
13512 tree tem = TREE_OPERAND (arg0, 0);
13513 STRIP_NOPS (tem);
13514 if (TREE_CODE (tem) == RSHIFT_EXPR
13515 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13516 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13517 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13518 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13519 TREE_OPERAND (tem, 0), arg1);
13520 }
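      /* E.g. (a sketch): ((a >> 3) & 1) ? (1 << 3) : 0 tests bit 3 of A
	 and then rebuilds it in place, so it folds directly to a & 8.  */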
13521
13522 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13523 is probably obsolete because the first operand should be a
13524 truth value (that's why we have the two cases above), but let's
13525 leave it in until we can confirm this for all front-ends. */
13526 if (integer_zerop (op2)
13527 && TREE_CODE (arg0) == NE_EXPR
13528 && integer_zerop (TREE_OPERAND (arg0, 1))
13529 && integer_pow2p (arg1)
13530 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13531 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13532 arg1, OEP_ONLY_CONST))
13533 return pedantic_non_lvalue_loc (loc,
13534 fold_convert_loc (loc, type,
13535 TREE_OPERAND (arg0, 0)));
13536
13537 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13538 if (integer_zerop (op2)
13539 && truth_value_p (TREE_CODE (arg0))
13540 && truth_value_p (TREE_CODE (arg1)))
13541 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13542 fold_convert_loc (loc, type, arg0),
13543 arg1);
13544
13545 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13546 if (integer_onep (op2)
13547 && truth_value_p (TREE_CODE (arg0))
13548 && truth_value_p (TREE_CODE (arg1)))
13549 {
13550 	  /* Only perform the transformation if ARG0 is easily inverted.  */
13551 tem = fold_truth_not_expr (loc, arg0);
13552 if (tem)
13553 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13554 fold_convert_loc (loc, type, tem),
13555 arg1);
13556 }
13557
13558 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13559 if (integer_zerop (arg1)
13560 && truth_value_p (TREE_CODE (arg0))
13561 && truth_value_p (TREE_CODE (op2)))
13562 {
13563 	  /* Only perform the transformation if ARG0 is easily inverted.  */
13564 tem = fold_truth_not_expr (loc, arg0);
13565 if (tem)
13566 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13567 fold_convert_loc (loc, type, tem),
13568 op2);
13569 }
13570
13571 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13572 if (integer_onep (arg1)
13573 && truth_value_p (TREE_CODE (arg0))
13574 && truth_value_p (TREE_CODE (op2)))
13575 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13576 fold_convert_loc (loc, type, arg0),
13577 op2);
13578
13579 return NULL_TREE;
13580
13581 case CALL_EXPR:
13582 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13583 of fold_ternary on them. */
13584 gcc_unreachable ();
13585
13586 case BIT_FIELD_REF:
13587 if ((TREE_CODE (arg0) == VECTOR_CST
13588 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13589 && type == TREE_TYPE (TREE_TYPE (arg0)))
13590 {
13591 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13592 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13593
13594 if (width != 0
13595 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13596 && (idx % width) == 0
13597 && (idx = idx / width)
13598 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13599 {
13600 tree elements = NULL_TREE;
13601
13602 if (TREE_CODE (arg0) == VECTOR_CST)
13603 elements = TREE_VECTOR_CST_ELTS (arg0);
13604 else
13605 {
13606 unsigned HOST_WIDE_INT idx;
13607 tree value;
13608
13609 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13610 elements = tree_cons (NULL_TREE, value, elements);
13611 }
13612 while (idx-- > 0 && elements)
13613 elements = TREE_CHAIN (elements);
13614 if (elements)
13615 return TREE_VALUE (elements);
13616 else
13617 return build_zero_cst (type);
13618 }
13619 }
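      /* For instance (hypothetical): with a V4SI VECTOR_CST
	 {10, 20, 30, 40}, a BIT_FIELD_REF of width 32 at bit position 64
	 selects element 64 / 32 = 2 and folds to the constant 30.  */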
13620
13621 	  /* A bit-field-ref that references the full argument can be stripped.  */
13622 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13623 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13624 && integer_zerop (op2))
13625 return fold_convert_loc (loc, type, arg0);
13626
13627 return NULL_TREE;
13628
13629 case FMA_EXPR:
13630 /* For integers we can decompose the FMA if possible. */
13631 if (TREE_CODE (arg0) == INTEGER_CST
13632 && TREE_CODE (arg1) == INTEGER_CST)
13633 return fold_build2_loc (loc, PLUS_EXPR, type,
13634 const_binop (MULT_EXPR, arg0, arg1), arg2);
13635 if (integer_zerop (arg2))
13636 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13637
13638 return fold_fma (loc, type, arg0, arg1, arg2);
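      /* E.g. FMA_EXPR <2, 3, 4> with integer operands decomposes above
	 into 2 * 3 + 4 and folds to 10, and FMA_EXPR <x, y, 0> becomes
	 the plain product x * y.  */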
13639
13640 default:
13641 return NULL_TREE;
13642 } /* switch (code) */
13643 }
13644
13645 /* Perform constant folding and related simplification of EXPR.
13646 The related simplifications include x*1 => x, x*0 => 0, etc.,
13647 and application of the associative law.
13648 NOP_EXPR conversions may be removed freely (as long as we
13649 are careful not to change the type of the overall expression).
13650 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13651 but we can constant-fold them if they have constant operands. */
13652
13653 #ifdef ENABLE_FOLD_CHECKING
13654 # define fold(x) fold_1 (x)
13655 static tree fold_1 (tree);
13656 static
13657 #endif
13658 tree
13659 fold (tree expr)
13660 {
13661 const tree t = expr;
13662 enum tree_code code = TREE_CODE (t);
13663 enum tree_code_class kind = TREE_CODE_CLASS (code);
13664 tree tem;
13665 location_t loc = EXPR_LOCATION (expr);
13666
13667 /* Return right away if a constant. */
13668 if (kind == tcc_constant)
13669 return t;
13670
13671 /* CALL_EXPR-like objects with variable numbers of operands are
13672 treated specially. */
13673 if (kind == tcc_vl_exp)
13674 {
13675 if (code == CALL_EXPR)
13676 {
13677 tem = fold_call_expr (loc, expr, false);
13678 return tem ? tem : expr;
13679 }
13680 return expr;
13681 }
13682
13683 if (IS_EXPR_CODE_CLASS (kind))
13684 {
13685 tree type = TREE_TYPE (t);
13686 tree op0, op1, op2;
13687
13688 switch (TREE_CODE_LENGTH (code))
13689 {
13690 case 1:
13691 op0 = TREE_OPERAND (t, 0);
13692 tem = fold_unary_loc (loc, code, type, op0);
13693 return tem ? tem : expr;
13694 case 2:
13695 op0 = TREE_OPERAND (t, 0);
13696 op1 = TREE_OPERAND (t, 1);
13697 tem = fold_binary_loc (loc, code, type, op0, op1);
13698 return tem ? tem : expr;
13699 case 3:
13700 op0 = TREE_OPERAND (t, 0);
13701 op1 = TREE_OPERAND (t, 1);
13702 op2 = TREE_OPERAND (t, 2);
13703 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13704 return tem ? tem : expr;
13705 default:
13706 break;
13707 }
13708 }
13709
13710 switch (code)
13711 {
13712 case ARRAY_REF:
13713 {
13714 tree op0 = TREE_OPERAND (t, 0);
13715 tree op1 = TREE_OPERAND (t, 1);
13716
13717 if (TREE_CODE (op1) == INTEGER_CST
13718 && TREE_CODE (op0) == CONSTRUCTOR
13719 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13720 {
13721 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13722 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13723 unsigned HOST_WIDE_INT begin = 0;
13724
13725 /* Find a matching index by means of a binary search. */
13726 while (begin != end)
13727 {
13728 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13729 tree index = VEC_index (constructor_elt, elts, middle)->index;
13730
13731 if (TREE_CODE (index) == INTEGER_CST
13732 && tree_int_cst_lt (index, op1))
13733 begin = middle + 1;
13734 else if (TREE_CODE (index) == INTEGER_CST
13735 && tree_int_cst_lt (op1, index))
13736 end = middle;
13737 else if (TREE_CODE (index) == RANGE_EXPR
13738 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13739 begin = middle + 1;
13740 else if (TREE_CODE (index) == RANGE_EXPR
13741 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13742 end = middle;
13743 else
13744 return VEC_index (constructor_elt, elts, middle)->value;
13745 }
13746 }
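	/* For example (a sketch): indexing a constant CONSTRUCTOR
	   {10, 20, 30, 40} with the INTEGER_CST 2 binary-searches the
	   element vector, stepping over any RANGE_EXPR indices such as
	   [1 ... 3], and yields the constant 30.  */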
13747
13748 return t;
13749 }
13750
13751 case CONST_DECL:
13752 return fold (DECL_INITIAL (t));
13753
13754 default:
13755 return t;
13756 } /* switch (code) */
13757 }
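/* A minimal usage sketch (hypothetical caller, not from this file):

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
			      build_int_cst (integer_type_node, 1),
			      build_int_cst (integer_type_node, 2)));

   dispatches through fold_binary_loc above and yields the INTEGER_CST 3;
   callers that know the arity up front use the fold_buildN entry points
   defined below instead.  */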
13758
13759 #ifdef ENABLE_FOLD_CHECKING
13760 #undef fold
13761
13762 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13763 static void fold_check_failed (const_tree, const_tree);
13764 void print_fold_checksum (const_tree);
13765
13766 /* When --enable-checking=fold is used, compute a digest of EXPR before
13767    and after the actual fold call, to check that fold did not
13768    accidentally change the original EXPR.  */
13769
13770 tree
13771 fold (tree expr)
13772 {
13773 tree ret;
13774 struct md5_ctx ctx;
13775 unsigned char checksum_before[16], checksum_after[16];
13776 htab_t ht;
13777
13778 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13779 md5_init_ctx (&ctx);
13780 fold_checksum_tree (expr, &ctx, ht);
13781 md5_finish_ctx (&ctx, checksum_before);
13782 htab_empty (ht);
13783
13784 ret = fold_1 (expr);
13785
13786 md5_init_ctx (&ctx);
13787 fold_checksum_tree (expr, &ctx, ht);
13788 md5_finish_ctx (&ctx, checksum_after);
13789 htab_delete (ht);
13790
13791 if (memcmp (checksum_before, checksum_after, 16))
13792 fold_check_failed (expr, ret);
13793
13794 return ret;
13795 }
13796
13797 void
13798 print_fold_checksum (const_tree expr)
13799 {
13800 struct md5_ctx ctx;
13801 unsigned char checksum[16], cnt;
13802 htab_t ht;
13803
13804 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13805 md5_init_ctx (&ctx);
13806 fold_checksum_tree (expr, &ctx, ht);
13807 md5_finish_ctx (&ctx, checksum);
13808 htab_delete (ht);
13809 for (cnt = 0; cnt < 16; ++cnt)
13810 fprintf (stderr, "%02x", checksum[cnt]);
13811 putc ('\n', stderr);
13812 }
13813
13814 static void
13815 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13816 {
13817 internal_error ("fold check: original tree changed by fold");
13818 }
13819
13820 static void
13821 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13822 {
13823 void **slot;
13824 enum tree_code code;
13825 union tree_node buf;
13826 int i, len;
13827
13828 recursive_label:
13829
13830 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13831 <= sizeof (struct tree_function_decl))
13832 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13833 if (expr == NULL)
13834 return;
13835 slot = (void **) htab_find_slot (ht, expr, INSERT);
13836 if (*slot != NULL)
13837 return;
13838 *slot = CONST_CAST_TREE (expr);
13839 code = TREE_CODE (expr);
13840 if (TREE_CODE_CLASS (code) == tcc_declaration
13841 && DECL_ASSEMBLER_NAME_SET_P (expr))
13842 {
13843 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13844 memcpy ((char *) &buf, expr, tree_size (expr));
13845 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13846 expr = (tree) &buf;
13847 }
13848 else if (TREE_CODE_CLASS (code) == tcc_type
13849 && (TYPE_POINTER_TO (expr)
13850 || TYPE_REFERENCE_TO (expr)
13851 || TYPE_CACHED_VALUES_P (expr)
13852 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13853 || TYPE_NEXT_VARIANT (expr)))
13854 {
13855 /* Allow these fields to be modified. */
13856 tree tmp;
13857 memcpy ((char *) &buf, expr, tree_size (expr));
13858 expr = tmp = (tree) &buf;
13859 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13860 TYPE_POINTER_TO (tmp) = NULL;
13861 TYPE_REFERENCE_TO (tmp) = NULL;
13862 TYPE_NEXT_VARIANT (tmp) = NULL;
13863 if (TYPE_CACHED_VALUES_P (tmp))
13864 {
13865 TYPE_CACHED_VALUES_P (tmp) = 0;
13866 TYPE_CACHED_VALUES (tmp) = NULL;
13867 }
13868 }
13869 md5_process_bytes (expr, tree_size (expr), ctx);
13870 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13871 if (TREE_CODE_CLASS (code) != tcc_type
13872 && TREE_CODE_CLASS (code) != tcc_declaration
13873 && code != TREE_LIST
13874 && code != SSA_NAME)
13875 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13876 switch (TREE_CODE_CLASS (code))
13877 {
13878 case tcc_constant:
13879 switch (code)
13880 {
13881 case STRING_CST:
13882 md5_process_bytes (TREE_STRING_POINTER (expr),
13883 TREE_STRING_LENGTH (expr), ctx);
13884 break;
13885 case COMPLEX_CST:
13886 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13887 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13888 break;
13889 case VECTOR_CST:
13890 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13891 break;
13892 default:
13893 break;
13894 }
13895 break;
13896 case tcc_exceptional:
13897 switch (code)
13898 {
13899 case TREE_LIST:
13900 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13901 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13902 expr = TREE_CHAIN (expr);
13903 goto recursive_label;
13904 break;
13905 case TREE_VEC:
13906 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13907 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13908 break;
13909 default:
13910 break;
13911 }
13912 break;
13913 case tcc_expression:
13914 case tcc_reference:
13915 case tcc_comparison:
13916 case tcc_unary:
13917 case tcc_binary:
13918 case tcc_statement:
13919 case tcc_vl_exp:
13920 len = TREE_OPERAND_LENGTH (expr);
13921 for (i = 0; i < len; ++i)
13922 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13923 break;
13924 case tcc_declaration:
13925 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13926 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13927 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13928 {
13929 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13930 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13931 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13932 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13933 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13934 }
13935 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13936 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13937
13938 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13939 {
13940 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13941 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13942 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13943 }
13944 break;
13945 case tcc_type:
13946 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13947 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13948 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13949 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13950 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13951 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13952 if (INTEGRAL_TYPE_P (expr)
13953 || SCALAR_FLOAT_TYPE_P (expr))
13954 {
13955 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13956 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13957 }
13958 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13959 if (TREE_CODE (expr) == RECORD_TYPE
13960 || TREE_CODE (expr) == UNION_TYPE
13961 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13962 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13963 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13964 break;
13965 default:
13966 break;
13967 }
13968 }
13969
13970 /* Helper function for outputting the checksum of a tree T. When
13971 debugging with gdb, you can "define mynext" to be "next" followed
13972 by "call debug_fold_checksum (op0)", then just trace down till the
13973 outputs differ. */
13974
13975 DEBUG_FUNCTION void
13976 debug_fold_checksum (const_tree t)
13977 {
13978 int i;
13979 unsigned char checksum[16];
13980 struct md5_ctx ctx;
13981 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13982
13983 md5_init_ctx (&ctx);
13984 fold_checksum_tree (t, &ctx, ht);
13985 md5_finish_ctx (&ctx, checksum);
13986 htab_empty (ht);
13987
13988 for (i = 0; i < 16; i++)
13989 fprintf (stderr, "%d ", checksum[i]);
13990
13991 fprintf (stderr, "\n");
13992 }
13993
13994 #endif
13995
13996 /* Fold a unary tree expression with code CODE of type TYPE with an
13997 operand OP0. LOC is the location of the resulting expression.
13998 Return a folded expression if successful. Otherwise, return a tree
13999 expression with code CODE of type TYPE with an operand OP0. */
14000
14001 tree
14002 fold_build1_stat_loc (location_t loc,
14003 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14004 {
14005 tree tem;
14006 #ifdef ENABLE_FOLD_CHECKING
14007 unsigned char checksum_before[16], checksum_after[16];
14008 struct md5_ctx ctx;
14009 htab_t ht;
14010
14011 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14012 md5_init_ctx (&ctx);
14013 fold_checksum_tree (op0, &ctx, ht);
14014 md5_finish_ctx (&ctx, checksum_before);
14015 htab_empty (ht);
14016 #endif
14017
14018 tem = fold_unary_loc (loc, code, type, op0);
14019 if (!tem)
14020 {
14021 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14022 SET_EXPR_LOCATION (tem, loc);
14023 }
14024
14025 #ifdef ENABLE_FOLD_CHECKING
14026 md5_init_ctx (&ctx);
14027 fold_checksum_tree (op0, &ctx, ht);
14028 md5_finish_ctx (&ctx, checksum_after);
14029 htab_delete (ht);
14030
14031 if (memcmp (checksum_before, checksum_after, 16))
14032 fold_check_failed (op0, tem);
14033 #endif
14034 return tem;
14035 }
14036
14037 /* Fold a binary tree expression with code CODE of type TYPE with
14038 operands OP0 and OP1. LOC is the location of the resulting
14039 expression. Return a folded expression if successful. Otherwise,
14040 return a tree expression with code CODE of type TYPE with operands
14041 OP0 and OP1. */
14042
14043 tree
14044 fold_build2_stat_loc (location_t loc,
14045 enum tree_code code, tree type, tree op0, tree op1
14046 MEM_STAT_DECL)
14047 {
14048 tree tem;
14049 #ifdef ENABLE_FOLD_CHECKING
14050 unsigned char checksum_before_op0[16],
14051 checksum_before_op1[16],
14052 checksum_after_op0[16],
14053 checksum_after_op1[16];
14054 struct md5_ctx ctx;
14055 htab_t ht;
14056
14057 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14058 md5_init_ctx (&ctx);
14059 fold_checksum_tree (op0, &ctx, ht);
14060 md5_finish_ctx (&ctx, checksum_before_op0);
14061 htab_empty (ht);
14062
14063 md5_init_ctx (&ctx);
14064 fold_checksum_tree (op1, &ctx, ht);
14065 md5_finish_ctx (&ctx, checksum_before_op1);
14066 htab_empty (ht);
14067 #endif
14068
14069 tem = fold_binary_loc (loc, code, type, op0, op1);
14070 if (!tem)
14071 {
14072 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14073 SET_EXPR_LOCATION (tem, loc);
14074 }
14075
14076 #ifdef ENABLE_FOLD_CHECKING
14077 md5_init_ctx (&ctx);
14078 fold_checksum_tree (op0, &ctx, ht);
14079 md5_finish_ctx (&ctx, checksum_after_op0);
14080 htab_empty (ht);
14081
14082 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14083 fold_check_failed (op0, tem);
14084
14085 md5_init_ctx (&ctx);
14086 fold_checksum_tree (op1, &ctx, ht);
14087 md5_finish_ctx (&ctx, checksum_after_op1);
14088 htab_delete (ht);
14089
14090 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14091 fold_check_failed (op1, tem);
14092 #endif
14093 return tem;
14094 }
14095
14096 /* Fold a ternary tree expression with code CODE of type TYPE with
14097 operands OP0, OP1, and OP2. Return a folded expression if
14098 successful. Otherwise, return a tree expression with code CODE of
14099 type TYPE with operands OP0, OP1, and OP2. */
14100
14101 tree
14102 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14103 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14104 {
14105 tree tem;
14106 #ifdef ENABLE_FOLD_CHECKING
14107 unsigned char checksum_before_op0[16],
14108 checksum_before_op1[16],
14109 checksum_before_op2[16],
14110 checksum_after_op0[16],
14111 checksum_after_op1[16],
14112 checksum_after_op2[16];
14113 struct md5_ctx ctx;
14114 htab_t ht;
14115
14116 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14117 md5_init_ctx (&ctx);
14118 fold_checksum_tree (op0, &ctx, ht);
14119 md5_finish_ctx (&ctx, checksum_before_op0);
14120 htab_empty (ht);
14121
14122 md5_init_ctx (&ctx);
14123 fold_checksum_tree (op1, &ctx, ht);
14124 md5_finish_ctx (&ctx, checksum_before_op1);
14125 htab_empty (ht);
14126
14127 md5_init_ctx (&ctx);
14128 fold_checksum_tree (op2, &ctx, ht);
14129 md5_finish_ctx (&ctx, checksum_before_op2);
14130 htab_empty (ht);
14131 #endif
14132
14133 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14134 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14135 if (!tem)
14136 {
14137 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14138 SET_EXPR_LOCATION (tem, loc);
14139 }
14140
14141 #ifdef ENABLE_FOLD_CHECKING
14142 md5_init_ctx (&ctx);
14143 fold_checksum_tree (op0, &ctx, ht);
14144 md5_finish_ctx (&ctx, checksum_after_op0);
14145 htab_empty (ht);
14146
14147 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14148 fold_check_failed (op0, tem);
14149
14150 md5_init_ctx (&ctx);
14151 fold_checksum_tree (op1, &ctx, ht);
14152 md5_finish_ctx (&ctx, checksum_after_op1);
14153 htab_empty (ht);
14154
14155 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14156 fold_check_failed (op1, tem);
14157
14158 md5_init_ctx (&ctx);
14159 fold_checksum_tree (op2, &ctx, ht);
14160 md5_finish_ctx (&ctx, checksum_after_op2);
14161 htab_delete (ht);
14162
14163 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14164 fold_check_failed (op2, tem);
14165 #endif
14166 return tem;
14167 }
14168
14169 /* Fold a CALL_EXPR expression of type TYPE with function operand FN,
14170    NARGS arguments in ARGARRAY, and a null static chain.
14171 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14172 of type TYPE from the given operands as constructed by build_call_array. */
14173
14174 tree
14175 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14176 int nargs, tree *argarray)
14177 {
14178 tree tem;
14179 #ifdef ENABLE_FOLD_CHECKING
14180 unsigned char checksum_before_fn[16],
14181 checksum_before_arglist[16],
14182 checksum_after_fn[16],
14183 checksum_after_arglist[16];
14184 struct md5_ctx ctx;
14185 htab_t ht;
14186 int i;
14187
14188 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14189 md5_init_ctx (&ctx);
14190 fold_checksum_tree (fn, &ctx, ht);
14191 md5_finish_ctx (&ctx, checksum_before_fn);
14192 htab_empty (ht);
14193
14194 md5_init_ctx (&ctx);
14195 for (i = 0; i < nargs; i++)
14196 fold_checksum_tree (argarray[i], &ctx, ht);
14197 md5_finish_ctx (&ctx, checksum_before_arglist);
14198 htab_empty (ht);
14199 #endif
14200
14201 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14202
14203 #ifdef ENABLE_FOLD_CHECKING
14204 md5_init_ctx (&ctx);
14205 fold_checksum_tree (fn, &ctx, ht);
14206 md5_finish_ctx (&ctx, checksum_after_fn);
14207 htab_empty (ht);
14208
14209 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14210 fold_check_failed (fn, tem);
14211
14212 md5_init_ctx (&ctx);
14213 for (i = 0; i < nargs; i++)
14214 fold_checksum_tree (argarray[i], &ctx, ht);
14215 md5_finish_ctx (&ctx, checksum_after_arglist);
14216 htab_delete (ht);
14217
14218 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14219 fold_check_failed (NULL_TREE, tem);
14220 #endif
14221 return tem;
14222 }
14223
14224 /* The following functions perform constant folding and related
14225    simplification of an initializer expression EXPR.  They behave identically
14226    to "fold_buildN" but ignore potential run-time traps and exceptions
      that fold must preserve.  */
14227
14228 #define START_FOLD_INIT \
14229 int saved_signaling_nans = flag_signaling_nans;\
14230 int saved_trapping_math = flag_trapping_math;\
14231 int saved_rounding_math = flag_rounding_math;\
14232 int saved_trapv = flag_trapv;\
14233 int saved_folding_initializer = folding_initializer;\
14234 flag_signaling_nans = 0;\
14235 flag_trapping_math = 0;\
14236 flag_rounding_math = 0;\
14237 flag_trapv = 0;\
14238 folding_initializer = 1;
14239
14240 #define END_FOLD_INIT \
14241 flag_signaling_nans = saved_signaling_nans;\
14242 flag_trapping_math = saved_trapping_math;\
14243 flag_rounding_math = saved_rounding_math;\
14244 flag_trapv = saved_trapv;\
14245 folding_initializer = saved_folding_initializer;
14246
14247 tree
14248 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14249 tree type, tree op)
14250 {
14251 tree result;
14252 START_FOLD_INIT;
14253
14254 result = fold_build1_loc (loc, code, type, op);
14255
14256 END_FOLD_INIT;
14257 return result;
14258 }
14259
14260 tree
14261 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14262 tree type, tree op0, tree op1)
14263 {
14264 tree result;
14265 START_FOLD_INIT;
14266
14267 result = fold_build2_loc (loc, code, type, op0, op1);
14268
14269 END_FOLD_INIT;
14270 return result;
14271 }
14272
14273 tree
14274 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14275 tree type, tree op0, tree op1, tree op2)
14276 {
14277 tree result;
14278 START_FOLD_INIT;
14279
14280 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14281
14282 END_FOLD_INIT;
14283 return result;
14284 }
14285
14286 tree
14287 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14288 int nargs, tree *argarray)
14289 {
14290 tree result;
14291 START_FOLD_INIT;
14292
14293 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14294
14295 END_FOLD_INIT;
14296 return result;
14297 }
14298
14299 #undef START_FOLD_INIT
14300 #undef END_FOLD_INIT
14301
14302 /* Determine if the first argument is a multiple of the second argument.
14303    Return 0 if it is not, or if we cannot easily determine it to be.
14304
14305 An example of the sort of thing we care about (at this point; this routine
14306 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14307 fold cases do now) is discovering that
14308
14309 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14310
14311 is a multiple of
14312
14313 SAVE_EXPR (J * 8)
14314
14315 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14316
14317 This code also handles discovering that
14318
14319 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14320
14321 is a multiple of 8 so we don't have to worry about dealing with a
14322 possible remainder.
14323
14324 Note that we *look* inside a SAVE_EXPR only to determine how it was
14325 calculated; it is not safe for fold to do much of anything else with the
14326 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14327 at run time. For example, the latter example above *cannot* be implemented
14328 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14329 evaluation time of the original SAVE_EXPR is not necessarily the same at
14330 the time the new expression is evaluated. The only optimization of this
14331 sort that would be valid is changing
14332
14333 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14334
14335 divided by 8 to
14336
14337 SAVE_EXPR (I) * SAVE_EXPR (J)
14338
14339 (where the same SAVE_EXPR (J) is used in the original and the
14340 transformed version). */
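/* For example (an illustrative sketch, with sizetype operands):

     multiple_of_p (sizetype, J * 8, 8)       -- 1, via the MULT_EXPR case
     multiple_of_p (sizetype, I + J * 8, 8)   -- 0, since I is unknown
     multiple_of_p (sizetype, 24, 8)          -- 1, as 24 % 8 == 0

   where the last line is the INTEGER_CST case's TRUNC_MOD_EXPR test.  */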
14341
14342 int
14343 multiple_of_p (tree type, const_tree top, const_tree bottom)
14344 {
14345 if (operand_equal_p (top, bottom, 0))
14346 return 1;
14347
14348 if (TREE_CODE (type) != INTEGER_TYPE)
14349 return 0;
14350
14351 switch (TREE_CODE (top))
14352 {
14353 case BIT_AND_EXPR:
14354       /* Bitwise AND provides a power-of-two multiple.  If the mask is
14355 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
14356 if (!integer_pow2p (bottom))
14357 return 0;
14358 /* FALLTHRU */
14359
14360 case MULT_EXPR:
14361 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14362 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14363
14364 case PLUS_EXPR:
14365 case MINUS_EXPR:
14366 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14367 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14368
14369 case LSHIFT_EXPR:
14370 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14371 {
14372 tree op1, t1;
14373
14374 op1 = TREE_OPERAND (top, 1);
14375 /* const_binop may not detect overflow correctly,
14376 so check for it explicitly here. */
14377 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14378 > TREE_INT_CST_LOW (op1)
14379 && TREE_INT_CST_HIGH (op1) == 0
14380 && 0 != (t1 = fold_convert (type,
14381 const_binop (LSHIFT_EXPR,
14382 size_one_node,
14383 op1)))
14384 && !TREE_OVERFLOW (t1))
14385 return multiple_of_p (type, t1, bottom);
14386 }
14387 return 0;
14388
14389 case NOP_EXPR:
14390 /* Can't handle conversions from non-integral or wider integral type. */
14391 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14392 || (TYPE_PRECISION (type)
14393 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14394 return 0;
14395
14396       /* ... fall through ...  */
14397
14398 case SAVE_EXPR:
14399 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14400
14401 case COND_EXPR:
14402 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14403 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14404
14405 case INTEGER_CST:
14406 if (TREE_CODE (bottom) != INTEGER_CST
14407 || integer_zerop (bottom)
14408 || (TYPE_UNSIGNED (type)
14409 && (tree_int_cst_sgn (top) < 0
14410 || tree_int_cst_sgn (bottom) < 0)))
14411 return 0;
14412 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14413 top, bottom, 0));
14414
14415 default:
14416 return 0;
14417 }
14418 }
14419
14420 /* Return true if an expression with code CODE and type TYPE is known to be non-negative.  */
14421
14422 static bool
14423 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14424 {
14425 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14426 && truth_value_p (code))
14427     /* Truth values evaluate to 0 or 1, which are nonnegative unless we
14428        have a signed:1 type (where the values are -1 and 0).  */
14429 return true;
14430 return false;
14431 }
14432
14433 /* Return true if (CODE OP0) is known to be non-negative. If the return
14434 value is based on the assumption that signed overflow is undefined,
14435 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14436 *STRICT_OVERFLOW_P. */
14437
14438 bool
14439 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14440 bool *strict_overflow_p)
14441 {
14442 if (TYPE_UNSIGNED (type))
14443 return true;
14444
14445 switch (code)
14446 {
14447 case ABS_EXPR:
14448 /* We can't return 1 if flag_wrapv is set because
14449 ABS_EXPR<INT_MIN> = INT_MIN. */
14450 if (!INTEGRAL_TYPE_P (type))
14451 return true;
14452 if (TYPE_OVERFLOW_UNDEFINED (type))
14453 {
14454 *strict_overflow_p = true;
14455 return true;
14456 }
14457 break;
14458
14459 case NON_LVALUE_EXPR:
14460 case FLOAT_EXPR:
14461 case FIX_TRUNC_EXPR:
14462 return tree_expr_nonnegative_warnv_p (op0,
14463 strict_overflow_p);
14464
14465 case NOP_EXPR:
14466 {
14467 tree inner_type = TREE_TYPE (op0);
14468 tree outer_type = type;
14469
14470 if (TREE_CODE (outer_type) == REAL_TYPE)
14471 {
14472 if (TREE_CODE (inner_type) == REAL_TYPE)
14473 return tree_expr_nonnegative_warnv_p (op0,
14474 strict_overflow_p);
14475 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14476 {
14477 if (TYPE_UNSIGNED (inner_type))
14478 return true;
14479 return tree_expr_nonnegative_warnv_p (op0,
14480 strict_overflow_p);
14481 }
14482 }
14483 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14484 {
14485 if (TREE_CODE (inner_type) == REAL_TYPE)
14486 return tree_expr_nonnegative_warnv_p (op0,
14487 strict_overflow_p);
14488 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14489 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14490 && TYPE_UNSIGNED (inner_type);
14491 }
14492 }
14493 break;
14494
14495 default:
14496 return tree_simple_nonnegative_warnv_p (code, type);
14497 }
14498
14499   /* We don't know the sign of `t', so be conservative and return false.  */
14500 return false;
14501 }
14502
14503 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14504 value is based on the assumption that signed overflow is undefined,
14505 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14506 *STRICT_OVERFLOW_P. */
14507
14508 bool
14509 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14510 tree op1, bool *strict_overflow_p)
14511 {
14512 if (TYPE_UNSIGNED (type))
14513 return true;
14514
14515 switch (code)
14516 {
14517 case POINTER_PLUS_EXPR:
14518 case PLUS_EXPR:
14519 if (FLOAT_TYPE_P (type))
14520 return (tree_expr_nonnegative_warnv_p (op0,
14521 strict_overflow_p)
14522 && tree_expr_nonnegative_warnv_p (op1,
14523 strict_overflow_p));
14524
14525 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14526 both unsigned and at least 2 bits shorter than the result. */
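      /* E.g. two zero-extended 8-bit values added in 32-bit arithmetic:
	 prec = MAX (8, 8) + 1 = 9 < 32, and indeed 255 + 255 = 510 can
	 never wrap to a negative 32-bit value.  */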
14527 if (TREE_CODE (type) == INTEGER_TYPE
14528 && TREE_CODE (op0) == NOP_EXPR
14529 && TREE_CODE (op1) == NOP_EXPR)
14530 {
14531 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14532 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14533 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14534 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14535 {
14536 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14537 TYPE_PRECISION (inner2)) + 1;
14538 return prec < TYPE_PRECISION (type);
14539 }
14540 }
14541 break;
14542
14543 case MULT_EXPR:
14544 if (FLOAT_TYPE_P (type))
14545 {
14546 /* x * x for floating point x is always non-negative. */
14547 if (operand_equal_p (op0, op1, 0))
14548 return true;
14549 return (tree_expr_nonnegative_warnv_p (op0,
14550 strict_overflow_p)
14551 && tree_expr_nonnegative_warnv_p (op1,
14552 strict_overflow_p));
14553 }
14554
14555 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14556 	 both unsigned and their combined width is shorter than that of the result.  */
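      /* E.g. two zero-extended 8-bit values multiplied in 32-bit
	 arithmetic: 8 + 8 = 16 < 32, and 255 * 255 = 65025 stays
	 non-negative.  */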
14557 if (TREE_CODE (type) == INTEGER_TYPE
14558 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14559 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14560 {
14561 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14562 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14563 : TREE_TYPE (op0);
14564 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14565 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14566 : TREE_TYPE (op1);
14567
14568 bool unsigned0 = TYPE_UNSIGNED (inner0);
14569 bool unsigned1 = TYPE_UNSIGNED (inner1);
14570
14571 if (TREE_CODE (op0) == INTEGER_CST)
14572 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14573
14574 if (TREE_CODE (op1) == INTEGER_CST)
14575 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14576
14577 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14578 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14579 {
14580 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14581 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14582 : TYPE_PRECISION (inner0);
14583
14584 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14585 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14586 : TYPE_PRECISION (inner1);
14587
14588 return precision0 + precision1 < TYPE_PRECISION (type);
14589 }
14590 }
14591 return false;
14592
14593 case BIT_AND_EXPR:
14594 case MAX_EXPR:
14595 return (tree_expr_nonnegative_warnv_p (op0,
14596 strict_overflow_p)
14597 || tree_expr_nonnegative_warnv_p (op1,
14598 strict_overflow_p));
14599
14600 case BIT_IOR_EXPR:
14601 case BIT_XOR_EXPR:
14602 case MIN_EXPR:
14603 case RDIV_EXPR:
14604 case TRUNC_DIV_EXPR:
14605 case CEIL_DIV_EXPR:
14606 case FLOOR_DIV_EXPR:
14607 case ROUND_DIV_EXPR:
14608 return (tree_expr_nonnegative_warnv_p (op0,
14609 strict_overflow_p)
14610 && tree_expr_nonnegative_warnv_p (op1,
14611 strict_overflow_p));
14612
14613 case TRUNC_MOD_EXPR:
14614 case CEIL_MOD_EXPR:
14615 case FLOOR_MOD_EXPR:
14616 case ROUND_MOD_EXPR:
14617 return tree_expr_nonnegative_warnv_p (op0,
14618 strict_overflow_p);
14619 default:
14620 return tree_simple_nonnegative_warnv_p (code, type);
14621 }
14622
14623   /* We don't know the sign of `t', so be conservative and return false.  */
14624 return false;
14625 }
14626
14627 /* Return true if T is known to be non-negative. If the return
14628 value is based on the assumption that signed overflow is undefined,
14629 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14630 *STRICT_OVERFLOW_P. */
14631
14632 bool
14633 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14634 {
14635 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14636 return true;
14637
14638 switch (TREE_CODE (t))
14639 {
14640 case INTEGER_CST:
14641 return tree_int_cst_sgn (t) >= 0;
14642
14643 case REAL_CST:
14644 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14645
14646 case FIXED_CST:
14647 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14648
14649 case COND_EXPR:
14650 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14651 strict_overflow_p)
14652 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14653 strict_overflow_p));
14654 default:
14655 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14656 TREE_TYPE (t));
14657 }
14658   /* We don't know the sign of `t', so be conservative and return false.  */
14659 return false;
14660 }
14661
14662 /* Return true if T is known to be non-negative. If the return
14663 value is based on the assumption that signed overflow is undefined,
14664 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14665 *STRICT_OVERFLOW_P. */
14666
14667 bool
14668 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14669 tree arg0, tree arg1, bool *strict_overflow_p)
14670 {
14671 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14672 switch (DECL_FUNCTION_CODE (fndecl))
14673 {
14674 CASE_FLT_FN (BUILT_IN_ACOS):
14675 CASE_FLT_FN (BUILT_IN_ACOSH):
14676 CASE_FLT_FN (BUILT_IN_CABS):
14677 CASE_FLT_FN (BUILT_IN_COSH):
14678 CASE_FLT_FN (BUILT_IN_ERFC):
14679 CASE_FLT_FN (BUILT_IN_EXP):
14680 CASE_FLT_FN (BUILT_IN_EXP10):
14681 CASE_FLT_FN (BUILT_IN_EXP2):
14682 CASE_FLT_FN (BUILT_IN_FABS):
14683 CASE_FLT_FN (BUILT_IN_FDIM):
14684 CASE_FLT_FN (BUILT_IN_HYPOT):
14685 CASE_FLT_FN (BUILT_IN_POW10):
14686 CASE_INT_FN (BUILT_IN_FFS):
14687 CASE_INT_FN (BUILT_IN_PARITY):
14688 CASE_INT_FN (BUILT_IN_POPCOUNT):
14689 case BUILT_IN_BSWAP32:
14690 case BUILT_IN_BSWAP64:
14691 /* Always true. */
14692 return true;
14693
14694 CASE_FLT_FN (BUILT_IN_SQRT):
14695 /* sqrt(-0.0) is -0.0. */
14696 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14697 return true;
14698 return tree_expr_nonnegative_warnv_p (arg0,
14699 strict_overflow_p);
14700
14701 CASE_FLT_FN (BUILT_IN_ASINH):
14702 CASE_FLT_FN (BUILT_IN_ATAN):
14703 CASE_FLT_FN (BUILT_IN_ATANH):
14704 CASE_FLT_FN (BUILT_IN_CBRT):
14705 CASE_FLT_FN (BUILT_IN_CEIL):
14706 CASE_FLT_FN (BUILT_IN_ERF):
14707 CASE_FLT_FN (BUILT_IN_EXPM1):
14708 CASE_FLT_FN (BUILT_IN_FLOOR):
14709 CASE_FLT_FN (BUILT_IN_FMOD):
14710 CASE_FLT_FN (BUILT_IN_FREXP):
14711 CASE_FLT_FN (BUILT_IN_LCEIL):
14712 CASE_FLT_FN (BUILT_IN_LDEXP):
14713 CASE_FLT_FN (BUILT_IN_LFLOOR):
14714 CASE_FLT_FN (BUILT_IN_LLCEIL):
14715 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14716 CASE_FLT_FN (BUILT_IN_LLRINT):
14717 CASE_FLT_FN (BUILT_IN_LLROUND):
14718 CASE_FLT_FN (BUILT_IN_LRINT):
14719 CASE_FLT_FN (BUILT_IN_LROUND):
14720 CASE_FLT_FN (BUILT_IN_MODF):
14721 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14722 CASE_FLT_FN (BUILT_IN_RINT):
14723 CASE_FLT_FN (BUILT_IN_ROUND):
14724 CASE_FLT_FN (BUILT_IN_SCALB):
14725 CASE_FLT_FN (BUILT_IN_SCALBLN):
14726 CASE_FLT_FN (BUILT_IN_SCALBN):
14727 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14728 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14729 CASE_FLT_FN (BUILT_IN_SINH):
14730 CASE_FLT_FN (BUILT_IN_TANH):
14731 CASE_FLT_FN (BUILT_IN_TRUNC):
14732 /* True if the 1st argument is nonnegative. */
14733 return tree_expr_nonnegative_warnv_p (arg0,
14734 strict_overflow_p);
14735
14736 CASE_FLT_FN (BUILT_IN_FMAX):
14737 /* True if the 1st OR 2nd arguments are nonnegative. */
14738 return (tree_expr_nonnegative_warnv_p (arg0,
14739 strict_overflow_p)
14740 || (tree_expr_nonnegative_warnv_p (arg1,
14741 strict_overflow_p)));
14742
14743 CASE_FLT_FN (BUILT_IN_FMIN):
14744 /* True if the 1st AND 2nd arguments are nonnegative. */
14745 return (tree_expr_nonnegative_warnv_p (arg0,
14746 strict_overflow_p)
14747 && (tree_expr_nonnegative_warnv_p (arg1,
14748 strict_overflow_p)));
14749
14750 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14751 /* True if the 2nd argument is nonnegative. */
14752 return tree_expr_nonnegative_warnv_p (arg1,
14753 strict_overflow_p);
14754
14755 CASE_FLT_FN (BUILT_IN_POWI):
14756 /* True if the 1st argument is nonnegative or the second
14757 argument is an even integer. */
14758 if (TREE_CODE (arg1) == INTEGER_CST
14759 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14760 return true;
14761 return tree_expr_nonnegative_warnv_p (arg0,
14762 strict_overflow_p);
14763
14764 CASE_FLT_FN (BUILT_IN_POW):
14765 /* True if the 1st argument is nonnegative or the second
14766 argument is an even integer valued real. */
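      /* E.g. pow (x, 2.0): the exponent round-trips through
	 real_to_integer below as the even integer 2, so the result is
	 known non-negative for any x.  */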
14767 if (TREE_CODE (arg1) == REAL_CST)
14768 {
14769 REAL_VALUE_TYPE c;
14770 HOST_WIDE_INT n;
14771
14772 c = TREE_REAL_CST (arg1);
14773 n = real_to_integer (&c);
14774 if ((n & 1) == 0)
14775 {
14776 REAL_VALUE_TYPE cint;
14777 real_from_integer (&cint, VOIDmode, n,
14778 n < 0 ? -1 : 0, 0);
14779 if (real_identical (&c, &cint))
14780 return true;
14781 }
14782 }
14783 return tree_expr_nonnegative_warnv_p (arg0,
14784 strict_overflow_p);
14785
14786 default:
14787 break;
14788 }
14789 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14790 type);
14791 }
14792
14793 /* Return true if T is known to be non-negative. If the return
14794 value is based on the assumption that signed overflow is undefined,
14795 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14796 *STRICT_OVERFLOW_P. */
14797
14798 bool
14799 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14800 {
14801 enum tree_code code = TREE_CODE (t);
14802 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14803 return true;
14804
14805 switch (code)
14806 {
14807 case TARGET_EXPR:
14808 {
14809 tree temp = TARGET_EXPR_SLOT (t);
14810 t = TARGET_EXPR_INITIAL (t);
14811
14812 /* If the initializer is non-void, then it's a normal expression
14813 that will be assigned to the slot. */
14814 if (!VOID_TYPE_P (t))
14815 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14816
14817 /* Otherwise, the initializer sets the slot in some way. One common
14818 way is an assignment statement at the end of the initializer. */
14819 while (1)
14820 {
14821 if (TREE_CODE (t) == BIND_EXPR)
14822 t = expr_last (BIND_EXPR_BODY (t));
14823 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14824 || TREE_CODE (t) == TRY_CATCH_EXPR)
14825 t = expr_last (TREE_OPERAND (t, 0));
14826 else if (TREE_CODE (t) == STATEMENT_LIST)
14827 t = expr_last (t);
14828 else
14829 break;
14830 }
14831 if (TREE_CODE (t) == MODIFY_EXPR
14832 && TREE_OPERAND (t, 0) == temp)
14833 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14834 strict_overflow_p);
14835
14836 return false;
14837 }
14838
14839 case CALL_EXPR:
14840 {
14841 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14842 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14843
14844 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14845 get_callee_fndecl (t),
14846 arg0,
14847 arg1,
14848 strict_overflow_p);
14849 }
14850 case COMPOUND_EXPR:
14851 case MODIFY_EXPR:
14852 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14853 strict_overflow_p);
14854 case BIND_EXPR:
14855 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14856 strict_overflow_p);
14857 case SAVE_EXPR:
14858 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14859 strict_overflow_p);
14860
14861 default:
14862 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14863 TREE_TYPE (t));
14864 }
14865
14866   /* We don't know the sign of `t', so be conservative and return false.  */
14867 return false;
14868 }
14869
14870 /* Return true if T is known to be non-negative. If the return
14871 value is based on the assumption that signed overflow is undefined,
14872 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14873 *STRICT_OVERFLOW_P. */
14874
14875 bool
14876 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14877 {
14878 enum tree_code code;
14879 if (t == error_mark_node)
14880 return false;
14881
14882 code = TREE_CODE (t);
14883 switch (TREE_CODE_CLASS (code))
14884 {
14885 case tcc_binary:
14886 case tcc_comparison:
14887 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14888 TREE_TYPE (t),
14889 TREE_OPERAND (t, 0),
14890 TREE_OPERAND (t, 1),
14891 strict_overflow_p);
14892
14893 case tcc_unary:
14894 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14895 TREE_TYPE (t),
14896 TREE_OPERAND (t, 0),
14897 strict_overflow_p);
14898
14899 case tcc_constant:
14900 case tcc_declaration:
14901 case tcc_reference:
14902 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14903
14904 default:
14905 break;
14906 }
14907
14908 switch (code)
14909 {
14910 case TRUTH_AND_EXPR:
14911 case TRUTH_OR_EXPR:
14912 case TRUTH_XOR_EXPR:
14913 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14914 TREE_TYPE (t),
14915 TREE_OPERAND (t, 0),
14916 TREE_OPERAND (t, 1),
14917 strict_overflow_p);
14918 case TRUTH_NOT_EXPR:
14919 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14920 TREE_TYPE (t),
14921 TREE_OPERAND (t, 0),
14922 strict_overflow_p);
14923
14924 case COND_EXPR:
14925 case CONSTRUCTOR:
14926 case OBJ_TYPE_REF:
14927 case ASSERT_EXPR:
14928 case ADDR_EXPR:
14929 case WITH_SIZE_EXPR:
14930 case SSA_NAME:
14931 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14932
14933 default:
14934 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14935 }
14936 }
14937
14938 /* Return true if `t' is known to be non-negative. Handle warnings
14939 about undefined signed overflow. */
14940
14941 bool
14942 tree_expr_nonnegative_p (tree t)
14943 {
14944 bool ret, strict_overflow_p;
14945
14946 strict_overflow_p = false;
14947 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14948 if (strict_overflow_p)
14949 fold_overflow_warning (("assuming signed overflow does not occur when "
14950 "determining that expression is always "
14951 "non-negative"),
14952 WARN_STRICT_OVERFLOW_MISC);
14953 return ret;
14954 }
14955
14956
14957 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14958 For floating point we further ensure that T is not denormal.
14959    Similar logic is present in nonzero_address in rtlanal.c.
14960
14961 If the return value is based on the assumption that signed overflow
14962 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14963 change *STRICT_OVERFLOW_P. */
14964
14965 bool
14966 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14967 bool *strict_overflow_p)
14968 {
14969 switch (code)
14970 {
14971 case ABS_EXPR:
14972 return tree_expr_nonzero_warnv_p (op0,
14973 strict_overflow_p);
14974
14975 case NOP_EXPR:
14976 {
14977 tree inner_type = TREE_TYPE (op0);
14978 tree outer_type = type;
14979
14980 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14981 && tree_expr_nonzero_warnv_p (op0,
14982 strict_overflow_p));
14983 }
14984 break;
14985
14986 case NON_LVALUE_EXPR:
14987 return tree_expr_nonzero_warnv_p (op0,
14988 strict_overflow_p);
14989
14990 default:
14991 break;
14992 }
14993
14994 return false;
14995 }
14996
14997 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14998 For floating point we further ensure that T is not denormal.
14999    Similar logic is present in nonzero_address in rtlanal.c.
15000
15001 If the return value is based on the assumption that signed overflow
15002 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15003 change *STRICT_OVERFLOW_P. */
15004
15005 bool
15006 tree_binary_nonzero_warnv_p (enum tree_code code,
15007 tree type,
15008 tree op0,
15009 tree op1, bool *strict_overflow_p)
15010 {
15011 bool sub_strict_overflow_p;
15012 switch (code)
15013 {
15014 case POINTER_PLUS_EXPR:
15015 case PLUS_EXPR:
15016 if (TYPE_OVERFLOW_UNDEFINED (type))
15017 {
15018 	  /* In the presence of negative values it is hard
15019 	     to say anything.  */
15020 sub_strict_overflow_p = false;
15021 if (!tree_expr_nonnegative_warnv_p (op0,
15022 &sub_strict_overflow_p)
15023 || !tree_expr_nonnegative_warnv_p (op1,
15024 &sub_strict_overflow_p))
15025 return false;
15026 	  /* One of the operands must be positive and the other non-negative.  */
15027 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
15028 	     overflows, on a two's-complement machine the sum of two
15029 	     nonnegative numbers can never be zero.  */
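	  /* E.g. with undefined signed overflow, x >= 0 and y > 0 imply
	     x + y > 0, so once both operands are known non-negative a
	     nonzero test on either one suffices.  */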
15030 return (tree_expr_nonzero_warnv_p (op0,
15031 strict_overflow_p)
15032 || tree_expr_nonzero_warnv_p (op1,
15033 strict_overflow_p));
15034 }
15035 break;
15036
15037 case MULT_EXPR:
15038 if (TYPE_OVERFLOW_UNDEFINED (type))
15039 {
15040 if (tree_expr_nonzero_warnv_p (op0,
15041 strict_overflow_p)
15042 && tree_expr_nonzero_warnv_p (op1,
15043 strict_overflow_p))
15044 {
15045 *strict_overflow_p = true;
15046 return true;
15047 }
15048 }
15049 break;
15050
15051 case MIN_EXPR:
15052 sub_strict_overflow_p = false;
15053 if (tree_expr_nonzero_warnv_p (op0,
15054 &sub_strict_overflow_p)
15055 && tree_expr_nonzero_warnv_p (op1,
15056 &sub_strict_overflow_p))
15057 {
15058 if (sub_strict_overflow_p)
15059 *strict_overflow_p = true;
15060 }
15061 break;
15062
15063 case MAX_EXPR:
15064 sub_strict_overflow_p = false;
15065 if (tree_expr_nonzero_warnv_p (op0,
15066 &sub_strict_overflow_p))
15067 {
15068 if (sub_strict_overflow_p)
15069 *strict_overflow_p = true;
15070
15071 	  /* If both operands are nonzero, then MAX must be too.  */
15072 if (tree_expr_nonzero_warnv_p (op1,
15073 strict_overflow_p))
15074 return true;
15075
15076 /* MAX where operand 0 is positive is positive. */
15077 return tree_expr_nonnegative_warnv_p (op0,
15078 strict_overflow_p);
15079 }
15080 /* MAX where operand 1 is positive is positive. */
15081 else if (tree_expr_nonzero_warnv_p (op1,
15082 &sub_strict_overflow_p)
15083 && tree_expr_nonnegative_warnv_p (op1,
15084 &sub_strict_overflow_p))
15085 {
15086 if (sub_strict_overflow_p)
15087 *strict_overflow_p = true;
15088 return true;
15089 }
15090 break;
15091
15092 case BIT_IOR_EXPR:
15093 return (tree_expr_nonzero_warnv_p (op1,
15094 strict_overflow_p)
15095 || tree_expr_nonzero_warnv_p (op0,
15096 strict_overflow_p));
15097
15098 default:
15099 break;
15100 }
15101
15102 return false;
15103 }
15104
15105 /* Return true when T is an address and is known to be nonzero.
15106 For floating point we further ensure that T is not denormal.
15107    Similar logic is present in nonzero_address in rtlanal.c.
15108
15109 If the return value is based on the assumption that signed overflow
15110 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15111 change *STRICT_OVERFLOW_P. */
15112
15113 bool
15114 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15115 {
15116 bool sub_strict_overflow_p;
15117 switch (TREE_CODE (t))
15118 {
15119 case INTEGER_CST:
15120 return !integer_zerop (t);
15121
15122 case ADDR_EXPR:
15123 {
15124 tree base = TREE_OPERAND (t, 0);
15125 if (!DECL_P (base))
15126 base = get_base_address (base);
15127
15128 if (!base)
15129 return false;
15130
15131 	/* Weak declarations may link to NULL.  Other things may also be NULL,
15132 	   so protect with -fdelete-null-pointer-checks; but variables
15133 	   allocated on the stack never are.  */
15134 if (DECL_P (base)
15135 && (flag_delete_null_pointer_checks
15136 || (DECL_CONTEXT (base)
15137 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15138 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15139 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15140
15141 /* Constants are never weak. */
15142 if (CONSTANT_CLASS_P (base))
15143 return true;
15144
15145 return false;
15146 }
15147
15148 case COND_EXPR:
15149 sub_strict_overflow_p = false;
15150 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15151 &sub_strict_overflow_p)
15152 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15153 &sub_strict_overflow_p))
15154 {
15155 if (sub_strict_overflow_p)
15156 *strict_overflow_p = true;
15157 return true;
15158 }
15159 break;
15160
15161 default:
15162 break;
15163 }
15164 return false;
15165 }
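
/* Illustrative sketch (not part of the build; the function name and the
   VAR_DECL parameter are hypothetical): a zero INTEGER_CST is rejected,
   while the address of a non-weak, non-stack declaration is accepted
   when -fdelete-null-pointer-checks is in effect.  */
#if 0
static void
example_single_nonzero_p (tree var_decl)
{
  bool strict_overflow_p = false;
  tree addr = build_fold_addr_expr (var_decl);

  gcc_assert (!tree_single_nonzero_warnv_p (integer_zero_node,
                                            &strict_overflow_p));
  if (tree_single_nonzero_warnv_p (addr, &strict_overflow_p))
    {
      /* ADDR is provably not NULL here.  */
    }
}
#endif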
15166
15167 /* Return true when T, an expression of integral or pointer type,
15168 is known to be nonzero.  Similar logic is present in
15169 nonzero_address in rtlanal.c.
15170
15171 If the return value is based on the assumption that signed overflow
15172 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15173 change *STRICT_OVERFLOW_P. */
15174
15175 bool
15176 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15177 {
15178 tree type = TREE_TYPE (t);
15179 enum tree_code code;
15180
15181 /* Doing something useful for floating point would need more work. */
15182 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15183 return false;
15184
15185 code = TREE_CODE (t);
15186 switch (TREE_CODE_CLASS (code))
15187 {
15188 case tcc_unary:
15189 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15190 strict_overflow_p);
15191 case tcc_binary:
15192 case tcc_comparison:
15193 return tree_binary_nonzero_warnv_p (code, type,
15194 TREE_OPERAND (t, 0),
15195 TREE_OPERAND (t, 1),
15196 strict_overflow_p);
15197 case tcc_constant:
15198 case tcc_declaration:
15199 case tcc_reference:
15200 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15201
15202 default:
15203 break;
15204 }
15205
15206 switch (code)
15207 {
15208 case TRUTH_NOT_EXPR:
15209 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15210 strict_overflow_p);
15211
15212 case TRUTH_AND_EXPR:
15213 case TRUTH_OR_EXPR:
15214 case TRUTH_XOR_EXPR:
15215 return tree_binary_nonzero_warnv_p (code, type,
15216 TREE_OPERAND (t, 0),
15217 TREE_OPERAND (t, 1),
15218 strict_overflow_p);
15219
15220 case COND_EXPR:
15221 case CONSTRUCTOR:
15222 case OBJ_TYPE_REF:
15223 case ASSERT_EXPR:
15224 case ADDR_EXPR:
15225 case WITH_SIZE_EXPR:
15226 case SSA_NAME:
15227 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15228
15229 case COMPOUND_EXPR:
15230 case MODIFY_EXPR:
15231 case BIND_EXPR:
15232 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15233 strict_overflow_p);
15234
15235 case SAVE_EXPR:
15236 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15237 strict_overflow_p);
15238
15239 case CALL_EXPR:
15240 return alloca_call_p (t);
15241
15242 default:
15243 break;
15244 }
15245 return false;
15246 }
15247
15248 /* Return true when T is an expression known to be nonzero.
15249 Handle warnings about undefined signed overflow. */
15250
15251 bool
15252 tree_expr_nonzero_p (tree t)
15253 {
15254 bool ret, strict_overflow_p;
15255
15256 strict_overflow_p = false;
15257 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15258 if (strict_overflow_p)
15259 fold_overflow_warning (("assuming signed overflow does not occur when "
15260 "determining that expression is always "
15261 "non-zero"),
15262 WARN_STRICT_OVERFLOW_MISC);
15263 return ret;
15264 }
15265
15266 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15267 attempt to fold the expression to a constant without modifying TYPE,
15268 OP0 or OP1.
15269
15270 If the expression could be simplified to a constant, then return
15271 the constant. If the expression would not be simplified to a
15272 constant, then return NULL_TREE. */
15273
15274 tree
15275 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15276 {
15277 tree tem = fold_binary (code, type, op0, op1);
15278 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15279 }
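
/* A small sketch (excluded from the build) of the contract described
   above: constant operands fold to a constant, anything else yields
   NULL_TREE.  */
#if 0
static void
example_fold_binary_to_constant (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                      two, three);

  /* 2 + 3 simplifies to the INTEGER_CST 5.  */
  gcc_assert (sum && tree_low_cst (sum, 0) == 5);
}
#endif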
15280
15281 /* Given the components of a unary expression CODE, TYPE and OP0,
15282 attempt to fold the expression to a constant without modifying
15283 TYPE or OP0.
15284
15285 If the expression could be simplified to a constant, then return
15286 the constant. If the expression would not be simplified to a
15287 constant, then return NULL_TREE. */
15288
15289 tree
15290 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15291 {
15292 tree tem = fold_unary (code, type, op0);
15293 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15294 }
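
/* Likewise for the unary entry point: a sketch (not compiled) showing
   that NEGATE_EXPR of the constant 5 folds to -5.  */
#if 0
static void
example_fold_unary_to_constant (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  tree neg = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, five);

  gcc_assert (neg && tree_low_cst (neg, 0) == -5);
}
#endif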
15295
15296 /* If EXP represents referencing an element in a constant string
15297 (either via pointer arithmetic or array indexing), return the
15298 tree representing the value accessed, otherwise return NULL. */
15299
15300 tree
15301 fold_read_from_constant_string (tree exp)
15302 {
15303 if ((TREE_CODE (exp) == INDIRECT_REF
15304 || TREE_CODE (exp) == ARRAY_REF)
15305 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15306 {
15307 tree exp1 = TREE_OPERAND (exp, 0);
15308 tree index;
15309 tree string;
15310 location_t loc = EXPR_LOCATION (exp);
15311
15312 if (TREE_CODE (exp) == INDIRECT_REF)
15313 string = string_constant (exp1, &index);
15314 else
15315 {
15316 tree low_bound = array_ref_low_bound (exp);
15317 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15318
15319 /* Optimize the special-case of a zero lower bound.
15320
15321 We convert the low_bound to sizetype to avoid some problems
15322 with constant folding. (E.g. suppose the lower bound is 1,
15323 and its mode is QI. Without the conversion, (ARRAY
15324 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15325 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15326 if (! integer_zerop (low_bound))
15327 index = size_diffop_loc (loc, index,
15328 fold_convert_loc (loc, sizetype, low_bound));
15329
15330 string = exp1;
15331 }
15332
15333 if (string
15334 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15335 && TREE_CODE (string) == STRING_CST
15336 && TREE_CODE (index) == INTEGER_CST
15337 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15338 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15339 == MODE_INT)
15340 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15341 return build_int_cst_type (TREE_TYPE (exp),
15342 (TREE_STRING_POINTER (string)
15343 [TREE_INT_CST_LOW (index)]));
15344 }
15345 return NULL;
15346 }
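
/* Sketch of the intended use (not compiled; STR_REF is a hypothetical
   tree): for an EXP equivalent to "abc"[1], the function digs the byte
   out of the STRING_CST and returns the INTEGER_CST 'b'.  */
#if 0
static void
example_read_from_constant_string (tree str_ref)
{
  tree c = fold_read_from_constant_string (str_ref);
  if (c)
    gcc_assert (TREE_CODE (c) == INTEGER_CST);
}
#endif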
15347
15348 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15349 an integer constant, real, or fixed-point constant.
15350
15351 TYPE is the type of the result. */
15352
15353 static tree
15354 fold_negate_const (tree arg0, tree type)
15355 {
15356 tree t = NULL_TREE;
15357
15358 switch (TREE_CODE (arg0))
15359 {
15360 case INTEGER_CST:
15361 {
15362 double_int val = tree_to_double_int (arg0);
15363 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15364
15365 t = force_fit_type_double (type, val, 1,
15366 (overflow | TREE_OVERFLOW (arg0))
15367 && !TYPE_UNSIGNED (type));
15368 break;
15369 }
15370
15371 case REAL_CST:
15372 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15373 break;
15374
15375 case FIXED_CST:
15376 {
15377 FIXED_VALUE_TYPE f;
15378 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15379 &(TREE_FIXED_CST (arg0)), NULL,
15380 TYPE_SATURATING (type));
15381 t = build_fixed (type, f);
15382 /* Propagate overflow flags. */
15383 if (overflow_p | TREE_OVERFLOW (arg0))
15384 TREE_OVERFLOW (t) = 1;
15385 break;
15386 }
15387
15388 default:
15389 gcc_unreachable ();
15390 }
15391
15392 return t;
15393 }
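
/* A sketch (not compiled) of the overflow handling above: negating the
   most negative value of a signed type wraps, so the folded constant
   carries TREE_OVERFLOW via force_fit_type_double.  */
#if 0
static void
example_fold_negate_const (void)
{
  tree t = fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
                              integer_type_node);
  gcc_assert (TREE_OVERFLOW (t));
}
#endif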
15394
15395 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15396 an integer constant or real constant.
15397
15398 TYPE is the type of the result. */
15399
15400 tree
15401 fold_abs_const (tree arg0, tree type)
15402 {
15403 tree t = NULL_TREE;
15404
15405 switch (TREE_CODE (arg0))
15406 {
15407 case INTEGER_CST:
15408 {
15409 double_int val = tree_to_double_int (arg0);
15410
15411 /* If the value is unsigned or non-negative, then the absolute value
15412 is the same as the ordinary value. */
15413 if (TYPE_UNSIGNED (type)
15414 || !double_int_negative_p (val))
15415 t = arg0;
15416
15417 /* If the value is negative, then the absolute value is
15418 its negation. */
15419 else
15420 {
15421 int overflow;
15422
15423 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15424 t = force_fit_type_double (type, val, -1,
15425 overflow | TREE_OVERFLOW (arg0));
15426 }
15427 }
15428 break;
15429
15430 case REAL_CST:
15431 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15432 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15433 else
15434 t = arg0;
15435 break;
15436
15437 default:
15438 gcc_unreachable ();
15439 }
15440
15441 return t;
15442 }
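
/* Sketch (not compiled): |-8| folds to 8; as with fold_negate_const,
   |INT_MIN| cannot be represented and would carry TREE_OVERFLOW.  */
#if 0
static void
example_fold_abs_const (void)
{
  tree m8 = build_int_cst (integer_type_node, -8);
  tree a = fold_abs_const (m8, integer_type_node);

  gcc_assert (tree_low_cst (a, 0) == 8);
}
#endif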
15443
15444 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15445 constant. TYPE is the type of the result. */
15446
15447 static tree
15448 fold_not_const (const_tree arg0, tree type)
15449 {
15450 double_int val;
15451
15452 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15453
15454 val = double_int_not (tree_to_double_int (arg0));
15455 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15456 }
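
/* Sketch (not compiled): bitwise NOT of 0 in a signed type folds to the
   all-ones constant -1.  */
#if 0
static void
example_fold_not_const (void)
{
  tree n = fold_not_const (integer_zero_node, integer_type_node);
  gcc_assert (integer_all_onesp (n));
}
#endif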
15457
15458 /* Given CODE, a relational operator, the target type, TYPE and two
15459 constant operands OP0 and OP1, return the result of the
15460 relational operation. If the result is not a compile time
15461 constant, then return NULL_TREE. */
15462
15463 static tree
15464 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15465 {
15466 int result, invert;
15467
15468 /* From here on, the only cases we handle are when the result is
15469 known to be a constant. */
15470
15471 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15472 {
15473 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15474 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15475
15476 /* Handle the cases where either operand is a NaN. */
15477 if (real_isnan (c0) || real_isnan (c1))
15478 {
15479 switch (code)
15480 {
15481 case EQ_EXPR:
15482 case ORDERED_EXPR:
15483 result = 0;
15484 break;
15485
15486 case NE_EXPR:
15487 case UNORDERED_EXPR:
15488 case UNLT_EXPR:
15489 case UNLE_EXPR:
15490 case UNGT_EXPR:
15491 case UNGE_EXPR:
15492 case UNEQ_EXPR:
15493 result = 1;
15494 break;
15495
15496 case LT_EXPR:
15497 case LE_EXPR:
15498 case GT_EXPR:
15499 case GE_EXPR:
15500 case LTGT_EXPR:
15501 if (flag_trapping_math)
15502 return NULL_TREE;
15503 result = 0;
15504 break;
15505
15506 default:
15507 gcc_unreachable ();
15508 }
15509
15510 return constant_boolean_node (result, type);
15511 }
15512
15513 return constant_boolean_node (real_compare (code, c0, c1), type);
15514 }
15515
15516 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15517 {
15518 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15519 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15520 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15521 }
15522
15523 /* Handle equality/inequality of complex constants. */
15524 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15525 {
15526 tree rcond = fold_relational_const (code, type,
15527 TREE_REALPART (op0),
15528 TREE_REALPART (op1));
15529 tree icond = fold_relational_const (code, type,
15530 TREE_IMAGPART (op0),
15531 TREE_IMAGPART (op1));
15532 if (code == EQ_EXPR)
15533 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15534 else if (code == NE_EXPR)
15535 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15536 else
15537 return NULL_TREE;
15538 }
15539
15540 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15541
15542 To compute GT, swap the arguments and do LT.
15543 To compute GE, do LT and invert the result.
15544 To compute LE, swap the arguments, do LT and invert the result.
15545 To compute NE, do EQ and invert the result.
15546
15547 Therefore, the code below must handle only EQ and LT. */
15548
15549 if (code == LE_EXPR || code == GT_EXPR)
15550 {
15551 tree tem = op0;
15552 op0 = op1;
15553 op1 = tem;
15554 code = swap_tree_comparison (code);
15555 }
15556
15557 /* Note that it is safe to invert for real values here because we
15558 have already handled the one case where it matters. */
15559
15560 invert = 0;
15561 if (code == NE_EXPR || code == GE_EXPR)
15562 {
15563 invert = 1;
15564 code = invert_tree_comparison (code, false);
15565 }
15566
15567 /* Compute a result for LT or EQ if args permit;
15568 otherwise return NULL_TREE. */
15569 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15570 {
15571 if (code == EQ_EXPR)
15572 result = tree_int_cst_equal (op0, op1);
15573 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15574 result = INT_CST_LT_UNSIGNED (op0, op1);
15575 else
15576 result = INT_CST_LT (op0, op1);
15577 }
15578 else
15579 return NULL_TREE;
15580
15581 if (invert)
15582 result ^= 1;
15583 return constant_boolean_node (result, type);
15584 }
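
/* Sketch (not compiled; NAN_CST stands for a hypothetical REAL_CST quiet
   NaN): with a NaN operand, EQ_EXPR folds to false and NE_EXPR to true,
   while LT_EXPR is left unfolded (NULL_TREE) under flag_trapping_math
   because the comparison may raise an exception at run time.  */
#if 0
static void
example_relational_const_nan (tree nan_cst)
{
  tree r = fold_relational_const (EQ_EXPR, boolean_type_node,
                                  nan_cst, nan_cst);
  gcc_assert (r && integer_zerop (r));
}
#endif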
15585
15586 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15587 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15588 itself. */
15589
15590 tree
15591 fold_build_cleanup_point_expr (tree type, tree expr)
15592 {
15593 /* If the expression does not have side effects then we don't have to wrap
15594 it with a cleanup point expression. */
15595 if (!TREE_SIDE_EFFECTS (expr))
15596 return expr;
15597
15598 /* If the expression is a return, check whether the expression inside
15599 the return, or the right-hand side of the MODIFY_EXPR inside it,
15600 has side effects. If either has none, we don't need to wrap the
15601 expression in a cleanup point expression. Note we don't check the
15602 left-hand side of the modify because it should always be the return decl. */
15603 if (TREE_CODE (expr) == RETURN_EXPR)
15604 {
15605 tree op = TREE_OPERAND (expr, 0);
15606 if (!op || !TREE_SIDE_EFFECTS (op))
15607 return expr;
15608 op = TREE_OPERAND (op, 1);
15609 if (!TREE_SIDE_EFFECTS (op))
15610 return expr;
15611 }
15612
15613 return build1 (CLEANUP_POINT_EXPR, type, expr);
15614 }
15615
15616 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15617 of an indirection through OP0, or NULL_TREE if no simplification is
15618 possible. */
15619
15620 tree
15621 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15622 {
15623 tree sub = op0;
15624 tree subtype;
15625
15626 STRIP_NOPS (sub);
15627 subtype = TREE_TYPE (sub);
15628 if (!POINTER_TYPE_P (subtype))
15629 return NULL_TREE;
15630
15631 if (TREE_CODE (sub) == ADDR_EXPR)
15632 {
15633 tree op = TREE_OPERAND (sub, 0);
15634 tree optype = TREE_TYPE (op);
15635 /* *&CONST_DECL -> to the value of the const decl. */
15636 if (TREE_CODE (op) == CONST_DECL)
15637 return DECL_INITIAL (op);
15638 /* *&p => p; make sure to handle *&"str"[cst] here. */
15639 if (type == optype)
15640 {
15641 tree fop = fold_read_from_constant_string (op);
15642 if (fop)
15643 return fop;
15644 else
15645 return op;
15646 }
15647 /* *(foo *)&fooarray => fooarray[0] */
15648 else if (TREE_CODE (optype) == ARRAY_TYPE
15649 && type == TREE_TYPE (optype))
15650 {
15651 tree type_domain = TYPE_DOMAIN (optype);
15652 tree min_val = size_zero_node;
15653 if (type_domain && TYPE_MIN_VALUE (type_domain))
15654 min_val = TYPE_MIN_VALUE (type_domain);
15655 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15656 SET_EXPR_LOCATION (op0, loc);
15657 return op0;
15658 }
15659 /* *(foo *)&complexfoo => __real__ complexfoo */
15660 else if (TREE_CODE (optype) == COMPLEX_TYPE
15661 && type == TREE_TYPE (optype))
15662 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15663 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15664 else if (TREE_CODE (optype) == VECTOR_TYPE
15665 && type == TREE_TYPE (optype))
15666 {
15667 tree part_width = TYPE_SIZE (type);
15668 tree index = bitsize_int (0);
15669 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15670 }
15671 }
15672
15673 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15674 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15675 {
15676 tree op00 = TREE_OPERAND (sub, 0);
15677 tree op01 = TREE_OPERAND (sub, 1);
15678
15679 STRIP_NOPS (op00);
15680 if (TREE_CODE (op00) == ADDR_EXPR)
15681 {
15682 tree op00type;
15683 op00 = TREE_OPERAND (op00, 0);
15684 op00type = TREE_TYPE (op00);
15685
15686 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15687 if (TREE_CODE (op00type) == VECTOR_TYPE
15688 && type == TREE_TYPE (op00type))
15689 {
15690 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15691 tree part_width = TYPE_SIZE (type);
15692 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15693 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15694 tree index = bitsize_int (indexi);
15695
15696 if (offset/part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15697 return fold_build3_loc (loc,
15698 BIT_FIELD_REF, type, op00,
15699 part_width, index);
15700
15701 }
15702 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15703 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15704 && type == TREE_TYPE (op00type))
15705 {
15706 tree size = TYPE_SIZE_UNIT (type);
15707 if (tree_int_cst_equal (size, op01))
15708 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15709 }
15710 /* ((foo *)&fooarray)[1] => fooarray[1] */
15711 else if (TREE_CODE (op00type) == ARRAY_TYPE
15712 && type == TREE_TYPE (op00type))
15713 {
15714 tree type_domain = TYPE_DOMAIN (op00type);
15715 tree min_val = size_zero_node;
15716 if (type_domain && TYPE_MIN_VALUE (type_domain))
15717 min_val = TYPE_MIN_VALUE (type_domain);
15718 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15719 TYPE_SIZE_UNIT (type));
15720 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15721 op0 = build4 (ARRAY_REF, type, op00, op01,
15722 NULL_TREE, NULL_TREE);
15723 SET_EXPR_LOCATION (op0, loc);
15724 return op0;
15725 }
15726 }
15727 }
15728
15729 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15730 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15731 && type == TREE_TYPE (TREE_TYPE (subtype)))
15732 {
15733 tree type_domain;
15734 tree min_val = size_zero_node;
15735 sub = build_fold_indirect_ref_loc (loc, sub);
15736 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15737 if (type_domain && TYPE_MIN_VALUE (type_domain))
15738 min_val = TYPE_MIN_VALUE (type_domain);
15739 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15740 SET_EXPR_LOCATION (op0, loc);
15741 return op0;
15742 }
15743
15744 return NULL_TREE;
15745 }
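
/* Sketch (not compiled; FOOARRAY_ADDR is a hypothetical ADDR_EXPR of an
   int array): dereferencing &fooarray at element type int yields the
   ARRAY_REF fooarray[0] instead of an INDIRECT_REF, per the cases
   above.  */
#if 0
static void
example_fold_indirect_ref (tree fooarray_addr)
{
  tree t = fold_indirect_ref_1 (UNKNOWN_LOCATION, integer_type_node,
                                fooarray_addr);
  if (t)
    gcc_assert (TREE_CODE (t) == ARRAY_REF);
}
#endif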
15746
15747 /* Builds an expression for an indirection through T, simplifying some
15748 cases. */
15749
15750 tree
15751 build_fold_indirect_ref_loc (location_t loc, tree t)
15752 {
15753 tree type = TREE_TYPE (TREE_TYPE (t));
15754 tree sub = fold_indirect_ref_1 (loc, type, t);
15755
15756 if (sub)
15757 return sub;
15758
15759 t = build1 (INDIRECT_REF, type, t);
15760 SET_EXPR_LOCATION (t, loc);
15761 return t;
15762 }
15763
15764 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15765
15766 tree
15767 fold_indirect_ref_loc (location_t loc, tree t)
15768 {
15769 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15770
15771 if (sub)
15772 return sub;
15773 else
15774 return t;
15775 }
15776
15777 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15778 whose result is ignored. The type of the returned tree need not be
15779 the same as the original expression. */
15780
15781 tree
15782 fold_ignored_result (tree t)
15783 {
15784 if (!TREE_SIDE_EFFECTS (t))
15785 return integer_zero_node;
15786
15787 for (;;)
15788 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15789 {
15790 case tcc_unary:
15791 t = TREE_OPERAND (t, 0);
15792 break;
15793
15794 case tcc_binary:
15795 case tcc_comparison:
15796 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15797 t = TREE_OPERAND (t, 0);
15798 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15799 t = TREE_OPERAND (t, 1);
15800 else
15801 return t;
15802 break;
15803
15804 case tcc_expression:
15805 switch (TREE_CODE (t))
15806 {
15807 case COMPOUND_EXPR:
15808 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15809 return t;
15810 t = TREE_OPERAND (t, 0);
15811 break;
15812
15813 case COND_EXPR:
15814 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15815 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15816 return t;
15817 t = TREE_OPERAND (t, 0);
15818 break;
15819
15820 default:
15821 return t;
15822 }
15823 break;
15824
15825 default:
15826 return t;
15827 }
15828 }
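
/* Sketch (not compiled): a side-effect-free expression whose value is
   ignored collapses to integer_zero_node; one with side effects comes
   back with any pure subtrees stripped.  */
#if 0
static void
example_fold_ignored_result (tree expr)
{
  if (!TREE_SIDE_EFFECTS (expr))
    gcc_assert (fold_ignored_result (expr) == integer_zero_node);
}
#endif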
15829
15830 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15831 This can only be applied to objects of a sizetype. */
15832
15833 tree
15834 round_up_loc (location_t loc, tree value, int divisor)
15835 {
15836 tree div = NULL_TREE;
15837
15838 gcc_assert (divisor > 0);
15839 if (divisor == 1)
15840 return value;
15841
15842 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15843 have to do anything. Only do this when VALUE is not a constant,
15844 because for a constant the check is more expensive than simply
15845 performing the rounding. */
15846 if (TREE_CODE (value) != INTEGER_CST)
15847 {
15848 div = build_int_cst (TREE_TYPE (value), divisor);
15849
15850 if (multiple_of_p (TREE_TYPE (value), value, div))
15851 return value;
15852 }
15853
15854 /* If divisor is a power of two, simplify this to bit manipulation. */
15855 if (divisor == (divisor & -divisor))
15856 {
15857 if (TREE_CODE (value) == INTEGER_CST)
15858 {
15859 double_int val = tree_to_double_int (value);
15860 bool overflow_p;
15861
15862 if ((val.low & (divisor - 1)) == 0)
15863 return value;
15864
15865 overflow_p = TREE_OVERFLOW (value);
15866 val.low &= ~(divisor - 1);
15867 val.low += divisor;
15868 if (val.low == 0)
15869 {
15870 val.high++;
15871 if (val.high == 0)
15872 overflow_p = true;
15873 }
15874
15875 return force_fit_type_double (TREE_TYPE (value), val,
15876 -1, overflow_p);
15877 }
15878 else
15879 {
15880 tree t;
15881
15882 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15883 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15884 t = build_int_cst (TREE_TYPE (value), -divisor);
15885 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15886 }
15887 }
15888 else
15889 {
15890 if (!div)
15891 div = build_int_cst (TREE_TYPE (value), divisor);
15892 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15893 value = size_binop_loc (loc, MULT_EXPR, value, div);
15894 }
15895
15896 return value;
15897 }
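
/* Sketch (not compiled) of the power-of-two fast path above: rounding 13
   up to a multiple of 8 computes (13 + 7) & -8 == 16.  */
#if 0
static void
example_round_up (void)
{
  tree r = round_up_loc (UNKNOWN_LOCATION, size_int (13), 8);
  gcc_assert (tree_low_cst (r, 1) == 16);
}
#endif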
15898
15899 /* Likewise, but round down. */
15900
15901 tree
15902 round_down_loc (location_t loc, tree value, int divisor)
15903 {
15904 tree div = NULL_TREE;
15905
15906 gcc_assert (divisor > 0);
15907 if (divisor == 1)
15908 return value;
15909
15910 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15911 have to do anything. Only do this when VALUE is not a constant,
15912 because for a constant the check is more expensive than simply
15913 performing the rounding. */
15914 if (TREE_CODE (value) != INTEGER_CST)
15915 {
15916 div = build_int_cst (TREE_TYPE (value), divisor);
15917
15918 if (multiple_of_p (TREE_TYPE (value), value, div))
15919 return value;
15920 }
15921
15922 /* If divisor is a power of two, simplify this to bit manipulation. */
15923 if (divisor == (divisor & -divisor))
15924 {
15925 tree t;
15926
15927 t = build_int_cst (TREE_TYPE (value), -divisor);
15928 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15929 }
15930 else
15931 {
15932 if (!div)
15933 div = build_int_cst (TREE_TYPE (value), divisor);
15934 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15935 value = size_binop_loc (loc, MULT_EXPR, value, div);
15936 }
15937
15938 return value;
15939 }
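
/* Sketch (not compiled): rounding 13 down to a multiple of 8 is a single
   mask, 13 & -8 == 8.  */
#if 0
static void
example_round_down (void)
{
  tree r = round_down_loc (UNKNOWN_LOCATION, size_int (13), 8);
  gcc_assert (tree_low_cst (r, 1) == 8);
}
#endif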
15940
15941 /* Returns the pointer to the base of the object addressed by EXP and
15942 extracts the information about the offset of the access, storing it
15943 in *PBITPOS and *POFFSET. */
15944
15945 static tree
15946 split_address_to_core_and_offset (tree exp,
15947 HOST_WIDE_INT *pbitpos, tree *poffset)
15948 {
15949 tree core;
15950 enum machine_mode mode;
15951 int unsignedp, volatilep;
15952 HOST_WIDE_INT bitsize;
15953 location_t loc = EXPR_LOCATION (exp);
15954
15955 if (TREE_CODE (exp) == ADDR_EXPR)
15956 {
15957 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15958 poffset, &mode, &unsignedp, &volatilep,
15959 false);
15960 core = build_fold_addr_expr_loc (loc, core);
15961 }
15962 else
15963 {
15964 core = exp;
15965 *pbitpos = 0;
15966 *poffset = NULL_TREE;
15967 }
15968
15969 return core;
15970 }
15971
15972 /* Returns true if addresses of E1 and E2 differ by a constant, false
15973 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15974
15975 bool
15976 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15977 {
15978 tree core1, core2;
15979 HOST_WIDE_INT bitpos1, bitpos2;
15980 tree toffset1, toffset2, tdiff, type;
15981
15982 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15983 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15984
15985 if (bitpos1 % BITS_PER_UNIT != 0
15986 || bitpos2 % BITS_PER_UNIT != 0
15987 || !operand_equal_p (core1, core2, 0))
15988 return false;
15989
15990 if (toffset1 && toffset2)
15991 {
15992 type = TREE_TYPE (toffset1);
15993 if (type != TREE_TYPE (toffset2))
15994 toffset2 = fold_convert (type, toffset2);
15995
15996 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15997 if (!cst_and_fits_in_hwi (tdiff))
15998 return false;
15999
16000 *diff = int_cst_value (tdiff);
16001 }
16002 else if (toffset1 || toffset2)
16003 {
16004 /* If only one of the offsets is non-constant, the difference cannot
16005 be a constant. */
16006 return false;
16007 }
16008 else
16009 *diff = 0;
16010
16011 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16012 return true;
16013 }
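
/* Sketch (not compiled; E1 and E2 are hypothetical address trees): for
   E1 == &a[3] and E2 == &a[1] with 4-byte elements, both addresses share
   the core &a and *DIFF is set to 8.  */
#if 0
static void
example_ptr_difference (tree e1, tree e2)
{
  HOST_WIDE_INT diff;

  if (ptr_difference_const (e1, e2, &diff))
    {
      /* DIFF is the byte offset of E1 relative to E2.  */
    }
}
#endif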
16014
16015 /* Simplify the floating point expression EXP when the sign of the
16016 result is not significant. Return NULL_TREE if no simplification
16017 is possible. */
16018
16019 tree
16020 fold_strip_sign_ops (tree exp)
16021 {
16022 tree arg0, arg1;
16023 location_t loc = EXPR_LOCATION (exp);
16024
16025 switch (TREE_CODE (exp))
16026 {
16027 case ABS_EXPR:
16028 case NEGATE_EXPR:
16029 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16030 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16031
16032 case MULT_EXPR:
16033 case RDIV_EXPR:
16034 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16035 return NULL_TREE;
16036 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16037 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16038 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16039 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16040 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16041 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16042 break;
16043
16044 case COMPOUND_EXPR:
16045 arg0 = TREE_OPERAND (exp, 0);
16046 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16047 if (arg1)
16048 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16049 break;
16050
16051 case COND_EXPR:
16052 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16053 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16054 if (arg0 || arg1)
16055 return fold_build3_loc (loc,
16056 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16057 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16058 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16059 break;
16060
16061 case CALL_EXPR:
16062 {
16063 const enum built_in_function fcode = builtin_mathfn_code (exp);
16064 switch (fcode)
16065 {
16066 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16067 /* Strip copysign function call, return the 1st argument. */
16068 arg0 = CALL_EXPR_ARG (exp, 0);
16069 arg1 = CALL_EXPR_ARG (exp, 1);
16070 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16071
16072 default:
16073 /* Strip sign ops from the argument of "odd" math functions. */
16074 if (negate_mathfn_p (fcode))
16075 {
16076 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16077 if (arg0)
16078 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16079 }
16080 break;
16081 }
16082 }
16083 break;
16084
16085 default:
16086 break;
16087 }
16088 return NULL_TREE;
16089 }
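
/* Sketch (not compiled): when only the magnitude of the result matters,
   (-x) * y can be replaced by x * y, and sin (-x) by sin (x), since sin
   is odd; the caller substitutes the stripped tree when one is
   returned.  */
#if 0
static void
example_strip_sign_ops (tree exp)
{
  tree stripped = fold_strip_sign_ops (exp);
  if (stripped)
    {
      /* Use STRIPPED in place of EXP.  */
    }
}
#endif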