* g++.dg/cpp0x/nullptr21.c: Remove printfs, make self-checking.
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide and size_binop.
31
32 fold takes a tree as argument and returns a simplified tree.
33
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
37
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
40
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "realmpfr.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "target.h"
56 #include "diagnostic-core.h"
57 #include "intl.h"
58 #include "ggc.h"
59 #include "hashtab.h"
60 #include "langhooks.h"
61 #include "md5.h"
62 #include "gimple.h"
63 #include "tree-flow.h"
64
65 /* Nonzero if we are folding constants inside an initializer; zero
66 otherwise. */
67 int folding_initializer = 0;
68
69 /* The following constants represent a bit based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
73 COMPCODE_FALSE = 0,
74 COMPCODE_LT = 1,
75 COMPCODE_EQ = 2,
76 COMPCODE_LE = 3,
77 COMPCODE_GT = 4,
78 COMPCODE_LTGT = 5,
79 COMPCODE_GE = 6,
80 COMPCODE_ORD = 7,
81 COMPCODE_UNORD = 8,
82 COMPCODE_UNLT = 9,
83 COMPCODE_UNEQ = 10,
84 COMPCODE_UNLE = 11,
85 COMPCODE_UNGT = 12,
86 COMPCODE_NE = 13,
87 COMPCODE_UNGE = 14,
88 COMPCODE_TRUE = 15
89 };
90
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
107 tree, tree, tree);
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
109 HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
111 tree *, tree *);
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static bool simple_operand_p_2 (tree);
116 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
117 static tree range_predecessor (tree);
118 static tree range_successor (tree);
119 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
120 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
121 static tree unextend (tree, int, int, tree);
122 static tree optimize_minmax_comparison (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
125 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
126 static tree fold_binary_op_with_conditional_arg (location_t,
127 enum tree_code, tree,
128 tree, tree,
129 tree, tree, int);
130 static tree fold_mathfn_compare (location_t,
131 enum built_in_function, enum tree_code,
132 tree, tree, tree);
133 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
135 static bool reorder_operands_p (const_tree, const_tree);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
140
141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
142 Otherwise, return LOC. */
143
144 static location_t
145 expr_location_or (tree t, location_t loc)
146 {
147 location_t tloc = EXPR_LOCATION (t);
148 return tloc != UNKNOWN_LOCATION ? tloc : loc;
149 }
150
151 /* Similar to protected_set_expr_location, but never modify x in place,
152 if location can and needs to be set, unshare it. */
153
154 static inline tree
155 protected_set_expr_location_unshare (tree x, location_t loc)
156 {
157 if (CAN_HAVE_LOCATION_P (x)
158 && EXPR_LOCATION (x) != loc
159 && !(TREE_CODE (x) == SAVE_EXPR
160 || TREE_CODE (x) == TARGET_EXPR
161 || TREE_CODE (x) == BIND_EXPR))
162 {
163 x = copy_node (x);
164 SET_EXPR_LOCATION (x, loc);
165 }
166 return x;
167 }
168
169
170 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
171 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
172 and SUM1. Then this yields nonzero if overflow occurred during the
173 addition.
174
175 Overflow occurs if A and B have the same sign, but A and SUM differ in
176 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
177 sign. */
178 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
179 \f
180 /* If ARG2 divides ARG1 with zero remainder, carries out the division
181 of type CODE and returns the quotient.
182 Otherwise returns NULL_TREE. */
183
184 tree
185 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
186 {
187 double_int quo, rem;
188 int uns;
189
190 /* The sign of the division is according to operand two, that
191 does the correct thing for POINTER_PLUS_EXPR where we want
192 a signed division. */
193 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
194
195 quo = double_int_divmod (tree_to_double_int (arg1),
196 tree_to_double_int (arg2),
197 uns, code, &rem);
198
199 if (double_int_zero_p (rem))
200 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
201
202 return NULL_TREE;
203 }
204 \f
205 /* This is nonzero if we should defer warnings about undefined
206 overflow. This facility exists because these warnings are a
207 special case. The code to estimate loop iterations does not want
208 to issue any warnings, since it works with expressions which do not
209 occur in user code. Various bits of cleanup code call fold(), but
210 only use the result if it has certain characteristics (e.g., is a
211 constant); that code only wants to issue a warning if the result is
212 used. */
213
214 static int fold_deferring_overflow_warnings;
215
216 /* If a warning about undefined overflow is deferred, this is the
217 warning. Note that this may cause us to turn two warnings into
218 one, but that is fine since it is sufficient to only give one
219 warning per expression. */
220
221 static const char* fold_deferred_overflow_warning;
222
223 /* If a warning about undefined overflow is deferred, this is the
224 level at which the warning should be emitted. */
225
226 static enum warn_strict_overflow_code fold_deferred_overflow_code;
227
228 /* Start deferring overflow warnings. We could use a stack here to
229 permit nested calls, but at present it is not necessary. */
230
231 void
232 fold_defer_overflow_warnings (void)
233 {
234 ++fold_deferring_overflow_warnings;
235 }
236
237 /* Stop deferring overflow warnings. If there is a pending warning,
238 and ISSUE is true, then issue the warning if appropriate. STMT is
239 the statement with which the warning should be associated (used for
240 location information); STMT may be NULL. CODE is the level of the
241 warning--a warn_strict_overflow_code value. This function will use
242 the smaller of CODE and the deferred code when deciding whether to
243 issue the warning. CODE may be zero to mean to always use the
244 deferred code. */
245
246 void
247 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
248 {
249 const char *warnmsg;
250 location_t locus;
251
252 gcc_assert (fold_deferring_overflow_warnings > 0);
253 --fold_deferring_overflow_warnings;
254 if (fold_deferring_overflow_warnings > 0)
255 {
256 if (fold_deferred_overflow_warning != NULL
257 && code != 0
258 && code < (int) fold_deferred_overflow_code)
259 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
260 return;
261 }
262
263 warnmsg = fold_deferred_overflow_warning;
264 fold_deferred_overflow_warning = NULL;
265
266 if (!issue || warnmsg == NULL)
267 return;
268
269 if (gimple_no_warning_p (stmt))
270 return;
271
272 /* Use the smallest code level when deciding to issue the
273 warning. */
274 if (code == 0 || code > (int) fold_deferred_overflow_code)
275 code = fold_deferred_overflow_code;
276
277 if (!issue_strict_overflow_warning (code))
278 return;
279
280 if (stmt == NULL)
281 locus = input_location;
282 else
283 locus = gimple_location (stmt);
284 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
285 }
286
287 /* Stop deferring overflow warnings, ignoring any deferred
288 warnings. */
289
290 void
291 fold_undefer_and_ignore_overflow_warnings (void)
292 {
293 fold_undefer_overflow_warnings (false, NULL, 0);
294 }
295
296 /* Whether we are deferring overflow warnings. */
297
298 bool
299 fold_deferring_overflow_warnings_p (void)
300 {
301 return fold_deferring_overflow_warnings > 0;
302 }
303
304 /* This is called when we fold something based on the fact that signed
305 overflow is undefined. */
306
307 static void
308 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
309 {
310 if (fold_deferring_overflow_warnings > 0)
311 {
312 if (fold_deferred_overflow_warning == NULL
313 || wc < fold_deferred_overflow_code)
314 {
315 fold_deferred_overflow_warning = gmsgid;
316 fold_deferred_overflow_code = wc;
317 }
318 }
319 else if (issue_strict_overflow_warning (wc))
320 warning (OPT_Wstrict_overflow, gmsgid);
321 }
322 \f
323 /* Return true if the built-in mathematical function specified by CODE
324 is odd, i.e. -f(x) == f(-x). */
325
326 static bool
327 negate_mathfn_p (enum built_in_function code)
328 {
329 switch (code)
330 {
331 CASE_FLT_FN (BUILT_IN_ASIN):
332 CASE_FLT_FN (BUILT_IN_ASINH):
333 CASE_FLT_FN (BUILT_IN_ATAN):
334 CASE_FLT_FN (BUILT_IN_ATANH):
335 CASE_FLT_FN (BUILT_IN_CASIN):
336 CASE_FLT_FN (BUILT_IN_CASINH):
337 CASE_FLT_FN (BUILT_IN_CATAN):
338 CASE_FLT_FN (BUILT_IN_CATANH):
339 CASE_FLT_FN (BUILT_IN_CBRT):
340 CASE_FLT_FN (BUILT_IN_CPROJ):
341 CASE_FLT_FN (BUILT_IN_CSIN):
342 CASE_FLT_FN (BUILT_IN_CSINH):
343 CASE_FLT_FN (BUILT_IN_CTAN):
344 CASE_FLT_FN (BUILT_IN_CTANH):
345 CASE_FLT_FN (BUILT_IN_ERF):
346 CASE_FLT_FN (BUILT_IN_LLROUND):
347 CASE_FLT_FN (BUILT_IN_LROUND):
348 CASE_FLT_FN (BUILT_IN_ROUND):
349 CASE_FLT_FN (BUILT_IN_SIN):
350 CASE_FLT_FN (BUILT_IN_SINH):
351 CASE_FLT_FN (BUILT_IN_TAN):
352 CASE_FLT_FN (BUILT_IN_TANH):
353 CASE_FLT_FN (BUILT_IN_TRUNC):
354 return true;
355
356 CASE_FLT_FN (BUILT_IN_LLRINT):
357 CASE_FLT_FN (BUILT_IN_LRINT):
358 CASE_FLT_FN (BUILT_IN_NEARBYINT):
359 CASE_FLT_FN (BUILT_IN_RINT):
360 return !flag_rounding_math;
361
362 default:
363 break;
364 }
365 return false;
366 }
367
368 /* Check whether we may negate an integer constant T without causing
369 overflow. */
370
371 bool
372 may_negate_without_overflow_p (const_tree t)
373 {
374 unsigned HOST_WIDE_INT val;
375 unsigned int prec;
376 tree type;
377
378 gcc_assert (TREE_CODE (t) == INTEGER_CST);
379
380 type = TREE_TYPE (t);
381 if (TYPE_UNSIGNED (type))
382 return false;
383
384 prec = TYPE_PRECISION (type);
385 if (prec > HOST_BITS_PER_WIDE_INT)
386 {
387 if (TREE_INT_CST_LOW (t) != 0)
388 return true;
389 prec -= HOST_BITS_PER_WIDE_INT;
390 val = TREE_INT_CST_HIGH (t);
391 }
392 else
393 val = TREE_INT_CST_LOW (t);
394 if (prec < HOST_BITS_PER_WIDE_INT)
395 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
396 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
397 }
398
399 /* Determine whether an expression T can be cheaply negated using
400 the function negate_expr without introducing undefined overflow. */
401
402 static bool
403 negate_expr_p (tree t)
404 {
405 tree type;
406
407 if (t == 0)
408 return false;
409
410 type = TREE_TYPE (t);
411
412 STRIP_SIGN_NOPS (t);
413 switch (TREE_CODE (t))
414 {
415 case INTEGER_CST:
416 if (TYPE_OVERFLOW_WRAPS (type))
417 return true;
418
419 /* Check that -CST will not overflow type. */
420 return may_negate_without_overflow_p (t);
421 case BIT_NOT_EXPR:
422 return (INTEGRAL_TYPE_P (type)
423 && TYPE_OVERFLOW_WRAPS (type));
424
425 case FIXED_CST:
426 case NEGATE_EXPR:
427 return true;
428
429 case REAL_CST:
430 /* We want to canonicalize to positive real constants. Pretend
431 that only negative ones can be easily negated. */
432 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
433
434 case COMPLEX_CST:
435 return negate_expr_p (TREE_REALPART (t))
436 && negate_expr_p (TREE_IMAGPART (t));
437
438 case COMPLEX_EXPR:
439 return negate_expr_p (TREE_OPERAND (t, 0))
440 && negate_expr_p (TREE_OPERAND (t, 1));
441
442 case CONJ_EXPR:
443 return negate_expr_p (TREE_OPERAND (t, 0));
444
445 case PLUS_EXPR:
446 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
447 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
448 return false;
449 /* -(A + B) -> (-B) - A. */
450 if (negate_expr_p (TREE_OPERAND (t, 1))
451 && reorder_operands_p (TREE_OPERAND (t, 0),
452 TREE_OPERAND (t, 1)))
453 return true;
454 /* -(A + B) -> (-A) - B. */
455 return negate_expr_p (TREE_OPERAND (t, 0));
456
457 case MINUS_EXPR:
458 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
459 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
461 && reorder_operands_p (TREE_OPERAND (t, 0),
462 TREE_OPERAND (t, 1));
463
464 case MULT_EXPR:
465 if (TYPE_UNSIGNED (TREE_TYPE (t)))
466 break;
467
468 /* Fall through. */
469
470 case RDIV_EXPR:
471 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
472 return negate_expr_p (TREE_OPERAND (t, 1))
473 || negate_expr_p (TREE_OPERAND (t, 0));
474 break;
475
476 case TRUNC_DIV_EXPR:
477 case ROUND_DIV_EXPR:
478 case FLOOR_DIV_EXPR:
479 case CEIL_DIV_EXPR:
480 case EXACT_DIV_EXPR:
481 /* In general we can't negate A / B, because if A is INT_MIN and
482 B is 1, we may turn this into INT_MIN / -1 which is undefined
483 and actually traps on some architectures. But if overflow is
484 undefined, we can negate, because - (INT_MIN / 1) is an
485 overflow. */
486 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
487 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
488 break;
489 return negate_expr_p (TREE_OPERAND (t, 1))
490 || negate_expr_p (TREE_OPERAND (t, 0));
491
492 case NOP_EXPR:
493 /* Negate -((double)float) as (double)(-float). */
494 if (TREE_CODE (type) == REAL_TYPE)
495 {
496 tree tem = strip_float_extensions (t);
497 if (tem != t)
498 return negate_expr_p (tem);
499 }
500 break;
501
502 case CALL_EXPR:
503 /* Negate -f(x) as f(-x). */
504 if (negate_mathfn_p (builtin_mathfn_code (t)))
505 return negate_expr_p (CALL_EXPR_ARG (t, 0));
506 break;
507
508 case RSHIFT_EXPR:
509 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
510 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
511 {
512 tree op1 = TREE_OPERAND (t, 1);
513 if (TREE_INT_CST_HIGH (op1) == 0
514 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
515 == TREE_INT_CST_LOW (op1))
516 return true;
517 }
518 break;
519
520 default:
521 break;
522 }
523 return false;
524 }
525
526 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
527 simplification is possible.
528 If negate_expr_p would return true for T, NULL_TREE will never be
529 returned. */
530
531 static tree
532 fold_negate_expr (location_t loc, tree t)
533 {
534 tree type = TREE_TYPE (t);
535 tree tem;
536
537 switch (TREE_CODE (t))
538 {
539 /* Convert - (~A) to A + 1. */
540 case BIT_NOT_EXPR:
541 if (INTEGRAL_TYPE_P (type))
542 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
543 build_int_cst (type, 1));
544 break;
545
546 case INTEGER_CST:
547 tem = fold_negate_const (t, type);
548 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
549 || !TYPE_OVERFLOW_TRAPS (type))
550 return tem;
551 break;
552
553 case REAL_CST:
554 tem = fold_negate_const (t, type);
555 /* Two's complement FP formats, such as c4x, may overflow. */
556 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
557 return tem;
558 break;
559
560 case FIXED_CST:
561 tem = fold_negate_const (t, type);
562 return tem;
563
564 case COMPLEX_CST:
565 {
566 tree rpart = negate_expr (TREE_REALPART (t));
567 tree ipart = negate_expr (TREE_IMAGPART (t));
568
569 if ((TREE_CODE (rpart) == REAL_CST
570 && TREE_CODE (ipart) == REAL_CST)
571 || (TREE_CODE (rpart) == INTEGER_CST
572 && TREE_CODE (ipart) == INTEGER_CST))
573 return build_complex (type, rpart, ipart);
574 }
575 break;
576
577 case COMPLEX_EXPR:
578 if (negate_expr_p (t))
579 return fold_build2_loc (loc, COMPLEX_EXPR, type,
580 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
581 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
582 break;
583
584 case CONJ_EXPR:
585 if (negate_expr_p (t))
586 return fold_build1_loc (loc, CONJ_EXPR, type,
587 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
588 break;
589
590 case NEGATE_EXPR:
591 return TREE_OPERAND (t, 0);
592
593 case PLUS_EXPR:
594 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
595 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
596 {
597 /* -(A + B) -> (-B) - A. */
598 if (negate_expr_p (TREE_OPERAND (t, 1))
599 && reorder_operands_p (TREE_OPERAND (t, 0),
600 TREE_OPERAND (t, 1)))
601 {
602 tem = negate_expr (TREE_OPERAND (t, 1));
603 return fold_build2_loc (loc, MINUS_EXPR, type,
604 tem, TREE_OPERAND (t, 0));
605 }
606
607 /* -(A + B) -> (-A) - B. */
608 if (negate_expr_p (TREE_OPERAND (t, 0)))
609 {
610 tem = negate_expr (TREE_OPERAND (t, 0));
611 return fold_build2_loc (loc, MINUS_EXPR, type,
612 tem, TREE_OPERAND (t, 1));
613 }
614 }
615 break;
616
617 case MINUS_EXPR:
618 /* - (A - B) -> B - A */
619 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
620 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
621 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
622 return fold_build2_loc (loc, MINUS_EXPR, type,
623 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
624 break;
625
626 case MULT_EXPR:
627 if (TYPE_UNSIGNED (type))
628 break;
629
630 /* Fall through. */
631
632 case RDIV_EXPR:
633 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
634 {
635 tem = TREE_OPERAND (t, 1);
636 if (negate_expr_p (tem))
637 return fold_build2_loc (loc, TREE_CODE (t), type,
638 TREE_OPERAND (t, 0), negate_expr (tem));
639 tem = TREE_OPERAND (t, 0);
640 if (negate_expr_p (tem))
641 return fold_build2_loc (loc, TREE_CODE (t), type,
642 negate_expr (tem), TREE_OPERAND (t, 1));
643 }
644 break;
645
646 case TRUNC_DIV_EXPR:
647 case ROUND_DIV_EXPR:
648 case FLOOR_DIV_EXPR:
649 case CEIL_DIV_EXPR:
650 case EXACT_DIV_EXPR:
651 /* In general we can't negate A / B, because if A is INT_MIN and
652 B is 1, we may turn this into INT_MIN / -1 which is undefined
653 and actually traps on some architectures. But if overflow is
654 undefined, we can negate, because - (INT_MIN / 1) is an
655 overflow. */
656 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
657 {
658 const char * const warnmsg = G_("assuming signed overflow does not "
659 "occur when negating a division");
660 tem = TREE_OPERAND (t, 1);
661 if (negate_expr_p (tem))
662 {
663 if (INTEGRAL_TYPE_P (type)
664 && (TREE_CODE (tem) != INTEGER_CST
665 || integer_onep (tem)))
666 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
667 return fold_build2_loc (loc, TREE_CODE (t), type,
668 TREE_OPERAND (t, 0), negate_expr (tem));
669 }
670 tem = TREE_OPERAND (t, 0);
671 if (negate_expr_p (tem))
672 {
673 if (INTEGRAL_TYPE_P (type)
674 && (TREE_CODE (tem) != INTEGER_CST
675 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
676 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
677 return fold_build2_loc (loc, TREE_CODE (t), type,
678 negate_expr (tem), TREE_OPERAND (t, 1));
679 }
680 }
681 break;
682
683 case NOP_EXPR:
684 /* Convert -((double)float) into (double)(-float). */
685 if (TREE_CODE (type) == REAL_TYPE)
686 {
687 tem = strip_float_extensions (t);
688 if (tem != t && negate_expr_p (tem))
689 return fold_convert_loc (loc, type, negate_expr (tem));
690 }
691 break;
692
693 case CALL_EXPR:
694 /* Negate -f(x) as f(-x). */
695 if (negate_mathfn_p (builtin_mathfn_code (t))
696 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
697 {
698 tree fndecl, arg;
699
700 fndecl = get_callee_fndecl (t);
701 arg = negate_expr (CALL_EXPR_ARG (t, 0));
702 return build_call_expr_loc (loc, fndecl, 1, arg);
703 }
704 break;
705
706 case RSHIFT_EXPR:
707 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
708 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
709 {
710 tree op1 = TREE_OPERAND (t, 1);
711 if (TREE_INT_CST_HIGH (op1) == 0
712 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
713 == TREE_INT_CST_LOW (op1))
714 {
715 tree ntype = TYPE_UNSIGNED (type)
716 ? signed_type_for (type)
717 : unsigned_type_for (type);
718 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
719 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
720 return fold_convert_loc (loc, type, temp);
721 }
722 }
723 break;
724
725 default:
726 break;
727 }
728
729 return NULL_TREE;
730 }
731
732 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
733 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
734 return NULL_TREE. */
735
736 static tree
737 negate_expr (tree t)
738 {
739 tree type, tem;
740 location_t loc;
741
742 if (t == NULL_TREE)
743 return NULL_TREE;
744
745 loc = EXPR_LOCATION (t);
746 type = TREE_TYPE (t);
747 STRIP_SIGN_NOPS (t);
748
749 tem = fold_negate_expr (loc, t);
750 if (!tem)
751 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
752 return fold_convert_loc (loc, type, tem);
753 }
754 \f
755 /* Split a tree IN into a constant, literal and variable parts that could be
756 combined with CODE to make IN. "constant" means an expression with
757 TREE_CONSTANT but that isn't an actual constant. CODE must be a
758 commutative arithmetic operation. Store the constant part into *CONP,
759 the literal in *LITP and return the variable part. If a part isn't
760 present, set it to null. If the tree does not decompose in this way,
761 return the entire tree as the variable part and the other parts as null.
762
763 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
764 case, we negate an operand that was subtracted. Except if it is a
765 literal for which we use *MINUS_LITP instead.
766
767 If NEGATE_P is true, we are negating all of IN, again except a literal
768 for which we use *MINUS_LITP instead.
769
770 If IN is itself a literal or constant, return it as appropriate.
771
772 Note that we do not guarantee that any of the three values will be the
773 same type as IN, but they will have the same signedness and mode. */
774
775 static tree
776 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
777 tree *minus_litp, int negate_p)
778 {
779 tree var = 0;
780
781 *conp = 0;
782 *litp = 0;
783 *minus_litp = 0;
784
785 /* Strip any conversions that don't change the machine mode or signedness. */
786 STRIP_SIGN_NOPS (in);
787
788 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
789 || TREE_CODE (in) == FIXED_CST)
790 *litp = in;
791 else if (TREE_CODE (in) == code
792 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
793 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
794 /* We can associate addition and subtraction together (even
795 though the C standard doesn't say so) for integers because
796 the value is not affected. For reals, the value might be
797 affected, so we can't. */
798 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
799 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
800 {
801 tree op0 = TREE_OPERAND (in, 0);
802 tree op1 = TREE_OPERAND (in, 1);
803 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
804 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
805
806 /* First see if either of the operands is a literal, then a constant. */
807 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
808 || TREE_CODE (op0) == FIXED_CST)
809 *litp = op0, op0 = 0;
810 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
811 || TREE_CODE (op1) == FIXED_CST)
812 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
813
814 if (op0 != 0 && TREE_CONSTANT (op0))
815 *conp = op0, op0 = 0;
816 else if (op1 != 0 && TREE_CONSTANT (op1))
817 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
818
819 /* If we haven't dealt with either operand, this is not a case we can
820 decompose. Otherwise, VAR is either of the ones remaining, if any. */
821 if (op0 != 0 && op1 != 0)
822 var = in;
823 else if (op0 != 0)
824 var = op0;
825 else
826 var = op1, neg_var_p = neg1_p;
827
828 /* Now do any needed negations. */
829 if (neg_litp_p)
830 *minus_litp = *litp, *litp = 0;
831 if (neg_conp_p)
832 *conp = negate_expr (*conp);
833 if (neg_var_p)
834 var = negate_expr (var);
835 }
836 else if (TREE_CONSTANT (in))
837 *conp = in;
838 else
839 var = in;
840
841 if (negate_p)
842 {
843 if (*litp)
844 *minus_litp = *litp, *litp = 0;
845 else if (*minus_litp)
846 *litp = *minus_litp, *minus_litp = 0;
847 *conp = negate_expr (*conp);
848 var = negate_expr (var);
849 }
850
851 return var;
852 }
853
854 /* Re-associate trees split by the above function. T1 and T2 are
855 either expressions to associate or null. Return the new
856 expression, if any. LOC is the location of the new expression. If
857 we build an operation, do it in TYPE and with CODE. */
858
859 static tree
860 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
861 {
862 if (t1 == 0)
863 return t2;
864 else if (t2 == 0)
865 return t1;
866
867 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
868 try to fold this since we will have infinite recursion. But do
869 deal with any NEGATE_EXPRs. */
870 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
871 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
872 {
873 if (code == PLUS_EXPR)
874 {
875 if (TREE_CODE (t1) == NEGATE_EXPR)
876 return build2_loc (loc, MINUS_EXPR, type,
877 fold_convert_loc (loc, type, t2),
878 fold_convert_loc (loc, type,
879 TREE_OPERAND (t1, 0)));
880 else if (TREE_CODE (t2) == NEGATE_EXPR)
881 return build2_loc (loc, MINUS_EXPR, type,
882 fold_convert_loc (loc, type, t1),
883 fold_convert_loc (loc, type,
884 TREE_OPERAND (t2, 0)));
885 else if (integer_zerop (t2))
886 return fold_convert_loc (loc, type, t1);
887 }
888 else if (code == MINUS_EXPR)
889 {
890 if (integer_zerop (t2))
891 return fold_convert_loc (loc, type, t1);
892 }
893
894 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
895 fold_convert_loc (loc, type, t2));
896 }
897
898 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
899 fold_convert_loc (loc, type, t2));
900 }
901 \f
902 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
903 for use in int_const_binop, size_binop and size_diffop. */
904
905 static bool
906 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
907 {
908 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
909 return false;
910 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
911 return false;
912
913 switch (code)
914 {
915 case LSHIFT_EXPR:
916 case RSHIFT_EXPR:
917 case LROTATE_EXPR:
918 case RROTATE_EXPR:
919 return true;
920
921 default:
922 break;
923 }
924
925 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
926 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
927 && TYPE_MODE (type1) == TYPE_MODE (type2);
928 }
929
930
931 /* Combine two integer constants ARG1 and ARG2 under operation CODE
932 to produce a new constant. Return NULL_TREE if we don't know how
933 to evaluate CODE at compile-time. */
934
935 static tree
936 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
937 int overflowable)
938 {
939 double_int op1, op2, res, tmp;
940 tree t;
941 tree type = TREE_TYPE (arg1);
942 bool uns = TYPE_UNSIGNED (type);
943 bool overflow = false;
944
945 op1 = tree_to_double_int (arg1);
946 op2 = tree_to_double_int (arg2);
947
948 switch (code)
949 {
950 case BIT_IOR_EXPR:
951 res = double_int_ior (op1, op2);
952 break;
953
954 case BIT_XOR_EXPR:
955 res = double_int_xor (op1, op2);
956 break;
957
958 case BIT_AND_EXPR:
959 res = double_int_and (op1, op2);
960 break;
961
962 case RSHIFT_EXPR:
963 res = double_int_rshift (op1, double_int_to_shwi (op2),
964 TYPE_PRECISION (type), !uns);
965 break;
966
967 case LSHIFT_EXPR:
968 /* It's unclear from the C standard whether shifts can overflow.
969 The following code ignores overflow; perhaps a C standard
970 interpretation ruling is needed. */
971 res = double_int_lshift (op1, double_int_to_shwi (op2),
972 TYPE_PRECISION (type), !uns);
973 break;
974
975 case RROTATE_EXPR:
976 res = double_int_rrotate (op1, double_int_to_shwi (op2),
977 TYPE_PRECISION (type));
978 break;
979
980 case LROTATE_EXPR:
981 res = double_int_lrotate (op1, double_int_to_shwi (op2),
982 TYPE_PRECISION (type));
983 break;
984
985 case PLUS_EXPR:
986 overflow = add_double (op1.low, op1.high, op2.low, op2.high,
987 &res.low, &res.high);
988 break;
989
990 case MINUS_EXPR:
991 neg_double (op2.low, op2.high, &res.low, &res.high);
992 add_double (op1.low, op1.high, res.low, res.high,
993 &res.low, &res.high);
994 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
995 break;
996
997 case MULT_EXPR:
998 overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
999 &res.low, &res.high);
1000 break;
1001
1002 case MULT_HIGHPART_EXPR:
1003 /* ??? Need quad precision, or an additional shift operand
1004 to the multiply primitive, to handle very large highparts. */
1005 if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
1006 return NULL_TREE;
1007 tmp = double_int_mul (op1, op2);
1008 res = double_int_rshift (tmp, TYPE_PRECISION (type),
1009 TYPE_PRECISION (type), !uns);
1010 break;
1011
1012 case TRUNC_DIV_EXPR:
1013 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1014 case EXACT_DIV_EXPR:
1015 /* This is a shortcut for a common special case. */
1016 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1017 && !TREE_OVERFLOW (arg1)
1018 && !TREE_OVERFLOW (arg2)
1019 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1020 {
1021 if (code == CEIL_DIV_EXPR)
1022 op1.low += op2.low - 1;
1023
1024 res.low = op1.low / op2.low, res.high = 0;
1025 break;
1026 }
1027
1028 /* ... fall through ... */
1029
1030 case ROUND_DIV_EXPR:
1031 if (double_int_zero_p (op2))
1032 return NULL_TREE;
1033 if (double_int_one_p (op2))
1034 {
1035 res = op1;
1036 break;
1037 }
1038 if (double_int_equal_p (op1, op2)
1039 && ! double_int_zero_p (op1))
1040 {
1041 res = double_int_one;
1042 break;
1043 }
1044 overflow = div_and_round_double (code, uns,
1045 op1.low, op1.high, op2.low, op2.high,
1046 &res.low, &res.high,
1047 &tmp.low, &tmp.high);
1048 break;
1049
1050 case TRUNC_MOD_EXPR:
1051 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1052 /* This is a shortcut for a common special case. */
1053 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1054 && !TREE_OVERFLOW (arg1)
1055 && !TREE_OVERFLOW (arg2)
1056 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1057 {
1058 if (code == CEIL_MOD_EXPR)
1059 op1.low += op2.low - 1;
1060 res.low = op1.low % op2.low, res.high = 0;
1061 break;
1062 }
1063
1064 /* ... fall through ... */
1065
1066 case ROUND_MOD_EXPR:
1067 if (double_int_zero_p (op2))
1068 return NULL_TREE;
1069 overflow = div_and_round_double (code, uns,
1070 op1.low, op1.high, op2.low, op2.high,
1071 &tmp.low, &tmp.high,
1072 &res.low, &res.high);
1073 break;
1074
1075 case MIN_EXPR:
1076 res = double_int_min (op1, op2, uns);
1077 break;
1078
1079 case MAX_EXPR:
1080 res = double_int_max (op1, op2, uns);
1081 break;
1082
1083 default:
1084 return NULL_TREE;
1085 }
1086
1087 t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1088 (!uns && overflow)
1089 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1090
1091 return t;
1092 }
1093
1094 tree
1095 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1096 {
1097 return int_const_binop_1 (code, arg1, arg2, 1);
1098 }
1099
1100 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1101 constant. We assume ARG1 and ARG2 have the same data type, or at least
1102 are the same kind of constant and the same machine mode. Return zero if
1103 combining the constants is not allowed in the current operating mode. */
1104
1105 static tree
1106 const_binop (enum tree_code code, tree arg1, tree arg2)
1107 {
1108 /* Sanity check for the recursive cases. */
1109 if (!arg1 || !arg2)
1110 return NULL_TREE;
1111
1112 STRIP_NOPS (arg1);
1113 STRIP_NOPS (arg2);
1114
1115 if (TREE_CODE (arg1) == INTEGER_CST)
1116 return int_const_binop (code, arg1, arg2);
1117
1118 if (TREE_CODE (arg1) == REAL_CST)
1119 {
1120 enum machine_mode mode;
1121 REAL_VALUE_TYPE d1;
1122 REAL_VALUE_TYPE d2;
1123 REAL_VALUE_TYPE value;
1124 REAL_VALUE_TYPE result;
1125 bool inexact;
1126 tree t, type;
1127
1128 /* The following codes are handled by real_arithmetic. */
1129 switch (code)
1130 {
1131 case PLUS_EXPR:
1132 case MINUS_EXPR:
1133 case MULT_EXPR:
1134 case RDIV_EXPR:
1135 case MIN_EXPR:
1136 case MAX_EXPR:
1137 break;
1138
1139 default:
1140 return NULL_TREE;
1141 }
1142
1143 d1 = TREE_REAL_CST (arg1);
1144 d2 = TREE_REAL_CST (arg2);
1145
1146 type = TREE_TYPE (arg1);
1147 mode = TYPE_MODE (type);
1148
1149 /* Don't perform operation if we honor signaling NaNs and
1150 either operand is a NaN. */
1151 if (HONOR_SNANS (mode)
1152 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1153 return NULL_TREE;
1154
1155 /* Don't perform operation if it would raise a division
1156 by zero exception. */
1157 if (code == RDIV_EXPR
1158 && REAL_VALUES_EQUAL (d2, dconst0)
1159 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1160 return NULL_TREE;
1161
1162 /* If either operand is a NaN, just return it. Otherwise, set up
1163 for floating-point trap; we return an overflow. */
1164 if (REAL_VALUE_ISNAN (d1))
1165 return arg1;
1166 else if (REAL_VALUE_ISNAN (d2))
1167 return arg2;
1168
1169 inexact = real_arithmetic (&value, code, &d1, &d2);
1170 real_convert (&result, mode, &value);
1171
1172 /* Don't constant fold this floating point operation if
1173 the result has overflowed and flag_trapping_math. */
1174 if (flag_trapping_math
1175 && MODE_HAS_INFINITIES (mode)
1176 && REAL_VALUE_ISINF (result)
1177 && !REAL_VALUE_ISINF (d1)
1178 && !REAL_VALUE_ISINF (d2))
1179 return NULL_TREE;
1180
1181 /* Don't constant fold this floating point operation if the
1182 result may dependent upon the run-time rounding mode and
1183 flag_rounding_math is set, or if GCC's software emulation
1184 is unable to accurately represent the result. */
1185 if ((flag_rounding_math
1186 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1187 && (inexact || !real_identical (&result, &value)))
1188 return NULL_TREE;
1189
1190 t = build_real (type, result);
1191
1192 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1193 return t;
1194 }
1195
1196 if (TREE_CODE (arg1) == FIXED_CST)
1197 {
1198 FIXED_VALUE_TYPE f1;
1199 FIXED_VALUE_TYPE f2;
1200 FIXED_VALUE_TYPE result;
1201 tree t, type;
1202 int sat_p;
1203 bool overflow_p;
1204
1205 /* The following codes are handled by fixed_arithmetic. */
1206 switch (code)
1207 {
1208 case PLUS_EXPR:
1209 case MINUS_EXPR:
1210 case MULT_EXPR:
1211 case TRUNC_DIV_EXPR:
1212 f2 = TREE_FIXED_CST (arg2);
1213 break;
1214
1215 case LSHIFT_EXPR:
1216 case RSHIFT_EXPR:
1217 f2.data.high = TREE_INT_CST_HIGH (arg2);
1218 f2.data.low = TREE_INT_CST_LOW (arg2);
1219 f2.mode = SImode;
1220 break;
1221
1222 default:
1223 return NULL_TREE;
1224 }
1225
1226 f1 = TREE_FIXED_CST (arg1);
1227 type = TREE_TYPE (arg1);
1228 sat_p = TYPE_SATURATING (type);
1229 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1230 t = build_fixed (type, result);
1231 /* Propagate overflow flags. */
1232 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1233 TREE_OVERFLOW (t) = 1;
1234 return t;
1235 }
1236
1237 if (TREE_CODE (arg1) == COMPLEX_CST)
1238 {
1239 tree type = TREE_TYPE (arg1);
1240 tree r1 = TREE_REALPART (arg1);
1241 tree i1 = TREE_IMAGPART (arg1);
1242 tree r2 = TREE_REALPART (arg2);
1243 tree i2 = TREE_IMAGPART (arg2);
1244 tree real, imag;
1245
1246 switch (code)
1247 {
1248 case PLUS_EXPR:
1249 case MINUS_EXPR:
1250 real = const_binop (code, r1, r2);
1251 imag = const_binop (code, i1, i2);
1252 break;
1253
1254 case MULT_EXPR:
1255 if (COMPLEX_FLOAT_TYPE_P (type))
1256 return do_mpc_arg2 (arg1, arg2, type,
1257 /* do_nonfinite= */ folding_initializer,
1258 mpc_mul);
1259
1260 real = const_binop (MINUS_EXPR,
1261 const_binop (MULT_EXPR, r1, r2),
1262 const_binop (MULT_EXPR, i1, i2));
1263 imag = const_binop (PLUS_EXPR,
1264 const_binop (MULT_EXPR, r1, i2),
1265 const_binop (MULT_EXPR, i1, r2));
1266 break;
1267
1268 case RDIV_EXPR:
1269 if (COMPLEX_FLOAT_TYPE_P (type))
1270 return do_mpc_arg2 (arg1, arg2, type,
1271 /* do_nonfinite= */ folding_initializer,
1272 mpc_div);
1273 /* Fallthru ... */
1274 case TRUNC_DIV_EXPR:
1275 case CEIL_DIV_EXPR:
1276 case FLOOR_DIV_EXPR:
1277 case ROUND_DIV_EXPR:
1278 if (flag_complex_method == 0)
1279 {
1280 /* Keep this algorithm in sync with
1281 tree-complex.c:expand_complex_div_straight().
1282
1283 Expand complex division to scalars, straightforward algorithm.
1284 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1285 t = br*br + bi*bi
1286 */
1287 tree magsquared
1288 = const_binop (PLUS_EXPR,
1289 const_binop (MULT_EXPR, r2, r2),
1290 const_binop (MULT_EXPR, i2, i2));
1291 tree t1
1292 = const_binop (PLUS_EXPR,
1293 const_binop (MULT_EXPR, r1, r2),
1294 const_binop (MULT_EXPR, i1, i2));
1295 tree t2
1296 = const_binop (MINUS_EXPR,
1297 const_binop (MULT_EXPR, i1, r2),
1298 const_binop (MULT_EXPR, r1, i2));
1299
1300 real = const_binop (code, t1, magsquared);
1301 imag = const_binop (code, t2, magsquared);
1302 }
1303 else
1304 {
1305 /* Keep this algorithm in sync with
1306 tree-complex.c:expand_complex_div_wide().
1307
1308 Expand complex division to scalars, modified algorithm to minimize
1309 overflow with wide input ranges. */
1310 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1311 fold_abs_const (r2, TREE_TYPE (type)),
1312 fold_abs_const (i2, TREE_TYPE (type)));
1313
1314 if (integer_nonzerop (compare))
1315 {
1316 /* In the TRUE branch, we compute
1317 ratio = br/bi;
1318 div = (br * ratio) + bi;
1319 tr = (ar * ratio) + ai;
1320 ti = (ai * ratio) - ar;
1321 tr = tr / div;
1322 ti = ti / div; */
1323 tree ratio = const_binop (code, r2, i2);
1324 tree div = const_binop (PLUS_EXPR, i2,
1325 const_binop (MULT_EXPR, r2, ratio));
1326 real = const_binop (MULT_EXPR, r1, ratio);
1327 real = const_binop (PLUS_EXPR, real, i1);
1328 real = const_binop (code, real, div);
1329
1330 imag = const_binop (MULT_EXPR, i1, ratio);
1331 imag = const_binop (MINUS_EXPR, imag, r1);
1332 imag = const_binop (code, imag, div);
1333 }
1334 else
1335 {
1336 /* In the FALSE branch, we compute
1337 ratio = d/c;
1338 divisor = (d * ratio) + c;
1339 tr = (b * ratio) + a;
1340 ti = b - (a * ratio);
1341 tr = tr / div;
1342 ti = ti / div; */
1343 tree ratio = const_binop (code, i2, r2);
1344 tree div = const_binop (PLUS_EXPR, r2,
1345 const_binop (MULT_EXPR, i2, ratio));
1346
1347 real = const_binop (MULT_EXPR, i1, ratio);
1348 real = const_binop (PLUS_EXPR, real, r1);
1349 real = const_binop (code, real, div);
1350
1351 imag = const_binop (MULT_EXPR, r1, ratio);
1352 imag = const_binop (MINUS_EXPR, i1, imag);
1353 imag = const_binop (code, imag, div);
1354 }
1355 }
1356 break;
1357
1358 default:
1359 return NULL_TREE;
1360 }
1361
1362 if (real && imag)
1363 return build_complex (type, real, imag);
1364 }
1365
1366 if (TREE_CODE (arg1) == VECTOR_CST
1367 && TREE_CODE (arg2) == VECTOR_CST)
1368 {
1369 tree type = TREE_TYPE(arg1);
1370 int count = TYPE_VECTOR_SUBPARTS (type), i;
1371 tree *elts = XALLOCAVEC (tree, count);
1372
1373 for (i = 0; i < count; i++)
1374 {
1375 tree elem1 = VECTOR_CST_ELT (arg1, i);
1376 tree elem2 = VECTOR_CST_ELT (arg2, i);
1377
1378 elts[i] = const_binop (code, elem1, elem2);
1379
1380 /* It is possible that const_binop cannot handle the given
1381 code and return NULL_TREE */
1382 if(elts[i] == NULL_TREE)
1383 return NULL_TREE;
1384 }
1385
1386 return build_vector (type, elts);
1387 }
1388 return NULL_TREE;
1389 }
1390
1391 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1392 indicates which particular sizetype to create. */
1393
1394 tree
1395 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1396 {
1397 return build_int_cst (sizetype_tab[(int) kind], number);
1398 }
1399 \f
1400 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1401 is a tree code. The type of the result is taken from the operands.
1402 Both must be equivalent integer types, ala int_binop_types_match_p.
1403 If the operands are constant, so is the result. */
1404
1405 tree
1406 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1407 {
1408 tree type = TREE_TYPE (arg0);
1409
1410 if (arg0 == error_mark_node || arg1 == error_mark_node)
1411 return error_mark_node;
1412
1413 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1414 TREE_TYPE (arg1)));
1415
1416 /* Handle the special case of two integer constants faster. */
1417 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1418 {
1419 /* And some specific cases even faster than that. */
1420 if (code == PLUS_EXPR)
1421 {
1422 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1423 return arg1;
1424 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1425 return arg0;
1426 }
1427 else if (code == MINUS_EXPR)
1428 {
1429 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1430 return arg0;
1431 }
1432 else if (code == MULT_EXPR)
1433 {
1434 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1435 return arg1;
1436 }
1437
1438 /* Handle general case of two integer constants. For sizetype
1439 constant calculations we always want to know about overflow,
1440 even in the unsigned case. */
1441 return int_const_binop_1 (code, arg0, arg1, -1);
1442 }
1443
1444 return fold_build2_loc (loc, code, type, arg0, arg1);
1445 }
1446
1447 /* Given two values, either both of sizetype or both of bitsizetype,
1448 compute the difference between the two values. Return the value
1449 in signed type corresponding to the type of the operands. */
1450
1451 tree
1452 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1453 {
1454 tree type = TREE_TYPE (arg0);
1455 tree ctype;
1456
1457 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1458 TREE_TYPE (arg1)));
1459
1460 /* If the type is already signed, just do the simple thing. */
1461 if (!TYPE_UNSIGNED (type))
1462 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1463
1464 if (type == sizetype)
1465 ctype = ssizetype;
1466 else if (type == bitsizetype)
1467 ctype = sbitsizetype;
1468 else
1469 ctype = signed_type_for (type);
1470
1471 /* If either operand is not a constant, do the conversions to the signed
1472 type and subtract. The hardware will do the right thing with any
1473 overflow in the subtraction. */
1474 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1475 return size_binop_loc (loc, MINUS_EXPR,
1476 fold_convert_loc (loc, ctype, arg0),
1477 fold_convert_loc (loc, ctype, arg1));
1478
1479 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1480 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1481 overflow) and negate (which can't either). Special-case a result
1482 of zero while we're here. */
1483 if (tree_int_cst_equal (arg0, arg1))
1484 return build_int_cst (ctype, 0);
1485 else if (tree_int_cst_lt (arg1, arg0))
1486 return fold_convert_loc (loc, ctype,
1487 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1488 else
1489 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1490 fold_convert_loc (loc, ctype,
1491 size_binop_loc (loc,
1492 MINUS_EXPR,
1493 arg1, arg0)));
1494 }
1495 \f
1496 /* A subroutine of fold_convert_const handling conversions of an
1497 INTEGER_CST to another integer type. */
1498
1499 static tree
1500 fold_convert_const_int_from_int (tree type, const_tree arg1)
1501 {
1502 tree t;
1503
1504 /* Given an integer constant, make new constant with new type,
1505 appropriately sign-extended or truncated. */
1506 t = force_fit_type_double (type, tree_to_double_int (arg1),
1507 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1508 (TREE_INT_CST_HIGH (arg1) < 0
1509 && (TYPE_UNSIGNED (type)
1510 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1511 | TREE_OVERFLOW (arg1));
1512
1513 return t;
1514 }
1515
1516 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1517 to an integer type. */
1518
1519 static tree
1520 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1521 {
1522 int overflow = 0;
1523 tree t;
1524
1525 /* The following code implements the floating point to integer
1526 conversion rules required by the Java Language Specification,
1527 that IEEE NaNs are mapped to zero and values that overflow
1528 the target precision saturate, i.e. values greater than
1529 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1530 are mapped to INT_MIN. These semantics are allowed by the
1531 C and C++ standards that simply state that the behavior of
1532 FP-to-integer conversion is unspecified upon overflow. */
1533
1534 double_int val;
1535 REAL_VALUE_TYPE r;
1536 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1537
1538 switch (code)
1539 {
1540 case FIX_TRUNC_EXPR:
1541 real_trunc (&r, VOIDmode, &x);
1542 break;
1543
1544 default:
1545 gcc_unreachable ();
1546 }
1547
1548 /* If R is NaN, return zero and show we have an overflow. */
1549 if (REAL_VALUE_ISNAN (r))
1550 {
1551 overflow = 1;
1552 val = double_int_zero;
1553 }
1554
1555 /* See if R is less than the lower bound or greater than the
1556 upper bound. */
1557
1558 if (! overflow)
1559 {
1560 tree lt = TYPE_MIN_VALUE (type);
1561 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1562 if (REAL_VALUES_LESS (r, l))
1563 {
1564 overflow = 1;
1565 val = tree_to_double_int (lt);
1566 }
1567 }
1568
1569 if (! overflow)
1570 {
1571 tree ut = TYPE_MAX_VALUE (type);
1572 if (ut)
1573 {
1574 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1575 if (REAL_VALUES_LESS (u, r))
1576 {
1577 overflow = 1;
1578 val = tree_to_double_int (ut);
1579 }
1580 }
1581 }
1582
1583 if (! overflow)
1584 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1585
1586 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1587 return t;
1588 }
1589
1590 /* A subroutine of fold_convert_const handling conversions of a
1591 FIXED_CST to an integer type. */
1592
1593 static tree
1594 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1595 {
1596 tree t;
1597 double_int temp, temp_trunc;
1598 unsigned int mode;
1599
1600 /* Right shift FIXED_CST to temp by fbit. */
1601 temp = TREE_FIXED_CST (arg1).data;
1602 mode = TREE_FIXED_CST (arg1).mode;
1603 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1604 {
1605 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1606 HOST_BITS_PER_DOUBLE_INT,
1607 SIGNED_FIXED_POINT_MODE_P (mode));
1608
1609 /* Left shift temp to temp_trunc by fbit. */
1610 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1611 HOST_BITS_PER_DOUBLE_INT,
1612 SIGNED_FIXED_POINT_MODE_P (mode));
1613 }
1614 else
1615 {
1616 temp = double_int_zero;
1617 temp_trunc = double_int_zero;
1618 }
1619
1620 /* If FIXED_CST is negative, we need to round the value toward 0.
1621 By checking if the fractional bits are not zero to add 1 to temp. */
1622 if (SIGNED_FIXED_POINT_MODE_P (mode)
1623 && double_int_negative_p (temp_trunc)
1624 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1625 temp = double_int_add (temp, double_int_one);
1626
1627 /* Given a fixed-point constant, make new constant with new type,
1628 appropriately sign-extended or truncated. */
1629 t = force_fit_type_double (type, temp, -1,
1630 (double_int_negative_p (temp)
1631 && (TYPE_UNSIGNED (type)
1632 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1633 | TREE_OVERFLOW (arg1));
1634
1635 return t;
1636 }
1637
1638 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1639 to another floating point type. */
1640
1641 static tree
1642 fold_convert_const_real_from_real (tree type, const_tree arg1)
1643 {
1644 REAL_VALUE_TYPE value;
1645 tree t;
1646
1647 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1648 t = build_real (type, value);
1649
1650 /* If converting an infinity or NAN to a representation that doesn't
1651 have one, set the overflow bit so that we can produce some kind of
1652 error message at the appropriate point if necessary. It's not the
1653 most user-friendly message, but it's better than nothing. */
1654 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1655 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1656 TREE_OVERFLOW (t) = 1;
1657 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1658 && !MODE_HAS_NANS (TYPE_MODE (type)))
1659 TREE_OVERFLOW (t) = 1;
1660 /* Regular overflow, conversion produced an infinity in a mode that
1661 can't represent them. */
1662 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1663 && REAL_VALUE_ISINF (value)
1664 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1665 TREE_OVERFLOW (t) = 1;
1666 else
1667 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1668 return t;
1669 }
1670
1671 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1672 to a floating point type. */
1673
1674 static tree
1675 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1676 {
1677 REAL_VALUE_TYPE value;
1678 tree t;
1679
1680 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1681 t = build_real (type, value);
1682
1683 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1684 return t;
1685 }
1686
1687 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1688 to another fixed-point type. */
1689
1690 static tree
1691 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1692 {
1693 FIXED_VALUE_TYPE value;
1694 tree t;
1695 bool overflow_p;
1696
1697 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1698 TYPE_SATURATING (type));
1699 t = build_fixed (type, value);
1700
1701 /* Propagate overflow flags. */
1702 if (overflow_p | TREE_OVERFLOW (arg1))
1703 TREE_OVERFLOW (t) = 1;
1704 return t;
1705 }
1706
1707 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1708 to a fixed-point type. */
1709
1710 static tree
1711 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1712 {
1713 FIXED_VALUE_TYPE value;
1714 tree t;
1715 bool overflow_p;
1716
1717 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1718 TREE_INT_CST (arg1),
1719 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1720 TYPE_SATURATING (type));
1721 t = build_fixed (type, value);
1722
1723 /* Propagate overflow flags. */
1724 if (overflow_p | TREE_OVERFLOW (arg1))
1725 TREE_OVERFLOW (t) = 1;
1726 return t;
1727 }
1728
1729 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1730 to a fixed-point type. */
1731
1732 static tree
1733 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1734 {
1735 FIXED_VALUE_TYPE value;
1736 tree t;
1737 bool overflow_p;
1738
1739 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1740 &TREE_REAL_CST (arg1),
1741 TYPE_SATURATING (type));
1742 t = build_fixed (type, value);
1743
1744 /* Propagate overflow flags. */
1745 if (overflow_p | TREE_OVERFLOW (arg1))
1746 TREE_OVERFLOW (t) = 1;
1747 return t;
1748 }
1749
1750 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1751 type TYPE. If no simplification can be done return NULL_TREE. */
1752
1753 static tree
1754 fold_convert_const (enum tree_code code, tree type, tree arg1)
1755 {
1756 if (TREE_TYPE (arg1) == type)
1757 return arg1;
1758
1759 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1760 || TREE_CODE (type) == OFFSET_TYPE)
1761 {
1762 if (TREE_CODE (arg1) == INTEGER_CST)
1763 return fold_convert_const_int_from_int (type, arg1);
1764 else if (TREE_CODE (arg1) == REAL_CST)
1765 return fold_convert_const_int_from_real (code, type, arg1);
1766 else if (TREE_CODE (arg1) == FIXED_CST)
1767 return fold_convert_const_int_from_fixed (type, arg1);
1768 }
1769 else if (TREE_CODE (type) == REAL_TYPE)
1770 {
1771 if (TREE_CODE (arg1) == INTEGER_CST)
1772 return build_real_from_int_cst (type, arg1);
1773 else if (TREE_CODE (arg1) == REAL_CST)
1774 return fold_convert_const_real_from_real (type, arg1);
1775 else if (TREE_CODE (arg1) == FIXED_CST)
1776 return fold_convert_const_real_from_fixed (type, arg1);
1777 }
1778 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1779 {
1780 if (TREE_CODE (arg1) == FIXED_CST)
1781 return fold_convert_const_fixed_from_fixed (type, arg1);
1782 else if (TREE_CODE (arg1) == INTEGER_CST)
1783 return fold_convert_const_fixed_from_int (type, arg1);
1784 else if (TREE_CODE (arg1) == REAL_CST)
1785 return fold_convert_const_fixed_from_real (type, arg1);
1786 }
1787 return NULL_TREE;
1788 }
1789
1790 /* Construct a vector of zero elements of vector type TYPE. */
1791
1792 static tree
1793 build_zero_vector (tree type)
1794 {
1795 tree t;
1796
1797 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1798 return build_vector_from_val (type, t);
1799 }
1800
1801 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1802
1803 bool
1804 fold_convertible_p (const_tree type, const_tree arg)
1805 {
1806 tree orig = TREE_TYPE (arg);
1807
1808 if (type == orig)
1809 return true;
1810
1811 if (TREE_CODE (arg) == ERROR_MARK
1812 || TREE_CODE (type) == ERROR_MARK
1813 || TREE_CODE (orig) == ERROR_MARK)
1814 return false;
1815
1816 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1817 return true;
1818
1819 switch (TREE_CODE (type))
1820 {
1821 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1822 case POINTER_TYPE: case REFERENCE_TYPE:
1823 case OFFSET_TYPE:
1824 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1825 || TREE_CODE (orig) == OFFSET_TYPE)
1826 return true;
1827 return (TREE_CODE (orig) == VECTOR_TYPE
1828 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1829
1830 case REAL_TYPE:
1831 case FIXED_POINT_TYPE:
1832 case COMPLEX_TYPE:
1833 case VECTOR_TYPE:
1834 case VOID_TYPE:
1835 return TREE_CODE (type) == TREE_CODE (orig);
1836
1837 default:
1838 return false;
1839 }
1840 }
1841
1842 /* Convert expression ARG to type TYPE. Used by the middle-end for
1843 simple conversions in preference to calling the front-end's convert. */
1844
1845 tree
1846 fold_convert_loc (location_t loc, tree type, tree arg)
1847 {
1848 tree orig = TREE_TYPE (arg);
1849 tree tem;
1850
1851 if (type == orig)
1852 return arg;
1853
1854 if (TREE_CODE (arg) == ERROR_MARK
1855 || TREE_CODE (type) == ERROR_MARK
1856 || TREE_CODE (orig) == ERROR_MARK)
1857 return error_mark_node;
1858
1859 switch (TREE_CODE (type))
1860 {
1861 case POINTER_TYPE:
1862 case REFERENCE_TYPE:
1863 /* Handle conversions between pointers to different address spaces. */
1864 if (POINTER_TYPE_P (orig)
1865 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1866 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1867 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1868 /* fall through */
1869
1870 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1871 case OFFSET_TYPE:
1872 if (TREE_CODE (arg) == INTEGER_CST)
1873 {
1874 tem = fold_convert_const (NOP_EXPR, type, arg);
1875 if (tem != NULL_TREE)
1876 return tem;
1877 }
1878 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1879 || TREE_CODE (orig) == OFFSET_TYPE)
1880 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1881 if (TREE_CODE (orig) == COMPLEX_TYPE)
1882 return fold_convert_loc (loc, type,
1883 fold_build1_loc (loc, REALPART_EXPR,
1884 TREE_TYPE (orig), arg));
1885 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1886 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1887 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1888
1889 case REAL_TYPE:
1890 if (TREE_CODE (arg) == INTEGER_CST)
1891 {
1892 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1893 if (tem != NULL_TREE)
1894 return tem;
1895 }
1896 else if (TREE_CODE (arg) == REAL_CST)
1897 {
1898 tem = fold_convert_const (NOP_EXPR, type, arg);
1899 if (tem != NULL_TREE)
1900 return tem;
1901 }
1902 else if (TREE_CODE (arg) == FIXED_CST)
1903 {
1904 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1905 if (tem != NULL_TREE)
1906 return tem;
1907 }
1908
1909 switch (TREE_CODE (orig))
1910 {
1911 case INTEGER_TYPE:
1912 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1913 case POINTER_TYPE: case REFERENCE_TYPE:
1914 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1915
1916 case REAL_TYPE:
1917 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1918
1919 case FIXED_POINT_TYPE:
1920 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1921
1922 case COMPLEX_TYPE:
1923 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1924 return fold_convert_loc (loc, type, tem);
1925
1926 default:
1927 gcc_unreachable ();
1928 }
1929
1930 case FIXED_POINT_TYPE:
1931 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1932 || TREE_CODE (arg) == REAL_CST)
1933 {
1934 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1935 if (tem != NULL_TREE)
1936 goto fold_convert_exit;
1937 }
1938
1939 switch (TREE_CODE (orig))
1940 {
1941 case FIXED_POINT_TYPE:
1942 case INTEGER_TYPE:
1943 case ENUMERAL_TYPE:
1944 case BOOLEAN_TYPE:
1945 case REAL_TYPE:
1946 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1947
1948 case COMPLEX_TYPE:
1949 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1950 return fold_convert_loc (loc, type, tem);
1951
1952 default:
1953 gcc_unreachable ();
1954 }
1955
1956 case COMPLEX_TYPE:
1957 switch (TREE_CODE (orig))
1958 {
1959 case INTEGER_TYPE:
1960 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1961 case POINTER_TYPE: case REFERENCE_TYPE:
1962 case REAL_TYPE:
1963 case FIXED_POINT_TYPE:
1964 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1965 fold_convert_loc (loc, TREE_TYPE (type), arg),
1966 fold_convert_loc (loc, TREE_TYPE (type),
1967 integer_zero_node));
1968 case COMPLEX_TYPE:
1969 {
1970 tree rpart, ipart;
1971
1972 if (TREE_CODE (arg) == COMPLEX_EXPR)
1973 {
1974 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1975 TREE_OPERAND (arg, 0));
1976 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1977 TREE_OPERAND (arg, 1));
1978 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1979 }
1980
1981 arg = save_expr (arg);
1982 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1983 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1984 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1985 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1986 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1987 }
1988
1989 default:
1990 gcc_unreachable ();
1991 }
1992
1993 case VECTOR_TYPE:
1994 if (integer_zerop (arg))
1995 return build_zero_vector (type);
1996 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1997 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1998 || TREE_CODE (orig) == VECTOR_TYPE);
1999 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2000
2001 case VOID_TYPE:
2002 tem = fold_ignored_result (arg);
2003 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2004
2005 default:
2006 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2007 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2008 gcc_unreachable ();
2009 }
2010 fold_convert_exit:
2011 protected_set_expr_location_unshare (tem, loc);
2012 return tem;
2013 }
2014 \f
2015 /* Return false if expr can be assumed not to be an lvalue, true
2016 otherwise. */
2017
2018 static bool
2019 maybe_lvalue_p (const_tree x)
2020 {
2021 /* We only need to wrap lvalue tree codes. */
2022 switch (TREE_CODE (x))
2023 {
2024 case VAR_DECL:
2025 case PARM_DECL:
2026 case RESULT_DECL:
2027 case LABEL_DECL:
2028 case FUNCTION_DECL:
2029 case SSA_NAME:
2030
2031 case COMPONENT_REF:
2032 case MEM_REF:
2033 case INDIRECT_REF:
2034 case ARRAY_REF:
2035 case ARRAY_RANGE_REF:
2036 case BIT_FIELD_REF:
2037 case OBJ_TYPE_REF:
2038
2039 case REALPART_EXPR:
2040 case IMAGPART_EXPR:
2041 case PREINCREMENT_EXPR:
2042 case PREDECREMENT_EXPR:
2043 case SAVE_EXPR:
2044 case TRY_CATCH_EXPR:
2045 case WITH_CLEANUP_EXPR:
2046 case COMPOUND_EXPR:
2047 case MODIFY_EXPR:
2048 case TARGET_EXPR:
2049 case COND_EXPR:
2050 case BIND_EXPR:
2051 break;
2052
2053 default:
2054 /* Assume the worst for front-end tree codes. */
2055 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2056 break;
2057 return false;
2058 }
2059
2060 return true;
2061 }
2062
2063 /* Return an expr equal to X but certainly not valid as an lvalue. */
2064
2065 tree
2066 non_lvalue_loc (location_t loc, tree x)
2067 {
2068 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2069 us. */
2070 if (in_gimple_form)
2071 return x;
2072
2073 if (! maybe_lvalue_p (x))
2074 return x;
2075 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2076 }
2077
2078 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2079 Zero means allow extended lvalues. */
2080
2081 int pedantic_lvalues;
2082
2083 /* When pedantic, return an expr equal to X but certainly not valid as a
2084 pedantic lvalue. Otherwise, return X. */
2085
2086 static tree
2087 pedantic_non_lvalue_loc (location_t loc, tree x)
2088 {
2089 if (pedantic_lvalues)
2090 return non_lvalue_loc (loc, x);
2091
2092 return protected_set_expr_location_unshare (x, loc);
2093 }
2094 \f
2095 /* Given a tree comparison code, return the code that is the logical inverse.
2096 It is generally not safe to do this for floating-point comparisons, except
2097 for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case. */
2098
2099 enum tree_code
2100 invert_tree_comparison (enum tree_code code, bool honor_nans)
2101 {
2102 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR)
2103 return ERROR_MARK;
2104
2105 switch (code)
2106 {
2107 case EQ_EXPR:
2108 return NE_EXPR;
2109 case NE_EXPR:
2110 return EQ_EXPR;
2111 case GT_EXPR:
2112 return honor_nans ? UNLE_EXPR : LE_EXPR;
2113 case GE_EXPR:
2114 return honor_nans ? UNLT_EXPR : LT_EXPR;
2115 case LT_EXPR:
2116 return honor_nans ? UNGE_EXPR : GE_EXPR;
2117 case LE_EXPR:
2118 return honor_nans ? UNGT_EXPR : GT_EXPR;
2119 case LTGT_EXPR:
2120 return UNEQ_EXPR;
2121 case UNEQ_EXPR:
2122 return LTGT_EXPR;
2123 case UNGT_EXPR:
2124 return LE_EXPR;
2125 case UNGE_EXPR:
2126 return LT_EXPR;
2127 case UNLT_EXPR:
2128 return GE_EXPR;
2129 case UNLE_EXPR:
2130 return GT_EXPR;
2131 case ORDERED_EXPR:
2132 return UNORDERED_EXPR;
2133 case UNORDERED_EXPR:
2134 return ORDERED_EXPR;
2135 default:
2136 gcc_unreachable ();
2137 }
2138 }
2139
2140 /* Similar, but return the comparison that results if the operands are
2141 swapped. This is safe for floating-point. */
2142
2143 enum tree_code
2144 swap_tree_comparison (enum tree_code code)
2145 {
2146 switch (code)
2147 {
2148 case EQ_EXPR:
2149 case NE_EXPR:
2150 case ORDERED_EXPR:
2151 case UNORDERED_EXPR:
2152 case LTGT_EXPR:
2153 case UNEQ_EXPR:
2154 return code;
2155 case GT_EXPR:
2156 return LT_EXPR;
2157 case GE_EXPR:
2158 return LE_EXPR;
2159 case LT_EXPR:
2160 return GT_EXPR;
2161 case LE_EXPR:
2162 return GE_EXPR;
2163 case UNGT_EXPR:
2164 return UNLT_EXPR;
2165 case UNGE_EXPR:
2166 return UNLE_EXPR;
2167 case UNLT_EXPR:
2168 return UNGT_EXPR;
2169 case UNLE_EXPR:
2170 return UNGE_EXPR;
2171 default:
2172 gcc_unreachable ();
2173 }
2174 }
2175
2176
2177 /* Convert a comparison tree code from an enum tree_code representation
2178 into a compcode bit-based encoding. This function is the inverse of
2179 compcode_to_comparison. */
2180
2181 static enum comparison_code
2182 comparison_to_compcode (enum tree_code code)
2183 {
2184 switch (code)
2185 {
2186 case LT_EXPR:
2187 return COMPCODE_LT;
2188 case EQ_EXPR:
2189 return COMPCODE_EQ;
2190 case LE_EXPR:
2191 return COMPCODE_LE;
2192 case GT_EXPR:
2193 return COMPCODE_GT;
2194 case NE_EXPR:
2195 return COMPCODE_NE;
2196 case GE_EXPR:
2197 return COMPCODE_GE;
2198 case ORDERED_EXPR:
2199 return COMPCODE_ORD;
2200 case UNORDERED_EXPR:
2201 return COMPCODE_UNORD;
2202 case UNLT_EXPR:
2203 return COMPCODE_UNLT;
2204 case UNEQ_EXPR:
2205 return COMPCODE_UNEQ;
2206 case UNLE_EXPR:
2207 return COMPCODE_UNLE;
2208 case UNGT_EXPR:
2209 return COMPCODE_UNGT;
2210 case LTGT_EXPR:
2211 return COMPCODE_LTGT;
2212 case UNGE_EXPR:
2213 return COMPCODE_UNGE;
2214 default:
2215 gcc_unreachable ();
2216 }
2217 }
2218
2219 /* Convert a compcode bit-based encoding of a comparison operator back
2220 to GCC's enum tree_code representation. This function is the
2221 inverse of comparison_to_compcode. */
2222
2223 static enum tree_code
2224 compcode_to_comparison (enum comparison_code code)
2225 {
2226 switch (code)
2227 {
2228 case COMPCODE_LT:
2229 return LT_EXPR;
2230 case COMPCODE_EQ:
2231 return EQ_EXPR;
2232 case COMPCODE_LE:
2233 return LE_EXPR;
2234 case COMPCODE_GT:
2235 return GT_EXPR;
2236 case COMPCODE_NE:
2237 return NE_EXPR;
2238 case COMPCODE_GE:
2239 return GE_EXPR;
2240 case COMPCODE_ORD:
2241 return ORDERED_EXPR;
2242 case COMPCODE_UNORD:
2243 return UNORDERED_EXPR;
2244 case COMPCODE_UNLT:
2245 return UNLT_EXPR;
2246 case COMPCODE_UNEQ:
2247 return UNEQ_EXPR;
2248 case COMPCODE_UNLE:
2249 return UNLE_EXPR;
2250 case COMPCODE_UNGT:
2251 return UNGT_EXPR;
2252 case COMPCODE_LTGT:
2253 return LTGT_EXPR;
2254 case COMPCODE_UNGE:
2255 return UNGE_EXPR;
2256 default:
2257 gcc_unreachable ();
2258 }
2259 }
2260
2261 /* Return a tree for the comparison which is the combination of
2262 doing the AND or OR (depending on CODE) of the two operations LCODE
2263 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2264 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2265 if this makes the transformation invalid. */
2266
2267 tree
2268 combine_comparisons (location_t loc,
2269 enum tree_code code, enum tree_code lcode,
2270 enum tree_code rcode, tree truth_type,
2271 tree ll_arg, tree lr_arg)
2272 {
2273 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2274 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2275 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2276 int compcode;
2277
2278 switch (code)
2279 {
2280 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2281 compcode = lcompcode & rcompcode;
2282 break;
2283
2284 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2285 compcode = lcompcode | rcompcode;
2286 break;
2287
2288 default:
2289 return NULL_TREE;
2290 }
2291
2292 if (!honor_nans)
2293 {
2294 /* Eliminate unordered comparisons, as well as LTGT and ORD
2295 which are not used unless the mode has NaNs. */
2296 compcode &= ~COMPCODE_UNORD;
2297 if (compcode == COMPCODE_LTGT)
2298 compcode = COMPCODE_NE;
2299 else if (compcode == COMPCODE_ORD)
2300 compcode = COMPCODE_TRUE;
2301 }
2302 else if (flag_trapping_math)
2303 {
2304 /* Check that the original operation and the optimized ones will trap
2305 under the same condition. */
2306 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2307 && (lcompcode != COMPCODE_EQ)
2308 && (lcompcode != COMPCODE_ORD);
2309 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2310 && (rcompcode != COMPCODE_EQ)
2311 && (rcompcode != COMPCODE_ORD);
2312 bool trap = (compcode & COMPCODE_UNORD) == 0
2313 && (compcode != COMPCODE_EQ)
2314 && (compcode != COMPCODE_ORD);
2315
2316 /* In a short-circuited boolean expression the LHS might be
2317 such that the RHS, if evaluated, will never trap. For
2318 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2319 if neither x nor y is NaN. (This is a mixed blessing: for
2320 example, the expression above will never trap, hence
2321 optimizing it to x < y would be invalid). */
2322 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2323 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2324 rtrap = false;
2325
2326 /* If the comparison was short-circuited, and only the RHS
2327 trapped, we may now generate a spurious trap. */
2328 if (rtrap && !ltrap
2329 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2330 return NULL_TREE;
2331
2332 /* If we changed the conditions that cause a trap, we lose. */
2333 if ((ltrap || rtrap) != trap)
2334 return NULL_TREE;
2335 }
2336
2337 if (compcode == COMPCODE_TRUE)
2338 return constant_boolean_node (true, truth_type);
2339 else if (compcode == COMPCODE_FALSE)
2340 return constant_boolean_node (false, truth_type);
2341 else
2342 {
2343 enum tree_code tcode;
2344
2345 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2346 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2347 }
2348 }
2349 \f
2350 /* Return nonzero if two operands (typically of the same tree node)
2351 are necessarily equal. If either argument has side-effects this
2352 function returns zero. FLAGS modifies behavior as follows:
2353
2354 If OEP_ONLY_CONST is set, only return nonzero for constants.
2355 This function tests whether the operands are indistinguishable;
2356 it does not test whether they are equal using C's == operation.
2357 The distinction is important for IEEE floating point, because
2358 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2359 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2360
2361 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2362 even though it may hold multiple values during a function.
2363 This is because a GCC tree node guarantees that nothing else is
2364 executed between the evaluation of its "operands" (which may often
2365 be evaluated in arbitrary order). Hence if the operands themselves
2366 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2367 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2368 unset means assuming isochronic (or instantaneous) tree equivalence.
2369 Unless comparing arbitrary expression trees, such as from different
2370 statements, this flag can usually be left unset.
2371
2372 If OEP_PURE_SAME is set, then pure functions with identical arguments
2373 are considered the same. It is used when the caller has other ways
2374 to ensure that global memory is unchanged in between. */
2375
2376 int
2377 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2378 {
2379 /* If either is ERROR_MARK, they aren't equal. */
2380 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2381 || TREE_TYPE (arg0) == error_mark_node
2382 || TREE_TYPE (arg1) == error_mark_node)
2383 return 0;
2384
2385 /* Similar, if either does not have a type (like a released SSA name),
2386 they aren't equal. */
2387 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2388 return 0;
2389
2390 /* Check equality of integer constants before bailing out due to
2391 precision differences. */
2392 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2393 return tree_int_cst_equal (arg0, arg1);
2394
2395 /* If both types don't have the same signedness, then we can't consider
2396 them equal. We must check this before the STRIP_NOPS calls
2397 because they may change the signedness of the arguments. As pointers
2398 strictly don't have a signedness, require either two pointers or
2399 two non-pointers as well. */
2400 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2401 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2402 return 0;
2403
2404 /* We cannot consider pointers to different address space equal. */
2405 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2406 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2407 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2408 return 0;
2409
2410 /* If both types don't have the same precision, then it is not safe
2411 to strip NOPs. */
2412 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2413 return 0;
2414
2415 STRIP_NOPS (arg0);
2416 STRIP_NOPS (arg1);
2417
2418 /* In case both args are comparisons but with different comparison
2419 code, try to swap the comparison operands of one arg to produce
2420 a match and compare that variant. */
2421 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2422 && COMPARISON_CLASS_P (arg0)
2423 && COMPARISON_CLASS_P (arg1))
2424 {
2425 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2426
2427 if (TREE_CODE (arg0) == swap_code)
2428 return operand_equal_p (TREE_OPERAND (arg0, 0),
2429 TREE_OPERAND (arg1, 1), flags)
2430 && operand_equal_p (TREE_OPERAND (arg0, 1),
2431 TREE_OPERAND (arg1, 0), flags);
2432 }
2433
2434 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2435 /* This is needed for conversions and for COMPONENT_REF.
2436 Might as well play it safe and always test this. */
2437 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2438 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2439 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2440 return 0;
2441
2442 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2443 We don't care about side effects in that case because the SAVE_EXPR
2444 takes care of that for us. In all other cases, two expressions are
2445 equal if they have no side effects. If we have two identical
2446 expressions with side effects that should be treated the same due
2447 to the only side effects being identical SAVE_EXPR's, that will
2448 be detected in the recursive calls below.
2449 If we are taking an invariant address of two identical objects
2450 they are necessarily equal as well. */
2451 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2452 && (TREE_CODE (arg0) == SAVE_EXPR
2453 || (flags & OEP_CONSTANT_ADDRESS_OF)
2454 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2455 return 1;
2456
2457 /* Next handle constant cases, those for which we can return 1 even
2458 if ONLY_CONST is set. */
2459 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2460 switch (TREE_CODE (arg0))
2461 {
2462 case INTEGER_CST:
2463 return tree_int_cst_equal (arg0, arg1);
2464
2465 case FIXED_CST:
2466 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2467 TREE_FIXED_CST (arg1));
2468
2469 case REAL_CST:
2470 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2471 TREE_REAL_CST (arg1)))
2472 return 1;
2473
2474
2475 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2476 {
2477 /* If we do not distinguish between signed and unsigned zero,
2478 consider them equal. */
2479 if (real_zerop (arg0) && real_zerop (arg1))
2480 return 1;
2481 }
2482 return 0;
2483
2484 case VECTOR_CST:
2485 {
2486 unsigned i;
2487
2488 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2489 return 0;
2490
2491 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2492 {
2493 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2494 VECTOR_CST_ELT (arg1, i), flags))
2495 return 0;
2496 }
2497 return 1;
2498 }
2499
2500 case COMPLEX_CST:
2501 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2502 flags)
2503 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2504 flags));
2505
2506 case STRING_CST:
2507 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2508 && ! memcmp (TREE_STRING_POINTER (arg0),
2509 TREE_STRING_POINTER (arg1),
2510 TREE_STRING_LENGTH (arg0)));
2511
2512 case ADDR_EXPR:
2513 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2514 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2515 ? OEP_CONSTANT_ADDRESS_OF : 0);
2516 default:
2517 break;
2518 }
2519
2520 if (flags & OEP_ONLY_CONST)
2521 return 0;
2522
2523 /* Define macros to test an operand from arg0 and arg1 for equality and a
2524 variant that allows null and views null as being different from any
2525 non-null value. In the latter case, if either is null, the both
2526 must be; otherwise, do the normal comparison. */
2527 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2528 TREE_OPERAND (arg1, N), flags)
2529
2530 #define OP_SAME_WITH_NULL(N) \
2531 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2532 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2533
2534 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2535 {
2536 case tcc_unary:
2537 /* Two conversions are equal only if signedness and modes match. */
2538 switch (TREE_CODE (arg0))
2539 {
2540 CASE_CONVERT:
2541 case FIX_TRUNC_EXPR:
2542 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2543 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2544 return 0;
2545 break;
2546 default:
2547 break;
2548 }
2549
2550 return OP_SAME (0);
2551
2552
2553 case tcc_comparison:
2554 case tcc_binary:
2555 if (OP_SAME (0) && OP_SAME (1))
2556 return 1;
2557
2558 /* For commutative ops, allow the other order. */
2559 return (commutative_tree_code (TREE_CODE (arg0))
2560 && operand_equal_p (TREE_OPERAND (arg0, 0),
2561 TREE_OPERAND (arg1, 1), flags)
2562 && operand_equal_p (TREE_OPERAND (arg0, 1),
2563 TREE_OPERAND (arg1, 0), flags));
2564
2565 case tcc_reference:
2566 /* If either of the pointer (or reference) expressions we are
2567 dereferencing contain a side effect, these cannot be equal. */
2568 if (TREE_SIDE_EFFECTS (arg0)
2569 || TREE_SIDE_EFFECTS (arg1))
2570 return 0;
2571
2572 switch (TREE_CODE (arg0))
2573 {
2574 case INDIRECT_REF:
2575 case REALPART_EXPR:
2576 case IMAGPART_EXPR:
2577 return OP_SAME (0);
2578
2579 case TARGET_MEM_REF:
2580 /* Require equal extra operands and then fall through to MEM_REF
2581 handling of the two common operands. */
2582 if (!OP_SAME_WITH_NULL (2)
2583 || !OP_SAME_WITH_NULL (3)
2584 || !OP_SAME_WITH_NULL (4))
2585 return 0;
2586 /* Fallthru. */
2587 case MEM_REF:
2588 /* Require equal access sizes, and similar pointer types.
2589 We can have incomplete types for array references of
2590 variable-sized arrays from the Fortran frontent
2591 though. */
2592 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2593 || (TYPE_SIZE (TREE_TYPE (arg0))
2594 && TYPE_SIZE (TREE_TYPE (arg1))
2595 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2596 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2597 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2598 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2599 && OP_SAME (0) && OP_SAME (1));
2600
2601 case ARRAY_REF:
2602 case ARRAY_RANGE_REF:
2603 /* Operands 2 and 3 may be null.
2604 Compare the array index by value if it is constant first as we
2605 may have different types but same value here. */
2606 return (OP_SAME (0)
2607 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2608 TREE_OPERAND (arg1, 1))
2609 || OP_SAME (1))
2610 && OP_SAME_WITH_NULL (2)
2611 && OP_SAME_WITH_NULL (3));
2612
2613 case COMPONENT_REF:
2614 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2615 may be NULL when we're called to compare MEM_EXPRs. */
2616 return OP_SAME_WITH_NULL (0)
2617 && OP_SAME (1)
2618 && OP_SAME_WITH_NULL (2);
2619
2620 case BIT_FIELD_REF:
2621 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2622
2623 default:
2624 return 0;
2625 }
2626
2627 case tcc_expression:
2628 switch (TREE_CODE (arg0))
2629 {
2630 case ADDR_EXPR:
2631 case TRUTH_NOT_EXPR:
2632 return OP_SAME (0);
2633
2634 case TRUTH_ANDIF_EXPR:
2635 case TRUTH_ORIF_EXPR:
2636 return OP_SAME (0) && OP_SAME (1);
2637
2638 case FMA_EXPR:
2639 case WIDEN_MULT_PLUS_EXPR:
2640 case WIDEN_MULT_MINUS_EXPR:
2641 if (!OP_SAME (2))
2642 return 0;
2643 /* The multiplcation operands are commutative. */
2644 /* FALLTHRU */
2645
2646 case TRUTH_AND_EXPR:
2647 case TRUTH_OR_EXPR:
2648 case TRUTH_XOR_EXPR:
2649 if (OP_SAME (0) && OP_SAME (1))
2650 return 1;
2651
2652 /* Otherwise take into account this is a commutative operation. */
2653 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2654 TREE_OPERAND (arg1, 1), flags)
2655 && operand_equal_p (TREE_OPERAND (arg0, 1),
2656 TREE_OPERAND (arg1, 0), flags));
2657
2658 case COND_EXPR:
2659 case VEC_COND_EXPR:
2660 case DOT_PROD_EXPR:
2661 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2662
2663 default:
2664 return 0;
2665 }
2666
2667 case tcc_vl_exp:
2668 switch (TREE_CODE (arg0))
2669 {
2670 case CALL_EXPR:
2671 /* If the CALL_EXPRs call different functions, then they
2672 clearly can not be equal. */
2673 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2674 flags))
2675 return 0;
2676
2677 {
2678 unsigned int cef = call_expr_flags (arg0);
2679 if (flags & OEP_PURE_SAME)
2680 cef &= ECF_CONST | ECF_PURE;
2681 else
2682 cef &= ECF_CONST;
2683 if (!cef)
2684 return 0;
2685 }
2686
2687 /* Now see if all the arguments are the same. */
2688 {
2689 const_call_expr_arg_iterator iter0, iter1;
2690 const_tree a0, a1;
2691 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2692 a1 = first_const_call_expr_arg (arg1, &iter1);
2693 a0 && a1;
2694 a0 = next_const_call_expr_arg (&iter0),
2695 a1 = next_const_call_expr_arg (&iter1))
2696 if (! operand_equal_p (a0, a1, flags))
2697 return 0;
2698
2699 /* If we get here and both argument lists are exhausted
2700 then the CALL_EXPRs are equal. */
2701 return ! (a0 || a1);
2702 }
2703 default:
2704 return 0;
2705 }
2706
2707 case tcc_declaration:
2708 /* Consider __builtin_sqrt equal to sqrt. */
2709 return (TREE_CODE (arg0) == FUNCTION_DECL
2710 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2711 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2712 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2713
2714 default:
2715 return 0;
2716 }
2717
2718 #undef OP_SAME
2719 #undef OP_SAME_WITH_NULL
2720 }
2721 \f
2722 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2723 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2724
2725 When in doubt, return 0. */
2726
2727 static int
2728 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2729 {
2730 int unsignedp1, unsignedpo;
2731 tree primarg0, primarg1, primother;
2732 unsigned int correct_width;
2733
2734 if (operand_equal_p (arg0, arg1, 0))
2735 return 1;
2736
2737 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2738 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2739 return 0;
2740
2741 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2742 and see if the inner values are the same. This removes any
2743 signedness comparison, which doesn't matter here. */
2744 primarg0 = arg0, primarg1 = arg1;
2745 STRIP_NOPS (primarg0);
2746 STRIP_NOPS (primarg1);
2747 if (operand_equal_p (primarg0, primarg1, 0))
2748 return 1;
2749
2750 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2751 actual comparison operand, ARG0.
2752
2753 First throw away any conversions to wider types
2754 already present in the operands. */
2755
2756 primarg1 = get_narrower (arg1, &unsignedp1);
2757 primother = get_narrower (other, &unsignedpo);
2758
2759 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2760 if (unsignedp1 == unsignedpo
2761 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2762 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2763 {
2764 tree type = TREE_TYPE (arg0);
2765
2766 /* Make sure shorter operand is extended the right way
2767 to match the longer operand. */
2768 primarg1 = fold_convert (signed_or_unsigned_type_for
2769 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2770
2771 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2772 return 1;
2773 }
2774
2775 return 0;
2776 }
2777 \f
2778 /* See if ARG is an expression that is either a comparison or is performing
2779 arithmetic on comparisons. The comparisons must only be comparing
2780 two different values, which will be stored in *CVAL1 and *CVAL2; if
2781 they are nonzero it means that some operands have already been found.
2782 No variables may be used anywhere else in the expression except in the
2783 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2784 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2785
2786 If this is true, return 1. Otherwise, return zero. */
2787
2788 static int
2789 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2790 {
2791 enum tree_code code = TREE_CODE (arg);
2792 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2793
2794 /* We can handle some of the tcc_expression cases here. */
2795 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2796 tclass = tcc_unary;
2797 else if (tclass == tcc_expression
2798 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2799 || code == COMPOUND_EXPR))
2800 tclass = tcc_binary;
2801
2802 else if (tclass == tcc_expression && code == SAVE_EXPR
2803 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2804 {
2805 /* If we've already found a CVAL1 or CVAL2, this expression is
2806 two complex to handle. */
2807 if (*cval1 || *cval2)
2808 return 0;
2809
2810 tclass = tcc_unary;
2811 *save_p = 1;
2812 }
2813
2814 switch (tclass)
2815 {
2816 case tcc_unary:
2817 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2818
2819 case tcc_binary:
2820 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2821 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2822 cval1, cval2, save_p));
2823
2824 case tcc_constant:
2825 return 1;
2826
2827 case tcc_expression:
2828 if (code == COND_EXPR)
2829 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2830 cval1, cval2, save_p)
2831 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2832 cval1, cval2, save_p)
2833 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2834 cval1, cval2, save_p));
2835 return 0;
2836
2837 case tcc_comparison:
2838 /* First see if we can handle the first operand, then the second. For
2839 the second operand, we know *CVAL1 can't be zero. It must be that
2840 one side of the comparison is each of the values; test for the
2841 case where this isn't true by failing if the two operands
2842 are the same. */
2843
2844 if (operand_equal_p (TREE_OPERAND (arg, 0),
2845 TREE_OPERAND (arg, 1), 0))
2846 return 0;
2847
2848 if (*cval1 == 0)
2849 *cval1 = TREE_OPERAND (arg, 0);
2850 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2851 ;
2852 else if (*cval2 == 0)
2853 *cval2 = TREE_OPERAND (arg, 0);
2854 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2855 ;
2856 else
2857 return 0;
2858
2859 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2860 ;
2861 else if (*cval2 == 0)
2862 *cval2 = TREE_OPERAND (arg, 1);
2863 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2864 ;
2865 else
2866 return 0;
2867
2868 return 1;
2869
2870 default:
2871 return 0;
2872 }
2873 }
2874 \f
2875 /* ARG is a tree that is known to contain just arithmetic operations and
2876 comparisons. Evaluate the operations in the tree substituting NEW0 for
2877 any occurrence of OLD0 as an operand of a comparison and likewise for
2878 NEW1 and OLD1. */
2879
2880 static tree
2881 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2882 tree old1, tree new1)
2883 {
2884 tree type = TREE_TYPE (arg);
2885 enum tree_code code = TREE_CODE (arg);
2886 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2887
2888 /* We can handle some of the tcc_expression cases here. */
2889 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2890 tclass = tcc_unary;
2891 else if (tclass == tcc_expression
2892 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2893 tclass = tcc_binary;
2894
2895 switch (tclass)
2896 {
2897 case tcc_unary:
2898 return fold_build1_loc (loc, code, type,
2899 eval_subst (loc, TREE_OPERAND (arg, 0),
2900 old0, new0, old1, new1));
2901
2902 case tcc_binary:
2903 return fold_build2_loc (loc, code, type,
2904 eval_subst (loc, TREE_OPERAND (arg, 0),
2905 old0, new0, old1, new1),
2906 eval_subst (loc, TREE_OPERAND (arg, 1),
2907 old0, new0, old1, new1));
2908
2909 case tcc_expression:
2910 switch (code)
2911 {
2912 case SAVE_EXPR:
2913 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2914 old1, new1);
2915
2916 case COMPOUND_EXPR:
2917 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2918 old1, new1);
2919
2920 case COND_EXPR:
2921 return fold_build3_loc (loc, code, type,
2922 eval_subst (loc, TREE_OPERAND (arg, 0),
2923 old0, new0, old1, new1),
2924 eval_subst (loc, TREE_OPERAND (arg, 1),
2925 old0, new0, old1, new1),
2926 eval_subst (loc, TREE_OPERAND (arg, 2),
2927 old0, new0, old1, new1));
2928 default:
2929 break;
2930 }
2931 /* Fall through - ??? */
2932
2933 case tcc_comparison:
2934 {
2935 tree arg0 = TREE_OPERAND (arg, 0);
2936 tree arg1 = TREE_OPERAND (arg, 1);
2937
2938 /* We need to check both for exact equality and tree equality. The
2939 former will be true if the operand has a side-effect. In that
2940 case, we know the operand occurred exactly once. */
2941
2942 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2943 arg0 = new0;
2944 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2945 arg0 = new1;
2946
2947 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2948 arg1 = new0;
2949 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2950 arg1 = new1;
2951
2952 return fold_build2_loc (loc, code, type, arg0, arg1);
2953 }
2954
2955 default:
2956 return arg;
2957 }
2958 }
2959 \f
2960 /* Return a tree for the case when the result of an expression is RESULT
2961 converted to TYPE and OMITTED was previously an operand of the expression
2962 but is now not needed (e.g., we folded OMITTED * 0).
2963
2964 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2965 the conversion of RESULT to TYPE. */
2966
2967 tree
2968 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2969 {
2970 tree t = fold_convert_loc (loc, type, result);
2971
2972 /* If the resulting operand is an empty statement, just return the omitted
2973 statement casted to void. */
2974 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2975 return build1_loc (loc, NOP_EXPR, void_type_node,
2976 fold_ignored_result (omitted));
2977
2978 if (TREE_SIDE_EFFECTS (omitted))
2979 return build2_loc (loc, COMPOUND_EXPR, type,
2980 fold_ignored_result (omitted), t);
2981
2982 return non_lvalue_loc (loc, t);
2983 }
2984
2985 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2986
2987 static tree
2988 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2989 tree omitted)
2990 {
2991 tree t = fold_convert_loc (loc, type, result);
2992
2993 /* If the resulting operand is an empty statement, just return the omitted
2994 statement casted to void. */
2995 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2996 return build1_loc (loc, NOP_EXPR, void_type_node,
2997 fold_ignored_result (omitted));
2998
2999 if (TREE_SIDE_EFFECTS (omitted))
3000 return build2_loc (loc, COMPOUND_EXPR, type,
3001 fold_ignored_result (omitted), t);
3002
3003 return pedantic_non_lvalue_loc (loc, t);
3004 }
3005
3006 /* Return a tree for the case when the result of an expression is RESULT
3007 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3008 of the expression but are now not needed.
3009
3010 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3011 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3012 evaluated before OMITTED2. Otherwise, if neither has side effects,
3013 just do the conversion of RESULT to TYPE. */
3014
3015 tree
3016 omit_two_operands_loc (location_t loc, tree type, tree result,
3017 tree omitted1, tree omitted2)
3018 {
3019 tree t = fold_convert_loc (loc, type, result);
3020
3021 if (TREE_SIDE_EFFECTS (omitted2))
3022 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3023 if (TREE_SIDE_EFFECTS (omitted1))
3024 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3025
3026 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3027 }
3028
3029 \f
3030 /* Return a simplified tree node for the truth-negation of ARG. This
3031 never alters ARG itself. We assume that ARG is an operation that
3032 returns a truth value (0 or 1).
3033
3034 FIXME: one would think we would fold the result, but it causes
3035 problems with the dominator optimizer. */
3036
3037 tree
3038 fold_truth_not_expr (location_t loc, tree arg)
3039 {
3040 tree type = TREE_TYPE (arg);
3041 enum tree_code code = TREE_CODE (arg);
3042 location_t loc1, loc2;
3043
3044 /* If this is a comparison, we can simply invert it, except for
3045 floating-point non-equality comparisons, in which case we just
3046 enclose a TRUTH_NOT_EXPR around what we have. */
3047
3048 if (TREE_CODE_CLASS (code) == tcc_comparison)
3049 {
3050 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3051 if (FLOAT_TYPE_P (op_type)
3052 && flag_trapping_math
3053 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3054 && code != NE_EXPR && code != EQ_EXPR)
3055 return NULL_TREE;
3056
3057 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3058 if (code == ERROR_MARK)
3059 return NULL_TREE;
3060
3061 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3062 TREE_OPERAND (arg, 1));
3063 }
3064
3065 switch (code)
3066 {
3067 case INTEGER_CST:
3068 return constant_boolean_node (integer_zerop (arg), type);
3069
3070 case TRUTH_AND_EXPR:
3071 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3072 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3073 return build2_loc (loc, TRUTH_OR_EXPR, type,
3074 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3075 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3076
3077 case TRUTH_OR_EXPR:
3078 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3079 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3080 return build2_loc (loc, TRUTH_AND_EXPR, type,
3081 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3082 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3083
3084 case TRUTH_XOR_EXPR:
3085 /* Here we can invert either operand. We invert the first operand
3086 unless the second operand is a TRUTH_NOT_EXPR in which case our
3087 result is the XOR of the first operand with the inside of the
3088 negation of the second operand. */
3089
3090 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3091 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3092 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3093 else
3094 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3095 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3096 TREE_OPERAND (arg, 1));
3097
3098 case TRUTH_ANDIF_EXPR:
3099 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3100 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3101 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3102 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3103 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3104
3105 case TRUTH_ORIF_EXPR:
3106 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3107 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3109 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3110 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3111
3112 case TRUTH_NOT_EXPR:
3113 return TREE_OPERAND (arg, 0);
3114
3115 case COND_EXPR:
3116 {
3117 tree arg1 = TREE_OPERAND (arg, 1);
3118 tree arg2 = TREE_OPERAND (arg, 2);
3119
3120 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3121 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3122
3123 /* A COND_EXPR may have a throw as one operand, which
3124 then has void type. Just leave void operands
3125 as they are. */
3126 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3127 VOID_TYPE_P (TREE_TYPE (arg1))
3128 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3129 VOID_TYPE_P (TREE_TYPE (arg2))
3130 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3131 }
3132
3133 case COMPOUND_EXPR:
3134 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3135 return build2_loc (loc, COMPOUND_EXPR, type,
3136 TREE_OPERAND (arg, 0),
3137 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3138
3139 case NON_LVALUE_EXPR:
3140 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3141 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3142
3143 CASE_CONVERT:
3144 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3145 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3146
3147 /* ... fall through ... */
3148
3149 case FLOAT_EXPR:
3150 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3151 return build1_loc (loc, TREE_CODE (arg), type,
3152 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3153
3154 case BIT_AND_EXPR:
3155 if (!integer_onep (TREE_OPERAND (arg, 1)))
3156 return NULL_TREE;
3157 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3158
3159 case SAVE_EXPR:
3160 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3161
3162 case CLEANUP_POINT_EXPR:
3163 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3164 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3165 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3166
3167 default:
3168 return NULL_TREE;
3169 }
3170 }
3171
3172 /* Return a simplified tree node for the truth-negation of ARG. This
3173 never alters ARG itself. We assume that ARG is an operation that
3174 returns a truth value (0 or 1).
3175
3176 FIXME: one would think we would fold the result, but it causes
3177 problems with the dominator optimizer. */
3178
3179 tree
3180 invert_truthvalue_loc (location_t loc, tree arg)
3181 {
3182 tree tem;
3183
3184 if (TREE_CODE (arg) == ERROR_MARK)
3185 return arg;
3186
3187 tem = fold_truth_not_expr (loc, arg);
3188 if (!tem)
3189 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3190
3191 return tem;
3192 }
3193
3194 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3195 operands are another bit-wise operation with a common input. If so,
3196 distribute the bit operations to save an operation and possibly two if
3197 constants are involved. For example, convert
3198 (A | B) & (A | C) into A | (B & C)
3199 Further simplification will occur if B and C are constants.
3200
3201 If this optimization cannot be done, 0 will be returned. */
3202
3203 static tree
3204 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3205 tree arg0, tree arg1)
3206 {
3207 tree common;
3208 tree left, right;
3209
3210 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3211 || TREE_CODE (arg0) == code
3212 || (TREE_CODE (arg0) != BIT_AND_EXPR
3213 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3214 return 0;
3215
3216 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3217 {
3218 common = TREE_OPERAND (arg0, 0);
3219 left = TREE_OPERAND (arg0, 1);
3220 right = TREE_OPERAND (arg1, 1);
3221 }
3222 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3223 {
3224 common = TREE_OPERAND (arg0, 0);
3225 left = TREE_OPERAND (arg0, 1);
3226 right = TREE_OPERAND (arg1, 0);
3227 }
3228 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3229 {
3230 common = TREE_OPERAND (arg0, 1);
3231 left = TREE_OPERAND (arg0, 0);
3232 right = TREE_OPERAND (arg1, 1);
3233 }
3234 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3235 {
3236 common = TREE_OPERAND (arg0, 1);
3237 left = TREE_OPERAND (arg0, 0);
3238 right = TREE_OPERAND (arg1, 0);
3239 }
3240 else
3241 return 0;
3242
3243 common = fold_convert_loc (loc, type, common);
3244 left = fold_convert_loc (loc, type, left);
3245 right = fold_convert_loc (loc, type, right);
3246 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3247 fold_build2_loc (loc, code, type, left, right));
3248 }
3249
3250 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3251 with code CODE. This optimization is unsafe. */
3252 static tree
3253 distribute_real_division (location_t loc, enum tree_code code, tree type,
3254 tree arg0, tree arg1)
3255 {
3256 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3257 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3258
3259 /* (A / C) +- (B / C) -> (A +- B) / C. */
3260 if (mul0 == mul1
3261 && operand_equal_p (TREE_OPERAND (arg0, 1),
3262 TREE_OPERAND (arg1, 1), 0))
3263 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3264 fold_build2_loc (loc, code, type,
3265 TREE_OPERAND (arg0, 0),
3266 TREE_OPERAND (arg1, 0)),
3267 TREE_OPERAND (arg0, 1));
3268
3269 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3270 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3271 TREE_OPERAND (arg1, 0), 0)
3272 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3273 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3274 {
3275 REAL_VALUE_TYPE r0, r1;
3276 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3277 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3278 if (!mul0)
3279 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3280 if (!mul1)
3281 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3282 real_arithmetic (&r0, code, &r0, &r1);
3283 return fold_build2_loc (loc, MULT_EXPR, type,
3284 TREE_OPERAND (arg0, 0),
3285 build_real (type, r0));
3286 }
3287
3288 return NULL_TREE;
3289 }
3290 \f
3291 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3292 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3293
3294 static tree
3295 make_bit_field_ref (location_t loc, tree inner, tree type,
3296 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3297 {
3298 tree result, bftype;
3299
3300 if (bitpos == 0)
3301 {
3302 tree size = TYPE_SIZE (TREE_TYPE (inner));
3303 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3304 || POINTER_TYPE_P (TREE_TYPE (inner)))
3305 && host_integerp (size, 0)
3306 && tree_low_cst (size, 0) == bitsize)
3307 return fold_convert_loc (loc, type, inner);
3308 }
3309
3310 bftype = type;
3311 if (TYPE_PRECISION (bftype) != bitsize
3312 || TYPE_UNSIGNED (bftype) == !unsignedp)
3313 bftype = build_nonstandard_integer_type (bitsize, 0);
3314
3315 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3316 size_int (bitsize), bitsize_int (bitpos));
3317
3318 if (bftype != type)
3319 result = fold_convert_loc (loc, type, result);
3320
3321 return result;
3322 }
3323
3324 /* Optimize a bit-field compare.
3325
3326 There are two cases: First is a compare against a constant and the
3327 second is a comparison of two items where the fields are at the same
3328 bit position relative to the start of a chunk (byte, halfword, word)
3329 large enough to contain it. In these cases we can avoid the shift
3330 implicit in bitfield extractions.
3331
3332 For constants, we emit a compare of the shifted constant with the
3333 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3334 compared. For two fields at the same position, we do the ANDs with the
3335 similar mask and compare the result of the ANDs.
3336
3337 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3338 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3339 are the left and right operands of the comparison, respectively.
3340
3341 If the optimization described above can be done, we return the resulting
3342 tree. Otherwise we return zero. */
3343
3344 static tree
3345 optimize_bit_field_compare (location_t loc, enum tree_code code,
3346 tree compare_type, tree lhs, tree rhs)
3347 {
3348 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3349 tree type = TREE_TYPE (lhs);
3350 tree signed_type, unsigned_type;
3351 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3352 enum machine_mode lmode, rmode, nmode;
3353 int lunsignedp, runsignedp;
3354 int lvolatilep = 0, rvolatilep = 0;
3355 tree linner, rinner = NULL_TREE;
3356 tree mask;
3357 tree offset;
3358
3359 /* In the strict volatile bitfields case, doing code changes here may prevent
3360 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3361 if (flag_strict_volatile_bitfields > 0)
3362 return 0;
3363
3364 /* Get all the information about the extractions being done. If the bit size
3365 if the same as the size of the underlying object, we aren't doing an
3366 extraction at all and so can do nothing. We also don't want to
3367 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3368 then will no longer be able to replace it. */
3369 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3370 &lunsignedp, &lvolatilep, false);
3371 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3372 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3373 return 0;
3374
3375 if (!const_p)
3376 {
3377 /* If this is not a constant, we can only do something if bit positions,
3378 sizes, and signedness are the same. */
3379 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3380 &runsignedp, &rvolatilep, false);
3381
3382 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3383 || lunsignedp != runsignedp || offset != 0
3384 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3385 return 0;
3386 }
3387
3388 /* See if we can find a mode to refer to this field. We should be able to,
3389 but fail if we can't. */
3390 if (lvolatilep
3391 && GET_MODE_BITSIZE (lmode) > 0
3392 && flag_strict_volatile_bitfields > 0)
3393 nmode = lmode;
3394 else
3395 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3396 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3397 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3398 TYPE_ALIGN (TREE_TYPE (rinner))),
3399 word_mode, lvolatilep || rvolatilep);
3400 if (nmode == VOIDmode)
3401 return 0;
3402
3403 /* Set signed and unsigned types of the precision of this mode for the
3404 shifts below. */
3405 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3406 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3407
3408 /* Compute the bit position and size for the new reference and our offset
3409 within it. If the new reference is the same size as the original, we
3410 won't optimize anything, so return zero. */
3411 nbitsize = GET_MODE_BITSIZE (nmode);
3412 nbitpos = lbitpos & ~ (nbitsize - 1);
3413 lbitpos -= nbitpos;
3414 if (nbitsize == lbitsize)
3415 return 0;
3416
3417 if (BYTES_BIG_ENDIAN)
3418 lbitpos = nbitsize - lbitsize - lbitpos;
3419
3420 /* Make the mask to be used against the extracted field. */
3421 mask = build_int_cst_type (unsigned_type, -1);
3422 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3423 mask = const_binop (RSHIFT_EXPR, mask,
3424 size_int (nbitsize - lbitsize - lbitpos));
3425
3426 if (! const_p)
3427 /* If not comparing with constant, just rework the comparison
3428 and return. */
3429 return fold_build2_loc (loc, code, compare_type,
3430 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3431 make_bit_field_ref (loc, linner,
3432 unsigned_type,
3433 nbitsize, nbitpos,
3434 1),
3435 mask),
3436 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3437 make_bit_field_ref (loc, rinner,
3438 unsigned_type,
3439 nbitsize, nbitpos,
3440 1),
3441 mask));
3442
3443 /* Otherwise, we are handling the constant case. See if the constant is too
3444 big for the field. Warn and return a tree of for 0 (false) if so. We do
3445 this not only for its own sake, but to avoid having to test for this
3446 error case below. If we didn't, we might generate wrong code.
3447
3448 For unsigned fields, the constant shifted right by the field length should
3449 be all zero. For signed fields, the high-order bits should agree with
3450 the sign bit. */
3451
3452 if (lunsignedp)
3453 {
3454 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3455 fold_convert_loc (loc,
3456 unsigned_type, rhs),
3457 size_int (lbitsize))))
3458 {
3459 warning (0, "comparison is always %d due to width of bit-field",
3460 code == NE_EXPR);
3461 return constant_boolean_node (code == NE_EXPR, compare_type);
3462 }
3463 }
3464 else
3465 {
3466 tree tem = const_binop (RSHIFT_EXPR,
3467 fold_convert_loc (loc, signed_type, rhs),
3468 size_int (lbitsize - 1));
3469 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3470 {
3471 warning (0, "comparison is always %d due to width of bit-field",
3472 code == NE_EXPR);
3473 return constant_boolean_node (code == NE_EXPR, compare_type);
3474 }
3475 }
3476
3477 /* Single-bit compares should always be against zero. */
3478 if (lbitsize == 1 && ! integer_zerop (rhs))
3479 {
3480 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3481 rhs = build_int_cst (type, 0);
3482 }
3483
3484 /* Make a new bitfield reference, shift the constant over the
3485 appropriate number of bits and mask it with the computed mask
3486 (in case this was a signed field). If we changed it, make a new one. */
3487 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3488 if (lvolatilep)
3489 {
3490 TREE_SIDE_EFFECTS (lhs) = 1;
3491 TREE_THIS_VOLATILE (lhs) = 1;
3492 }
3493
3494 rhs = const_binop (BIT_AND_EXPR,
3495 const_binop (LSHIFT_EXPR,
3496 fold_convert_loc (loc, unsigned_type, rhs),
3497 size_int (lbitpos)),
3498 mask);
3499
3500 lhs = build2_loc (loc, code, compare_type,
3501 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3502 return lhs;
3503 }
3504 \f
3505 /* Subroutine for fold_truth_andor_1: decode a field reference.
3506
3507 If EXP is a comparison reference, we return the innermost reference.
3508
3509 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3510 set to the starting bit number.
3511
3512 If the innermost field can be completely contained in a mode-sized
3513 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3514
3515 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3516 otherwise it is not changed.
3517
3518 *PUNSIGNEDP is set to the signedness of the field.
3519
3520 *PMASK is set to the mask used. This is either contained in a
3521 BIT_AND_EXPR or derived from the width of the field.
3522
3523 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3524
3525 Return 0 if this is not a component reference or is one that we can't
3526 do anything with. */
3527
3528 static tree
3529 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3530 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3531 int *punsignedp, int *pvolatilep,
3532 tree *pmask, tree *pand_mask)
3533 {
3534 tree outer_type = 0;
3535 tree and_mask = 0;
3536 tree mask, inner, offset;
3537 tree unsigned_type;
3538 unsigned int precision;
3539
3540 /* All the optimizations using this function assume integer fields.
3541 There are problems with FP fields since the type_for_size call
3542 below can fail for, e.g., XFmode. */
3543 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3544 return 0;
3545
3546 /* We are interested in the bare arrangement of bits, so strip everything
3547 that doesn't affect the machine mode. However, record the type of the
3548 outermost expression if it may matter below. */
3549 if (CONVERT_EXPR_P (exp)
3550 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3551 outer_type = TREE_TYPE (exp);
3552 STRIP_NOPS (exp);
3553
3554 if (TREE_CODE (exp) == BIT_AND_EXPR)
3555 {
3556 and_mask = TREE_OPERAND (exp, 1);
3557 exp = TREE_OPERAND (exp, 0);
3558 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3559 if (TREE_CODE (and_mask) != INTEGER_CST)
3560 return 0;
3561 }
3562
3563 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3564 punsignedp, pvolatilep, false);
3565 if ((inner == exp && and_mask == 0)
3566 || *pbitsize < 0 || offset != 0
3567 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3568 return 0;
3569
3570 /* If the number of bits in the reference is the same as the bitsize of
3571 the outer type, then the outer type gives the signedness. Otherwise
3572 (in case of a small bitfield) the signedness is unchanged. */
3573 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3574 *punsignedp = TYPE_UNSIGNED (outer_type);
3575
3576 /* Compute the mask to access the bitfield. */
3577 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3578 precision = TYPE_PRECISION (unsigned_type);
3579
3580 mask = build_int_cst_type (unsigned_type, -1);
3581
3582 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3583 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3584
3585 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3586 if (and_mask != 0)
3587 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3588 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3589
3590 *pmask = mask;
3591 *pand_mask = and_mask;
3592 return inner;
3593 }
3594
3595 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3596 bit positions. */
3597
3598 static int
3599 all_ones_mask_p (const_tree mask, int size)
3600 {
3601 tree type = TREE_TYPE (mask);
3602 unsigned int precision = TYPE_PRECISION (type);
3603 tree tmask;
3604
3605 tmask = build_int_cst_type (signed_type_for (type), -1);
3606
3607 return
3608 tree_int_cst_equal (mask,
3609 const_binop (RSHIFT_EXPR,
3610 const_binop (LSHIFT_EXPR, tmask,
3611 size_int (precision - size)),
3612 size_int (precision - size)));
3613 }
3614
3615 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3616 represents the sign bit of EXP's type. If EXP represents a sign
3617 or zero extension, also test VAL against the unextended type.
3618 The return value is the (sub)expression whose sign bit is VAL,
3619 or NULL_TREE otherwise. */
3620
3621 static tree
3622 sign_bit_p (tree exp, const_tree val)
3623 {
3624 unsigned HOST_WIDE_INT mask_lo, lo;
3625 HOST_WIDE_INT mask_hi, hi;
3626 int width;
3627 tree t;
3628
3629 /* Tree EXP must have an integral type. */
3630 t = TREE_TYPE (exp);
3631 if (! INTEGRAL_TYPE_P (t))
3632 return NULL_TREE;
3633
3634 /* Tree VAL must be an integer constant. */
3635 if (TREE_CODE (val) != INTEGER_CST
3636 || TREE_OVERFLOW (val))
3637 return NULL_TREE;
3638
3639 width = TYPE_PRECISION (t);
3640 if (width > HOST_BITS_PER_WIDE_INT)
3641 {
3642 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3643 lo = 0;
3644
3645 mask_hi = ((unsigned HOST_WIDE_INT) -1
3646 >> (HOST_BITS_PER_DOUBLE_INT - width));
3647 mask_lo = -1;
3648 }
3649 else
3650 {
3651 hi = 0;
3652 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3653
3654 mask_hi = 0;
3655 mask_lo = ((unsigned HOST_WIDE_INT) -1
3656 >> (HOST_BITS_PER_WIDE_INT - width));
3657 }
3658
3659 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3660 treat VAL as if it were unsigned. */
3661 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3662 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3663 return exp;
3664
3665 /* Handle extension from a narrower type. */
3666 if (TREE_CODE (exp) == NOP_EXPR
3667 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3668 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3669
3670 return NULL_TREE;
3671 }
3672
3673 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3674 to be evaluated unconditionally. */
3675
3676 static int
3677 simple_operand_p (const_tree exp)
3678 {
3679 /* Strip any conversions that don't change the machine mode. */
3680 STRIP_NOPS (exp);
3681
3682 return (CONSTANT_CLASS_P (exp)
3683 || TREE_CODE (exp) == SSA_NAME
3684 || (DECL_P (exp)
3685 && ! TREE_ADDRESSABLE (exp)
3686 && ! TREE_THIS_VOLATILE (exp)
3687 && ! DECL_NONLOCAL (exp)
3688 /* Don't regard global variables as simple. They may be
3689 allocated in ways unknown to the compiler (shared memory,
3690 #pragma weak, etc). */
3691 && ! TREE_PUBLIC (exp)
3692 && ! DECL_EXTERNAL (exp)
3693 /* Loading a static variable is unduly expensive, but global
3694 registers aren't expensive. */
3695 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3696 }
3697
3698 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3699 to be evaluated unconditionally.
3700 I addition to simple_operand_p, we assume that comparisons, conversions,
3701 and logic-not operations are simple, if their operands are simple, too. */
3702
3703 static bool
3704 simple_operand_p_2 (tree exp)
3705 {
3706 enum tree_code code;
3707
3708 if (TREE_SIDE_EFFECTS (exp)
3709 || tree_could_trap_p (exp))
3710 return false;
3711
3712 while (CONVERT_EXPR_P (exp))
3713 exp = TREE_OPERAND (exp, 0);
3714
3715 code = TREE_CODE (exp);
3716
3717 if (TREE_CODE_CLASS (code) == tcc_comparison)
3718 return (simple_operand_p (TREE_OPERAND (exp, 0))
3719 && simple_operand_p (TREE_OPERAND (exp, 1)));
3720
3721 if (code == TRUTH_NOT_EXPR)
3722 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3723
3724 return simple_operand_p (exp);
3725 }
3726
3727 \f
3728 /* The following functions are subroutines to fold_range_test and allow it to
3729 try to change a logical combination of comparisons into a range test.
3730
3731 For example, both
3732 X == 2 || X == 3 || X == 4 || X == 5
3733 and
3734 X >= 2 && X <= 5
3735 are converted to
3736 (unsigned) (X - 2) <= 3
3737
3738 We describe each set of comparisons as being either inside or outside
3739 a range, using a variable named like IN_P, and then describe the
3740 range with a lower and upper bound. If one of the bounds is omitted,
3741 it represents either the highest or lowest value of the type.
3742
3743 In the comments below, we represent a range by two numbers in brackets
3744 preceded by a "+" to designate being inside that range, or a "-" to
3745 designate being outside that range, so the condition can be inverted by
3746 flipping the prefix. An omitted bound is represented by a "-". For
3747 example, "- [-, 10]" means being outside the range starting at the lowest
3748 possible value and ending at 10, in other words, being greater than 10.
3749 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3750 always false.
3751
3752 We set up things so that the missing bounds are handled in a consistent
3753 manner so neither a missing bound nor "true" and "false" need to be
3754 handled using a special case. */
3755
3756 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3757 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3758 and UPPER1_P are nonzero if the respective argument is an upper bound
3759 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3760 must be specified for a comparison. ARG1 will be converted to ARG0's
3761 type if both are specified. */
3762
3763 static tree
3764 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3765 tree arg1, int upper1_p)
3766 {
3767 tree tem;
3768 int result;
3769 int sgn0, sgn1;
3770
3771 /* If neither arg represents infinity, do the normal operation.
3772 Else, if not a comparison, return infinity. Else handle the special
3773 comparison rules. Note that most of the cases below won't occur, but
3774 are handled for consistency. */
3775
3776 if (arg0 != 0 && arg1 != 0)
3777 {
3778 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3779 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3780 STRIP_NOPS (tem);
3781 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3782 }
3783
3784 if (TREE_CODE_CLASS (code) != tcc_comparison)
3785 return 0;
3786
3787 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3788 for neither. In real maths, we cannot assume open ended ranges are
3789 the same. But, this is computer arithmetic, where numbers are finite.
3790 We can therefore make the transformation of any unbounded range with
3791 the value Z, Z being greater than any representable number. This permits
3792 us to treat unbounded ranges as equal. */
3793 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3794 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3795 switch (code)
3796 {
3797 case EQ_EXPR:
3798 result = sgn0 == sgn1;
3799 break;
3800 case NE_EXPR:
3801 result = sgn0 != sgn1;
3802 break;
3803 case LT_EXPR:
3804 result = sgn0 < sgn1;
3805 break;
3806 case LE_EXPR:
3807 result = sgn0 <= sgn1;
3808 break;
3809 case GT_EXPR:
3810 result = sgn0 > sgn1;
3811 break;
3812 case GE_EXPR:
3813 result = sgn0 >= sgn1;
3814 break;
3815 default:
3816 gcc_unreachable ();
3817 }
3818
3819 return constant_boolean_node (result, type);
3820 }
3821 \f
3822 /* Helper routine for make_range. Perform one step for it, return
3823 new expression if the loop should continue or NULL_TREE if it should
3824 stop. */
3825
3826 tree
3827 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3828 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3829 bool *strict_overflow_p)
3830 {
3831 tree arg0_type = TREE_TYPE (arg0);
3832 tree n_low, n_high, low = *p_low, high = *p_high;
3833 int in_p = *p_in_p, n_in_p;
3834
3835 switch (code)
3836 {
3837 case TRUTH_NOT_EXPR:
3838 *p_in_p = ! in_p;
3839 return arg0;
3840
3841 case EQ_EXPR: case NE_EXPR:
3842 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3843 /* We can only do something if the range is testing for zero
3844 and if the second operand is an integer constant. Note that
3845 saying something is "in" the range we make is done by
3846 complementing IN_P since it will set in the initial case of
3847 being not equal to zero; "out" is leaving it alone. */
3848 if (low == NULL_TREE || high == NULL_TREE
3849 || ! integer_zerop (low) || ! integer_zerop (high)
3850 || TREE_CODE (arg1) != INTEGER_CST)
3851 return NULL_TREE;
3852
3853 switch (code)
3854 {
3855 case NE_EXPR: /* - [c, c] */
3856 low = high = arg1;
3857 break;
3858 case EQ_EXPR: /* + [c, c] */
3859 in_p = ! in_p, low = high = arg1;
3860 break;
3861 case GT_EXPR: /* - [-, c] */
3862 low = 0, high = arg1;
3863 break;
3864 case GE_EXPR: /* + [c, -] */
3865 in_p = ! in_p, low = arg1, high = 0;
3866 break;
3867 case LT_EXPR: /* - [c, -] */
3868 low = arg1, high = 0;
3869 break;
3870 case LE_EXPR: /* + [-, c] */
3871 in_p = ! in_p, low = 0, high = arg1;
3872 break;
3873 default:
3874 gcc_unreachable ();
3875 }
3876
3877 /* If this is an unsigned comparison, we also know that EXP is
3878 greater than or equal to zero. We base the range tests we make
3879 on that fact, so we record it here so we can parse existing
3880 range tests. We test arg0_type since often the return type
3881 of, e.g. EQ_EXPR, is boolean. */
3882 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3883 {
3884 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3885 in_p, low, high, 1,
3886 build_int_cst (arg0_type, 0),
3887 NULL_TREE))
3888 return NULL_TREE;
3889
3890 in_p = n_in_p, low = n_low, high = n_high;
3891
3892 /* If the high bound is missing, but we have a nonzero low
3893 bound, reverse the range so it goes from zero to the low bound
3894 minus 1. */
3895 if (high == 0 && low && ! integer_zerop (low))
3896 {
3897 in_p = ! in_p;
3898 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3899 integer_one_node, 0);
3900 low = build_int_cst (arg0_type, 0);
3901 }
3902 }
3903
3904 *p_low = low;
3905 *p_high = high;
3906 *p_in_p = in_p;
3907 return arg0;
3908
3909 case NEGATE_EXPR:
3910 /* (-x) IN [a,b] -> x in [-b, -a] */
3911 n_low = range_binop (MINUS_EXPR, exp_type,
3912 build_int_cst (exp_type, 0),
3913 0, high, 1);
3914 n_high = range_binop (MINUS_EXPR, exp_type,
3915 build_int_cst (exp_type, 0),
3916 0, low, 0);
3917 if (n_high != 0 && TREE_OVERFLOW (n_high))
3918 return NULL_TREE;
3919 goto normalize;
3920
3921 case BIT_NOT_EXPR:
3922 /* ~ X -> -X - 1 */
3923 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3924 build_int_cst (exp_type, 1));
3925
3926 case PLUS_EXPR:
3927 case MINUS_EXPR:
3928 if (TREE_CODE (arg1) != INTEGER_CST)
3929 return NULL_TREE;
3930
3931 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3932 move a constant to the other side. */
3933 if (!TYPE_UNSIGNED (arg0_type)
3934 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3935 return NULL_TREE;
3936
3937 /* If EXP is signed, any overflow in the computation is undefined,
3938 so we don't worry about it so long as our computations on
3939 the bounds don't overflow. For unsigned, overflow is defined
3940 and this is exactly the right thing. */
3941 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3942 arg0_type, low, 0, arg1, 0);
3943 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3944 arg0_type, high, 1, arg1, 0);
3945 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3946 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3947 return NULL_TREE;
3948
3949 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3950 *strict_overflow_p = true;
3951
3952 normalize:
3953 /* Check for an unsigned range which has wrapped around the maximum
3954 value thus making n_high < n_low, and normalize it. */
3955 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3956 {
3957 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3958 integer_one_node, 0);
3959 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3960 integer_one_node, 0);
3961
3962 /* If the range is of the form +/- [ x+1, x ], we won't
3963 be able to normalize it. But then, it represents the
3964 whole range or the empty set, so make it
3965 +/- [ -, - ]. */
3966 if (tree_int_cst_equal (n_low, low)
3967 && tree_int_cst_equal (n_high, high))
3968 low = high = 0;
3969 else
3970 in_p = ! in_p;
3971 }
3972 else
3973 low = n_low, high = n_high;
3974
3975 *p_low = low;
3976 *p_high = high;
3977 *p_in_p = in_p;
3978 return arg0;
3979
3980 CASE_CONVERT:
3981 case NON_LVALUE_EXPR:
3982 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3983 return NULL_TREE;
3984
3985 if (! INTEGRAL_TYPE_P (arg0_type)
3986 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3987 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3988 return NULL_TREE;
3989
3990 n_low = low, n_high = high;
3991
3992 if (n_low != 0)
3993 n_low = fold_convert_loc (loc, arg0_type, n_low);
3994
3995 if (n_high != 0)
3996 n_high = fold_convert_loc (loc, arg0_type, n_high);
3997
3998 /* If we're converting arg0 from an unsigned type, to exp,
3999 a signed type, we will be doing the comparison as unsigned.
4000 The tests above have already verified that LOW and HIGH
4001 are both positive.
4002
4003 So we have to ensure that we will handle large unsigned
4004 values the same way that the current signed bounds treat
4005 negative values. */
4006
4007 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4008 {
4009 tree high_positive;
4010 tree equiv_type;
4011 /* For fixed-point modes, we need to pass the saturating flag
4012 as the 2nd parameter. */
4013 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4014 equiv_type
4015 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4016 TYPE_SATURATING (arg0_type));
4017 else
4018 equiv_type
4019 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4020
4021 /* A range without an upper bound is, naturally, unbounded.
4022 Since convert would have cropped a very large value, use
4023 the max value for the destination type. */
4024 high_positive
4025 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4026 : TYPE_MAX_VALUE (arg0_type);
4027
4028 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4029 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4030 fold_convert_loc (loc, arg0_type,
4031 high_positive),
4032 build_int_cst (arg0_type, 1));
4033
4034 /* If the low bound is specified, "and" the range with the
4035 range for which the original unsigned value will be
4036 positive. */
4037 if (low != 0)
4038 {
4039 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4040 1, fold_convert_loc (loc, arg0_type,
4041 integer_zero_node),
4042 high_positive))
4043 return NULL_TREE;
4044
4045 in_p = (n_in_p == in_p);
4046 }
4047 else
4048 {
4049 /* Otherwise, "or" the range with the range of the input
4050 that will be interpreted as negative. */
4051 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4052 1, fold_convert_loc (loc, arg0_type,
4053 integer_zero_node),
4054 high_positive))
4055 return NULL_TREE;
4056
4057 in_p = (in_p != n_in_p);
4058 }
4059 }
4060
4061 *p_low = n_low;
4062 *p_high = n_high;
4063 *p_in_p = in_p;
4064 return arg0;
4065
4066 default:
4067 return NULL_TREE;
4068 }
4069 }
4070
4071 /* Given EXP, a logical expression, set the range it is testing into
4072 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4073 actually being tested. *PLOW and *PHIGH will be made of the same
4074 type as the returned expression. If EXP is not a comparison, we
4075 will most likely not be returning a useful value and range. Set
4076 *STRICT_OVERFLOW_P to true if the return value is only valid
4077 because signed overflow is undefined; otherwise, do not change
4078 *STRICT_OVERFLOW_P. */
4079
4080 tree
4081 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4082 bool *strict_overflow_p)
4083 {
4084 enum tree_code code;
4085 tree arg0, arg1 = NULL_TREE;
4086 tree exp_type, nexp;
4087 int in_p;
4088 tree low, high;
4089 location_t loc = EXPR_LOCATION (exp);
4090
4091 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4092 and see if we can refine the range. Some of the cases below may not
4093 happen, but it doesn't seem worth worrying about this. We "continue"
4094 the outer loop when we've changed something; otherwise we "break"
4095 the switch, which will "break" the while. */
4096
4097 in_p = 0;
4098 low = high = build_int_cst (TREE_TYPE (exp), 0);
4099
4100 while (1)
4101 {
4102 code = TREE_CODE (exp);
4103 exp_type = TREE_TYPE (exp);
4104 arg0 = NULL_TREE;
4105
4106 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4107 {
4108 if (TREE_OPERAND_LENGTH (exp) > 0)
4109 arg0 = TREE_OPERAND (exp, 0);
4110 if (TREE_CODE_CLASS (code) == tcc_binary
4111 || TREE_CODE_CLASS (code) == tcc_comparison
4112 || (TREE_CODE_CLASS (code) == tcc_expression
4113 && TREE_OPERAND_LENGTH (exp) > 1))
4114 arg1 = TREE_OPERAND (exp, 1);
4115 }
4116 if (arg0 == NULL_TREE)
4117 break;
4118
4119 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4120 &high, &in_p, strict_overflow_p);
4121 if (nexp == NULL_TREE)
4122 break;
4123 exp = nexp;
4124 }
4125
4126 /* If EXP is a constant, we can evaluate whether this is true or false. */
4127 if (TREE_CODE (exp) == INTEGER_CST)
4128 {
4129 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4130 exp, 0, low, 0))
4131 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4132 exp, 1, high, 1)));
4133 low = high = 0;
4134 exp = 0;
4135 }
4136
4137 *pin_p = in_p, *plow = low, *phigh = high;
4138 return exp;
4139 }
4140 \f
4141 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4142 type, TYPE, return an expression to test if EXP is in (or out of, depending
4143 on IN_P) the range. Return 0 if the test couldn't be created. */
4144
4145 tree
4146 build_range_check (location_t loc, tree type, tree exp, int in_p,
4147 tree low, tree high)
4148 {
4149 tree etype = TREE_TYPE (exp), value;
4150
4151 #ifdef HAVE_canonicalize_funcptr_for_compare
4152 /* Disable this optimization for function pointer expressions
4153 on targets that require function pointer canonicalization. */
4154 if (HAVE_canonicalize_funcptr_for_compare
4155 && TREE_CODE (etype) == POINTER_TYPE
4156 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4157 return NULL_TREE;
4158 #endif
4159
4160 if (! in_p)
4161 {
4162 value = build_range_check (loc, type, exp, 1, low, high);
4163 if (value != 0)
4164 return invert_truthvalue_loc (loc, value);
4165
4166 return 0;
4167 }
4168
4169 if (low == 0 && high == 0)
4170 return build_int_cst (type, 1);
4171
4172 if (low == 0)
4173 return fold_build2_loc (loc, LE_EXPR, type, exp,
4174 fold_convert_loc (loc, etype, high));
4175
4176 if (high == 0)
4177 return fold_build2_loc (loc, GE_EXPR, type, exp,
4178 fold_convert_loc (loc, etype, low));
4179
4180 if (operand_equal_p (low, high, 0))
4181 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4182 fold_convert_loc (loc, etype, low));
4183
4184 if (integer_zerop (low))
4185 {
4186 if (! TYPE_UNSIGNED (etype))
4187 {
4188 etype = unsigned_type_for (etype);
4189 high = fold_convert_loc (loc, etype, high);
4190 exp = fold_convert_loc (loc, etype, exp);
4191 }
4192 return build_range_check (loc, type, exp, 1, 0, high);
4193 }
4194
4195 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4196 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4197 {
4198 unsigned HOST_WIDE_INT lo;
4199 HOST_WIDE_INT hi;
4200 int prec;
4201
4202 prec = TYPE_PRECISION (etype);
4203 if (prec <= HOST_BITS_PER_WIDE_INT)
4204 {
4205 hi = 0;
4206 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4207 }
4208 else
4209 {
4210 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4211 lo = (unsigned HOST_WIDE_INT) -1;
4212 }
4213
4214 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4215 {
4216 if (TYPE_UNSIGNED (etype))
4217 {
4218 tree signed_etype = signed_type_for (etype);
4219 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4220 etype
4221 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4222 else
4223 etype = signed_etype;
4224 exp = fold_convert_loc (loc, etype, exp);
4225 }
4226 return fold_build2_loc (loc, GT_EXPR, type, exp,
4227 build_int_cst (etype, 0));
4228 }
4229 }
4230
4231 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4232 This requires wrap-around arithmetics for the type of the expression.
4233 First make sure that arithmetics in this type is valid, then make sure
4234 that it wraps around. */
4235 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4236 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4237 TYPE_UNSIGNED (etype));
4238
4239 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4240 {
4241 tree utype, minv, maxv;
4242
4243 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4244 for the type in question, as we rely on this here. */
4245 utype = unsigned_type_for (etype);
4246 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4247 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4248 integer_one_node, 1);
4249 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4250
4251 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4252 minv, 1, maxv, 1)))
4253 etype = utype;
4254 else
4255 return 0;
4256 }
4257
4258 high = fold_convert_loc (loc, etype, high);
4259 low = fold_convert_loc (loc, etype, low);
4260 exp = fold_convert_loc (loc, etype, exp);
4261
4262 value = const_binop (MINUS_EXPR, high, low);
4263
4264
4265 if (POINTER_TYPE_P (etype))
4266 {
4267 if (value != 0 && !TREE_OVERFLOW (value))
4268 {
4269 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4270 return build_range_check (loc, type,
4271 fold_build_pointer_plus_loc (loc, exp, low),
4272 1, build_int_cst (etype, 0), value);
4273 }
4274 return 0;
4275 }
4276
4277 if (value != 0 && !TREE_OVERFLOW (value))
4278 return build_range_check (loc, type,
4279 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4280 1, build_int_cst (etype, 0), value);
4281
4282 return 0;
4283 }
4284 \f
4285 /* Return the predecessor of VAL in its type, handling the infinite case. */
4286
4287 static tree
4288 range_predecessor (tree val)
4289 {
4290 tree type = TREE_TYPE (val);
4291
4292 if (INTEGRAL_TYPE_P (type)
4293 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4294 return 0;
4295 else
4296 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4297 }
4298
4299 /* Return the successor of VAL in its type, handling the infinite case. */
4300
4301 static tree
4302 range_successor (tree val)
4303 {
4304 tree type = TREE_TYPE (val);
4305
4306 if (INTEGRAL_TYPE_P (type)
4307 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4308 return 0;
4309 else
4310 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4311 }
4312
4313 /* Given two ranges, see if we can merge them into one. Return 1 if we
4314 can, 0 if we can't. Set the output range into the specified parameters. */
4315
4316 bool
4317 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4318 tree high0, int in1_p, tree low1, tree high1)
4319 {
4320 int no_overlap;
4321 int subset;
4322 int temp;
4323 tree tem;
4324 int in_p;
4325 tree low, high;
4326 int lowequal = ((low0 == 0 && low1 == 0)
4327 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4328 low0, 0, low1, 0)));
4329 int highequal = ((high0 == 0 && high1 == 0)
4330 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4331 high0, 1, high1, 1)));
4332
4333 /* Make range 0 be the range that starts first, or ends last if they
4334 start at the same value. Swap them if it isn't. */
4335 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4336 low0, 0, low1, 0))
4337 || (lowequal
4338 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4339 high1, 1, high0, 1))))
4340 {
4341 temp = in0_p, in0_p = in1_p, in1_p = temp;
4342 tem = low0, low0 = low1, low1 = tem;
4343 tem = high0, high0 = high1, high1 = tem;
4344 }
4345
4346 /* Now flag two cases, whether the ranges are disjoint or whether the
4347 second range is totally subsumed in the first. Note that the tests
4348 below are simplified by the ones above. */
4349 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4350 high0, 1, low1, 0));
4351 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4352 high1, 1, high0, 1));
4353
4354 /* We now have four cases, depending on whether we are including or
4355 excluding the two ranges. */
4356 if (in0_p && in1_p)
4357 {
4358 /* If they don't overlap, the result is false. If the second range
4359 is a subset it is the result. Otherwise, the range is from the start
4360 of the second to the end of the first. */
4361 if (no_overlap)
4362 in_p = 0, low = high = 0;
4363 else if (subset)
4364 in_p = 1, low = low1, high = high1;
4365 else
4366 in_p = 1, low = low1, high = high0;
4367 }
4368
4369 else if (in0_p && ! in1_p)
4370 {
4371 /* If they don't overlap, the result is the first range. If they are
4372 equal, the result is false. If the second range is a subset of the
4373 first, and the ranges begin at the same place, we go from just after
4374 the end of the second range to the end of the first. If the second
4375 range is not a subset of the first, or if it is a subset and both
4376 ranges end at the same place, the range starts at the start of the
4377 first range and ends just before the second range.
4378 Otherwise, we can't describe this as a single range. */
4379 if (no_overlap)
4380 in_p = 1, low = low0, high = high0;
4381 else if (lowequal && highequal)
4382 in_p = 0, low = high = 0;
4383 else if (subset && lowequal)
4384 {
4385 low = range_successor (high1);
4386 high = high0;
4387 in_p = 1;
4388 if (low == 0)
4389 {
4390 /* We are in the weird situation where high0 > high1 but
4391 high1 has no successor. Punt. */
4392 return 0;
4393 }
4394 }
4395 else if (! subset || highequal)
4396 {
4397 low = low0;
4398 high = range_predecessor (low1);
4399 in_p = 1;
4400 if (high == 0)
4401 {
4402 /* low0 < low1 but low1 has no predecessor. Punt. */
4403 return 0;
4404 }
4405 }
4406 else
4407 return 0;
4408 }
4409
4410 else if (! in0_p && in1_p)
4411 {
4412 /* If they don't overlap, the result is the second range. If the second
4413 is a subset of the first, the result is false. Otherwise,
4414 the range starts just after the first range and ends at the
4415 end of the second. */
4416 if (no_overlap)
4417 in_p = 1, low = low1, high = high1;
4418 else if (subset || highequal)
4419 in_p = 0, low = high = 0;
4420 else
4421 {
4422 low = range_successor (high0);
4423 high = high1;
4424 in_p = 1;
4425 if (low == 0)
4426 {
4427 /* high1 > high0 but high0 has no successor. Punt. */
4428 return 0;
4429 }
4430 }
4431 }
4432
4433 else
4434 {
4435 /* The case where we are excluding both ranges. Here the complex case
4436 is if they don't overlap. In that case, the only time we have a
4437 range is if they are adjacent. If the second is a subset of the
4438 first, the result is the first. Otherwise, the range to exclude
4439 starts at the beginning of the first range and ends at the end of the
4440 second. */
4441 if (no_overlap)
4442 {
4443 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4444 range_successor (high0),
4445 1, low1, 0)))
4446 in_p = 0, low = low0, high = high1;
4447 else
4448 {
4449 /* Canonicalize - [min, x] into - [-, x]. */
4450 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4451 switch (TREE_CODE (TREE_TYPE (low0)))
4452 {
4453 case ENUMERAL_TYPE:
4454 if (TYPE_PRECISION (TREE_TYPE (low0))
4455 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4456 break;
4457 /* FALLTHROUGH */
4458 case INTEGER_TYPE:
4459 if (tree_int_cst_equal (low0,
4460 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4461 low0 = 0;
4462 break;
4463 case POINTER_TYPE:
4464 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4465 && integer_zerop (low0))
4466 low0 = 0;
4467 break;
4468 default:
4469 break;
4470 }
4471
4472 /* Canonicalize - [x, max] into - [x, -]. */
4473 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4474 switch (TREE_CODE (TREE_TYPE (high1)))
4475 {
4476 case ENUMERAL_TYPE:
4477 if (TYPE_PRECISION (TREE_TYPE (high1))
4478 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4479 break;
4480 /* FALLTHROUGH */
4481 case INTEGER_TYPE:
4482 if (tree_int_cst_equal (high1,
4483 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4484 high1 = 0;
4485 break;
4486 case POINTER_TYPE:
4487 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4488 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4489 high1, 1,
4490 integer_one_node, 1)))
4491 high1 = 0;
4492 break;
4493 default:
4494 break;
4495 }
4496
4497 /* The ranges might be also adjacent between the maximum and
4498 minimum values of the given type. For
4499 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4500 return + [x + 1, y - 1]. */
4501 if (low0 == 0 && high1 == 0)
4502 {
4503 low = range_successor (high0);
4504 high = range_predecessor (low1);
4505 if (low == 0 || high == 0)
4506 return 0;
4507
4508 in_p = 1;
4509 }
4510 else
4511 return 0;
4512 }
4513 }
4514 else if (subset)
4515 in_p = 0, low = low0, high = high0;
4516 else
4517 in_p = 0, low = low0, high = high1;
4518 }
4519
4520 *pin_p = in_p, *plow = low, *phigh = high;
4521 return 1;
4522 }
4523 \f
4524
4525 /* Subroutine of fold, looking inside expressions of the form
4526 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4527 of the COND_EXPR. This function is being used also to optimize
4528 A op B ? C : A, by reversing the comparison first.
4529
4530 Return a folded expression whose code is not a COND_EXPR
4531 anymore, or NULL_TREE if no folding opportunity is found. */
4532
4533 static tree
4534 fold_cond_expr_with_comparison (location_t loc, tree type,
4535 tree arg0, tree arg1, tree arg2)
4536 {
4537 enum tree_code comp_code = TREE_CODE (arg0);
4538 tree arg00 = TREE_OPERAND (arg0, 0);
4539 tree arg01 = TREE_OPERAND (arg0, 1);
4540 tree arg1_type = TREE_TYPE (arg1);
4541 tree tem;
4542
4543 STRIP_NOPS (arg1);
4544 STRIP_NOPS (arg2);
4545
4546 /* If we have A op 0 ? A : -A, consider applying the following
4547 transformations:
4548
4549 A == 0? A : -A same as -A
4550 A != 0? A : -A same as A
4551 A >= 0? A : -A same as abs (A)
4552 A > 0? A : -A same as abs (A)
4553 A <= 0? A : -A same as -abs (A)
4554 A < 0? A : -A same as -abs (A)
4555
4556 None of these transformations work for modes with signed
4557 zeros. If A is +/-0, the first two transformations will
4558 change the sign of the result (from +0 to -0, or vice
4559 versa). The last four will fix the sign of the result,
4560 even though the original expressions could be positive or
4561 negative, depending on the sign of A.
4562
4563 Note that all these transformations are correct if A is
4564 NaN, since the two alternatives (A and -A) are also NaNs. */
4565 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4566 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4567 ? real_zerop (arg01)
4568 : integer_zerop (arg01))
4569 && ((TREE_CODE (arg2) == NEGATE_EXPR
4570 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4571 /* In the case that A is of the form X-Y, '-A' (arg2) may
4572 have already been folded to Y-X, check for that. */
4573 || (TREE_CODE (arg1) == MINUS_EXPR
4574 && TREE_CODE (arg2) == MINUS_EXPR
4575 && operand_equal_p (TREE_OPERAND (arg1, 0),
4576 TREE_OPERAND (arg2, 1), 0)
4577 && operand_equal_p (TREE_OPERAND (arg1, 1),
4578 TREE_OPERAND (arg2, 0), 0))))
4579 switch (comp_code)
4580 {
4581 case EQ_EXPR:
4582 case UNEQ_EXPR:
4583 tem = fold_convert_loc (loc, arg1_type, arg1);
4584 return pedantic_non_lvalue_loc (loc,
4585 fold_convert_loc (loc, type,
4586 negate_expr (tem)));
4587 case NE_EXPR:
4588 case LTGT_EXPR:
4589 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4590 case UNGE_EXPR:
4591 case UNGT_EXPR:
4592 if (flag_trapping_math)
4593 break;
4594 /* Fall through. */
4595 case GE_EXPR:
4596 case GT_EXPR:
4597 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4598 arg1 = fold_convert_loc (loc, signed_type_for
4599 (TREE_TYPE (arg1)), arg1);
4600 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4601 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4602 case UNLE_EXPR:
4603 case UNLT_EXPR:
4604 if (flag_trapping_math)
4605 break;
4606 case LE_EXPR:
4607 case LT_EXPR:
4608 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4609 arg1 = fold_convert_loc (loc, signed_type_for
4610 (TREE_TYPE (arg1)), arg1);
4611 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4612 return negate_expr (fold_convert_loc (loc, type, tem));
4613 default:
4614 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4615 break;
4616 }
4617
4618 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4619 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4620 both transformations are correct when A is NaN: A != 0
4621 is then true, and A == 0 is false. */
4622
4623 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4624 && integer_zerop (arg01) && integer_zerop (arg2))
4625 {
4626 if (comp_code == NE_EXPR)
4627 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4628 else if (comp_code == EQ_EXPR)
4629 return build_int_cst (type, 0);
4630 }
4631
4632 /* Try some transformations of A op B ? A : B.
4633
4634 A == B? A : B same as B
4635 A != B? A : B same as A
4636 A >= B? A : B same as max (A, B)
4637 A > B? A : B same as max (B, A)
4638 A <= B? A : B same as min (A, B)
4639 A < B? A : B same as min (B, A)
4640
4641 As above, these transformations don't work in the presence
4642 of signed zeros. For example, if A and B are zeros of
4643 opposite sign, the first two transformations will change
4644 the sign of the result. In the last four, the original
4645 expressions give different results for (A=+0, B=-0) and
4646 (A=-0, B=+0), but the transformed expressions do not.
4647
4648 The first two transformations are correct if either A or B
4649 is a NaN. In the first transformation, the condition will
4650 be false, and B will indeed be chosen. In the case of the
4651 second transformation, the condition A != B will be true,
4652 and A will be chosen.
4653
4654 The conversions to max() and min() are not correct if B is
4655 a number and A is not. The conditions in the original
4656 expressions will be false, so all four give B. The min()
4657 and max() versions would give a NaN instead. */
4658 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4659 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4660 /* Avoid these transformations if the COND_EXPR may be used
4661 as an lvalue in the C++ front-end. PR c++/19199. */
4662 && (in_gimple_form
4663 || (strcmp (lang_hooks.name, "GNU C++") != 0
4664 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4665 || ! maybe_lvalue_p (arg1)
4666 || ! maybe_lvalue_p (arg2)))
4667 {
4668 tree comp_op0 = arg00;
4669 tree comp_op1 = arg01;
4670 tree comp_type = TREE_TYPE (comp_op0);
4671
4672 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4673 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4674 {
4675 comp_type = type;
4676 comp_op0 = arg1;
4677 comp_op1 = arg2;
4678 }
4679
4680 switch (comp_code)
4681 {
4682 case EQ_EXPR:
4683 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4684 case NE_EXPR:
4685 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4686 case LE_EXPR:
4687 case LT_EXPR:
4688 case UNLE_EXPR:
4689 case UNLT_EXPR:
4690 /* In C++ a ?: expression can be an lvalue, so put the
4691 operand which will be used if they are equal first
4692 so that we can convert this back to the
4693 corresponding COND_EXPR. */
4694 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4695 {
4696 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4697 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4698 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4699 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4700 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4701 comp_op1, comp_op0);
4702 return pedantic_non_lvalue_loc (loc,
4703 fold_convert_loc (loc, type, tem));
4704 }
4705 break;
4706 case GE_EXPR:
4707 case GT_EXPR:
4708 case UNGE_EXPR:
4709 case UNGT_EXPR:
4710 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4711 {
4712 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4713 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4714 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4715 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4716 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4717 comp_op1, comp_op0);
4718 return pedantic_non_lvalue_loc (loc,
4719 fold_convert_loc (loc, type, tem));
4720 }
4721 break;
4722 case UNEQ_EXPR:
4723 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4724 return pedantic_non_lvalue_loc (loc,
4725 fold_convert_loc (loc, type, arg2));
4726 break;
4727 case LTGT_EXPR:
4728 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4729 return pedantic_non_lvalue_loc (loc,
4730 fold_convert_loc (loc, type, arg1));
4731 break;
4732 default:
4733 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4734 break;
4735 }
4736 }
4737
4738 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4739 we might still be able to simplify this. For example,
4740 if C1 is one less or one more than C2, this might have started
4741 out as a MIN or MAX and been transformed by this function.
4742 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4743
4744 if (INTEGRAL_TYPE_P (type)
4745 && TREE_CODE (arg01) == INTEGER_CST
4746 && TREE_CODE (arg2) == INTEGER_CST)
4747 switch (comp_code)
4748 {
4749 case EQ_EXPR:
4750 if (TREE_CODE (arg1) == INTEGER_CST)
4751 break;
4752 /* We can replace A with C1 in this case. */
4753 arg1 = fold_convert_loc (loc, type, arg01);
4754 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4755
4756 case LT_EXPR:
4757 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4758 MIN_EXPR, to preserve the signedness of the comparison. */
4759 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4760 OEP_ONLY_CONST)
4761 && operand_equal_p (arg01,
4762 const_binop (PLUS_EXPR, arg2,
4763 build_int_cst (type, 1)),
4764 OEP_ONLY_CONST))
4765 {
4766 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4767 fold_convert_loc (loc, TREE_TYPE (arg00),
4768 arg2));
4769 return pedantic_non_lvalue_loc (loc,
4770 fold_convert_loc (loc, type, tem));
4771 }
4772 break;
4773
4774 case LE_EXPR:
4775 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4776 as above. */
4777 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4778 OEP_ONLY_CONST)
4779 && operand_equal_p (arg01,
4780 const_binop (MINUS_EXPR, arg2,
4781 build_int_cst (type, 1)),
4782 OEP_ONLY_CONST))
4783 {
4784 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4785 fold_convert_loc (loc, TREE_TYPE (arg00),
4786 arg2));
4787 return pedantic_non_lvalue_loc (loc,
4788 fold_convert_loc (loc, type, tem));
4789 }
4790 break;
4791
4792 case GT_EXPR:
4793 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4794 MAX_EXPR, to preserve the signedness of the comparison. */
4795 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4796 OEP_ONLY_CONST)
4797 && operand_equal_p (arg01,
4798 const_binop (MINUS_EXPR, arg2,
4799 build_int_cst (type, 1)),
4800 OEP_ONLY_CONST))
4801 {
4802 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4803 fold_convert_loc (loc, TREE_TYPE (arg00),
4804 arg2));
4805 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4806 }
4807 break;
4808
4809 case GE_EXPR:
4810 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4811 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4812 OEP_ONLY_CONST)
4813 && operand_equal_p (arg01,
4814 const_binop (PLUS_EXPR, arg2,
4815 build_int_cst (type, 1)),
4816 OEP_ONLY_CONST))
4817 {
4818 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4819 fold_convert_loc (loc, TREE_TYPE (arg00),
4820 arg2));
4821 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4822 }
4823 break;
4824 case NE_EXPR:
4825 break;
4826 default:
4827 gcc_unreachable ();
4828 }
4829
4830 return NULL_TREE;
4831 }
4832
4833
4834 \f
4835 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4836 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4837 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4838 false) >= 2)
4839 #endif
4840
4841 /* EXP is some logical combination of boolean tests. See if we can
4842 merge it into some range test. Return the new tree if so. */
4843
4844 static tree
4845 fold_range_test (location_t loc, enum tree_code code, tree type,
4846 tree op0, tree op1)
4847 {
4848 int or_op = (code == TRUTH_ORIF_EXPR
4849 || code == TRUTH_OR_EXPR);
4850 int in0_p, in1_p, in_p;
4851 tree low0, low1, low, high0, high1, high;
4852 bool strict_overflow_p = false;
4853 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4854 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4855 tree tem;
4856 const char * const warnmsg = G_("assuming signed overflow does not occur "
4857 "when simplifying range test");
4858
4859 /* If this is an OR operation, invert both sides; we will invert
4860 again at the end. */
4861 if (or_op)
4862 in0_p = ! in0_p, in1_p = ! in1_p;
4863
4864 /* If both expressions are the same, if we can merge the ranges, and we
4865 can build the range test, return it or it inverted. If one of the
4866 ranges is always true or always false, consider it to be the same
4867 expression as the other. */
4868 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4869 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4870 in1_p, low1, high1)
4871 && 0 != (tem = (build_range_check (loc, type,
4872 lhs != 0 ? lhs
4873 : rhs != 0 ? rhs : integer_zero_node,
4874 in_p, low, high))))
4875 {
4876 if (strict_overflow_p)
4877 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4878 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4879 }
4880
4881 /* On machines where the branch cost is expensive, if this is a
4882 short-circuited branch and the underlying object on both sides
4883 is the same, make a non-short-circuit operation. */
4884 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4885 && lhs != 0 && rhs != 0
4886 && (code == TRUTH_ANDIF_EXPR
4887 || code == TRUTH_ORIF_EXPR)
4888 && operand_equal_p (lhs, rhs, 0))
4889 {
4890 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4891 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4892 which cases we can't do this. */
4893 if (simple_operand_p (lhs))
4894 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4895 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4896 type, op0, op1);
4897
4898 else if (!lang_hooks.decls.global_bindings_p ()
4899 && !CONTAINS_PLACEHOLDER_P (lhs))
4900 {
4901 tree common = save_expr (lhs);
4902
4903 if (0 != (lhs = build_range_check (loc, type, common,
4904 or_op ? ! in0_p : in0_p,
4905 low0, high0))
4906 && (0 != (rhs = build_range_check (loc, type, common,
4907 or_op ? ! in1_p : in1_p,
4908 low1, high1))))
4909 {
4910 if (strict_overflow_p)
4911 fold_overflow_warning (warnmsg,
4912 WARN_STRICT_OVERFLOW_COMPARISON);
4913 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4914 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4915 type, lhs, rhs);
4916 }
4917 }
4918 }
4919
4920 return 0;
4921 }
4922 \f
4923 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4924 bit value. Arrange things so the extra bits will be set to zero if and
4925 only if C is signed-extended to its full width. If MASK is nonzero,
4926 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4927
4928 static tree
4929 unextend (tree c, int p, int unsignedp, tree mask)
4930 {
4931 tree type = TREE_TYPE (c);
4932 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4933 tree temp;
4934
4935 if (p == modesize || unsignedp)
4936 return c;
4937
4938 /* We work by getting just the sign bit into the low-order bit, then
4939 into the high-order bit, then sign-extend. We then XOR that value
4940 with C. */
4941 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4942 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4943
4944 /* We must use a signed type in order to get an arithmetic right shift.
4945 However, we must also avoid introducing accidental overflows, so that
4946 a subsequent call to integer_zerop will work. Hence we must
4947 do the type conversion here. At this point, the constant is either
4948 zero or one, and the conversion to a signed type can never overflow.
4949 We could get an overflow if this conversion is done anywhere else. */
4950 if (TYPE_UNSIGNED (type))
4951 temp = fold_convert (signed_type_for (type), temp);
4952
4953 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4954 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4955 if (mask != 0)
4956 temp = const_binop (BIT_AND_EXPR, temp,
4957 fold_convert (TREE_TYPE (c), mask));
4958 /* If necessary, convert the type back to match the type of C. */
4959 if (TYPE_UNSIGNED (type))
4960 temp = fold_convert (type, temp);
4961
4962 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4963 }
4964 \f
4965 /* For an expression that has the form
4966 (A && B) || ~B
4967 or
4968 (A || B) && ~B,
4969 we can drop one of the inner expressions and simplify to
4970 A || ~B
4971 or
4972 A && ~B
4973 LOC is the location of the resulting expression. OP is the inner
4974 logical operation; the left-hand side in the examples above, while CMPOP
4975 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4976 removing a condition that guards another, as in
4977 (A != NULL && A->...) || A == NULL
4978 which we must not transform. If RHS_ONLY is true, only eliminate the
4979 right-most operand of the inner logical operation. */
4980
4981 static tree
4982 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4983 bool rhs_only)
4984 {
4985 tree type = TREE_TYPE (cmpop);
4986 enum tree_code code = TREE_CODE (cmpop);
4987 enum tree_code truthop_code = TREE_CODE (op);
4988 tree lhs = TREE_OPERAND (op, 0);
4989 tree rhs = TREE_OPERAND (op, 1);
4990 tree orig_lhs = lhs, orig_rhs = rhs;
4991 enum tree_code rhs_code = TREE_CODE (rhs);
4992 enum tree_code lhs_code = TREE_CODE (lhs);
4993 enum tree_code inv_code;
4994
4995 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4996 return NULL_TREE;
4997
4998 if (TREE_CODE_CLASS (code) != tcc_comparison)
4999 return NULL_TREE;
5000
5001 if (rhs_code == truthop_code)
5002 {
5003 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5004 if (newrhs != NULL_TREE)
5005 {
5006 rhs = newrhs;
5007 rhs_code = TREE_CODE (rhs);
5008 }
5009 }
5010 if (lhs_code == truthop_code && !rhs_only)
5011 {
5012 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5013 if (newlhs != NULL_TREE)
5014 {
5015 lhs = newlhs;
5016 lhs_code = TREE_CODE (lhs);
5017 }
5018 }
5019
5020 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5021 if (inv_code == rhs_code
5022 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5023 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5024 return lhs;
5025 if (!rhs_only && inv_code == lhs_code
5026 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5027 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5028 return rhs;
5029 if (rhs != orig_rhs || lhs != orig_lhs)
5030 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5031 lhs, rhs);
5032 return NULL_TREE;
5033 }
5034
5035 /* Find ways of folding logical expressions of LHS and RHS:
5036 Try to merge two comparisons to the same innermost item.
5037 Look for range tests like "ch >= '0' && ch <= '9'".
5038 Look for combinations of simple terms on machines with expensive branches
5039 and evaluate the RHS unconditionally.
5040
5041 For example, if we have p->a == 2 && p->b == 4 and we can make an
5042 object large enough to span both A and B, we can do this with a comparison
5043 against the object ANDed with the a mask.
5044
5045 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5046 operations to do this with one comparison.
5047
5048 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5049 function and the one above.
5050
5051 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5052 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5053
5054 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5055 two operands.
5056
5057 We return the simplified tree or 0 if no optimization is possible. */
5058
5059 static tree
5060 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5061 tree lhs, tree rhs)
5062 {
5063 /* If this is the "or" of two comparisons, we can do something if
5064 the comparisons are NE_EXPR. If this is the "and", we can do something
5065 if the comparisons are EQ_EXPR. I.e.,
5066 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5067
5068 WANTED_CODE is this operation code. For single bit fields, we can
5069 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5070 comparison for one-bit fields. */
5071
5072 enum tree_code wanted_code;
5073 enum tree_code lcode, rcode;
5074 tree ll_arg, lr_arg, rl_arg, rr_arg;
5075 tree ll_inner, lr_inner, rl_inner, rr_inner;
5076 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5077 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5078 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5079 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5080 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5081 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5082 enum machine_mode lnmode, rnmode;
5083 tree ll_mask, lr_mask, rl_mask, rr_mask;
5084 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5085 tree l_const, r_const;
5086 tree lntype, rntype, result;
5087 HOST_WIDE_INT first_bit, end_bit;
5088 int volatilep;
5089
5090 /* Start by getting the comparison codes. Fail if anything is volatile.
5091 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5092 it were surrounded with a NE_EXPR. */
5093
5094 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5095 return 0;
5096
5097 lcode = TREE_CODE (lhs);
5098 rcode = TREE_CODE (rhs);
5099
5100 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5101 {
5102 lhs = build2 (NE_EXPR, truth_type, lhs,
5103 build_int_cst (TREE_TYPE (lhs), 0));
5104 lcode = NE_EXPR;
5105 }
5106
5107 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5108 {
5109 rhs = build2 (NE_EXPR, truth_type, rhs,
5110 build_int_cst (TREE_TYPE (rhs), 0));
5111 rcode = NE_EXPR;
5112 }
5113
5114 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5115 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5116 return 0;
5117
5118 ll_arg = TREE_OPERAND (lhs, 0);
5119 lr_arg = TREE_OPERAND (lhs, 1);
5120 rl_arg = TREE_OPERAND (rhs, 0);
5121 rr_arg = TREE_OPERAND (rhs, 1);
5122
5123 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5124 if (simple_operand_p (ll_arg)
5125 && simple_operand_p (lr_arg))
5126 {
5127 if (operand_equal_p (ll_arg, rl_arg, 0)
5128 && operand_equal_p (lr_arg, rr_arg, 0))
5129 {
5130 result = combine_comparisons (loc, code, lcode, rcode,
5131 truth_type, ll_arg, lr_arg);
5132 if (result)
5133 return result;
5134 }
5135 else if (operand_equal_p (ll_arg, rr_arg, 0)
5136 && operand_equal_p (lr_arg, rl_arg, 0))
5137 {
5138 result = combine_comparisons (loc, code, lcode,
5139 swap_tree_comparison (rcode),
5140 truth_type, ll_arg, lr_arg);
5141 if (result)
5142 return result;
5143 }
5144 }
5145
5146 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5147 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5148
5149 /* If the RHS can be evaluated unconditionally and its operands are
5150 simple, it wins to evaluate the RHS unconditionally on machines
5151 with expensive branches. In this case, this isn't a comparison
5152 that can be merged. */
5153
5154 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5155 false) >= 2
5156 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5157 && simple_operand_p (rl_arg)
5158 && simple_operand_p (rr_arg))
5159 {
5160 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5161 if (code == TRUTH_OR_EXPR
5162 && lcode == NE_EXPR && integer_zerop (lr_arg)
5163 && rcode == NE_EXPR && integer_zerop (rr_arg)
5164 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5165 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5166 return build2_loc (loc, NE_EXPR, truth_type,
5167 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5168 ll_arg, rl_arg),
5169 build_int_cst (TREE_TYPE (ll_arg), 0));
5170
5171 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5172 if (code == TRUTH_AND_EXPR
5173 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5174 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5175 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5176 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5177 return build2_loc (loc, EQ_EXPR, truth_type,
5178 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5179 ll_arg, rl_arg),
5180 build_int_cst (TREE_TYPE (ll_arg), 0));
5181 }
5182
5183 /* See if the comparisons can be merged. Then get all the parameters for
5184 each side. */
5185
5186 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5187 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5188 return 0;
5189
5190 volatilep = 0;
5191 ll_inner = decode_field_reference (loc, ll_arg,
5192 &ll_bitsize, &ll_bitpos, &ll_mode,
5193 &ll_unsignedp, &volatilep, &ll_mask,
5194 &ll_and_mask);
5195 lr_inner = decode_field_reference (loc, lr_arg,
5196 &lr_bitsize, &lr_bitpos, &lr_mode,
5197 &lr_unsignedp, &volatilep, &lr_mask,
5198 &lr_and_mask);
5199 rl_inner = decode_field_reference (loc, rl_arg,
5200 &rl_bitsize, &rl_bitpos, &rl_mode,
5201 &rl_unsignedp, &volatilep, &rl_mask,
5202 &rl_and_mask);
5203 rr_inner = decode_field_reference (loc, rr_arg,
5204 &rr_bitsize, &rr_bitpos, &rr_mode,
5205 &rr_unsignedp, &volatilep, &rr_mask,
5206 &rr_and_mask);
5207
5208 /* It must be true that the inner operation on the lhs of each
5209 comparison must be the same if we are to be able to do anything.
5210 Then see if we have constants. If not, the same must be true for
5211 the rhs's. */
5212 if (volatilep || ll_inner == 0 || rl_inner == 0
5213 || ! operand_equal_p (ll_inner, rl_inner, 0))
5214 return 0;
5215
5216 if (TREE_CODE (lr_arg) == INTEGER_CST
5217 && TREE_CODE (rr_arg) == INTEGER_CST)
5218 l_const = lr_arg, r_const = rr_arg;
5219 else if (lr_inner == 0 || rr_inner == 0
5220 || ! operand_equal_p (lr_inner, rr_inner, 0))
5221 return 0;
5222 else
5223 l_const = r_const = 0;
5224
5225 /* If either comparison code is not correct for our logical operation,
5226 fail. However, we can convert a one-bit comparison against zero into
5227 the opposite comparison against that bit being set in the field. */
5228
5229 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5230 if (lcode != wanted_code)
5231 {
5232 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5233 {
5234 /* Make the left operand unsigned, since we are only interested
5235 in the value of one bit. Otherwise we are doing the wrong
5236 thing below. */
5237 ll_unsignedp = 1;
5238 l_const = ll_mask;
5239 }
5240 else
5241 return 0;
5242 }
5243
5244 /* This is analogous to the code for l_const above. */
5245 if (rcode != wanted_code)
5246 {
5247 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5248 {
5249 rl_unsignedp = 1;
5250 r_const = rl_mask;
5251 }
5252 else
5253 return 0;
5254 }
5255
5256 /* See if we can find a mode that contains both fields being compared on
5257 the left. If we can't, fail. Otherwise, update all constants and masks
5258 to be relative to a field of that size. */
5259 first_bit = MIN (ll_bitpos, rl_bitpos);
5260 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5261 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5262 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5263 volatilep);
5264 if (lnmode == VOIDmode)
5265 return 0;
5266
5267 lnbitsize = GET_MODE_BITSIZE (lnmode);
5268 lnbitpos = first_bit & ~ (lnbitsize - 1);
5269 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5270 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5271
5272 if (BYTES_BIG_ENDIAN)
5273 {
5274 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5275 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5276 }
5277
5278 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5279 size_int (xll_bitpos));
5280 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5281 size_int (xrl_bitpos));
5282
5283 if (l_const)
5284 {
5285 l_const = fold_convert_loc (loc, lntype, l_const);
5286 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5287 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5288 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5289 fold_build1_loc (loc, BIT_NOT_EXPR,
5290 lntype, ll_mask))))
5291 {
5292 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5293
5294 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5295 }
5296 }
5297 if (r_const)
5298 {
5299 r_const = fold_convert_loc (loc, lntype, r_const);
5300 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5301 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5302 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5303 fold_build1_loc (loc, BIT_NOT_EXPR,
5304 lntype, rl_mask))))
5305 {
5306 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5307
5308 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5309 }
5310 }
5311
5312 /* If the right sides are not constant, do the same for it. Also,
5313 disallow this optimization if a size or signedness mismatch occurs
5314 between the left and right sides. */
5315 if (l_const == 0)
5316 {
5317 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5318 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5319 /* Make sure the two fields on the right
5320 correspond to the left without being swapped. */
5321 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5322 return 0;
5323
5324 first_bit = MIN (lr_bitpos, rr_bitpos);
5325 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5326 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5327 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5328 volatilep);
5329 if (rnmode == VOIDmode)
5330 return 0;
5331
5332 rnbitsize = GET_MODE_BITSIZE (rnmode);
5333 rnbitpos = first_bit & ~ (rnbitsize - 1);
5334 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5335 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5336
5337 if (BYTES_BIG_ENDIAN)
5338 {
5339 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5340 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5341 }
5342
5343 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5344 rntype, lr_mask),
5345 size_int (xlr_bitpos));
5346 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5347 rntype, rr_mask),
5348 size_int (xrr_bitpos));
5349
5350 /* Make a mask that corresponds to both fields being compared.
5351 Do this for both items being compared. If the operands are the
5352 same size and the bits being compared are in the same position
5353 then we can do this by masking both and comparing the masked
5354 results. */
5355 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5356 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5357 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5358 {
5359 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5360 ll_unsignedp || rl_unsignedp);
5361 if (! all_ones_mask_p (ll_mask, lnbitsize))
5362 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5363
5364 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5365 lr_unsignedp || rr_unsignedp);
5366 if (! all_ones_mask_p (lr_mask, rnbitsize))
5367 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5368
5369 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5370 }
5371
5372 /* There is still another way we can do something: If both pairs of
5373 fields being compared are adjacent, we may be able to make a wider
5374 field containing them both.
5375
5376 Note that we still must mask the lhs/rhs expressions. Furthermore,
5377 the mask must be shifted to account for the shift done by
5378 make_bit_field_ref. */
5379 if ((ll_bitsize + ll_bitpos == rl_bitpos
5380 && lr_bitsize + lr_bitpos == rr_bitpos)
5381 || (ll_bitpos == rl_bitpos + rl_bitsize
5382 && lr_bitpos == rr_bitpos + rr_bitsize))
5383 {
5384 tree type;
5385
5386 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5387 ll_bitsize + rl_bitsize,
5388 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5389 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5390 lr_bitsize + rr_bitsize,
5391 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5392
5393 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5394 size_int (MIN (xll_bitpos, xrl_bitpos)));
5395 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5396 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5397
5398 /* Convert to the smaller type before masking out unwanted bits. */
5399 type = lntype;
5400 if (lntype != rntype)
5401 {
5402 if (lnbitsize > rnbitsize)
5403 {
5404 lhs = fold_convert_loc (loc, rntype, lhs);
5405 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5406 type = rntype;
5407 }
5408 else if (lnbitsize < rnbitsize)
5409 {
5410 rhs = fold_convert_loc (loc, lntype, rhs);
5411 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5412 type = lntype;
5413 }
5414 }
5415
5416 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5417 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5418
5419 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5420 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5421
5422 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5423 }
5424
5425 return 0;
5426 }
5427
5428 /* Handle the case of comparisons with constants. If there is something in
5429 common between the masks, those bits of the constants must be the same.
5430 If not, the condition is always false. Test for this to avoid generating
5431 incorrect code below. */
5432 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5433 if (! integer_zerop (result)
5434 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5435 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5436 {
5437 if (wanted_code == NE_EXPR)
5438 {
5439 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5440 return constant_boolean_node (true, truth_type);
5441 }
5442 else
5443 {
5444 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5445 return constant_boolean_node (false, truth_type);
5446 }
5447 }
5448
5449 /* Construct the expression we will return. First get the component
5450 reference we will make. Unless the mask is all ones the width of
5451 that field, perform the mask operation. Then compare with the
5452 merged constant. */
5453 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5454 ll_unsignedp || rl_unsignedp);
5455
5456 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5457 if (! all_ones_mask_p (ll_mask, lnbitsize))
5458 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5459
5460 return build2_loc (loc, wanted_code, truth_type, result,
5461 const_binop (BIT_IOR_EXPR, l_const, r_const));
5462 }
5463 \f
5464 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5465 constant. */
5466
5467 static tree
5468 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5469 tree op0, tree op1)
5470 {
5471 tree arg0 = op0;
5472 enum tree_code op_code;
5473 tree comp_const;
5474 tree minmax_const;
5475 int consts_equal, consts_lt;
5476 tree inner;
5477
5478 STRIP_SIGN_NOPS (arg0);
5479
5480 op_code = TREE_CODE (arg0);
5481 minmax_const = TREE_OPERAND (arg0, 1);
5482 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5483 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5484 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5485 inner = TREE_OPERAND (arg0, 0);
5486
5487 /* If something does not permit us to optimize, return the original tree. */
5488 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5489 || TREE_CODE (comp_const) != INTEGER_CST
5490 || TREE_OVERFLOW (comp_const)
5491 || TREE_CODE (minmax_const) != INTEGER_CST
5492 || TREE_OVERFLOW (minmax_const))
5493 return NULL_TREE;
5494
5495 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5496 and GT_EXPR, doing the rest with recursive calls using logical
5497 simplifications. */
5498 switch (code)
5499 {
5500 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5501 {
5502 tree tem
5503 = optimize_minmax_comparison (loc,
5504 invert_tree_comparison (code, false),
5505 type, op0, op1);
5506 if (tem)
5507 return invert_truthvalue_loc (loc, tem);
5508 return NULL_TREE;
5509 }
5510
5511 case GE_EXPR:
5512 return
5513 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5514 optimize_minmax_comparison
5515 (loc, EQ_EXPR, type, arg0, comp_const),
5516 optimize_minmax_comparison
5517 (loc, GT_EXPR, type, arg0, comp_const));
5518
5519 case EQ_EXPR:
5520 if (op_code == MAX_EXPR && consts_equal)
5521 /* MAX (X, 0) == 0 -> X <= 0 */
5522 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5523
5524 else if (op_code == MAX_EXPR && consts_lt)
5525 /* MAX (X, 0) == 5 -> X == 5 */
5526 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5527
5528 else if (op_code == MAX_EXPR)
5529 /* MAX (X, 0) == -1 -> false */
5530 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5531
5532 else if (consts_equal)
5533 /* MIN (X, 0) == 0 -> X >= 0 */
5534 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5535
5536 else if (consts_lt)
5537 /* MIN (X, 0) == 5 -> false */
5538 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5539
5540 else
5541 /* MIN (X, 0) == -1 -> X == -1 */
5542 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5543
5544 case GT_EXPR:
5545 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5546 /* MAX (X, 0) > 0 -> X > 0
5547 MAX (X, 0) > 5 -> X > 5 */
5548 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5549
5550 else if (op_code == MAX_EXPR)
5551 /* MAX (X, 0) > -1 -> true */
5552 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5553
5554 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5555 /* MIN (X, 0) > 0 -> false
5556 MIN (X, 0) > 5 -> false */
5557 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5558
5559 else
5560 /* MIN (X, 0) > -1 -> X > -1 */
5561 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5562
5563 default:
5564 return NULL_TREE;
5565 }
5566 }
5567 \f
5568 /* T is an integer expression that is being multiplied, divided, or taken a
5569 modulus (CODE says which and what kind of divide or modulus) by a
5570 constant C. See if we can eliminate that operation by folding it with
5571 other operations already in T. WIDE_TYPE, if non-null, is a type that
5572 should be used for the computation if wider than our type.
5573
5574 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5575 (X * 2) + (Y * 4). We must, however, be assured that either the original
5576 expression would not overflow or that overflow is undefined for the type
5577 in the language in question.
5578
5579 If we return a non-null expression, it is an equivalent form of the
5580 original computation, but need not be in the original type.
5581
5582 We set *STRICT_OVERFLOW_P to true if the return values depends on
5583 signed overflow being undefined. Otherwise we do not change
5584 *STRICT_OVERFLOW_P. */
5585
5586 static tree
5587 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5588 bool *strict_overflow_p)
5589 {
5590 /* To avoid exponential search depth, refuse to allow recursion past
5591 three levels. Beyond that (1) it's highly unlikely that we'll find
5592 something interesting and (2) we've probably processed it before
5593 when we built the inner expression. */
5594
5595 static int depth;
5596 tree ret;
5597
5598 if (depth > 3)
5599 return NULL;
5600
5601 depth++;
5602 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5603 depth--;
5604
5605 return ret;
5606 }
5607
5608 static tree
5609 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5610 bool *strict_overflow_p)
5611 {
5612 tree type = TREE_TYPE (t);
5613 enum tree_code tcode = TREE_CODE (t);
5614 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5615 > GET_MODE_SIZE (TYPE_MODE (type)))
5616 ? wide_type : type);
5617 tree t1, t2;
5618 int same_p = tcode == code;
5619 tree op0 = NULL_TREE, op1 = NULL_TREE;
5620 bool sub_strict_overflow_p;
5621
5622 /* Don't deal with constants of zero here; they confuse the code below. */
5623 if (integer_zerop (c))
5624 return NULL_TREE;
5625
5626 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5627 op0 = TREE_OPERAND (t, 0);
5628
5629 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5630 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5631
5632 /* Note that we need not handle conditional operations here since fold
5633 already handles those cases. So just do arithmetic here. */
5634 switch (tcode)
5635 {
5636 case INTEGER_CST:
5637 /* For a constant, we can always simplify if we are a multiply
5638 or (for divide and modulus) if it is a multiple of our constant. */
5639 if (code == MULT_EXPR
5640 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5641 return const_binop (code, fold_convert (ctype, t),
5642 fold_convert (ctype, c));
5643 break;
5644
5645 CASE_CONVERT: case NON_LVALUE_EXPR:
5646 /* If op0 is an expression ... */
5647 if ((COMPARISON_CLASS_P (op0)
5648 || UNARY_CLASS_P (op0)
5649 || BINARY_CLASS_P (op0)
5650 || VL_EXP_CLASS_P (op0)
5651 || EXPRESSION_CLASS_P (op0))
5652 /* ... and has wrapping overflow, and its type is smaller
5653 than ctype, then we cannot pass through as widening. */
5654 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5655 && (TYPE_PRECISION (ctype)
5656 > TYPE_PRECISION (TREE_TYPE (op0))))
5657 /* ... or this is a truncation (t is narrower than op0),
5658 then we cannot pass through this narrowing. */
5659 || (TYPE_PRECISION (type)
5660 < TYPE_PRECISION (TREE_TYPE (op0)))
5661 /* ... or signedness changes for division or modulus,
5662 then we cannot pass through this conversion. */
5663 || (code != MULT_EXPR
5664 && (TYPE_UNSIGNED (ctype)
5665 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5666 /* ... or has undefined overflow while the converted to
5667 type has not, we cannot do the operation in the inner type
5668 as that would introduce undefined overflow. */
5669 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5670 && !TYPE_OVERFLOW_UNDEFINED (type))))
5671 break;
5672
5673 /* Pass the constant down and see if we can make a simplification. If
5674 we can, replace this expression with the inner simplification for
5675 possible later conversion to our or some other type. */
5676 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5677 && TREE_CODE (t2) == INTEGER_CST
5678 && !TREE_OVERFLOW (t2)
5679 && (0 != (t1 = extract_muldiv (op0, t2, code,
5680 code == MULT_EXPR
5681 ? ctype : NULL_TREE,
5682 strict_overflow_p))))
5683 return t1;
5684 break;
5685
5686 case ABS_EXPR:
5687 /* If widening the type changes it from signed to unsigned, then we
5688 must avoid building ABS_EXPR itself as unsigned. */
5689 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5690 {
5691 tree cstype = (*signed_type_for) (ctype);
5692 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5693 != 0)
5694 {
5695 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5696 return fold_convert (ctype, t1);
5697 }
5698 break;
5699 }
5700 /* If the constant is negative, we cannot simplify this. */
5701 if (tree_int_cst_sgn (c) == -1)
5702 break;
5703 /* FALLTHROUGH */
5704 case NEGATE_EXPR:
5705 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5706 != 0)
5707 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5708 break;
5709
5710 case MIN_EXPR: case MAX_EXPR:
5711 /* If widening the type changes the signedness, then we can't perform
5712 this optimization as that changes the result. */
5713 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5714 break;
5715
5716 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5717 sub_strict_overflow_p = false;
5718 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5719 &sub_strict_overflow_p)) != 0
5720 && (t2 = extract_muldiv (op1, c, code, wide_type,
5721 &sub_strict_overflow_p)) != 0)
5722 {
5723 if (tree_int_cst_sgn (c) < 0)
5724 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5725 if (sub_strict_overflow_p)
5726 *strict_overflow_p = true;
5727 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5728 fold_convert (ctype, t2));
5729 }
5730 break;
5731
5732 case LSHIFT_EXPR: case RSHIFT_EXPR:
5733 /* If the second operand is constant, this is a multiplication
5734 or floor division, by a power of two, so we can treat it that
5735 way unless the multiplier or divisor overflows. Signed
5736 left-shift overflow is implementation-defined rather than
5737 undefined in C90, so do not convert signed left shift into
5738 multiplication. */
5739 if (TREE_CODE (op1) == INTEGER_CST
5740 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5741 /* const_binop may not detect overflow correctly,
5742 so check for it explicitly here. */
5743 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5744 && TREE_INT_CST_HIGH (op1) == 0
5745 && 0 != (t1 = fold_convert (ctype,
5746 const_binop (LSHIFT_EXPR,
5747 size_one_node,
5748 op1)))
5749 && !TREE_OVERFLOW (t1))
5750 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5751 ? MULT_EXPR : FLOOR_DIV_EXPR,
5752 ctype,
5753 fold_convert (ctype, op0),
5754 t1),
5755 c, code, wide_type, strict_overflow_p);
5756 break;
5757
5758 case PLUS_EXPR: case MINUS_EXPR:
5759 /* See if we can eliminate the operation on both sides. If we can, we
5760 can return a new PLUS or MINUS. If we can't, the only remaining
5761 cases where we can do anything are if the second operand is a
5762 constant. */
5763 sub_strict_overflow_p = false;
5764 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5765 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5766 if (t1 != 0 && t2 != 0
5767 && (code == MULT_EXPR
5768 /* If not multiplication, we can only do this if both operands
5769 are divisible by c. */
5770 || (multiple_of_p (ctype, op0, c)
5771 && multiple_of_p (ctype, op1, c))))
5772 {
5773 if (sub_strict_overflow_p)
5774 *strict_overflow_p = true;
5775 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5776 fold_convert (ctype, t2));
5777 }
5778
5779 /* If this was a subtraction, negate OP1 and set it to be an addition.
5780 This simplifies the logic below. */
5781 if (tcode == MINUS_EXPR)
5782 {
5783 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5784 /* If OP1 was not easily negatable, the constant may be OP0. */
5785 if (TREE_CODE (op0) == INTEGER_CST)
5786 {
5787 tree tem = op0;
5788 op0 = op1;
5789 op1 = tem;
5790 tem = t1;
5791 t1 = t2;
5792 t2 = tem;
5793 }
5794 }
5795
5796 if (TREE_CODE (op1) != INTEGER_CST)
5797 break;
5798
5799 /* If either OP1 or C are negative, this optimization is not safe for
5800 some of the division and remainder types while for others we need
5801 to change the code. */
5802 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5803 {
5804 if (code == CEIL_DIV_EXPR)
5805 code = FLOOR_DIV_EXPR;
5806 else if (code == FLOOR_DIV_EXPR)
5807 code = CEIL_DIV_EXPR;
5808 else if (code != MULT_EXPR
5809 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5810 break;
5811 }
5812
5813 /* If it's a multiply or a division/modulus operation of a multiple
5814 of our constant, do the operation and verify it doesn't overflow. */
5815 if (code == MULT_EXPR
5816 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5817 {
5818 op1 = const_binop (code, fold_convert (ctype, op1),
5819 fold_convert (ctype, c));
5820 /* We allow the constant to overflow with wrapping semantics. */
5821 if (op1 == 0
5822 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5823 break;
5824 }
5825 else
5826 break;
5827
5828 /* If we have an unsigned type is not a sizetype, we cannot widen
5829 the operation since it will change the result if the original
5830 computation overflowed. */
5831 if (TYPE_UNSIGNED (ctype)
5832 && ctype != type)
5833 break;
5834
5835 /* If we were able to eliminate our operation from the first side,
5836 apply our operation to the second side and reform the PLUS. */
5837 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5838 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5839
5840 /* The last case is if we are a multiply. In that case, we can
5841 apply the distributive law to commute the multiply and addition
5842 if the multiplication of the constants doesn't overflow. */
5843 if (code == MULT_EXPR)
5844 return fold_build2 (tcode, ctype,
5845 fold_build2 (code, ctype,
5846 fold_convert (ctype, op0),
5847 fold_convert (ctype, c)),
5848 op1);
5849
5850 break;
5851
5852 case MULT_EXPR:
5853 /* We have a special case here if we are doing something like
5854 (C * 8) % 4 since we know that's zero. */
5855 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5856 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5857 /* If the multiplication can overflow we cannot optimize this. */
5858 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5859 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5860 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5861 {
5862 *strict_overflow_p = true;
5863 return omit_one_operand (type, integer_zero_node, op0);
5864 }
5865
5866 /* ... fall through ... */
5867
5868 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5869 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5870 /* If we can extract our operation from the LHS, do so and return a
5871 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5872 do something only if the second operand is a constant. */
5873 if (same_p
5874 && (t1 = extract_muldiv (op0, c, code, wide_type,
5875 strict_overflow_p)) != 0)
5876 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5877 fold_convert (ctype, op1));
5878 else if (tcode == MULT_EXPR && code == MULT_EXPR
5879 && (t1 = extract_muldiv (op1, c, code, wide_type,
5880 strict_overflow_p)) != 0)
5881 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5882 fold_convert (ctype, t1));
5883 else if (TREE_CODE (op1) != INTEGER_CST)
5884 return 0;
5885
5886 /* If these are the same operation types, we can associate them
5887 assuming no overflow. */
5888 if (tcode == code)
5889 {
5890 double_int mul;
5891 int overflow_p;
5892 mul = double_int_mul_with_sign
5893 (double_int_ext
5894 (tree_to_double_int (op1),
5895 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5896 double_int_ext
5897 (tree_to_double_int (c),
5898 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5899 false, &overflow_p);
5900 overflow_p = ((!TYPE_UNSIGNED (ctype) && overflow_p)
5901 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5902 if (!double_int_fits_to_tree_p (ctype, mul)
5903 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5904 || !TYPE_UNSIGNED (ctype)))
5905 overflow_p = 1;
5906 if (!overflow_p)
5907 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5908 double_int_to_tree (ctype, mul));
5909 }
5910
5911 /* If these operations "cancel" each other, we have the main
5912 optimizations of this pass, which occur when either constant is a
5913 multiple of the other, in which case we replace this with either an
5914 operation or CODE or TCODE.
5915
5916 If we have an unsigned type, we cannot do this since it will change
5917 the result if the original computation overflowed. */
5918 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5919 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5920 || (tcode == MULT_EXPR
5921 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5922 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5923 && code != MULT_EXPR)))
5924 {
5925 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5926 {
5927 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5928 *strict_overflow_p = true;
5929 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5930 fold_convert (ctype,
5931 const_binop (TRUNC_DIV_EXPR,
5932 op1, c)));
5933 }
5934 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5935 {
5936 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5937 *strict_overflow_p = true;
5938 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5939 fold_convert (ctype,
5940 const_binop (TRUNC_DIV_EXPR,
5941 c, op1)));
5942 }
5943 }
5944 break;
5945
5946 default:
5947 break;
5948 }
5949
5950 return 0;
5951 }
5952 \f
5953 /* Return a node which has the indicated constant VALUE (either 0 or
5954 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5955 and is of the indicated TYPE. */
5956
5957 tree
5958 constant_boolean_node (bool value, tree type)
5959 {
5960 if (type == integer_type_node)
5961 return value ? integer_one_node : integer_zero_node;
5962 else if (type == boolean_type_node)
5963 return value ? boolean_true_node : boolean_false_node;
5964 else if (TREE_CODE (type) == VECTOR_TYPE)
5965 return build_vector_from_val (type,
5966 build_int_cst (TREE_TYPE (type),
5967 value ? -1 : 0));
5968 else
5969 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5970 }
5971
5972
5973 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5974 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5975 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5976 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5977 COND is the first argument to CODE; otherwise (as in the example
5978 given here), it is the second argument. TYPE is the type of the
5979 original expression. Return NULL_TREE if no simplification is
5980 possible. */
5981
5982 static tree
5983 fold_binary_op_with_conditional_arg (location_t loc,
5984 enum tree_code code,
5985 tree type, tree op0, tree op1,
5986 tree cond, tree arg, int cond_first_p)
5987 {
5988 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5989 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5990 tree test, true_value, false_value;
5991 tree lhs = NULL_TREE;
5992 tree rhs = NULL_TREE;
5993
5994 if (TREE_CODE (cond) == COND_EXPR)
5995 {
5996 test = TREE_OPERAND (cond, 0);
5997 true_value = TREE_OPERAND (cond, 1);
5998 false_value = TREE_OPERAND (cond, 2);
5999 /* If this operand throws an expression, then it does not make
6000 sense to try to perform a logical or arithmetic operation
6001 involving it. */
6002 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6003 lhs = true_value;
6004 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6005 rhs = false_value;
6006 }
6007 else
6008 {
6009 tree testtype = TREE_TYPE (cond);
6010 test = cond;
6011 true_value = constant_boolean_node (true, testtype);
6012 false_value = constant_boolean_node (false, testtype);
6013 }
6014
6015 /* This transformation is only worthwhile if we don't have to wrap ARG
6016 in a SAVE_EXPR and the operation can be simplified on at least one
6017 of the branches once its pushed inside the COND_EXPR. */
6018 if (!TREE_CONSTANT (arg)
6019 && (TREE_SIDE_EFFECTS (arg)
6020 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6021 return NULL_TREE;
6022
6023 arg = fold_convert_loc (loc, arg_type, arg);
6024 if (lhs == 0)
6025 {
6026 true_value = fold_convert_loc (loc, cond_type, true_value);
6027 if (cond_first_p)
6028 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6029 else
6030 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6031 }
6032 if (rhs == 0)
6033 {
6034 false_value = fold_convert_loc (loc, cond_type, false_value);
6035 if (cond_first_p)
6036 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6037 else
6038 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6039 }
6040
6041 /* Check that we have simplified at least one of the branches. */
6042 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6043 return NULL_TREE;
6044
6045 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6046 }
6047
6048 \f
6049 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6050
6051 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6052 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6053 ADDEND is the same as X.
6054
6055 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6056 and finite. The problematic cases are when X is zero, and its mode
6057 has signed zeros. In the case of rounding towards -infinity,
6058 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6059 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6060
6061 bool
6062 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6063 {
6064 if (!real_zerop (addend))
6065 return false;
6066
6067 /* Don't allow the fold with -fsignaling-nans. */
6068 if (HONOR_SNANS (TYPE_MODE (type)))
6069 return false;
6070
6071 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6072 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6073 return true;
6074
6075 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6076 if (TREE_CODE (addend) == REAL_CST
6077 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6078 negate = !negate;
6079
6080 /* The mode has signed zeros, and we have to honor their sign.
6081 In this situation, there is only one case we can return true for.
6082 X - 0 is the same as X unless rounding towards -infinity is
6083 supported. */
6084 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6085 }
6086
6087 /* Subroutine of fold() that checks comparisons of built-in math
6088 functions against real constants.
6089
6090 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6091 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6092 is the type of the result and ARG0 and ARG1 are the operands of the
6093 comparison. ARG1 must be a TREE_REAL_CST.
6094
6095 The function returns the constant folded tree if a simplification
6096 can be made, and NULL_TREE otherwise. */
6097
6098 static tree
6099 fold_mathfn_compare (location_t loc,
6100 enum built_in_function fcode, enum tree_code code,
6101 tree type, tree arg0, tree arg1)
6102 {
6103 REAL_VALUE_TYPE c;
6104
6105 if (BUILTIN_SQRT_P (fcode))
6106 {
6107 tree arg = CALL_EXPR_ARG (arg0, 0);
6108 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6109
6110 c = TREE_REAL_CST (arg1);
6111 if (REAL_VALUE_NEGATIVE (c))
6112 {
6113 /* sqrt(x) < y is always false, if y is negative. */
6114 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6115 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6116
6117 /* sqrt(x) > y is always true, if y is negative and we
6118 don't care about NaNs, i.e. negative values of x. */
6119 if (code == NE_EXPR || !HONOR_NANS (mode))
6120 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6121
6122 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6123 return fold_build2_loc (loc, GE_EXPR, type, arg,
6124 build_real (TREE_TYPE (arg), dconst0));
6125 }
6126 else if (code == GT_EXPR || code == GE_EXPR)
6127 {
6128 REAL_VALUE_TYPE c2;
6129
6130 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6131 real_convert (&c2, mode, &c2);
6132
6133 if (REAL_VALUE_ISINF (c2))
6134 {
6135 /* sqrt(x) > y is x == +Inf, when y is very large. */
6136 if (HONOR_INFINITIES (mode))
6137 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6138 build_real (TREE_TYPE (arg), c2));
6139
6140 /* sqrt(x) > y is always false, when y is very large
6141 and we don't care about infinities. */
6142 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6143 }
6144
6145 /* sqrt(x) > c is the same as x > c*c. */
6146 return fold_build2_loc (loc, code, type, arg,
6147 build_real (TREE_TYPE (arg), c2));
6148 }
6149 else if (code == LT_EXPR || code == LE_EXPR)
6150 {
6151 REAL_VALUE_TYPE c2;
6152
6153 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6154 real_convert (&c2, mode, &c2);
6155
6156 if (REAL_VALUE_ISINF (c2))
6157 {
6158 /* sqrt(x) < y is always true, when y is a very large
6159 value and we don't care about NaNs or Infinities. */
6160 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6161 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6162
6163 /* sqrt(x) < y is x != +Inf when y is very large and we
6164 don't care about NaNs. */
6165 if (! HONOR_NANS (mode))
6166 return fold_build2_loc (loc, NE_EXPR, type, arg,
6167 build_real (TREE_TYPE (arg), c2));
6168
6169 /* sqrt(x) < y is x >= 0 when y is very large and we
6170 don't care about Infinities. */
6171 if (! HONOR_INFINITIES (mode))
6172 return fold_build2_loc (loc, GE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg), dconst0));
6174
6175 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6176 arg = save_expr (arg);
6177 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6178 fold_build2_loc (loc, GE_EXPR, type, arg,
6179 build_real (TREE_TYPE (arg),
6180 dconst0)),
6181 fold_build2_loc (loc, NE_EXPR, type, arg,
6182 build_real (TREE_TYPE (arg),
6183 c2)));
6184 }
6185
6186 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6187 if (! HONOR_NANS (mode))
6188 return fold_build2_loc (loc, code, type, arg,
6189 build_real (TREE_TYPE (arg), c2));
6190
6191 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6192 arg = save_expr (arg);
6193 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6194 fold_build2_loc (loc, GE_EXPR, type, arg,
6195 build_real (TREE_TYPE (arg),
6196 dconst0)),
6197 fold_build2_loc (loc, code, type, arg,
6198 build_real (TREE_TYPE (arg),
6199 c2)));
6200 }
6201 }
6202
6203 return NULL_TREE;
6204 }
6205
6206 /* Subroutine of fold() that optimizes comparisons against Infinities,
6207 either +Inf or -Inf.
6208
6209 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6210 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6211 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6212
6213 The function returns the constant folded tree if a simplification
6214 can be made, and NULL_TREE otherwise. */
6215
6216 static tree
6217 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6218 tree arg0, tree arg1)
6219 {
6220 enum machine_mode mode;
6221 REAL_VALUE_TYPE max;
6222 tree temp;
6223 bool neg;
6224
6225 mode = TYPE_MODE (TREE_TYPE (arg0));
6226
6227 /* For negative infinity swap the sense of the comparison. */
6228 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6229 if (neg)
6230 code = swap_tree_comparison (code);
6231
6232 switch (code)
6233 {
6234 case GT_EXPR:
6235 /* x > +Inf is always false, if with ignore sNANs. */
6236 if (HONOR_SNANS (mode))
6237 return NULL_TREE;
6238 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6239
6240 case LE_EXPR:
6241 /* x <= +Inf is always true, if we don't case about NaNs. */
6242 if (! HONOR_NANS (mode))
6243 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6244
6245 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6246 arg0 = save_expr (arg0);
6247 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6248
6249 case EQ_EXPR:
6250 case GE_EXPR:
6251 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6252 real_maxval (&max, neg, mode);
6253 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6254 arg0, build_real (TREE_TYPE (arg0), max));
6255
6256 case LT_EXPR:
6257 /* x < +Inf is always equal to x <= DBL_MAX. */
6258 real_maxval (&max, neg, mode);
6259 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6260 arg0, build_real (TREE_TYPE (arg0), max));
6261
6262 case NE_EXPR:
6263 /* x != +Inf is always equal to !(x > DBL_MAX). */
6264 real_maxval (&max, neg, mode);
6265 if (! HONOR_NANS (mode))
6266 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6267 arg0, build_real (TREE_TYPE (arg0), max));
6268
6269 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6270 arg0, build_real (TREE_TYPE (arg0), max));
6271 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6272
6273 default:
6274 break;
6275 }
6276
6277 return NULL_TREE;
6278 }
6279
6280 /* Subroutine of fold() that optimizes comparisons of a division by
6281 a nonzero integer constant against an integer constant, i.e.
6282 X/C1 op C2.
6283
6284 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6285 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6286 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6287
6288 The function returns the constant folded tree if a simplification
6289 can be made, and NULL_TREE otherwise. */
6290
6291 static tree
6292 fold_div_compare (location_t loc,
6293 enum tree_code code, tree type, tree arg0, tree arg1)
6294 {
6295 tree prod, tmp, hi, lo;
6296 tree arg00 = TREE_OPERAND (arg0, 0);
6297 tree arg01 = TREE_OPERAND (arg0, 1);
6298 double_int val;
6299 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6300 bool neg_overflow;
6301 int overflow;
6302
6303 /* We have to do this the hard way to detect unsigned overflow.
6304 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6305 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6306 TREE_INT_CST_HIGH (arg01),
6307 TREE_INT_CST_LOW (arg1),
6308 TREE_INT_CST_HIGH (arg1),
6309 &val.low, &val.high, unsigned_p);
6310 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6311 neg_overflow = false;
6312
6313 if (unsigned_p)
6314 {
6315 tmp = int_const_binop (MINUS_EXPR, arg01,
6316 build_int_cst (TREE_TYPE (arg01), 1));
6317 lo = prod;
6318
6319 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6320 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6321 TREE_INT_CST_HIGH (prod),
6322 TREE_INT_CST_LOW (tmp),
6323 TREE_INT_CST_HIGH (tmp),
6324 &val.low, &val.high, unsigned_p);
6325 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6326 -1, overflow | TREE_OVERFLOW (prod));
6327 }
6328 else if (tree_int_cst_sgn (arg01) >= 0)
6329 {
6330 tmp = int_const_binop (MINUS_EXPR, arg01,
6331 build_int_cst (TREE_TYPE (arg01), 1));
6332 switch (tree_int_cst_sgn (arg1))
6333 {
6334 case -1:
6335 neg_overflow = true;
6336 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6337 hi = prod;
6338 break;
6339
6340 case 0:
6341 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6342 hi = tmp;
6343 break;
6344
6345 case 1:
6346 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6347 lo = prod;
6348 break;
6349
6350 default:
6351 gcc_unreachable ();
6352 }
6353 }
6354 else
6355 {
6356 /* A negative divisor reverses the relational operators. */
6357 code = swap_tree_comparison (code);
6358
6359 tmp = int_const_binop (PLUS_EXPR, arg01,
6360 build_int_cst (TREE_TYPE (arg01), 1));
6361 switch (tree_int_cst_sgn (arg1))
6362 {
6363 case -1:
6364 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6365 lo = prod;
6366 break;
6367
6368 case 0:
6369 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6370 lo = tmp;
6371 break;
6372
6373 case 1:
6374 neg_overflow = true;
6375 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6376 hi = prod;
6377 break;
6378
6379 default:
6380 gcc_unreachable ();
6381 }
6382 }
6383
6384 switch (code)
6385 {
6386 case EQ_EXPR:
6387 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6388 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6389 if (TREE_OVERFLOW (hi))
6390 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6391 if (TREE_OVERFLOW (lo))
6392 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6393 return build_range_check (loc, type, arg00, 1, lo, hi);
6394
6395 case NE_EXPR:
6396 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6397 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6398 if (TREE_OVERFLOW (hi))
6399 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6400 if (TREE_OVERFLOW (lo))
6401 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6402 return build_range_check (loc, type, arg00, 0, lo, hi);
6403
6404 case LT_EXPR:
6405 if (TREE_OVERFLOW (lo))
6406 {
6407 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6408 return omit_one_operand_loc (loc, type, tmp, arg00);
6409 }
6410 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6411
6412 case LE_EXPR:
6413 if (TREE_OVERFLOW (hi))
6414 {
6415 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6416 return omit_one_operand_loc (loc, type, tmp, arg00);
6417 }
6418 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6419
6420 case GT_EXPR:
6421 if (TREE_OVERFLOW (hi))
6422 {
6423 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6424 return omit_one_operand_loc (loc, type, tmp, arg00);
6425 }
6426 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6427
6428 case GE_EXPR:
6429 if (TREE_OVERFLOW (lo))
6430 {
6431 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6432 return omit_one_operand_loc (loc, type, tmp, arg00);
6433 }
6434 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6435
6436 default:
6437 break;
6438 }
6439
6440 return NULL_TREE;
6441 }
6442
6443
6444 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6445 equality/inequality test, then return a simplified form of the test
6446 using a sign testing. Otherwise return NULL. TYPE is the desired
6447 result type. */
6448
6449 static tree
6450 fold_single_bit_test_into_sign_test (location_t loc,
6451 enum tree_code code, tree arg0, tree arg1,
6452 tree result_type)
6453 {
6454 /* If this is testing a single bit, we can optimize the test. */
6455 if ((code == NE_EXPR || code == EQ_EXPR)
6456 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6457 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6458 {
6459 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6460 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6461 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6462
6463 if (arg00 != NULL_TREE
6464 /* This is only a win if casting to a signed type is cheap,
6465 i.e. when arg00's type is not a partial mode. */
6466 && TYPE_PRECISION (TREE_TYPE (arg00))
6467 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6468 {
6469 tree stype = signed_type_for (TREE_TYPE (arg00));
6470 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6471 result_type,
6472 fold_convert_loc (loc, stype, arg00),
6473 build_int_cst (stype, 0));
6474 }
6475 }
6476
6477 return NULL_TREE;
6478 }
6479
6480 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6481 equality/inequality test, then return a simplified form of
6482 the test using shifts and logical operations. Otherwise return
6483 NULL. TYPE is the desired result type. */
6484
6485 tree
6486 fold_single_bit_test (location_t loc, enum tree_code code,
6487 tree arg0, tree arg1, tree result_type)
6488 {
6489 /* If this is testing a single bit, we can optimize the test. */
6490 if ((code == NE_EXPR || code == EQ_EXPR)
6491 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6492 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6493 {
6494 tree inner = TREE_OPERAND (arg0, 0);
6495 tree type = TREE_TYPE (arg0);
6496 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6497 enum machine_mode operand_mode = TYPE_MODE (type);
6498 int ops_unsigned;
6499 tree signed_type, unsigned_type, intermediate_type;
6500 tree tem, one;
6501
6502 /* First, see if we can fold the single bit test into a sign-bit
6503 test. */
6504 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6505 result_type);
6506 if (tem)
6507 return tem;
6508
6509 /* Otherwise we have (A & C) != 0 where C is a single bit,
6510 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6511 Similarly for (A & C) == 0. */
6512
6513 /* If INNER is a right shift of a constant and it plus BITNUM does
6514 not overflow, adjust BITNUM and INNER. */
6515 if (TREE_CODE (inner) == RSHIFT_EXPR
6516 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6517 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6518 && bitnum < TYPE_PRECISION (type)
6519 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6520 bitnum - TYPE_PRECISION (type)))
6521 {
6522 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6523 inner = TREE_OPERAND (inner, 0);
6524 }
6525
6526 /* If we are going to be able to omit the AND below, we must do our
6527 operations as unsigned. If we must use the AND, we have a choice.
6528 Normally unsigned is faster, but for some machines signed is. */
6529 #ifdef LOAD_EXTEND_OP
6530 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6531 && !flag_syntax_only) ? 0 : 1;
6532 #else
6533 ops_unsigned = 1;
6534 #endif
6535
6536 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6537 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6538 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6539 inner = fold_convert_loc (loc, intermediate_type, inner);
6540
6541 if (bitnum != 0)
6542 inner = build2 (RSHIFT_EXPR, intermediate_type,
6543 inner, size_int (bitnum));
6544
6545 one = build_int_cst (intermediate_type, 1);
6546
6547 if (code == EQ_EXPR)
6548 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6549
6550 /* Put the AND last so it can combine with more things. */
6551 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6552
6553 /* Make sure to return the proper type. */
6554 inner = fold_convert_loc (loc, result_type, inner);
6555
6556 return inner;
6557 }
6558 return NULL_TREE;
6559 }
6560
6561 /* Check whether we are allowed to reorder operands arg0 and arg1,
6562 such that the evaluation of arg1 occurs before arg0. */
6563
6564 static bool
6565 reorder_operands_p (const_tree arg0, const_tree arg1)
6566 {
6567 if (! flag_evaluation_order)
6568 return true;
6569 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6570 return true;
6571 return ! TREE_SIDE_EFFECTS (arg0)
6572 && ! TREE_SIDE_EFFECTS (arg1);
6573 }
6574
6575 /* Test whether it is preferable two swap two operands, ARG0 and
6576 ARG1, for example because ARG0 is an integer constant and ARG1
6577 isn't. If REORDER is true, only recommend swapping if we can
6578 evaluate the operands in reverse order. */
6579
6580 bool
6581 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6582 {
6583 STRIP_SIGN_NOPS (arg0);
6584 STRIP_SIGN_NOPS (arg1);
6585
6586 if (TREE_CODE (arg1) == INTEGER_CST)
6587 return 0;
6588 if (TREE_CODE (arg0) == INTEGER_CST)
6589 return 1;
6590
6591 if (TREE_CODE (arg1) == REAL_CST)
6592 return 0;
6593 if (TREE_CODE (arg0) == REAL_CST)
6594 return 1;
6595
6596 if (TREE_CODE (arg1) == FIXED_CST)
6597 return 0;
6598 if (TREE_CODE (arg0) == FIXED_CST)
6599 return 1;
6600
6601 if (TREE_CODE (arg1) == COMPLEX_CST)
6602 return 0;
6603 if (TREE_CODE (arg0) == COMPLEX_CST)
6604 return 1;
6605
6606 if (TREE_CONSTANT (arg1))
6607 return 0;
6608 if (TREE_CONSTANT (arg0))
6609 return 1;
6610
6611 if (optimize_function_for_size_p (cfun))
6612 return 0;
6613
6614 if (reorder && flag_evaluation_order
6615 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6616 return 0;
6617
6618 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6619 for commutative and comparison operators. Ensuring a canonical
6620 form allows the optimizers to find additional redundancies without
6621 having to explicitly check for both orderings. */
6622 if (TREE_CODE (arg0) == SSA_NAME
6623 && TREE_CODE (arg1) == SSA_NAME
6624 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6625 return 1;
6626
6627 /* Put SSA_NAMEs last. */
6628 if (TREE_CODE (arg1) == SSA_NAME)
6629 return 0;
6630 if (TREE_CODE (arg0) == SSA_NAME)
6631 return 1;
6632
6633 /* Put variables last. */
6634 if (DECL_P (arg1))
6635 return 0;
6636 if (DECL_P (arg0))
6637 return 1;
6638
6639 return 0;
6640 }
6641
6642 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6643 ARG0 is extended to a wider type. */
6644
6645 static tree
6646 fold_widened_comparison (location_t loc, enum tree_code code,
6647 tree type, tree arg0, tree arg1)
6648 {
6649 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6650 tree arg1_unw;
6651 tree shorter_type, outer_type;
6652 tree min, max;
6653 bool above, below;
6654
6655 if (arg0_unw == arg0)
6656 return NULL_TREE;
6657 shorter_type = TREE_TYPE (arg0_unw);
6658
6659 #ifdef HAVE_canonicalize_funcptr_for_compare
6660 /* Disable this optimization if we're casting a function pointer
6661 type on targets that require function pointer canonicalization. */
6662 if (HAVE_canonicalize_funcptr_for_compare
6663 && TREE_CODE (shorter_type) == POINTER_TYPE
6664 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6665 return NULL_TREE;
6666 #endif
6667
6668 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6669 return NULL_TREE;
6670
6671 arg1_unw = get_unwidened (arg1, NULL_TREE);
6672
6673 /* If possible, express the comparison in the shorter mode. */
6674 if ((code == EQ_EXPR || code == NE_EXPR
6675 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6676 && (TREE_TYPE (arg1_unw) == shorter_type
6677 || ((TYPE_PRECISION (shorter_type)
6678 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6679 && (TYPE_UNSIGNED (shorter_type)
6680 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6681 || (TREE_CODE (arg1_unw) == INTEGER_CST
6682 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6683 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6684 && int_fits_type_p (arg1_unw, shorter_type))))
6685 return fold_build2_loc (loc, code, type, arg0_unw,
6686 fold_convert_loc (loc, shorter_type, arg1_unw));
6687
6688 if (TREE_CODE (arg1_unw) != INTEGER_CST
6689 || TREE_CODE (shorter_type) != INTEGER_TYPE
6690 || !int_fits_type_p (arg1_unw, shorter_type))
6691 return NULL_TREE;
6692
6693 /* If we are comparing with the integer that does not fit into the range
6694 of the shorter type, the result is known. */
6695 outer_type = TREE_TYPE (arg1_unw);
6696 min = lower_bound_in_type (outer_type, shorter_type);
6697 max = upper_bound_in_type (outer_type, shorter_type);
6698
6699 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6700 max, arg1_unw));
6701 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6702 arg1_unw, min));
6703
6704 switch (code)
6705 {
6706 case EQ_EXPR:
6707 if (above || below)
6708 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6709 break;
6710
6711 case NE_EXPR:
6712 if (above || below)
6713 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6714 break;
6715
6716 case LT_EXPR:
6717 case LE_EXPR:
6718 if (above)
6719 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6720 else if (below)
6721 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6722
6723 case GT_EXPR:
6724 case GE_EXPR:
6725 if (above)
6726 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6727 else if (below)
6728 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6729
6730 default:
6731 break;
6732 }
6733
6734 return NULL_TREE;
6735 }
6736
6737 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6738 ARG0 just the signedness is changed. */
6739
6740 static tree
6741 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6742 tree arg0, tree arg1)
6743 {
6744 tree arg0_inner;
6745 tree inner_type, outer_type;
6746
6747 if (!CONVERT_EXPR_P (arg0))
6748 return NULL_TREE;
6749
6750 outer_type = TREE_TYPE (arg0);
6751 arg0_inner = TREE_OPERAND (arg0, 0);
6752 inner_type = TREE_TYPE (arg0_inner);
6753
6754 #ifdef HAVE_canonicalize_funcptr_for_compare
6755 /* Disable this optimization if we're casting a function pointer
6756 type on targets that require function pointer canonicalization. */
6757 if (HAVE_canonicalize_funcptr_for_compare
6758 && TREE_CODE (inner_type) == POINTER_TYPE
6759 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6760 return NULL_TREE;
6761 #endif
6762
6763 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6764 return NULL_TREE;
6765
6766 if (TREE_CODE (arg1) != INTEGER_CST
6767 && !(CONVERT_EXPR_P (arg1)
6768 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6769 return NULL_TREE;
6770
6771 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6772 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6773 && code != NE_EXPR
6774 && code != EQ_EXPR)
6775 return NULL_TREE;
6776
6777 if (TREE_CODE (arg1) == INTEGER_CST)
6778 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6779 0, TREE_OVERFLOW (arg1));
6780 else
6781 arg1 = fold_convert_loc (loc, inner_type, arg1);
6782
6783 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6784 }
6785
6786 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6787 step of the array. Reconstructs s and delta in the case of s *
6788 delta being an integer constant (and thus already folded). ADDR is
6789 the address. MULT is the multiplicative expression. If the
6790 function succeeds, the new address expression is returned.
6791 Otherwise NULL_TREE is returned. LOC is the location of the
6792 resulting expression. */
6793
6794 static tree
6795 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6796 {
6797 tree s, delta, step;
6798 tree ref = TREE_OPERAND (addr, 0), pref;
6799 tree ret, pos;
6800 tree itype;
6801 bool mdim = false;
6802
6803 /* Strip the nops that might be added when converting op1 to sizetype. */
6804 STRIP_NOPS (op1);
6805
6806 /* Canonicalize op1 into a possibly non-constant delta
6807 and an INTEGER_CST s. */
6808 if (TREE_CODE (op1) == MULT_EXPR)
6809 {
6810 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6811
6812 STRIP_NOPS (arg0);
6813 STRIP_NOPS (arg1);
6814
6815 if (TREE_CODE (arg0) == INTEGER_CST)
6816 {
6817 s = arg0;
6818 delta = arg1;
6819 }
6820 else if (TREE_CODE (arg1) == INTEGER_CST)
6821 {
6822 s = arg1;
6823 delta = arg0;
6824 }
6825 else
6826 return NULL_TREE;
6827 }
6828 else if (TREE_CODE (op1) == INTEGER_CST)
6829 {
6830 delta = op1;
6831 s = NULL_TREE;
6832 }
6833 else
6834 {
6835 /* Simulate we are delta * 1. */
6836 delta = op1;
6837 s = integer_one_node;
6838 }
6839
6840 /* Handle &x.array the same as we would handle &x.array[0]. */
6841 if (TREE_CODE (ref) == COMPONENT_REF
6842 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6843 {
6844 tree domain;
6845
6846 /* Remember if this was a multi-dimensional array. */
6847 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6848 mdim = true;
6849
6850 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6851 if (! domain)
6852 goto cont;
6853 itype = TREE_TYPE (domain);
6854
6855 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6856 if (TREE_CODE (step) != INTEGER_CST)
6857 goto cont;
6858
6859 if (s)
6860 {
6861 if (! tree_int_cst_equal (step, s))
6862 goto cont;
6863 }
6864 else
6865 {
6866 /* Try if delta is a multiple of step. */
6867 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6868 if (! tmp)
6869 goto cont;
6870 delta = tmp;
6871 }
6872
6873 /* Only fold here if we can verify we do not overflow one
6874 dimension of a multi-dimensional array. */
6875 if (mdim)
6876 {
6877 tree tmp;
6878
6879 if (!TYPE_MIN_VALUE (domain)
6880 || !TYPE_MAX_VALUE (domain)
6881 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6882 goto cont;
6883
6884 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6885 fold_convert_loc (loc, itype,
6886 TYPE_MIN_VALUE (domain)),
6887 fold_convert_loc (loc, itype, delta));
6888 if (TREE_CODE (tmp) != INTEGER_CST
6889 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6890 goto cont;
6891 }
6892
6893 /* We found a suitable component reference. */
6894
6895 pref = TREE_OPERAND (addr, 0);
6896 ret = copy_node (pref);
6897 SET_EXPR_LOCATION (ret, loc);
6898
6899 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6900 fold_build2_loc
6901 (loc, PLUS_EXPR, itype,
6902 fold_convert_loc (loc, itype,
6903 TYPE_MIN_VALUE
6904 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6905 fold_convert_loc (loc, itype, delta)),
6906 NULL_TREE, NULL_TREE);
6907 return build_fold_addr_expr_loc (loc, ret);
6908 }
6909
6910 cont:
6911
6912 for (;; ref = TREE_OPERAND (ref, 0))
6913 {
6914 if (TREE_CODE (ref) == ARRAY_REF)
6915 {
6916 tree domain;
6917
6918 /* Remember if this was a multi-dimensional array. */
6919 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6920 mdim = true;
6921
6922 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6923 if (! domain)
6924 continue;
6925 itype = TREE_TYPE (domain);
6926
6927 step = array_ref_element_size (ref);
6928 if (TREE_CODE (step) != INTEGER_CST)
6929 continue;
6930
6931 if (s)
6932 {
6933 if (! tree_int_cst_equal (step, s))
6934 continue;
6935 }
6936 else
6937 {
6938 /* Try if delta is a multiple of step. */
6939 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6940 if (! tmp)
6941 continue;
6942 delta = tmp;
6943 }
6944
6945 /* Only fold here if we can verify we do not overflow one
6946 dimension of a multi-dimensional array. */
6947 if (mdim)
6948 {
6949 tree tmp;
6950
6951 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6952 || !TYPE_MAX_VALUE (domain)
6953 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6954 continue;
6955
6956 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6957 fold_convert_loc (loc, itype,
6958 TREE_OPERAND (ref, 1)),
6959 fold_convert_loc (loc, itype, delta));
6960 if (!tmp
6961 || TREE_CODE (tmp) != INTEGER_CST
6962 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6963 continue;
6964 }
6965
6966 break;
6967 }
6968 else
6969 mdim = false;
6970
6971 if (!handled_component_p (ref))
6972 return NULL_TREE;
6973 }
6974
6975 /* We found the suitable array reference. So copy everything up to it,
6976 and replace the index. */
6977
6978 pref = TREE_OPERAND (addr, 0);
6979 ret = copy_node (pref);
6980 SET_EXPR_LOCATION (ret, loc);
6981 pos = ret;
6982
6983 while (pref != ref)
6984 {
6985 pref = TREE_OPERAND (pref, 0);
6986 TREE_OPERAND (pos, 0) = copy_node (pref);
6987 pos = TREE_OPERAND (pos, 0);
6988 }
6989
6990 TREE_OPERAND (pos, 1)
6991 = fold_build2_loc (loc, PLUS_EXPR, itype,
6992 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6993 fold_convert_loc (loc, itype, delta));
6994 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6995 }
6996
6997
6998 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6999 means A >= Y && A != MAX, but in this case we know that
7000 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7001
7002 static tree
7003 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7004 {
7005 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7006
7007 if (TREE_CODE (bound) == LT_EXPR)
7008 a = TREE_OPERAND (bound, 0);
7009 else if (TREE_CODE (bound) == GT_EXPR)
7010 a = TREE_OPERAND (bound, 1);
7011 else
7012 return NULL_TREE;
7013
7014 typea = TREE_TYPE (a);
7015 if (!INTEGRAL_TYPE_P (typea)
7016 && !POINTER_TYPE_P (typea))
7017 return NULL_TREE;
7018
7019 if (TREE_CODE (ineq) == LT_EXPR)
7020 {
7021 a1 = TREE_OPERAND (ineq, 1);
7022 y = TREE_OPERAND (ineq, 0);
7023 }
7024 else if (TREE_CODE (ineq) == GT_EXPR)
7025 {
7026 a1 = TREE_OPERAND (ineq, 0);
7027 y = TREE_OPERAND (ineq, 1);
7028 }
7029 else
7030 return NULL_TREE;
7031
7032 if (TREE_TYPE (a1) != typea)
7033 return NULL_TREE;
7034
7035 if (POINTER_TYPE_P (typea))
7036 {
7037 /* Convert the pointer types into integer before taking the difference. */
7038 tree ta = fold_convert_loc (loc, ssizetype, a);
7039 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7040 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7041 }
7042 else
7043 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7044
7045 if (!diff || !integer_onep (diff))
7046 return NULL_TREE;
7047
7048 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7049 }
7050
7051 /* Fold a sum or difference of at least one multiplication.
7052 Returns the folded tree or NULL if no simplification could be made. */
7053
7054 static tree
7055 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7056 tree arg0, tree arg1)
7057 {
7058 tree arg00, arg01, arg10, arg11;
7059 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7060
7061 /* (A * C) +- (B * C) -> (A+-B) * C.
7062 (A * C) +- A -> A * (C+-1).
7063 We are most concerned about the case where C is a constant,
7064 but other combinations show up during loop reduction. Since
7065 it is not difficult, try all four possibilities. */
7066
7067 if (TREE_CODE (arg0) == MULT_EXPR)
7068 {
7069 arg00 = TREE_OPERAND (arg0, 0);
7070 arg01 = TREE_OPERAND (arg0, 1);
7071 }
7072 else if (TREE_CODE (arg0) == INTEGER_CST)
7073 {
7074 arg00 = build_one_cst (type);
7075 arg01 = arg0;
7076 }
7077 else
7078 {
7079 /* We cannot generate constant 1 for fract. */
7080 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7081 return NULL_TREE;
7082 arg00 = arg0;
7083 arg01 = build_one_cst (type);
7084 }
7085 if (TREE_CODE (arg1) == MULT_EXPR)
7086 {
7087 arg10 = TREE_OPERAND (arg1, 0);
7088 arg11 = TREE_OPERAND (arg1, 1);
7089 }
7090 else if (TREE_CODE (arg1) == INTEGER_CST)
7091 {
7092 arg10 = build_one_cst (type);
7093 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7094 the purpose of this canonicalization. */
7095 if (TREE_INT_CST_HIGH (arg1) == -1
7096 && negate_expr_p (arg1)
7097 && code == PLUS_EXPR)
7098 {
7099 arg11 = negate_expr (arg1);
7100 code = MINUS_EXPR;
7101 }
7102 else
7103 arg11 = arg1;
7104 }
7105 else
7106 {
7107 /* We cannot generate constant 1 for fract. */
7108 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7109 return NULL_TREE;
7110 arg10 = arg1;
7111 arg11 = build_one_cst (type);
7112 }
7113 same = NULL_TREE;
7114
7115 if (operand_equal_p (arg01, arg11, 0))
7116 same = arg01, alt0 = arg00, alt1 = arg10;
7117 else if (operand_equal_p (arg00, arg10, 0))
7118 same = arg00, alt0 = arg01, alt1 = arg11;
7119 else if (operand_equal_p (arg00, arg11, 0))
7120 same = arg00, alt0 = arg01, alt1 = arg10;
7121 else if (operand_equal_p (arg01, arg10, 0))
7122 same = arg01, alt0 = arg00, alt1 = arg11;
7123
7124 /* No identical multiplicands; see if we can find a common
7125 power-of-two factor in non-power-of-two multiplies. This
7126 can help in multi-dimensional array access. */
7127 else if (host_integerp (arg01, 0)
7128 && host_integerp (arg11, 0))
7129 {
7130 HOST_WIDE_INT int01, int11, tmp;
7131 bool swap = false;
7132 tree maybe_same;
7133 int01 = TREE_INT_CST_LOW (arg01);
7134 int11 = TREE_INT_CST_LOW (arg11);
7135
7136 /* Move min of absolute values to int11. */
7137 if (absu_hwi (int01) < absu_hwi (int11))
7138 {
7139 tmp = int01, int01 = int11, int11 = tmp;
7140 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7141 maybe_same = arg01;
7142 swap = true;
7143 }
7144 else
7145 maybe_same = arg11;
7146
7147 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7148 /* The remainder should not be a constant, otherwise we
7149 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7150 increased the number of multiplications necessary. */
7151 && TREE_CODE (arg10) != INTEGER_CST)
7152 {
7153 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7154 build_int_cst (TREE_TYPE (arg00),
7155 int01 / int11));
7156 alt1 = arg10;
7157 same = maybe_same;
7158 if (swap)
7159 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7160 }
7161 }
7162
7163 if (same)
7164 return fold_build2_loc (loc, MULT_EXPR, type,
7165 fold_build2_loc (loc, code, type,
7166 fold_convert_loc (loc, type, alt0),
7167 fold_convert_loc (loc, type, alt1)),
7168 fold_convert_loc (loc, type, same));
7169
7170 return NULL_TREE;
7171 }
7172
7173 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7174 specified by EXPR into the buffer PTR of length LEN bytes.
7175 Return the number of bytes placed in the buffer, or zero
7176 upon failure. */
7177
7178 static int
7179 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7180 {
7181 tree type = TREE_TYPE (expr);
7182 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7183 int byte, offset, word, words;
7184 unsigned char value;
7185
7186 if (total_bytes > len)
7187 return 0;
7188 words = total_bytes / UNITS_PER_WORD;
7189
7190 for (byte = 0; byte < total_bytes; byte++)
7191 {
7192 int bitpos = byte * BITS_PER_UNIT;
7193 if (bitpos < HOST_BITS_PER_WIDE_INT)
7194 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7195 else
7196 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7197 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7198
7199 if (total_bytes > UNITS_PER_WORD)
7200 {
7201 word = byte / UNITS_PER_WORD;
7202 if (WORDS_BIG_ENDIAN)
7203 word = (words - 1) - word;
7204 offset = word * UNITS_PER_WORD;
7205 if (BYTES_BIG_ENDIAN)
7206 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7207 else
7208 offset += byte % UNITS_PER_WORD;
7209 }
7210 else
7211 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7212 ptr[offset] = value;
7213 }
7214 return total_bytes;
7215 }
7216
7217
7218 /* Subroutine of native_encode_expr. Encode the REAL_CST
7219 specified by EXPR into the buffer PTR of length LEN bytes.
7220 Return the number of bytes placed in the buffer, or zero
7221 upon failure. */
7222
7223 static int
7224 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7225 {
7226 tree type = TREE_TYPE (expr);
7227 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7228 int byte, offset, word, words, bitpos;
7229 unsigned char value;
7230
7231 /* There are always 32 bits in each long, no matter the size of
7232 the hosts long. We handle floating point representations with
7233 up to 192 bits. */
7234 long tmp[6];
7235
7236 if (total_bytes > len)
7237 return 0;
7238 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7239
7240 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7241
7242 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7243 bitpos += BITS_PER_UNIT)
7244 {
7245 byte = (bitpos / BITS_PER_UNIT) & 3;
7246 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7247
7248 if (UNITS_PER_WORD < 4)
7249 {
7250 word = byte / UNITS_PER_WORD;
7251 if (WORDS_BIG_ENDIAN)
7252 word = (words - 1) - word;
7253 offset = word * UNITS_PER_WORD;
7254 if (BYTES_BIG_ENDIAN)
7255 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7256 else
7257 offset += byte % UNITS_PER_WORD;
7258 }
7259 else
7260 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7261 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7262 }
7263 return total_bytes;
7264 }
7265
7266 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7267 specified by EXPR into the buffer PTR of length LEN bytes.
7268 Return the number of bytes placed in the buffer, or zero
7269 upon failure. */
7270
7271 static int
7272 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7273 {
7274 int rsize, isize;
7275 tree part;
7276
7277 part = TREE_REALPART (expr);
7278 rsize = native_encode_expr (part, ptr, len);
7279 if (rsize == 0)
7280 return 0;
7281 part = TREE_IMAGPART (expr);
7282 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7283 if (isize != rsize)
7284 return 0;
7285 return rsize + isize;
7286 }
7287
7288
7289 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7290 specified by EXPR into the buffer PTR of length LEN bytes.
7291 Return the number of bytes placed in the buffer, or zero
7292 upon failure. */
7293
7294 static int
7295 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7296 {
7297 unsigned i, count;
7298 int size, offset;
7299 tree itype, elem;
7300
7301 offset = 0;
7302 count = VECTOR_CST_NELTS (expr);
7303 itype = TREE_TYPE (TREE_TYPE (expr));
7304 size = GET_MODE_SIZE (TYPE_MODE (itype));
7305 for (i = 0; i < count; i++)
7306 {
7307 elem = VECTOR_CST_ELT (expr, i);
7308 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7309 return 0;
7310 offset += size;
7311 }
7312 return offset;
7313 }
7314
7315
7316 /* Subroutine of native_encode_expr. Encode the STRING_CST
7317 specified by EXPR into the buffer PTR of length LEN bytes.
7318 Return the number of bytes placed in the buffer, or zero
7319 upon failure. */
7320
7321 static int
7322 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7323 {
7324 tree type = TREE_TYPE (expr);
7325 HOST_WIDE_INT total_bytes;
7326
7327 if (TREE_CODE (type) != ARRAY_TYPE
7328 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7329 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7330 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7331 return 0;
7332 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7333 if (total_bytes > len)
7334 return 0;
7335 if (TREE_STRING_LENGTH (expr) < total_bytes)
7336 {
7337 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7338 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7339 total_bytes - TREE_STRING_LENGTH (expr));
7340 }
7341 else
7342 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7343 return total_bytes;
7344 }
7345
7346
7347 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7348 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7349 buffer PTR of length LEN bytes. Return the number of bytes
7350 placed in the buffer, or zero upon failure. */
7351
7352 int
7353 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7354 {
7355 switch (TREE_CODE (expr))
7356 {
7357 case INTEGER_CST:
7358 return native_encode_int (expr, ptr, len);
7359
7360 case REAL_CST:
7361 return native_encode_real (expr, ptr, len);
7362
7363 case COMPLEX_CST:
7364 return native_encode_complex (expr, ptr, len);
7365
7366 case VECTOR_CST:
7367 return native_encode_vector (expr, ptr, len);
7368
7369 case STRING_CST:
7370 return native_encode_string (expr, ptr, len);
7371
7372 default:
7373 return 0;
7374 }
7375 }
7376
7377
7378 /* Subroutine of native_interpret_expr. Interpret the contents of
7379 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7380 If the buffer cannot be interpreted, return NULL_TREE. */
7381
7382 static tree
7383 native_interpret_int (tree type, const unsigned char *ptr, int len)
7384 {
7385 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7386 int byte, offset, word, words;
7387 unsigned char value;
7388 double_int result;
7389
7390 if (total_bytes > len)
7391 return NULL_TREE;
7392 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7393 return NULL_TREE;
7394
7395 result = double_int_zero;
7396 words = total_bytes / UNITS_PER_WORD;
7397
7398 for (byte = 0; byte < total_bytes; byte++)
7399 {
7400 int bitpos = byte * BITS_PER_UNIT;
7401 if (total_bytes > UNITS_PER_WORD)
7402 {
7403 word = byte / UNITS_PER_WORD;
7404 if (WORDS_BIG_ENDIAN)
7405 word = (words - 1) - word;
7406 offset = word * UNITS_PER_WORD;
7407 if (BYTES_BIG_ENDIAN)
7408 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7409 else
7410 offset += byte % UNITS_PER_WORD;
7411 }
7412 else
7413 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7414 value = ptr[offset];
7415
7416 if (bitpos < HOST_BITS_PER_WIDE_INT)
7417 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7418 else
7419 result.high |= (unsigned HOST_WIDE_INT) value
7420 << (bitpos - HOST_BITS_PER_WIDE_INT);
7421 }
7422
7423 return double_int_to_tree (type, result);
7424 }
7425
7426
7427 /* Subroutine of native_interpret_expr. Interpret the contents of
7428 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7429 If the buffer cannot be interpreted, return NULL_TREE. */
7430
7431 static tree
7432 native_interpret_real (tree type, const unsigned char *ptr, int len)
7433 {
7434 enum machine_mode mode = TYPE_MODE (type);
7435 int total_bytes = GET_MODE_SIZE (mode);
7436 int byte, offset, word, words, bitpos;
7437 unsigned char value;
7438 /* There are always 32 bits in each long, no matter the size of
7439 the hosts long. We handle floating point representations with
7440 up to 192 bits. */
7441 REAL_VALUE_TYPE r;
7442 long tmp[6];
7443
7444 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7445 if (total_bytes > len || total_bytes > 24)
7446 return NULL_TREE;
7447 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7448
7449 memset (tmp, 0, sizeof (tmp));
7450 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7451 bitpos += BITS_PER_UNIT)
7452 {
7453 byte = (bitpos / BITS_PER_UNIT) & 3;
7454 if (UNITS_PER_WORD < 4)
7455 {
7456 word = byte / UNITS_PER_WORD;
7457 if (WORDS_BIG_ENDIAN)
7458 word = (words - 1) - word;
7459 offset = word * UNITS_PER_WORD;
7460 if (BYTES_BIG_ENDIAN)
7461 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7462 else
7463 offset += byte % UNITS_PER_WORD;
7464 }
7465 else
7466 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7467 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7468
7469 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7470 }
7471
7472 real_from_target (&r, tmp, mode);
7473 return build_real (type, r);
7474 }
7475
7476
7477 /* Subroutine of native_interpret_expr. Interpret the contents of
7478 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7479 If the buffer cannot be interpreted, return NULL_TREE. */
7480
7481 static tree
7482 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7483 {
7484 tree etype, rpart, ipart;
7485 int size;
7486
7487 etype = TREE_TYPE (type);
7488 size = GET_MODE_SIZE (TYPE_MODE (etype));
7489 if (size * 2 > len)
7490 return NULL_TREE;
7491 rpart = native_interpret_expr (etype, ptr, size);
7492 if (!rpart)
7493 return NULL_TREE;
7494 ipart = native_interpret_expr (etype, ptr+size, size);
7495 if (!ipart)
7496 return NULL_TREE;
7497 return build_complex (type, rpart, ipart);
7498 }
7499
7500
7501 /* Subroutine of native_interpret_expr. Interpret the contents of
7502 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7503 If the buffer cannot be interpreted, return NULL_TREE. */
7504
7505 static tree
7506 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7507 {
7508 tree etype, elem;
7509 int i, size, count;
7510 tree *elements;
7511
7512 etype = TREE_TYPE (type);
7513 size = GET_MODE_SIZE (TYPE_MODE (etype));
7514 count = TYPE_VECTOR_SUBPARTS (type);
7515 if (size * count > len)
7516 return NULL_TREE;
7517
7518 elements = XALLOCAVEC (tree, count);
7519 for (i = count - 1; i >= 0; i--)
7520 {
7521 elem = native_interpret_expr (etype, ptr+(i*size), size);
7522 if (!elem)
7523 return NULL_TREE;
7524 elements[i] = elem;
7525 }
7526 return build_vector (type, elements);
7527 }
7528
7529
7530 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7531 the buffer PTR of length LEN as a constant of type TYPE. For
7532 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7533 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7534 return NULL_TREE. */
7535
7536 tree
7537 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7538 {
7539 switch (TREE_CODE (type))
7540 {
7541 case INTEGER_TYPE:
7542 case ENUMERAL_TYPE:
7543 case BOOLEAN_TYPE:
7544 case POINTER_TYPE:
7545 case REFERENCE_TYPE:
7546 return native_interpret_int (type, ptr, len);
7547
7548 case REAL_TYPE:
7549 return native_interpret_real (type, ptr, len);
7550
7551 case COMPLEX_TYPE:
7552 return native_interpret_complex (type, ptr, len);
7553
7554 case VECTOR_TYPE:
7555 return native_interpret_vector (type, ptr, len);
7556
7557 default:
7558 return NULL_TREE;
7559 }
7560 }
7561
7562 /* Returns true if we can interpret the contents of a native encoding
7563 as TYPE. */
7564
7565 static bool
7566 can_native_interpret_type_p (tree type)
7567 {
7568 switch (TREE_CODE (type))
7569 {
7570 case INTEGER_TYPE:
7571 case ENUMERAL_TYPE:
7572 case BOOLEAN_TYPE:
7573 case POINTER_TYPE:
7574 case REFERENCE_TYPE:
7575 case REAL_TYPE:
7576 case COMPLEX_TYPE:
7577 case VECTOR_TYPE:
7578 return true;
7579 default:
7580 return false;
7581 }
7582 }
7583
7584 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7585 TYPE at compile-time. If we're unable to perform the conversion
7586 return NULL_TREE. */
7587
7588 static tree
7589 fold_view_convert_expr (tree type, tree expr)
7590 {
7591 /* We support up to 512-bit values (for V8DFmode). */
7592 unsigned char buffer[64];
7593 int len;
7594
7595 /* Check that the host and target are sane. */
7596 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7597 return NULL_TREE;
7598
7599 len = native_encode_expr (expr, buffer, sizeof (buffer));
7600 if (len == 0)
7601 return NULL_TREE;
7602
7603 return native_interpret_expr (type, buffer, len);
7604 }
7605
7606 /* Build an expression for the address of T. Folds away INDIRECT_REF
7607 to avoid confusing the gimplify process. */
7608
7609 tree
7610 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7611 {
7612 /* The size of the object is not relevant when talking about its address. */
7613 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7614 t = TREE_OPERAND (t, 0);
7615
7616 if (TREE_CODE (t) == INDIRECT_REF)
7617 {
7618 t = TREE_OPERAND (t, 0);
7619
7620 if (TREE_TYPE (t) != ptrtype)
7621 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7622 }
7623 else if (TREE_CODE (t) == MEM_REF
7624 && integer_zerop (TREE_OPERAND (t, 1)))
7625 return TREE_OPERAND (t, 0);
7626 else if (TREE_CODE (t) == MEM_REF
7627 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7628 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7629 TREE_OPERAND (t, 0),
7630 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7631 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7632 {
7633 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7634
7635 if (TREE_TYPE (t) != ptrtype)
7636 t = fold_convert_loc (loc, ptrtype, t);
7637 }
7638 else
7639 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7640
7641 return t;
7642 }
7643
7644 /* Build an expression for the address of T. */
7645
7646 tree
7647 build_fold_addr_expr_loc (location_t loc, tree t)
7648 {
7649 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7650
7651 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7652 }
7653
7654 static bool vec_cst_ctor_to_array (tree, tree *);
7655
7656 /* Fold a unary expression of code CODE and type TYPE with operand
7657 OP0. Return the folded expression if folding is successful.
7658 Otherwise, return NULL_TREE. */
7659
7660 tree
7661 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7662 {
7663 tree tem;
7664 tree arg0;
7665 enum tree_code_class kind = TREE_CODE_CLASS (code);
7666
7667 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7668 && TREE_CODE_LENGTH (code) == 1);
7669
7670 arg0 = op0;
7671 if (arg0)
7672 {
7673 if (CONVERT_EXPR_CODE_P (code)
7674 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7675 {
7676 /* Don't use STRIP_NOPS, because signedness of argument type
7677 matters. */
7678 STRIP_SIGN_NOPS (arg0);
7679 }
7680 else
7681 {
7682 /* Strip any conversions that don't change the mode. This
7683 is safe for every expression, except for a comparison
7684 expression because its signedness is derived from its
7685 operands.
7686
7687 Note that this is done as an internal manipulation within
7688 the constant folder, in order to find the simplest
7689 representation of the arguments so that their form can be
7690 studied. In any cases, the appropriate type conversions
7691 should be put back in the tree that will get out of the
7692 constant folder. */
7693 STRIP_NOPS (arg0);
7694 }
7695 }
7696
7697 if (TREE_CODE_CLASS (code) == tcc_unary)
7698 {
7699 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7700 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7701 fold_build1_loc (loc, code, type,
7702 fold_convert_loc (loc, TREE_TYPE (op0),
7703 TREE_OPERAND (arg0, 1))));
7704 else if (TREE_CODE (arg0) == COND_EXPR)
7705 {
7706 tree arg01 = TREE_OPERAND (arg0, 1);
7707 tree arg02 = TREE_OPERAND (arg0, 2);
7708 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7709 arg01 = fold_build1_loc (loc, code, type,
7710 fold_convert_loc (loc,
7711 TREE_TYPE (op0), arg01));
7712 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7713 arg02 = fold_build1_loc (loc, code, type,
7714 fold_convert_loc (loc,
7715 TREE_TYPE (op0), arg02));
7716 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7717 arg01, arg02);
7718
7719 /* If this was a conversion, and all we did was to move into
7720 inside the COND_EXPR, bring it back out. But leave it if
7721 it is a conversion from integer to integer and the
7722 result precision is no wider than a word since such a
7723 conversion is cheap and may be optimized away by combine,
7724 while it couldn't if it were outside the COND_EXPR. Then return
7725 so we don't get into an infinite recursion loop taking the
7726 conversion out and then back in. */
7727
7728 if ((CONVERT_EXPR_CODE_P (code)
7729 || code == NON_LVALUE_EXPR)
7730 && TREE_CODE (tem) == COND_EXPR
7731 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7732 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7733 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7734 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7735 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7736 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7737 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7738 && (INTEGRAL_TYPE_P
7739 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7740 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7741 || flag_syntax_only))
7742 tem = build1_loc (loc, code, type,
7743 build3 (COND_EXPR,
7744 TREE_TYPE (TREE_OPERAND
7745 (TREE_OPERAND (tem, 1), 0)),
7746 TREE_OPERAND (tem, 0),
7747 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7748 TREE_OPERAND (TREE_OPERAND (tem, 2),
7749 0)));
7750 return tem;
7751 }
7752 }
7753
7754 switch (code)
7755 {
7756 case PAREN_EXPR:
7757 /* Re-association barriers around constants and other re-association
7758 barriers can be removed. */
7759 if (CONSTANT_CLASS_P (op0)
7760 || TREE_CODE (op0) == PAREN_EXPR)
7761 return fold_convert_loc (loc, type, op0);
7762 return NULL_TREE;
7763
7764 CASE_CONVERT:
7765 case FLOAT_EXPR:
7766 case FIX_TRUNC_EXPR:
7767 if (TREE_TYPE (op0) == type)
7768 return op0;
7769
7770 if (COMPARISON_CLASS_P (op0))
7771 {
7772 /* If we have (type) (a CMP b) and type is an integral type, return
7773 new expression involving the new type. Canonicalize
7774 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7775 non-integral type.
7776 Do not fold the result as that would not simplify further, also
7777 folding again results in recursions. */
7778 if (TREE_CODE (type) == BOOLEAN_TYPE)
7779 return build2_loc (loc, TREE_CODE (op0), type,
7780 TREE_OPERAND (op0, 0),
7781 TREE_OPERAND (op0, 1));
7782 else if (!INTEGRAL_TYPE_P (type))
7783 return build3_loc (loc, COND_EXPR, type, op0,
7784 constant_boolean_node (true, type),
7785 constant_boolean_node (false, type));
7786 }
7787
7788 /* Handle cases of two conversions in a row. */
7789 if (CONVERT_EXPR_P (op0))
7790 {
7791 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7792 tree inter_type = TREE_TYPE (op0);
7793 int inside_int = INTEGRAL_TYPE_P (inside_type);
7794 int inside_ptr = POINTER_TYPE_P (inside_type);
7795 int inside_float = FLOAT_TYPE_P (inside_type);
7796 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7797 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7798 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7799 int inter_int = INTEGRAL_TYPE_P (inter_type);
7800 int inter_ptr = POINTER_TYPE_P (inter_type);
7801 int inter_float = FLOAT_TYPE_P (inter_type);
7802 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7803 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7804 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7805 int final_int = INTEGRAL_TYPE_P (type);
7806 int final_ptr = POINTER_TYPE_P (type);
7807 int final_float = FLOAT_TYPE_P (type);
7808 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7809 unsigned int final_prec = TYPE_PRECISION (type);
7810 int final_unsignedp = TYPE_UNSIGNED (type);
7811
7812 /* In addition to the cases of two conversions in a row
7813 handled below, if we are converting something to its own
7814 type via an object of identical or wider precision, neither
7815 conversion is needed. */
7816 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7817 && (((inter_int || inter_ptr) && final_int)
7818 || (inter_float && final_float))
7819 && inter_prec >= final_prec)
7820 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7821
7822 /* Likewise, if the intermediate and initial types are either both
7823 float or both integer, we don't need the middle conversion if the
7824 former is wider than the latter and doesn't change the signedness
7825 (for integers). Avoid this if the final type is a pointer since
7826 then we sometimes need the middle conversion. Likewise if the
7827 final type has a precision not equal to the size of its mode. */
7828 if (((inter_int && inside_int)
7829 || (inter_float && inside_float)
7830 || (inter_vec && inside_vec))
7831 && inter_prec >= inside_prec
7832 && (inter_float || inter_vec
7833 || inter_unsignedp == inside_unsignedp)
7834 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7835 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7836 && ! final_ptr
7837 && (! final_vec || inter_prec == inside_prec))
7838 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7839
7840 /* If we have a sign-extension of a zero-extended value, we can
7841 replace that by a single zero-extension. Likewise if the
7842 final conversion does not change precision we can drop the
7843 intermediate conversion. */
7844 if (inside_int && inter_int && final_int
7845 && ((inside_prec < inter_prec && inter_prec < final_prec
7846 && inside_unsignedp && !inter_unsignedp)
7847 || final_prec == inter_prec))
7848 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7849
7850 /* Two conversions in a row are not needed unless:
7851 - some conversion is floating-point (overstrict for now), or
7852 - some conversion is a vector (overstrict for now), or
7853 - the intermediate type is narrower than both initial and
7854 final, or
7855 - the intermediate type and innermost type differ in signedness,
7856 and the outermost type is wider than the intermediate, or
7857 - the initial type is a pointer type and the precisions of the
7858 intermediate and final types differ, or
7859 - the final type is a pointer type and the precisions of the
7860 initial and intermediate types differ. */
7861 if (! inside_float && ! inter_float && ! final_float
7862 && ! inside_vec && ! inter_vec && ! final_vec
7863 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7864 && ! (inside_int && inter_int
7865 && inter_unsignedp != inside_unsignedp
7866 && inter_prec < final_prec)
7867 && ((inter_unsignedp && inter_prec > inside_prec)
7868 == (final_unsignedp && final_prec > inter_prec))
7869 && ! (inside_ptr && inter_prec != final_prec)
7870 && ! (final_ptr && inside_prec != inter_prec)
7871 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7872 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7873 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7874 }
7875
7876 /* Handle (T *)&A.B.C for A being of type T and B and C
7877 living at offset zero. This occurs frequently in
7878 C++ upcasting and then accessing the base. */
7879 if (TREE_CODE (op0) == ADDR_EXPR
7880 && POINTER_TYPE_P (type)
7881 && handled_component_p (TREE_OPERAND (op0, 0)))
7882 {
7883 HOST_WIDE_INT bitsize, bitpos;
7884 tree offset;
7885 enum machine_mode mode;
7886 int unsignedp, volatilep;
7887 tree base = TREE_OPERAND (op0, 0);
7888 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7889 &mode, &unsignedp, &volatilep, false);
7890 /* If the reference was to a (constant) zero offset, we can use
7891 the address of the base if it has the same base type
7892 as the result type and the pointer type is unqualified. */
7893 if (! offset && bitpos == 0
7894 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7895 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7896 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7897 return fold_convert_loc (loc, type,
7898 build_fold_addr_expr_loc (loc, base));
7899 }
7900
7901 if (TREE_CODE (op0) == MODIFY_EXPR
7902 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7903 /* Detect assigning a bitfield. */
7904 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7905 && DECL_BIT_FIELD
7906 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7907 {
7908 /* Don't leave an assignment inside a conversion
7909 unless assigning a bitfield. */
7910 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7911 /* First do the assignment, then return converted constant. */
7912 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7913 TREE_NO_WARNING (tem) = 1;
7914 TREE_USED (tem) = 1;
7915 return tem;
7916 }
7917
7918 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7919 constants (if x has signed type, the sign bit cannot be set
7920 in c). This folds extension into the BIT_AND_EXPR.
7921 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7922 very likely don't have maximal range for their precision and this
7923 transformation effectively doesn't preserve non-maximal ranges. */
7924 if (TREE_CODE (type) == INTEGER_TYPE
7925 && TREE_CODE (op0) == BIT_AND_EXPR
7926 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7927 {
7928 tree and_expr = op0;
7929 tree and0 = TREE_OPERAND (and_expr, 0);
7930 tree and1 = TREE_OPERAND (and_expr, 1);
7931 int change = 0;
7932
7933 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7934 || (TYPE_PRECISION (type)
7935 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7936 change = 1;
7937 else if (TYPE_PRECISION (TREE_TYPE (and1))
7938 <= HOST_BITS_PER_WIDE_INT
7939 && host_integerp (and1, 1))
7940 {
7941 unsigned HOST_WIDE_INT cst;
7942
7943 cst = tree_low_cst (and1, 1);
7944 cst &= (HOST_WIDE_INT) -1
7945 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7946 change = (cst == 0);
7947 #ifdef LOAD_EXTEND_OP
7948 if (change
7949 && !flag_syntax_only
7950 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7951 == ZERO_EXTEND))
7952 {
7953 tree uns = unsigned_type_for (TREE_TYPE (and0));
7954 and0 = fold_convert_loc (loc, uns, and0);
7955 and1 = fold_convert_loc (loc, uns, and1);
7956 }
7957 #endif
7958 }
7959 if (change)
7960 {
7961 tem = force_fit_type_double (type, tree_to_double_int (and1),
7962 0, TREE_OVERFLOW (and1));
7963 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7964 fold_convert_loc (loc, type, and0), tem);
7965 }
7966 }
7967
7968 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7969 when one of the new casts will fold away. Conservatively we assume
7970 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7971 if (POINTER_TYPE_P (type)
7972 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7973 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7974 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7975 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7976 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7977 {
7978 tree arg00 = TREE_OPERAND (arg0, 0);
7979 tree arg01 = TREE_OPERAND (arg0, 1);
7980
7981 return fold_build_pointer_plus_loc
7982 (loc, fold_convert_loc (loc, type, arg00), arg01);
7983 }
7984
7985 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7986 of the same precision, and X is an integer type not narrower than
7987 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7988 if (INTEGRAL_TYPE_P (type)
7989 && TREE_CODE (op0) == BIT_NOT_EXPR
7990 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7991 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7992 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7993 {
7994 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7995 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7996 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7997 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7998 fold_convert_loc (loc, type, tem));
7999 }
8000
8001 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8002 type of X and Y (integer types only). */
8003 if (INTEGRAL_TYPE_P (type)
8004 && TREE_CODE (op0) == MULT_EXPR
8005 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8006 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8007 {
8008 /* Be careful not to introduce new overflows. */
8009 tree mult_type;
8010 if (TYPE_OVERFLOW_WRAPS (type))
8011 mult_type = type;
8012 else
8013 mult_type = unsigned_type_for (type);
8014
8015 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8016 {
8017 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8018 fold_convert_loc (loc, mult_type,
8019 TREE_OPERAND (op0, 0)),
8020 fold_convert_loc (loc, mult_type,
8021 TREE_OPERAND (op0, 1)));
8022 return fold_convert_loc (loc, type, tem);
8023 }
8024 }
8025
8026 tem = fold_convert_const (code, type, op0);
8027 return tem ? tem : NULL_TREE;
8028
8029 case ADDR_SPACE_CONVERT_EXPR:
8030 if (integer_zerop (arg0))
8031 return fold_convert_const (code, type, arg0);
8032 return NULL_TREE;
8033
8034 case FIXED_CONVERT_EXPR:
8035 tem = fold_convert_const (code, type, arg0);
8036 return tem ? tem : NULL_TREE;
8037
8038 case VIEW_CONVERT_EXPR:
8039 if (TREE_TYPE (op0) == type)
8040 return op0;
8041 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8042 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8043 type, TREE_OPERAND (op0, 0));
8044 if (TREE_CODE (op0) == MEM_REF)
8045 return fold_build2_loc (loc, MEM_REF, type,
8046 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8047
8048 /* For integral conversions with the same precision or pointer
8049 conversions use a NOP_EXPR instead. */
8050 if ((INTEGRAL_TYPE_P (type)
8051 || POINTER_TYPE_P (type))
8052 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8053 || POINTER_TYPE_P (TREE_TYPE (op0)))
8054 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8055 return fold_convert_loc (loc, type, op0);
8056
8057 /* Strip inner integral conversions that do not change the precision. */
8058 if (CONVERT_EXPR_P (op0)
8059 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8060 || POINTER_TYPE_P (TREE_TYPE (op0)))
8061 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8062 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8063 && (TYPE_PRECISION (TREE_TYPE (op0))
8064 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8065 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8066 type, TREE_OPERAND (op0, 0));
8067
8068 return fold_view_convert_expr (type, op0);
8069
8070 case NEGATE_EXPR:
8071 tem = fold_negate_expr (loc, arg0);
8072 if (tem)
8073 return fold_convert_loc (loc, type, tem);
8074 return NULL_TREE;
8075
8076 case ABS_EXPR:
8077 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8078 return fold_abs_const (arg0, type);
8079 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8080 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8081 /* Convert fabs((double)float) into (double)fabsf(float). */
8082 else if (TREE_CODE (arg0) == NOP_EXPR
8083 && TREE_CODE (type) == REAL_TYPE)
8084 {
8085 tree targ0 = strip_float_extensions (arg0);
8086 if (targ0 != arg0)
8087 return fold_convert_loc (loc, type,
8088 fold_build1_loc (loc, ABS_EXPR,
8089 TREE_TYPE (targ0),
8090 targ0));
8091 }
8092 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8093 else if (TREE_CODE (arg0) == ABS_EXPR)
8094 return arg0;
8095 else if (tree_expr_nonnegative_p (arg0))
8096 return arg0;
8097
8098 /* Strip sign ops from argument. */
8099 if (TREE_CODE (type) == REAL_TYPE)
8100 {
8101 tem = fold_strip_sign_ops (arg0);
8102 if (tem)
8103 return fold_build1_loc (loc, ABS_EXPR, type,
8104 fold_convert_loc (loc, type, tem));
8105 }
8106 return NULL_TREE;
8107
8108 case CONJ_EXPR:
8109 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8110 return fold_convert_loc (loc, type, arg0);
8111 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8112 {
8113 tree itype = TREE_TYPE (type);
8114 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8115 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8116 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8117 negate_expr (ipart));
8118 }
8119 if (TREE_CODE (arg0) == COMPLEX_CST)
8120 {
8121 tree itype = TREE_TYPE (type);
8122 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8123 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8124 return build_complex (type, rpart, negate_expr (ipart));
8125 }
8126 if (TREE_CODE (arg0) == CONJ_EXPR)
8127 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8128 return NULL_TREE;
8129
8130 case BIT_NOT_EXPR:
8131 if (TREE_CODE (arg0) == INTEGER_CST)
8132 return fold_not_const (arg0, type);
8133 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8134 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8135 /* Convert ~ (-A) to A - 1. */
8136 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8137 return fold_build2_loc (loc, MINUS_EXPR, type,
8138 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8139 build_int_cst (type, 1));
8140 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8141 else if (INTEGRAL_TYPE_P (type)
8142 && ((TREE_CODE (arg0) == MINUS_EXPR
8143 && integer_onep (TREE_OPERAND (arg0, 1)))
8144 || (TREE_CODE (arg0) == PLUS_EXPR
8145 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8146 return fold_build1_loc (loc, NEGATE_EXPR, type,
8147 fold_convert_loc (loc, type,
8148 TREE_OPERAND (arg0, 0)));
8149 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8150 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8151 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8152 fold_convert_loc (loc, type,
8153 TREE_OPERAND (arg0, 0)))))
8154 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8155 fold_convert_loc (loc, type,
8156 TREE_OPERAND (arg0, 1)));
8157 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8158 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8159 fold_convert_loc (loc, type,
8160 TREE_OPERAND (arg0, 1)))))
8161 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8162 fold_convert_loc (loc, type,
8163 TREE_OPERAND (arg0, 0)), tem);
8164 /* Perform BIT_NOT_EXPR on each element individually. */
8165 else if (TREE_CODE (arg0) == VECTOR_CST)
8166 {
8167 tree *elements;
8168 tree elem;
8169 unsigned count = VECTOR_CST_NELTS (arg0), i;
8170
8171 elements = XALLOCAVEC (tree, count);
8172 for (i = 0; i < count; i++)
8173 {
8174 elem = VECTOR_CST_ELT (arg0, i);
8175 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8176 if (elem == NULL_TREE)
8177 break;
8178 elements[i] = elem;
8179 }
8180 if (i == count)
8181 return build_vector (type, elements);
8182 }
8183
8184 return NULL_TREE;
8185
8186 case TRUTH_NOT_EXPR:
8187 /* The argument to invert_truthvalue must have Boolean type. */
8188 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8189 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8190
8191 /* Note that the operand of this must be an int
8192 and its values must be 0 or 1.
8193 ("true" is a fixed value perhaps depending on the language,
8194 but we don't handle values other than 1 correctly yet.) */
8195 tem = fold_truth_not_expr (loc, arg0);
8196 if (!tem)
8197 return NULL_TREE;
8198 return fold_convert_loc (loc, type, tem);
8199
8200 case REALPART_EXPR:
8201 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8202 return fold_convert_loc (loc, type, arg0);
8203 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8204 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8205 TREE_OPERAND (arg0, 1));
8206 if (TREE_CODE (arg0) == COMPLEX_CST)
8207 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8208 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8209 {
8210 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8211 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8212 fold_build1_loc (loc, REALPART_EXPR, itype,
8213 TREE_OPERAND (arg0, 0)),
8214 fold_build1_loc (loc, REALPART_EXPR, itype,
8215 TREE_OPERAND (arg0, 1)));
8216 return fold_convert_loc (loc, type, tem);
8217 }
8218 if (TREE_CODE (arg0) == CONJ_EXPR)
8219 {
8220 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8221 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8222 TREE_OPERAND (arg0, 0));
8223 return fold_convert_loc (loc, type, tem);
8224 }
8225 if (TREE_CODE (arg0) == CALL_EXPR)
8226 {
8227 tree fn = get_callee_fndecl (arg0);
8228 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8229 switch (DECL_FUNCTION_CODE (fn))
8230 {
8231 CASE_FLT_FN (BUILT_IN_CEXPI):
8232 fn = mathfn_built_in (type, BUILT_IN_COS);
8233 if (fn)
8234 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8235 break;
8236
8237 default:
8238 break;
8239 }
8240 }
8241 return NULL_TREE;
8242
8243 case IMAGPART_EXPR:
8244 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8245 return build_zero_cst (type);
8246 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8247 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8248 TREE_OPERAND (arg0, 0));
8249 if (TREE_CODE (arg0) == COMPLEX_CST)
8250 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8251 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8252 {
8253 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8254 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8255 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8256 TREE_OPERAND (arg0, 0)),
8257 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8258 TREE_OPERAND (arg0, 1)));
8259 return fold_convert_loc (loc, type, tem);
8260 }
8261 if (TREE_CODE (arg0) == CONJ_EXPR)
8262 {
8263 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8264 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8265 return fold_convert_loc (loc, type, negate_expr (tem));
8266 }
8267 if (TREE_CODE (arg0) == CALL_EXPR)
8268 {
8269 tree fn = get_callee_fndecl (arg0);
8270 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8271 switch (DECL_FUNCTION_CODE (fn))
8272 {
8273 CASE_FLT_FN (BUILT_IN_CEXPI):
8274 fn = mathfn_built_in (type, BUILT_IN_SIN);
8275 if (fn)
8276 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8277 break;
8278
8279 default:
8280 break;
8281 }
8282 }
8283 return NULL_TREE;
8284
8285 case INDIRECT_REF:
8286 /* Fold *&X to X if X is an lvalue. */
8287 if (TREE_CODE (op0) == ADDR_EXPR)
8288 {
8289 tree op00 = TREE_OPERAND (op0, 0);
8290 if ((TREE_CODE (op00) == VAR_DECL
8291 || TREE_CODE (op00) == PARM_DECL
8292 || TREE_CODE (op00) == RESULT_DECL)
8293 && !TREE_READONLY (op00))
8294 return op00;
8295 }
8296 return NULL_TREE;
8297
8298 case VEC_UNPACK_LO_EXPR:
8299 case VEC_UNPACK_HI_EXPR:
8300 case VEC_UNPACK_FLOAT_LO_EXPR:
8301 case VEC_UNPACK_FLOAT_HI_EXPR:
8302 {
8303 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8304 tree *elts;
8305 enum tree_code subcode;
8306
8307 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8308 if (TREE_CODE (arg0) != VECTOR_CST)
8309 return NULL_TREE;
8310
8311 elts = XALLOCAVEC (tree, nelts * 2);
8312 if (!vec_cst_ctor_to_array (arg0, elts))
8313 return NULL_TREE;
8314
8315 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8316 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8317 elts += nelts;
8318
8319 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8320 subcode = NOP_EXPR;
8321 else
8322 subcode = FLOAT_EXPR;
8323
8324 for (i = 0; i < nelts; i++)
8325 {
8326 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8327 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8328 return NULL_TREE;
8329 }
8330
8331 return build_vector (type, elts);
8332 }
8333
8334 default:
8335 return NULL_TREE;
8336 } /* switch (code) */
8337 }
8338
8339
8340 /* If the operation was a conversion do _not_ mark a resulting constant
8341 with TREE_OVERFLOW if the original constant was not. These conversions
8342 have implementation defined behavior and retaining the TREE_OVERFLOW
8343 flag here would confuse later passes such as VRP. */
8344 tree
8345 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8346 tree type, tree op0)
8347 {
8348 tree res = fold_unary_loc (loc, code, type, op0);
8349 if (res
8350 && TREE_CODE (res) == INTEGER_CST
8351 && TREE_CODE (op0) == INTEGER_CST
8352 && CONVERT_EXPR_CODE_P (code))
8353 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8354
8355 return res;
8356 }
8357
8358 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8359 operands OP0 and OP1. LOC is the location of the resulting expression.
8360 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8361 Return the folded expression if folding is successful. Otherwise,
8362 return NULL_TREE. */
8363 static tree
8364 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8365 tree arg0, tree arg1, tree op0, tree op1)
8366 {
8367 tree tem;
8368
8369 /* We only do these simplifications if we are optimizing. */
8370 if (!optimize)
8371 return NULL_TREE;
8372
8373 /* Check for things like (A || B) && (A || C). We can convert this
8374 to A || (B && C). Note that either operator can be any of the four
8375 truth and/or operations and the transformation will still be
8376 valid. Also note that we only care about order for the
8377 ANDIF and ORIF operators. If B contains side effects, this
8378 might change the truth-value of A. */
8379 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8380 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8381 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8382 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8383 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8384 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8385 {
8386 tree a00 = TREE_OPERAND (arg0, 0);
8387 tree a01 = TREE_OPERAND (arg0, 1);
8388 tree a10 = TREE_OPERAND (arg1, 0);
8389 tree a11 = TREE_OPERAND (arg1, 1);
8390 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8391 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8392 && (code == TRUTH_AND_EXPR
8393 || code == TRUTH_OR_EXPR));
8394
8395 if (operand_equal_p (a00, a10, 0))
8396 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8397 fold_build2_loc (loc, code, type, a01, a11));
8398 else if (commutative && operand_equal_p (a00, a11, 0))
8399 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8400 fold_build2_loc (loc, code, type, a01, a10));
8401 else if (commutative && operand_equal_p (a01, a10, 0))
8402 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8403 fold_build2_loc (loc, code, type, a00, a11));
8404
8405 /* This case if tricky because we must either have commutative
8406 operators or else A10 must not have side-effects. */
8407
8408 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8409 && operand_equal_p (a01, a11, 0))
8410 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8411 fold_build2_loc (loc, code, type, a00, a10),
8412 a01);
8413 }
8414
8415 /* See if we can build a range comparison. */
8416 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8417 return tem;
8418
8419 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8420 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8421 {
8422 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8423 if (tem)
8424 return fold_build2_loc (loc, code, type, tem, arg1);
8425 }
8426
8427 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8428 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8429 {
8430 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8431 if (tem)
8432 return fold_build2_loc (loc, code, type, arg0, tem);
8433 }
8434
8435 /* Check for the possibility of merging component references. If our
8436 lhs is another similar operation, try to merge its rhs with our
8437 rhs. Then try to merge our lhs and rhs. */
8438 if (TREE_CODE (arg0) == code
8439 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8440 TREE_OPERAND (arg0, 1), arg1)))
8441 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8442
8443 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8444 return tem;
8445
8446 if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
8447 false) >= 2)
8448 && LOGICAL_OP_NON_SHORT_CIRCUIT
8449 && (code == TRUTH_AND_EXPR
8450 || code == TRUTH_ANDIF_EXPR
8451 || code == TRUTH_OR_EXPR
8452 || code == TRUTH_ORIF_EXPR))
8453 {
8454 enum tree_code ncode, icode;
8455
8456 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8457 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8458 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8459
8460 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8461 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8462 We don't want to pack more than two leafs to a non-IF AND/OR
8463 expression.
8464 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8465 equal to IF-CODE, then we don't want to add right-hand operand.
8466 If the inner right-hand side of left-hand operand has
8467 side-effects, or isn't simple, then we can't add to it,
8468 as otherwise we might destroy if-sequence. */
8469 if (TREE_CODE (arg0) == icode
8470 && simple_operand_p_2 (arg1)
8471 /* Needed for sequence points to handle trappings, and
8472 side-effects. */
8473 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8474 {
8475 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8476 arg1);
8477 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8478 tem);
8479 }
8480 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8481 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8482 else if (TREE_CODE (arg1) == icode
8483 && simple_operand_p_2 (arg0)
8484 /* Needed for sequence points to handle trappings, and
8485 side-effects. */
8486 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8487 {
8488 tem = fold_build2_loc (loc, ncode, type,
8489 arg0, TREE_OPERAND (arg1, 0));
8490 return fold_build2_loc (loc, icode, type, tem,
8491 TREE_OPERAND (arg1, 1));
8492 }
8493 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8494 into (A OR B).
8495 For sequence point consistancy, we need to check for trapping,
8496 and side-effects. */
8497 else if (code == icode && simple_operand_p_2 (arg0)
8498 && simple_operand_p_2 (arg1))
8499 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8500 }
8501
8502 return NULL_TREE;
8503 }
8504
8505 /* Fold a binary expression of code CODE and type TYPE with operands
8506 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8507 Return the folded expression if folding is successful. Otherwise,
8508 return NULL_TREE. */
8509
8510 static tree
8511 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8512 {
8513 enum tree_code compl_code;
8514
8515 if (code == MIN_EXPR)
8516 compl_code = MAX_EXPR;
8517 else if (code == MAX_EXPR)
8518 compl_code = MIN_EXPR;
8519 else
8520 gcc_unreachable ();
8521
8522 /* MIN (MAX (a, b), b) == b. */
8523 if (TREE_CODE (op0) == compl_code
8524 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8525 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8526
8527 /* MIN (MAX (b, a), b) == b. */
8528 if (TREE_CODE (op0) == compl_code
8529 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8530 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8531 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8532
8533 /* MIN (a, MAX (a, b)) == a. */
8534 if (TREE_CODE (op1) == compl_code
8535 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8536 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8537 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8538
8539 /* MIN (a, MAX (b, a)) == a. */
8540 if (TREE_CODE (op1) == compl_code
8541 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8542 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8543 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8544
8545 return NULL_TREE;
8546 }
8547
8548 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8549 by changing CODE to reduce the magnitude of constants involved in
8550 ARG0 of the comparison.
8551 Returns a canonicalized comparison tree if a simplification was
8552 possible, otherwise returns NULL_TREE.
8553 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8554 valid if signed overflow is undefined. */
8555
8556 static tree
8557 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8558 tree arg0, tree arg1,
8559 bool *strict_overflow_p)
8560 {
8561 enum tree_code code0 = TREE_CODE (arg0);
8562 tree t, cst0 = NULL_TREE;
8563 int sgn0;
8564 bool swap = false;
8565
8566 /* Match A +- CST code arg1 and CST code arg1. We can change the
8567 first form only if overflow is undefined. */
8568 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8569 /* In principle pointers also have undefined overflow behavior,
8570 but that causes problems elsewhere. */
8571 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8572 && (code0 == MINUS_EXPR
8573 || code0 == PLUS_EXPR)
8574 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8575 || code0 == INTEGER_CST))
8576 return NULL_TREE;
8577
8578 /* Identify the constant in arg0 and its sign. */
8579 if (code0 == INTEGER_CST)
8580 cst0 = arg0;
8581 else
8582 cst0 = TREE_OPERAND (arg0, 1);
8583 sgn0 = tree_int_cst_sgn (cst0);
8584
8585 /* Overflowed constants and zero will cause problems. */
8586 if (integer_zerop (cst0)
8587 || TREE_OVERFLOW (cst0))
8588 return NULL_TREE;
8589
8590 /* See if we can reduce the magnitude of the constant in
8591 arg0 by changing the comparison code. */
8592 if (code0 == INTEGER_CST)
8593 {
8594 /* CST <= arg1 -> CST-1 < arg1. */
8595 if (code == LE_EXPR && sgn0 == 1)
8596 code = LT_EXPR;
8597 /* -CST < arg1 -> -CST-1 <= arg1. */
8598 else if (code == LT_EXPR && sgn0 == -1)
8599 code = LE_EXPR;
8600 /* CST > arg1 -> CST-1 >= arg1. */
8601 else if (code == GT_EXPR && sgn0 == 1)
8602 code = GE_EXPR;
8603 /* -CST >= arg1 -> -CST-1 > arg1. */
8604 else if (code == GE_EXPR && sgn0 == -1)
8605 code = GT_EXPR;
8606 else
8607 return NULL_TREE;
8608 /* arg1 code' CST' might be more canonical. */
8609 swap = true;
8610 }
8611 else
8612 {
8613 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8614 if (code == LT_EXPR
8615 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8616 code = LE_EXPR;
8617 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8618 else if (code == GT_EXPR
8619 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8620 code = GE_EXPR;
8621 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8622 else if (code == LE_EXPR
8623 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8624 code = LT_EXPR;
8625 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8626 else if (code == GE_EXPR
8627 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8628 code = GT_EXPR;
8629 else
8630 return NULL_TREE;
8631 *strict_overflow_p = true;
8632 }
8633
8634 /* Now build the constant reduced in magnitude. But not if that
8635 would produce one outside of its types range. */
8636 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8637 && ((sgn0 == 1
8638 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8639 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8640 || (sgn0 == -1
8641 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8642 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8643 /* We cannot swap the comparison here as that would cause us to
8644 endlessly recurse. */
8645 return NULL_TREE;
8646
8647 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8648 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8649 if (code0 != INTEGER_CST)
8650 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8651 t = fold_convert (TREE_TYPE (arg1), t);
8652
8653 /* If swapping might yield to a more canonical form, do so. */
8654 if (swap)
8655 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8656 else
8657 return fold_build2_loc (loc, code, type, t, arg1);
8658 }
8659
8660 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8661 overflow further. Try to decrease the magnitude of constants involved
8662 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8663 and put sole constants at the second argument position.
8664 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8665
8666 static tree
8667 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8668 tree arg0, tree arg1)
8669 {
8670 tree t;
8671 bool strict_overflow_p;
8672 const char * const warnmsg = G_("assuming signed overflow does not occur "
8673 "when reducing constant in comparison");
8674
8675 /* Try canonicalization by simplifying arg0. */
8676 strict_overflow_p = false;
8677 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8678 &strict_overflow_p);
8679 if (t)
8680 {
8681 if (strict_overflow_p)
8682 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8683 return t;
8684 }
8685
8686 /* Try canonicalization by simplifying arg1 using the swapped
8687 comparison. */
8688 code = swap_tree_comparison (code);
8689 strict_overflow_p = false;
8690 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8691 &strict_overflow_p);
8692 if (t && strict_overflow_p)
8693 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8694 return t;
8695 }
8696
8697 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8698 space. This is used to avoid issuing overflow warnings for
8699 expressions like &p->x which can not wrap. */
8700
8701 static bool
8702 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8703 {
8704 unsigned HOST_WIDE_INT offset_low, total_low;
8705 HOST_WIDE_INT size, offset_high, total_high;
8706
8707 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8708 return true;
8709
8710 if (bitpos < 0)
8711 return true;
8712
8713 if (offset == NULL_TREE)
8714 {
8715 offset_low = 0;
8716 offset_high = 0;
8717 }
8718 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8719 return true;
8720 else
8721 {
8722 offset_low = TREE_INT_CST_LOW (offset);
8723 offset_high = TREE_INT_CST_HIGH (offset);
8724 }
8725
8726 if (add_double_with_sign (offset_low, offset_high,
8727 bitpos / BITS_PER_UNIT, 0,
8728 &total_low, &total_high,
8729 true))
8730 return true;
8731
8732 if (total_high != 0)
8733 return true;
8734
8735 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8736 if (size <= 0)
8737 return true;
8738
8739 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8740 array. */
8741 if (TREE_CODE (base) == ADDR_EXPR)
8742 {
8743 HOST_WIDE_INT base_size;
8744
8745 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8746 if (base_size > 0 && size < base_size)
8747 size = base_size;
8748 }
8749
8750 return total_low > (unsigned HOST_WIDE_INT) size;
8751 }
8752
8753 /* Subroutine of fold_binary. This routine performs all of the
8754 transformations that are common to the equality/inequality
8755 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8756 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8757 fold_binary should call fold_binary. Fold a comparison with
8758 tree code CODE and type TYPE with operands OP0 and OP1. Return
8759 the folded comparison or NULL_TREE. */
8760
8761 static tree
8762 fold_comparison (location_t loc, enum tree_code code, tree type,
8763 tree op0, tree op1)
8764 {
8765 tree arg0, arg1, tem;
8766
8767 arg0 = op0;
8768 arg1 = op1;
8769
8770 STRIP_SIGN_NOPS (arg0);
8771 STRIP_SIGN_NOPS (arg1);
8772
8773 tem = fold_relational_const (code, type, arg0, arg1);
8774 if (tem != NULL_TREE)
8775 return tem;
8776
8777 /* If one arg is a real or integer constant, put it last. */
8778 if (tree_swap_operands_p (arg0, arg1, true))
8779 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8780
8781 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8782 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8783 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8784 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8785 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8786 && (TREE_CODE (arg1) == INTEGER_CST
8787 && !TREE_OVERFLOW (arg1)))
8788 {
8789 tree const1 = TREE_OPERAND (arg0, 1);
8790 tree const2 = arg1;
8791 tree variable = TREE_OPERAND (arg0, 0);
8792 tree lhs;
8793 int lhs_add;
8794 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8795
8796 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8797 TREE_TYPE (arg1), const2, const1);
8798
8799 /* If the constant operation overflowed this can be
8800 simplified as a comparison against INT_MAX/INT_MIN. */
8801 if (TREE_CODE (lhs) == INTEGER_CST
8802 && TREE_OVERFLOW (lhs))
8803 {
8804 int const1_sgn = tree_int_cst_sgn (const1);
8805 enum tree_code code2 = code;
8806
8807 /* Get the sign of the constant on the lhs if the
8808 operation were VARIABLE + CONST1. */
8809 if (TREE_CODE (arg0) == MINUS_EXPR)
8810 const1_sgn = -const1_sgn;
8811
8812 /* The sign of the constant determines if we overflowed
8813 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8814 Canonicalize to the INT_MIN overflow by swapping the comparison
8815 if necessary. */
8816 if (const1_sgn == -1)
8817 code2 = swap_tree_comparison (code);
8818
8819 /* We now can look at the canonicalized case
8820 VARIABLE + 1 CODE2 INT_MIN
8821 and decide on the result. */
8822 if (code2 == LT_EXPR
8823 || code2 == LE_EXPR
8824 || code2 == EQ_EXPR)
8825 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8826 else if (code2 == NE_EXPR
8827 || code2 == GE_EXPR
8828 || code2 == GT_EXPR)
8829 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8830 }
8831
8832 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8833 && (TREE_CODE (lhs) != INTEGER_CST
8834 || !TREE_OVERFLOW (lhs)))
8835 {
8836 if (code != EQ_EXPR && code != NE_EXPR)
8837 fold_overflow_warning ("assuming signed overflow does not occur "
8838 "when changing X +- C1 cmp C2 to "
8839 "X cmp C1 +- C2",
8840 WARN_STRICT_OVERFLOW_COMPARISON);
8841 return fold_build2_loc (loc, code, type, variable, lhs);
8842 }
8843 }
8844
8845 /* For comparisons of pointers we can decompose it to a compile time
8846 comparison of the base objects and the offsets into the object.
8847 This requires at least one operand being an ADDR_EXPR or a
8848 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8849 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8850 && (TREE_CODE (arg0) == ADDR_EXPR
8851 || TREE_CODE (arg1) == ADDR_EXPR
8852 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8853 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8854 {
8855 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8856 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8857 enum machine_mode mode;
8858 int volatilep, unsignedp;
8859 bool indirect_base0 = false, indirect_base1 = false;
8860
8861 /* Get base and offset for the access. Strip ADDR_EXPR for
8862 get_inner_reference, but put it back by stripping INDIRECT_REF
8863 off the base object if possible. indirect_baseN will be true
8864 if baseN is not an address but refers to the object itself. */
8865 base0 = arg0;
8866 if (TREE_CODE (arg0) == ADDR_EXPR)
8867 {
8868 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8869 &bitsize, &bitpos0, &offset0, &mode,
8870 &unsignedp, &volatilep, false);
8871 if (TREE_CODE (base0) == INDIRECT_REF)
8872 base0 = TREE_OPERAND (base0, 0);
8873 else
8874 indirect_base0 = true;
8875 }
8876 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8877 {
8878 base0 = TREE_OPERAND (arg0, 0);
8879 STRIP_SIGN_NOPS (base0);
8880 if (TREE_CODE (base0) == ADDR_EXPR)
8881 {
8882 base0 = TREE_OPERAND (base0, 0);
8883 indirect_base0 = true;
8884 }
8885 offset0 = TREE_OPERAND (arg0, 1);
8886 if (host_integerp (offset0, 0))
8887 {
8888 HOST_WIDE_INT off = size_low_cst (offset0);
8889 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8890 * BITS_PER_UNIT)
8891 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8892 {
8893 bitpos0 = off * BITS_PER_UNIT;
8894 offset0 = NULL_TREE;
8895 }
8896 }
8897 }
8898
8899 base1 = arg1;
8900 if (TREE_CODE (arg1) == ADDR_EXPR)
8901 {
8902 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8903 &bitsize, &bitpos1, &offset1, &mode,
8904 &unsignedp, &volatilep, false);
8905 if (TREE_CODE (base1) == INDIRECT_REF)
8906 base1 = TREE_OPERAND (base1, 0);
8907 else
8908 indirect_base1 = true;
8909 }
8910 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8911 {
8912 base1 = TREE_OPERAND (arg1, 0);
8913 STRIP_SIGN_NOPS (base1);
8914 if (TREE_CODE (base1) == ADDR_EXPR)
8915 {
8916 base1 = TREE_OPERAND (base1, 0);
8917 indirect_base1 = true;
8918 }
8919 offset1 = TREE_OPERAND (arg1, 1);
8920 if (host_integerp (offset1, 0))
8921 {
8922 HOST_WIDE_INT off = size_low_cst (offset1);
8923 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8924 * BITS_PER_UNIT)
8925 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8926 {
8927 bitpos1 = off * BITS_PER_UNIT;
8928 offset1 = NULL_TREE;
8929 }
8930 }
8931 }
8932
8933 /* A local variable can never be pointed to by
8934 the default SSA name of an incoming parameter. */
8935 if ((TREE_CODE (arg0) == ADDR_EXPR
8936 && indirect_base0
8937 && TREE_CODE (base0) == VAR_DECL
8938 && auto_var_in_fn_p (base0, current_function_decl)
8939 && !indirect_base1
8940 && TREE_CODE (base1) == SSA_NAME
8941 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8942 && SSA_NAME_IS_DEFAULT_DEF (base1))
8943 || (TREE_CODE (arg1) == ADDR_EXPR
8944 && indirect_base1
8945 && TREE_CODE (base1) == VAR_DECL
8946 && auto_var_in_fn_p (base1, current_function_decl)
8947 && !indirect_base0
8948 && TREE_CODE (base0) == SSA_NAME
8949 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8950 && SSA_NAME_IS_DEFAULT_DEF (base0)))
8951 {
8952 if (code == NE_EXPR)
8953 return constant_boolean_node (1, type);
8954 else if (code == EQ_EXPR)
8955 return constant_boolean_node (0, type);
8956 }
8957 /* If we have equivalent bases we might be able to simplify. */
8958 else if (indirect_base0 == indirect_base1
8959 && operand_equal_p (base0, base1, 0))
8960 {
8961 /* We can fold this expression to a constant if the non-constant
8962 offset parts are equal. */
8963 if ((offset0 == offset1
8964 || (offset0 && offset1
8965 && operand_equal_p (offset0, offset1, 0)))
8966 && (code == EQ_EXPR
8967 || code == NE_EXPR
8968 || (indirect_base0 && DECL_P (base0))
8969 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8970
8971 {
8972 if (code != EQ_EXPR
8973 && code != NE_EXPR
8974 && bitpos0 != bitpos1
8975 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8976 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8977 fold_overflow_warning (("assuming pointer wraparound does not "
8978 "occur when comparing P +- C1 with "
8979 "P +- C2"),
8980 WARN_STRICT_OVERFLOW_CONDITIONAL);
8981
8982 switch (code)
8983 {
8984 case EQ_EXPR:
8985 return constant_boolean_node (bitpos0 == bitpos1, type);
8986 case NE_EXPR:
8987 return constant_boolean_node (bitpos0 != bitpos1, type);
8988 case LT_EXPR:
8989 return constant_boolean_node (bitpos0 < bitpos1, type);
8990 case LE_EXPR:
8991 return constant_boolean_node (bitpos0 <= bitpos1, type);
8992 case GE_EXPR:
8993 return constant_boolean_node (bitpos0 >= bitpos1, type);
8994 case GT_EXPR:
8995 return constant_boolean_node (bitpos0 > bitpos1, type);
8996 default:;
8997 }
8998 }
8999 /* We can simplify the comparison to a comparison of the variable
9000 offset parts if the constant offset parts are equal.
9001 Be careful to use signed size type here because otherwise we
9002 mess with array offsets in the wrong way. This is possible
9003 because pointer arithmetic is restricted to retain within an
9004 object and overflow on pointer differences is undefined as of
9005 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9006 else if (bitpos0 == bitpos1
9007 && ((code == EQ_EXPR || code == NE_EXPR)
9008 || (indirect_base0 && DECL_P (base0))
9009 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9010 {
9011 /* By converting to signed size type we cover middle-end pointer
9012 arithmetic which operates on unsigned pointer types of size
9013 type size and ARRAY_REF offsets which are properly sign or
9014 zero extended from their type in case it is narrower than
9015 size type. */
9016 if (offset0 == NULL_TREE)
9017 offset0 = build_int_cst (ssizetype, 0);
9018 else
9019 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9020 if (offset1 == NULL_TREE)
9021 offset1 = build_int_cst (ssizetype, 0);
9022 else
9023 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9024
9025 if (code != EQ_EXPR
9026 && code != NE_EXPR
9027 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9028 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9029 fold_overflow_warning (("assuming pointer wraparound does not "
9030 "occur when comparing P +- C1 with "
9031 "P +- C2"),
9032 WARN_STRICT_OVERFLOW_COMPARISON);
9033
9034 return fold_build2_loc (loc, code, type, offset0, offset1);
9035 }
9036 }
9037 /* For non-equal bases we can simplify if they are addresses
9038 of local binding decls or constants. */
9039 else if (indirect_base0 && indirect_base1
9040 /* We know that !operand_equal_p (base0, base1, 0)
9041 because the if condition was false. But make
9042 sure two decls are not the same. */
9043 && base0 != base1
9044 && TREE_CODE (arg0) == ADDR_EXPR
9045 && TREE_CODE (arg1) == ADDR_EXPR
9046 && (((TREE_CODE (base0) == VAR_DECL
9047 || TREE_CODE (base0) == PARM_DECL)
9048 && (targetm.binds_local_p (base0)
9049 || CONSTANT_CLASS_P (base1)))
9050 || CONSTANT_CLASS_P (base0))
9051 && (((TREE_CODE (base1) == VAR_DECL
9052 || TREE_CODE (base1) == PARM_DECL)
9053 && (targetm.binds_local_p (base1)
9054 || CONSTANT_CLASS_P (base0)))
9055 || CONSTANT_CLASS_P (base1)))
9056 {
9057 if (code == EQ_EXPR)
9058 return omit_two_operands_loc (loc, type, boolean_false_node,
9059 arg0, arg1);
9060 else if (code == NE_EXPR)
9061 return omit_two_operands_loc (loc, type, boolean_true_node,
9062 arg0, arg1);
9063 }
9064 /* For equal offsets we can simplify to a comparison of the
9065 base addresses. */
9066 else if (bitpos0 == bitpos1
9067 && (indirect_base0
9068 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9069 && (indirect_base1
9070 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9071 && ((offset0 == offset1)
9072 || (offset0 && offset1
9073 && operand_equal_p (offset0, offset1, 0))))
9074 {
9075 if (indirect_base0)
9076 base0 = build_fold_addr_expr_loc (loc, base0);
9077 if (indirect_base1)
9078 base1 = build_fold_addr_expr_loc (loc, base1);
9079 return fold_build2_loc (loc, code, type, base0, base1);
9080 }
9081 }
9082
9083 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9084 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9085 the resulting offset is smaller in absolute value than the
9086 original one. */
9087 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9088 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9089 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9090 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9091 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9092 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9093 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9094 {
9095 tree const1 = TREE_OPERAND (arg0, 1);
9096 tree const2 = TREE_OPERAND (arg1, 1);
9097 tree variable1 = TREE_OPERAND (arg0, 0);
9098 tree variable2 = TREE_OPERAND (arg1, 0);
9099 tree cst;
9100 const char * const warnmsg = G_("assuming signed overflow does not "
9101 "occur when combining constants around "
9102 "a comparison");
9103
9104 /* Put the constant on the side where it doesn't overflow and is
9105 of lower absolute value than before. */
9106 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9107 ? MINUS_EXPR : PLUS_EXPR,
9108 const2, const1);
9109 if (!TREE_OVERFLOW (cst)
9110 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9111 {
9112 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9113 return fold_build2_loc (loc, code, type,
9114 variable1,
9115 fold_build2_loc (loc,
9116 TREE_CODE (arg1), TREE_TYPE (arg1),
9117 variable2, cst));
9118 }
9119
9120 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9121 ? MINUS_EXPR : PLUS_EXPR,
9122 const1, const2);
9123 if (!TREE_OVERFLOW (cst)
9124 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9125 {
9126 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9127 return fold_build2_loc (loc, code, type,
9128 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9129 variable1, cst),
9130 variable2);
9131 }
9132 }
9133
9134 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9135 signed arithmetic case. That form is created by the compiler
9136 often enough for folding it to be of value. One example is in
9137 computing loop trip counts after Operator Strength Reduction. */
9138 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9139 && TREE_CODE (arg0) == MULT_EXPR
9140 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9141 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9142 && integer_zerop (arg1))
9143 {
9144 tree const1 = TREE_OPERAND (arg0, 1);
9145 tree const2 = arg1; /* zero */
9146 tree variable1 = TREE_OPERAND (arg0, 0);
9147 enum tree_code cmp_code = code;
9148
9149 /* Handle unfolded multiplication by zero. */
9150 if (integer_zerop (const1))
9151 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9152
9153 fold_overflow_warning (("assuming signed overflow does not occur when "
9154 "eliminating multiplication in comparison "
9155 "with zero"),
9156 WARN_STRICT_OVERFLOW_COMPARISON);
9157
9158 /* If const1 is negative we swap the sense of the comparison. */
9159 if (tree_int_cst_sgn (const1) < 0)
9160 cmp_code = swap_tree_comparison (cmp_code);
9161
9162 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9163 }
9164
9165 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9166 if (tem)
9167 return tem;
9168
9169 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9170 {
9171 tree targ0 = strip_float_extensions (arg0);
9172 tree targ1 = strip_float_extensions (arg1);
9173 tree newtype = TREE_TYPE (targ0);
9174
9175 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9176 newtype = TREE_TYPE (targ1);
9177
9178 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9179 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9180 return fold_build2_loc (loc, code, type,
9181 fold_convert_loc (loc, newtype, targ0),
9182 fold_convert_loc (loc, newtype, targ1));
9183
9184 /* (-a) CMP (-b) -> b CMP a */
9185 if (TREE_CODE (arg0) == NEGATE_EXPR
9186 && TREE_CODE (arg1) == NEGATE_EXPR)
9187 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9188 TREE_OPERAND (arg0, 0));
9189
9190 if (TREE_CODE (arg1) == REAL_CST)
9191 {
9192 REAL_VALUE_TYPE cst;
9193 cst = TREE_REAL_CST (arg1);
9194
9195 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9196 if (TREE_CODE (arg0) == NEGATE_EXPR)
9197 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9198 TREE_OPERAND (arg0, 0),
9199 build_real (TREE_TYPE (arg1),
9200 real_value_negate (&cst)));
9201
9202 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9203 /* a CMP (-0) -> a CMP 0 */
9204 if (REAL_VALUE_MINUS_ZERO (cst))
9205 return fold_build2_loc (loc, code, type, arg0,
9206 build_real (TREE_TYPE (arg1), dconst0));
9207
9208 /* x != NaN is always true, other ops are always false. */
9209 if (REAL_VALUE_ISNAN (cst)
9210 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9211 {
9212 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9213 return omit_one_operand_loc (loc, type, tem, arg0);
9214 }
9215
9216 /* Fold comparisons against infinity. */
9217 if (REAL_VALUE_ISINF (cst)
9218 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9219 {
9220 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9221 if (tem != NULL_TREE)
9222 return tem;
9223 }
9224 }
9225
9226 /* If this is a comparison of a real constant with a PLUS_EXPR
9227 or a MINUS_EXPR of a real constant, we can convert it into a
9228 comparison with a revised real constant as long as no overflow
9229 occurs when unsafe_math_optimizations are enabled. */
9230 if (flag_unsafe_math_optimizations
9231 && TREE_CODE (arg1) == REAL_CST
9232 && (TREE_CODE (arg0) == PLUS_EXPR
9233 || TREE_CODE (arg0) == MINUS_EXPR)
9234 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9235 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9236 ? MINUS_EXPR : PLUS_EXPR,
9237 arg1, TREE_OPERAND (arg0, 1)))
9238 && !TREE_OVERFLOW (tem))
9239 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9240
9241 /* Likewise, we can simplify a comparison of a real constant with
9242 a MINUS_EXPR whose first operand is also a real constant, i.e.
9243 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9244 floating-point types only if -fassociative-math is set. */
9245 if (flag_associative_math
9246 && TREE_CODE (arg1) == REAL_CST
9247 && TREE_CODE (arg0) == MINUS_EXPR
9248 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9249 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9250 arg1))
9251 && !TREE_OVERFLOW (tem))
9252 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9253 TREE_OPERAND (arg0, 1), tem);
9254
9255 /* Fold comparisons against built-in math functions. */
9256 if (TREE_CODE (arg1) == REAL_CST
9257 && flag_unsafe_math_optimizations
9258 && ! flag_errno_math)
9259 {
9260 enum built_in_function fcode = builtin_mathfn_code (arg0);
9261
9262 if (fcode != END_BUILTINS)
9263 {
9264 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9265 if (tem != NULL_TREE)
9266 return tem;
9267 }
9268 }
9269 }
9270
9271 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9272 && CONVERT_EXPR_P (arg0))
9273 {
9274 /* If we are widening one operand of an integer comparison,
9275 see if the other operand is similarly being widened. Perhaps we
9276 can do the comparison in the narrower type. */
9277 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9278 if (tem)
9279 return tem;
9280
9281 /* Or if we are changing signedness. */
9282 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9283 if (tem)
9284 return tem;
9285 }
9286
9287 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9288 constant, we can simplify it. */
9289 if (TREE_CODE (arg1) == INTEGER_CST
9290 && (TREE_CODE (arg0) == MIN_EXPR
9291 || TREE_CODE (arg0) == MAX_EXPR)
9292 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9293 {
9294 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9295 if (tem)
9296 return tem;
9297 }
9298
9299 /* Simplify comparison of something with itself. (For IEEE
9300 floating-point, we can only do some of these simplifications.) */
9301 if (operand_equal_p (arg0, arg1, 0))
9302 {
9303 switch (code)
9304 {
9305 case EQ_EXPR:
9306 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9307 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9308 return constant_boolean_node (1, type);
9309 break;
9310
9311 case GE_EXPR:
9312 case LE_EXPR:
9313 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9314 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9315 return constant_boolean_node (1, type);
9316 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9317
9318 case NE_EXPR:
9319 /* For NE, we can only do this simplification if integer
9320 or we don't honor IEEE floating point NaNs. */
9321 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9322 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9323 break;
9324 /* ... fall through ... */
9325 case GT_EXPR:
9326 case LT_EXPR:
9327 return constant_boolean_node (0, type);
9328 default:
9329 gcc_unreachable ();
9330 }
9331 }
9332
9333 /* If we are comparing an expression that just has comparisons
9334 of two integer values, arithmetic expressions of those comparisons,
9335 and constants, we can simplify it. There are only three cases
9336 to check: the two values can either be equal, the first can be
9337 greater, or the second can be greater. Fold the expression for
9338 those three values. Since each value must be 0 or 1, we have
9339 eight possibilities, each of which corresponds to the constant 0
9340 or 1 or one of the six possible comparisons.
9341
9342 This handles common cases like (a > b) == 0 but also handles
9343 expressions like ((x > y) - (y > x)) > 0, which supposedly
9344 occur in macroized code. */
9345
9346 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9347 {
9348 tree cval1 = 0, cval2 = 0;
9349 int save_p = 0;
9350
9351 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9352 /* Don't handle degenerate cases here; they should already
9353 have been handled anyway. */
9354 && cval1 != 0 && cval2 != 0
9355 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9356 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9357 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9358 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9359 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9360 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9361 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9362 {
9363 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9364 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9365
9366 /* We can't just pass T to eval_subst in case cval1 or cval2
9367 was the same as ARG1. */
9368
9369 tree high_result
9370 = fold_build2_loc (loc, code, type,
9371 eval_subst (loc, arg0, cval1, maxval,
9372 cval2, minval),
9373 arg1);
9374 tree equal_result
9375 = fold_build2_loc (loc, code, type,
9376 eval_subst (loc, arg0, cval1, maxval,
9377 cval2, maxval),
9378 arg1);
9379 tree low_result
9380 = fold_build2_loc (loc, code, type,
9381 eval_subst (loc, arg0, cval1, minval,
9382 cval2, maxval),
9383 arg1);
9384
9385 /* All three of these results should be 0 or 1. Confirm they are.
9386 Then use those values to select the proper code to use. */
9387
9388 if (TREE_CODE (high_result) == INTEGER_CST
9389 && TREE_CODE (equal_result) == INTEGER_CST
9390 && TREE_CODE (low_result) == INTEGER_CST)
9391 {
9392 /* Make a 3-bit mask with the high-order bit being the
9393 value for `>', the next for '=', and the low for '<'. */
9394 switch ((integer_onep (high_result) * 4)
9395 + (integer_onep (equal_result) * 2)
9396 + integer_onep (low_result))
9397 {
9398 case 0:
9399 /* Always false. */
9400 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9401 case 1:
9402 code = LT_EXPR;
9403 break;
9404 case 2:
9405 code = EQ_EXPR;
9406 break;
9407 case 3:
9408 code = LE_EXPR;
9409 break;
9410 case 4:
9411 code = GT_EXPR;
9412 break;
9413 case 5:
9414 code = NE_EXPR;
9415 break;
9416 case 6:
9417 code = GE_EXPR;
9418 break;
9419 case 7:
9420 /* Always true. */
9421 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9422 }
9423
9424 if (save_p)
9425 {
9426 tem = save_expr (build2 (code, type, cval1, cval2));
9427 SET_EXPR_LOCATION (tem, loc);
9428 return tem;
9429 }
9430 return fold_build2_loc (loc, code, type, cval1, cval2);
9431 }
9432 }
9433 }
9434
9435 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9436 into a single range test. */
9437 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9438 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9439 && TREE_CODE (arg1) == INTEGER_CST
9440 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9441 && !integer_zerop (TREE_OPERAND (arg0, 1))
9442 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9443 && !TREE_OVERFLOW (arg1))
9444 {
9445 tem = fold_div_compare (loc, code, type, arg0, arg1);
9446 if (tem != NULL_TREE)
9447 return tem;
9448 }
9449
9450 /* Fold ~X op ~Y as Y op X. */
9451 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9452 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9453 {
9454 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9455 return fold_build2_loc (loc, code, type,
9456 fold_convert_loc (loc, cmp_type,
9457 TREE_OPERAND (arg1, 0)),
9458 TREE_OPERAND (arg0, 0));
9459 }
9460
9461 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9462 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9463 && TREE_CODE (arg1) == INTEGER_CST)
9464 {
9465 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9466 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9467 TREE_OPERAND (arg0, 0),
9468 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9469 fold_convert_loc (loc, cmp_type, arg1)));
9470 }
9471
9472 return NULL_TREE;
9473 }
9474
9475
9476 /* Subroutine of fold_binary. Optimize complex multiplications of the
9477 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9478 argument EXPR represents the expression "z" of type TYPE. */
9479
9480 static tree
9481 fold_mult_zconjz (location_t loc, tree type, tree expr)
9482 {
9483 tree itype = TREE_TYPE (type);
9484 tree rpart, ipart, tem;
9485
9486 if (TREE_CODE (expr) == COMPLEX_EXPR)
9487 {
9488 rpart = TREE_OPERAND (expr, 0);
9489 ipart = TREE_OPERAND (expr, 1);
9490 }
9491 else if (TREE_CODE (expr) == COMPLEX_CST)
9492 {
9493 rpart = TREE_REALPART (expr);
9494 ipart = TREE_IMAGPART (expr);
9495 }
9496 else
9497 {
9498 expr = save_expr (expr);
9499 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9500 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9501 }
9502
9503 rpart = save_expr (rpart);
9504 ipart = save_expr (ipart);
9505 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9506 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9507 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9508 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9509 build_zero_cst (itype));
9510 }
9511
9512
9513 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9514 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9515 guarantees that P and N have the same least significant log2(M) bits.
9516 N is not otherwise constrained. In particular, N is not normalized to
9517 0 <= N < M as is common. In general, the precise value of P is unknown.
9518 M is chosen as large as possible such that constant N can be determined.
9519
9520 Returns M and sets *RESIDUE to N.
9521
9522 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9523 account. This is not always possible due to PR 35705.
9524 */
9525
9526 static unsigned HOST_WIDE_INT
9527 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9528 bool allow_func_align)
9529 {
9530 enum tree_code code;
9531
9532 *residue = 0;
9533
9534 code = TREE_CODE (expr);
9535 if (code == ADDR_EXPR)
9536 {
9537 unsigned int bitalign;
9538 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9539 *residue /= BITS_PER_UNIT;
9540 return bitalign / BITS_PER_UNIT;
9541 }
9542 else if (code == POINTER_PLUS_EXPR)
9543 {
9544 tree op0, op1;
9545 unsigned HOST_WIDE_INT modulus;
9546 enum tree_code inner_code;
9547
9548 op0 = TREE_OPERAND (expr, 0);
9549 STRIP_NOPS (op0);
9550 modulus = get_pointer_modulus_and_residue (op0, residue,
9551 allow_func_align);
9552
9553 op1 = TREE_OPERAND (expr, 1);
9554 STRIP_NOPS (op1);
9555 inner_code = TREE_CODE (op1);
9556 if (inner_code == INTEGER_CST)
9557 {
9558 *residue += TREE_INT_CST_LOW (op1);
9559 return modulus;
9560 }
9561 else if (inner_code == MULT_EXPR)
9562 {
9563 op1 = TREE_OPERAND (op1, 1);
9564 if (TREE_CODE (op1) == INTEGER_CST)
9565 {
9566 unsigned HOST_WIDE_INT align;
9567
9568 /* Compute the greatest power-of-2 divisor of op1. */
9569 align = TREE_INT_CST_LOW (op1);
9570 align &= -align;
9571
9572 /* If align is non-zero and less than *modulus, replace
9573 *modulus with align., If align is 0, then either op1 is 0
9574 or the greatest power-of-2 divisor of op1 doesn't fit in an
9575 unsigned HOST_WIDE_INT. In either case, no additional
9576 constraint is imposed. */
9577 if (align)
9578 modulus = MIN (modulus, align);
9579
9580 return modulus;
9581 }
9582 }
9583 }
9584
9585 /* If we get here, we were unable to determine anything useful about the
9586 expression. */
9587 return 1;
9588 }
9589
9590 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9591 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9592
9593 static bool
9594 vec_cst_ctor_to_array (tree arg, tree *elts)
9595 {
9596 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9597
9598 if (TREE_CODE (arg) == VECTOR_CST)
9599 {
9600 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9601 elts[i] = VECTOR_CST_ELT (arg, i);
9602 }
9603 else if (TREE_CODE (arg) == CONSTRUCTOR)
9604 {
9605 constructor_elt *elt;
9606
9607 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9608 if (i >= nelts)
9609 return false;
9610 else
9611 elts[i] = elt->value;
9612 }
9613 else
9614 return false;
9615 for (; i < nelts; i++)
9616 elts[i]
9617 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9618 return true;
9619 }
9620
9621 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9622 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9623 NULL_TREE otherwise. */
9624
9625 static tree
9626 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9627 {
9628 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9629 tree *elts;
9630 bool need_ctor = false;
9631
9632 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9633 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9634 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9635 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9636 return NULL_TREE;
9637
9638 elts = XALLOCAVEC (tree, nelts * 3);
9639 if (!vec_cst_ctor_to_array (arg0, elts)
9640 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9641 return NULL_TREE;
9642
9643 for (i = 0; i < nelts; i++)
9644 {
9645 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9646 need_ctor = true;
9647 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9648 }
9649
9650 if (need_ctor)
9651 {
9652 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9653 for (i = 0; i < nelts; i++)
9654 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9655 return build_constructor (type, v);
9656 }
9657 else
9658 return build_vector (type, &elts[2 * nelts]);
9659 }
9660
9661 /* Try to fold a pointer difference of type TYPE two address expressions of
9662 array references AREF0 and AREF1 using location LOC. Return a
9663 simplified expression for the difference or NULL_TREE. */
9664
9665 static tree
9666 fold_addr_of_array_ref_difference (location_t loc, tree type,
9667 tree aref0, tree aref1)
9668 {
9669 tree base0 = TREE_OPERAND (aref0, 0);
9670 tree base1 = TREE_OPERAND (aref1, 0);
9671 tree base_offset = build_int_cst (type, 0);
9672
9673 /* If the bases are array references as well, recurse. If the bases
9674 are pointer indirections compute the difference of the pointers.
9675 If the bases are equal, we are set. */
9676 if ((TREE_CODE (base0) == ARRAY_REF
9677 && TREE_CODE (base1) == ARRAY_REF
9678 && (base_offset
9679 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9680 || (INDIRECT_REF_P (base0)
9681 && INDIRECT_REF_P (base1)
9682 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9683 TREE_OPERAND (base0, 0),
9684 TREE_OPERAND (base1, 0))))
9685 || operand_equal_p (base0, base1, 0))
9686 {
9687 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9688 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9689 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9690 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9691 return fold_build2_loc (loc, PLUS_EXPR, type,
9692 base_offset,
9693 fold_build2_loc (loc, MULT_EXPR, type,
9694 diff, esz));
9695 }
9696 return NULL_TREE;
9697 }
9698
9699 /* If the real or vector real constant CST of type TYPE has an exact
9700 inverse, return it, else return NULL. */
9701
9702 static tree
9703 exact_inverse (tree type, tree cst)
9704 {
9705 REAL_VALUE_TYPE r;
9706 tree unit_type, *elts;
9707 enum machine_mode mode;
9708 unsigned vec_nelts, i;
9709
9710 switch (TREE_CODE (cst))
9711 {
9712 case REAL_CST:
9713 r = TREE_REAL_CST (cst);
9714
9715 if (exact_real_inverse (TYPE_MODE (type), &r))
9716 return build_real (type, r);
9717
9718 return NULL_TREE;
9719
9720 case VECTOR_CST:
9721 vec_nelts = VECTOR_CST_NELTS (cst);
9722 elts = XALLOCAVEC (tree, vec_nelts);
9723 unit_type = TREE_TYPE (type);
9724 mode = TYPE_MODE (unit_type);
9725
9726 for (i = 0; i < vec_nelts; i++)
9727 {
9728 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9729 if (!exact_real_inverse (mode, &r))
9730 return NULL_TREE;
9731 elts[i] = build_real (unit_type, r);
9732 }
9733
9734 return build_vector (type, elts);
9735
9736 default:
9737 return NULL_TREE;
9738 }
9739 }
9740
9741 /* Fold a binary expression of code CODE and type TYPE with operands
9742 OP0 and OP1. LOC is the location of the resulting expression.
9743 Return the folded expression if folding is successful. Otherwise,
9744 return NULL_TREE. */
9745
9746 tree
9747 fold_binary_loc (location_t loc,
9748 enum tree_code code, tree type, tree op0, tree op1)
9749 {
9750 enum tree_code_class kind = TREE_CODE_CLASS (code);
9751 tree arg0, arg1, tem;
9752 tree t1 = NULL_TREE;
9753 bool strict_overflow_p;
9754
9755 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9756 && TREE_CODE_LENGTH (code) == 2
9757 && op0 != NULL_TREE
9758 && op1 != NULL_TREE);
9759
9760 arg0 = op0;
9761 arg1 = op1;
9762
9763 /* Strip any conversions that don't change the mode. This is
9764 safe for every expression, except for a comparison expression
9765 because its signedness is derived from its operands. So, in
9766 the latter case, only strip conversions that don't change the
9767 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9768 preserved.
9769
9770 Note that this is done as an internal manipulation within the
9771 constant folder, in order to find the simplest representation
9772 of the arguments so that their form can be studied. In any
9773 cases, the appropriate type conversions should be put back in
9774 the tree that will get out of the constant folder. */
9775
9776 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9777 {
9778 STRIP_SIGN_NOPS (arg0);
9779 STRIP_SIGN_NOPS (arg1);
9780 }
9781 else
9782 {
9783 STRIP_NOPS (arg0);
9784 STRIP_NOPS (arg1);
9785 }
9786
9787 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9788 constant but we can't do arithmetic on them. */
9789 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9790 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9791 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9792 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9793 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9794 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9795 {
9796 if (kind == tcc_binary)
9797 {
9798 /* Make sure type and arg0 have the same saturating flag. */
9799 gcc_assert (TYPE_SATURATING (type)
9800 == TYPE_SATURATING (TREE_TYPE (arg0)));
9801 tem = const_binop (code, arg0, arg1);
9802 }
9803 else if (kind == tcc_comparison)
9804 tem = fold_relational_const (code, type, arg0, arg1);
9805 else
9806 tem = NULL_TREE;
9807
9808 if (tem != NULL_TREE)
9809 {
9810 if (TREE_TYPE (tem) != type)
9811 tem = fold_convert_loc (loc, type, tem);
9812 return tem;
9813 }
9814 }
9815
9816 /* If this is a commutative operation, and ARG0 is a constant, move it
9817 to ARG1 to reduce the number of tests below. */
9818 if (commutative_tree_code (code)
9819 && tree_swap_operands_p (arg0, arg1, true))
9820 return fold_build2_loc (loc, code, type, op1, op0);
9821
9822 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9823
9824 First check for cases where an arithmetic operation is applied to a
9825 compound, conditional, or comparison operation. Push the arithmetic
9826 operation inside the compound or conditional to see if any folding
9827 can then be done. Convert comparison to conditional for this purpose.
9828 The also optimizes non-constant cases that used to be done in
9829 expand_expr.
9830
9831 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9832 one of the operands is a comparison and the other is a comparison, a
9833 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9834 code below would make the expression more complex. Change it to a
9835 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9836 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9837
9838 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9839 || code == EQ_EXPR || code == NE_EXPR)
9840 && ((truth_value_p (TREE_CODE (arg0))
9841 && (truth_value_p (TREE_CODE (arg1))
9842 || (TREE_CODE (arg1) == BIT_AND_EXPR
9843 && integer_onep (TREE_OPERAND (arg1, 1)))))
9844 || (truth_value_p (TREE_CODE (arg1))
9845 && (truth_value_p (TREE_CODE (arg0))
9846 || (TREE_CODE (arg0) == BIT_AND_EXPR
9847 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9848 {
9849 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9850 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9851 : TRUTH_XOR_EXPR,
9852 boolean_type_node,
9853 fold_convert_loc (loc, boolean_type_node, arg0),
9854 fold_convert_loc (loc, boolean_type_node, arg1));
9855
9856 if (code == EQ_EXPR)
9857 tem = invert_truthvalue_loc (loc, tem);
9858
9859 return fold_convert_loc (loc, type, tem);
9860 }
9861
9862 if (TREE_CODE_CLASS (code) == tcc_binary
9863 || TREE_CODE_CLASS (code) == tcc_comparison)
9864 {
9865 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9866 {
9867 tem = fold_build2_loc (loc, code, type,
9868 fold_convert_loc (loc, TREE_TYPE (op0),
9869 TREE_OPERAND (arg0, 1)), op1);
9870 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9871 tem);
9872 }
9873 if (TREE_CODE (arg1) == COMPOUND_EXPR
9874 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9875 {
9876 tem = fold_build2_loc (loc, code, type, op0,
9877 fold_convert_loc (loc, TREE_TYPE (op1),
9878 TREE_OPERAND (arg1, 1)));
9879 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9880 tem);
9881 }
9882
9883 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9884 {
9885 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9886 arg0, arg1,
9887 /*cond_first_p=*/1);
9888 if (tem != NULL_TREE)
9889 return tem;
9890 }
9891
9892 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9893 {
9894 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9895 arg1, arg0,
9896 /*cond_first_p=*/0);
9897 if (tem != NULL_TREE)
9898 return tem;
9899 }
9900 }
9901
9902 switch (code)
9903 {
9904 case MEM_REF:
9905 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9906 if (TREE_CODE (arg0) == ADDR_EXPR
9907 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9908 {
9909 tree iref = TREE_OPERAND (arg0, 0);
9910 return fold_build2 (MEM_REF, type,
9911 TREE_OPERAND (iref, 0),
9912 int_const_binop (PLUS_EXPR, arg1,
9913 TREE_OPERAND (iref, 1)));
9914 }
9915
9916 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9917 if (TREE_CODE (arg0) == ADDR_EXPR
9918 && handled_component_p (TREE_OPERAND (arg0, 0)))
9919 {
9920 tree base;
9921 HOST_WIDE_INT coffset;
9922 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9923 &coffset);
9924 if (!base)
9925 return NULL_TREE;
9926 return fold_build2 (MEM_REF, type,
9927 build_fold_addr_expr (base),
9928 int_const_binop (PLUS_EXPR, arg1,
9929 size_int (coffset)));
9930 }
9931
9932 return NULL_TREE;
9933
9934 case POINTER_PLUS_EXPR:
9935 /* 0 +p index -> (type)index */
9936 if (integer_zerop (arg0))
9937 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9938
9939 /* PTR +p 0 -> PTR */
9940 if (integer_zerop (arg1))
9941 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9942
9943 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9944 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9945 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9946 return fold_convert_loc (loc, type,
9947 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9948 fold_convert_loc (loc, sizetype,
9949 arg1),
9950 fold_convert_loc (loc, sizetype,
9951 arg0)));
9952
9953 /* (PTR +p B) +p A -> PTR +p (B + A) */
9954 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9955 {
9956 tree inner;
9957 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9958 tree arg00 = TREE_OPERAND (arg0, 0);
9959 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9960 arg01, fold_convert_loc (loc, sizetype, arg1));
9961 return fold_convert_loc (loc, type,
9962 fold_build_pointer_plus_loc (loc,
9963 arg00, inner));
9964 }
9965
9966 /* PTR_CST +p CST -> CST1 */
9967 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9968 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9969 fold_convert_loc (loc, type, arg1));
9970
9971 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9972 of the array. Loop optimizer sometimes produce this type of
9973 expressions. */
9974 if (TREE_CODE (arg0) == ADDR_EXPR)
9975 {
9976 tem = try_move_mult_to_index (loc, arg0,
9977 fold_convert_loc (loc,
9978 ssizetype, arg1));
9979 if (tem)
9980 return fold_convert_loc (loc, type, tem);
9981 }
9982
9983 return NULL_TREE;
9984
9985 case PLUS_EXPR:
9986 /* A + (-B) -> A - B */
9987 if (TREE_CODE (arg1) == NEGATE_EXPR)
9988 return fold_build2_loc (loc, MINUS_EXPR, type,
9989 fold_convert_loc (loc, type, arg0),
9990 fold_convert_loc (loc, type,
9991 TREE_OPERAND (arg1, 0)));
9992 /* (-A) + B -> B - A */
9993 if (TREE_CODE (arg0) == NEGATE_EXPR
9994 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9995 return fold_build2_loc (loc, MINUS_EXPR, type,
9996 fold_convert_loc (loc, type, arg1),
9997 fold_convert_loc (loc, type,
9998 TREE_OPERAND (arg0, 0)));
9999
10000 if (INTEGRAL_TYPE_P (type))
10001 {
10002 /* Convert ~A + 1 to -A. */
10003 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10004 && integer_onep (arg1))
10005 return fold_build1_loc (loc, NEGATE_EXPR, type,
10006 fold_convert_loc (loc, type,
10007 TREE_OPERAND (arg0, 0)));
10008
10009 /* ~X + X is -1. */
10010 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10011 && !TYPE_OVERFLOW_TRAPS (type))
10012 {
10013 tree tem = TREE_OPERAND (arg0, 0);
10014
10015 STRIP_NOPS (tem);
10016 if (operand_equal_p (tem, arg1, 0))
10017 {
10018 t1 = build_int_cst_type (type, -1);
10019 return omit_one_operand_loc (loc, type, t1, arg1);
10020 }
10021 }
10022
10023 /* X + ~X is -1. */
10024 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10025 && !TYPE_OVERFLOW_TRAPS (type))
10026 {
10027 tree tem = TREE_OPERAND (arg1, 0);
10028
10029 STRIP_NOPS (tem);
10030 if (operand_equal_p (arg0, tem, 0))
10031 {
10032 t1 = build_int_cst_type (type, -1);
10033 return omit_one_operand_loc (loc, type, t1, arg0);
10034 }
10035 }
10036
10037 /* X + (X / CST) * -CST is X % CST. */
10038 if (TREE_CODE (arg1) == MULT_EXPR
10039 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10040 && operand_equal_p (arg0,
10041 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10042 {
10043 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10044 tree cst1 = TREE_OPERAND (arg1, 1);
10045 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10046 cst1, cst0);
10047 if (sum && integer_zerop (sum))
10048 return fold_convert_loc (loc, type,
10049 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10050 TREE_TYPE (arg0), arg0,
10051 cst0));
10052 }
10053 }
10054
10055 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10056 one. Make sure the type is not saturating and has the signedness of
10057 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10058 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10059 if ((TREE_CODE (arg0) == MULT_EXPR
10060 || TREE_CODE (arg1) == MULT_EXPR)
10061 && !TYPE_SATURATING (type)
10062 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10063 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10064 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10065 {
10066 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10067 if (tem)
10068 return tem;
10069 }
10070
10071 if (! FLOAT_TYPE_P (type))
10072 {
10073 if (integer_zerop (arg1))
10074 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10075
10076 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10077 with a constant, and the two constants have no bits in common,
10078 we should treat this as a BIT_IOR_EXPR since this may produce more
10079 simplifications. */
10080 if (TREE_CODE (arg0) == BIT_AND_EXPR
10081 && TREE_CODE (arg1) == BIT_AND_EXPR
10082 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10083 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10084 && integer_zerop (const_binop (BIT_AND_EXPR,
10085 TREE_OPERAND (arg0, 1),
10086 TREE_OPERAND (arg1, 1))))
10087 {
10088 code = BIT_IOR_EXPR;
10089 goto bit_ior;
10090 }
10091
10092 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10093 (plus (plus (mult) (mult)) (foo)) so that we can
10094 take advantage of the factoring cases below. */
10095 if (TYPE_OVERFLOW_WRAPS (type)
10096 && (((TREE_CODE (arg0) == PLUS_EXPR
10097 || TREE_CODE (arg0) == MINUS_EXPR)
10098 && TREE_CODE (arg1) == MULT_EXPR)
10099 || ((TREE_CODE (arg1) == PLUS_EXPR
10100 || TREE_CODE (arg1) == MINUS_EXPR)
10101 && TREE_CODE (arg0) == MULT_EXPR)))
10102 {
10103 tree parg0, parg1, parg, marg;
10104 enum tree_code pcode;
10105
10106 if (TREE_CODE (arg1) == MULT_EXPR)
10107 parg = arg0, marg = arg1;
10108 else
10109 parg = arg1, marg = arg0;
10110 pcode = TREE_CODE (parg);
10111 parg0 = TREE_OPERAND (parg, 0);
10112 parg1 = TREE_OPERAND (parg, 1);
10113 STRIP_NOPS (parg0);
10114 STRIP_NOPS (parg1);
10115
10116 if (TREE_CODE (parg0) == MULT_EXPR
10117 && TREE_CODE (parg1) != MULT_EXPR)
10118 return fold_build2_loc (loc, pcode, type,
10119 fold_build2_loc (loc, PLUS_EXPR, type,
10120 fold_convert_loc (loc, type,
10121 parg0),
10122 fold_convert_loc (loc, type,
10123 marg)),
10124 fold_convert_loc (loc, type, parg1));
10125 if (TREE_CODE (parg0) != MULT_EXPR
10126 && TREE_CODE (parg1) == MULT_EXPR)
10127 return
10128 fold_build2_loc (loc, PLUS_EXPR, type,
10129 fold_convert_loc (loc, type, parg0),
10130 fold_build2_loc (loc, pcode, type,
10131 fold_convert_loc (loc, type, marg),
10132 fold_convert_loc (loc, type,
10133 parg1)));
10134 }
10135 }
10136 else
10137 {
10138 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10139 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10140 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10141
10142 /* Likewise if the operands are reversed. */
10143 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10144 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10145
10146 /* Convert X + -C into X - C. */
10147 if (TREE_CODE (arg1) == REAL_CST
10148 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10149 {
10150 tem = fold_negate_const (arg1, type);
10151 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10152 return fold_build2_loc (loc, MINUS_EXPR, type,
10153 fold_convert_loc (loc, type, arg0),
10154 fold_convert_loc (loc, type, tem));
10155 }
10156
10157 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10158 to __complex__ ( x, y ). This is not the same for SNaNs or
10159 if signed zeros are involved. */
10160 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10161 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10162 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10163 {
10164 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10165 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10166 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10167 bool arg0rz = false, arg0iz = false;
10168 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10169 || (arg0i && (arg0iz = real_zerop (arg0i))))
10170 {
10171 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10172 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10173 if (arg0rz && arg1i && real_zerop (arg1i))
10174 {
10175 tree rp = arg1r ? arg1r
10176 : build1 (REALPART_EXPR, rtype, arg1);
10177 tree ip = arg0i ? arg0i
10178 : build1 (IMAGPART_EXPR, rtype, arg0);
10179 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10180 }
10181 else if (arg0iz && arg1r && real_zerop (arg1r))
10182 {
10183 tree rp = arg0r ? arg0r
10184 : build1 (REALPART_EXPR, rtype, arg0);
10185 tree ip = arg1i ? arg1i
10186 : build1 (IMAGPART_EXPR, rtype, arg1);
10187 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10188 }
10189 }
10190 }
10191
10192 if (flag_unsafe_math_optimizations
10193 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10194 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10195 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10196 return tem;
10197
10198 /* Convert x+x into x*2.0. */
10199 if (operand_equal_p (arg0, arg1, 0)
10200 && SCALAR_FLOAT_TYPE_P (type))
10201 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10202 build_real (type, dconst2));
10203
10204 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10205 We associate floats only if the user has specified
10206 -fassociative-math. */
10207 if (flag_associative_math
10208 && TREE_CODE (arg1) == PLUS_EXPR
10209 && TREE_CODE (arg0) != MULT_EXPR)
10210 {
10211 tree tree10 = TREE_OPERAND (arg1, 0);
10212 tree tree11 = TREE_OPERAND (arg1, 1);
10213 if (TREE_CODE (tree11) == MULT_EXPR
10214 && TREE_CODE (tree10) == MULT_EXPR)
10215 {
10216 tree tree0;
10217 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10218 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10219 }
10220 }
10221 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10222 We associate floats only if the user has specified
10223 -fassociative-math. */
10224 if (flag_associative_math
10225 && TREE_CODE (arg0) == PLUS_EXPR
10226 && TREE_CODE (arg1) != MULT_EXPR)
10227 {
10228 tree tree00 = TREE_OPERAND (arg0, 0);
10229 tree tree01 = TREE_OPERAND (arg0, 1);
10230 if (TREE_CODE (tree01) == MULT_EXPR
10231 && TREE_CODE (tree00) == MULT_EXPR)
10232 {
10233 tree tree0;
10234 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10235 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10236 }
10237 }
10238 }
10239
10240 bit_rotate:
10241 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10242 is a rotate of A by C1 bits. */
10243 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10244 is a rotate of A by B bits. */
10245 {
10246 enum tree_code code0, code1;
10247 tree rtype;
10248 code0 = TREE_CODE (arg0);
10249 code1 = TREE_CODE (arg1);
10250 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10251 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10252 && operand_equal_p (TREE_OPERAND (arg0, 0),
10253 TREE_OPERAND (arg1, 0), 0)
10254 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10255 TYPE_UNSIGNED (rtype))
10256 /* Only create rotates in complete modes. Other cases are not
10257 expanded properly. */
10258 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10259 {
10260 tree tree01, tree11;
10261 enum tree_code code01, code11;
10262
10263 tree01 = TREE_OPERAND (arg0, 1);
10264 tree11 = TREE_OPERAND (arg1, 1);
10265 STRIP_NOPS (tree01);
10266 STRIP_NOPS (tree11);
10267 code01 = TREE_CODE (tree01);
10268 code11 = TREE_CODE (tree11);
10269 if (code01 == INTEGER_CST
10270 && code11 == INTEGER_CST
10271 && TREE_INT_CST_HIGH (tree01) == 0
10272 && TREE_INT_CST_HIGH (tree11) == 0
10273 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10274 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10275 {
10276 tem = build2_loc (loc, LROTATE_EXPR,
10277 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10278 TREE_OPERAND (arg0, 0),
10279 code0 == LSHIFT_EXPR ? tree01 : tree11);
10280 return fold_convert_loc (loc, type, tem);
10281 }
10282 else if (code11 == MINUS_EXPR)
10283 {
10284 tree tree110, tree111;
10285 tree110 = TREE_OPERAND (tree11, 0);
10286 tree111 = TREE_OPERAND (tree11, 1);
10287 STRIP_NOPS (tree110);
10288 STRIP_NOPS (tree111);
10289 if (TREE_CODE (tree110) == INTEGER_CST
10290 && 0 == compare_tree_int (tree110,
10291 TYPE_PRECISION
10292 (TREE_TYPE (TREE_OPERAND
10293 (arg0, 0))))
10294 && operand_equal_p (tree01, tree111, 0))
10295 return
10296 fold_convert_loc (loc, type,
10297 build2 ((code0 == LSHIFT_EXPR
10298 ? LROTATE_EXPR
10299 : RROTATE_EXPR),
10300 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10301 TREE_OPERAND (arg0, 0), tree01));
10302 }
10303 else if (code01 == MINUS_EXPR)
10304 {
10305 tree tree010, tree011;
10306 tree010 = TREE_OPERAND (tree01, 0);
10307 tree011 = TREE_OPERAND (tree01, 1);
10308 STRIP_NOPS (tree010);
10309 STRIP_NOPS (tree011);
10310 if (TREE_CODE (tree010) == INTEGER_CST
10311 && 0 == compare_tree_int (tree010,
10312 TYPE_PRECISION
10313 (TREE_TYPE (TREE_OPERAND
10314 (arg0, 0))))
10315 && operand_equal_p (tree11, tree011, 0))
10316 return fold_convert_loc
10317 (loc, type,
10318 build2 ((code0 != LSHIFT_EXPR
10319 ? LROTATE_EXPR
10320 : RROTATE_EXPR),
10321 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10322 TREE_OPERAND (arg0, 0), tree11));
10323 }
10324 }
10325 }
10326
10327 associate:
10328 /* In most languages, can't associate operations on floats through
10329 parentheses. Rather than remember where the parentheses were, we
10330 don't associate floats at all, unless the user has specified
10331 -fassociative-math.
10332 And, we need to make sure type is not saturating. */
10333
10334 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10335 && !TYPE_SATURATING (type))
10336 {
10337 tree var0, con0, lit0, minus_lit0;
10338 tree var1, con1, lit1, minus_lit1;
10339 bool ok = true;
10340
10341 /* Split both trees into variables, constants, and literals. Then
10342 associate each group together, the constants with literals,
10343 then the result with variables. This increases the chances of
10344 literals being recombined later and of generating relocatable
10345 expressions for the sum of a constant and literal. */
10346 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10347 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10348 code == MINUS_EXPR);
10349
10350 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10351 if (code == MINUS_EXPR)
10352 code = PLUS_EXPR;
10353
10354 /* With undefined overflow we can only associate constants with one
10355 variable, and constants whose association doesn't overflow. */
10356 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10357 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10358 {
10359 if (var0 && var1)
10360 {
10361 tree tmp0 = var0;
10362 tree tmp1 = var1;
10363
10364 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10365 tmp0 = TREE_OPERAND (tmp0, 0);
10366 if (CONVERT_EXPR_P (tmp0)
10367 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10368 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10369 <= TYPE_PRECISION (type)))
10370 tmp0 = TREE_OPERAND (tmp0, 0);
10371 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10372 tmp1 = TREE_OPERAND (tmp1, 0);
10373 if (CONVERT_EXPR_P (tmp1)
10374 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10375 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10376 <= TYPE_PRECISION (type)))
10377 tmp1 = TREE_OPERAND (tmp1, 0);
10378 /* The only case we can still associate with two variables
10379 is if they are the same, modulo negation and bit-pattern
10380 preserving conversions. */
10381 if (!operand_equal_p (tmp0, tmp1, 0))
10382 ok = false;
10383 }
10384
10385 if (ok && lit0 && lit1)
10386 {
10387 tree tmp0 = fold_convert (type, lit0);
10388 tree tmp1 = fold_convert (type, lit1);
10389
10390 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10391 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10392 ok = false;
10393 }
10394 }
10395
10396 /* Only do something if we found more than two objects. Otherwise,
10397 nothing has changed and we risk infinite recursion. */
10398 if (ok
10399 && (2 < ((var0 != 0) + (var1 != 0)
10400 + (con0 != 0) + (con1 != 0)
10401 + (lit0 != 0) + (lit1 != 0)
10402 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10403 {
10404 var0 = associate_trees (loc, var0, var1, code, type);
10405 con0 = associate_trees (loc, con0, con1, code, type);
10406 lit0 = associate_trees (loc, lit0, lit1, code, type);
10407 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10408
10409 /* Preserve the MINUS_EXPR if the negative part of the literal is
10410 greater than the positive part. Otherwise, the multiplicative
10411 folding code (i.e extract_muldiv) may be fooled in case
10412 unsigned constants are subtracted, like in the following
10413 example: ((X*2 + 4) - 8U)/2. */
10414 if (minus_lit0 && lit0)
10415 {
10416 if (TREE_CODE (lit0) == INTEGER_CST
10417 && TREE_CODE (minus_lit0) == INTEGER_CST
10418 && tree_int_cst_lt (lit0, minus_lit0))
10419 {
10420 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10421 MINUS_EXPR, type);
10422 lit0 = 0;
10423 }
10424 else
10425 {
10426 lit0 = associate_trees (loc, lit0, minus_lit0,
10427 MINUS_EXPR, type);
10428 minus_lit0 = 0;
10429 }
10430 }
10431 if (minus_lit0)
10432 {
10433 if (con0 == 0)
10434 return
10435 fold_convert_loc (loc, type,
10436 associate_trees (loc, var0, minus_lit0,
10437 MINUS_EXPR, type));
10438 else
10439 {
10440 con0 = associate_trees (loc, con0, minus_lit0,
10441 MINUS_EXPR, type);
10442 return
10443 fold_convert_loc (loc, type,
10444 associate_trees (loc, var0, con0,
10445 PLUS_EXPR, type));
10446 }
10447 }
10448
10449 con0 = associate_trees (loc, con0, lit0, code, type);
10450 return
10451 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10452 code, type));
10453 }
10454 }
10455
10456 return NULL_TREE;
10457
10458 case MINUS_EXPR:
10459 /* Pointer simplifications for subtraction, simple reassociations. */
10460 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10461 {
10462 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10463 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10464 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10465 {
10466 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10467 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10468 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10469 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10470 return fold_build2_loc (loc, PLUS_EXPR, type,
10471 fold_build2_loc (loc, MINUS_EXPR, type,
10472 arg00, arg10),
10473 fold_build2_loc (loc, MINUS_EXPR, type,
10474 arg01, arg11));
10475 }
10476 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10477 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10478 {
10479 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10480 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10481 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10482 fold_convert_loc (loc, type, arg1));
10483 if (tmp)
10484 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10485 }
10486 }
10487 /* A - (-B) -> A + B */
10488 if (TREE_CODE (arg1) == NEGATE_EXPR)
10489 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10490 fold_convert_loc (loc, type,
10491 TREE_OPERAND (arg1, 0)));
10492 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10493 if (TREE_CODE (arg0) == NEGATE_EXPR
10494 && (FLOAT_TYPE_P (type)
10495 || INTEGRAL_TYPE_P (type))
10496 && negate_expr_p (arg1)
10497 && reorder_operands_p (arg0, arg1))
10498 return fold_build2_loc (loc, MINUS_EXPR, type,
10499 fold_convert_loc (loc, type,
10500 negate_expr (arg1)),
10501 fold_convert_loc (loc, type,
10502 TREE_OPERAND (arg0, 0)));
10503 /* Convert -A - 1 to ~A. */
10504 if (INTEGRAL_TYPE_P (type)
10505 && TREE_CODE (arg0) == NEGATE_EXPR
10506 && integer_onep (arg1)
10507 && !TYPE_OVERFLOW_TRAPS (type))
10508 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10509 fold_convert_loc (loc, type,
10510 TREE_OPERAND (arg0, 0)));
10511
10512 /* Convert -1 - A to ~A. */
10513 if (INTEGRAL_TYPE_P (type)
10514 && integer_all_onesp (arg0))
10515 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10516
10517
10518 /* X - (X / CST) * CST is X % CST. */
10519 if (INTEGRAL_TYPE_P (type)
10520 && TREE_CODE (arg1) == MULT_EXPR
10521 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10522 && operand_equal_p (arg0,
10523 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10524 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10525 TREE_OPERAND (arg1, 1), 0))
10526 return
10527 fold_convert_loc (loc, type,
10528 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10529 arg0, TREE_OPERAND (arg1, 1)));
10530
10531 if (! FLOAT_TYPE_P (type))
10532 {
10533 if (integer_zerop (arg0))
10534 return negate_expr (fold_convert_loc (loc, type, arg1));
10535 if (integer_zerop (arg1))
10536 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10537
10538 /* Fold A - (A & B) into ~B & A. */
10539 if (!TREE_SIDE_EFFECTS (arg0)
10540 && TREE_CODE (arg1) == BIT_AND_EXPR)
10541 {
10542 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10543 {
10544 tree arg10 = fold_convert_loc (loc, type,
10545 TREE_OPERAND (arg1, 0));
10546 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10547 fold_build1_loc (loc, BIT_NOT_EXPR,
10548 type, arg10),
10549 fold_convert_loc (loc, type, arg0));
10550 }
10551 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10552 {
10553 tree arg11 = fold_convert_loc (loc,
10554 type, TREE_OPERAND (arg1, 1));
10555 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10556 fold_build1_loc (loc, BIT_NOT_EXPR,
10557 type, arg11),
10558 fold_convert_loc (loc, type, arg0));
10559 }
10560 }
10561
10562 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10563 any power of 2 minus 1. */
10564 if (TREE_CODE (arg0) == BIT_AND_EXPR
10565 && TREE_CODE (arg1) == BIT_AND_EXPR
10566 && operand_equal_p (TREE_OPERAND (arg0, 0),
10567 TREE_OPERAND (arg1, 0), 0))
10568 {
10569 tree mask0 = TREE_OPERAND (arg0, 1);
10570 tree mask1 = TREE_OPERAND (arg1, 1);
10571 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10572
10573 if (operand_equal_p (tem, mask1, 0))
10574 {
10575 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10576 TREE_OPERAND (arg0, 0), mask1);
10577 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10578 }
10579 }
10580 }
10581
10582 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10583 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10584 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10585
10586 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10587 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10588 (-ARG1 + ARG0) reduces to -ARG1. */
10589 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10590 return negate_expr (fold_convert_loc (loc, type, arg1));
10591
10592 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10593 __complex__ ( x, -y ). This is not the same for SNaNs or if
10594 signed zeros are involved. */
10595 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10596 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10597 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10598 {
10599 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10600 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10601 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10602 bool arg0rz = false, arg0iz = false;
10603 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10604 || (arg0i && (arg0iz = real_zerop (arg0i))))
10605 {
10606 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10607 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10608 if (arg0rz && arg1i && real_zerop (arg1i))
10609 {
10610 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10611 arg1r ? arg1r
10612 : build1 (REALPART_EXPR, rtype, arg1));
10613 tree ip = arg0i ? arg0i
10614 : build1 (IMAGPART_EXPR, rtype, arg0);
10615 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10616 }
10617 else if (arg0iz && arg1r && real_zerop (arg1r))
10618 {
10619 tree rp = arg0r ? arg0r
10620 : build1 (REALPART_EXPR, rtype, arg0);
10621 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10622 arg1i ? arg1i
10623 : build1 (IMAGPART_EXPR, rtype, arg1));
10624 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10625 }
10626 }
10627 }
10628
10629 /* Fold &x - &x. This can happen from &x.foo - &x.
10630 This is unsafe for certain floats even in non-IEEE formats.
10631 In IEEE, it is unsafe because it does wrong for NaNs.
10632 Also note that operand_equal_p is always false if an operand
10633 is volatile. */
10634
10635 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10636 && operand_equal_p (arg0, arg1, 0))
10637 return build_zero_cst (type);
10638
10639 /* A - B -> A + (-B) if B is easily negatable. */
10640 if (negate_expr_p (arg1)
10641 && ((FLOAT_TYPE_P (type)
10642 /* Avoid this transformation if B is a positive REAL_CST. */
10643 && (TREE_CODE (arg1) != REAL_CST
10644 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10645 || INTEGRAL_TYPE_P (type)))
10646 return fold_build2_loc (loc, PLUS_EXPR, type,
10647 fold_convert_loc (loc, type, arg0),
10648 fold_convert_loc (loc, type,
10649 negate_expr (arg1)));
10650
10651 /* Try folding difference of addresses. */
10652 {
10653 HOST_WIDE_INT diff;
10654
10655 if ((TREE_CODE (arg0) == ADDR_EXPR
10656 || TREE_CODE (arg1) == ADDR_EXPR)
10657 && ptr_difference_const (arg0, arg1, &diff))
10658 return build_int_cst_type (type, diff);
10659 }
10660
10661 /* Fold &a[i] - &a[j] to i-j. */
10662 if (TREE_CODE (arg0) == ADDR_EXPR
10663 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10664 && TREE_CODE (arg1) == ADDR_EXPR
10665 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10666 {
10667 tree tem = fold_addr_of_array_ref_difference (loc, type,
10668 TREE_OPERAND (arg0, 0),
10669 TREE_OPERAND (arg1, 0));
10670 if (tem)
10671 return tem;
10672 }
10673
10674 if (FLOAT_TYPE_P (type)
10675 && flag_unsafe_math_optimizations
10676 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10677 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10678 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10679 return tem;
10680
10681 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10682 one. Make sure the type is not saturating and has the signedness of
10683 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10684 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10685 if ((TREE_CODE (arg0) == MULT_EXPR
10686 || TREE_CODE (arg1) == MULT_EXPR)
10687 && !TYPE_SATURATING (type)
10688 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10689 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10690 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10691 {
10692 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10693 if (tem)
10694 return tem;
10695 }
10696
10697 goto associate;
10698
10699 case MULT_EXPR:
10700 /* (-A) * (-B) -> A * B */
10701 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10702 return fold_build2_loc (loc, MULT_EXPR, type,
10703 fold_convert_loc (loc, type,
10704 TREE_OPERAND (arg0, 0)),
10705 fold_convert_loc (loc, type,
10706 negate_expr (arg1)));
10707 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10708 return fold_build2_loc (loc, MULT_EXPR, type,
10709 fold_convert_loc (loc, type,
10710 negate_expr (arg0)),
10711 fold_convert_loc (loc, type,
10712 TREE_OPERAND (arg1, 0)));
10713
10714 if (! FLOAT_TYPE_P (type))
10715 {
10716 if (integer_zerop (arg1))
10717 return omit_one_operand_loc (loc, type, arg1, arg0);
10718 if (integer_onep (arg1))
10719 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10720 /* Transform x * -1 into -x. Make sure to do the negation
10721 on the original operand with conversions not stripped
10722 because we can only strip non-sign-changing conversions. */
10723 if (integer_all_onesp (arg1))
10724 return fold_convert_loc (loc, type, negate_expr (op0));
10725 /* Transform x * -C into -x * C if x is easily negatable. */
10726 if (TREE_CODE (arg1) == INTEGER_CST
10727 && tree_int_cst_sgn (arg1) == -1
10728 && negate_expr_p (arg0)
10729 && (tem = negate_expr (arg1)) != arg1
10730 && !TREE_OVERFLOW (tem))
10731 return fold_build2_loc (loc, MULT_EXPR, type,
10732 fold_convert_loc (loc, type,
10733 negate_expr (arg0)),
10734 tem);
10735
10736 /* (a * (1 << b)) is (a << b) */
10737 if (TREE_CODE (arg1) == LSHIFT_EXPR
10738 && integer_onep (TREE_OPERAND (arg1, 0)))
10739 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10740 TREE_OPERAND (arg1, 1));
10741 if (TREE_CODE (arg0) == LSHIFT_EXPR
10742 && integer_onep (TREE_OPERAND (arg0, 0)))
10743 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10744 TREE_OPERAND (arg0, 1));
10745
10746 /* (A + A) * C -> A * 2 * C */
10747 if (TREE_CODE (arg0) == PLUS_EXPR
10748 && TREE_CODE (arg1) == INTEGER_CST
10749 && operand_equal_p (TREE_OPERAND (arg0, 0),
10750 TREE_OPERAND (arg0, 1), 0))
10751 return fold_build2_loc (loc, MULT_EXPR, type,
10752 omit_one_operand_loc (loc, type,
10753 TREE_OPERAND (arg0, 0),
10754 TREE_OPERAND (arg0, 1)),
10755 fold_build2_loc (loc, MULT_EXPR, type,
10756 build_int_cst (type, 2) , arg1));
10757
10758 strict_overflow_p = false;
10759 if (TREE_CODE (arg1) == INTEGER_CST
10760 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10761 &strict_overflow_p)))
10762 {
10763 if (strict_overflow_p)
10764 fold_overflow_warning (("assuming signed overflow does not "
10765 "occur when simplifying "
10766 "multiplication"),
10767 WARN_STRICT_OVERFLOW_MISC);
10768 return fold_convert_loc (loc, type, tem);
10769 }
10770
10771 /* Optimize z * conj(z) for integer complex numbers. */
10772 if (TREE_CODE (arg0) == CONJ_EXPR
10773 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10774 return fold_mult_zconjz (loc, type, arg1);
10775 if (TREE_CODE (arg1) == CONJ_EXPR
10776 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10777 return fold_mult_zconjz (loc, type, arg0);
10778 }
10779 else
10780 {
10781 /* Maybe fold x * 0 to 0. The expressions aren't the same
10782 when x is NaN, since x * 0 is also NaN. Nor are they the
10783 same in modes with signed zeros, since multiplying a
10784 negative value by 0 gives -0, not +0. */
10785 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10786 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10787 && real_zerop (arg1))
10788 return omit_one_operand_loc (loc, type, arg1, arg0);
10789 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10790 Likewise for complex arithmetic with signed zeros. */
10791 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10792 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10793 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10794 && real_onep (arg1))
10795 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10796
10797 /* Transform x * -1.0 into -x. */
10798 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10799 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10800 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10801 && real_minus_onep (arg1))
10802 return fold_convert_loc (loc, type, negate_expr (arg0));
10803
10804 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10805 the result for floating point types due to rounding so it is applied
10806 only if -fassociative-math was specify. */
10807 if (flag_associative_math
10808 && TREE_CODE (arg0) == RDIV_EXPR
10809 && TREE_CODE (arg1) == REAL_CST
10810 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10811 {
10812 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10813 arg1);
10814 if (tem)
10815 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10816 TREE_OPERAND (arg0, 1));
10817 }
10818
10819 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10820 if (operand_equal_p (arg0, arg1, 0))
10821 {
10822 tree tem = fold_strip_sign_ops (arg0);
10823 if (tem != NULL_TREE)
10824 {
10825 tem = fold_convert_loc (loc, type, tem);
10826 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10827 }
10828 }
10829
10830 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10831 This is not the same for NaNs or if signed zeros are
10832 involved. */
10833 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10834 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10835 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10836 && TREE_CODE (arg1) == COMPLEX_CST
10837 && real_zerop (TREE_REALPART (arg1)))
10838 {
10839 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10840 if (real_onep (TREE_IMAGPART (arg1)))
10841 return
10842 fold_build2_loc (loc, COMPLEX_EXPR, type,
10843 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10844 rtype, arg0)),
10845 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10846 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10847 return
10848 fold_build2_loc (loc, COMPLEX_EXPR, type,
10849 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10850 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10851 rtype, arg0)));
10852 }
10853
10854 /* Optimize z * conj(z) for floating point complex numbers.
10855 Guarded by flag_unsafe_math_optimizations as non-finite
10856 imaginary components don't produce scalar results. */
10857 if (flag_unsafe_math_optimizations
10858 && TREE_CODE (arg0) == CONJ_EXPR
10859 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10860 return fold_mult_zconjz (loc, type, arg1);
10861 if (flag_unsafe_math_optimizations
10862 && TREE_CODE (arg1) == CONJ_EXPR
10863 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10864 return fold_mult_zconjz (loc, type, arg0);
10865
10866 if (flag_unsafe_math_optimizations)
10867 {
10868 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10869 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10870
10871 /* Optimizations of root(...)*root(...). */
10872 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10873 {
10874 tree rootfn, arg;
10875 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10876 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10877
10878 /* Optimize sqrt(x)*sqrt(x) as x. */
10879 if (BUILTIN_SQRT_P (fcode0)
10880 && operand_equal_p (arg00, arg10, 0)
10881 && ! HONOR_SNANS (TYPE_MODE (type)))
10882 return arg00;
10883
10884 /* Optimize root(x)*root(y) as root(x*y). */
10885 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10886 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10887 return build_call_expr_loc (loc, rootfn, 1, arg);
10888 }
10889
10890 /* Optimize expN(x)*expN(y) as expN(x+y). */
10891 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10892 {
10893 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10894 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10895 CALL_EXPR_ARG (arg0, 0),
10896 CALL_EXPR_ARG (arg1, 0));
10897 return build_call_expr_loc (loc, expfn, 1, arg);
10898 }
10899
10900 /* Optimizations of pow(...)*pow(...). */
10901 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10902 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10903 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10904 {
10905 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10906 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10907 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10908 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10909
10910 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10911 if (operand_equal_p (arg01, arg11, 0))
10912 {
10913 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10914 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10915 arg00, arg10);
10916 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10917 }
10918
10919 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10920 if (operand_equal_p (arg00, arg10, 0))
10921 {
10922 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10923 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10924 arg01, arg11);
10925 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10926 }
10927 }
10928
10929 /* Optimize tan(x)*cos(x) as sin(x). */
10930 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10931 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10932 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10933 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10934 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10935 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10936 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10937 CALL_EXPR_ARG (arg1, 0), 0))
10938 {
10939 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10940
10941 if (sinfn != NULL_TREE)
10942 return build_call_expr_loc (loc, sinfn, 1,
10943 CALL_EXPR_ARG (arg0, 0));
10944 }
10945
10946 /* Optimize x*pow(x,c) as pow(x,c+1). */
10947 if (fcode1 == BUILT_IN_POW
10948 || fcode1 == BUILT_IN_POWF
10949 || fcode1 == BUILT_IN_POWL)
10950 {
10951 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10952 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10953 if (TREE_CODE (arg11) == REAL_CST
10954 && !TREE_OVERFLOW (arg11)
10955 && operand_equal_p (arg0, arg10, 0))
10956 {
10957 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10958 REAL_VALUE_TYPE c;
10959 tree arg;
10960
10961 c = TREE_REAL_CST (arg11);
10962 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10963 arg = build_real (type, c);
10964 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10965 }
10966 }
10967
10968 /* Optimize pow(x,c)*x as pow(x,c+1). */
10969 if (fcode0 == BUILT_IN_POW
10970 || fcode0 == BUILT_IN_POWF
10971 || fcode0 == BUILT_IN_POWL)
10972 {
10973 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10974 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10975 if (TREE_CODE (arg01) == REAL_CST
10976 && !TREE_OVERFLOW (arg01)
10977 && operand_equal_p (arg1, arg00, 0))
10978 {
10979 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10980 REAL_VALUE_TYPE c;
10981 tree arg;
10982
10983 c = TREE_REAL_CST (arg01);
10984 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10985 arg = build_real (type, c);
10986 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10987 }
10988 }
10989
10990 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10991 if (!in_gimple_form
10992 && optimize
10993 && operand_equal_p (arg0, arg1, 0))
10994 {
10995 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10996
10997 if (powfn)
10998 {
10999 tree arg = build_real (type, dconst2);
11000 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11001 }
11002 }
11003 }
11004 }
11005 goto associate;
11006
11007 case BIT_IOR_EXPR:
11008 bit_ior:
11009 if (integer_all_onesp (arg1))
11010 return omit_one_operand_loc (loc, type, arg1, arg0);
11011 if (integer_zerop (arg1))
11012 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11013 if (operand_equal_p (arg0, arg1, 0))
11014 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11015
11016 /* ~X | X is -1. */
11017 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11018 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11019 {
11020 t1 = build_zero_cst (type);
11021 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11022 return omit_one_operand_loc (loc, type, t1, arg1);
11023 }
11024
11025 /* X | ~X is -1. */
11026 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11027 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11028 {
11029 t1 = build_zero_cst (type);
11030 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11031 return omit_one_operand_loc (loc, type, t1, arg0);
11032 }
11033
11034 /* Canonicalize (X & C1) | C2. */
11035 if (TREE_CODE (arg0) == BIT_AND_EXPR
11036 && TREE_CODE (arg1) == INTEGER_CST
11037 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11038 {
11039 double_int c1, c2, c3, msk;
11040 int width = TYPE_PRECISION (type), w;
11041 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11042 c2 = tree_to_double_int (arg1);
11043
11044 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11045 if (double_int_equal_p (double_int_and (c1, c2), c1))
11046 return omit_one_operand_loc (loc, type, arg1,
11047 TREE_OPERAND (arg0, 0));
11048
11049 msk = double_int_mask (width);
11050
11051 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11052 if (double_int_zero_p (double_int_and_not (msk,
11053 double_int_ior (c1, c2))))
11054 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11055 TREE_OPERAND (arg0, 0), arg1);
11056
11057 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11058 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11059 mode which allows further optimizations. */
11060 c1 = double_int_and (c1, msk);
11061 c2 = double_int_and (c2, msk);
11062 c3 = double_int_and_not (c1, c2);
11063 for (w = BITS_PER_UNIT;
11064 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11065 w <<= 1)
11066 {
11067 unsigned HOST_WIDE_INT mask
11068 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11069 if (((c1.low | c2.low) & mask) == mask
11070 && (c1.low & ~mask) == 0 && c1.high == 0)
11071 {
11072 c3 = uhwi_to_double_int (mask);
11073 break;
11074 }
11075 }
11076 if (!double_int_equal_p (c3, c1))
11077 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11078 fold_build2_loc (loc, BIT_AND_EXPR, type,
11079 TREE_OPERAND (arg0, 0),
11080 double_int_to_tree (type,
11081 c3)),
11082 arg1);
11083 }
11084
11085 /* (X & Y) | Y is (X, Y). */
11086 if (TREE_CODE (arg0) == BIT_AND_EXPR
11087 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11088 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11089 /* (X & Y) | X is (Y, X). */
11090 if (TREE_CODE (arg0) == BIT_AND_EXPR
11091 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11092 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11093 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11094 /* X | (X & Y) is (Y, X). */
11095 if (TREE_CODE (arg1) == BIT_AND_EXPR
11096 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11097 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11098 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11099 /* X | (Y & X) is (Y, X). */
11100 if (TREE_CODE (arg1) == BIT_AND_EXPR
11101 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11102 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11103 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11104
11105 /* (X & ~Y) | (~X & Y) is X ^ Y */
11106 if (TREE_CODE (arg0) == BIT_AND_EXPR
11107 && TREE_CODE (arg1) == BIT_AND_EXPR)
11108 {
11109 tree a0, a1, l0, l1, n0, n1;
11110
11111 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11112 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11113
11114 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11115 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11116
11117 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11118 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11119
11120 if ((operand_equal_p (n0, a0, 0)
11121 && operand_equal_p (n1, a1, 0))
11122 || (operand_equal_p (n0, a1, 0)
11123 && operand_equal_p (n1, a0, 0)))
11124 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11125 }
11126
11127 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11128 if (t1 != NULL_TREE)
11129 return t1;
11130
11131 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11132
11133 This results in more efficient code for machines without a NAND
11134 instruction. Combine will canonicalize to the first form
11135 which will allow use of NAND instructions provided by the
11136 backend if they exist. */
11137 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11138 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11139 {
11140 return
11141 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11142 build2 (BIT_AND_EXPR, type,
11143 fold_convert_loc (loc, type,
11144 TREE_OPERAND (arg0, 0)),
11145 fold_convert_loc (loc, type,
11146 TREE_OPERAND (arg1, 0))));
11147 }
11148
11149 /* See if this can be simplified into a rotate first. If that
11150 is unsuccessful continue in the association code. */
11151 goto bit_rotate;
11152
11153 case BIT_XOR_EXPR:
11154 if (integer_zerop (arg1))
11155 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11156 if (integer_all_onesp (arg1))
11157 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11158 if (operand_equal_p (arg0, arg1, 0))
11159 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11160
11161 /* ~X ^ X is -1. */
11162 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11163 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11164 {
11165 t1 = build_zero_cst (type);
11166 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11167 return omit_one_operand_loc (loc, type, t1, arg1);
11168 }
11169
11170 /* X ^ ~X is -1. */
11171 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11172 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11173 {
11174 t1 = build_zero_cst (type);
11175 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11176 return omit_one_operand_loc (loc, type, t1, arg0);
11177 }
11178
11179 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11180 with a constant, and the two constants have no bits in common,
11181 we should treat this as a BIT_IOR_EXPR since this may produce more
11182 simplifications. */
11183 if (TREE_CODE (arg0) == BIT_AND_EXPR
11184 && TREE_CODE (arg1) == BIT_AND_EXPR
11185 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11186 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11187 && integer_zerop (const_binop (BIT_AND_EXPR,
11188 TREE_OPERAND (arg0, 1),
11189 TREE_OPERAND (arg1, 1))))
11190 {
11191 code = BIT_IOR_EXPR;
11192 goto bit_ior;
11193 }
11194
11195 /* (X | Y) ^ X -> Y & ~ X*/
11196 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11197 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11198 {
11199 tree t2 = TREE_OPERAND (arg0, 1);
11200 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11201 arg1);
11202 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11203 fold_convert_loc (loc, type, t2),
11204 fold_convert_loc (loc, type, t1));
11205 return t1;
11206 }
11207
11208 /* (Y | X) ^ X -> Y & ~ X*/
11209 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11210 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11211 {
11212 tree t2 = TREE_OPERAND (arg0, 0);
11213 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11214 arg1);
11215 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11216 fold_convert_loc (loc, type, t2),
11217 fold_convert_loc (loc, type, t1));
11218 return t1;
11219 }
11220
11221 /* X ^ (X | Y) -> Y & ~ X*/
11222 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11223 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11224 {
11225 tree t2 = TREE_OPERAND (arg1, 1);
11226 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11227 arg0);
11228 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11229 fold_convert_loc (loc, type, t2),
11230 fold_convert_loc (loc, type, t1));
11231 return t1;
11232 }
11233
11234 /* X ^ (Y | X) -> Y & ~ X*/
11235 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11236 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11237 {
11238 tree t2 = TREE_OPERAND (arg1, 0);
11239 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11240 arg0);
11241 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11242 fold_convert_loc (loc, type, t2),
11243 fold_convert_loc (loc, type, t1));
11244 return t1;
11245 }
11246
11247 /* Convert ~X ^ ~Y to X ^ Y. */
11248 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11249 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11250 return fold_build2_loc (loc, code, type,
11251 fold_convert_loc (loc, type,
11252 TREE_OPERAND (arg0, 0)),
11253 fold_convert_loc (loc, type,
11254 TREE_OPERAND (arg1, 0)));
11255
11256 /* Convert ~X ^ C to X ^ ~C. */
11257 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11258 && TREE_CODE (arg1) == INTEGER_CST)
11259 return fold_build2_loc (loc, code, type,
11260 fold_convert_loc (loc, type,
11261 TREE_OPERAND (arg0, 0)),
11262 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11263
11264 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11265 if (TREE_CODE (arg0) == BIT_AND_EXPR
11266 && integer_onep (TREE_OPERAND (arg0, 1))
11267 && integer_onep (arg1))
11268 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11269 build_zero_cst (TREE_TYPE (arg0)));
11270
11271 /* Fold (X & Y) ^ Y as ~X & Y. */
11272 if (TREE_CODE (arg0) == BIT_AND_EXPR
11273 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11274 {
11275 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11276 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11277 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11278 fold_convert_loc (loc, type, arg1));
11279 }
11280 /* Fold (X & Y) ^ X as ~Y & X. */
11281 if (TREE_CODE (arg0) == BIT_AND_EXPR
11282 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11283 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11284 {
11285 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11286 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11287 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11288 fold_convert_loc (loc, type, arg1));
11289 }
11290 /* Fold X ^ (X & Y) as X & ~Y. */
11291 if (TREE_CODE (arg1) == BIT_AND_EXPR
11292 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11293 {
11294 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11295 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11296 fold_convert_loc (loc, type, arg0),
11297 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11298 }
11299 /* Fold X ^ (Y & X) as ~Y & X. */
11300 if (TREE_CODE (arg1) == BIT_AND_EXPR
11301 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11302 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11303 {
11304 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11305 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11306 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11307 fold_convert_loc (loc, type, arg0));
11308 }
11309
11310 /* See if this can be simplified into a rotate first. If that
11311 is unsuccessful continue in the association code. */
11312 goto bit_rotate;
11313
11314 case BIT_AND_EXPR:
11315 if (integer_all_onesp (arg1))
11316 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11317 if (integer_zerop (arg1))
11318 return omit_one_operand_loc (loc, type, arg1, arg0);
11319 if (operand_equal_p (arg0, arg1, 0))
11320 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11321
11322 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11323 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11324 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11325 || (TREE_CODE (arg0) == EQ_EXPR
11326 && integer_zerop (TREE_OPERAND (arg0, 1))))
11327 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11328 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11329
11330 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11331 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11332 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11333 || (TREE_CODE (arg1) == EQ_EXPR
11334 && integer_zerop (TREE_OPERAND (arg1, 1))))
11335 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11336 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11337
11338 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11339 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11340 && TREE_CODE (arg1) == INTEGER_CST
11341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11342 {
11343 tree tmp1 = fold_convert_loc (loc, type, arg1);
11344 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11345 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11346 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11347 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11348 return
11349 fold_convert_loc (loc, type,
11350 fold_build2_loc (loc, BIT_IOR_EXPR,
11351 type, tmp2, tmp3));
11352 }
11353
11354 /* (X | Y) & Y is (X, Y). */
11355 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11356 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11357 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11358 /* (X | Y) & X is (Y, X). */
11359 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11360 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11361 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11362 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11363 /* X & (X | Y) is (Y, X). */
11364 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11365 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11366 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11367 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11368 /* X & (Y | X) is (Y, X). */
11369 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11370 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11371 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11372 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11373
11374 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11375 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11376 && integer_onep (TREE_OPERAND (arg0, 1))
11377 && integer_onep (arg1))
11378 {
11379 tree tem2;
11380 tem = TREE_OPERAND (arg0, 0);
11381 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11382 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11383 tem, tem2);
11384 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11385 build_zero_cst (TREE_TYPE (tem)));
11386 }
11387 /* Fold ~X & 1 as (X & 1) == 0. */
11388 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11389 && integer_onep (arg1))
11390 {
11391 tree tem2;
11392 tem = TREE_OPERAND (arg0, 0);
11393 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11394 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11395 tem, tem2);
11396 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11397 build_zero_cst (TREE_TYPE (tem)));
11398 }
11399 /* Fold !X & 1 as X == 0. */
11400 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11401 && integer_onep (arg1))
11402 {
11403 tem = TREE_OPERAND (arg0, 0);
11404 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11405 build_zero_cst (TREE_TYPE (tem)));
11406 }
11407
11408 /* Fold (X ^ Y) & Y as ~X & Y. */
11409 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11410 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11411 {
11412 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11413 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11414 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11415 fold_convert_loc (loc, type, arg1));
11416 }
11417 /* Fold (X ^ Y) & X as ~Y & X. */
11418 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11419 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11420 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11421 {
11422 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11423 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11424 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11425 fold_convert_loc (loc, type, arg1));
11426 }
11427 /* Fold X & (X ^ Y) as X & ~Y. */
11428 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11429 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11430 {
11431 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11432 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11433 fold_convert_loc (loc, type, arg0),
11434 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11435 }
11436 /* Fold X & (Y ^ X) as ~Y & X. */
11437 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11438 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11439 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11440 {
11441 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11442 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11443 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11444 fold_convert_loc (loc, type, arg0));
11445 }
11446
11447 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11448 multiple of 1 << CST. */
11449 if (TREE_CODE (arg1) == INTEGER_CST)
11450 {
11451 double_int cst1 = tree_to_double_int (arg1);
11452 double_int ncst1 = double_int_ext (double_int_neg (cst1),
11453 TYPE_PRECISION (TREE_TYPE (arg1)),
11454 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11455 if (double_int_equal_p (double_int_and (cst1, ncst1), ncst1)
11456 && multiple_of_p (type, arg0,
11457 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11458 return fold_convert_loc (loc, type, arg0);
11459 }
11460
11461 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11462 bits from CST2. */
11463 if (TREE_CODE (arg1) == INTEGER_CST
11464 && TREE_CODE (arg0) == MULT_EXPR
11465 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11466 {
11467 int arg1tz
11468 = double_int_ctz (tree_to_double_int (TREE_OPERAND (arg0, 1)));
11469 if (arg1tz > 0)
11470 {
11471 double_int arg1mask, masked;
11472 arg1mask = double_int_not (double_int_mask (arg1tz));
11473 arg1mask = double_int_ext (arg1mask, TYPE_PRECISION (type),
11474 TYPE_UNSIGNED (type));
11475 masked = double_int_and (arg1mask, tree_to_double_int (arg1));
11476 if (double_int_zero_p (masked))
11477 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11478 arg0, arg1);
11479 else if (!double_int_equal_p (masked, tree_to_double_int (arg1)))
11480 return fold_build2_loc (loc, code, type, op0,
11481 double_int_to_tree (type, masked));
11482 }
11483 }
11484
11485 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11486 ((A & N) + B) & M -> (A + B) & M
11487 Similarly if (N & M) == 0,
11488 ((A | N) + B) & M -> (A + B) & M
11489 and for - instead of + (or unary - instead of +)
11490 and/or ^ instead of |.
11491 If B is constant and (B & M) == 0, fold into A & M. */
11492 if (host_integerp (arg1, 1))
11493 {
11494 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11495 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11496 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11497 && (TREE_CODE (arg0) == PLUS_EXPR
11498 || TREE_CODE (arg0) == MINUS_EXPR
11499 || TREE_CODE (arg0) == NEGATE_EXPR)
11500 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11501 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11502 {
11503 tree pmop[2];
11504 int which = 0;
11505 unsigned HOST_WIDE_INT cst0;
11506
11507 /* Now we know that arg0 is (C + D) or (C - D) or
11508 -C and arg1 (M) is == (1LL << cst) - 1.
11509 Store C into PMOP[0] and D into PMOP[1]. */
11510 pmop[0] = TREE_OPERAND (arg0, 0);
11511 pmop[1] = NULL;
11512 if (TREE_CODE (arg0) != NEGATE_EXPR)
11513 {
11514 pmop[1] = TREE_OPERAND (arg0, 1);
11515 which = 1;
11516 }
11517
11518 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11519 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11520 & cst1) != cst1)
11521 which = -1;
11522
11523 for (; which >= 0; which--)
11524 switch (TREE_CODE (pmop[which]))
11525 {
11526 case BIT_AND_EXPR:
11527 case BIT_IOR_EXPR:
11528 case BIT_XOR_EXPR:
11529 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11530 != INTEGER_CST)
11531 break;
11532 /* tree_low_cst not used, because we don't care about
11533 the upper bits. */
11534 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11535 cst0 &= cst1;
11536 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11537 {
11538 if (cst0 != cst1)
11539 break;
11540 }
11541 else if (cst0 != 0)
11542 break;
11543 /* If C or D is of the form (A & N) where
11544 (N & M) == M, or of the form (A | N) or
11545 (A ^ N) where (N & M) == 0, replace it with A. */
11546 pmop[which] = TREE_OPERAND (pmop[which], 0);
11547 break;
11548 case INTEGER_CST:
11549 /* If C or D is a N where (N & M) == 0, it can be
11550 omitted (assumed 0). */
11551 if ((TREE_CODE (arg0) == PLUS_EXPR
11552 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11553 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11554 pmop[which] = NULL;
11555 break;
11556 default:
11557 break;
11558 }
11559
11560 /* Only build anything new if we optimized one or both arguments
11561 above. */
11562 if (pmop[0] != TREE_OPERAND (arg0, 0)
11563 || (TREE_CODE (arg0) != NEGATE_EXPR
11564 && pmop[1] != TREE_OPERAND (arg0, 1)))
11565 {
11566 tree utype = TREE_TYPE (arg0);
11567 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11568 {
11569 /* Perform the operations in a type that has defined
11570 overflow behavior. */
11571 utype = unsigned_type_for (TREE_TYPE (arg0));
11572 if (pmop[0] != NULL)
11573 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11574 if (pmop[1] != NULL)
11575 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11576 }
11577
11578 if (TREE_CODE (arg0) == NEGATE_EXPR)
11579 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11580 else if (TREE_CODE (arg0) == PLUS_EXPR)
11581 {
11582 if (pmop[0] != NULL && pmop[1] != NULL)
11583 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11584 pmop[0], pmop[1]);
11585 else if (pmop[0] != NULL)
11586 tem = pmop[0];
11587 else if (pmop[1] != NULL)
11588 tem = pmop[1];
11589 else
11590 return build_int_cst (type, 0);
11591 }
11592 else if (pmop[0] == NULL)
11593 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11594 else
11595 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11596 pmop[0], pmop[1]);
11597 /* TEM is now the new binary +, - or unary - replacement. */
11598 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11599 fold_convert_loc (loc, utype, arg1));
11600 return fold_convert_loc (loc, type, tem);
11601 }
11602 }
11603 }
11604
11605 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11606 if (t1 != NULL_TREE)
11607 return t1;
11608 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11609 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11610 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11611 {
11612 unsigned int prec
11613 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11614
11615 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11616 && (~TREE_INT_CST_LOW (arg1)
11617 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11618 return
11619 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11620 }
11621
11622 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11623
11624 This results in more efficient code for machines without a NOR
11625 instruction. Combine will canonicalize to the first form
11626 which will allow use of NOR instructions provided by the
11627 backend if they exist. */
11628 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11629 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11630 {
11631 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11632 build2 (BIT_IOR_EXPR, type,
11633 fold_convert_loc (loc, type,
11634 TREE_OPERAND (arg0, 0)),
11635 fold_convert_loc (loc, type,
11636 TREE_OPERAND (arg1, 0))));
11637 }
11638
11639 /* If arg0 is derived from the address of an object or function, we may
11640 be able to fold this expression using the object or function's
11641 alignment. */
11642 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11643 {
11644 unsigned HOST_WIDE_INT modulus, residue;
11645 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11646
11647 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11648 integer_onep (arg1));
11649
11650 /* This works because modulus is a power of 2. If this weren't the
11651 case, we'd have to replace it by its greatest power-of-2
11652 divisor: modulus & -modulus. */
11653 if (low < modulus)
11654 return build_int_cst (type, residue & low);
11655 }
11656
11657 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11658 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11659 if the new mask might be further optimized. */
11660 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11661 || TREE_CODE (arg0) == RSHIFT_EXPR)
11662 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11663 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11664 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11665 < TYPE_PRECISION (TREE_TYPE (arg0))
11666 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11667 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11668 {
11669 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11670 unsigned HOST_WIDE_INT mask
11671 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11672 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11673 tree shift_type = TREE_TYPE (arg0);
11674
11675 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11676 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11677 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11678 && TYPE_PRECISION (TREE_TYPE (arg0))
11679 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11680 {
11681 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11682 tree arg00 = TREE_OPERAND (arg0, 0);
11683 /* See if more bits can be proven as zero because of
11684 zero extension. */
11685 if (TREE_CODE (arg00) == NOP_EXPR
11686 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11687 {
11688 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11689 if (TYPE_PRECISION (inner_type)
11690 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11691 && TYPE_PRECISION (inner_type) < prec)
11692 {
11693 prec = TYPE_PRECISION (inner_type);
11694 /* See if we can shorten the right shift. */
11695 if (shiftc < prec)
11696 shift_type = inner_type;
11697 }
11698 }
11699 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11700 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11701 zerobits <<= prec - shiftc;
11702 /* For arithmetic shift if sign bit could be set, zerobits
11703 can contain actually sign bits, so no transformation is
11704 possible, unless MASK masks them all away. In that
11705 case the shift needs to be converted into logical shift. */
11706 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11707 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11708 {
11709 if ((mask & zerobits) == 0)
11710 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11711 else
11712 zerobits = 0;
11713 }
11714 }
11715
11716 /* ((X << 16) & 0xff00) is (X, 0). */
11717 if ((mask & zerobits) == mask)
11718 return omit_one_operand_loc (loc, type,
11719 build_int_cst (type, 0), arg0);
11720
11721 newmask = mask | zerobits;
11722 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11723 {
11724 unsigned int prec;
11725
11726 /* Only do the transformation if NEWMASK is some integer
11727 mode's mask. */
11728 for (prec = BITS_PER_UNIT;
11729 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11730 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11731 break;
11732 if (prec < HOST_BITS_PER_WIDE_INT
11733 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11734 {
11735 tree newmaskt;
11736
11737 if (shift_type != TREE_TYPE (arg0))
11738 {
11739 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11740 fold_convert_loc (loc, shift_type,
11741 TREE_OPERAND (arg0, 0)),
11742 TREE_OPERAND (arg0, 1));
11743 tem = fold_convert_loc (loc, type, tem);
11744 }
11745 else
11746 tem = op0;
11747 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11748 if (!tree_int_cst_equal (newmaskt, arg1))
11749 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11750 }
11751 }
11752 }
11753
11754 goto associate;
11755
11756 case RDIV_EXPR:
11757 /* Don't touch a floating-point divide by zero unless the mode
11758 of the constant can represent infinity. */
11759 if (TREE_CODE (arg1) == REAL_CST
11760 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11761 && real_zerop (arg1))
11762 return NULL_TREE;
11763
11764 /* Optimize A / A to 1.0 if we don't care about
11765 NaNs or Infinities. Skip the transformation
11766 for non-real operands. */
11767 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11768 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11769 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11770 && operand_equal_p (arg0, arg1, 0))
11771 {
11772 tree r = build_real (TREE_TYPE (arg0), dconst1);
11773
11774 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11775 }
11776
11777 /* The complex version of the above A / A optimization. */
11778 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11779 && operand_equal_p (arg0, arg1, 0))
11780 {
11781 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11782 if (! HONOR_NANS (TYPE_MODE (elem_type))
11783 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11784 {
11785 tree r = build_real (elem_type, dconst1);
11786 /* omit_two_operands will call fold_convert for us. */
11787 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11788 }
11789 }
11790
11791 /* (-A) / (-B) -> A / B */
11792 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11793 return fold_build2_loc (loc, RDIV_EXPR, type,
11794 TREE_OPERAND (arg0, 0),
11795 negate_expr (arg1));
11796 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11797 return fold_build2_loc (loc, RDIV_EXPR, type,
11798 negate_expr (arg0),
11799 TREE_OPERAND (arg1, 0));
11800
11801 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11802 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11803 && real_onep (arg1))
11804 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11805
11806 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11807 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11808 && real_minus_onep (arg1))
11809 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11810 negate_expr (arg0)));
11811
11812 /* If ARG1 is a constant, we can convert this to a multiply by the
11813 reciprocal. This does not have the same rounding properties,
11814 so only do this if -freciprocal-math. We can actually
11815 always safely do it if ARG1 is a power of two, but it's hard to
11816 tell if it is or not in a portable manner. */
11817 if (optimize
11818 && (TREE_CODE (arg1) == REAL_CST
11819 || (TREE_CODE (arg1) == COMPLEX_CST
11820 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11821 || (TREE_CODE (arg1) == VECTOR_CST
11822 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11823 {
11824 if (flag_reciprocal_math
11825 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11826 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11827 /* Find the reciprocal if optimizing and the result is exact.
11828 TODO: Complex reciprocal not implemented. */
11829 if (TREE_CODE (arg1) != COMPLEX_CST)
11830 {
11831 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11832
11833 if (inverse)
11834 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11835 }
11836 }
11837 /* Convert A/B/C to A/(B*C). */
11838 if (flag_reciprocal_math
11839 && TREE_CODE (arg0) == RDIV_EXPR)
11840 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11841 fold_build2_loc (loc, MULT_EXPR, type,
11842 TREE_OPERAND (arg0, 1), arg1));
11843
11844 /* Convert A/(B/C) to (A/B)*C. */
11845 if (flag_reciprocal_math
11846 && TREE_CODE (arg1) == RDIV_EXPR)
11847 return fold_build2_loc (loc, MULT_EXPR, type,
11848 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11849 TREE_OPERAND (arg1, 0)),
11850 TREE_OPERAND (arg1, 1));
11851
11852 /* Convert C1/(X*C2) into (C1/C2)/X. */
11853 if (flag_reciprocal_math
11854 && TREE_CODE (arg1) == MULT_EXPR
11855 && TREE_CODE (arg0) == REAL_CST
11856 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11857 {
11858 tree tem = const_binop (RDIV_EXPR, arg0,
11859 TREE_OPERAND (arg1, 1));
11860 if (tem)
11861 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11862 TREE_OPERAND (arg1, 0));
11863 }
11864
11865 if (flag_unsafe_math_optimizations)
11866 {
11867 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11868 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11869
11870 /* Optimize sin(x)/cos(x) as tan(x). */
11871 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11872 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11873 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11874 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11875 CALL_EXPR_ARG (arg1, 0), 0))
11876 {
11877 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11878
11879 if (tanfn != NULL_TREE)
11880 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11881 }
11882
11883 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11884 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11885 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11886 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11887 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11888 CALL_EXPR_ARG (arg1, 0), 0))
11889 {
11890 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11891
11892 if (tanfn != NULL_TREE)
11893 {
11894 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11895 CALL_EXPR_ARG (arg0, 0));
11896 return fold_build2_loc (loc, RDIV_EXPR, type,
11897 build_real (type, dconst1), tmp);
11898 }
11899 }
11900
11901 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11902 NaNs or Infinities. */
11903 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11904 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11905 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11906 {
11907 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11908 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11909
11910 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11911 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11912 && operand_equal_p (arg00, arg01, 0))
11913 {
11914 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11915
11916 if (cosfn != NULL_TREE)
11917 return build_call_expr_loc (loc, cosfn, 1, arg00);
11918 }
11919 }
11920
11921 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11922 NaNs or Infinities. */
11923 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11924 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11925 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11926 {
11927 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11928 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11929
11930 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11931 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11932 && operand_equal_p (arg00, arg01, 0))
11933 {
11934 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11935
11936 if (cosfn != NULL_TREE)
11937 {
11938 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11939 return fold_build2_loc (loc, RDIV_EXPR, type,
11940 build_real (type, dconst1),
11941 tmp);
11942 }
11943 }
11944 }
11945
11946 /* Optimize pow(x,c)/x as pow(x,c-1). */
11947 if (fcode0 == BUILT_IN_POW
11948 || fcode0 == BUILT_IN_POWF
11949 || fcode0 == BUILT_IN_POWL)
11950 {
11951 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11952 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11953 if (TREE_CODE (arg01) == REAL_CST
11954 && !TREE_OVERFLOW (arg01)
11955 && operand_equal_p (arg1, arg00, 0))
11956 {
11957 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11958 REAL_VALUE_TYPE c;
11959 tree arg;
11960
11961 c = TREE_REAL_CST (arg01);
11962 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11963 arg = build_real (type, c);
11964 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11965 }
11966 }
11967
11968 /* Optimize a/root(b/c) into a*root(c/b). */
11969 if (BUILTIN_ROOT_P (fcode1))
11970 {
11971 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11972
11973 if (TREE_CODE (rootarg) == RDIV_EXPR)
11974 {
11975 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11976 tree b = TREE_OPERAND (rootarg, 0);
11977 tree c = TREE_OPERAND (rootarg, 1);
11978
11979 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11980
11981 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11982 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11983 }
11984 }
11985
11986 /* Optimize x/expN(y) into x*expN(-y). */
11987 if (BUILTIN_EXPONENT_P (fcode1))
11988 {
11989 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11990 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11991 arg1 = build_call_expr_loc (loc,
11992 expfn, 1,
11993 fold_convert_loc (loc, type, arg));
11994 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11995 }
11996
11997 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11998 if (fcode1 == BUILT_IN_POW
11999 || fcode1 == BUILT_IN_POWF
12000 || fcode1 == BUILT_IN_POWL)
12001 {
12002 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12003 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12004 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12005 tree neg11 = fold_convert_loc (loc, type,
12006 negate_expr (arg11));
12007 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12008 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12009 }
12010 }
12011 return NULL_TREE;
12012
12013 case TRUNC_DIV_EXPR:
12014 /* Optimize (X & (-A)) / A where A is a power of 2,
12015 to X >> log2(A) */
12016 if (TREE_CODE (arg0) == BIT_AND_EXPR
12017 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12018 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12019 {
12020 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12021 arg1, TREE_OPERAND (arg0, 1));
12022 if (sum && integer_zerop (sum)) {
12023 unsigned long pow2;
12024
12025 if (TREE_INT_CST_LOW (arg1))
12026 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12027 else
12028 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12029 + HOST_BITS_PER_WIDE_INT;
12030
12031 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12032 TREE_OPERAND (arg0, 0),
12033 build_int_cst (integer_type_node, pow2));
12034 }
12035 }
12036
12037 /* Fall through */
12038
12039 case FLOOR_DIV_EXPR:
12040 /* Simplify A / (B << N) where A and B are positive and B is
12041 a power of 2, to A >> (N + log2(B)). */
12042 strict_overflow_p = false;
12043 if (TREE_CODE (arg1) == LSHIFT_EXPR
12044 && (TYPE_UNSIGNED (type)
12045 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12046 {
12047 tree sval = TREE_OPERAND (arg1, 0);
12048 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12049 {
12050 tree sh_cnt = TREE_OPERAND (arg1, 1);
12051 unsigned long pow2;
12052
12053 if (TREE_INT_CST_LOW (sval))
12054 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12055 else
12056 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12057 + HOST_BITS_PER_WIDE_INT;
12058
12059 if (strict_overflow_p)
12060 fold_overflow_warning (("assuming signed overflow does not "
12061 "occur when simplifying A / (B << N)"),
12062 WARN_STRICT_OVERFLOW_MISC);
12063
12064 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12065 sh_cnt,
12066 build_int_cst (TREE_TYPE (sh_cnt),
12067 pow2));
12068 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12069 fold_convert_loc (loc, type, arg0), sh_cnt);
12070 }
12071 }
12072
12073 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12074 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12075 if (INTEGRAL_TYPE_P (type)
12076 && TYPE_UNSIGNED (type)
12077 && code == FLOOR_DIV_EXPR)
12078 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12079
12080 /* Fall through */
12081
12082 case ROUND_DIV_EXPR:
12083 case CEIL_DIV_EXPR:
12084 case EXACT_DIV_EXPR:
12085 if (integer_onep (arg1))
12086 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12087 if (integer_zerop (arg1))
12088 return NULL_TREE;
12089 /* X / -1 is -X. */
12090 if (!TYPE_UNSIGNED (type)
12091 && TREE_CODE (arg1) == INTEGER_CST
12092 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12093 && TREE_INT_CST_HIGH (arg1) == -1)
12094 return fold_convert_loc (loc, type, negate_expr (arg0));
12095
12096 /* Convert -A / -B to A / B when the type is signed and overflow is
12097 undefined. */
12098 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12099 && TREE_CODE (arg0) == NEGATE_EXPR
12100 && negate_expr_p (arg1))
12101 {
12102 if (INTEGRAL_TYPE_P (type))
12103 fold_overflow_warning (("assuming signed overflow does not occur "
12104 "when distributing negation across "
12105 "division"),
12106 WARN_STRICT_OVERFLOW_MISC);
12107 return fold_build2_loc (loc, code, type,
12108 fold_convert_loc (loc, type,
12109 TREE_OPERAND (arg0, 0)),
12110 fold_convert_loc (loc, type,
12111 negate_expr (arg1)));
12112 }
12113 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12114 && TREE_CODE (arg1) == NEGATE_EXPR
12115 && negate_expr_p (arg0))
12116 {
12117 if (INTEGRAL_TYPE_P (type))
12118 fold_overflow_warning (("assuming signed overflow does not occur "
12119 "when distributing negation across "
12120 "division"),
12121 WARN_STRICT_OVERFLOW_MISC);
12122 return fold_build2_loc (loc, code, type,
12123 fold_convert_loc (loc, type,
12124 negate_expr (arg0)),
12125 fold_convert_loc (loc, type,
12126 TREE_OPERAND (arg1, 0)));
12127 }
12128
12129 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12130 operation, EXACT_DIV_EXPR.
12131
12132 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12133 At one time others generated faster code, it's not clear if they do
12134 after the last round to changes to the DIV code in expmed.c. */
12135 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12136 && multiple_of_p (type, arg0, arg1))
12137 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12138
12139 strict_overflow_p = false;
12140 if (TREE_CODE (arg1) == INTEGER_CST
12141 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12142 &strict_overflow_p)))
12143 {
12144 if (strict_overflow_p)
12145 fold_overflow_warning (("assuming signed overflow does not occur "
12146 "when simplifying division"),
12147 WARN_STRICT_OVERFLOW_MISC);
12148 return fold_convert_loc (loc, type, tem);
12149 }
12150
12151 return NULL_TREE;
12152
12153 case CEIL_MOD_EXPR:
12154 case FLOOR_MOD_EXPR:
12155 case ROUND_MOD_EXPR:
12156 case TRUNC_MOD_EXPR:
12157 /* X % 1 is always zero, but be sure to preserve any side
12158 effects in X. */
12159 if (integer_onep (arg1))
12160 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12161
12162 /* X % 0, return X % 0 unchanged so that we can get the
12163 proper warnings and errors. */
12164 if (integer_zerop (arg1))
12165 return NULL_TREE;
12166
12167 /* 0 % X is always zero, but be sure to preserve any side
12168 effects in X. Place this after checking for X == 0. */
12169 if (integer_zerop (arg0))
12170 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12171
12172 /* X % -1 is zero. */
12173 if (!TYPE_UNSIGNED (type)
12174 && TREE_CODE (arg1) == INTEGER_CST
12175 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12176 && TREE_INT_CST_HIGH (arg1) == -1)
12177 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12178
12179 /* X % -C is the same as X % C. */
12180 if (code == TRUNC_MOD_EXPR
12181 && !TYPE_UNSIGNED (type)
12182 && TREE_CODE (arg1) == INTEGER_CST
12183 && !TREE_OVERFLOW (arg1)
12184 && TREE_INT_CST_HIGH (arg1) < 0
12185 && !TYPE_OVERFLOW_TRAPS (type)
12186 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12187 && !sign_bit_p (arg1, arg1))
12188 return fold_build2_loc (loc, code, type,
12189 fold_convert_loc (loc, type, arg0),
12190 fold_convert_loc (loc, type,
12191 negate_expr (arg1)));
12192
12193 /* X % -Y is the same as X % Y. */
12194 if (code == TRUNC_MOD_EXPR
12195 && !TYPE_UNSIGNED (type)
12196 && TREE_CODE (arg1) == NEGATE_EXPR
12197 && !TYPE_OVERFLOW_TRAPS (type))
12198 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12199 fold_convert_loc (loc, type,
12200 TREE_OPERAND (arg1, 0)));
12201
12202 strict_overflow_p = false;
12203 if (TREE_CODE (arg1) == INTEGER_CST
12204 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12205 &strict_overflow_p)))
12206 {
12207 if (strict_overflow_p)
12208 fold_overflow_warning (("assuming signed overflow does not occur "
12209 "when simplifying modulus"),
12210 WARN_STRICT_OVERFLOW_MISC);
12211 return fold_convert_loc (loc, type, tem);
12212 }
12213
12214 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12215 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12216 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12217 && (TYPE_UNSIGNED (type)
12218 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12219 {
12220 tree c = arg1;
12221 /* Also optimize A % (C << N) where C is a power of 2,
12222 to A & ((C << N) - 1). */
12223 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12224 c = TREE_OPERAND (arg1, 0);
12225
12226 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12227 {
12228 tree mask
12229 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12230 build_int_cst (TREE_TYPE (arg1), 1));
12231 if (strict_overflow_p)
12232 fold_overflow_warning (("assuming signed overflow does not "
12233 "occur when simplifying "
12234 "X % (power of two)"),
12235 WARN_STRICT_OVERFLOW_MISC);
12236 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12237 fold_convert_loc (loc, type, arg0),
12238 fold_convert_loc (loc, type, mask));
12239 }
12240 }
12241
12242 return NULL_TREE;
12243
12244 case LROTATE_EXPR:
12245 case RROTATE_EXPR:
12246 if (integer_all_onesp (arg0))
12247 return omit_one_operand_loc (loc, type, arg0, arg1);
12248 goto shift;
12249
12250 case RSHIFT_EXPR:
12251 /* Optimize -1 >> x for arithmetic right shifts. */
12252 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12253 && tree_expr_nonnegative_p (arg1))
12254 return omit_one_operand_loc (loc, type, arg0, arg1);
12255 /* ... fall through ... */
12256
12257 case LSHIFT_EXPR:
12258 shift:
12259 if (integer_zerop (arg1))
12260 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12261 if (integer_zerop (arg0))
12262 return omit_one_operand_loc (loc, type, arg0, arg1);
12263
12264 /* Since negative shift count is not well-defined,
12265 don't try to compute it in the compiler. */
12266 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12267 return NULL_TREE;
12268
12269 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12270 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12271 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12272 && host_integerp (TREE_OPERAND (arg0, 1), false)
12273 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12274 {
12275 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12276 + TREE_INT_CST_LOW (arg1));
12277
12278 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12279 being well defined. */
12280 if (low >= TYPE_PRECISION (type))
12281 {
12282 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12283 low = low % TYPE_PRECISION (type);
12284 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12285 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12286 TREE_OPERAND (arg0, 0));
12287 else
12288 low = TYPE_PRECISION (type) - 1;
12289 }
12290
12291 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12292 build_int_cst (type, low));
12293 }
12294
12295 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12296 into x & ((unsigned)-1 >> c) for unsigned types. */
12297 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12298 || (TYPE_UNSIGNED (type)
12299 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12300 && host_integerp (arg1, false)
12301 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12302 && host_integerp (TREE_OPERAND (arg0, 1), false)
12303 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12304 {
12305 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12306 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12307 tree lshift;
12308 tree arg00;
12309
12310 if (low0 == low1)
12311 {
12312 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12313
12314 lshift = build_int_cst (type, -1);
12315 lshift = int_const_binop (code, lshift, arg1);
12316
12317 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12318 }
12319 }
12320
12321 /* Rewrite an LROTATE_EXPR by a constant into an
12322 RROTATE_EXPR by a new constant. */
12323 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12324 {
12325 tree tem = build_int_cst (TREE_TYPE (arg1),
12326 TYPE_PRECISION (type));
12327 tem = const_binop (MINUS_EXPR, tem, arg1);
12328 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12329 }
12330
12331 /* If we have a rotate of a bit operation with the rotate count and
12332 the second operand of the bit operation both constant,
12333 permute the two operations. */
12334 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12335 && (TREE_CODE (arg0) == BIT_AND_EXPR
12336 || TREE_CODE (arg0) == BIT_IOR_EXPR
12337 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12338 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12339 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12340 fold_build2_loc (loc, code, type,
12341 TREE_OPERAND (arg0, 0), arg1),
12342 fold_build2_loc (loc, code, type,
12343 TREE_OPERAND (arg0, 1), arg1));
12344
12345 /* Two consecutive rotates adding up to the precision of the
12346 type can be ignored. */
12347 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12348 && TREE_CODE (arg0) == RROTATE_EXPR
12349 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12350 && TREE_INT_CST_HIGH (arg1) == 0
12351 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12352 && ((TREE_INT_CST_LOW (arg1)
12353 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12354 == (unsigned int) TYPE_PRECISION (type)))
12355 return TREE_OPERAND (arg0, 0);
12356
12357 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12358 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12359 if the latter can be further optimized. */
12360 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12361 && TREE_CODE (arg0) == BIT_AND_EXPR
12362 && TREE_CODE (arg1) == INTEGER_CST
12363 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12364 {
12365 tree mask = fold_build2_loc (loc, code, type,
12366 fold_convert_loc (loc, type,
12367 TREE_OPERAND (arg0, 1)),
12368 arg1);
12369 tree shift = fold_build2_loc (loc, code, type,
12370 fold_convert_loc (loc, type,
12371 TREE_OPERAND (arg0, 0)),
12372 arg1);
12373 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12374 if (tem)
12375 return tem;
12376 }
12377
12378 return NULL_TREE;
12379
12380 case MIN_EXPR:
12381 if (operand_equal_p (arg0, arg1, 0))
12382 return omit_one_operand_loc (loc, type, arg0, arg1);
12383 if (INTEGRAL_TYPE_P (type)
12384 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12385 return omit_one_operand_loc (loc, type, arg1, arg0);
12386 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12387 if (tem)
12388 return tem;
12389 goto associate;
12390
12391 case MAX_EXPR:
12392 if (operand_equal_p (arg0, arg1, 0))
12393 return omit_one_operand_loc (loc, type, arg0, arg1);
12394 if (INTEGRAL_TYPE_P (type)
12395 && TYPE_MAX_VALUE (type)
12396 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12397 return omit_one_operand_loc (loc, type, arg1, arg0);
12398 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12399 if (tem)
12400 return tem;
12401 goto associate;
12402
12403 case TRUTH_ANDIF_EXPR:
12404 /* Note that the operands of this must be ints
12405 and their values must be 0 or 1.
12406 ("true" is a fixed value perhaps depending on the language.) */
12407 /* If first arg is constant zero, return it. */
12408 if (integer_zerop (arg0))
12409 return fold_convert_loc (loc, type, arg0);
12410 case TRUTH_AND_EXPR:
12411 /* If either arg is constant true, drop it. */
12412 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12413 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12414 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12415 /* Preserve sequence points. */
12416 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12417 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12418 /* If second arg is constant zero, result is zero, but first arg
12419 must be evaluated. */
12420 if (integer_zerop (arg1))
12421 return omit_one_operand_loc (loc, type, arg1, arg0);
12422 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12423 case will be handled here. */
12424 if (integer_zerop (arg0))
12425 return omit_one_operand_loc (loc, type, arg0, arg1);
12426
12427 /* !X && X is always false. */
12428 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12429 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12430 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12431 /* X && !X is always false. */
12432 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12433 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12434 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12435
12436 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12437 means A >= Y && A != MAX, but in this case we know that
12438 A < X <= MAX. */
12439
12440 if (!TREE_SIDE_EFFECTS (arg0)
12441 && !TREE_SIDE_EFFECTS (arg1))
12442 {
12443 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12444 if (tem && !operand_equal_p (tem, arg0, 0))
12445 return fold_build2_loc (loc, code, type, tem, arg1);
12446
12447 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12448 if (tem && !operand_equal_p (tem, arg1, 0))
12449 return fold_build2_loc (loc, code, type, arg0, tem);
12450 }
12451
12452 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12453 != NULL_TREE)
12454 return tem;
12455
12456 return NULL_TREE;
12457
12458 case TRUTH_ORIF_EXPR:
12459 /* Note that the operands of this must be ints
12460 and their values must be 0 or true.
12461 ("true" is a fixed value perhaps depending on the language.) */
12462 /* If first arg is constant true, return it. */
12463 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12464 return fold_convert_loc (loc, type, arg0);
12465 case TRUTH_OR_EXPR:
12466 /* If either arg is constant zero, drop it. */
12467 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12468 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12469 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12470 /* Preserve sequence points. */
12471 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12472 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12473 /* If second arg is constant true, result is true, but we must
12474 evaluate first arg. */
12475 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12476 return omit_one_operand_loc (loc, type, arg1, arg0);
12477 /* Likewise for first arg, but note this only occurs here for
12478 TRUTH_OR_EXPR. */
12479 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12480 return omit_one_operand_loc (loc, type, arg0, arg1);
12481
12482 /* !X || X is always true. */
12483 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12484 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12485 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12486 /* X || !X is always true. */
12487 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12488 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12489 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12490
12491 /* (X && !Y) || (!X && Y) is X ^ Y */
12492 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12493 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12494 {
12495 tree a0, a1, l0, l1, n0, n1;
12496
12497 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12498 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12499
12500 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12501 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12502
12503 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12504 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12505
12506 if ((operand_equal_p (n0, a0, 0)
12507 && operand_equal_p (n1, a1, 0))
12508 || (operand_equal_p (n0, a1, 0)
12509 && operand_equal_p (n1, a0, 0)))
12510 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12511 }
12512
12513 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12514 != NULL_TREE)
12515 return tem;
12516
12517 return NULL_TREE;
12518
12519 case TRUTH_XOR_EXPR:
12520 /* If the second arg is constant zero, drop it. */
12521 if (integer_zerop (arg1))
12522 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12523 /* If the second arg is constant true, this is a logical inversion. */
12524 if (integer_onep (arg1))
12525 {
12526 /* Only call invert_truthvalue if operand is a truth value. */
12527 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12528 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12529 else
12530 tem = invert_truthvalue_loc (loc, arg0);
12531 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12532 }
12533 /* Identical arguments cancel to zero. */
12534 if (operand_equal_p (arg0, arg1, 0))
12535 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12536
12537 /* !X ^ X is always true. */
12538 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12539 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12540 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12541
12542 /* X ^ !X is always true. */
12543 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12544 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12545 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12546
12547 return NULL_TREE;
12548
12549 case EQ_EXPR:
12550 case NE_EXPR:
12551 STRIP_NOPS (arg0);
12552 STRIP_NOPS (arg1);
12553
12554 tem = fold_comparison (loc, code, type, op0, op1);
12555 if (tem != NULL_TREE)
12556 return tem;
12557
12558 /* bool_var != 0 becomes bool_var. */
12559 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12560 && code == NE_EXPR)
12561 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12562
12563 /* bool_var == 1 becomes bool_var. */
12564 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12565 && code == EQ_EXPR)
12566 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12567
12568 /* bool_var != 1 becomes !bool_var. */
12569 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12570 && code == NE_EXPR)
12571 return fold_convert_loc (loc, type,
12572 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12573 TREE_TYPE (arg0), arg0));
12574
12575 /* bool_var == 0 becomes !bool_var. */
12576 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12577 && code == EQ_EXPR)
12578 return fold_convert_loc (loc, type,
12579 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12580 TREE_TYPE (arg0), arg0));
12581
12582 /* !exp != 0 becomes !exp */
12583 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12584 && code == NE_EXPR)
12585 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12586
12587 /* If this is an equality comparison of the address of two non-weak,
12588 unaliased symbols neither of which are extern (since we do not
12589 have access to attributes for externs), then we know the result. */
12590 if (TREE_CODE (arg0) == ADDR_EXPR
12591 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12592 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12593 && ! lookup_attribute ("alias",
12594 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12595 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12596 && TREE_CODE (arg1) == ADDR_EXPR
12597 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12598 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12599 && ! lookup_attribute ("alias",
12600 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12601 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12602 {
12603 /* We know that we're looking at the address of two
12604 non-weak, unaliased, static _DECL nodes.
12605
12606 It is both wasteful and incorrect to call operand_equal_p
12607 to compare the two ADDR_EXPR nodes. It is wasteful in that
12608 all we need to do is test pointer equality for the arguments
12609 to the two ADDR_EXPR nodes. It is incorrect to use
12610 operand_equal_p as that function is NOT equivalent to a
12611 C equality test. It can in fact return false for two
12612 objects which would test as equal using the C equality
12613 operator. */
12614 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12615 return constant_boolean_node (equal
12616 ? code == EQ_EXPR : code != EQ_EXPR,
12617 type);
12618 }
12619
12620 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12621 a MINUS_EXPR of a constant, we can convert it into a comparison with
12622 a revised constant as long as no overflow occurs. */
12623 if (TREE_CODE (arg1) == INTEGER_CST
12624 && (TREE_CODE (arg0) == PLUS_EXPR
12625 || TREE_CODE (arg0) == MINUS_EXPR)
12626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12627 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12628 ? MINUS_EXPR : PLUS_EXPR,
12629 fold_convert_loc (loc, TREE_TYPE (arg0),
12630 arg1),
12631 TREE_OPERAND (arg0, 1)))
12632 && !TREE_OVERFLOW (tem))
12633 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12634
12635 /* Similarly for a NEGATE_EXPR. */
12636 if (TREE_CODE (arg0) == NEGATE_EXPR
12637 && TREE_CODE (arg1) == INTEGER_CST
12638 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12639 arg1)))
12640 && TREE_CODE (tem) == INTEGER_CST
12641 && !TREE_OVERFLOW (tem))
12642 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12643
12644 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12645 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12646 && TREE_CODE (arg1) == INTEGER_CST
12647 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12648 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12649 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12650 fold_convert_loc (loc,
12651 TREE_TYPE (arg0),
12652 arg1),
12653 TREE_OPERAND (arg0, 1)));
12654
12655 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12656 if ((TREE_CODE (arg0) == PLUS_EXPR
12657 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12658 || TREE_CODE (arg0) == MINUS_EXPR)
12659 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12660 0)),
12661 arg1, 0)
12662 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12663 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12664 {
12665 tree val = TREE_OPERAND (arg0, 1);
12666 return omit_two_operands_loc (loc, type,
12667 fold_build2_loc (loc, code, type,
12668 val,
12669 build_int_cst (TREE_TYPE (val),
12670 0)),
12671 TREE_OPERAND (arg0, 0), arg1);
12672 }
12673
12674 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12675 if (TREE_CODE (arg0) == MINUS_EXPR
12676 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12677 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12678 1)),
12679 arg1, 0)
12680 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12681 {
12682 return omit_two_operands_loc (loc, type,
12683 code == NE_EXPR
12684 ? boolean_true_node : boolean_false_node,
12685 TREE_OPERAND (arg0, 1), arg1);
12686 }
12687
12688 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12689 for !=. Don't do this for ordered comparisons due to overflow. */
12690 if (TREE_CODE (arg0) == MINUS_EXPR
12691 && integer_zerop (arg1))
12692 return fold_build2_loc (loc, code, type,
12693 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12694
12695 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12696 if (TREE_CODE (arg0) == ABS_EXPR
12697 && (integer_zerop (arg1) || real_zerop (arg1)))
12698 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12699
12700 /* If this is an EQ or NE comparison with zero and ARG0 is
12701 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12702 two operations, but the latter can be done in one less insn
12703 on machines that have only two-operand insns or on which a
12704 constant cannot be the first operand. */
12705 if (TREE_CODE (arg0) == BIT_AND_EXPR
12706 && integer_zerop (arg1))
12707 {
12708 tree arg00 = TREE_OPERAND (arg0, 0);
12709 tree arg01 = TREE_OPERAND (arg0, 1);
12710 if (TREE_CODE (arg00) == LSHIFT_EXPR
12711 && integer_onep (TREE_OPERAND (arg00, 0)))
12712 {
12713 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12714 arg01, TREE_OPERAND (arg00, 1));
12715 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12716 build_int_cst (TREE_TYPE (arg0), 1));
12717 return fold_build2_loc (loc, code, type,
12718 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12719 arg1);
12720 }
12721 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12722 && integer_onep (TREE_OPERAND (arg01, 0)))
12723 {
12724 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12725 arg00, TREE_OPERAND (arg01, 1));
12726 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12727 build_int_cst (TREE_TYPE (arg0), 1));
12728 return fold_build2_loc (loc, code, type,
12729 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12730 arg1);
12731 }
12732 }
12733
12734 /* If this is an NE or EQ comparison of zero against the result of a
12735 signed MOD operation whose second operand is a power of 2, make
12736 the MOD operation unsigned since it is simpler and equivalent. */
12737 if (integer_zerop (arg1)
12738 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12739 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12740 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12741 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12742 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12743 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12744 {
12745 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12746 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12747 fold_convert_loc (loc, newtype,
12748 TREE_OPERAND (arg0, 0)),
12749 fold_convert_loc (loc, newtype,
12750 TREE_OPERAND (arg0, 1)));
12751
12752 return fold_build2_loc (loc, code, type, newmod,
12753 fold_convert_loc (loc, newtype, arg1));
12754 }
12755
12756 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12757 C1 is a valid shift constant, and C2 is a power of two, i.e.
12758 a single bit. */
12759 if (TREE_CODE (arg0) == BIT_AND_EXPR
12760 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12761 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12762 == INTEGER_CST
12763 && integer_pow2p (TREE_OPERAND (arg0, 1))
12764 && integer_zerop (arg1))
12765 {
12766 tree itype = TREE_TYPE (arg0);
12767 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12768 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12769
12770 /* Check for a valid shift count. */
12771 if (TREE_INT_CST_HIGH (arg001) == 0
12772 && TREE_INT_CST_LOW (arg001) < prec)
12773 {
12774 tree arg01 = TREE_OPERAND (arg0, 1);
12775 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12776 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12777 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12778 can be rewritten as (X & (C2 << C1)) != 0. */
12779 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12780 {
12781 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12782 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12783 return fold_build2_loc (loc, code, type, tem,
12784 fold_convert_loc (loc, itype, arg1));
12785 }
12786 /* Otherwise, for signed (arithmetic) shifts,
12787 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12788 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12789 else if (!TYPE_UNSIGNED (itype))
12790 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12791 arg000, build_int_cst (itype, 0));
12792 /* Otherwise, of unsigned (logical) shifts,
12793 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12794 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12795 else
12796 return omit_one_operand_loc (loc, type,
12797 code == EQ_EXPR ? integer_one_node
12798 : integer_zero_node,
12799 arg000);
12800 }
12801 }
12802
12803 /* If we have (A & C) == C where C is a power of 2, convert this into
12804 (A & C) != 0. Similarly for NE_EXPR. */
12805 if (TREE_CODE (arg0) == BIT_AND_EXPR
12806 && integer_pow2p (TREE_OPERAND (arg0, 1))
12807 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12808 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12809 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12810 integer_zero_node));
12811
12812 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12813 bit, then fold the expression into A < 0 or A >= 0. */
12814 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12815 if (tem)
12816 return tem;
12817
12818 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12819 Similarly for NE_EXPR. */
12820 if (TREE_CODE (arg0) == BIT_AND_EXPR
12821 && TREE_CODE (arg1) == INTEGER_CST
12822 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12823 {
12824 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12825 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12826 TREE_OPERAND (arg0, 1));
12827 tree dandnotc
12828 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12829 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12830 notc);
12831 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12832 if (integer_nonzerop (dandnotc))
12833 return omit_one_operand_loc (loc, type, rslt, arg0);
12834 }
12835
12836 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12837 Similarly for NE_EXPR. */
12838 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12839 && TREE_CODE (arg1) == INTEGER_CST
12840 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12841 {
12842 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12843 tree candnotd
12844 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12845 TREE_OPERAND (arg0, 1),
12846 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12847 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12848 if (integer_nonzerop (candnotd))
12849 return omit_one_operand_loc (loc, type, rslt, arg0);
12850 }
12851
12852 /* If this is a comparison of a field, we may be able to simplify it. */
12853 if ((TREE_CODE (arg0) == COMPONENT_REF
12854 || TREE_CODE (arg0) == BIT_FIELD_REF)
12855 /* Handle the constant case even without -O
12856 to make sure the warnings are given. */
12857 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12858 {
12859 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12860 if (t1)
12861 return t1;
12862 }
12863
12864 /* Optimize comparisons of strlen vs zero to a compare of the
12865 first character of the string vs zero. To wit,
12866 strlen(ptr) == 0 => *ptr == 0
12867 strlen(ptr) != 0 => *ptr != 0
12868 Other cases should reduce to one of these two (or a constant)
12869 due to the return value of strlen being unsigned. */
12870 if (TREE_CODE (arg0) == CALL_EXPR
12871 && integer_zerop (arg1))
12872 {
12873 tree fndecl = get_callee_fndecl (arg0);
12874
12875 if (fndecl
12876 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12877 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12878 && call_expr_nargs (arg0) == 1
12879 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12880 {
12881 tree iref = build_fold_indirect_ref_loc (loc,
12882 CALL_EXPR_ARG (arg0, 0));
12883 return fold_build2_loc (loc, code, type, iref,
12884 build_int_cst (TREE_TYPE (iref), 0));
12885 }
12886 }
12887
12888 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12889 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12890 if (TREE_CODE (arg0) == RSHIFT_EXPR
12891 && integer_zerop (arg1)
12892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12893 {
12894 tree arg00 = TREE_OPERAND (arg0, 0);
12895 tree arg01 = TREE_OPERAND (arg0, 1);
12896 tree itype = TREE_TYPE (arg00);
12897 if (TREE_INT_CST_HIGH (arg01) == 0
12898 && TREE_INT_CST_LOW (arg01)
12899 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12900 {
12901 if (TYPE_UNSIGNED (itype))
12902 {
12903 itype = signed_type_for (itype);
12904 arg00 = fold_convert_loc (loc, itype, arg00);
12905 }
12906 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12907 type, arg00, build_int_cst (itype, 0));
12908 }
12909 }
12910
12911 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12912 if (integer_zerop (arg1)
12913 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12914 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12915 TREE_OPERAND (arg0, 1));
12916
12917 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12918 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12919 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12920 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12921 build_zero_cst (TREE_TYPE (arg0)));
12922 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12923 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12924 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12925 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12926 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12927 build_zero_cst (TREE_TYPE (arg0)));
12928
12929 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12930 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12931 && TREE_CODE (arg1) == INTEGER_CST
12932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12933 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12934 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12935 TREE_OPERAND (arg0, 1), arg1));
12936
12937 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12938 (X & C) == 0 when C is a single bit. */
12939 if (TREE_CODE (arg0) == BIT_AND_EXPR
12940 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12941 && integer_zerop (arg1)
12942 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12943 {
12944 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12945 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12946 TREE_OPERAND (arg0, 1));
12947 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12948 type, tem,
12949 fold_convert_loc (loc, TREE_TYPE (arg0),
12950 arg1));
12951 }
12952
12953 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12954 constant C is a power of two, i.e. a single bit. */
12955 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12956 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12957 && integer_zerop (arg1)
12958 && integer_pow2p (TREE_OPERAND (arg0, 1))
12959 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12960 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12961 {
12962 tree arg00 = TREE_OPERAND (arg0, 0);
12963 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12964 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12965 }
12966
12967 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12968 when is C is a power of two, i.e. a single bit. */
12969 if (TREE_CODE (arg0) == BIT_AND_EXPR
12970 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12971 && integer_zerop (arg1)
12972 && integer_pow2p (TREE_OPERAND (arg0, 1))
12973 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12974 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12975 {
12976 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12977 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12978 arg000, TREE_OPERAND (arg0, 1));
12979 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12980 tem, build_int_cst (TREE_TYPE (tem), 0));
12981 }
12982
12983 if (integer_zerop (arg1)
12984 && tree_expr_nonzero_p (arg0))
12985 {
12986 tree res = constant_boolean_node (code==NE_EXPR, type);
12987 return omit_one_operand_loc (loc, type, res, arg0);
12988 }
12989
12990 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12991 if (TREE_CODE (arg0) == NEGATE_EXPR
12992 && TREE_CODE (arg1) == NEGATE_EXPR)
12993 return fold_build2_loc (loc, code, type,
12994 TREE_OPERAND (arg0, 0),
12995 fold_convert_loc (loc, TREE_TYPE (arg0),
12996 TREE_OPERAND (arg1, 0)));
12997
12998 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12999 if (TREE_CODE (arg0) == BIT_AND_EXPR
13000 && TREE_CODE (arg1) == BIT_AND_EXPR)
13001 {
13002 tree arg00 = TREE_OPERAND (arg0, 0);
13003 tree arg01 = TREE_OPERAND (arg0, 1);
13004 tree arg10 = TREE_OPERAND (arg1, 0);
13005 tree arg11 = TREE_OPERAND (arg1, 1);
13006 tree itype = TREE_TYPE (arg0);
13007
13008 if (operand_equal_p (arg01, arg11, 0))
13009 return fold_build2_loc (loc, code, type,
13010 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13011 fold_build2_loc (loc,
13012 BIT_XOR_EXPR, itype,
13013 arg00, arg10),
13014 arg01),
13015 build_zero_cst (itype));
13016
13017 if (operand_equal_p (arg01, arg10, 0))
13018 return fold_build2_loc (loc, code, type,
13019 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13020 fold_build2_loc (loc,
13021 BIT_XOR_EXPR, itype,
13022 arg00, arg11),
13023 arg01),
13024 build_zero_cst (itype));
13025
13026 if (operand_equal_p (arg00, arg11, 0))
13027 return fold_build2_loc (loc, code, type,
13028 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13029 fold_build2_loc (loc,
13030 BIT_XOR_EXPR, itype,
13031 arg01, arg10),
13032 arg00),
13033 build_zero_cst (itype));
13034
13035 if (operand_equal_p (arg00, arg10, 0))
13036 return fold_build2_loc (loc, code, type,
13037 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13038 fold_build2_loc (loc,
13039 BIT_XOR_EXPR, itype,
13040 arg01, arg11),
13041 arg00),
13042 build_zero_cst (itype));
13043 }
13044
13045 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13046 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13047 {
13048 tree arg00 = TREE_OPERAND (arg0, 0);
13049 tree arg01 = TREE_OPERAND (arg0, 1);
13050 tree arg10 = TREE_OPERAND (arg1, 0);
13051 tree arg11 = TREE_OPERAND (arg1, 1);
13052 tree itype = TREE_TYPE (arg0);
13053
13054 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13055 operand_equal_p guarantees no side-effects so we don't need
13056 to use omit_one_operand on Z. */
13057 if (operand_equal_p (arg01, arg11, 0))
13058 return fold_build2_loc (loc, code, type, arg00,
13059 fold_convert_loc (loc, TREE_TYPE (arg00),
13060 arg10));
13061 if (operand_equal_p (arg01, arg10, 0))
13062 return fold_build2_loc (loc, code, type, arg00,
13063 fold_convert_loc (loc, TREE_TYPE (arg00),
13064 arg11));
13065 if (operand_equal_p (arg00, arg11, 0))
13066 return fold_build2_loc (loc, code, type, arg01,
13067 fold_convert_loc (loc, TREE_TYPE (arg01),
13068 arg10));
13069 if (operand_equal_p (arg00, arg10, 0))
13070 return fold_build2_loc (loc, code, type, arg01,
13071 fold_convert_loc (loc, TREE_TYPE (arg01),
13072 arg11));
13073
13074 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13075 if (TREE_CODE (arg01) == INTEGER_CST
13076 && TREE_CODE (arg11) == INTEGER_CST)
13077 {
13078 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13079 fold_convert_loc (loc, itype, arg11));
13080 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13081 return fold_build2_loc (loc, code, type, tem,
13082 fold_convert_loc (loc, itype, arg10));
13083 }
13084 }
13085
13086 /* Attempt to simplify equality/inequality comparisons of complex
13087 values. Only lower the comparison if the result is known or
13088 can be simplified to a single scalar comparison. */
13089 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13090 || TREE_CODE (arg0) == COMPLEX_CST)
13091 && (TREE_CODE (arg1) == COMPLEX_EXPR
13092 || TREE_CODE (arg1) == COMPLEX_CST))
13093 {
13094 tree real0, imag0, real1, imag1;
13095 tree rcond, icond;
13096
13097 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13098 {
13099 real0 = TREE_OPERAND (arg0, 0);
13100 imag0 = TREE_OPERAND (arg0, 1);
13101 }
13102 else
13103 {
13104 real0 = TREE_REALPART (arg0);
13105 imag0 = TREE_IMAGPART (arg0);
13106 }
13107
13108 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13109 {
13110 real1 = TREE_OPERAND (arg1, 0);
13111 imag1 = TREE_OPERAND (arg1, 1);
13112 }
13113 else
13114 {
13115 real1 = TREE_REALPART (arg1);
13116 imag1 = TREE_IMAGPART (arg1);
13117 }
13118
13119 rcond = fold_binary_loc (loc, code, type, real0, real1);
13120 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13121 {
13122 if (integer_zerop (rcond))
13123 {
13124 if (code == EQ_EXPR)
13125 return omit_two_operands_loc (loc, type, boolean_false_node,
13126 imag0, imag1);
13127 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13128 }
13129 else
13130 {
13131 if (code == NE_EXPR)
13132 return omit_two_operands_loc (loc, type, boolean_true_node,
13133 imag0, imag1);
13134 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13135 }
13136 }
13137
13138 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13139 if (icond && TREE_CODE (icond) == INTEGER_CST)
13140 {
13141 if (integer_zerop (icond))
13142 {
13143 if (code == EQ_EXPR)
13144 return omit_two_operands_loc (loc, type, boolean_false_node,
13145 real0, real1);
13146 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13147 }
13148 else
13149 {
13150 if (code == NE_EXPR)
13151 return omit_two_operands_loc (loc, type, boolean_true_node,
13152 real0, real1);
13153 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13154 }
13155 }
13156 }
13157
13158 return NULL_TREE;
13159
13160 case LT_EXPR:
13161 case GT_EXPR:
13162 case LE_EXPR:
13163 case GE_EXPR:
13164 tem = fold_comparison (loc, code, type, op0, op1);
13165 if (tem != NULL_TREE)
13166 return tem;
13167
13168 /* Transform comparisons of the form X +- C CMP X. */
13169 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13170 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13171 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13172 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13173 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13174 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13175 {
13176 tree arg01 = TREE_OPERAND (arg0, 1);
13177 enum tree_code code0 = TREE_CODE (arg0);
13178 int is_positive;
13179
13180 if (TREE_CODE (arg01) == REAL_CST)
13181 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13182 else
13183 is_positive = tree_int_cst_sgn (arg01);
13184
13185 /* (X - c) > X becomes false. */
13186 if (code == GT_EXPR
13187 && ((code0 == MINUS_EXPR && is_positive >= 0)
13188 || (code0 == PLUS_EXPR && is_positive <= 0)))
13189 {
13190 if (TREE_CODE (arg01) == INTEGER_CST
13191 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13192 fold_overflow_warning (("assuming signed overflow does not "
13193 "occur when assuming that (X - c) > X "
13194 "is always false"),
13195 WARN_STRICT_OVERFLOW_ALL);
13196 return constant_boolean_node (0, type);
13197 }
13198
13199 /* Likewise (X + c) < X becomes false. */
13200 if (code == LT_EXPR
13201 && ((code0 == PLUS_EXPR && is_positive >= 0)
13202 || (code0 == MINUS_EXPR && is_positive <= 0)))
13203 {
13204 if (TREE_CODE (arg01) == INTEGER_CST
13205 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13206 fold_overflow_warning (("assuming signed overflow does not "
13207 "occur when assuming that "
13208 "(X + c) < X is always false"),
13209 WARN_STRICT_OVERFLOW_ALL);
13210 return constant_boolean_node (0, type);
13211 }
13212
13213 /* Convert (X - c) <= X to true. */
13214 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13215 && code == LE_EXPR
13216 && ((code0 == MINUS_EXPR && is_positive >= 0)
13217 || (code0 == PLUS_EXPR && is_positive <= 0)))
13218 {
13219 if (TREE_CODE (arg01) == INTEGER_CST
13220 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13221 fold_overflow_warning (("assuming signed overflow does not "
13222 "occur when assuming that "
13223 "(X - c) <= X is always true"),
13224 WARN_STRICT_OVERFLOW_ALL);
13225 return constant_boolean_node (1, type);
13226 }
13227
13228 /* Convert (X + c) >= X to true. */
13229 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13230 && code == GE_EXPR
13231 && ((code0 == PLUS_EXPR && is_positive >= 0)
13232 || (code0 == MINUS_EXPR && is_positive <= 0)))
13233 {
13234 if (TREE_CODE (arg01) == INTEGER_CST
13235 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13236 fold_overflow_warning (("assuming signed overflow does not "
13237 "occur when assuming that "
13238 "(X + c) >= X is always true"),
13239 WARN_STRICT_OVERFLOW_ALL);
13240 return constant_boolean_node (1, type);
13241 }
13242
13243 if (TREE_CODE (arg01) == INTEGER_CST)
13244 {
13245 /* Convert X + c > X and X - c < X to true for integers. */
13246 if (code == GT_EXPR
13247 && ((code0 == PLUS_EXPR && is_positive > 0)
13248 || (code0 == MINUS_EXPR && is_positive < 0)))
13249 {
13250 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13251 fold_overflow_warning (("assuming signed overflow does "
13252 "not occur when assuming that "
13253 "(X + c) > X is always true"),
13254 WARN_STRICT_OVERFLOW_ALL);
13255 return constant_boolean_node (1, type);
13256 }
13257
13258 if (code == LT_EXPR
13259 && ((code0 == MINUS_EXPR && is_positive > 0)
13260 || (code0 == PLUS_EXPR && is_positive < 0)))
13261 {
13262 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13263 fold_overflow_warning (("assuming signed overflow does "
13264 "not occur when assuming that "
13265 "(X - c) < X is always true"),
13266 WARN_STRICT_OVERFLOW_ALL);
13267 return constant_boolean_node (1, type);
13268 }
13269
13270 /* Convert X + c <= X and X - c >= X to false for integers. */
13271 if (code == LE_EXPR
13272 && ((code0 == PLUS_EXPR && is_positive > 0)
13273 || (code0 == MINUS_EXPR && is_positive < 0)))
13274 {
13275 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13276 fold_overflow_warning (("assuming signed overflow does "
13277 "not occur when assuming that "
13278 "(X + c) <= X is always false"),
13279 WARN_STRICT_OVERFLOW_ALL);
13280 return constant_boolean_node (0, type);
13281 }
13282
13283 if (code == GE_EXPR
13284 && ((code0 == MINUS_EXPR && is_positive > 0)
13285 || (code0 == PLUS_EXPR && is_positive < 0)))
13286 {
13287 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13288 fold_overflow_warning (("assuming signed overflow does "
13289 "not occur when assuming that "
13290 "(X - c) >= X is always false"),
13291 WARN_STRICT_OVERFLOW_ALL);
13292 return constant_boolean_node (0, type);
13293 }
13294 }
13295 }
13296
13297 /* Comparisons with the highest or lowest possible integer of
13298 the specified precision will have known values. */
13299 {
13300 tree arg1_type = TREE_TYPE (arg1);
13301 unsigned int width = TYPE_PRECISION (arg1_type);
13302
13303 if (TREE_CODE (arg1) == INTEGER_CST
13304 && width <= HOST_BITS_PER_DOUBLE_INT
13305 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13306 {
13307 HOST_WIDE_INT signed_max_hi;
13308 unsigned HOST_WIDE_INT signed_max_lo;
13309 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13310
13311 if (width <= HOST_BITS_PER_WIDE_INT)
13312 {
13313 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13314 - 1;
13315 signed_max_hi = 0;
13316 max_hi = 0;
13317
13318 if (TYPE_UNSIGNED (arg1_type))
13319 {
13320 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13321 min_lo = 0;
13322 min_hi = 0;
13323 }
13324 else
13325 {
13326 max_lo = signed_max_lo;
13327 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13328 min_hi = -1;
13329 }
13330 }
13331 else
13332 {
13333 width -= HOST_BITS_PER_WIDE_INT;
13334 signed_max_lo = -1;
13335 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13336 - 1;
13337 max_lo = -1;
13338 min_lo = 0;
13339
13340 if (TYPE_UNSIGNED (arg1_type))
13341 {
13342 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13343 min_hi = 0;
13344 }
13345 else
13346 {
13347 max_hi = signed_max_hi;
13348 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13349 }
13350 }
13351
13352 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13353 && TREE_INT_CST_LOW (arg1) == max_lo)
13354 switch (code)
13355 {
13356 case GT_EXPR:
13357 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13358
13359 case GE_EXPR:
13360 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13361
13362 case LE_EXPR:
13363 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13364
13365 case LT_EXPR:
13366 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13367
13368 /* The GE_EXPR and LT_EXPR cases above are not normally
13369 reached because of previous transformations. */
13370
13371 default:
13372 break;
13373 }
13374 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13375 == max_hi
13376 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13377 switch (code)
13378 {
13379 case GT_EXPR:
13380 arg1 = const_binop (PLUS_EXPR, arg1,
13381 build_int_cst (TREE_TYPE (arg1), 1));
13382 return fold_build2_loc (loc, EQ_EXPR, type,
13383 fold_convert_loc (loc,
13384 TREE_TYPE (arg1), arg0),
13385 arg1);
13386 case LE_EXPR:
13387 arg1 = const_binop (PLUS_EXPR, arg1,
13388 build_int_cst (TREE_TYPE (arg1), 1));
13389 return fold_build2_loc (loc, NE_EXPR, type,
13390 fold_convert_loc (loc, TREE_TYPE (arg1),
13391 arg0),
13392 arg1);
13393 default:
13394 break;
13395 }
13396 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13397 == min_hi
13398 && TREE_INT_CST_LOW (arg1) == min_lo)
13399 switch (code)
13400 {
13401 case LT_EXPR:
13402 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13403
13404 case LE_EXPR:
13405 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13406
13407 case GE_EXPR:
13408 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13409
13410 case GT_EXPR:
13411 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13412
13413 default:
13414 break;
13415 }
13416 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13417 == min_hi
13418 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13419 switch (code)
13420 {
13421 case GE_EXPR:
13422 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13423 return fold_build2_loc (loc, NE_EXPR, type,
13424 fold_convert_loc (loc,
13425 TREE_TYPE (arg1), arg0),
13426 arg1);
13427 case LT_EXPR:
13428 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13429 return fold_build2_loc (loc, EQ_EXPR, type,
13430 fold_convert_loc (loc, TREE_TYPE (arg1),
13431 arg0),
13432 arg1);
13433 default:
13434 break;
13435 }
13436
13437 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13438 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13439 && TYPE_UNSIGNED (arg1_type)
13440 /* We will flip the signedness of the comparison operator
13441 associated with the mode of arg1, so the sign bit is
13442 specified by this mode. Check that arg1 is the signed
13443 max associated with this sign bit. */
13444 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13445 /* signed_type does not work on pointer types. */
13446 && INTEGRAL_TYPE_P (arg1_type))
13447 {
13448 /* The following case also applies to X < signed_max+1
13449 and X >= signed_max+1 because previous transformations. */
13450 if (code == LE_EXPR || code == GT_EXPR)
13451 {
13452 tree st;
13453 st = signed_type_for (TREE_TYPE (arg1));
13454 return fold_build2_loc (loc,
13455 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13456 type, fold_convert_loc (loc, st, arg0),
13457 build_int_cst (st, 0));
13458 }
13459 }
13460 }
13461 }
13462
13463 /* If we are comparing an ABS_EXPR with a constant, we can
13464 convert all the cases into explicit comparisons, but they may
13465 well not be faster than doing the ABS and one comparison.
13466 But ABS (X) <= C is a range comparison, which becomes a subtraction
13467 and a comparison, and is probably faster. */
13468 if (code == LE_EXPR
13469 && TREE_CODE (arg1) == INTEGER_CST
13470 && TREE_CODE (arg0) == ABS_EXPR
13471 && ! TREE_SIDE_EFFECTS (arg0)
13472 && (0 != (tem = negate_expr (arg1)))
13473 && TREE_CODE (tem) == INTEGER_CST
13474 && !TREE_OVERFLOW (tem))
13475 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13476 build2 (GE_EXPR, type,
13477 TREE_OPERAND (arg0, 0), tem),
13478 build2 (LE_EXPR, type,
13479 TREE_OPERAND (arg0, 0), arg1));
13480
13481 /* Convert ABS_EXPR<x> >= 0 to true. */
13482 strict_overflow_p = false;
13483 if (code == GE_EXPR
13484 && (integer_zerop (arg1)
13485 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13486 && real_zerop (arg1)))
13487 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13488 {
13489 if (strict_overflow_p)
13490 fold_overflow_warning (("assuming signed overflow does not occur "
13491 "when simplifying comparison of "
13492 "absolute value and zero"),
13493 WARN_STRICT_OVERFLOW_CONDITIONAL);
13494 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13495 }
13496
13497 /* Convert ABS_EXPR<x> < 0 to false. */
13498 strict_overflow_p = false;
13499 if (code == LT_EXPR
13500 && (integer_zerop (arg1) || real_zerop (arg1))
13501 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13502 {
13503 if (strict_overflow_p)
13504 fold_overflow_warning (("assuming signed overflow does not occur "
13505 "when simplifying comparison of "
13506 "absolute value and zero"),
13507 WARN_STRICT_OVERFLOW_CONDITIONAL);
13508 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13509 }
13510
13511 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13512 and similarly for >= into !=. */
13513 if ((code == LT_EXPR || code == GE_EXPR)
13514 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13515 && TREE_CODE (arg1) == LSHIFT_EXPR
13516 && integer_onep (TREE_OPERAND (arg1, 0)))
13517 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13518 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13519 TREE_OPERAND (arg1, 1)),
13520 build_zero_cst (TREE_TYPE (arg0)));
13521
13522 if ((code == LT_EXPR || code == GE_EXPR)
13523 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13524 && CONVERT_EXPR_P (arg1)
13525 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13526 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13527 {
13528 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13529 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13530 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13531 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13532 build_zero_cst (TREE_TYPE (arg0)));
13533 }
13534
13535 return NULL_TREE;
13536
13537 case UNORDERED_EXPR:
13538 case ORDERED_EXPR:
13539 case UNLT_EXPR:
13540 case UNLE_EXPR:
13541 case UNGT_EXPR:
13542 case UNGE_EXPR:
13543 case UNEQ_EXPR:
13544 case LTGT_EXPR:
13545 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13546 {
13547 t1 = fold_relational_const (code, type, arg0, arg1);
13548 if (t1 != NULL_TREE)
13549 return t1;
13550 }
13551
13552 /* If the first operand is NaN, the result is constant. */
13553 if (TREE_CODE (arg0) == REAL_CST
13554 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13555 && (code != LTGT_EXPR || ! flag_trapping_math))
13556 {
13557 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13558 ? integer_zero_node
13559 : integer_one_node;
13560 return omit_one_operand_loc (loc, type, t1, arg1);
13561 }
13562
13563 /* If the second operand is NaN, the result is constant. */
13564 if (TREE_CODE (arg1) == REAL_CST
13565 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13566 && (code != LTGT_EXPR || ! flag_trapping_math))
13567 {
13568 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13569 ? integer_zero_node
13570 : integer_one_node;
13571 return omit_one_operand_loc (loc, type, t1, arg0);
13572 }
13573
13574 /* Simplify unordered comparison of something with itself. */
13575 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13576 && operand_equal_p (arg0, arg1, 0))
13577 return constant_boolean_node (1, type);
13578
13579 if (code == LTGT_EXPR
13580 && !flag_trapping_math
13581 && operand_equal_p (arg0, arg1, 0))
13582 return constant_boolean_node (0, type);
13583
13584 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13585 {
13586 tree targ0 = strip_float_extensions (arg0);
13587 tree targ1 = strip_float_extensions (arg1);
13588 tree newtype = TREE_TYPE (targ0);
13589
13590 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13591 newtype = TREE_TYPE (targ1);
13592
13593 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13594 return fold_build2_loc (loc, code, type,
13595 fold_convert_loc (loc, newtype, targ0),
13596 fold_convert_loc (loc, newtype, targ1));
13597 }
13598
13599 return NULL_TREE;
13600
13601 case COMPOUND_EXPR:
13602 /* When pedantic, a compound expression can be neither an lvalue
13603 nor an integer constant expression. */
13604 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13605 return NULL_TREE;
13606 /* Don't let (0, 0) be null pointer constant. */
13607 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13608 : fold_convert_loc (loc, type, arg1);
13609 return pedantic_non_lvalue_loc (loc, tem);
13610
13611 case COMPLEX_EXPR:
13612 if ((TREE_CODE (arg0) == REAL_CST
13613 && TREE_CODE (arg1) == REAL_CST)
13614 || (TREE_CODE (arg0) == INTEGER_CST
13615 && TREE_CODE (arg1) == INTEGER_CST))
13616 return build_complex (type, arg0, arg1);
13617 if (TREE_CODE (arg0) == REALPART_EXPR
13618 && TREE_CODE (arg1) == IMAGPART_EXPR
13619 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13620 && operand_equal_p (TREE_OPERAND (arg0, 0),
13621 TREE_OPERAND (arg1, 0), 0))
13622 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13623 TREE_OPERAND (arg1, 0));
13624 return NULL_TREE;
13625
13626 case ASSERT_EXPR:
13627 /* An ASSERT_EXPR should never be passed to fold_binary. */
13628 gcc_unreachable ();
13629
13630 case VEC_PACK_TRUNC_EXPR:
13631 case VEC_PACK_FIX_TRUNC_EXPR:
13632 {
13633 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13634 tree *elts;
13635
13636 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13637 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13638 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13639 return NULL_TREE;
13640
13641 elts = XALLOCAVEC (tree, nelts);
13642 if (!vec_cst_ctor_to_array (arg0, elts)
13643 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13644 return NULL_TREE;
13645
13646 for (i = 0; i < nelts; i++)
13647 {
13648 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13649 ? NOP_EXPR : FIX_TRUNC_EXPR,
13650 TREE_TYPE (type), elts[i]);
13651 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13652 return NULL_TREE;
13653 }
13654
13655 return build_vector (type, elts);
13656 }
13657
13658 case VEC_WIDEN_MULT_LO_EXPR:
13659 case VEC_WIDEN_MULT_HI_EXPR:
13660 case VEC_WIDEN_MULT_EVEN_EXPR:
13661 case VEC_WIDEN_MULT_ODD_EXPR:
13662 {
13663 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13664 unsigned int out, ofs, scale;
13665 tree *elts;
13666
13667 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13668 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13669 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13670 return NULL_TREE;
13671
13672 elts = XALLOCAVEC (tree, nelts * 4);
13673 if (!vec_cst_ctor_to_array (arg0, elts)
13674 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13675 return NULL_TREE;
13676
13677 if (code == VEC_WIDEN_MULT_LO_EXPR)
13678 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13679 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13680 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13681 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13682 scale = 1, ofs = 0;
13683 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13684 scale = 1, ofs = 1;
13685
13686 for (out = 0; out < nelts; out++)
13687 {
13688 unsigned int in1 = (out << scale) + ofs;
13689 unsigned int in2 = in1 + nelts * 2;
13690 tree t1, t2;
13691
13692 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13693 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13694
13695 if (t1 == NULL_TREE || t2 == NULL_TREE)
13696 return NULL_TREE;
13697 elts[out] = const_binop (MULT_EXPR, t1, t2);
13698 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13699 return NULL_TREE;
13700 }
13701
13702 return build_vector (type, elts);
13703 }
13704
13705 default:
13706 return NULL_TREE;
13707 } /* switch (code) */
13708 }
13709
13710 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13711 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13712 of GOTO_EXPR. */
13713
13714 static tree
13715 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13716 {
13717 switch (TREE_CODE (*tp))
13718 {
13719 case LABEL_EXPR:
13720 return *tp;
13721
13722 case GOTO_EXPR:
13723 *walk_subtrees = 0;
13724
13725 /* ... fall through ... */
13726
13727 default:
13728 return NULL_TREE;
13729 }
13730 }
13731
13732 /* Return whether the sub-tree ST contains a label which is accessible from
13733 outside the sub-tree. */
13734
13735 static bool
13736 contains_label_p (tree st)
13737 {
13738 return
13739 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13740 }
13741
13742 /* Fold a ternary expression of code CODE and type TYPE with operands
13743 OP0, OP1, and OP2. Return the folded expression if folding is
13744 successful. Otherwise, return NULL_TREE. */
13745
13746 tree
13747 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13748 tree op0, tree op1, tree op2)
13749 {
13750 tree tem;
13751 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13752 enum tree_code_class kind = TREE_CODE_CLASS (code);
13753
13754 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13755 && TREE_CODE_LENGTH (code) == 3);
13756
13757 /* Strip any conversions that don't change the mode. This is safe
13758 for every expression, except for a comparison expression because
13759 its signedness is derived from its operands. So, in the latter
13760 case, only strip conversions that don't change the signedness.
13761
13762 Note that this is done as an internal manipulation within the
13763 constant folder, in order to find the simplest representation of
13764 the arguments so that their form can be studied. In any cases,
13765 the appropriate type conversions should be put back in the tree
13766 that will get out of the constant folder. */
13767 if (op0)
13768 {
13769 arg0 = op0;
13770 STRIP_NOPS (arg0);
13771 }
13772
13773 if (op1)
13774 {
13775 arg1 = op1;
13776 STRIP_NOPS (arg1);
13777 }
13778
13779 if (op2)
13780 {
13781 arg2 = op2;
13782 STRIP_NOPS (arg2);
13783 }
13784
13785 switch (code)
13786 {
13787 case COMPONENT_REF:
13788 if (TREE_CODE (arg0) == CONSTRUCTOR
13789 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13790 {
13791 unsigned HOST_WIDE_INT idx;
13792 tree field, value;
13793 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13794 if (field == arg1)
13795 return value;
13796 }
13797 return NULL_TREE;
13798
13799 case COND_EXPR:
13800 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13801 so all simple results must be passed through pedantic_non_lvalue. */
13802 if (TREE_CODE (arg0) == INTEGER_CST)
13803 {
13804 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13805 tem = integer_zerop (arg0) ? op2 : op1;
13806 /* Only optimize constant conditions when the selected branch
13807 has the same type as the COND_EXPR. This avoids optimizing
13808 away "c ? x : throw", where the throw has a void type.
13809 Avoid throwing away that operand which contains label. */
13810 if ((!TREE_SIDE_EFFECTS (unused_op)
13811 || !contains_label_p (unused_op))
13812 && (! VOID_TYPE_P (TREE_TYPE (tem))
13813 || VOID_TYPE_P (type)))
13814 return pedantic_non_lvalue_loc (loc, tem);
13815 return NULL_TREE;
13816 }
13817 if (operand_equal_p (arg1, op2, 0))
13818 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13819
13820 /* If we have A op B ? A : C, we may be able to convert this to a
13821 simpler expression, depending on the operation and the values
13822 of B and C. Signed zeros prevent all of these transformations,
13823 for reasons given above each one.
13824
13825 Also try swapping the arguments and inverting the conditional. */
13826 if (COMPARISON_CLASS_P (arg0)
13827 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13828 arg1, TREE_OPERAND (arg0, 1))
13829 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13830 {
13831 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13832 if (tem)
13833 return tem;
13834 }
13835
13836 if (COMPARISON_CLASS_P (arg0)
13837 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13838 op2,
13839 TREE_OPERAND (arg0, 1))
13840 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13841 {
13842 location_t loc0 = expr_location_or (arg0, loc);
13843 tem = fold_truth_not_expr (loc0, arg0);
13844 if (tem && COMPARISON_CLASS_P (tem))
13845 {
13846 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13847 if (tem)
13848 return tem;
13849 }
13850 }
13851
13852 /* If the second operand is simpler than the third, swap them
13853 since that produces better jump optimization results. */
13854 if (truth_value_p (TREE_CODE (arg0))
13855 && tree_swap_operands_p (op1, op2, false))
13856 {
13857 location_t loc0 = expr_location_or (arg0, loc);
13858 /* See if this can be inverted. If it can't, possibly because
13859 it was a floating-point inequality comparison, don't do
13860 anything. */
13861 tem = fold_truth_not_expr (loc0, arg0);
13862 if (tem)
13863 return fold_build3_loc (loc, code, type, tem, op2, op1);
13864 }
13865
13866 /* Convert A ? 1 : 0 to simply A. */
13867 if (integer_onep (op1)
13868 && integer_zerop (op2)
13869 /* If we try to convert OP0 to our type, the
13870 call to fold will try to move the conversion inside
13871 a COND, which will recurse. In that case, the COND_EXPR
13872 is probably the best choice, so leave it alone. */
13873 && type == TREE_TYPE (arg0))
13874 return pedantic_non_lvalue_loc (loc, arg0);
13875
13876 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13877 over COND_EXPR in cases such as floating point comparisons. */
13878 if (integer_zerop (op1)
13879 && integer_onep (op2)
13880 && truth_value_p (TREE_CODE (arg0)))
13881 return pedantic_non_lvalue_loc (loc,
13882 fold_convert_loc (loc, type,
13883 invert_truthvalue_loc (loc,
13884 arg0)));
13885
13886 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13887 if (TREE_CODE (arg0) == LT_EXPR
13888 && integer_zerop (TREE_OPERAND (arg0, 1))
13889 && integer_zerop (op2)
13890 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13891 {
13892 /* sign_bit_p only checks ARG1 bits within A's precision.
13893 If <sign bit of A> has wider type than A, bits outside
13894 of A's precision in <sign bit of A> need to be checked.
13895 If they are all 0, this optimization needs to be done
13896 in unsigned A's type, if they are all 1 in signed A's type,
13897 otherwise this can't be done. */
13898 if (TYPE_PRECISION (TREE_TYPE (tem))
13899 < TYPE_PRECISION (TREE_TYPE (arg1))
13900 && TYPE_PRECISION (TREE_TYPE (tem))
13901 < TYPE_PRECISION (type))
13902 {
13903 unsigned HOST_WIDE_INT mask_lo;
13904 HOST_WIDE_INT mask_hi;
13905 int inner_width, outer_width;
13906 tree tem_type;
13907
13908 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13909 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13910 if (outer_width > TYPE_PRECISION (type))
13911 outer_width = TYPE_PRECISION (type);
13912
13913 if (outer_width > HOST_BITS_PER_WIDE_INT)
13914 {
13915 mask_hi = ((unsigned HOST_WIDE_INT) -1
13916 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13917 mask_lo = -1;
13918 }
13919 else
13920 {
13921 mask_hi = 0;
13922 mask_lo = ((unsigned HOST_WIDE_INT) -1
13923 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13924 }
13925 if (inner_width > HOST_BITS_PER_WIDE_INT)
13926 {
13927 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13928 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13929 mask_lo = 0;
13930 }
13931 else
13932 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13933 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13934
13935 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13936 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13937 {
13938 tem_type = signed_type_for (TREE_TYPE (tem));
13939 tem = fold_convert_loc (loc, tem_type, tem);
13940 }
13941 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13942 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13943 {
13944 tem_type = unsigned_type_for (TREE_TYPE (tem));
13945 tem = fold_convert_loc (loc, tem_type, tem);
13946 }
13947 else
13948 tem = NULL;
13949 }
13950
13951 if (tem)
13952 return
13953 fold_convert_loc (loc, type,
13954 fold_build2_loc (loc, BIT_AND_EXPR,
13955 TREE_TYPE (tem), tem,
13956 fold_convert_loc (loc,
13957 TREE_TYPE (tem),
13958 arg1)));
13959 }
13960
13961 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13962 already handled above. */
13963 if (TREE_CODE (arg0) == BIT_AND_EXPR
13964 && integer_onep (TREE_OPERAND (arg0, 1))
13965 && integer_zerop (op2)
13966 && integer_pow2p (arg1))
13967 {
13968 tree tem = TREE_OPERAND (arg0, 0);
13969 STRIP_NOPS (tem);
13970 if (TREE_CODE (tem) == RSHIFT_EXPR
13971 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13972 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13973 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13974 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13975 TREE_OPERAND (tem, 0), arg1);
13976 }
13977
13978 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13979 is probably obsolete because the first operand should be a
13980 truth value (that's why we have the two cases above), but let's
13981 leave it in until we can confirm this for all front-ends. */
13982 if (integer_zerop (op2)
13983 && TREE_CODE (arg0) == NE_EXPR
13984 && integer_zerop (TREE_OPERAND (arg0, 1))
13985 && integer_pow2p (arg1)
13986 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13987 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13988 arg1, OEP_ONLY_CONST))
13989 return pedantic_non_lvalue_loc (loc,
13990 fold_convert_loc (loc, type,
13991 TREE_OPERAND (arg0, 0)));
13992
13993 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13994 if (integer_zerop (op2)
13995 && truth_value_p (TREE_CODE (arg0))
13996 && truth_value_p (TREE_CODE (arg1)))
13997 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13998 fold_convert_loc (loc, type, arg0),
13999 arg1);
14000
14001 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14002 if (integer_onep (op2)
14003 && truth_value_p (TREE_CODE (arg0))
14004 && truth_value_p (TREE_CODE (arg1)))
14005 {
14006 location_t loc0 = expr_location_or (arg0, loc);
14007 /* Only perform transformation if ARG0 is easily inverted. */
14008 tem = fold_truth_not_expr (loc0, arg0);
14009 if (tem)
14010 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14011 fold_convert_loc (loc, type, tem),
14012 arg1);
14013 }
14014
14015 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14016 if (integer_zerop (arg1)
14017 && truth_value_p (TREE_CODE (arg0))
14018 && truth_value_p (TREE_CODE (op2)))
14019 {
14020 location_t loc0 = expr_location_or (arg0, loc);
14021 /* Only perform transformation if ARG0 is easily inverted. */
14022 tem = fold_truth_not_expr (loc0, arg0);
14023 if (tem)
14024 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14025 fold_convert_loc (loc, type, tem),
14026 op2);
14027 }
14028
14029 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14030 if (integer_onep (arg1)
14031 && truth_value_p (TREE_CODE (arg0))
14032 && truth_value_p (TREE_CODE (op2)))
14033 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14034 fold_convert_loc (loc, type, arg0),
14035 op2);
14036
14037 return NULL_TREE;
14038
14039 case CALL_EXPR:
14040 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14041 of fold_ternary on them. */
14042 gcc_unreachable ();
14043
14044 case BIT_FIELD_REF:
14045 if ((TREE_CODE (arg0) == VECTOR_CST
14046 || (TREE_CODE (arg0) == CONSTRUCTOR
14047 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14048 && (type == TREE_TYPE (TREE_TYPE (arg0))
14049 || (TREE_CODE (type) == VECTOR_TYPE
14050 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14051 {
14052 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14053 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14054 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14055 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14056
14057 if (n != 0
14058 && (idx % width) == 0
14059 && (n % width) == 0
14060 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14061 {
14062 idx = idx / width;
14063 n = n / width;
14064 if (TREE_CODE (type) == VECTOR_TYPE)
14065 {
14066 if (TREE_CODE (arg0) == VECTOR_CST)
14067 {
14068 tree *vals = XALLOCAVEC (tree, n);
14069 unsigned i;
14070 for (i = 0; i < n; ++i)
14071 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14072 return build_vector (type, vals);
14073 }
14074 else
14075 {
14076 VEC(constructor_elt, gc) *vals;
14077 unsigned i;
14078 if (CONSTRUCTOR_NELTS (arg0) == 0)
14079 return build_constructor (type, NULL);
14080 vals = VEC_alloc (constructor_elt, gc, n);
14081 for (i = 0; i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14082 ++i)
14083 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14084 CONSTRUCTOR_ELT
14085 (arg0, idx + i)->value);
14086 return build_constructor (type, vals);
14087 }
14088 }
14089 else if (n == 1)
14090 {
14091 if (TREE_CODE (arg0) == VECTOR_CST)
14092 return VECTOR_CST_ELT (arg0, idx);
14093 else if (idx < CONSTRUCTOR_NELTS (arg0))
14094 return CONSTRUCTOR_ELT (arg0, idx)->value;
14095 return build_zero_cst (type);
14096 }
14097 }
14098 }
14099
14100 /* A bit-field-ref that referenced the full argument can be stripped. */
14101 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14102 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14103 && integer_zerop (op2))
14104 return fold_convert_loc (loc, type, arg0);
14105
14106 /* On constants we can use native encode/interpret to constant
14107 fold (nearly) all BIT_FIELD_REFs. */
14108 if (CONSTANT_CLASS_P (arg0)
14109 && can_native_interpret_type_p (type)
14110 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14111 /* This limitation should not be necessary, we just need to
14112 round this up to mode size. */
14113 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14114 /* Need bit-shifting of the buffer to relax the following. */
14115 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14116 {
14117 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14118 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14119 unsigned HOST_WIDE_INT clen;
14120 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14121 /* ??? We cannot tell native_encode_expr to start at
14122 some random byte only. So limit us to a reasonable amount
14123 of work. */
14124 if (clen <= 4096)
14125 {
14126 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14127 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14128 if (len > 0
14129 && len * BITS_PER_UNIT >= bitpos + bitsize)
14130 {
14131 tree v = native_interpret_expr (type,
14132 b + bitpos / BITS_PER_UNIT,
14133 bitsize / BITS_PER_UNIT);
14134 if (v)
14135 return v;
14136 }
14137 }
14138 }
14139
14140 return NULL_TREE;
14141
14142 case FMA_EXPR:
14143 /* For integers we can decompose the FMA if possible. */
14144 if (TREE_CODE (arg0) == INTEGER_CST
14145 && TREE_CODE (arg1) == INTEGER_CST)
14146 return fold_build2_loc (loc, PLUS_EXPR, type,
14147 const_binop (MULT_EXPR, arg0, arg1), arg2);
14148 if (integer_zerop (arg2))
14149 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14150
14151 return fold_fma (loc, type, arg0, arg1, arg2);
14152
14153 case VEC_PERM_EXPR:
14154 if (TREE_CODE (arg2) == VECTOR_CST)
14155 {
14156 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14157 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14158 tree t;
14159 bool need_mask_canon = false;
14160
14161 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14162 for (i = 0; i < nelts; i++)
14163 {
14164 tree val = VECTOR_CST_ELT (arg2, i);
14165 if (TREE_CODE (val) != INTEGER_CST)
14166 return NULL_TREE;
14167
14168 sel[i] = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
14169 if (TREE_INT_CST_HIGH (val)
14170 || ((unsigned HOST_WIDE_INT)
14171 TREE_INT_CST_LOW (val) != sel[i]))
14172 need_mask_canon = true;
14173 }
14174
14175 if ((TREE_CODE (arg0) == VECTOR_CST
14176 || TREE_CODE (arg0) == CONSTRUCTOR)
14177 && (TREE_CODE (arg1) == VECTOR_CST
14178 || TREE_CODE (arg1) == CONSTRUCTOR))
14179 {
14180 t = fold_vec_perm (type, arg0, arg1, sel);
14181 if (t != NULL_TREE)
14182 return t;
14183 }
14184
14185 if (need_mask_canon && arg2 == op2)
14186 {
14187 tree *tsel = XALLOCAVEC (tree, nelts);
14188 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14189 for (i = 0; i < nelts; i++)
14190 tsel[i] = build_int_cst (eltype, sel[nelts - i - 1]);
14191 t = build_vector (TREE_TYPE (arg2), tsel);
14192 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t);
14193 }
14194 }
14195 return NULL_TREE;
14196
14197 default:
14198 return NULL_TREE;
14199 } /* switch (code) */
14200 }
14201
14202 /* Perform constant folding and related simplification of EXPR.
14203 The related simplifications include x*1 => x, x*0 => 0, etc.,
14204 and application of the associative law.
14205 NOP_EXPR conversions may be removed freely (as long as we
14206 are careful not to change the type of the overall expression).
14207 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14208 but we can constant-fold them if they have constant operands. */
14209
14210 #ifdef ENABLE_FOLD_CHECKING
14211 # define fold(x) fold_1 (x)
14212 static tree fold_1 (tree);
14213 static
14214 #endif
14215 tree
14216 fold (tree expr)
14217 {
14218 const tree t = expr;
14219 enum tree_code code = TREE_CODE (t);
14220 enum tree_code_class kind = TREE_CODE_CLASS (code);
14221 tree tem;
14222 location_t loc = EXPR_LOCATION (expr);
14223
14224 /* Return right away if a constant. */
14225 if (kind == tcc_constant)
14226 return t;
14227
14228 /* CALL_EXPR-like objects with variable numbers of operands are
14229 treated specially. */
14230 if (kind == tcc_vl_exp)
14231 {
14232 if (code == CALL_EXPR)
14233 {
14234 tem = fold_call_expr (loc, expr, false);
14235 return tem ? tem : expr;
14236 }
14237 return expr;
14238 }
14239
14240 if (IS_EXPR_CODE_CLASS (kind))
14241 {
14242 tree type = TREE_TYPE (t);
14243 tree op0, op1, op2;
14244
14245 switch (TREE_CODE_LENGTH (code))
14246 {
14247 case 1:
14248 op0 = TREE_OPERAND (t, 0);
14249 tem = fold_unary_loc (loc, code, type, op0);
14250 return tem ? tem : expr;
14251 case 2:
14252 op0 = TREE_OPERAND (t, 0);
14253 op1 = TREE_OPERAND (t, 1);
14254 tem = fold_binary_loc (loc, code, type, op0, op1);
14255 return tem ? tem : expr;
14256 case 3:
14257 op0 = TREE_OPERAND (t, 0);
14258 op1 = TREE_OPERAND (t, 1);
14259 op2 = TREE_OPERAND (t, 2);
14260 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14261 return tem ? tem : expr;
14262 default:
14263 break;
14264 }
14265 }
14266
14267 switch (code)
14268 {
14269 case ARRAY_REF:
14270 {
14271 tree op0 = TREE_OPERAND (t, 0);
14272 tree op1 = TREE_OPERAND (t, 1);
14273
14274 if (TREE_CODE (op1) == INTEGER_CST
14275 && TREE_CODE (op0) == CONSTRUCTOR
14276 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14277 {
14278 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14279 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14280 unsigned HOST_WIDE_INT begin = 0;
14281
14282 /* Find a matching index by means of a binary search. */
14283 while (begin != end)
14284 {
14285 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14286 tree index = VEC_index (constructor_elt, elts, middle)->index;
14287
14288 if (TREE_CODE (index) == INTEGER_CST
14289 && tree_int_cst_lt (index, op1))
14290 begin = middle + 1;
14291 else if (TREE_CODE (index) == INTEGER_CST
14292 && tree_int_cst_lt (op1, index))
14293 end = middle;
14294 else if (TREE_CODE (index) == RANGE_EXPR
14295 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14296 begin = middle + 1;
14297 else if (TREE_CODE (index) == RANGE_EXPR
14298 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14299 end = middle;
14300 else
14301 return VEC_index (constructor_elt, elts, middle)->value;
14302 }
14303 }
14304
14305 return t;
14306 }
14307
14308 case CONST_DECL:
14309 return fold (DECL_INITIAL (t));
14310
14311 default:
14312 return t;
14313 } /* switch (code) */
14314 }
14315
14316 #ifdef ENABLE_FOLD_CHECKING
14317 #undef fold
14318
14319 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14320 static void fold_check_failed (const_tree, const_tree);
14321 void print_fold_checksum (const_tree);
14322
14323 /* When --enable-checking=fold, compute a digest of expr before
14324 and after actual fold call to see if fold did not accidentally
14325 change original expr. */
14326
14327 tree
14328 fold (tree expr)
14329 {
14330 tree ret;
14331 struct md5_ctx ctx;
14332 unsigned char checksum_before[16], checksum_after[16];
14333 htab_t ht;
14334
14335 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14336 md5_init_ctx (&ctx);
14337 fold_checksum_tree (expr, &ctx, ht);
14338 md5_finish_ctx (&ctx, checksum_before);
14339 htab_empty (ht);
14340
14341 ret = fold_1 (expr);
14342
14343 md5_init_ctx (&ctx);
14344 fold_checksum_tree (expr, &ctx, ht);
14345 md5_finish_ctx (&ctx, checksum_after);
14346 htab_delete (ht);
14347
14348 if (memcmp (checksum_before, checksum_after, 16))
14349 fold_check_failed (expr, ret);
14350
14351 return ret;
14352 }
14353
14354 void
14355 print_fold_checksum (const_tree expr)
14356 {
14357 struct md5_ctx ctx;
14358 unsigned char checksum[16], cnt;
14359 htab_t ht;
14360
14361 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14362 md5_init_ctx (&ctx);
14363 fold_checksum_tree (expr, &ctx, ht);
14364 md5_finish_ctx (&ctx, checksum);
14365 htab_delete (ht);
14366 for (cnt = 0; cnt < 16; ++cnt)
14367 fprintf (stderr, "%02x", checksum[cnt]);
14368 putc ('\n', stderr);
14369 }
14370
14371 static void
14372 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14373 {
14374 internal_error ("fold check: original tree changed by fold");
14375 }
14376
14377 static void
14378 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14379 {
14380 void **slot;
14381 enum tree_code code;
14382 union tree_node buf;
14383 int i, len;
14384
14385 recursive_label:
14386 if (expr == NULL)
14387 return;
14388 slot = (void **) htab_find_slot (ht, expr, INSERT);
14389 if (*slot != NULL)
14390 return;
14391 *slot = CONST_CAST_TREE (expr);
14392 code = TREE_CODE (expr);
14393 if (TREE_CODE_CLASS (code) == tcc_declaration
14394 && DECL_ASSEMBLER_NAME_SET_P (expr))
14395 {
14396 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14397 memcpy ((char *) &buf, expr, tree_size (expr));
14398 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14399 expr = (tree) &buf;
14400 }
14401 else if (TREE_CODE_CLASS (code) == tcc_type
14402 && (TYPE_POINTER_TO (expr)
14403 || TYPE_REFERENCE_TO (expr)
14404 || TYPE_CACHED_VALUES_P (expr)
14405 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14406 || TYPE_NEXT_VARIANT (expr)))
14407 {
14408 /* Allow these fields to be modified. */
14409 tree tmp;
14410 memcpy ((char *) &buf, expr, tree_size (expr));
14411 expr = tmp = (tree) &buf;
14412 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14413 TYPE_POINTER_TO (tmp) = NULL;
14414 TYPE_REFERENCE_TO (tmp) = NULL;
14415 TYPE_NEXT_VARIANT (tmp) = NULL;
14416 if (TYPE_CACHED_VALUES_P (tmp))
14417 {
14418 TYPE_CACHED_VALUES_P (tmp) = 0;
14419 TYPE_CACHED_VALUES (tmp) = NULL;
14420 }
14421 }
14422 md5_process_bytes (expr, tree_size (expr), ctx);
14423 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14424 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14425 if (TREE_CODE_CLASS (code) != tcc_type
14426 && TREE_CODE_CLASS (code) != tcc_declaration
14427 && code != TREE_LIST
14428 && code != SSA_NAME
14429 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14430 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14431 switch (TREE_CODE_CLASS (code))
14432 {
14433 case tcc_constant:
14434 switch (code)
14435 {
14436 case STRING_CST:
14437 md5_process_bytes (TREE_STRING_POINTER (expr),
14438 TREE_STRING_LENGTH (expr), ctx);
14439 break;
14440 case COMPLEX_CST:
14441 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14442 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14443 break;
14444 case VECTOR_CST:
14445 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14446 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14447 break;
14448 default:
14449 break;
14450 }
14451 break;
14452 case tcc_exceptional:
14453 switch (code)
14454 {
14455 case TREE_LIST:
14456 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14457 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14458 expr = TREE_CHAIN (expr);
14459 goto recursive_label;
14460 break;
14461 case TREE_VEC:
14462 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14463 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14464 break;
14465 default:
14466 break;
14467 }
14468 break;
14469 case tcc_expression:
14470 case tcc_reference:
14471 case tcc_comparison:
14472 case tcc_unary:
14473 case tcc_binary:
14474 case tcc_statement:
14475 case tcc_vl_exp:
14476 len = TREE_OPERAND_LENGTH (expr);
14477 for (i = 0; i < len; ++i)
14478 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14479 break;
14480 case tcc_declaration:
14481 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14482 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14483 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14484 {
14485 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14486 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14487 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14488 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14489 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14490 }
14491 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14492 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14493
14494 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14495 {
14496 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14497 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14498 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14499 }
14500 break;
14501 case tcc_type:
14502 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14503 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14504 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14505 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14506 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14507 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14508 if (INTEGRAL_TYPE_P (expr)
14509 || SCALAR_FLOAT_TYPE_P (expr))
14510 {
14511 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14512 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14513 }
14514 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14515 if (TREE_CODE (expr) == RECORD_TYPE
14516 || TREE_CODE (expr) == UNION_TYPE
14517 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14518 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14519 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14520 break;
14521 default:
14522 break;
14523 }
14524 }
14525
14526 /* Helper function for outputting the checksum of a tree T. When
14527 debugging with gdb, you can "define mynext" to be "next" followed
14528 by "call debug_fold_checksum (op0)", then just trace down till the
14529 outputs differ. */
14530
14531 DEBUG_FUNCTION void
14532 debug_fold_checksum (const_tree t)
14533 {
14534 int i;
14535 unsigned char checksum[16];
14536 struct md5_ctx ctx;
14537 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14538
14539 md5_init_ctx (&ctx);
14540 fold_checksum_tree (t, &ctx, ht);
14541 md5_finish_ctx (&ctx, checksum);
14542 htab_empty (ht);
14543
14544 for (i = 0; i < 16; i++)
14545 fprintf (stderr, "%d ", checksum[i]);
14546
14547 fprintf (stderr, "\n");
14548 }
14549
14550 #endif
14551
14552 /* Fold a unary tree expression with code CODE of type TYPE with an
14553 operand OP0. LOC is the location of the resulting expression.
14554 Return a folded expression if successful. Otherwise, return a tree
14555 expression with code CODE of type TYPE with an operand OP0. */
14556
14557 tree
14558 fold_build1_stat_loc (location_t loc,
14559 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14560 {
14561 tree tem;
14562 #ifdef ENABLE_FOLD_CHECKING
14563 unsigned char checksum_before[16], checksum_after[16];
14564 struct md5_ctx ctx;
14565 htab_t ht;
14566
14567 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14568 md5_init_ctx (&ctx);
14569 fold_checksum_tree (op0, &ctx, ht);
14570 md5_finish_ctx (&ctx, checksum_before);
14571 htab_empty (ht);
14572 #endif
14573
14574 tem = fold_unary_loc (loc, code, type, op0);
14575 if (!tem)
14576 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14577
14578 #ifdef ENABLE_FOLD_CHECKING
14579 md5_init_ctx (&ctx);
14580 fold_checksum_tree (op0, &ctx, ht);
14581 md5_finish_ctx (&ctx, checksum_after);
14582 htab_delete (ht);
14583
14584 if (memcmp (checksum_before, checksum_after, 16))
14585 fold_check_failed (op0, tem);
14586 #endif
14587 return tem;
14588 }
14589
14590 /* Fold a binary tree expression with code CODE of type TYPE with
14591 operands OP0 and OP1. LOC is the location of the resulting
14592 expression. Return a folded expression if successful. Otherwise,
14593 return a tree expression with code CODE of type TYPE with operands
14594 OP0 and OP1. */
14595
14596 tree
14597 fold_build2_stat_loc (location_t loc,
14598 enum tree_code code, tree type, tree op0, tree op1
14599 MEM_STAT_DECL)
14600 {
14601 tree tem;
14602 #ifdef ENABLE_FOLD_CHECKING
14603 unsigned char checksum_before_op0[16],
14604 checksum_before_op1[16],
14605 checksum_after_op0[16],
14606 checksum_after_op1[16];
14607 struct md5_ctx ctx;
14608 htab_t ht;
14609
14610 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14611 md5_init_ctx (&ctx);
14612 fold_checksum_tree (op0, &ctx, ht);
14613 md5_finish_ctx (&ctx, checksum_before_op0);
14614 htab_empty (ht);
14615
14616 md5_init_ctx (&ctx);
14617 fold_checksum_tree (op1, &ctx, ht);
14618 md5_finish_ctx (&ctx, checksum_before_op1);
14619 htab_empty (ht);
14620 #endif
14621
14622 tem = fold_binary_loc (loc, code, type, op0, op1);
14623 if (!tem)
14624 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14625
14626 #ifdef ENABLE_FOLD_CHECKING
14627 md5_init_ctx (&ctx);
14628 fold_checksum_tree (op0, &ctx, ht);
14629 md5_finish_ctx (&ctx, checksum_after_op0);
14630 htab_empty (ht);
14631
14632 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14633 fold_check_failed (op0, tem);
14634
14635 md5_init_ctx (&ctx);
14636 fold_checksum_tree (op1, &ctx, ht);
14637 md5_finish_ctx (&ctx, checksum_after_op1);
14638 htab_delete (ht);
14639
14640 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14641 fold_check_failed (op1, tem);
14642 #endif
14643 return tem;
14644 }
14645
14646 /* Fold a ternary tree expression with code CODE of type TYPE with
14647 operands OP0, OP1, and OP2. Return a folded expression if
14648 successful. Otherwise, return a tree expression with code CODE of
14649 type TYPE with operands OP0, OP1, and OP2. */
14650
14651 tree
14652 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14653 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14654 {
14655 tree tem;
14656 #ifdef ENABLE_FOLD_CHECKING
14657 unsigned char checksum_before_op0[16],
14658 checksum_before_op1[16],
14659 checksum_before_op2[16],
14660 checksum_after_op0[16],
14661 checksum_after_op1[16],
14662 checksum_after_op2[16];
14663 struct md5_ctx ctx;
14664 htab_t ht;
14665
14666 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14667 md5_init_ctx (&ctx);
14668 fold_checksum_tree (op0, &ctx, ht);
14669 md5_finish_ctx (&ctx, checksum_before_op0);
14670 htab_empty (ht);
14671
14672 md5_init_ctx (&ctx);
14673 fold_checksum_tree (op1, &ctx, ht);
14674 md5_finish_ctx (&ctx, checksum_before_op1);
14675 htab_empty (ht);
14676
14677 md5_init_ctx (&ctx);
14678 fold_checksum_tree (op2, &ctx, ht);
14679 md5_finish_ctx (&ctx, checksum_before_op2);
14680 htab_empty (ht);
14681 #endif
14682
14683 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14684 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14685 if (!tem)
14686 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14687
14688 #ifdef ENABLE_FOLD_CHECKING
14689 md5_init_ctx (&ctx);
14690 fold_checksum_tree (op0, &ctx, ht);
14691 md5_finish_ctx (&ctx, checksum_after_op0);
14692 htab_empty (ht);
14693
14694 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14695 fold_check_failed (op0, tem);
14696
14697 md5_init_ctx (&ctx);
14698 fold_checksum_tree (op1, &ctx, ht);
14699 md5_finish_ctx (&ctx, checksum_after_op1);
14700 htab_empty (ht);
14701
14702 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14703 fold_check_failed (op1, tem);
14704
14705 md5_init_ctx (&ctx);
14706 fold_checksum_tree (op2, &ctx, ht);
14707 md5_finish_ctx (&ctx, checksum_after_op2);
14708 htab_delete (ht);
14709
14710 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14711 fold_check_failed (op2, tem);
14712 #endif
14713 return tem;
14714 }
14715
14716 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14717 arguments in ARGARRAY, and a null static chain.
14718 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14719 of type TYPE from the given operands as constructed by build_call_array. */
14720
14721 tree
14722 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14723 int nargs, tree *argarray)
14724 {
14725 tree tem;
14726 #ifdef ENABLE_FOLD_CHECKING
14727 unsigned char checksum_before_fn[16],
14728 checksum_before_arglist[16],
14729 checksum_after_fn[16],
14730 checksum_after_arglist[16];
14731 struct md5_ctx ctx;
14732 htab_t ht;
14733 int i;
14734
14735 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14736 md5_init_ctx (&ctx);
14737 fold_checksum_tree (fn, &ctx, ht);
14738 md5_finish_ctx (&ctx, checksum_before_fn);
14739 htab_empty (ht);
14740
14741 md5_init_ctx (&ctx);
14742 for (i = 0; i < nargs; i++)
14743 fold_checksum_tree (argarray[i], &ctx, ht);
14744 md5_finish_ctx (&ctx, checksum_before_arglist);
14745 htab_empty (ht);
14746 #endif
14747
14748 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14749
14750 #ifdef ENABLE_FOLD_CHECKING
14751 md5_init_ctx (&ctx);
14752 fold_checksum_tree (fn, &ctx, ht);
14753 md5_finish_ctx (&ctx, checksum_after_fn);
14754 htab_empty (ht);
14755
14756 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14757 fold_check_failed (fn, tem);
14758
14759 md5_init_ctx (&ctx);
14760 for (i = 0; i < nargs; i++)
14761 fold_checksum_tree (argarray[i], &ctx, ht);
14762 md5_finish_ctx (&ctx, checksum_after_arglist);
14763 htab_delete (ht);
14764
14765 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14766 fold_check_failed (NULL_TREE, tem);
14767 #endif
14768 return tem;
14769 }
14770
14771 /* Perform constant folding and related simplification of initializer
14772 expression EXPR. These behave identically to "fold_buildN" but ignore
14773 potential run-time traps and exceptions that fold must preserve. */
14774
14775 #define START_FOLD_INIT \
14776 int saved_signaling_nans = flag_signaling_nans;\
14777 int saved_trapping_math = flag_trapping_math;\
14778 int saved_rounding_math = flag_rounding_math;\
14779 int saved_trapv = flag_trapv;\
14780 int saved_folding_initializer = folding_initializer;\
14781 flag_signaling_nans = 0;\
14782 flag_trapping_math = 0;\
14783 flag_rounding_math = 0;\
14784 flag_trapv = 0;\
14785 folding_initializer = 1;
14786
14787 #define END_FOLD_INIT \
14788 flag_signaling_nans = saved_signaling_nans;\
14789 flag_trapping_math = saved_trapping_math;\
14790 flag_rounding_math = saved_rounding_math;\
14791 flag_trapv = saved_trapv;\
14792 folding_initializer = saved_folding_initializer;
14793
14794 tree
14795 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14796 tree type, tree op)
14797 {
14798 tree result;
14799 START_FOLD_INIT;
14800
14801 result = fold_build1_loc (loc, code, type, op);
14802
14803 END_FOLD_INIT;
14804 return result;
14805 }
14806
14807 tree
14808 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14809 tree type, tree op0, tree op1)
14810 {
14811 tree result;
14812 START_FOLD_INIT;
14813
14814 result = fold_build2_loc (loc, code, type, op0, op1);
14815
14816 END_FOLD_INIT;
14817 return result;
14818 }
14819
14820 tree
14821 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14822 tree type, tree op0, tree op1, tree op2)
14823 {
14824 tree result;
14825 START_FOLD_INIT;
14826
14827 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14828
14829 END_FOLD_INIT;
14830 return result;
14831 }
14832
14833 tree
14834 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14835 int nargs, tree *argarray)
14836 {
14837 tree result;
14838 START_FOLD_INIT;
14839
14840 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14841
14842 END_FOLD_INIT;
14843 return result;
14844 }
14845
14846 #undef START_FOLD_INIT
14847 #undef END_FOLD_INIT
14848
14849 /* Determine if first argument is a multiple of second argument. Return 0 if
14850 it is not, or we cannot easily determined it to be.
14851
14852 An example of the sort of thing we care about (at this point; this routine
14853 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14854 fold cases do now) is discovering that
14855
14856 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14857
14858 is a multiple of
14859
14860 SAVE_EXPR (J * 8)
14861
14862 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14863
14864 This code also handles discovering that
14865
14866 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14867
14868 is a multiple of 8 so we don't have to worry about dealing with a
14869 possible remainder.
14870
14871 Note that we *look* inside a SAVE_EXPR only to determine how it was
14872 calculated; it is not safe for fold to do much of anything else with the
14873 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14874 at run time. For example, the latter example above *cannot* be implemented
14875 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14876 evaluation time of the original SAVE_EXPR is not necessarily the same at
14877 the time the new expression is evaluated. The only optimization of this
14878 sort that would be valid is changing
14879
14880 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14881
14882 divided by 8 to
14883
14884 SAVE_EXPR (I) * SAVE_EXPR (J)
14885
14886 (where the same SAVE_EXPR (J) is used in the original and the
14887 transformed version). */
14888
14889 int
14890 multiple_of_p (tree type, const_tree top, const_tree bottom)
14891 {
14892 if (operand_equal_p (top, bottom, 0))
14893 return 1;
14894
14895 if (TREE_CODE (type) != INTEGER_TYPE)
14896 return 0;
14897
14898 switch (TREE_CODE (top))
14899 {
14900 case BIT_AND_EXPR:
14901 /* Bitwise and provides a power of two multiple. If the mask is
14902 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14903 if (!integer_pow2p (bottom))
14904 return 0;
14905 /* FALLTHRU */
14906
14907 case MULT_EXPR:
14908 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14909 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14910
14911 case PLUS_EXPR:
14912 case MINUS_EXPR:
14913 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14914 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14915
14916 case LSHIFT_EXPR:
14917 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14918 {
14919 tree op1, t1;
14920
14921 op1 = TREE_OPERAND (top, 1);
14922 /* const_binop may not detect overflow correctly,
14923 so check for it explicitly here. */
14924 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14925 > TREE_INT_CST_LOW (op1)
14926 && TREE_INT_CST_HIGH (op1) == 0
14927 && 0 != (t1 = fold_convert (type,
14928 const_binop (LSHIFT_EXPR,
14929 size_one_node,
14930 op1)))
14931 && !TREE_OVERFLOW (t1))
14932 return multiple_of_p (type, t1, bottom);
14933 }
14934 return 0;
14935
14936 case NOP_EXPR:
14937 /* Can't handle conversions from non-integral or wider integral type. */
14938 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14939 || (TYPE_PRECISION (type)
14940 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14941 return 0;
14942
14943 /* .. fall through ... */
14944
14945 case SAVE_EXPR:
14946 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14947
14948 case COND_EXPR:
14949 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14950 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14951
14952 case INTEGER_CST:
14953 if (TREE_CODE (bottom) != INTEGER_CST
14954 || integer_zerop (bottom)
14955 || (TYPE_UNSIGNED (type)
14956 && (tree_int_cst_sgn (top) < 0
14957 || tree_int_cst_sgn (bottom) < 0)))
14958 return 0;
14959 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14960 top, bottom));
14961
14962 default:
14963 return 0;
14964 }
14965 }
14966
14967 /* Return true if CODE or TYPE is known to be non-negative. */
14968
14969 static bool
14970 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14971 {
14972 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14973 && truth_value_p (code))
14974 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14975 have a signed:1 type (where the value is -1 and 0). */
14976 return true;
14977 return false;
14978 }
14979
14980 /* Return true if (CODE OP0) is known to be non-negative. If the return
14981 value is based on the assumption that signed overflow is undefined,
14982 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14983 *STRICT_OVERFLOW_P. */
14984
14985 bool
14986 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14987 bool *strict_overflow_p)
14988 {
14989 if (TYPE_UNSIGNED (type))
14990 return true;
14991
14992 switch (code)
14993 {
14994 case ABS_EXPR:
14995 /* We can't return 1 if flag_wrapv is set because
14996 ABS_EXPR<INT_MIN> = INT_MIN. */
14997 if (!INTEGRAL_TYPE_P (type))
14998 return true;
14999 if (TYPE_OVERFLOW_UNDEFINED (type))
15000 {
15001 *strict_overflow_p = true;
15002 return true;
15003 }
15004 break;
15005
15006 case NON_LVALUE_EXPR:
15007 case FLOAT_EXPR:
15008 case FIX_TRUNC_EXPR:
15009 return tree_expr_nonnegative_warnv_p (op0,
15010 strict_overflow_p);
15011
15012 case NOP_EXPR:
15013 {
15014 tree inner_type = TREE_TYPE (op0);
15015 tree outer_type = type;
15016
15017 if (TREE_CODE (outer_type) == REAL_TYPE)
15018 {
15019 if (TREE_CODE (inner_type) == REAL_TYPE)
15020 return tree_expr_nonnegative_warnv_p (op0,
15021 strict_overflow_p);
15022 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15023 {
15024 if (TYPE_UNSIGNED (inner_type))
15025 return true;
15026 return tree_expr_nonnegative_warnv_p (op0,
15027 strict_overflow_p);
15028 }
15029 }
15030 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15031 {
15032 if (TREE_CODE (inner_type) == REAL_TYPE)
15033 return tree_expr_nonnegative_warnv_p (op0,
15034 strict_overflow_p);
15035 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15036 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15037 && TYPE_UNSIGNED (inner_type);
15038 }
15039 }
15040 break;
15041
15042 default:
15043 return tree_simple_nonnegative_warnv_p (code, type);
15044 }
15045
15046 /* We don't know sign of `t', so be conservative and return false. */
15047 return false;
15048 }
15049
15050 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15051 value is based on the assumption that signed overflow is undefined,
15052 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15053 *STRICT_OVERFLOW_P. */
15054
15055 bool
15056 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15057 tree op1, bool *strict_overflow_p)
15058 {
15059 if (TYPE_UNSIGNED (type))
15060 return true;
15061
15062 switch (code)
15063 {
15064 case POINTER_PLUS_EXPR:
15065 case PLUS_EXPR:
15066 if (FLOAT_TYPE_P (type))
15067 return (tree_expr_nonnegative_warnv_p (op0,
15068 strict_overflow_p)
15069 && tree_expr_nonnegative_warnv_p (op1,
15070 strict_overflow_p));
15071
15072 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15073 both unsigned and at least 2 bits shorter than the result. */
15074 if (TREE_CODE (type) == INTEGER_TYPE
15075 && TREE_CODE (op0) == NOP_EXPR
15076 && TREE_CODE (op1) == NOP_EXPR)
15077 {
15078 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15079 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15080 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15081 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15082 {
15083 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15084 TYPE_PRECISION (inner2)) + 1;
15085 return prec < TYPE_PRECISION (type);
15086 }
15087 }
15088 break;
15089
15090 case MULT_EXPR:
15091 if (FLOAT_TYPE_P (type))
15092 {
15093 /* x * x for floating point x is always non-negative. */
15094 if (operand_equal_p (op0, op1, 0))
15095 return true;
15096 return (tree_expr_nonnegative_warnv_p (op0,
15097 strict_overflow_p)
15098 && tree_expr_nonnegative_warnv_p (op1,
15099 strict_overflow_p));
15100 }
15101
15102 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15103 both unsigned and their total bits is shorter than the result. */
15104 if (TREE_CODE (type) == INTEGER_TYPE
15105 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15106 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15107 {
15108 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15109 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15110 : TREE_TYPE (op0);
15111 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15112 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15113 : TREE_TYPE (op1);
15114
15115 bool unsigned0 = TYPE_UNSIGNED (inner0);
15116 bool unsigned1 = TYPE_UNSIGNED (inner1);
15117
15118 if (TREE_CODE (op0) == INTEGER_CST)
15119 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15120
15121 if (TREE_CODE (op1) == INTEGER_CST)
15122 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15123
15124 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15125 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15126 {
15127 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15128 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15129 : TYPE_PRECISION (inner0);
15130
15131 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15132 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15133 : TYPE_PRECISION (inner1);
15134
15135 return precision0 + precision1 < TYPE_PRECISION (type);
15136 }
15137 }
15138 return false;
15139
15140 case BIT_AND_EXPR:
15141 case MAX_EXPR:
15142 return (tree_expr_nonnegative_warnv_p (op0,
15143 strict_overflow_p)
15144 || tree_expr_nonnegative_warnv_p (op1,
15145 strict_overflow_p));
15146
15147 case BIT_IOR_EXPR:
15148 case BIT_XOR_EXPR:
15149 case MIN_EXPR:
15150 case RDIV_EXPR:
15151 case TRUNC_DIV_EXPR:
15152 case CEIL_DIV_EXPR:
15153 case FLOOR_DIV_EXPR:
15154 case ROUND_DIV_EXPR:
15155 return (tree_expr_nonnegative_warnv_p (op0,
15156 strict_overflow_p)
15157 && tree_expr_nonnegative_warnv_p (op1,
15158 strict_overflow_p));
15159
15160 case TRUNC_MOD_EXPR:
15161 case CEIL_MOD_EXPR:
15162 case FLOOR_MOD_EXPR:
15163 case ROUND_MOD_EXPR:
15164 return tree_expr_nonnegative_warnv_p (op0,
15165 strict_overflow_p);
15166 default:
15167 return tree_simple_nonnegative_warnv_p (code, type);
15168 }
15169
15170 /* We don't know sign of `t', so be conservative and return false. */
15171 return false;
15172 }
15173
15174 /* Return true if T is known to be non-negative. If the return
15175 value is based on the assumption that signed overflow is undefined,
15176 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15177 *STRICT_OVERFLOW_P. */
15178
15179 bool
15180 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15181 {
15182 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15183 return true;
15184
15185 switch (TREE_CODE (t))
15186 {
15187 case INTEGER_CST:
15188 return tree_int_cst_sgn (t) >= 0;
15189
15190 case REAL_CST:
15191 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15192
15193 case FIXED_CST:
15194 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15195
15196 case COND_EXPR:
15197 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15198 strict_overflow_p)
15199 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15200 strict_overflow_p));
15201 default:
15202 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15203 TREE_TYPE (t));
15204 }
15205 /* We don't know sign of `t', so be conservative and return false. */
15206 return false;
15207 }
15208
15209 /* Return true if T is known to be non-negative. If the return
15210 value is based on the assumption that signed overflow is undefined,
15211 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15212 *STRICT_OVERFLOW_P. */
15213
15214 bool
15215 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15216 tree arg0, tree arg1, bool *strict_overflow_p)
15217 {
15218 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15219 switch (DECL_FUNCTION_CODE (fndecl))
15220 {
15221 CASE_FLT_FN (BUILT_IN_ACOS):
15222 CASE_FLT_FN (BUILT_IN_ACOSH):
15223 CASE_FLT_FN (BUILT_IN_CABS):
15224 CASE_FLT_FN (BUILT_IN_COSH):
15225 CASE_FLT_FN (BUILT_IN_ERFC):
15226 CASE_FLT_FN (BUILT_IN_EXP):
15227 CASE_FLT_FN (BUILT_IN_EXP10):
15228 CASE_FLT_FN (BUILT_IN_EXP2):
15229 CASE_FLT_FN (BUILT_IN_FABS):
15230 CASE_FLT_FN (BUILT_IN_FDIM):
15231 CASE_FLT_FN (BUILT_IN_HYPOT):
15232 CASE_FLT_FN (BUILT_IN_POW10):
15233 CASE_INT_FN (BUILT_IN_FFS):
15234 CASE_INT_FN (BUILT_IN_PARITY):
15235 CASE_INT_FN (BUILT_IN_POPCOUNT):
15236 case BUILT_IN_BSWAP32:
15237 case BUILT_IN_BSWAP64:
15238 /* Always true. */
15239 return true;
15240
15241 CASE_FLT_FN (BUILT_IN_SQRT):
15242 /* sqrt(-0.0) is -0.0. */
15243 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15244 return true;
15245 return tree_expr_nonnegative_warnv_p (arg0,
15246 strict_overflow_p);
15247
15248 CASE_FLT_FN (BUILT_IN_ASINH):
15249 CASE_FLT_FN (BUILT_IN_ATAN):
15250 CASE_FLT_FN (BUILT_IN_ATANH):
15251 CASE_FLT_FN (BUILT_IN_CBRT):
15252 CASE_FLT_FN (BUILT_IN_CEIL):
15253 CASE_FLT_FN (BUILT_IN_ERF):
15254 CASE_FLT_FN (BUILT_IN_EXPM1):
15255 CASE_FLT_FN (BUILT_IN_FLOOR):
15256 CASE_FLT_FN (BUILT_IN_FMOD):
15257 CASE_FLT_FN (BUILT_IN_FREXP):
15258 CASE_FLT_FN (BUILT_IN_ICEIL):
15259 CASE_FLT_FN (BUILT_IN_IFLOOR):
15260 CASE_FLT_FN (BUILT_IN_IRINT):
15261 CASE_FLT_FN (BUILT_IN_IROUND):
15262 CASE_FLT_FN (BUILT_IN_LCEIL):
15263 CASE_FLT_FN (BUILT_IN_LDEXP):
15264 CASE_FLT_FN (BUILT_IN_LFLOOR):
15265 CASE_FLT_FN (BUILT_IN_LLCEIL):
15266 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15267 CASE_FLT_FN (BUILT_IN_LLRINT):
15268 CASE_FLT_FN (BUILT_IN_LLROUND):
15269 CASE_FLT_FN (BUILT_IN_LRINT):
15270 CASE_FLT_FN (BUILT_IN_LROUND):
15271 CASE_FLT_FN (BUILT_IN_MODF):
15272 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15273 CASE_FLT_FN (BUILT_IN_RINT):
15274 CASE_FLT_FN (BUILT_IN_ROUND):
15275 CASE_FLT_FN (BUILT_IN_SCALB):
15276 CASE_FLT_FN (BUILT_IN_SCALBLN):
15277 CASE_FLT_FN (BUILT_IN_SCALBN):
15278 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15279 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15280 CASE_FLT_FN (BUILT_IN_SINH):
15281 CASE_FLT_FN (BUILT_IN_TANH):
15282 CASE_FLT_FN (BUILT_IN_TRUNC):
15283 /* True if the 1st argument is nonnegative. */
15284 return tree_expr_nonnegative_warnv_p (arg0,
15285 strict_overflow_p);
15286
15287 CASE_FLT_FN (BUILT_IN_FMAX):
15288 /* True if the 1st OR 2nd arguments are nonnegative. */
15289 return (tree_expr_nonnegative_warnv_p (arg0,
15290 strict_overflow_p)
15291 || (tree_expr_nonnegative_warnv_p (arg1,
15292 strict_overflow_p)));
15293
15294 CASE_FLT_FN (BUILT_IN_FMIN):
15295 /* True if the 1st AND 2nd arguments are nonnegative. */
15296 return (tree_expr_nonnegative_warnv_p (arg0,
15297 strict_overflow_p)
15298 && (tree_expr_nonnegative_warnv_p (arg1,
15299 strict_overflow_p)));
15300
15301 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15302 /* True if the 2nd argument is nonnegative. */
15303 return tree_expr_nonnegative_warnv_p (arg1,
15304 strict_overflow_p);
15305
15306 CASE_FLT_FN (BUILT_IN_POWI):
15307 /* True if the 1st argument is nonnegative or the second
15308 argument is an even integer. */
15309 if (TREE_CODE (arg1) == INTEGER_CST
15310 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15311 return true;
15312 return tree_expr_nonnegative_warnv_p (arg0,
15313 strict_overflow_p);
15314
15315 CASE_FLT_FN (BUILT_IN_POW):
15316 /* True if the 1st argument is nonnegative or the second
15317 argument is an even integer valued real. */
15318 if (TREE_CODE (arg1) == REAL_CST)
15319 {
15320 REAL_VALUE_TYPE c;
15321 HOST_WIDE_INT n;
15322
15323 c = TREE_REAL_CST (arg1);
15324 n = real_to_integer (&c);
15325 if ((n & 1) == 0)
15326 {
15327 REAL_VALUE_TYPE cint;
15328 real_from_integer (&cint, VOIDmode, n,
15329 n < 0 ? -1 : 0, 0);
15330 if (real_identical (&c, &cint))
15331 return true;
15332 }
15333 }
15334 return tree_expr_nonnegative_warnv_p (arg0,
15335 strict_overflow_p);
15336
15337 default:
15338 break;
15339 }
15340 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15341 type);
15342 }
15343
15344 /* Return true if T is known to be non-negative. If the return
15345 value is based on the assumption that signed overflow is undefined,
15346 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15347 *STRICT_OVERFLOW_P. */
15348
15349 bool
15350 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15351 {
15352 enum tree_code code = TREE_CODE (t);
15353 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15354 return true;
15355
15356 switch (code)
15357 {
15358 case TARGET_EXPR:
15359 {
15360 tree temp = TARGET_EXPR_SLOT (t);
15361 t = TARGET_EXPR_INITIAL (t);
15362
15363 /* If the initializer is non-void, then it's a normal expression
15364 that will be assigned to the slot. */
15365 if (!VOID_TYPE_P (t))
15366 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15367
15368 /* Otherwise, the initializer sets the slot in some way. One common
15369 way is an assignment statement at the end of the initializer. */
15370 while (1)
15371 {
15372 if (TREE_CODE (t) == BIND_EXPR)
15373 t = expr_last (BIND_EXPR_BODY (t));
15374 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15375 || TREE_CODE (t) == TRY_CATCH_EXPR)
15376 t = expr_last (TREE_OPERAND (t, 0));
15377 else if (TREE_CODE (t) == STATEMENT_LIST)
15378 t = expr_last (t);
15379 else
15380 break;
15381 }
15382 if (TREE_CODE (t) == MODIFY_EXPR
15383 && TREE_OPERAND (t, 0) == temp)
15384 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15385 strict_overflow_p);
15386
15387 return false;
15388 }
15389
15390 case CALL_EXPR:
15391 {
15392 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15393 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15394
15395 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15396 get_callee_fndecl (t),
15397 arg0,
15398 arg1,
15399 strict_overflow_p);
15400 }
15401 case COMPOUND_EXPR:
15402 case MODIFY_EXPR:
15403 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15404 strict_overflow_p);
15405 case BIND_EXPR:
15406 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15407 strict_overflow_p);
15408 case SAVE_EXPR:
15409 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15410 strict_overflow_p);
15411
15412 default:
15413 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15414 TREE_TYPE (t));
15415 }
15416
15417 /* We don't know sign of `t', so be conservative and return false. */
15418 return false;
15419 }
15420
15421 /* Return true if T is known to be non-negative. If the return
15422 value is based on the assumption that signed overflow is undefined,
15423 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15424 *STRICT_OVERFLOW_P. */
15425
15426 bool
15427 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15428 {
15429 enum tree_code code;
15430 if (t == error_mark_node)
15431 return false;
15432
15433 code = TREE_CODE (t);
15434 switch (TREE_CODE_CLASS (code))
15435 {
15436 case tcc_binary:
15437 case tcc_comparison:
15438 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15439 TREE_TYPE (t),
15440 TREE_OPERAND (t, 0),
15441 TREE_OPERAND (t, 1),
15442 strict_overflow_p);
15443
15444 case tcc_unary:
15445 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15446 TREE_TYPE (t),
15447 TREE_OPERAND (t, 0),
15448 strict_overflow_p);
15449
15450 case tcc_constant:
15451 case tcc_declaration:
15452 case tcc_reference:
15453 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15454
15455 default:
15456 break;
15457 }
15458
15459 switch (code)
15460 {
15461 case TRUTH_AND_EXPR:
15462 case TRUTH_OR_EXPR:
15463 case TRUTH_XOR_EXPR:
15464 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15465 TREE_TYPE (t),
15466 TREE_OPERAND (t, 0),
15467 TREE_OPERAND (t, 1),
15468 strict_overflow_p);
15469 case TRUTH_NOT_EXPR:
15470 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15471 TREE_TYPE (t),
15472 TREE_OPERAND (t, 0),
15473 strict_overflow_p);
15474
15475 case COND_EXPR:
15476 case CONSTRUCTOR:
15477 case OBJ_TYPE_REF:
15478 case ASSERT_EXPR:
15479 case ADDR_EXPR:
15480 case WITH_SIZE_EXPR:
15481 case SSA_NAME:
15482 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15483
15484 default:
15485 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15486 }
15487 }
15488
15489 /* Return true if `t' is known to be non-negative. Handle warnings
15490 about undefined signed overflow. */
15491
15492 bool
15493 tree_expr_nonnegative_p (tree t)
15494 {
15495 bool ret, strict_overflow_p;
15496
15497 strict_overflow_p = false;
15498 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15499 if (strict_overflow_p)
15500 fold_overflow_warning (("assuming signed overflow does not occur when "
15501 "determining that expression is always "
15502 "non-negative"),
15503 WARN_STRICT_OVERFLOW_MISC);
15504 return ret;
15505 }
15506
15507
15508 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15509 For floating point we further ensure that T is not denormal.
15510 Similar logic is present in nonzero_address in rtlanal.h.
15511
15512 If the return value is based on the assumption that signed overflow
15513 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15514 change *STRICT_OVERFLOW_P. */
15515
15516 bool
15517 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15518 bool *strict_overflow_p)
15519 {
15520 switch (code)
15521 {
15522 case ABS_EXPR:
15523 return tree_expr_nonzero_warnv_p (op0,
15524 strict_overflow_p);
15525
15526 case NOP_EXPR:
15527 {
15528 tree inner_type = TREE_TYPE (op0);
15529 tree outer_type = type;
15530
15531 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15532 && tree_expr_nonzero_warnv_p (op0,
15533 strict_overflow_p));
15534 }
15535 break;
15536
15537 case NON_LVALUE_EXPR:
15538 return tree_expr_nonzero_warnv_p (op0,
15539 strict_overflow_p);
15540
15541 default:
15542 break;
15543 }
15544
15545 return false;
15546 }
15547
15548 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15549 For floating point we further ensure that T is not denormal.
15550 Similar logic is present in nonzero_address in rtlanal.h.
15551
15552 If the return value is based on the assumption that signed overflow
15553 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15554 change *STRICT_OVERFLOW_P. */
15555
15556 bool
15557 tree_binary_nonzero_warnv_p (enum tree_code code,
15558 tree type,
15559 tree op0,
15560 tree op1, bool *strict_overflow_p)
15561 {
15562 bool sub_strict_overflow_p;
15563 switch (code)
15564 {
15565 case POINTER_PLUS_EXPR:
15566 case PLUS_EXPR:
15567 if (TYPE_OVERFLOW_UNDEFINED (type))
15568 {
15569 /* With the presence of negative values it is hard
15570 to say something. */
15571 sub_strict_overflow_p = false;
15572 if (!tree_expr_nonnegative_warnv_p (op0,
15573 &sub_strict_overflow_p)
15574 || !tree_expr_nonnegative_warnv_p (op1,
15575 &sub_strict_overflow_p))
15576 return false;
15577 /* One of operands must be positive and the other non-negative. */
15578 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15579 overflows, on a twos-complement machine the sum of two
15580 nonnegative numbers can never be zero. */
15581 return (tree_expr_nonzero_warnv_p (op0,
15582 strict_overflow_p)
15583 || tree_expr_nonzero_warnv_p (op1,
15584 strict_overflow_p));
15585 }
15586 break;
15587
15588 case MULT_EXPR:
15589 if (TYPE_OVERFLOW_UNDEFINED (type))
15590 {
15591 if (tree_expr_nonzero_warnv_p (op0,
15592 strict_overflow_p)
15593 && tree_expr_nonzero_warnv_p (op1,
15594 strict_overflow_p))
15595 {
15596 *strict_overflow_p = true;
15597 return true;
15598 }
15599 }
15600 break;
15601
15602 case MIN_EXPR:
15603 sub_strict_overflow_p = false;
15604 if (tree_expr_nonzero_warnv_p (op0,
15605 &sub_strict_overflow_p)
15606 && tree_expr_nonzero_warnv_p (op1,
15607 &sub_strict_overflow_p))
15608 {
15609 if (sub_strict_overflow_p)
15610 *strict_overflow_p = true;
15611 }
15612 break;
15613
15614 case MAX_EXPR:
15615 sub_strict_overflow_p = false;
15616 if (tree_expr_nonzero_warnv_p (op0,
15617 &sub_strict_overflow_p))
15618 {
15619 if (sub_strict_overflow_p)
15620 *strict_overflow_p = true;
15621
15622 /* When both operands are nonzero, then MAX must be too. */
15623 if (tree_expr_nonzero_warnv_p (op1,
15624 strict_overflow_p))
15625 return true;
15626
15627 /* MAX where operand 0 is positive is positive. */
15628 return tree_expr_nonnegative_warnv_p (op0,
15629 strict_overflow_p);
15630 }
15631 /* MAX where operand 1 is positive is positive. */
15632 else if (tree_expr_nonzero_warnv_p (op1,
15633 &sub_strict_overflow_p)
15634 && tree_expr_nonnegative_warnv_p (op1,
15635 &sub_strict_overflow_p))
15636 {
15637 if (sub_strict_overflow_p)
15638 *strict_overflow_p = true;
15639 return true;
15640 }
15641 break;
15642
15643 case BIT_IOR_EXPR:
15644 return (tree_expr_nonzero_warnv_p (op1,
15645 strict_overflow_p)
15646 || tree_expr_nonzero_warnv_p (op0,
15647 strict_overflow_p));
15648
15649 default:
15650 break;
15651 }
15652
15653 return false;
15654 }
15655
15656 /* Return true when T is an address and is known to be nonzero.
15657 For floating point we further ensure that T is not denormal.
15658 Similar logic is present in nonzero_address in rtlanal.h.
15659
15660 If the return value is based on the assumption that signed overflow
15661 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15662 change *STRICT_OVERFLOW_P. */
15663
15664 bool
15665 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15666 {
15667 bool sub_strict_overflow_p;
15668 switch (TREE_CODE (t))
15669 {
15670 case INTEGER_CST:
15671 return !integer_zerop (t);
15672
15673 case ADDR_EXPR:
15674 {
15675 tree base = TREE_OPERAND (t, 0);
15676 if (!DECL_P (base))
15677 base = get_base_address (base);
15678
15679 if (!base)
15680 return false;
15681
15682 /* Weak declarations may link to NULL. Other things may also be NULL
15683 so protect with -fdelete-null-pointer-checks; but not variables
15684 allocated on the stack. */
15685 if (DECL_P (base)
15686 && (flag_delete_null_pointer_checks
15687 || (DECL_CONTEXT (base)
15688 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15689 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15690 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15691
15692 /* Constants are never weak. */
15693 if (CONSTANT_CLASS_P (base))
15694 return true;
15695
15696 return false;
15697 }
15698
15699 case COND_EXPR:
15700 sub_strict_overflow_p = false;
15701 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15702 &sub_strict_overflow_p)
15703 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15704 &sub_strict_overflow_p))
15705 {
15706 if (sub_strict_overflow_p)
15707 *strict_overflow_p = true;
15708 return true;
15709 }
15710 break;
15711
15712 default:
15713 break;
15714 }
15715 return false;
15716 }
15717
15718 /* Return true when T is an address and is known to be nonzero.
15719 For floating point we further ensure that T is not denormal.
15720 Similar logic is present in nonzero_address in rtlanal.h.
15721
15722 If the return value is based on the assumption that signed overflow
15723 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15724 change *STRICT_OVERFLOW_P. */
15725
15726 bool
15727 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15728 {
15729 tree type = TREE_TYPE (t);
15730 enum tree_code code;
15731
15732 /* Doing something useful for floating point would need more work. */
15733 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15734 return false;
15735
15736 code = TREE_CODE (t);
15737 switch (TREE_CODE_CLASS (code))
15738 {
15739 case tcc_unary:
15740 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15741 strict_overflow_p);
15742 case tcc_binary:
15743 case tcc_comparison:
15744 return tree_binary_nonzero_warnv_p (code, type,
15745 TREE_OPERAND (t, 0),
15746 TREE_OPERAND (t, 1),
15747 strict_overflow_p);
15748 case tcc_constant:
15749 case tcc_declaration:
15750 case tcc_reference:
15751 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15752
15753 default:
15754 break;
15755 }
15756
15757 switch (code)
15758 {
15759 case TRUTH_NOT_EXPR:
15760 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15761 strict_overflow_p);
15762
15763 case TRUTH_AND_EXPR:
15764 case TRUTH_OR_EXPR:
15765 case TRUTH_XOR_EXPR:
15766 return tree_binary_nonzero_warnv_p (code, type,
15767 TREE_OPERAND (t, 0),
15768 TREE_OPERAND (t, 1),
15769 strict_overflow_p);
15770
15771 case COND_EXPR:
15772 case CONSTRUCTOR:
15773 case OBJ_TYPE_REF:
15774 case ASSERT_EXPR:
15775 case ADDR_EXPR:
15776 case WITH_SIZE_EXPR:
15777 case SSA_NAME:
15778 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15779
15780 case COMPOUND_EXPR:
15781 case MODIFY_EXPR:
15782 case BIND_EXPR:
15783 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15784 strict_overflow_p);
15785
15786 case SAVE_EXPR:
15787 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15788 strict_overflow_p);
15789
15790 case CALL_EXPR:
15791 return alloca_call_p (t);
15792
15793 default:
15794 break;
15795 }
15796 return false;
15797 }
15798
15799 /* Return true when T is an address and is known to be nonzero.
15800 Handle warnings about undefined signed overflow. */
15801
15802 bool
15803 tree_expr_nonzero_p (tree t)
15804 {
15805 bool ret, strict_overflow_p;
15806
15807 strict_overflow_p = false;
15808 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15809 if (strict_overflow_p)
15810 fold_overflow_warning (("assuming signed overflow does not occur when "
15811 "determining that expression is always "
15812 "non-zero"),
15813 WARN_STRICT_OVERFLOW_MISC);
15814 return ret;
15815 }
15816
15817 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15818 attempt to fold the expression to a constant without modifying TYPE,
15819 OP0 or OP1.
15820
15821 If the expression could be simplified to a constant, then return
15822 the constant. If the expression would not be simplified to a
15823 constant, then return NULL_TREE. */
15824
15825 tree
15826 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15827 {
15828 tree tem = fold_binary (code, type, op0, op1);
15829 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15830 }
15831
15832 /* Given the components of a unary expression CODE, TYPE and OP0,
15833 attempt to fold the expression to a constant without modifying
15834 TYPE or OP0.
15835
15836 If the expression could be simplified to a constant, then return
15837 the constant. If the expression would not be simplified to a
15838 constant, then return NULL_TREE. */
15839
15840 tree
15841 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15842 {
15843 tree tem = fold_unary (code, type, op0);
15844 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15845 }
15846
15847 /* If EXP represents referencing an element in a constant string
15848 (either via pointer arithmetic or array indexing), return the
15849 tree representing the value accessed, otherwise return NULL. */
15850
15851 tree
15852 fold_read_from_constant_string (tree exp)
15853 {
15854 if ((TREE_CODE (exp) == INDIRECT_REF
15855 || TREE_CODE (exp) == ARRAY_REF)
15856 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15857 {
15858 tree exp1 = TREE_OPERAND (exp, 0);
15859 tree index;
15860 tree string;
15861 location_t loc = EXPR_LOCATION (exp);
15862
15863 if (TREE_CODE (exp) == INDIRECT_REF)
15864 string = string_constant (exp1, &index);
15865 else
15866 {
15867 tree low_bound = array_ref_low_bound (exp);
15868 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15869
15870 /* Optimize the special-case of a zero lower bound.
15871
15872 We convert the low_bound to sizetype to avoid some problems
15873 with constant folding. (E.g. suppose the lower bound is 1,
15874 and its mode is QI. Without the conversion,l (ARRAY
15875 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15876 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15877 if (! integer_zerop (low_bound))
15878 index = size_diffop_loc (loc, index,
15879 fold_convert_loc (loc, sizetype, low_bound));
15880
15881 string = exp1;
15882 }
15883
15884 if (string
15885 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15886 && TREE_CODE (string) == STRING_CST
15887 && TREE_CODE (index) == INTEGER_CST
15888 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15889 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15890 == MODE_INT)
15891 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15892 return build_int_cst_type (TREE_TYPE (exp),
15893 (TREE_STRING_POINTER (string)
15894 [TREE_INT_CST_LOW (index)]));
15895 }
15896 return NULL;
15897 }
15898
15899 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15900 an integer constant, real, or fixed-point constant.
15901
15902 TYPE is the type of the result. */
15903
15904 static tree
15905 fold_negate_const (tree arg0, tree type)
15906 {
15907 tree t = NULL_TREE;
15908
15909 switch (TREE_CODE (arg0))
15910 {
15911 case INTEGER_CST:
15912 {
15913 double_int val = tree_to_double_int (arg0);
15914 int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15915
15916 t = force_fit_type_double (type, val, 1,
15917 (overflow | TREE_OVERFLOW (arg0))
15918 && !TYPE_UNSIGNED (type));
15919 break;
15920 }
15921
15922 case REAL_CST:
15923 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15924 break;
15925
15926 case FIXED_CST:
15927 {
15928 FIXED_VALUE_TYPE f;
15929 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15930 &(TREE_FIXED_CST (arg0)), NULL,
15931 TYPE_SATURATING (type));
15932 t = build_fixed (type, f);
15933 /* Propagate overflow flags. */
15934 if (overflow_p | TREE_OVERFLOW (arg0))
15935 TREE_OVERFLOW (t) = 1;
15936 break;
15937 }
15938
15939 default:
15940 gcc_unreachable ();
15941 }
15942
15943 return t;
15944 }
15945
15946 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15947 an integer constant or real constant.
15948
15949 TYPE is the type of the result. */
15950
15951 tree
15952 fold_abs_const (tree arg0, tree type)
15953 {
15954 tree t = NULL_TREE;
15955
15956 switch (TREE_CODE (arg0))
15957 {
15958 case INTEGER_CST:
15959 {
15960 double_int val = tree_to_double_int (arg0);
15961
15962 /* If the value is unsigned or non-negative, then the absolute value
15963 is the same as the ordinary value. */
15964 if (TYPE_UNSIGNED (type)
15965 || !double_int_negative_p (val))
15966 t = arg0;
15967
15968 /* If the value is negative, then the absolute value is
15969 its negation. */
15970 else
15971 {
15972 int overflow;
15973
15974 overflow = neg_double (val.low, val.high, &val.low, &val.high);
15975 t = force_fit_type_double (type, val, -1,
15976 overflow | TREE_OVERFLOW (arg0));
15977 }
15978 }
15979 break;
15980
15981 case REAL_CST:
15982 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15983 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15984 else
15985 t = arg0;
15986 break;
15987
15988 default:
15989 gcc_unreachable ();
15990 }
15991
15992 return t;
15993 }
15994
15995 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15996 constant. TYPE is the type of the result. */
15997
15998 static tree
15999 fold_not_const (const_tree arg0, tree type)
16000 {
16001 double_int val;
16002
16003 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16004
16005 val = double_int_not (tree_to_double_int (arg0));
16006 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16007 }
16008
16009 /* Given CODE, a relational operator, the target type, TYPE and two
16010 constant operands OP0 and OP1, return the result of the
16011 relational operation. If the result is not a compile time
16012 constant, then return NULL_TREE. */
16013
16014 static tree
16015 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16016 {
16017 int result, invert;
16018
16019 /* From here on, the only cases we handle are when the result is
16020 known to be a constant. */
16021
16022 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16023 {
16024 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16025 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16026
16027 /* Handle the cases where either operand is a NaN. */
16028 if (real_isnan (c0) || real_isnan (c1))
16029 {
16030 switch (code)
16031 {
16032 case EQ_EXPR:
16033 case ORDERED_EXPR:
16034 result = 0;
16035 break;
16036
16037 case NE_EXPR:
16038 case UNORDERED_EXPR:
16039 case UNLT_EXPR:
16040 case UNLE_EXPR:
16041 case UNGT_EXPR:
16042 case UNGE_EXPR:
16043 case UNEQ_EXPR:
16044 result = 1;
16045 break;
16046
16047 case LT_EXPR:
16048 case LE_EXPR:
16049 case GT_EXPR:
16050 case GE_EXPR:
16051 case LTGT_EXPR:
16052 if (flag_trapping_math)
16053 return NULL_TREE;
16054 result = 0;
16055 break;
16056
16057 default:
16058 gcc_unreachable ();
16059 }
16060
16061 return constant_boolean_node (result, type);
16062 }
16063
16064 return constant_boolean_node (real_compare (code, c0, c1), type);
16065 }
16066
16067 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16068 {
16069 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16070 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16071 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16072 }
16073
16074 /* Handle equality/inequality of complex constants. */
16075 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16076 {
16077 tree rcond = fold_relational_const (code, type,
16078 TREE_REALPART (op0),
16079 TREE_REALPART (op1));
16080 tree icond = fold_relational_const (code, type,
16081 TREE_IMAGPART (op0),
16082 TREE_IMAGPART (op1));
16083 if (code == EQ_EXPR)
16084 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16085 else if (code == NE_EXPR)
16086 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16087 else
16088 return NULL_TREE;
16089 }
16090
16091 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16092
16093 To compute GT, swap the arguments and do LT.
16094 To compute GE, do LT and invert the result.
16095 To compute LE, swap the arguments, do LT and invert the result.
16096 To compute NE, do EQ and invert the result.
16097
16098 Therefore, the code below must handle only EQ and LT. */
16099
16100 if (code == LE_EXPR || code == GT_EXPR)
16101 {
16102 tree tem = op0;
16103 op0 = op1;
16104 op1 = tem;
16105 code = swap_tree_comparison (code);
16106 }
16107
16108 /* Note that it is safe to invert for real values here because we
16109 have already handled the one case that it matters. */
16110
16111 invert = 0;
16112 if (code == NE_EXPR || code == GE_EXPR)
16113 {
16114 invert = 1;
16115 code = invert_tree_comparison (code, false);
16116 }
16117
16118 /* Compute a result for LT or EQ if args permit;
16119 Otherwise return T. */
16120 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16121 {
16122 if (code == EQ_EXPR)
16123 result = tree_int_cst_equal (op0, op1);
16124 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16125 result = INT_CST_LT_UNSIGNED (op0, op1);
16126 else
16127 result = INT_CST_LT (op0, op1);
16128 }
16129 else
16130 return NULL_TREE;
16131
16132 if (invert)
16133 result ^= 1;
16134 return constant_boolean_node (result, type);
16135 }
16136
16137 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16138 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16139 itself. */
16140
16141 tree
16142 fold_build_cleanup_point_expr (tree type, tree expr)
16143 {
16144 /* If the expression does not have side effects then we don't have to wrap
16145 it with a cleanup point expression. */
16146 if (!TREE_SIDE_EFFECTS (expr))
16147 return expr;
16148
16149 /* If the expression is a return, check to see if the expression inside the
16150 return has no side effects or the right hand side of the modify expression
16151 inside the return. If either don't have side effects set we don't need to
16152 wrap the expression in a cleanup point expression. Note we don't check the
16153 left hand side of the modify because it should always be a return decl. */
16154 if (TREE_CODE (expr) == RETURN_EXPR)
16155 {
16156 tree op = TREE_OPERAND (expr, 0);
16157 if (!op || !TREE_SIDE_EFFECTS (op))
16158 return expr;
16159 op = TREE_OPERAND (op, 1);
16160 if (!TREE_SIDE_EFFECTS (op))
16161 return expr;
16162 }
16163
16164 return build1 (CLEANUP_POINT_EXPR, type, expr);
16165 }
16166
16167 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16168 of an indirection through OP0, or NULL_TREE if no simplification is
16169 possible. */
16170
16171 tree
16172 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16173 {
16174 tree sub = op0;
16175 tree subtype;
16176
16177 STRIP_NOPS (sub);
16178 subtype = TREE_TYPE (sub);
16179 if (!POINTER_TYPE_P (subtype))
16180 return NULL_TREE;
16181
16182 if (TREE_CODE (sub) == ADDR_EXPR)
16183 {
16184 tree op = TREE_OPERAND (sub, 0);
16185 tree optype = TREE_TYPE (op);
16186 /* *&CONST_DECL -> to the value of the const decl. */
16187 if (TREE_CODE (op) == CONST_DECL)
16188 return DECL_INITIAL (op);
16189 /* *&p => p; make sure to handle *&"str"[cst] here. */
16190 if (type == optype)
16191 {
16192 tree fop = fold_read_from_constant_string (op);
16193 if (fop)
16194 return fop;
16195 else
16196 return op;
16197 }
16198 /* *(foo *)&fooarray => fooarray[0] */
16199 else if (TREE_CODE (optype) == ARRAY_TYPE
16200 && type == TREE_TYPE (optype)
16201 && (!in_gimple_form
16202 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16203 {
16204 tree type_domain = TYPE_DOMAIN (optype);
16205 tree min_val = size_zero_node;
16206 if (type_domain && TYPE_MIN_VALUE (type_domain))
16207 min_val = TYPE_MIN_VALUE (type_domain);
16208 if (in_gimple_form
16209 && TREE_CODE (min_val) != INTEGER_CST)
16210 return NULL_TREE;
16211 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16212 NULL_TREE, NULL_TREE);
16213 }
16214 /* *(foo *)&complexfoo => __real__ complexfoo */
16215 else if (TREE_CODE (optype) == COMPLEX_TYPE
16216 && type == TREE_TYPE (optype))
16217 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16218 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16219 else if (TREE_CODE (optype) == VECTOR_TYPE
16220 && type == TREE_TYPE (optype))
16221 {
16222 tree part_width = TYPE_SIZE (type);
16223 tree index = bitsize_int (0);
16224 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16225 }
16226 }
16227
16228 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16229 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16230 {
16231 tree op00 = TREE_OPERAND (sub, 0);
16232 tree op01 = TREE_OPERAND (sub, 1);
16233
16234 STRIP_NOPS (op00);
16235 if (TREE_CODE (op00) == ADDR_EXPR)
16236 {
16237 tree op00type;
16238 op00 = TREE_OPERAND (op00, 0);
16239 op00type = TREE_TYPE (op00);
16240
16241 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16242 if (TREE_CODE (op00type) == VECTOR_TYPE
16243 && type == TREE_TYPE (op00type))
16244 {
16245 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16246 tree part_width = TYPE_SIZE (type);
16247 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16248 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16249 tree index = bitsize_int (indexi);
16250
16251 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16252 return fold_build3_loc (loc,
16253 BIT_FIELD_REF, type, op00,
16254 part_width, index);
16255
16256 }
16257 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16258 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16259 && type == TREE_TYPE (op00type))
16260 {
16261 tree size = TYPE_SIZE_UNIT (type);
16262 if (tree_int_cst_equal (size, op01))
16263 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16264 }
16265 /* ((foo *)&fooarray)[1] => fooarray[1] */
16266 else if (TREE_CODE (op00type) == ARRAY_TYPE
16267 && type == TREE_TYPE (op00type))
16268 {
16269 tree type_domain = TYPE_DOMAIN (op00type);
16270 tree min_val = size_zero_node;
16271 if (type_domain && TYPE_MIN_VALUE (type_domain))
16272 min_val = TYPE_MIN_VALUE (type_domain);
16273 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16274 TYPE_SIZE_UNIT (type));
16275 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16276 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16277 NULL_TREE, NULL_TREE);
16278 }
16279 }
16280 }
16281
16282 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16283 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16284 && type == TREE_TYPE (TREE_TYPE (subtype))
16285 && (!in_gimple_form
16286 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16287 {
16288 tree type_domain;
16289 tree min_val = size_zero_node;
16290 sub = build_fold_indirect_ref_loc (loc, sub);
16291 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16292 if (type_domain && TYPE_MIN_VALUE (type_domain))
16293 min_val = TYPE_MIN_VALUE (type_domain);
16294 if (in_gimple_form
16295 && TREE_CODE (min_val) != INTEGER_CST)
16296 return NULL_TREE;
16297 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16298 NULL_TREE);
16299 }
16300
16301 return NULL_TREE;
16302 }
16303
16304 /* Builds an expression for an indirection through T, simplifying some
16305 cases. */
16306
16307 tree
16308 build_fold_indirect_ref_loc (location_t loc, tree t)
16309 {
16310 tree type = TREE_TYPE (TREE_TYPE (t));
16311 tree sub = fold_indirect_ref_1 (loc, type, t);
16312
16313 if (sub)
16314 return sub;
16315
16316 return build1_loc (loc, INDIRECT_REF, type, t);
16317 }
16318
16319 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16320
16321 tree
16322 fold_indirect_ref_loc (location_t loc, tree t)
16323 {
16324 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16325
16326 if (sub)
16327 return sub;
16328 else
16329 return t;
16330 }
16331
16332 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16333 whose result is ignored. The type of the returned tree need not be
16334 the same as the original expression. */
16335
16336 tree
16337 fold_ignored_result (tree t)
16338 {
16339 if (!TREE_SIDE_EFFECTS (t))
16340 return integer_zero_node;
16341
16342 for (;;)
16343 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16344 {
16345 case tcc_unary:
16346 t = TREE_OPERAND (t, 0);
16347 break;
16348
16349 case tcc_binary:
16350 case tcc_comparison:
16351 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16352 t = TREE_OPERAND (t, 0);
16353 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16354 t = TREE_OPERAND (t, 1);
16355 else
16356 return t;
16357 break;
16358
16359 case tcc_expression:
16360 switch (TREE_CODE (t))
16361 {
16362 case COMPOUND_EXPR:
16363 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16364 return t;
16365 t = TREE_OPERAND (t, 0);
16366 break;
16367
16368 case COND_EXPR:
16369 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16370 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16371 return t;
16372 t = TREE_OPERAND (t, 0);
16373 break;
16374
16375 default:
16376 return t;
16377 }
16378 break;
16379
16380 default:
16381 return t;
16382 }
16383 }
16384
16385 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16386 This can only be applied to objects of a sizetype. */
16387
16388 tree
16389 round_up_loc (location_t loc, tree value, int divisor)
16390 {
16391 tree div = NULL_TREE;
16392
16393 gcc_assert (divisor > 0);
16394 if (divisor == 1)
16395 return value;
16396
16397 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16398 have to do anything. Only do this when we are not given a const,
16399 because in that case, this check is more expensive than just
16400 doing it. */
16401 if (TREE_CODE (value) != INTEGER_CST)
16402 {
16403 div = build_int_cst (TREE_TYPE (value), divisor);
16404
16405 if (multiple_of_p (TREE_TYPE (value), value, div))
16406 return value;
16407 }
16408
16409 /* If divisor is a power of two, simplify this to bit manipulation. */
16410 if (divisor == (divisor & -divisor))
16411 {
16412 if (TREE_CODE (value) == INTEGER_CST)
16413 {
16414 double_int val = tree_to_double_int (value);
16415 bool overflow_p;
16416
16417 if ((val.low & (divisor - 1)) == 0)
16418 return value;
16419
16420 overflow_p = TREE_OVERFLOW (value);
16421 val.low &= ~(divisor - 1);
16422 val.low += divisor;
16423 if (val.low == 0)
16424 {
16425 val.high++;
16426 if (val.high == 0)
16427 overflow_p = true;
16428 }
16429
16430 return force_fit_type_double (TREE_TYPE (value), val,
16431 -1, overflow_p);
16432 }
16433 else
16434 {
16435 tree t;
16436
16437 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16438 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16439 t = build_int_cst (TREE_TYPE (value), -divisor);
16440 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16441 }
16442 }
16443 else
16444 {
16445 if (!div)
16446 div = build_int_cst (TREE_TYPE (value), divisor);
16447 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16448 value = size_binop_loc (loc, MULT_EXPR, value, div);
16449 }
16450
16451 return value;
16452 }
16453
16454 /* Likewise, but round down. */
16455
16456 tree
16457 round_down_loc (location_t loc, tree value, int divisor)
16458 {
16459 tree div = NULL_TREE;
16460
16461 gcc_assert (divisor > 0);
16462 if (divisor == 1)
16463 return value;
16464
16465 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16466 have to do anything. Only do this when we are not given a const,
16467 because in that case, this check is more expensive than just
16468 doing it. */
16469 if (TREE_CODE (value) != INTEGER_CST)
16470 {
16471 div = build_int_cst (TREE_TYPE (value), divisor);
16472
16473 if (multiple_of_p (TREE_TYPE (value), value, div))
16474 return value;
16475 }
16476
16477 /* If divisor is a power of two, simplify this to bit manipulation. */
16478 if (divisor == (divisor & -divisor))
16479 {
16480 tree t;
16481
16482 t = build_int_cst (TREE_TYPE (value), -divisor);
16483 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16484 }
16485 else
16486 {
16487 if (!div)
16488 div = build_int_cst (TREE_TYPE (value), divisor);
16489 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16490 value = size_binop_loc (loc, MULT_EXPR, value, div);
16491 }
16492
16493 return value;
16494 }
16495
16496 /* Returns the pointer to the base of the object addressed by EXP and
16497 extracts the information about the offset of the access, storing it
16498 to PBITPOS and POFFSET. */
16499
16500 static tree
16501 split_address_to_core_and_offset (tree exp,
16502 HOST_WIDE_INT *pbitpos, tree *poffset)
16503 {
16504 tree core;
16505 enum machine_mode mode;
16506 int unsignedp, volatilep;
16507 HOST_WIDE_INT bitsize;
16508 location_t loc = EXPR_LOCATION (exp);
16509
16510 if (TREE_CODE (exp) == ADDR_EXPR)
16511 {
16512 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16513 poffset, &mode, &unsignedp, &volatilep,
16514 false);
16515 core = build_fold_addr_expr_loc (loc, core);
16516 }
16517 else
16518 {
16519 core = exp;
16520 *pbitpos = 0;
16521 *poffset = NULL_TREE;
16522 }
16523
16524 return core;
16525 }
16526
16527 /* Returns true if addresses of E1 and E2 differ by a constant, false
16528 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16529
16530 bool
16531 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16532 {
16533 tree core1, core2;
16534 HOST_WIDE_INT bitpos1, bitpos2;
16535 tree toffset1, toffset2, tdiff, type;
16536
16537 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16538 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16539
16540 if (bitpos1 % BITS_PER_UNIT != 0
16541 || bitpos2 % BITS_PER_UNIT != 0
16542 || !operand_equal_p (core1, core2, 0))
16543 return false;
16544
16545 if (toffset1 && toffset2)
16546 {
16547 type = TREE_TYPE (toffset1);
16548 if (type != TREE_TYPE (toffset2))
16549 toffset2 = fold_convert (type, toffset2);
16550
16551 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16552 if (!cst_and_fits_in_hwi (tdiff))
16553 return false;
16554
16555 *diff = int_cst_value (tdiff);
16556 }
16557 else if (toffset1 || toffset2)
16558 {
16559 /* If only one of the offsets is non-constant, the difference cannot
16560 be a constant. */
16561 return false;
16562 }
16563 else
16564 *diff = 0;
16565
16566 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16567 return true;
16568 }
16569
16570 /* Simplify the floating point expression EXP when the sign of the
16571 result is not significant. Return NULL_TREE if no simplification
16572 is possible. */
16573
16574 tree
16575 fold_strip_sign_ops (tree exp)
16576 {
16577 tree arg0, arg1;
16578 location_t loc = EXPR_LOCATION (exp);
16579
16580 switch (TREE_CODE (exp))
16581 {
16582 case ABS_EXPR:
16583 case NEGATE_EXPR:
16584 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16585 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16586
16587 case MULT_EXPR:
16588 case RDIV_EXPR:
16589 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16590 return NULL_TREE;
16591 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16592 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16593 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16594 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16595 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16596 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16597 break;
16598
16599 case COMPOUND_EXPR:
16600 arg0 = TREE_OPERAND (exp, 0);
16601 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16602 if (arg1)
16603 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16604 break;
16605
16606 case COND_EXPR:
16607 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16608 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16609 if (arg0 || arg1)
16610 return fold_build3_loc (loc,
16611 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16612 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16613 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16614 break;
16615
16616 case CALL_EXPR:
16617 {
16618 const enum built_in_function fcode = builtin_mathfn_code (exp);
16619 switch (fcode)
16620 {
16621 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16622 /* Strip copysign function call, return the 1st argument. */
16623 arg0 = CALL_EXPR_ARG (exp, 0);
16624 arg1 = CALL_EXPR_ARG (exp, 1);
16625 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16626
16627 default:
16628 /* Strip sign ops from the argument of "odd" math functions. */
16629 if (negate_mathfn_p (fcode))
16630 {
16631 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16632 if (arg0)
16633 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16634 }
16635 break;
16636 }
16637 }
16638 break;
16639
16640 default:
16641 break;
16642 }
16643 return NULL_TREE;
16644 }