gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012, 2013 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide and size_binop.
31
32 fold takes a tree as argument and returns a simplified tree.
33
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
37
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'.
40
41 Note: Since the folders get called on non-gimple code as well as
42 gimple code, we need to handle GIMPLE tuples as well as their
43 corresponding tree equivalents. */
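/* A minimal usage sketch (illustrative only, assuming the usual
   tree-building helpers from tree.h are in scope):

     tree four = size_int (4);
     tree sz = size_binop (MULT_EXPR, four, size_int (8));
     tree simp = fold (build2 (PLUS_EXPR, sizetype, sz, size_int (0)));

   size_int (4) yields the sizetype constant 4, size_binop folds the
   multiplication to the constant 32 on the spot, and fold simplifies
   the redundant addition of zero away.  */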
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "realmpfr.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "target.h"
56 #include "diagnostic-core.h"
57 #include "intl.h"
58 #include "ggc.h"
59 #include "hash-table.h"
60 #include "langhooks.h"
61 #include "md5.h"
62 #include "gimple.h"
63 #include "tree-flow.h"
64
65 /* Nonzero if we are folding constants inside an initializer; zero
66 otherwise. */
67 int folding_initializer = 0;
68
69 /* The following constants represent a bit based encoding of GCC's
70 comparison operators. This encoding simplifies transformations
71 on relational comparison operators, such as AND and OR. */
72 enum comparison_code {
73 COMPCODE_FALSE = 0,
74 COMPCODE_LT = 1,
75 COMPCODE_EQ = 2,
76 COMPCODE_LE = 3,
77 COMPCODE_GT = 4,
78 COMPCODE_LTGT = 5,
79 COMPCODE_GE = 6,
80 COMPCODE_ORD = 7,
81 COMPCODE_UNORD = 8,
82 COMPCODE_UNLT = 9,
83 COMPCODE_UNEQ = 10,
84 COMPCODE_UNLE = 11,
85 COMPCODE_UNGT = 12,
86 COMPCODE_NE = 13,
87 COMPCODE_UNGE = 14,
88 COMPCODE_TRUE = 15
89 };
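/* Illustration of the encoding: bit 0 means "less", bit 1 "equal",
   bit 2 "greater" and bit 3 "unordered", so a combined comparison is
   just the bitwise OR of its basic parts, e.g.

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)

   which is what makes ANDing and ORing two comparisons of the same
   operands a matter of ANDing and ORing their codes.  */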
90
91 static bool negate_mathfn_p (enum built_in_function);
92 static bool negate_expr_p (tree);
93 static tree negate_expr (tree);
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
96 static tree const_binop (enum tree_code, tree, tree);
97 static enum comparison_code comparison_to_compcode (enum tree_code);
98 static enum tree_code compcode_to_comparison (enum comparison_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (location_t, tree, tree,
105 HOST_WIDE_INT, HOST_WIDE_INT, int);
106 static tree optimize_bit_field_compare (location_t, enum tree_code,
107 tree, tree, tree);
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
109 HOST_WIDE_INT *,
110 enum machine_mode *, int *, int *,
111 tree *, tree *);
112 static int all_ones_mask_p (const_tree, int);
113 static tree sign_bit_p (tree, const_tree);
114 static int simple_operand_p (const_tree);
115 static bool simple_operand_p_2 (tree);
116 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
117 static tree range_predecessor (tree);
118 static tree range_successor (tree);
119 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
120 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
121 static tree unextend (tree, int, int, tree);
122 static tree optimize_minmax_comparison (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
125 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
126 static tree fold_binary_op_with_conditional_arg (location_t,
127 enum tree_code, tree,
128 tree, tree,
129 tree, tree, int);
130 static tree fold_mathfn_compare (location_t,
131 enum built_in_function, enum tree_code,
132 tree, tree, tree);
133 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
135 static bool reorder_operands_p (const_tree, const_tree);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
140
141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
142 Otherwise, return LOC. */
143
144 static location_t
145 expr_location_or (tree t, location_t loc)
146 {
147 location_t tloc = EXPR_LOCATION (t);
148 return tloc == UNKNOWN_LOCATION ? loc : tloc;
149 }
150
151 /* Similar to protected_set_expr_location, but never modify X in place;
152 if the location can and needs to be set, unshare it. */
153
154 static inline tree
155 protected_set_expr_location_unshare (tree x, location_t loc)
156 {
157 if (CAN_HAVE_LOCATION_P (x)
158 && EXPR_LOCATION (x) != loc
159 && !(TREE_CODE (x) == SAVE_EXPR
160 || TREE_CODE (x) == TARGET_EXPR
161 || TREE_CODE (x) == BIND_EXPR))
162 {
163 x = copy_node (x);
164 SET_EXPR_LOCATION (x, loc);
165 }
166 return x;
167 }
168 \f
169 /* If ARG2 divides ARG1 with zero remainder, carry out the division
170 indicated by CODE and return the quotient.
171 Otherwise return NULL_TREE. */
172
173 tree
174 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
175 {
176 double_int quo, rem;
177 int uns;
178
179 /* The signedness of the division is taken from operand two, which
180 does the right thing for POINTER_PLUS_EXPR, where we want
181 a signed division. */
182 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
183
184 quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
185 uns, code, &rem);
186
187 if (rem.is_zero ())
188 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
189
190 return NULL_TREE;
191 }
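/* For example, with INTEGER_CST operands written as plain numbers:
   div_if_zero_remainder (EXACT_DIV_EXPR, 12, 4) yields the constant 3,
   while div_if_zero_remainder (EXACT_DIV_EXPR, 12, 5) yields NULL_TREE
   because the remainder, 2, is nonzero.  */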
192 \f
193 /* This is nonzero if we should defer warnings about undefined
194 overflow. This facility exists because these warnings are a
195 special case. The code to estimate loop iterations does not want
196 to issue any warnings, since it works with expressions which do not
197 occur in user code. Various bits of cleanup code call fold(), but
198 only use the result if it has certain characteristics (e.g., is a
199 constant); that code only wants to issue a warning if the result is
200 used. */
201
202 static int fold_deferring_overflow_warnings;
203
204 /* If a warning about undefined overflow is deferred, this is the
205 warning. Note that this may cause us to turn two warnings into
206 one, but that is fine since it is sufficient to only give one
207 warning per expression. */
208
209 static const char* fold_deferred_overflow_warning;
210
211 /* If a warning about undefined overflow is deferred, this is the
212 level at which the warning should be emitted. */
213
214 static enum warn_strict_overflow_code fold_deferred_overflow_code;
215
216 /* Start deferring overflow warnings. We could use a stack here to
217 permit nested calls, but at present it is not necessary. */
218
219 void
220 fold_defer_overflow_warnings (void)
221 {
222 ++fold_deferring_overflow_warnings;
223 }
224
225 /* Stop deferring overflow warnings. If there is a pending warning,
226 and ISSUE is true, then issue the warning if appropriate. STMT is
227 the statement with which the warning should be associated (used for
228 location information); STMT may be NULL. CODE is the level of the
229 warning--a warn_strict_overflow_code value. This function will use
230 the smaller of CODE and the deferred code when deciding whether to
231 issue the warning. CODE may be zero to mean to always use the
232 deferred code. */
233
234 void
235 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
236 {
237 const char *warnmsg;
238 location_t locus;
239
240 gcc_assert (fold_deferring_overflow_warnings > 0);
241 --fold_deferring_overflow_warnings;
242 if (fold_deferring_overflow_warnings > 0)
243 {
244 if (fold_deferred_overflow_warning != NULL
245 && code != 0
246 && code < (int) fold_deferred_overflow_code)
247 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
248 return;
249 }
250
251 warnmsg = fold_deferred_overflow_warning;
252 fold_deferred_overflow_warning = NULL;
253
254 if (!issue || warnmsg == NULL)
255 return;
256
257 if (gimple_no_warning_p (stmt))
258 return;
259
260 /* Use the smallest code level when deciding to issue the
261 warning. */
262 if (code == 0 || code > (int) fold_deferred_overflow_code)
263 code = fold_deferred_overflow_code;
264
265 if (!issue_strict_overflow_warning (code))
266 return;
267
268 if (stmt == NULL)
269 locus = input_location;
270 else
271 locus = gimple_location (stmt);
272 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
273 }
274
275 /* Stop deferring overflow warnings, ignoring any deferred
276 warnings. */
277
278 void
279 fold_undefer_and_ignore_overflow_warnings (void)
280 {
281 fold_undefer_overflow_warnings (false, NULL, 0);
282 }
283
284 /* Whether we are deferring overflow warnings. */
285
286 bool
287 fold_deferring_overflow_warnings_p (void)
288 {
289 return fold_deferring_overflow_warnings > 0;
290 }
291
292 /* This is called when we fold something based on the fact that signed
293 overflow is undefined. */
294
295 static void
296 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
297 {
298 if (fold_deferring_overflow_warnings > 0)
299 {
300 if (fold_deferred_overflow_warning == NULL
301 || wc < fold_deferred_overflow_code)
302 {
303 fold_deferred_overflow_warning = gmsgid;
304 fold_deferred_overflow_code = wc;
305 }
306 }
307 else if (issue_strict_overflow_warning (wc))
308 warning (OPT_Wstrict_overflow, gmsgid);
309 }
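/* A sketch of the intended pairing, assuming a caller that folds
   speculatively and only wants a warning if the folded result is
   actually used (STMT may be NULL when no statement is at hand):

     fold_defer_overflow_warnings ();
     tree val = fold (expr);
     fold_undefer_overflow_warnings (val && TREE_CONSTANT (val), stmt, 0);

   Calling fold_undefer_and_ignore_overflow_warnings instead discards
   any deferred warning.  */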
310 \f
311 /* Return true if the built-in mathematical function specified by CODE
312 is odd, i.e. -f(x) == f(-x). */
313
314 static bool
315 negate_mathfn_p (enum built_in_function code)
316 {
317 switch (code)
318 {
319 CASE_FLT_FN (BUILT_IN_ASIN):
320 CASE_FLT_FN (BUILT_IN_ASINH):
321 CASE_FLT_FN (BUILT_IN_ATAN):
322 CASE_FLT_FN (BUILT_IN_ATANH):
323 CASE_FLT_FN (BUILT_IN_CASIN):
324 CASE_FLT_FN (BUILT_IN_CASINH):
325 CASE_FLT_FN (BUILT_IN_CATAN):
326 CASE_FLT_FN (BUILT_IN_CATANH):
327 CASE_FLT_FN (BUILT_IN_CBRT):
328 CASE_FLT_FN (BUILT_IN_CPROJ):
329 CASE_FLT_FN (BUILT_IN_CSIN):
330 CASE_FLT_FN (BUILT_IN_CSINH):
331 CASE_FLT_FN (BUILT_IN_CTAN):
332 CASE_FLT_FN (BUILT_IN_CTANH):
333 CASE_FLT_FN (BUILT_IN_ERF):
334 CASE_FLT_FN (BUILT_IN_LLROUND):
335 CASE_FLT_FN (BUILT_IN_LROUND):
336 CASE_FLT_FN (BUILT_IN_ROUND):
337 CASE_FLT_FN (BUILT_IN_SIN):
338 CASE_FLT_FN (BUILT_IN_SINH):
339 CASE_FLT_FN (BUILT_IN_TAN):
340 CASE_FLT_FN (BUILT_IN_TANH):
341 CASE_FLT_FN (BUILT_IN_TRUNC):
342 return true;
343
344 CASE_FLT_FN (BUILT_IN_LLRINT):
345 CASE_FLT_FN (BUILT_IN_LRINT):
346 CASE_FLT_FN (BUILT_IN_NEARBYINT):
347 CASE_FLT_FN (BUILT_IN_RINT):
348 return !flag_rounding_math;
349
350 default:
351 break;
352 }
353 return false;
354 }
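/* For example, sin is odd, so fold may rewrite -sin(x) as sin(-x).
   The rint/nearbyint family only qualifies when -frounding-math is
   off: under a directed run-time rounding mode, rint(-x) need not
   equal -rint(x).  */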
355
356 /* Check whether we may negate an integer constant T without causing
357 overflow. */
358
359 bool
360 may_negate_without_overflow_p (const_tree t)
361 {
362 unsigned HOST_WIDE_INT val;
363 unsigned int prec;
364 tree type;
365
366 gcc_assert (TREE_CODE (t) == INTEGER_CST);
367
368 type = TREE_TYPE (t);
369 if (TYPE_UNSIGNED (type))
370 return false;
371
372 prec = TYPE_PRECISION (type);
373 if (prec > HOST_BITS_PER_WIDE_INT)
374 {
375 if (TREE_INT_CST_LOW (t) != 0)
376 return true;
377 prec -= HOST_BITS_PER_WIDE_INT;
378 val = TREE_INT_CST_HIGH (t);
379 }
380 else
381 val = TREE_INT_CST_LOW (t);
382 if (prec < HOST_BITS_PER_WIDE_INT)
383 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
384 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
385 }
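/* Example: for a 32-bit signed type, this returns false only for the
   most negative value 0x80000000 (INT_MIN), since -INT_MIN does not
   fit in the type; it returns true for every other value, and always
   false for unsigned types.  */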
386
387 /* Determine whether an expression T can be cheaply negated using
388 the function negate_expr without introducing undefined overflow. */
389
390 static bool
391 negate_expr_p (tree t)
392 {
393 tree type;
394
395 if (t == 0)
396 return false;
397
398 type = TREE_TYPE (t);
399
400 STRIP_SIGN_NOPS (t);
401 switch (TREE_CODE (t))
402 {
403 case INTEGER_CST:
404 if (TYPE_OVERFLOW_WRAPS (type))
405 return true;
406
407 /* Check that -CST will not overflow type. */
408 return may_negate_without_overflow_p (t);
409 case BIT_NOT_EXPR:
410 return (INTEGRAL_TYPE_P (type)
411 && TYPE_OVERFLOW_WRAPS (type));
412
413 case FIXED_CST:
414 case NEGATE_EXPR:
415 return true;
416
417 case REAL_CST:
418 /* We want to canonicalize to positive real constants. Pretend
419 that only negative ones can be easily negated. */
420 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
421
422 case COMPLEX_CST:
423 return negate_expr_p (TREE_REALPART (t))
424 && negate_expr_p (TREE_IMAGPART (t));
425
426 case COMPLEX_EXPR:
427 return negate_expr_p (TREE_OPERAND (t, 0))
428 && negate_expr_p (TREE_OPERAND (t, 1));
429
430 case CONJ_EXPR:
431 return negate_expr_p (TREE_OPERAND (t, 0));
432
433 case PLUS_EXPR:
434 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
435 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
436 return false;
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t, 1))
439 && reorder_operands_p (TREE_OPERAND (t, 0),
440 TREE_OPERAND (t, 1)))
441 return true;
442 /* -(A + B) -> (-A) - B. */
443 return negate_expr_p (TREE_OPERAND (t, 0));
444
445 case MINUS_EXPR:
446 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
447 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
448 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
449 && reorder_operands_p (TREE_OPERAND (t, 0),
450 TREE_OPERAND (t, 1));
451
452 case MULT_EXPR:
453 if (TYPE_UNSIGNED (TREE_TYPE (t)))
454 break;
455
456 /* Fall through. */
457
458 case RDIV_EXPR:
459 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
460 return negate_expr_p (TREE_OPERAND (t, 1))
461 || negate_expr_p (TREE_OPERAND (t, 0));
462 break;
463
464 case TRUNC_DIV_EXPR:
465 case ROUND_DIV_EXPR:
466 case FLOOR_DIV_EXPR:
467 case CEIL_DIV_EXPR:
468 case EXACT_DIV_EXPR:
469 /* In general we can't negate A / B, because if A is INT_MIN and
470 B is 1, we may turn this into INT_MIN / -1 which is undefined
471 and actually traps on some architectures. But if overflow is
472 undefined, we can negate, because - (INT_MIN / 1) is an
473 overflow. */
474 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
475 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
476 break;
477 return negate_expr_p (TREE_OPERAND (t, 1))
478 || negate_expr_p (TREE_OPERAND (t, 0));
479
480 case NOP_EXPR:
481 /* Negate -((double)float) as (double)(-float). */
482 if (TREE_CODE (type) == REAL_TYPE)
483 {
484 tree tem = strip_float_extensions (t);
485 if (tem != t)
486 return negate_expr_p (tem);
487 }
488 break;
489
490 case CALL_EXPR:
491 /* Negate -f(x) as f(-x). */
492 if (negate_mathfn_p (builtin_mathfn_code (t)))
493 return negate_expr_p (CALL_EXPR_ARG (t, 0));
494 break;
495
496 case RSHIFT_EXPR:
497 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
498 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
499 {
500 tree op1 = TREE_OPERAND (t, 1);
501 if (TREE_INT_CST_HIGH (op1) == 0
502 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
503 == TREE_INT_CST_LOW (op1))
504 return true;
505 }
506 break;
507
508 default:
509 break;
510 }
511 return false;
512 }
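/* Worked example: in -(x + (-1.0)), operand 1 is a negative real
   constant, which is considered cheaply negatable, so negate_expr_p
   returns true and fold_negate_expr below rewrites the expression as
   1.0 - x.  In -(x + y) neither operand is cheaply negatable, so the
   expression keeps its explicit NEGATE_EXPR.  */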
513
514 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
515 simplification is possible.
516 If negate_expr_p would return true for T, NULL_TREE will never be
517 returned. */
518
519 static tree
520 fold_negate_expr (location_t loc, tree t)
521 {
522 tree type = TREE_TYPE (t);
523 tree tem;
524
525 switch (TREE_CODE (t))
526 {
527 /* Convert - (~A) to A + 1. */
528 case BIT_NOT_EXPR:
529 if (INTEGRAL_TYPE_P (type))
530 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
531 build_int_cst (type, 1));
532 break;
533
534 case INTEGER_CST:
535 tem = fold_negate_const (t, type);
536 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
537 || !TYPE_OVERFLOW_TRAPS (type))
538 return tem;
539 break;
540
541 case REAL_CST:
542 tem = fold_negate_const (t, type);
543 /* Two's complement FP formats, such as c4x, may overflow. */
544 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
545 return tem;
546 break;
547
548 case FIXED_CST:
549 tem = fold_negate_const (t, type);
550 return tem;
551
552 case COMPLEX_CST:
553 {
554 tree rpart = negate_expr (TREE_REALPART (t));
555 tree ipart = negate_expr (TREE_IMAGPART (t));
556
557 if ((TREE_CODE (rpart) == REAL_CST
558 && TREE_CODE (ipart) == REAL_CST)
559 || (TREE_CODE (rpart) == INTEGER_CST
560 && TREE_CODE (ipart) == INTEGER_CST))
561 return build_complex (type, rpart, ipart);
562 }
563 break;
564
565 case COMPLEX_EXPR:
566 if (negate_expr_p (t))
567 return fold_build2_loc (loc, COMPLEX_EXPR, type,
568 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
569 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
570 break;
571
572 case CONJ_EXPR:
573 if (negate_expr_p (t))
574 return fold_build1_loc (loc, CONJ_EXPR, type,
575 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
576 break;
577
578 case NEGATE_EXPR:
579 return TREE_OPERAND (t, 0);
580
581 case PLUS_EXPR:
582 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
583 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
584 {
585 /* -(A + B) -> (-B) - A. */
586 if (negate_expr_p (TREE_OPERAND (t, 1))
587 && reorder_operands_p (TREE_OPERAND (t, 0),
588 TREE_OPERAND (t, 1)))
589 {
590 tem = negate_expr (TREE_OPERAND (t, 1));
591 return fold_build2_loc (loc, MINUS_EXPR, type,
592 tem, TREE_OPERAND (t, 0));
593 }
594
595 /* -(A + B) -> (-A) - B. */
596 if (negate_expr_p (TREE_OPERAND (t, 0)))
597 {
598 tem = negate_expr (TREE_OPERAND (t, 0));
599 return fold_build2_loc (loc, MINUS_EXPR, type,
600 tem, TREE_OPERAND (t, 1));
601 }
602 }
603 break;
604
605 case MINUS_EXPR:
606 /* - (A - B) -> B - A */
607 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
608 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
609 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
610 return fold_build2_loc (loc, MINUS_EXPR, type,
611 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
612 break;
613
614 case MULT_EXPR:
615 if (TYPE_UNSIGNED (type))
616 break;
617
618 /* Fall through. */
619
620 case RDIV_EXPR:
621 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
622 {
623 tem = TREE_OPERAND (t, 1);
624 if (negate_expr_p (tem))
625 return fold_build2_loc (loc, TREE_CODE (t), type,
626 TREE_OPERAND (t, 0), negate_expr (tem));
627 tem = TREE_OPERAND (t, 0);
628 if (negate_expr_p (tem))
629 return fold_build2_loc (loc, TREE_CODE (t), type,
630 negate_expr (tem), TREE_OPERAND (t, 1));
631 }
632 break;
633
634 case TRUNC_DIV_EXPR:
635 case ROUND_DIV_EXPR:
636 case FLOOR_DIV_EXPR:
637 case CEIL_DIV_EXPR:
638 case EXACT_DIV_EXPR:
639 /* In general we can't negate A / B, because if A is INT_MIN and
640 B is 1, we may turn this into INT_MIN / -1 which is undefined
641 and actually traps on some architectures. But if overflow is
642 undefined, we can negate, because - (INT_MIN / 1) is an
643 overflow. */
644 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
645 {
646 const char * const warnmsg = G_("assuming signed overflow does not "
647 "occur when negating a division");
648 tem = TREE_OPERAND (t, 1);
649 if (negate_expr_p (tem))
650 {
651 if (INTEGRAL_TYPE_P (type)
652 && (TREE_CODE (tem) != INTEGER_CST
653 || integer_onep (tem)))
654 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
655 return fold_build2_loc (loc, TREE_CODE (t), type,
656 TREE_OPERAND (t, 0), negate_expr (tem));
657 }
658 tem = TREE_OPERAND (t, 0);
659 if (negate_expr_p (tem))
660 {
661 if (INTEGRAL_TYPE_P (type)
662 && (TREE_CODE (tem) != INTEGER_CST
663 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
664 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
665 return fold_build2_loc (loc, TREE_CODE (t), type,
666 negate_expr (tem), TREE_OPERAND (t, 1));
667 }
668 }
669 break;
670
671 case NOP_EXPR:
672 /* Convert -((double)float) into (double)(-float). */
673 if (TREE_CODE (type) == REAL_TYPE)
674 {
675 tem = strip_float_extensions (t);
676 if (tem != t && negate_expr_p (tem))
677 return fold_convert_loc (loc, type, negate_expr (tem));
678 }
679 break;
680
681 case CALL_EXPR:
682 /* Negate -f(x) as f(-x). */
683 if (negate_mathfn_p (builtin_mathfn_code (t))
684 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
685 {
686 tree fndecl, arg;
687
688 fndecl = get_callee_fndecl (t);
689 arg = negate_expr (CALL_EXPR_ARG (t, 0));
690 return build_call_expr_loc (loc, fndecl, 1, arg);
691 }
692 break;
693
694 case RSHIFT_EXPR:
695 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
696 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
697 {
698 tree op1 = TREE_OPERAND (t, 1);
699 if (TREE_INT_CST_HIGH (op1) == 0
700 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
701 == TREE_INT_CST_LOW (op1))
702 {
703 tree ntype = TYPE_UNSIGNED (type)
704 ? signed_type_for (type)
705 : unsigned_type_for (type);
706 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
707 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
708 return fold_convert_loc (loc, type, temp);
709 }
710 }
711 break;
712
713 default:
714 break;
715 }
716
717 return NULL_TREE;
718 }
719
720 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
721 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
722 return NULL_TREE. */
723
724 static tree
725 negate_expr (tree t)
726 {
727 tree type, tem;
728 location_t loc;
729
730 if (t == NULL_TREE)
731 return NULL_TREE;
732
733 loc = EXPR_LOCATION (t);
734 type = TREE_TYPE (t);
735 STRIP_SIGN_NOPS (t);
736
737 tem = fold_negate_expr (loc, t);
738 if (!tem)
739 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
740 return fold_convert_loc (loc, type, tem);
741 }
742 \f
743 /* Split a tree IN into constant, literal and variable parts that could be
744 combined with CODE to make IN. "constant" means an expression with
745 TREE_CONSTANT but that isn't an actual constant. CODE must be a
746 commutative arithmetic operation. Store the constant part into *CONP,
747 the literal in *LITP and return the variable part. If a part isn't
748 present, set it to null. If the tree does not decompose in this way,
749 return the entire tree as the variable part and the other parts as null.
750
751 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
752 case, we negate an operand that was subtracted. Except if it is a
753 literal for which we use *MINUS_LITP instead.
754
755 If NEGATE_P is true, we are negating all of IN, again except a literal
756 for which we use *MINUS_LITP instead.
757
758 If IN is itself a literal or constant, return it as appropriate.
759
760 Note that we do not guarantee that any of the three values will be the
761 same type as IN, but they will have the same signedness and mode. */
762
763 static tree
764 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
765 tree *minus_litp, int negate_p)
766 {
767 tree var = 0;
768
769 *conp = 0;
770 *litp = 0;
771 *minus_litp = 0;
772
773 /* Strip any conversions that don't change the machine mode or signedness. */
774 STRIP_SIGN_NOPS (in);
775
776 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
777 || TREE_CODE (in) == FIXED_CST)
778 *litp = in;
779 else if (TREE_CODE (in) == code
780 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
781 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
782 /* We can associate addition and subtraction together (even
783 though the C standard doesn't say so) for integers because
784 the value is not affected. For reals, the value might be
785 affected, so we can't. */
786 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
787 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
788 {
789 tree op0 = TREE_OPERAND (in, 0);
790 tree op1 = TREE_OPERAND (in, 1);
791 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
792 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
793
794 /* First see if either of the operands is a literal, then a constant. */
795 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
796 || TREE_CODE (op0) == FIXED_CST)
797 *litp = op0, op0 = 0;
798 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
799 || TREE_CODE (op1) == FIXED_CST)
800 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
801
802 if (op0 != 0 && TREE_CONSTANT (op0))
803 *conp = op0, op0 = 0;
804 else if (op1 != 0 && TREE_CONSTANT (op1))
805 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
806
807 /* If we haven't dealt with either operand, this is not a case we can
808 decompose. Otherwise, VAR is either of the ones remaining, if any. */
809 if (op0 != 0 && op1 != 0)
810 var = in;
811 else if (op0 != 0)
812 var = op0;
813 else
814 var = op1, neg_var_p = neg1_p;
815
816 /* Now do any needed negations. */
817 if (neg_litp_p)
818 *minus_litp = *litp, *litp = 0;
819 if (neg_conp_p)
820 *conp = negate_expr (*conp);
821 if (neg_var_p)
822 var = negate_expr (var);
823 }
824 else if (TREE_CODE (in) == BIT_NOT_EXPR
825 && code == PLUS_EXPR)
826 {
827 /* -X - 1 is folded to ~X, undo that here. */
828 *minus_litp = build_one_cst (TREE_TYPE (in));
829 var = negate_expr (TREE_OPERAND (in, 0));
830 }
831 else if (TREE_CONSTANT (in))
832 *conp = in;
833 else
834 var = in;
835
836 if (negate_p)
837 {
838 if (*litp)
839 *minus_litp = *litp, *litp = 0;
840 else if (*minus_litp)
841 *litp = *minus_litp, *minus_litp = 0;
842 *conp = negate_expr (*conp);
843 var = negate_expr (var);
844 }
845
846 return var;
847 }
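/* Worked examples: splitting IN = x + 3 with CODE == PLUS_EXPR sets
   *LITP to 3 and returns x.  Splitting IN = p - 5 sets *MINUS_LITP
   to 5, since the literal was subtracted, and returns p.  And because
   -X - 1 was canonicalized to ~X earlier, IN = ~x comes back as
   *MINUS_LITP = 1 with -x as the variable part.  */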
848
849 /* Re-associate trees split by the above function. T1 and T2 are
850 either expressions to associate or null. Return the new
851 expression, if any. LOC is the location of the new expression. If
852 we build an operation, do it in TYPE and with CODE. */
853
854 static tree
855 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
856 {
857 if (t1 == 0)
858 return t2;
859 else if (t2 == 0)
860 return t1;
861
862 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
863 try to fold this since we will have infinite recursion. But do
864 deal with any NEGATE_EXPRs. */
865 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
866 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
867 {
868 if (code == PLUS_EXPR)
869 {
870 if (TREE_CODE (t1) == NEGATE_EXPR)
871 return build2_loc (loc, MINUS_EXPR, type,
872 fold_convert_loc (loc, type, t2),
873 fold_convert_loc (loc, type,
874 TREE_OPERAND (t1, 0)));
875 else if (TREE_CODE (t2) == NEGATE_EXPR)
876 return build2_loc (loc, MINUS_EXPR, type,
877 fold_convert_loc (loc, type, t1),
878 fold_convert_loc (loc, type,
879 TREE_OPERAND (t2, 0)));
880 else if (integer_zerop (t2))
881 return fold_convert_loc (loc, type, t1);
882 }
883 else if (code == MINUS_EXPR)
884 {
885 if (integer_zerop (t2))
886 return fold_convert_loc (loc, type, t1);
887 }
888
889 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
890 fold_convert_loc (loc, type, t2));
891 }
892
893 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
894 fold_convert_loc (loc, type, t2));
895 }
896 \f
897 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
898 for use in int_const_binop, size_binop and size_diffop. */
899
900 static bool
901 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
902 {
903 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
904 return false;
905 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
906 return false;
907
908 switch (code)
909 {
910 case LSHIFT_EXPR:
911 case RSHIFT_EXPR:
912 case LROTATE_EXPR:
913 case RROTATE_EXPR:
914 return true;
915
916 default:
917 break;
918 }
919
920 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
921 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
922 && TYPE_MODE (type1) == TYPE_MODE (type2);
923 }
924
925
926 /* Combine two integer constants ARG1 and ARG2 under operation CODE
927 to produce a new constant. Return NULL_TREE if we don't know how
928 to evaluate CODE at compile-time. */
929
930 static tree
931 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
932 int overflowable)
933 {
934 double_int op1, op2, res, tmp;
935 tree t;
936 tree type = TREE_TYPE (arg1);
937 bool uns = TYPE_UNSIGNED (type);
938 bool overflow = false;
939
940 op1 = tree_to_double_int (arg1);
941 op2 = tree_to_double_int (arg2);
942
943 switch (code)
944 {
945 case BIT_IOR_EXPR:
946 res = op1 | op2;
947 break;
948
949 case BIT_XOR_EXPR:
950 res = op1 ^ op2;
951 break;
952
953 case BIT_AND_EXPR:
954 res = op1 & op2;
955 break;
956
957 case RSHIFT_EXPR:
958 res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
959 break;
960
961 case LSHIFT_EXPR:
962 /* It's unclear from the C standard whether shifts can overflow.
963 The following code ignores overflow; perhaps a C standard
964 interpretation ruling is needed. */
965 res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
966 break;
967
968 case RROTATE_EXPR:
969 res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
970 break;
971
972 case LROTATE_EXPR:
973 res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
974 break;
975
976 case PLUS_EXPR:
977 res = op1.add_with_sign (op2, false, &overflow);
978 break;
979
980 case MINUS_EXPR:
981 res = op1.sub_with_overflow (op2, &overflow);
982 break;
983
984 case MULT_EXPR:
985 res = op1.mul_with_sign (op2, false, &overflow);
986 break;
987
988 case MULT_HIGHPART_EXPR:
989 /* ??? Need quad precision, or an additional shift operand
990 to the multiply primitive, to handle very large highparts. */
991 if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
992 return NULL_TREE;
993 tmp = op1 * op2;
994 res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
995 break;
996
997 case TRUNC_DIV_EXPR:
998 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
999 case EXACT_DIV_EXPR:
1000 /* This is a shortcut for a common special case. */
1001 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1002 && !TREE_OVERFLOW (arg1)
1003 && !TREE_OVERFLOW (arg2)
1004 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1005 {
1006 if (code == CEIL_DIV_EXPR)
1007 op1.low += op2.low - 1;
1008
1009 res.low = op1.low / op2.low, res.high = 0;
1010 break;
1011 }
1012
1013 /* ... fall through ... */
1014
1015 case ROUND_DIV_EXPR:
1016 if (op2.is_zero ())
1017 return NULL_TREE;
1018 if (op2.is_one ())
1019 {
1020 res = op1;
1021 break;
1022 }
1023 if (op1 == op2 && !op1.is_zero ())
1024 {
1025 res = double_int_one;
1026 break;
1027 }
1028 res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
1029 break;
1030
1031 case TRUNC_MOD_EXPR:
1032 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1033 /* This is a shortcut for a common special case. */
1034 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1035 && !TREE_OVERFLOW (arg1)
1036 && !TREE_OVERFLOW (arg2)
1037 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1038 {
1039 if (code == CEIL_MOD_EXPR)
1040 op1.low += op2.low - 1;
1041 res.low = op1.low % op2.low, res.high = 0;
1042 break;
1043 }
1044
1045 /* ... fall through ... */
1046
1047 case ROUND_MOD_EXPR:
1048 if (op2.is_zero ())
1049 return NULL_TREE;
1050 tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
1051 break;
1052
1053 case MIN_EXPR:
1054 res = op1.min (op2, uns);
1055 break;
1056
1057 case MAX_EXPR:
1058 res = op1.max (op2, uns);
1059 break;
1060
1061 default:
1062 return NULL_TREE;
1063 }
1064
1065 t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1066 (!uns && overflow)
1067 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1068
1069 return t;
1070 }
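/* Examples, writing the INTEGER_CST operands as plain numbers:
   int_const_binop_1 (PLUS_EXPR, 2, 3, 1) yields 5; adding 1 to the
   maximum value of a signed type yields the wrapped constant with
   TREE_OVERFLOW set; and any division or modulus by zero yields
   NULL_TREE so the caller leaves the expression unfolded.  */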
1071
1072 tree
1073 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1074 {
1075 return int_const_binop_1 (code, arg1, arg2, 1);
1076 }
1077
1078 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1079 constant. We assume ARG1 and ARG2 have the same data type, or at least
1080 are the same kind of constant and the same machine mode. Return zero if
1081 combining the constants is not allowed in the current operating mode. */
1082
1083 static tree
1084 const_binop (enum tree_code code, tree arg1, tree arg2)
1085 {
1086 /* Sanity check for the recursive cases. */
1087 if (!arg1 || !arg2)
1088 return NULL_TREE;
1089
1090 STRIP_NOPS (arg1);
1091 STRIP_NOPS (arg2);
1092
1093 if (TREE_CODE (arg1) == INTEGER_CST)
1094 return int_const_binop (code, arg1, arg2);
1095
1096 if (TREE_CODE (arg1) == REAL_CST)
1097 {
1098 enum machine_mode mode;
1099 REAL_VALUE_TYPE d1;
1100 REAL_VALUE_TYPE d2;
1101 REAL_VALUE_TYPE value;
1102 REAL_VALUE_TYPE result;
1103 bool inexact;
1104 tree t, type;
1105
1106 /* The following codes are handled by real_arithmetic. */
1107 switch (code)
1108 {
1109 case PLUS_EXPR:
1110 case MINUS_EXPR:
1111 case MULT_EXPR:
1112 case RDIV_EXPR:
1113 case MIN_EXPR:
1114 case MAX_EXPR:
1115 break;
1116
1117 default:
1118 return NULL_TREE;
1119 }
1120
1121 d1 = TREE_REAL_CST (arg1);
1122 d2 = TREE_REAL_CST (arg2);
1123
1124 type = TREE_TYPE (arg1);
1125 mode = TYPE_MODE (type);
1126
1127 /* Don't perform operation if we honor signaling NaNs and
1128 either operand is a NaN. */
1129 if (HONOR_SNANS (mode)
1130 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1131 return NULL_TREE;
1132
1133 /* Don't perform operation if it would raise a division
1134 by zero exception. */
1135 if (code == RDIV_EXPR
1136 && REAL_VALUES_EQUAL (d2, dconst0)
1137 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1138 return NULL_TREE;
1139
1140 /* If either operand is a NaN, just return it. Otherwise, set up
1141 for floating-point trap; we return an overflow. */
1142 if (REAL_VALUE_ISNAN (d1))
1143 return arg1;
1144 else if (REAL_VALUE_ISNAN (d2))
1145 return arg2;
1146
1147 inexact = real_arithmetic (&value, code, &d1, &d2);
1148 real_convert (&result, mode, &value);
1149
1150 /* Don't constant fold this floating point operation if
1151 the result has overflowed and flag_trapping_math is set. */
1152 if (flag_trapping_math
1153 && MODE_HAS_INFINITIES (mode)
1154 && REAL_VALUE_ISINF (result)
1155 && !REAL_VALUE_ISINF (d1)
1156 && !REAL_VALUE_ISINF (d2))
1157 return NULL_TREE;
1158
1159 /* Don't constant fold this floating point operation if the
1160 result may depend upon the run-time rounding mode and
1161 flag_rounding_math is set, or if GCC's software emulation
1162 is unable to accurately represent the result. */
1163 if ((flag_rounding_math
1164 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1165 && (inexact || !real_identical (&result, &value)))
1166 return NULL_TREE;
1167
1168 t = build_real (type, result);
1169
1170 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1171 return t;
1172 }
1173
1174 if (TREE_CODE (arg1) == FIXED_CST)
1175 {
1176 FIXED_VALUE_TYPE f1;
1177 FIXED_VALUE_TYPE f2;
1178 FIXED_VALUE_TYPE result;
1179 tree t, type;
1180 int sat_p;
1181 bool overflow_p;
1182
1183 /* The following codes are handled by fixed_arithmetic. */
1184 switch (code)
1185 {
1186 case PLUS_EXPR:
1187 case MINUS_EXPR:
1188 case MULT_EXPR:
1189 case TRUNC_DIV_EXPR:
1190 f2 = TREE_FIXED_CST (arg2);
1191 break;
1192
1193 case LSHIFT_EXPR:
1194 case RSHIFT_EXPR:
1195 f2.data.high = TREE_INT_CST_HIGH (arg2);
1196 f2.data.low = TREE_INT_CST_LOW (arg2);
1197 f2.mode = SImode;
1198 break;
1199
1200 default:
1201 return NULL_TREE;
1202 }
1203
1204 f1 = TREE_FIXED_CST (arg1);
1205 type = TREE_TYPE (arg1);
1206 sat_p = TYPE_SATURATING (type);
1207 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1208 t = build_fixed (type, result);
1209 /* Propagate overflow flags. */
1210 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1211 TREE_OVERFLOW (t) = 1;
1212 return t;
1213 }
1214
1215 if (TREE_CODE (arg1) == COMPLEX_CST)
1216 {
1217 tree type = TREE_TYPE (arg1);
1218 tree r1 = TREE_REALPART (arg1);
1219 tree i1 = TREE_IMAGPART (arg1);
1220 tree r2 = TREE_REALPART (arg2);
1221 tree i2 = TREE_IMAGPART (arg2);
1222 tree real, imag;
1223
1224 switch (code)
1225 {
1226 case PLUS_EXPR:
1227 case MINUS_EXPR:
1228 real = const_binop (code, r1, r2);
1229 imag = const_binop (code, i1, i2);
1230 break;
1231
1232 case MULT_EXPR:
1233 if (COMPLEX_FLOAT_TYPE_P (type))
1234 return do_mpc_arg2 (arg1, arg2, type,
1235 /* do_nonfinite= */ folding_initializer,
1236 mpc_mul);
1237
1238 real = const_binop (MINUS_EXPR,
1239 const_binop (MULT_EXPR, r1, r2),
1240 const_binop (MULT_EXPR, i1, i2));
1241 imag = const_binop (PLUS_EXPR,
1242 const_binop (MULT_EXPR, r1, i2),
1243 const_binop (MULT_EXPR, i1, r2));
1244 break;
1245
1246 case RDIV_EXPR:
1247 if (COMPLEX_FLOAT_TYPE_P (type))
1248 return do_mpc_arg2 (arg1, arg2, type,
1249 /* do_nonfinite= */ folding_initializer,
1250 mpc_div);
1251 /* Fallthru ... */
1252 case TRUNC_DIV_EXPR:
1253 case CEIL_DIV_EXPR:
1254 case FLOOR_DIV_EXPR:
1255 case ROUND_DIV_EXPR:
1256 if (flag_complex_method == 0)
1257 {
1258 /* Keep this algorithm in sync with
1259 tree-complex.c:expand_complex_div_straight().
1260
1261 Expand complex division to scalars, straightforward algorithm.
1262 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1263 t = br*br + bi*bi
1264 */
1265 tree magsquared
1266 = const_binop (PLUS_EXPR,
1267 const_binop (MULT_EXPR, r2, r2),
1268 const_binop (MULT_EXPR, i2, i2));
1269 tree t1
1270 = const_binop (PLUS_EXPR,
1271 const_binop (MULT_EXPR, r1, r2),
1272 const_binop (MULT_EXPR, i1, i2));
1273 tree t2
1274 = const_binop (MINUS_EXPR,
1275 const_binop (MULT_EXPR, i1, r2),
1276 const_binop (MULT_EXPR, r1, i2));
1277
1278 real = const_binop (code, t1, magsquared);
1279 imag = const_binop (code, t2, magsquared);
1280 }
1281 else
1282 {
1283 /* Keep this algorithm in sync with
1284 tree-complex.c:expand_complex_div_wide().
1285
1286 Expand complex division to scalars, modified algorithm to minimize
1287 overflow with wide input ranges. */
1288 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1289 fold_abs_const (r2, TREE_TYPE (type)),
1290 fold_abs_const (i2, TREE_TYPE (type)));
1291
1292 if (integer_nonzerop (compare))
1293 {
1294 /* In the TRUE branch, we compute
1295 ratio = br/bi;
1296 div = (br * ratio) + bi;
1297 tr = (ar * ratio) + ai;
1298 ti = (ai * ratio) - ar;
1299 tr = tr / div;
1300 ti = ti / div; */
1301 tree ratio = const_binop (code, r2, i2);
1302 tree div = const_binop (PLUS_EXPR, i2,
1303 const_binop (MULT_EXPR, r2, ratio));
1304 real = const_binop (MULT_EXPR, r1, ratio);
1305 real = const_binop (PLUS_EXPR, real, i1);
1306 real = const_binop (code, real, div);
1307
1308 imag = const_binop (MULT_EXPR, i1, ratio);
1309 imag = const_binop (MINUS_EXPR, imag, r1);
1310 imag = const_binop (code, imag, div);
1311 }
1312 else
1313 {
1314 /* In the FALSE branch, we compute
1315 ratio = bi/br;
1316 div = (bi * ratio) + br;
1317 tr = (ai * ratio) + ar;
1318 ti = ai - (ar * ratio);
1319 tr = tr / div;
1320 ti = ti / div; */
1321 tree ratio = const_binop (code, i2, r2);
1322 tree div = const_binop (PLUS_EXPR, r2,
1323 const_binop (MULT_EXPR, i2, ratio));
1324
1325 real = const_binop (MULT_EXPR, i1, ratio);
1326 real = const_binop (PLUS_EXPR, real, r1);
1327 real = const_binop (code, real, div);
1328
1329 imag = const_binop (MULT_EXPR, r1, ratio);
1330 imag = const_binop (MINUS_EXPR, i1, imag);
1331 imag = const_binop (code, imag, div);
1332 }
1333 }
1334 break;
1335
1336 default:
1337 return NULL_TREE;
1338 }
1339
1340 if (real && imag)
1341 return build_complex (type, real, imag);
1342 }
1343
1344 if (TREE_CODE (arg1) == VECTOR_CST
1345 && TREE_CODE (arg2) == VECTOR_CST)
1346 {
1347 tree type = TREE_TYPE (arg1);
1348 int count = TYPE_VECTOR_SUBPARTS (type), i;
1349 tree *elts = XALLOCAVEC (tree, count);
1350
1351 for (i = 0; i < count; i++)
1352 {
1353 tree elem1 = VECTOR_CST_ELT (arg1, i);
1354 tree elem2 = VECTOR_CST_ELT (arg2, i);
1355
1356 elts[i] = const_binop (code, elem1, elem2);
1357
1358 /* It is possible that const_binop cannot handle the given
1359 code and returns NULL_TREE. */
1360 if (elts[i] == NULL_TREE)
1361 return NULL_TREE;
1362 }
1363
1364 return build_vector (type, elts);
1365 }
1366 return NULL_TREE;
1367 }
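/* To see the wide-range division arithmetic above on concrete
   numbers: for (1 + 2i) / (3 + 4i) the comparison |3| < |4| selects
   the TRUE branch, so ratio = 3/4 = 0.75, div = 4 + 3*0.75 = 6.25,
   real = (1*0.75 + 2) / 6.25 = 0.44 and imag = (2*0.75 - 1) / 6.25
   = 0.08, matching (1 + 2i) / (3 + 4i) == 0.44 + 0.08i.  */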
1368
1369 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1370 indicates which particular sizetype to create. */
1371
1372 tree
1373 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1374 {
1375 return build_int_cst (sizetype_tab[(int) kind], number);
1376 }
1377 \f
1378 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1379 is a tree code. The type of the result is taken from the operands.
1380 Both must be equivalent integer types, ala int_binop_types_match_p.
1381 If the operands are constant, so is the result. */
1382
1383 tree
1384 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1385 {
1386 tree type = TREE_TYPE (arg0);
1387
1388 if (arg0 == error_mark_node || arg1 == error_mark_node)
1389 return error_mark_node;
1390
1391 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1392 TREE_TYPE (arg1)));
1393
1394 /* Handle the special case of two integer constants faster. */
1395 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1396 {
1397 /* And some specific cases even faster than that. */
1398 if (code == PLUS_EXPR)
1399 {
1400 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1401 return arg1;
1402 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1403 return arg0;
1404 }
1405 else if (code == MINUS_EXPR)
1406 {
1407 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1408 return arg0;
1409 }
1410 else if (code == MULT_EXPR)
1411 {
1412 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1413 return arg1;
1414 }
1415
1416 /* Handle general case of two integer constants. For sizetype
1417 constant calculations we always want to know about overflow,
1418 even in the unsigned case. */
1419 return int_const_binop_1 (code, arg0, arg1, -1);
1420 }
1421
1422 return fold_build2_loc (loc, code, type, arg0, arg1);
1423 }
1424
1425 /* Given two values, either both of sizetype or both of bitsizetype,
1426 compute the difference between the two values. Return the value
1427 in signed type corresponding to the type of the operands. */
1428
1429 tree
1430 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1431 {
1432 tree type = TREE_TYPE (arg0);
1433 tree ctype;
1434
1435 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1436 TREE_TYPE (arg1)));
1437
1438 /* If the type is already signed, just do the simple thing. */
1439 if (!TYPE_UNSIGNED (type))
1440 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1441
1442 if (type == sizetype)
1443 ctype = ssizetype;
1444 else if (type == bitsizetype)
1445 ctype = sbitsizetype;
1446 else
1447 ctype = signed_type_for (type);
1448
1449 /* If either operand is not a constant, do the conversions to the signed
1450 type and subtract. The hardware will do the right thing with any
1451 overflow in the subtraction. */
1452 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1453 return size_binop_loc (loc, MINUS_EXPR,
1454 fold_convert_loc (loc, ctype, arg0),
1455 fold_convert_loc (loc, ctype, arg1));
1456
1457 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1458 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1459 overflow) and negate (which can't either). Special-case a result
1460 of zero while we're here. */
1461 if (tree_int_cst_equal (arg0, arg1))
1462 return build_int_cst (ctype, 0);
1463 else if (tree_int_cst_lt (arg1, arg0))
1464 return fold_convert_loc (loc, ctype,
1465 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1466 else
1467 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1468 fold_convert_loc (loc, ctype,
1469 size_binop_loc (loc,
1470 MINUS_EXPR,
1471 arg1, arg0)));
1472 }
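/* Example: on sizetype constants 4 and 7, size_diffop_loc computes
   7 - 4 == 3 in the unsigned type (the order that cannot overflow)
   and then negates after converting, yielding ssizetype -3.  The
   result is always in the signed counterpart of the operands'
   type.  */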
1473 \f
1474 /* A subroutine of fold_convert_const handling conversions of an
1475 INTEGER_CST to another integer type. */
1476
1477 static tree
1478 fold_convert_const_int_from_int (tree type, const_tree arg1)
1479 {
1480 tree t;
1481
1482 /* Given an integer constant, make new constant with new type,
1483 appropriately sign-extended or truncated. */
1484 t = force_fit_type_double (type, tree_to_double_int (arg1),
1485 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1486 (TREE_INT_CST_HIGH (arg1) < 0
1487 && (TYPE_UNSIGNED (type)
1488 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1489 | TREE_OVERFLOW (arg1));
1490
1491 return t;
1492 }
1493
1494 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1495 to an integer type. */
1496
1497 static tree
1498 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1499 {
1500 int overflow = 0;
1501 tree t;
1502
1503 /* The following code implements the floating point to integer
1504 conversion rules required by the Java Language Specification,
1505 that IEEE NaNs are mapped to zero and values that overflow
1506 the target precision saturate, i.e. values greater than
1507 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1508 are mapped to INT_MIN. These semantics are allowed by the
1509 C and C++ standards that simply state that the behavior of
1510 FP-to-integer conversion is unspecified upon overflow. */
1511
1512 double_int val;
1513 REAL_VALUE_TYPE r;
1514 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1515
1516 switch (code)
1517 {
1518 case FIX_TRUNC_EXPR:
1519 real_trunc (&r, VOIDmode, &x);
1520 break;
1521
1522 default:
1523 gcc_unreachable ();
1524 }
1525
1526 /* If R is NaN, return zero and show we have an overflow. */
1527 if (REAL_VALUE_ISNAN (r))
1528 {
1529 overflow = 1;
1530 val = double_int_zero;
1531 }
1532
1533 /* See if R is less than the lower bound or greater than the
1534 upper bound. */
1535
1536 if (! overflow)
1537 {
1538 tree lt = TYPE_MIN_VALUE (type);
1539 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1540 if (REAL_VALUES_LESS (r, l))
1541 {
1542 overflow = 1;
1543 val = tree_to_double_int (lt);
1544 }
1545 }
1546
1547 if (! overflow)
1548 {
1549 tree ut = TYPE_MAX_VALUE (type);
1550 if (ut)
1551 {
1552 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1553 if (REAL_VALUES_LESS (u, r))
1554 {
1555 overflow = 1;
1556 val = tree_to_double_int (ut);
1557 }
1558 }
1559 }
1560
1561 if (! overflow)
1562 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1563
1564 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1565 return t;
1566 }
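/* Examples for a signed 32-bit target type: 3.7 truncates to 3;
   4.0e18 saturates to INT_MAX and -4.0e18 to INT_MIN, each with
   TREE_OVERFLOW set on the result; a NaN converts to 0, also with
   TREE_OVERFLOW set.  */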
1567
1568 /* A subroutine of fold_convert_const handling conversions of a
1569 FIXED_CST to an integer type. */
1570
1571 static tree
1572 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1573 {
1574 tree t;
1575 double_int temp, temp_trunc;
1576 unsigned int mode;
1577
1578 /* Right shift FIXED_CST to temp by fbit. */
1579 temp = TREE_FIXED_CST (arg1).data;
1580 mode = TREE_FIXED_CST (arg1).mode;
1581 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1582 {
1583 temp = temp.rshift (GET_MODE_FBIT (mode),
1584 HOST_BITS_PER_DOUBLE_INT,
1585 SIGNED_FIXED_POINT_MODE_P (mode));
1586
1587 /* Left shift temp to temp_trunc by fbit. */
1588 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1589 HOST_BITS_PER_DOUBLE_INT,
1590 SIGNED_FIXED_POINT_MODE_P (mode));
1591 }
1592 else
1593 {
1594 temp = double_int_zero;
1595 temp_trunc = double_int_zero;
1596 }
1597
1598 /* If FIXED_CST is negative, we need to round the value toward 0:
1599 if any of the fractional bits are nonzero, add 1 to temp. */
1600 if (SIGNED_FIXED_POINT_MODE_P (mode)
1601 && temp_trunc.is_negative ()
1602 && TREE_FIXED_CST (arg1).data != temp_trunc)
1603 temp += double_int_one;
1604
1605 /* Given a fixed-point constant, make new constant with new type,
1606 appropriately sign-extended or truncated. */
1607 t = force_fit_type_double (type, temp, -1,
1608 (temp.is_negative ()
1609 && (TYPE_UNSIGNED (type)
1610 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1611 | TREE_OVERFLOW (arg1));
1612
1613 return t;
1614 }
1615
1616 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1617 to another floating point type. */
1618
1619 static tree
1620 fold_convert_const_real_from_real (tree type, const_tree arg1)
1621 {
1622 REAL_VALUE_TYPE value;
1623 tree t;
1624
1625 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1626 t = build_real (type, value);
1627
1628 /* If converting an infinity or NAN to a representation that doesn't
1629 have one, set the overflow bit so that we can produce some kind of
1630 error message at the appropriate point if necessary. It's not the
1631 most user-friendly message, but it's better than nothing. */
1632 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1633 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1634 TREE_OVERFLOW (t) = 1;
1635 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1636 && !MODE_HAS_NANS (TYPE_MODE (type)))
1637 TREE_OVERFLOW (t) = 1;
1638 /* Regular overflow, conversion produced an infinity in a mode that
1639 can't represent them. */
1640 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1641 && REAL_VALUE_ISINF (value)
1642 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1643 TREE_OVERFLOW (t) = 1;
1644 else
1645 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1646 return t;
1647 }
1648
1649 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1650 to a floating point type. */
1651
1652 static tree
1653 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1654 {
1655 REAL_VALUE_TYPE value;
1656 tree t;
1657
1658 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1659 t = build_real (type, value);
1660
1661 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1662 return t;
1663 }
1664
1665 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1666 to another fixed-point type. */
1667
1668 static tree
1669 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1670 {
1671 FIXED_VALUE_TYPE value;
1672 tree t;
1673 bool overflow_p;
1674
1675 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1676 TYPE_SATURATING (type));
1677 t = build_fixed (type, value);
1678
1679 /* Propagate overflow flags. */
1680 if (overflow_p | TREE_OVERFLOW (arg1))
1681 TREE_OVERFLOW (t) = 1;
1682 return t;
1683 }
1684
1685 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1686 to a fixed-point type. */
1687
1688 static tree
1689 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1690 {
1691 FIXED_VALUE_TYPE value;
1692 tree t;
1693 bool overflow_p;
1694
1695 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1696 TREE_INT_CST (arg1),
1697 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1698 TYPE_SATURATING (type));
1699 t = build_fixed (type, value);
1700
1701 /* Propagate overflow flags. */
1702 if (overflow_p | TREE_OVERFLOW (arg1))
1703 TREE_OVERFLOW (t) = 1;
1704 return t;
1705 }
1706
1707 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1708 to a fixed-point type. */
1709
1710 static tree
1711 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1712 {
1713 FIXED_VALUE_TYPE value;
1714 tree t;
1715 bool overflow_p;
1716
1717 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1718 &TREE_REAL_CST (arg1),
1719 TYPE_SATURATING (type));
1720 t = build_fixed (type, value);
1721
1722 /* Propagate overflow flags. */
1723 if (overflow_p | TREE_OVERFLOW (arg1))
1724 TREE_OVERFLOW (t) = 1;
1725 return t;
1726 }
1727
1728 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1729 type TYPE. If no simplification can be done, return NULL_TREE. */
1730
1731 static tree
1732 fold_convert_const (enum tree_code code, tree type, tree arg1)
1733 {
1734 if (TREE_TYPE (arg1) == type)
1735 return arg1;
1736
1737 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1738 || TREE_CODE (type) == OFFSET_TYPE)
1739 {
1740 if (TREE_CODE (arg1) == INTEGER_CST)
1741 return fold_convert_const_int_from_int (type, arg1);
1742 else if (TREE_CODE (arg1) == REAL_CST)
1743 return fold_convert_const_int_from_real (code, type, arg1);
1744 else if (TREE_CODE (arg1) == FIXED_CST)
1745 return fold_convert_const_int_from_fixed (type, arg1);
1746 }
1747 else if (TREE_CODE (type) == REAL_TYPE)
1748 {
1749 if (TREE_CODE (arg1) == INTEGER_CST)
1750 return build_real_from_int_cst (type, arg1);
1751 else if (TREE_CODE (arg1) == REAL_CST)
1752 return fold_convert_const_real_from_real (type, arg1);
1753 else if (TREE_CODE (arg1) == FIXED_CST)
1754 return fold_convert_const_real_from_fixed (type, arg1);
1755 }
1756 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1757 {
1758 if (TREE_CODE (arg1) == FIXED_CST)
1759 return fold_convert_const_fixed_from_fixed (type, arg1);
1760 else if (TREE_CODE (arg1) == INTEGER_CST)
1761 return fold_convert_const_fixed_from_int (type, arg1);
1762 else if (TREE_CODE (arg1) == REAL_CST)
1763 return fold_convert_const_fixed_from_real (type, arg1);
1764 }
1765 return NULL_TREE;
1766 }
1767
1768 /* Construct a vector of zeros of vector type TYPE. */
1769
1770 static tree
1771 build_zero_vector (tree type)
1772 {
1773 tree t;
1774
1775 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1776 return build_vector_from_val (type, t);
1777 }
1778
1779 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1780
1781 bool
1782 fold_convertible_p (const_tree type, const_tree arg)
1783 {
1784 tree orig = TREE_TYPE (arg);
1785
1786 if (type == orig)
1787 return true;
1788
1789 if (TREE_CODE (arg) == ERROR_MARK
1790 || TREE_CODE (type) == ERROR_MARK
1791 || TREE_CODE (orig) == ERROR_MARK)
1792 return false;
1793
1794 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1795 return true;
1796
1797 switch (TREE_CODE (type))
1798 {
1799 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1800 case POINTER_TYPE: case REFERENCE_TYPE:
1801 case OFFSET_TYPE:
1802 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1803 || TREE_CODE (orig) == OFFSET_TYPE)
1804 return true;
1805 return (TREE_CODE (orig) == VECTOR_TYPE
1806 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1807
1808 case REAL_TYPE:
1809 case FIXED_POINT_TYPE:
1810 case COMPLEX_TYPE:
1811 case VECTOR_TYPE:
1812 case VOID_TYPE:
1813 return TREE_CODE (type) == TREE_CODE (orig);
1814
1815 default:
1816 return false;
1817 }
1818 }
1819
1820 /* Convert expression ARG to type TYPE. Used by the middle-end for
1821 simple conversions in preference to calling the front-end's convert. */
1822
1823 tree
1824 fold_convert_loc (location_t loc, tree type, tree arg)
1825 {
1826 tree orig = TREE_TYPE (arg);
1827 tree tem;
1828
1829 if (type == orig)
1830 return arg;
1831
1832 if (TREE_CODE (arg) == ERROR_MARK
1833 || TREE_CODE (type) == ERROR_MARK
1834 || TREE_CODE (orig) == ERROR_MARK)
1835 return error_mark_node;
1836
1837 switch (TREE_CODE (type))
1838 {
1839 case POINTER_TYPE:
1840 case REFERENCE_TYPE:
1841 /* Handle conversions between pointers to different address spaces. */
1842 if (POINTER_TYPE_P (orig)
1843 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1844 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1845 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1846 /* fall through */
1847
1848 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1849 case OFFSET_TYPE:
1850 if (TREE_CODE (arg) == INTEGER_CST)
1851 {
1852 tem = fold_convert_const (NOP_EXPR, type, arg);
1853 if (tem != NULL_TREE)
1854 return tem;
1855 }
1856 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1857 || TREE_CODE (orig) == OFFSET_TYPE)
1858 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1859 if (TREE_CODE (orig) == COMPLEX_TYPE)
1860 return fold_convert_loc (loc, type,
1861 fold_build1_loc (loc, REALPART_EXPR,
1862 TREE_TYPE (orig), arg));
1863 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1864 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1865 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1866
1867 case REAL_TYPE:
1868 if (TREE_CODE (arg) == INTEGER_CST)
1869 {
1870 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1871 if (tem != NULL_TREE)
1872 return tem;
1873 }
1874 else if (TREE_CODE (arg) == REAL_CST)
1875 {
1876 tem = fold_convert_const (NOP_EXPR, type, arg);
1877 if (tem != NULL_TREE)
1878 return tem;
1879 }
1880 else if (TREE_CODE (arg) == FIXED_CST)
1881 {
1882 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1883 if (tem != NULL_TREE)
1884 return tem;
1885 }
1886
1887 switch (TREE_CODE (orig))
1888 {
1889 case INTEGER_TYPE:
1890 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1891 case POINTER_TYPE: case REFERENCE_TYPE:
1892 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1893
1894 case REAL_TYPE:
1895 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1896
1897 case FIXED_POINT_TYPE:
1898 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1899
1900 case COMPLEX_TYPE:
1901 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1902 return fold_convert_loc (loc, type, tem);
1903
1904 default:
1905 gcc_unreachable ();
1906 }
1907
1908 case FIXED_POINT_TYPE:
1909 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1910 || TREE_CODE (arg) == REAL_CST)
1911 {
1912 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1914 goto fold_convert_exit;
1915 }
1916
1917 switch (TREE_CODE (orig))
1918 {
1919 case FIXED_POINT_TYPE:
1920 case INTEGER_TYPE:
1921 case ENUMERAL_TYPE:
1922 case BOOLEAN_TYPE:
1923 case REAL_TYPE:
1924 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1925
1926 case COMPLEX_TYPE:
1927 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1928 return fold_convert_loc (loc, type, tem);
1929
1930 default:
1931 gcc_unreachable ();
1932 }
1933
1934 case COMPLEX_TYPE:
1935 switch (TREE_CODE (orig))
1936 {
1937 case INTEGER_TYPE:
1938 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1939 case POINTER_TYPE: case REFERENCE_TYPE:
1940 case REAL_TYPE:
1941 case FIXED_POINT_TYPE:
1942 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1943 fold_convert_loc (loc, TREE_TYPE (type), arg),
1944 fold_convert_loc (loc, TREE_TYPE (type),
1945 integer_zero_node));
1946 case COMPLEX_TYPE:
1947 {
1948 tree rpart, ipart;
1949
1950 if (TREE_CODE (arg) == COMPLEX_EXPR)
1951 {
1952 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1953 TREE_OPERAND (arg, 0));
1954 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1955 TREE_OPERAND (arg, 1));
1956 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1957 }
1958
1959 arg = save_expr (arg);
1960 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1961 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1962 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1963 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1964 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1965 }
1966
1967 default:
1968 gcc_unreachable ();
1969 }
1970
1971 case VECTOR_TYPE:
1972 if (integer_zerop (arg))
1973 return build_zero_vector (type);
1974 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1975 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1976 || TREE_CODE (orig) == VECTOR_TYPE);
1977 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
1978
1979 case VOID_TYPE:
1980 tem = fold_ignored_result (arg);
1981 return fold_build1_loc (loc, NOP_EXPR, type, tem);
1982
1983 default:
1984 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1985 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1986 gcc_unreachable ();
1987 }
1988 fold_convert_exit:
1989 protected_set_expr_location_unshare (tem, loc);
1990 return tem;
1991 }
1992 \f
1993 /* Return false if expr can be assumed not to be an lvalue, true
1994 otherwise. */
1995
1996 static bool
1997 maybe_lvalue_p (const_tree x)
1998 {
1999 /* We only need to wrap lvalue tree codes. */
2000 switch (TREE_CODE (x))
2001 {
2002 case VAR_DECL:
2003 case PARM_DECL:
2004 case RESULT_DECL:
2005 case LABEL_DECL:
2006 case FUNCTION_DECL:
2007 case SSA_NAME:
2008
2009 case COMPONENT_REF:
2010 case MEM_REF:
2011 case INDIRECT_REF:
2012 case ARRAY_REF:
2013 case ARRAY_RANGE_REF:
2014 case BIT_FIELD_REF:
2015 case OBJ_TYPE_REF:
2016
2017 case REALPART_EXPR:
2018 case IMAGPART_EXPR:
2019 case PREINCREMENT_EXPR:
2020 case PREDECREMENT_EXPR:
2021 case SAVE_EXPR:
2022 case TRY_CATCH_EXPR:
2023 case WITH_CLEANUP_EXPR:
2024 case COMPOUND_EXPR:
2025 case MODIFY_EXPR:
2026 case TARGET_EXPR:
2027 case COND_EXPR:
2028 case BIND_EXPR:
2029 break;
2030
2031 default:
2032 /* Assume the worst for front-end tree codes. */
2033 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2034 break;
2035 return false;
2036 }
2037
2038 return true;
2039 }
2040
2041 /* Return an expr equal to X but certainly not valid as an lvalue. */
2042
2043 tree
2044 non_lvalue_loc (location_t loc, tree x)
2045 {
2046 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2047 us. */
2048 if (in_gimple_form)
2049 return x;
2050
2051 if (! maybe_lvalue_p (x))
2052 return x;
2053 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2054 }
2055
2056 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2057 Zero means allow extended lvalues. */
2058
2059 int pedantic_lvalues;
2060
2061 /* When pedantic, return an expr equal to X but certainly not valid as a
2062 pedantic lvalue. Otherwise, return X. */
2063
2064 static tree
2065 pedantic_non_lvalue_loc (location_t loc, tree x)
2066 {
2067 if (pedantic_lvalues)
2068 return non_lvalue_loc (loc, x);
2069
2070 return protected_set_expr_location_unshare (x, loc);
2071 }
2072 \f
2073 /* Given a tree comparison code, return the code that is the logical inverse.
2074 It is generally not safe to do this for floating-point comparisons, except
2075 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2076 ERROR_MARK in this case. */
2077
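/* Illustrative sketch, not part of the original source: with NaNs
   honored (and trapping math disabled), the inverse of a < b is not
   a >= b but "a >= b or unordered", i.e. UNGE_EXPR, since both
   a < b and a >= b are false on a NaN operand.  */
#if 0
static void
invert_demo (void)
{
  gcc_assert (invert_tree_comparison (LT_EXPR, /*honor_nans=*/false)
	      == GE_EXPR);
  /* Assumes !flag_trapping_math, else ERROR_MARK is returned.  */
  gcc_assert (invert_tree_comparison (LT_EXPR, /*honor_nans=*/true)
	      == UNGE_EXPR);
}
#endif
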
2078 enum tree_code
2079 invert_tree_comparison (enum tree_code code, bool honor_nans)
2080 {
2081 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2082 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2083 return ERROR_MARK;
2084
2085 switch (code)
2086 {
2087 case EQ_EXPR:
2088 return NE_EXPR;
2089 case NE_EXPR:
2090 return EQ_EXPR;
2091 case GT_EXPR:
2092 return honor_nans ? UNLE_EXPR : LE_EXPR;
2093 case GE_EXPR:
2094 return honor_nans ? UNLT_EXPR : LT_EXPR;
2095 case LT_EXPR:
2096 return honor_nans ? UNGE_EXPR : GE_EXPR;
2097 case LE_EXPR:
2098 return honor_nans ? UNGT_EXPR : GT_EXPR;
2099 case LTGT_EXPR:
2100 return UNEQ_EXPR;
2101 case UNEQ_EXPR:
2102 return LTGT_EXPR;
2103 case UNGT_EXPR:
2104 return LE_EXPR;
2105 case UNGE_EXPR:
2106 return LT_EXPR;
2107 case UNLT_EXPR:
2108 return GE_EXPR;
2109 case UNLE_EXPR:
2110 return GT_EXPR;
2111 case ORDERED_EXPR:
2112 return UNORDERED_EXPR;
2113 case UNORDERED_EXPR:
2114 return ORDERED_EXPR;
2115 default:
2116 gcc_unreachable ();
2117 }
2118 }
2119
2120 /* Similar, but return the comparison that results if the operands are
2121 swapped. This is safe for floating-point. */
2122
2123 enum tree_code
2124 swap_tree_comparison (enum tree_code code)
2125 {
2126 switch (code)
2127 {
2128 case EQ_EXPR:
2129 case NE_EXPR:
2130 case ORDERED_EXPR:
2131 case UNORDERED_EXPR:
2132 case LTGT_EXPR:
2133 case UNEQ_EXPR:
2134 return code;
2135 case GT_EXPR:
2136 return LT_EXPR;
2137 case GE_EXPR:
2138 return LE_EXPR;
2139 case LT_EXPR:
2140 return GT_EXPR;
2141 case LE_EXPR:
2142 return GE_EXPR;
2143 case UNGT_EXPR:
2144 return UNLT_EXPR;
2145 case UNGE_EXPR:
2146 return UNLE_EXPR;
2147 case UNLT_EXPR:
2148 return UNGT_EXPR;
2149 case UNLE_EXPR:
2150 return UNGE_EXPR;
2151 default:
2152 gcc_unreachable ();
2153 }
2154 }
2155
2156
2157 /* Convert a comparison tree code from an enum tree_code representation
2158 into a compcode bit-based encoding. This function is the inverse of
2159 compcode_to_comparison. */
2160
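/* Illustrative sketch, not part of the original source: each compcode
   is a bit set over the four possible outcomes LT (bit 0), EQ (bit 1),
   GT (bit 2) and UNORDERED (bit 3), so conjunction and disjunction of
   comparisons reduce to bitwise AND and OR of the codes.  */
#if 0
static void
compcode_demo (void)
{
  gcc_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  gcc_assert ((COMPCODE_GT | COMPCODE_EQ) == COMPCODE_GE);
  gcc_assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);
  gcc_assert ((COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD) == COMPCODE_NE);
}
#endif
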
2161 static enum comparison_code
2162 comparison_to_compcode (enum tree_code code)
2163 {
2164 switch (code)
2165 {
2166 case LT_EXPR:
2167 return COMPCODE_LT;
2168 case EQ_EXPR:
2169 return COMPCODE_EQ;
2170 case LE_EXPR:
2171 return COMPCODE_LE;
2172 case GT_EXPR:
2173 return COMPCODE_GT;
2174 case NE_EXPR:
2175 return COMPCODE_NE;
2176 case GE_EXPR:
2177 return COMPCODE_GE;
2178 case ORDERED_EXPR:
2179 return COMPCODE_ORD;
2180 case UNORDERED_EXPR:
2181 return COMPCODE_UNORD;
2182 case UNLT_EXPR:
2183 return COMPCODE_UNLT;
2184 case UNEQ_EXPR:
2185 return COMPCODE_UNEQ;
2186 case UNLE_EXPR:
2187 return COMPCODE_UNLE;
2188 case UNGT_EXPR:
2189 return COMPCODE_UNGT;
2190 case LTGT_EXPR:
2191 return COMPCODE_LTGT;
2192 case UNGE_EXPR:
2193 return COMPCODE_UNGE;
2194 default:
2195 gcc_unreachable ();
2196 }
2197 }
2198
2199 /* Convert a compcode bit-based encoding of a comparison operator back
2200 to GCC's enum tree_code representation. This function is the
2201 inverse of comparison_to_compcode. */
2202
2203 static enum tree_code
2204 compcode_to_comparison (enum comparison_code code)
2205 {
2206 switch (code)
2207 {
2208 case COMPCODE_LT:
2209 return LT_EXPR;
2210 case COMPCODE_EQ:
2211 return EQ_EXPR;
2212 case COMPCODE_LE:
2213 return LE_EXPR;
2214 case COMPCODE_GT:
2215 return GT_EXPR;
2216 case COMPCODE_NE:
2217 return NE_EXPR;
2218 case COMPCODE_GE:
2219 return GE_EXPR;
2220 case COMPCODE_ORD:
2221 return ORDERED_EXPR;
2222 case COMPCODE_UNORD:
2223 return UNORDERED_EXPR;
2224 case COMPCODE_UNLT:
2225 return UNLT_EXPR;
2226 case COMPCODE_UNEQ:
2227 return UNEQ_EXPR;
2228 case COMPCODE_UNLE:
2229 return UNLE_EXPR;
2230 case COMPCODE_UNGT:
2231 return UNGT_EXPR;
2232 case COMPCODE_LTGT:
2233 return LTGT_EXPR;
2234 case COMPCODE_UNGE:
2235 return UNGE_EXPR;
2236 default:
2237 gcc_unreachable ();
2238 }
2239 }
2240
2241 /* Return a tree for the comparison which is the combination of
2242 doing the AND or OR (depending on CODE) of the two operations LCODE
2243 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2244 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2245 if this makes the transformation invalid. */
2246
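/* Illustrative sketch, not part of the original source: folding
   (x < y) || (x == y), both over the same operands, into x <= y by
   OR-ing the compcodes (COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE).
   BOOLEAN_TYPE, X and Y are assumed arguments.  */
#if 0
static tree
combine_demo (location_t loc, tree boolean_type, tree x, tree y)
{
  return combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			      boolean_type, x, y);
}
#endif
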
2247 tree
2248 combine_comparisons (location_t loc,
2249 enum tree_code code, enum tree_code lcode,
2250 enum tree_code rcode, tree truth_type,
2251 tree ll_arg, tree lr_arg)
2252 {
2253 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2254 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2255 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2256 int compcode;
2257
2258 switch (code)
2259 {
2260 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2261 compcode = lcompcode & rcompcode;
2262 break;
2263
2264 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2265 compcode = lcompcode | rcompcode;
2266 break;
2267
2268 default:
2269 return NULL_TREE;
2270 }
2271
2272 if (!honor_nans)
2273 {
2274 /* Eliminate unordered comparisons, as well as LTGT and ORD
2275 which are not used unless the mode has NaNs. */
2276 compcode &= ~COMPCODE_UNORD;
2277 if (compcode == COMPCODE_LTGT)
2278 compcode = COMPCODE_NE;
2279 else if (compcode == COMPCODE_ORD)
2280 compcode = COMPCODE_TRUE;
2281 }
2282 else if (flag_trapping_math)
2283 {
2284 /* Check that the original operation and the optimized ones will trap
2285 under the same condition. */
2286 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2287 && (lcompcode != COMPCODE_EQ)
2288 && (lcompcode != COMPCODE_ORD);
2289 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2290 && (rcompcode != COMPCODE_EQ)
2291 && (rcompcode != COMPCODE_ORD);
2292 bool trap = (compcode & COMPCODE_UNORD) == 0
2293 && (compcode != COMPCODE_EQ)
2294 && (compcode != COMPCODE_ORD);
2295
2296 /* In a short-circuited boolean expression the LHS might be
2297 such that the RHS, if evaluated, will never trap. For
2298 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2299 if neither x nor y is NaN. (This is a mixed blessing: for
2300 example, the expression above will never trap, hence
2301 optimizing it to x < y would be invalid). */
2302 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2303 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2304 rtrap = false;
2305
2306 /* If the comparison was short-circuited, and only the RHS
2307 trapped, we may now generate a spurious trap. */
2308 if (rtrap && !ltrap
2309 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2310 return NULL_TREE;
2311
2312 /* If we changed the conditions that cause a trap, we lose. */
2313 if ((ltrap || rtrap) != trap)
2314 return NULL_TREE;
2315 }
2316
2317 if (compcode == COMPCODE_TRUE)
2318 return constant_boolean_node (true, truth_type);
2319 else if (compcode == COMPCODE_FALSE)
2320 return constant_boolean_node (false, truth_type);
2321 else
2322 {
2323 enum tree_code tcode;
2324
2325 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2326 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2327 }
2328 }
2329 \f
2330 /* Return nonzero if two operands (typically of the same tree node)
2331 are necessarily equal. If either argument has side-effects this
2332 function returns zero. FLAGS modifies behavior as follows:
2333
2334 If OEP_ONLY_CONST is set, only return nonzero for constants.
2335 This function tests whether the operands are indistinguishable;
2336 it does not test whether they are equal using C's == operation.
2337 The distinction is important for IEEE floating point, because
2338 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2339 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2340
2341 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2342 even though it may hold multiple values during a function.
2343 This is because a GCC tree node guarantees that nothing else is
2344 executed between the evaluation of its "operands" (which may often
2345 be evaluated in arbitrary order). Hence if the operands themselves
2346 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2347 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2348 unset means assuming isochronic (or instantaneous) tree equivalence.
2349 Unless comparing arbitrary expression trees, such as from different
2350 statements, this flag can usually be left unset.
2351
2352 If OEP_PURE_SAME is set, then pure functions with identical arguments
2353 are considered the same. It is used when the caller has other ways
2354 to ensure that global memory is unchanged in between. */
2355
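/* Illustrative sketch, not part of the original source: the FLAGS
   argument controls how conservative the equality test is.  */
#if 0
static bool
operands_same_demo (tree a, tree b)
{
  /* Only indistinguishable constants pass with OEP_ONLY_CONST ...  */
  if (operand_equal_p (a, b, OEP_ONLY_CONST))
    return true;
  /* ... while the default also accepts e.g. the same VAR_DECL used
     twice, provided neither operand has side effects.  */
  return operand_equal_p (a, b, 0) != 0;
}
#endif
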
2356 int
2357 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2358 {
2359 /* If either is ERROR_MARK, they aren't equal. */
2360 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2361 || TREE_TYPE (arg0) == error_mark_node
2362 || TREE_TYPE (arg1) == error_mark_node)
2363 return 0;
2364
2365 /* Similar, if either does not have a type (like a released SSA name),
2366 they aren't equal. */
2367 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2368 return 0;
2369
2370 /* Check equality of integer constants before bailing out due to
2371 precision differences. */
2372 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2373 return tree_int_cst_equal (arg0, arg1);
2374
2375 /* If both types don't have the same signedness, then we can't consider
2376 them equal. We must check this before the STRIP_NOPS calls
2377 because they may change the signedness of the arguments. As pointers
2378 strictly don't have a signedness, require either two pointers or
2379 two non-pointers as well. */
2380 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2381 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2382 return 0;
2383
2384 /* We cannot consider pointers to different address spaces equal. */
2385 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2386 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2387 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2388 return 0;
2389
2390 /* If both types don't have the same precision, then it is not safe
2391 to strip NOPs. */
2392 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2393 return 0;
2394
2395 STRIP_NOPS (arg0);
2396 STRIP_NOPS (arg1);
2397
2398 /* In case both args are comparisons but with different comparison
2399 code, try to swap the comparison operands of one arg to produce
2400 a match and compare that variant. */
2401 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2402 && COMPARISON_CLASS_P (arg0)
2403 && COMPARISON_CLASS_P (arg1))
2404 {
2405 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2406
2407 if (TREE_CODE (arg0) == swap_code)
2408 return operand_equal_p (TREE_OPERAND (arg0, 0),
2409 TREE_OPERAND (arg1, 1), flags)
2410 && operand_equal_p (TREE_OPERAND (arg0, 1),
2411 TREE_OPERAND (arg1, 0), flags);
2412 }
2413
2414 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2415 /* This is needed for conversions and for COMPONENT_REF.
2416 Might as well play it safe and always test this. */
2417 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2418 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2419 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2420 return 0;
2421
2422 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2423 We don't care about side effects in that case because the SAVE_EXPR
2424 takes care of that for us. In all other cases, two expressions are
2425 equal if they have no side effects. If we have two identical
2426 expressions with side effects that should be treated the same due
2427 to the only side effects being identical SAVE_EXPR's, that will
2428 be detected in the recursive calls below.
2429 If we are taking an invariant address of two identical objects
2430 they are necessarily equal as well. */
2431 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2432 && (TREE_CODE (arg0) == SAVE_EXPR
2433 || (flags & OEP_CONSTANT_ADDRESS_OF)
2434 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2435 return 1;
2436
2437 /* Next handle constant cases, those for which we can return 1 even
2438 if ONLY_CONST is set. */
2439 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2440 switch (TREE_CODE (arg0))
2441 {
2442 case INTEGER_CST:
2443 return tree_int_cst_equal (arg0, arg1);
2444
2445 case FIXED_CST:
2446 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2447 TREE_FIXED_CST (arg1));
2448
2449 case REAL_CST:
2450 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2451 TREE_REAL_CST (arg1)))
2452 return 1;
2453 
2455 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2456 {
2457 /* If we do not distinguish between signed and unsigned zero,
2458 consider them equal. */
2459 if (real_zerop (arg0) && real_zerop (arg1))
2460 return 1;
2461 }
2462 return 0;
2463
2464 case VECTOR_CST:
2465 {
2466 unsigned i;
2467
2468 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2469 return 0;
2470
2471 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2472 {
2473 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2474 VECTOR_CST_ELT (arg1, i), flags))
2475 return 0;
2476 }
2477 return 1;
2478 }
2479
2480 case COMPLEX_CST:
2481 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2482 flags)
2483 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2484 flags));
2485
2486 case STRING_CST:
2487 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2488 && ! memcmp (TREE_STRING_POINTER (arg0),
2489 TREE_STRING_POINTER (arg1),
2490 TREE_STRING_LENGTH (arg0)));
2491
2492 case ADDR_EXPR:
2493 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2494 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2495 ? OEP_CONSTANT_ADDRESS_OF : 0);
2496 default:
2497 break;
2498 }
2499
2500 if (flags & OEP_ONLY_CONST)
2501 return 0;
2502
2503 /* Define macros to test an operand from arg0 and arg1 for equality and a
2504 variant that allows null and views null as being different from any
2505 non-null value. In the latter case, if either is null, they both
2506 must be; otherwise, do the normal comparison. */
2507 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2508 TREE_OPERAND (arg1, N), flags)
2509
2510 #define OP_SAME_WITH_NULL(N) \
2511 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2512 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2513
2514 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2515 {
2516 case tcc_unary:
2517 /* Two conversions are equal only if signedness and modes match. */
2518 switch (TREE_CODE (arg0))
2519 {
2520 CASE_CONVERT:
2521 case FIX_TRUNC_EXPR:
2522 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2523 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2524 return 0;
2525 break;
2526 default:
2527 break;
2528 }
2529
2530 return OP_SAME (0);
2531 
2533 case tcc_comparison:
2534 case tcc_binary:
2535 if (OP_SAME (0) && OP_SAME (1))
2536 return 1;
2537
2538 /* For commutative ops, allow the other order. */
2539 return (commutative_tree_code (TREE_CODE (arg0))
2540 && operand_equal_p (TREE_OPERAND (arg0, 0),
2541 TREE_OPERAND (arg1, 1), flags)
2542 && operand_equal_p (TREE_OPERAND (arg0, 1),
2543 TREE_OPERAND (arg1, 0), flags));
2544
2545 case tcc_reference:
2546 /* If either of the pointer (or reference) expressions we are
2547 dereferencing contain a side effect, these cannot be equal. */
2548 if (TREE_SIDE_EFFECTS (arg0)
2549 || TREE_SIDE_EFFECTS (arg1))
2550 return 0;
2551
2552 switch (TREE_CODE (arg0))
2553 {
2554 case INDIRECT_REF:
2555 case REALPART_EXPR:
2556 case IMAGPART_EXPR:
2557 return OP_SAME (0);
2558
2559 case TARGET_MEM_REF:
2560 /* Require equal extra operands and then fall through to MEM_REF
2561 handling of the two common operands. */
2562 if (!OP_SAME_WITH_NULL (2)
2563 || !OP_SAME_WITH_NULL (3)
2564 || !OP_SAME_WITH_NULL (4))
2565 return 0;
2566 /* Fallthru. */
2567 case MEM_REF:
2568 /* Require equal access sizes, and similar pointer types.
2569 We can have incomplete types for array references of
2570 variable-sized arrays from the Fortran frontend
2571 though. */
2572 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2573 || (TYPE_SIZE (TREE_TYPE (arg0))
2574 && TYPE_SIZE (TREE_TYPE (arg1))
2575 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2576 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2577 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2578 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2579 && OP_SAME (0) && OP_SAME (1));
2580
2581 case ARRAY_REF:
2582 case ARRAY_RANGE_REF:
2583 /* Operands 2 and 3 may be null.
2584 Compare the array index by value if it is constant first as we
2585 may have different types but same value here. */
2586 return (OP_SAME (0)
2587 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2588 TREE_OPERAND (arg1, 1))
2589 || OP_SAME (1))
2590 && OP_SAME_WITH_NULL (2)
2591 && OP_SAME_WITH_NULL (3));
2592
2593 case COMPONENT_REF:
2594 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2595 may be NULL when we're called to compare MEM_EXPRs. */
2596 return OP_SAME_WITH_NULL (0)
2597 && OP_SAME (1)
2598 && OP_SAME_WITH_NULL (2);
2599
2600 case BIT_FIELD_REF:
2601 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2602
2603 default:
2604 return 0;
2605 }
2606
2607 case tcc_expression:
2608 switch (TREE_CODE (arg0))
2609 {
2610 case ADDR_EXPR:
2611 case TRUTH_NOT_EXPR:
2612 return OP_SAME (0);
2613
2614 case TRUTH_ANDIF_EXPR:
2615 case TRUTH_ORIF_EXPR:
2616 return OP_SAME (0) && OP_SAME (1);
2617
2618 case FMA_EXPR:
2619 case WIDEN_MULT_PLUS_EXPR:
2620 case WIDEN_MULT_MINUS_EXPR:
2621 if (!OP_SAME (2))
2622 return 0;
2623 /* The multiplication operands are commutative. */
2624 /* FALLTHRU */
2625
2626 case TRUTH_AND_EXPR:
2627 case TRUTH_OR_EXPR:
2628 case TRUTH_XOR_EXPR:
2629 if (OP_SAME (0) && OP_SAME (1))
2630 return 1;
2631
2632 /* Otherwise take into account this is a commutative operation. */
2633 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2634 TREE_OPERAND (arg1, 1), flags)
2635 && operand_equal_p (TREE_OPERAND (arg0, 1),
2636 TREE_OPERAND (arg1, 0), flags));
2637
2638 case COND_EXPR:
2639 case VEC_COND_EXPR:
2640 case DOT_PROD_EXPR:
2641 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2642
2643 default:
2644 return 0;
2645 }
2646
2647 case tcc_vl_exp:
2648 switch (TREE_CODE (arg0))
2649 {
2650 case CALL_EXPR:
2651 /* If the CALL_EXPRs call different functions, then they
2652 clearly cannot be equal. */
2653 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2654 flags))
2655 return 0;
2656
2657 {
2658 unsigned int cef = call_expr_flags (arg0);
2659 if (flags & OEP_PURE_SAME)
2660 cef &= ECF_CONST | ECF_PURE;
2661 else
2662 cef &= ECF_CONST;
2663 if (!cef)
2664 return 0;
2665 }
2666
2667 /* Now see if all the arguments are the same. */
2668 {
2669 const_call_expr_arg_iterator iter0, iter1;
2670 const_tree a0, a1;
2671 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2672 a1 = first_const_call_expr_arg (arg1, &iter1);
2673 a0 && a1;
2674 a0 = next_const_call_expr_arg (&iter0),
2675 a1 = next_const_call_expr_arg (&iter1))
2676 if (! operand_equal_p (a0, a1, flags))
2677 return 0;
2678
2679 /* If we get here and both argument lists are exhausted
2680 then the CALL_EXPRs are equal. */
2681 return ! (a0 || a1);
2682 }
2683 default:
2684 return 0;
2685 }
2686
2687 case tcc_declaration:
2688 /* Consider __builtin_sqrt equal to sqrt. */
2689 return (TREE_CODE (arg0) == FUNCTION_DECL
2690 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2691 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2692 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2693
2694 default:
2695 return 0;
2696 }
2697
2698 #undef OP_SAME
2699 #undef OP_SAME_WITH_NULL
2700 }
2701 \f
2702 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2703 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2704
2705 When in doubt, return 0. */
2706
2707 static int
2708 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2709 {
2710 int unsignedp1, unsignedpo;
2711 tree primarg0, primarg1, primother;
2712 unsigned int correct_width;
2713
2714 if (operand_equal_p (arg0, arg1, 0))
2715 return 1;
2716
2717 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2718 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2719 return 0;
2720
2721 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2722 and see if the inner values are the same. This removes any
2723 signedness comparison, which doesn't matter here. */
2724 primarg0 = arg0, primarg1 = arg1;
2725 STRIP_NOPS (primarg0);
2726 STRIP_NOPS (primarg1);
2727 if (operand_equal_p (primarg0, primarg1, 0))
2728 return 1;
2729
2730 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2731 actual comparison operand, ARG0.
2732
2733 First throw away any conversions to wider types
2734 already present in the operands. */
2735
2736 primarg1 = get_narrower (arg1, &unsignedp1);
2737 primother = get_narrower (other, &unsignedpo);
2738
2739 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2740 if (unsignedp1 == unsignedpo
2741 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2742 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2743 {
2744 tree type = TREE_TYPE (arg0);
2745
2746 /* Make sure shorter operand is extended the right way
2747 to match the longer operand. */
2748 primarg1 = fold_convert (signed_or_unsigned_type_for
2749 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2750
2751 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2752 return 1;
2753 }
2754
2755 return 0;
2756 }
2757 \f
2758 /* See if ARG is an expression that is either a comparison or is performing
2759 arithmetic on comparisons. The comparisons must only be comparing
2760 two different values, which will be stored in *CVAL1 and *CVAL2; if
2761 they are nonzero it means that some operands have already been found.
2762 No variables may be used anywhere else in the expression except in the
2763 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2764 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2765
2766 If this is true, return 1. Otherwise, return zero. */
2767
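/* Worked example, not part of the original source: for
   ARG = (a < b) | (a == b) the walk succeeds with *CVAL1 = a and
   *CVAL2 = b, whereas (a < b) && (b < c) fails because a third
   value, c, appears in a comparison.  */
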
2768 static int
2769 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2770 {
2771 enum tree_code code = TREE_CODE (arg);
2772 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2773
2774 /* We can handle some of the tcc_expression cases here. */
2775 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2776 tclass = tcc_unary;
2777 else if (tclass == tcc_expression
2778 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2779 || code == COMPOUND_EXPR))
2780 tclass = tcc_binary;
2781
2782 else if (tclass == tcc_expression && code == SAVE_EXPR
2783 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2784 {
2785 /* If we've already found a CVAL1 or CVAL2, this expression is
2786 too complex to handle. */
2787 if (*cval1 || *cval2)
2788 return 0;
2789
2790 tclass = tcc_unary;
2791 *save_p = 1;
2792 }
2793
2794 switch (tclass)
2795 {
2796 case tcc_unary:
2797 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2798
2799 case tcc_binary:
2800 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2801 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2802 cval1, cval2, save_p));
2803
2804 case tcc_constant:
2805 return 1;
2806
2807 case tcc_expression:
2808 if (code == COND_EXPR)
2809 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2810 cval1, cval2, save_p)
2811 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2812 cval1, cval2, save_p)
2813 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2814 cval1, cval2, save_p));
2815 return 0;
2816
2817 case tcc_comparison:
2818 /* First see if we can handle the first operand, then the second. For
2819 the second operand, we know *CVAL1 can't be zero. It must be that
2820 one side of the comparison is each of the values; test for the
2821 case where this isn't true by failing if the two operands
2822 are the same. */
2823
2824 if (operand_equal_p (TREE_OPERAND (arg, 0),
2825 TREE_OPERAND (arg, 1), 0))
2826 return 0;
2827
2828 if (*cval1 == 0)
2829 *cval1 = TREE_OPERAND (arg, 0);
2830 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2831 ;
2832 else if (*cval2 == 0)
2833 *cval2 = TREE_OPERAND (arg, 0);
2834 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2835 ;
2836 else
2837 return 0;
2838
2839 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2840 ;
2841 else if (*cval2 == 0)
2842 *cval2 = TREE_OPERAND (arg, 1);
2843 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2844 ;
2845 else
2846 return 0;
2847
2848 return 1;
2849
2850 default:
2851 return 0;
2852 }
2853 }
2854 \f
2855 /* ARG is a tree that is known to contain just arithmetic operations and
2856 comparisons. Evaluate the operations in the tree substituting NEW0 for
2857 any occurrence of OLD0 as an operand of a comparison and likewise for
2858 NEW1 and OLD1. */
2859
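/* Worked example, not part of the original source: with OLD0 = a,
   NEW0 = x, OLD1 = b and NEW1 = y, the tree for (a < b) || (a == b)
   is rewritten to (x < y) || (x == y); each comparison operand is
   matched either by pointer identity or by operand_equal_p.  */
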
2860 static tree
2861 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2862 tree old1, tree new1)
2863 {
2864 tree type = TREE_TYPE (arg);
2865 enum tree_code code = TREE_CODE (arg);
2866 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2867
2868 /* We can handle some of the tcc_expression cases here. */
2869 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2870 tclass = tcc_unary;
2871 else if (tclass == tcc_expression
2872 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2873 tclass = tcc_binary;
2874
2875 switch (tclass)
2876 {
2877 case tcc_unary:
2878 return fold_build1_loc (loc, code, type,
2879 eval_subst (loc, TREE_OPERAND (arg, 0),
2880 old0, new0, old1, new1));
2881
2882 case tcc_binary:
2883 return fold_build2_loc (loc, code, type,
2884 eval_subst (loc, TREE_OPERAND (arg, 0),
2885 old0, new0, old1, new1),
2886 eval_subst (loc, TREE_OPERAND (arg, 1),
2887 old0, new0, old1, new1));
2888
2889 case tcc_expression:
2890 switch (code)
2891 {
2892 case SAVE_EXPR:
2893 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2894 old1, new1);
2895
2896 case COMPOUND_EXPR:
2897 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2898 old1, new1);
2899
2900 case COND_EXPR:
2901 return fold_build3_loc (loc, code, type,
2902 eval_subst (loc, TREE_OPERAND (arg, 0),
2903 old0, new0, old1, new1),
2904 eval_subst (loc, TREE_OPERAND (arg, 1),
2905 old0, new0, old1, new1),
2906 eval_subst (loc, TREE_OPERAND (arg, 2),
2907 old0, new0, old1, new1));
2908 default:
2909 break;
2910 }
2911 /* Fall through - ??? */
2912
2913 case tcc_comparison:
2914 {
2915 tree arg0 = TREE_OPERAND (arg, 0);
2916 tree arg1 = TREE_OPERAND (arg, 1);
2917
2918 /* We need to check both for exact equality and tree equality. The
2919 former will be true if the operand has a side-effect. In that
2920 case, we know the operand occurred exactly once. */
2921
2922 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2923 arg0 = new0;
2924 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2925 arg0 = new1;
2926
2927 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2928 arg1 = new0;
2929 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2930 arg1 = new1;
2931
2932 return fold_build2_loc (loc, code, type, arg0, arg1);
2933 }
2934
2935 default:
2936 return arg;
2937 }
2938 }
2939 \f
2940 /* Return a tree for the case when the result of an expression is RESULT
2941 converted to TYPE and OMITTED was previously an operand of the expression
2942 but is now not needed (e.g., we folded OMITTED * 0).
2943
2944 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2945 the conversion of RESULT to TYPE. */
2946
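/* Illustrative sketch, not part of the original source: folding
   EXPR * 0 to 0 while keeping EXPR alive for its side effects; the
   result is (EXPR, 0) if EXPR side-effects and plain 0 otherwise.  */
#if 0
static tree
fold_mult_zero_demo (location_t loc, tree type, tree expr)
{
  return omit_one_operand_loc (loc, type, build_int_cst (type, 0), expr);
}
#endif
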
2947 tree
2948 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2949 {
2950 tree t = fold_convert_loc (loc, type, result);
2951
2952 /* If the resulting operand is an empty statement, just return the omitted
2953 statement cast to void. */
2954 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2955 return build1_loc (loc, NOP_EXPR, void_type_node,
2956 fold_ignored_result (omitted));
2957
2958 if (TREE_SIDE_EFFECTS (omitted))
2959 return build2_loc (loc, COMPOUND_EXPR, type,
2960 fold_ignored_result (omitted), t);
2961
2962 return non_lvalue_loc (loc, t);
2963 }
2964
2965 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2966
2967 static tree
2968 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2969 tree omitted)
2970 {
2971 tree t = fold_convert_loc (loc, type, result);
2972
2973 /* If the resulting operand is an empty statement, just return the omitted
2974 statement cast to void. */
2975 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2976 return build1_loc (loc, NOP_EXPR, void_type_node,
2977 fold_ignored_result (omitted));
2978
2979 if (TREE_SIDE_EFFECTS (omitted))
2980 return build2_loc (loc, COMPOUND_EXPR, type,
2981 fold_ignored_result (omitted), t);
2982
2983 return pedantic_non_lvalue_loc (loc, t);
2984 }
2985
2986 /* Return a tree for the case when the result of an expression is RESULT
2987 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2988 of the expression but are now not needed.
2989
2990 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2991 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2992 evaluated before OMITTED2. Otherwise, if neither has side effects,
2993 just do the conversion of RESULT to TYPE. */
2994
2995 tree
2996 omit_two_operands_loc (location_t loc, tree type, tree result,
2997 tree omitted1, tree omitted2)
2998 {
2999 tree t = fold_convert_loc (loc, type, result);
3000
3001 if (TREE_SIDE_EFFECTS (omitted2))
3002 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3003 if (TREE_SIDE_EFFECTS (omitted1))
3004 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3005
3006 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3007 }
3008
3009 \f
3010 /* Return a simplified tree node for the truth-negation of ARG. This
3011 never alters ARG itself. We assume that ARG is an operation that
3012 returns a truth value (0 or 1).
3013
3014 FIXME: one would think we would fold the result, but it causes
3015 problems with the dominator optimizer. */
3016
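/* Worked example, not part of the original source: the
   TRUTH_AND_EXPR case below is De Morgan's law,
     !(a && b)  ==>  !a || !b,
   and a NULL_TREE return means no simplification was found, in which
   case invert_truthvalue_loc below wraps a TRUTH_NOT_EXPR instead.  */
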
3017 tree
3018 fold_truth_not_expr (location_t loc, tree arg)
3019 {
3020 tree type = TREE_TYPE (arg);
3021 enum tree_code code = TREE_CODE (arg);
3022 location_t loc1, loc2;
3023
3024 /* If this is a comparison, we can simply invert it, except for
3025 floating-point non-equality comparisons, in which case we just
3026 enclose a TRUTH_NOT_EXPR around what we have. */
3027
3028 if (TREE_CODE_CLASS (code) == tcc_comparison)
3029 {
3030 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3031 if (FLOAT_TYPE_P (op_type)
3032 && flag_trapping_math
3033 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3034 && code != NE_EXPR && code != EQ_EXPR)
3035 return NULL_TREE;
3036
3037 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3038 if (code == ERROR_MARK)
3039 return NULL_TREE;
3040
3041 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3042 TREE_OPERAND (arg, 1));
3043 }
3044
3045 switch (code)
3046 {
3047 case INTEGER_CST:
3048 return constant_boolean_node (integer_zerop (arg), type);
3049
3050 case TRUTH_AND_EXPR:
3051 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3052 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3053 return build2_loc (loc, TRUTH_OR_EXPR, type,
3054 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3055 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3056
3057 case TRUTH_OR_EXPR:
3058 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3059 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3060 return build2_loc (loc, TRUTH_AND_EXPR, type,
3061 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3062 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3063
3064 case TRUTH_XOR_EXPR:
3065 /* Here we can invert either operand. We invert the first operand
3066 unless the second operand is a TRUTH_NOT_EXPR in which case our
3067 result is the XOR of the first operand with the inside of the
3068 negation of the second operand. */
3069
3070 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3071 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3072 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3073 else
3074 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3075 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3076 TREE_OPERAND (arg, 1));
3077
3078 case TRUTH_ANDIF_EXPR:
3079 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3080 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3081 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3082 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3083 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3084
3085 case TRUTH_ORIF_EXPR:
3086 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3087 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3088 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3089 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3090 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3091
3092 case TRUTH_NOT_EXPR:
3093 return TREE_OPERAND (arg, 0);
3094
3095 case COND_EXPR:
3096 {
3097 tree arg1 = TREE_OPERAND (arg, 1);
3098 tree arg2 = TREE_OPERAND (arg, 2);
3099
3100 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3101 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3102
3103 /* A COND_EXPR may have a throw as one operand, which
3104 then has void type. Just leave void operands
3105 as they are. */
3106 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3107 VOID_TYPE_P (TREE_TYPE (arg1))
3108 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3109 VOID_TYPE_P (TREE_TYPE (arg2))
3110 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3111 }
3112
3113 case COMPOUND_EXPR:
3114 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3115 return build2_loc (loc, COMPOUND_EXPR, type,
3116 TREE_OPERAND (arg, 0),
3117 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3118
3119 case NON_LVALUE_EXPR:
3120 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3121 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3122
3123 CASE_CONVERT:
3124 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3125 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3126
3127 /* ... fall through ... */
3128
3129 case FLOAT_EXPR:
3130 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3131 return build1_loc (loc, TREE_CODE (arg), type,
3132 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3133
3134 case BIT_AND_EXPR:
3135 if (!integer_onep (TREE_OPERAND (arg, 1)))
3136 return NULL_TREE;
3137 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3138
3139 case SAVE_EXPR:
3140 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3141
3142 case CLEANUP_POINT_EXPR:
3143 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3144 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3145 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3146
3147 default:
3148 return NULL_TREE;
3149 }
3150 }
3151
3152 /* Return a simplified tree node for the truth-negation of ARG. This
3153 never alters ARG itself. We assume that ARG is an operation that
3154 returns a truth value (0 or 1).
3155
3156 FIXME: one would think we would fold the result, but it causes
3157 problems with the dominator optimizer. */
3158
3159 tree
3160 invert_truthvalue_loc (location_t loc, tree arg)
3161 {
3162 tree tem;
3163
3164 if (TREE_CODE (arg) == ERROR_MARK)
3165 return arg;
3166
3167 tem = fold_truth_not_expr (loc, arg);
3168 if (!tem)
3169 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3170
3171 return tem;
3172 }
3173
3174 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3175 operands are another bit-wise operation with a common input. If so,
3176 distribute the bit operations to save an operation and possibly two if
3177 constants are involved. For example, convert
3178 (A | B) & (A | C) into A | (B & C)
3179 Further simplification will occur if B and C are constants.
3180
3181 If this optimization cannot be done, 0 will be returned. */
3182
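/* Illustrative sketch, not part of the original source: the rewrite
   relies on the distributive law of Boolean algebra, which holds
   bitwise for every a, b and c.  */
#if 0
static int
distribute_demo (unsigned int a, unsigned int b, unsigned int c)
{
  return ((a | b) & (a | c)) == (a | (b & c));	/* always 1 */
}
#endif
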
3183 static tree
3184 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3185 tree arg0, tree arg1)
3186 {
3187 tree common;
3188 tree left, right;
3189
3190 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3191 || TREE_CODE (arg0) == code
3192 || (TREE_CODE (arg0) != BIT_AND_EXPR
3193 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3194 return 0;
3195
3196 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3197 {
3198 common = TREE_OPERAND (arg0, 0);
3199 left = TREE_OPERAND (arg0, 1);
3200 right = TREE_OPERAND (arg1, 1);
3201 }
3202 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3203 {
3204 common = TREE_OPERAND (arg0, 0);
3205 left = TREE_OPERAND (arg0, 1);
3206 right = TREE_OPERAND (arg1, 0);
3207 }
3208 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3209 {
3210 common = TREE_OPERAND (arg0, 1);
3211 left = TREE_OPERAND (arg0, 0);
3212 right = TREE_OPERAND (arg1, 1);
3213 }
3214 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3215 {
3216 common = TREE_OPERAND (arg0, 1);
3217 left = TREE_OPERAND (arg0, 0);
3218 right = TREE_OPERAND (arg1, 0);
3219 }
3220 else
3221 return 0;
3222
3223 common = fold_convert_loc (loc, type, common);
3224 left = fold_convert_loc (loc, type, left);
3225 right = fold_convert_loc (loc, type, right);
3226 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3227 fold_build2_loc (loc, code, type, left, right));
3228 }
3229
3230 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3231 with code CODE. This optimization is unsafe. */
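/* Worked example, not part of the original source: the two rewrites
   performed below, valid only under unsafe math because rounding may
   differ:
     x/d + y/d    ->  (x + y)/d             (equal divisors)
     x/c1 + x/c2  ->  x * (1/c1 + 1/c2)     (constant divisors)  */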
3232 static tree
3233 distribute_real_division (location_t loc, enum tree_code code, tree type,
3234 tree arg0, tree arg1)
3235 {
3236 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3237 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3238
3239 /* (A / C) +- (B / C) -> (A +- B) / C. */
3240 if (mul0 == mul1
3241 && operand_equal_p (TREE_OPERAND (arg0, 1),
3242 TREE_OPERAND (arg1, 1), 0))
3243 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3244 fold_build2_loc (loc, code, type,
3245 TREE_OPERAND (arg0, 0),
3246 TREE_OPERAND (arg1, 0)),
3247 TREE_OPERAND (arg0, 1));
3248
3249 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3250 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3251 TREE_OPERAND (arg1, 0), 0)
3252 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3253 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3254 {
3255 REAL_VALUE_TYPE r0, r1;
3256 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3257 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3258 if (!mul0)
3259 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3260 if (!mul1)
3261 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3262 real_arithmetic (&r0, code, &r0, &r1);
3263 return fold_build2_loc (loc, MULT_EXPR, type,
3264 TREE_OPERAND (arg0, 0),
3265 build_real (type, r0));
3266 }
3267
3268 return NULL_TREE;
3269 }
3270 \f
3271 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3272 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3273
3274 static tree
3275 make_bit_field_ref (location_t loc, tree inner, tree type,
3276 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3277 {
3278 tree result, bftype;
3279
3280 if (bitpos == 0)
3281 {
3282 tree size = TYPE_SIZE (TREE_TYPE (inner));
3283 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3284 || POINTER_TYPE_P (TREE_TYPE (inner)))
3285 && host_integerp (size, 0)
3286 && tree_low_cst (size, 0) == bitsize)
3287 return fold_convert_loc (loc, type, inner);
3288 }
3289
3290 bftype = type;
3291 if (TYPE_PRECISION (bftype) != bitsize
3292 || TYPE_UNSIGNED (bftype) == !unsignedp)
3293 bftype = build_nonstandard_integer_type (bitsize, 0);
3294
3295 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3296 size_int (bitsize), bitsize_int (bitpos));
3297
3298 if (bftype != type)
3299 result = fold_convert_loc (loc, type, result);
3300
3301 return result;
3302 }
3303
3304 /* Optimize a bit-field compare.
3305
3306 There are two cases: First is a compare against a constant and the
3307 second is a comparison of two items where the fields are at the same
3308 bit position relative to the start of a chunk (byte, halfword, word)
3309 large enough to contain it. In these cases we can avoid the shift
3310 implicit in bitfield extractions.
3311
3312 For constants, we emit a compare of the shifted constant with the
3313 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3314 compared. For two fields at the same position, we do the ANDs with the
3315 similar mask and compare the result of the ANDs.
3316
3317 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3318 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3319 are the left and right operands of the comparison, respectively.
3320
3321 If the optimization described above can be done, we return the resulting
3322 tree. Otherwise we return zero. */
3323
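/* Illustrative sketch, not part of the original source, of the
   constant case on a hypothetical struct, assuming a little-endian
   target with F stored in the low bits of a byte-aligned unit:  */
#if 0
struct demo { unsigned f : 3; };
static int
bitfield_compare_demo (struct demo *p)
{
  /* The source form p->f == 5 becomes a mask-and-compare on the
     containing byte, with no extraction shift.  */
  return (*(unsigned char *) p & 7) == 5;
}
#endif
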
3324 static tree
3325 optimize_bit_field_compare (location_t loc, enum tree_code code,
3326 tree compare_type, tree lhs, tree rhs)
3327 {
3328 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3329 tree type = TREE_TYPE (lhs);
3330 tree signed_type, unsigned_type;
3331 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3332 enum machine_mode lmode, rmode, nmode;
3333 int lunsignedp, runsignedp;
3334 int lvolatilep = 0, rvolatilep = 0;
3335 tree linner, rinner = NULL_TREE;
3336 tree mask;
3337 tree offset;
3338
3339 /* In the strict volatile bitfields case, doing code changes here may prevent
3340 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3341 if (flag_strict_volatile_bitfields > 0)
3342 return 0;
3343
3344 /* Get all the information about the extractions being done. If the bit size
3345 is the same as the size of the underlying object, we aren't doing an
3346 extraction at all and so can do nothing. We also don't want to
3347 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3348 then will no longer be able to replace it. */
3349 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3350 &lunsignedp, &lvolatilep, false);
3351 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3352 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3353 return 0;
3354
3355 if (!const_p)
3356 {
3357 /* If this is not a constant, we can only do something if bit positions,
3358 sizes, and signedness are the same. */
3359 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3360 &runsignedp, &rvolatilep, false);
3361
3362 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3363 || lunsignedp != runsignedp || offset != 0
3364 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3365 return 0;
3366 }
3367
3368 /* See if we can find a mode to refer to this field. We should be able to,
3369 but fail if we can't. */
3370 if (lvolatilep
3371 && GET_MODE_BITSIZE (lmode) > 0
3372 && flag_strict_volatile_bitfields > 0)
3373 nmode = lmode;
3374 else
3375 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3376 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3377 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3378 TYPE_ALIGN (TREE_TYPE (rinner))),
3379 word_mode, lvolatilep || rvolatilep);
3380 if (nmode == VOIDmode)
3381 return 0;
3382
3383 /* Set signed and unsigned types of the precision of this mode for the
3384 shifts below. */
3385 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3386 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3387
3388 /* Compute the bit position and size for the new reference and our offset
3389 within it. If the new reference is the same size as the original, we
3390 won't optimize anything, so return zero. */
3391 nbitsize = GET_MODE_BITSIZE (nmode);
3392 nbitpos = lbitpos & ~ (nbitsize - 1);
3393 lbitpos -= nbitpos;
3394 if (nbitsize == lbitsize)
3395 return 0;
3396
3397 if (BYTES_BIG_ENDIAN)
3398 lbitpos = nbitsize - lbitsize - lbitpos;
3399
3400 /* Make the mask to be used against the extracted field. */
3401 mask = build_int_cst_type (unsigned_type, -1);
3402 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3403 mask = const_binop (RSHIFT_EXPR, mask,
3404 size_int (nbitsize - lbitsize - lbitpos));
3405
3406 if (! const_p)
3407 /* If not comparing with constant, just rework the comparison
3408 and return. */
3409 return fold_build2_loc (loc, code, compare_type,
3410 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3411 make_bit_field_ref (loc, linner,
3412 unsigned_type,
3413 nbitsize, nbitpos,
3414 1),
3415 mask),
3416 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3417 make_bit_field_ref (loc, rinner,
3418 unsigned_type,
3419 nbitsize, nbitpos,
3420 1),
3421 mask));
3422
3423 /* Otherwise, we are handling the constant case. See if the constant is too
3424 big for the field. Warn and return a tree for 0 (false) if so. We do
3425 this not only for its own sake, but to avoid having to test for this
3426 error case below. If we didn't, we might generate wrong code.
3427
3428 For unsigned fields, the constant shifted right by the field length should
3429 be all zero. For signed fields, the high-order bits should agree with
3430 the sign bit. */
3431
3432 if (lunsignedp)
3433 {
3434 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3435 fold_convert_loc (loc,
3436 unsigned_type, rhs),
3437 size_int (lbitsize))))
3438 {
3439 warning (0, "comparison is always %d due to width of bit-field",
3440 code == NE_EXPR);
3441 return constant_boolean_node (code == NE_EXPR, compare_type);
3442 }
3443 }
3444 else
3445 {
3446 tree tem = const_binop (RSHIFT_EXPR,
3447 fold_convert_loc (loc, signed_type, rhs),
3448 size_int (lbitsize - 1));
3449 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3450 {
3451 warning (0, "comparison is always %d due to width of bit-field",
3452 code == NE_EXPR);
3453 return constant_boolean_node (code == NE_EXPR, compare_type);
3454 }
3455 }
3456
3457 /* Single-bit compares should always be against zero. */
3458 if (lbitsize == 1 && ! integer_zerop (rhs))
3459 {
3460 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3461 rhs = build_int_cst (type, 0);
3462 }
3463
3464 /* Make a new bitfield reference, shift the constant over the
3465 appropriate number of bits and mask it with the computed mask
3466 (in case this was a signed field). If we changed it, make a new one. */
3467 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3468 if (lvolatilep)
3469 {
3470 TREE_SIDE_EFFECTS (lhs) = 1;
3471 TREE_THIS_VOLATILE (lhs) = 1;
3472 }
3473
3474 rhs = const_binop (BIT_AND_EXPR,
3475 const_binop (LSHIFT_EXPR,
3476 fold_convert_loc (loc, unsigned_type, rhs),
3477 size_int (lbitpos)),
3478 mask);
3479
3480 lhs = build2_loc (loc, code, compare_type,
3481 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3482 return lhs;
3483 }
3484 \f
3485 /* Subroutine for fold_truth_andor_1: decode a field reference.
3486
3487 If EXP is a comparison reference, we return the innermost reference.
3488
3489 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3490 set to the starting bit number.
3491
3492 If the innermost field can be completely contained in a mode-sized
3493 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3494
3495 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3496 otherwise it is not changed.
3497
3498 *PUNSIGNEDP is set to the signedness of the field.
3499
3500 *PMASK is set to the mask used. This is either contained in a
3501 BIT_AND_EXPR or derived from the width of the field.
3502
3503 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3504
3505 Return 0 if this is not a component reference or is one that we can't
3506 do anything with. */
3507
3508 static tree
3509 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3510 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3511 int *punsignedp, int *pvolatilep,
3512 tree *pmask, tree *pand_mask)
3513 {
3514 tree outer_type = 0;
3515 tree and_mask = 0;
3516 tree mask, inner, offset;
3517 tree unsigned_type;
3518 unsigned int precision;
3519
3520 /* All the optimizations using this function assume integer fields.
3521 There are problems with FP fields since the type_for_size call
3522 below can fail for, e.g., XFmode. */
3523 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3524 return 0;
3525
3526 /* We are interested in the bare arrangement of bits, so strip everything
3527 that doesn't affect the machine mode. However, record the type of the
3528 outermost expression if it may matter below. */
3529 if (CONVERT_EXPR_P (exp)
3530 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3531 outer_type = TREE_TYPE (exp);
3532 STRIP_NOPS (exp);
3533
3534 if (TREE_CODE (exp) == BIT_AND_EXPR)
3535 {
3536 and_mask = TREE_OPERAND (exp, 1);
3537 exp = TREE_OPERAND (exp, 0);
3538 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3539 if (TREE_CODE (and_mask) != INTEGER_CST)
3540 return 0;
3541 }
3542
3543 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3544 punsignedp, pvolatilep, false);
3545 if ((inner == exp && and_mask == 0)
3546 || *pbitsize < 0 || offset != 0
3547 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3548 return 0;
3549
3550 /* If the number of bits in the reference is the same as the bitsize of
3551 the outer type, then the outer type gives the signedness. Otherwise
3552 (in case of a small bitfield) the signedness is unchanged. */
3553 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3554 *punsignedp = TYPE_UNSIGNED (outer_type);
3555
3556 /* Compute the mask to access the bitfield. */
3557 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3558 precision = TYPE_PRECISION (unsigned_type);
3559
3560 mask = build_int_cst_type (unsigned_type, -1);
3561
3562 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3563 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3564
3565 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3566 if (and_mask != 0)
3567 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3568 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3569
3570 *pmask = mask;
3571 *pand_mask = and_mask;
3572 return inner;
3573 }
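
/* Illustrative sketch, not part of the compiler proper: the
   LSHIFT/RSHIFT pair above builds a mask of *PBITSIZE low-order ones.
   The helper name and the use of plain unsigned int are hypothetical.  */
static unsigned int
low_ones_mask_demo (unsigned int bitsize)
{
  unsigned int precision = sizeof (unsigned int) * 8;
  if (bitsize == 0)
    return 0;
  if (bitsize >= precision)
    return ~0U;
  /* Shifting all-ones left and then logically right by the same amount
     clears the top PRECISION - BITSIZE bits, leaving exactly BITSIZE
     ones in the low-order positions.  */
  return (~0U << (precision - bitsize)) >> (precision - bitsize);
}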
3574
3575 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3576 bit positions. */
3577
3578 static int
3579 all_ones_mask_p (const_tree mask, int size)
3580 {
3581 tree type = TREE_TYPE (mask);
3582 unsigned int precision = TYPE_PRECISION (type);
3583 tree tmask;
3584
3585 tmask = build_int_cst_type (signed_type_for (type), -1);
3586
3587 return
3588 tree_int_cst_equal (mask,
3589 const_binop (RSHIFT_EXPR,
3590 const_binop (LSHIFT_EXPR, tmask,
3591 size_int (precision - size)),
3592 size_int (precision - size)));
3593 }
3594
3595 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3596 represents the sign bit of EXP's type. If EXP represents a sign
3597 or zero extension, also test VAL against the unextended type.
3598 The return value is the (sub)expression whose sign bit is VAL,
3599 or NULL_TREE otherwise. */
3600
3601 static tree
3602 sign_bit_p (tree exp, const_tree val)
3603 {
3604 unsigned HOST_WIDE_INT mask_lo, lo;
3605 HOST_WIDE_INT mask_hi, hi;
3606 int width;
3607 tree t;
3608
3609 /* Tree EXP must have an integral type. */
3610 t = TREE_TYPE (exp);
3611 if (! INTEGRAL_TYPE_P (t))
3612 return NULL_TREE;
3613
3614 /* Tree VAL must be an integer constant. */
3615 if (TREE_CODE (val) != INTEGER_CST
3616 || TREE_OVERFLOW (val))
3617 return NULL_TREE;
3618
3619 width = TYPE_PRECISION (t);
3620 if (width > HOST_BITS_PER_WIDE_INT)
3621 {
3622 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3623 lo = 0;
3624
3625 mask_hi = ((unsigned HOST_WIDE_INT) -1
3626 >> (HOST_BITS_PER_DOUBLE_INT - width));
3627 mask_lo = -1;
3628 }
3629 else
3630 {
3631 hi = 0;
3632 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3633
3634 mask_hi = 0;
3635 mask_lo = ((unsigned HOST_WIDE_INT) -1
3636 >> (HOST_BITS_PER_WIDE_INT - width));
3637 }
3638
3639 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3640 treat VAL as if it were unsigned. */
3641 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3642 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3643 return exp;
3644
3645 /* Handle extension from a narrower type. */
3646 if (TREE_CODE (exp) == NOP_EXPR
3647 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3648 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3649
3650 return NULL_TREE;
3651 }
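
/* Illustrative sketch (hypothetical helper, standalone): for a W-bit
   field the sign-bit constant is 1 << (W - 1); masking VAL to W bits
   before comparing is all the double-word arithmetic above does.  */
static int
sign_bit_demo (void)
{
  unsigned int width = 8;				/* an 8-bit type */
  unsigned long sign = 1UL << (width - 1);		/* 0x80 */
  unsigned long mask = ~0UL >> (sizeof (unsigned long) * 8 - width);
  return (0x80UL & mask) == sign && (0x7fUL & mask) != sign;
}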
3652
3653 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3654 to be evaluated unconditionally. */
3655
3656 static int
3657 simple_operand_p (const_tree exp)
3658 {
3659 /* Strip any conversions that don't change the machine mode. */
3660 STRIP_NOPS (exp);
3661
3662 return (CONSTANT_CLASS_P (exp)
3663 || TREE_CODE (exp) == SSA_NAME
3664 || (DECL_P (exp)
3665 && ! TREE_ADDRESSABLE (exp)
3666 && ! TREE_THIS_VOLATILE (exp)
3667 && ! DECL_NONLOCAL (exp)
3668 /* Don't regard global variables as simple. They may be
3669 allocated in ways unknown to the compiler (shared memory,
3670 #pragma weak, etc). */
3671 && ! TREE_PUBLIC (exp)
3672 && ! DECL_EXTERNAL (exp)
3673 /* Loading a static variable is unduly expensive, but global
3674 registers aren't expensive. */
3675 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3676 }
3677
3678 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3679 to be evaluated unconditionally.
3680 In addition to simple_operand_p, we assume that comparisons, conversions,
3681 and logic-not operations are simple if their operands are simple, too.
3682
3683 static bool
3684 simple_operand_p_2 (tree exp)
3685 {
3686 enum tree_code code;
3687
3688 if (TREE_SIDE_EFFECTS (exp)
3689 || tree_could_trap_p (exp))
3690 return false;
3691
3692 while (CONVERT_EXPR_P (exp))
3693 exp = TREE_OPERAND (exp, 0);
3694
3695 code = TREE_CODE (exp);
3696
3697 if (TREE_CODE_CLASS (code) == tcc_comparison)
3698 return (simple_operand_p (TREE_OPERAND (exp, 0))
3699 && simple_operand_p (TREE_OPERAND (exp, 1)));
3700
3701 if (code == TRUTH_NOT_EXPR)
3702 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3703
3704 return simple_operand_p (exp);
3705 }
3706
3707 \f
3708 /* The following functions are subroutines to fold_range_test and allow it to
3709 try to change a logical combination of comparisons into a range test.
3710
3711 For example, both
3712 X == 2 || X == 3 || X == 4 || X == 5
3713 and
3714 X >= 2 && X <= 5
3715 are converted to
3716 (unsigned) (X - 2) <= 3
3717
3718 We describe each set of comparisons as being either inside or outside
3719 a range, using a variable named like IN_P, and then describe the
3720 range with a lower and upper bound. If one of the bounds is omitted,
3721 it represents either the highest or lowest value of the type.
3722
3723 In the comments below, we represent a range by two numbers in brackets
3724 preceded by a "+" to designate being inside that range, or a "-" to
3725 designate being outside that range, so the condition can be inverted by
3726 flipping the prefix. An omitted bound is represented by a "-". For
3727 example, "- [-, 10]" means being outside the range starting at the lowest
3728 possible value and ending at 10, in other words, being greater than 10.
3729 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3730 always false.
3731
3732 We set up things so that the missing bounds are handled in a consistent
3733 manner so neither a missing bound nor "true" and "false" need to be
3734 handled using a special case. */
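
/* Illustrative sketch, not part of the folder proper: an exhaustive
   check over a small domain that both source forms above are
   equivalent to the single unsigned range test.  The helper name is
   hypothetical.  */
static int
range_test_demo (void)
{
  int x;
  for (x = -100; x <= 100; x++)
    {
      int by_or = (x == 2 || x == 3 || x == 4 || x == 5);
      int by_and = (x >= 2 && x <= 5);
      int by_range = ((unsigned) (x - 2) <= 3);
      if (by_or != by_range || by_and != by_range)
	return 0;
    }
  return 1;
}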
3735
3736 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3737 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3738 and UPPER1_P are nonzero if the respective argument is an upper bound
3739 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3740 must be specified for a comparison. ARG1 will be converted to ARG0's
3741 type if both are specified. */
3742
3743 static tree
3744 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3745 tree arg1, int upper1_p)
3746 {
3747 tree tem;
3748 int result;
3749 int sgn0, sgn1;
3750
3751 /* If neither arg represents infinity, do the normal operation.
3752 Else, if not a comparison, return infinity. Else handle the special
3753 comparison rules. Note that most of the cases below won't occur, but
3754 are handled for consistency. */
3755
3756 if (arg0 != 0 && arg1 != 0)
3757 {
3758 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3759 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3760 STRIP_NOPS (tem);
3761 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3762 }
3763
3764 if (TREE_CODE_CLASS (code) != tcc_comparison)
3765 return 0;
3766
3767 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3768 for neither. In real maths, we cannot assume open-ended ranges are
3769 the same. But this is computer arithmetic, where numbers are finite.
3770 We can therefore substitute for any unbounded range a value Z greater
3771 than any representable number, which permits us to treat unbounded
3772 ranges as equal.
3773 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3774 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3775 switch (code)
3776 {
3777 case EQ_EXPR:
3778 result = sgn0 == sgn1;
3779 break;
3780 case NE_EXPR:
3781 result = sgn0 != sgn1;
3782 break;
3783 case LT_EXPR:
3784 result = sgn0 < sgn1;
3785 break;
3786 case LE_EXPR:
3787 result = sgn0 <= sgn1;
3788 break;
3789 case GT_EXPR:
3790 result = sgn0 > sgn1;
3791 break;
3792 case GE_EXPR:
3793 result = sgn0 >= sgn1;
3794 break;
3795 default:
3796 gcc_unreachable ();
3797 }
3798
3799 return constant_boolean_node (result, type);
3800 }
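
/* Illustrative sketch of the SGN encoding above (hypothetical,
   standalone): -1 stands for a missing lower bound (-infinity), +1
   for a missing upper bound (+infinity), 0 for any finite value, and
   ordinary integer comparisons then implement the special rules.  */
static int
infinity_encoding_demo (void)
{
  int sgn_missing_low = -1, sgn_missing_low2 = -1;
  int sgn_missing_high = 1;
  int sgn_finite = 0;
  return sgn_missing_low < sgn_finite		/* -inf <  x    */
	 && sgn_finite < sgn_missing_high	/*  x   <  +inf */
	 && sgn_missing_low == sgn_missing_low2; /* -inf == -inf */
}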
3801 \f
3802 /* Helper routine for make_range. Perform one step for it, return
3803 new expression if the loop should continue or NULL_TREE if it should
3804 stop. */
3805
3806 tree
3807 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3808 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3809 bool *strict_overflow_p)
3810 {
3811 tree arg0_type = TREE_TYPE (arg0);
3812 tree n_low, n_high, low = *p_low, high = *p_high;
3813 int in_p = *p_in_p, n_in_p;
3814
3815 switch (code)
3816 {
3817 case TRUTH_NOT_EXPR:
3818 *p_in_p = ! in_p;
3819 return arg0;
3820
3821 case EQ_EXPR: case NE_EXPR:
3822 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3823 /* We can only do something if the range is testing for zero
3824 and if the second operand is an integer constant. Note that
3825 saying something is "in" the range we make is done by
3826 complementing IN_P, since the initial state already encodes being
3827 not equal to zero; "out" is leaving IN_P alone.
3828 if (low == NULL_TREE || high == NULL_TREE
3829 || ! integer_zerop (low) || ! integer_zerop (high)
3830 || TREE_CODE (arg1) != INTEGER_CST)
3831 return NULL_TREE;
3832
3833 switch (code)
3834 {
3835 case NE_EXPR: /* - [c, c] */
3836 low = high = arg1;
3837 break;
3838 case EQ_EXPR: /* + [c, c] */
3839 in_p = ! in_p, low = high = arg1;
3840 break;
3841 case GT_EXPR: /* - [-, c] */
3842 low = 0, high = arg1;
3843 break;
3844 case GE_EXPR: /* + [c, -] */
3845 in_p = ! in_p, low = arg1, high = 0;
3846 break;
3847 case LT_EXPR: /* - [c, -] */
3848 low = arg1, high = 0;
3849 break;
3850 case LE_EXPR: /* + [-, c] */
3851 in_p = ! in_p, low = 0, high = arg1;
3852 break;
3853 default:
3854 gcc_unreachable ();
3855 }
3856
3857 /* If this is an unsigned comparison, we also know that EXP is
3858 greater than or equal to zero. We base the range tests we make
3859 on that fact, so we record it here so we can parse existing
3860 range tests. We test arg0_type since often the return type
3861 of, e.g. EQ_EXPR, is boolean. */
3862 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3863 {
3864 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3865 in_p, low, high, 1,
3866 build_int_cst (arg0_type, 0),
3867 NULL_TREE))
3868 return NULL_TREE;
3869
3870 in_p = n_in_p, low = n_low, high = n_high;
3871
3872 /* If the high bound is missing, but we have a nonzero low
3873 bound, reverse the range so it goes from zero to the low bound
3874 minus 1. */
3875 if (high == 0 && low && ! integer_zerop (low))
3876 {
3877 in_p = ! in_p;
3878 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3879 integer_one_node, 0);
3880 low = build_int_cst (arg0_type, 0);
3881 }
3882 }
3883
3884 *p_low = low;
3885 *p_high = high;
3886 *p_in_p = in_p;
3887 return arg0;
3888
3889 case NEGATE_EXPR:
3890 /* If flag_wrapv and ARG0_TYPE is signed, make sure LOW and HIGH
3891 are non-NULL; the normalize step below will then do the right thing.
3892 if (!TYPE_UNSIGNED (arg0_type)
3893 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3894 {
3895 if (low == NULL_TREE)
3896 low = TYPE_MIN_VALUE (arg0_type);
3897 if (high == NULL_TREE)
3898 high = TYPE_MAX_VALUE (arg0_type);
3899 }
3900
3901 /* (-x) IN [a,b] -> x in [-b, -a] */
3902 n_low = range_binop (MINUS_EXPR, exp_type,
3903 build_int_cst (exp_type, 0),
3904 0, high, 1);
3905 n_high = range_binop (MINUS_EXPR, exp_type,
3906 build_int_cst (exp_type, 0),
3907 0, low, 0);
3908 if (n_high != 0 && TREE_OVERFLOW (n_high))
3909 return NULL_TREE;
3910 goto normalize;
3911
3912 case BIT_NOT_EXPR:
3913 /* ~ X -> -X - 1 */
3914 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3915 build_int_cst (exp_type, 1));
3916
3917 case PLUS_EXPR:
3918 case MINUS_EXPR:
3919 if (TREE_CODE (arg1) != INTEGER_CST)
3920 return NULL_TREE;
3921
3922 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3923 move a constant to the other side. */
3924 if (!TYPE_UNSIGNED (arg0_type)
3925 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3926 return NULL_TREE;
3927
3928 /* If EXP is signed, any overflow in the computation is undefined,
3929 so we don't worry about it so long as our computations on
3930 the bounds don't overflow. For unsigned, overflow is defined
3931 and this is exactly the right thing. */
3932 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3933 arg0_type, low, 0, arg1, 0);
3934 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3935 arg0_type, high, 1, arg1, 0);
3936 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3937 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3938 return NULL_TREE;
3939
3940 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3941 *strict_overflow_p = true;
3942
3943 normalize:
3944 /* Check for an unsigned range which has wrapped around the maximum
3945 value thus making n_high < n_low, and normalize it. */
3946 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3947 {
3948 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3949 integer_one_node, 0);
3950 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3951 integer_one_node, 0);
3952
3953 /* If the range is of the form +/- [ x+1, x ], we won't
3954 be able to normalize it. But then, it represents the
3955 whole range or the empty set, so make it
3956 +/- [ -, - ]. */
3957 if (tree_int_cst_equal (n_low, low)
3958 && tree_int_cst_equal (n_high, high))
3959 low = high = 0;
3960 else
3961 in_p = ! in_p;
3962 }
3963 else
3964 low = n_low, high = n_high;
3965
3966 *p_low = low;
3967 *p_high = high;
3968 *p_in_p = in_p;
3969 return arg0;
3970
3971 CASE_CONVERT:
3972 case NON_LVALUE_EXPR:
3973 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3974 return NULL_TREE;
3975
3976 if (! INTEGRAL_TYPE_P (arg0_type)
3977 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3978 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3979 return NULL_TREE;
3980
3981 n_low = low, n_high = high;
3982
3983 if (n_low != 0)
3984 n_low = fold_convert_loc (loc, arg0_type, n_low);
3985
3986 if (n_high != 0)
3987 n_high = fold_convert_loc (loc, arg0_type, n_high);
3988
3989 /* If we're converting ARG0 from an unsigned type to EXP's
3990 signed type, we will be doing the comparison as unsigned.
3991 The tests above have already verified that LOW and HIGH
3992 are both positive.
3993
3994 So we have to ensure that we will handle large unsigned
3995 values the same way that the current signed bounds treat
3996 negative values. */
3997
3998 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3999 {
4000 tree high_positive;
4001 tree equiv_type;
4002 /* For fixed-point modes, we need to pass the saturating flag
4003 as the 2nd parameter. */
4004 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4005 equiv_type
4006 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4007 TYPE_SATURATING (arg0_type));
4008 else
4009 equiv_type
4010 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4011
4012 /* A range without an upper bound is, naturally, unbounded.
4013 Since convert would have cropped a very large value, use
4014 the max value for the destination type. */
4015 high_positive
4016 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4017 : TYPE_MAX_VALUE (arg0_type);
4018
4019 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4020 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4021 fold_convert_loc (loc, arg0_type,
4022 high_positive),
4023 build_int_cst (arg0_type, 1));
4024
4025 /* If the low bound is specified, "and" the range with the
4026 range for which the original unsigned value will be
4027 positive. */
4028 if (low != 0)
4029 {
4030 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4031 1, fold_convert_loc (loc, arg0_type,
4032 integer_zero_node),
4033 high_positive))
4034 return NULL_TREE;
4035
4036 in_p = (n_in_p == in_p);
4037 }
4038 else
4039 {
4040 /* Otherwise, "or" the range with the range of the input
4041 that will be interpreted as negative. */
4042 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4043 1, fold_convert_loc (loc, arg0_type,
4044 integer_zero_node),
4045 high_positive))
4046 return NULL_TREE;
4047
4048 in_p = (in_p != n_in_p);
4049 }
4050 }
4051
4052 *p_low = n_low;
4053 *p_high = n_high;
4054 *p_in_p = in_p;
4055 return arg0;
4056
4057 default:
4058 return NULL_TREE;
4059 }
4060 }
4061
4062 /* Given EXP, a logical expression, set the range it is testing into
4063 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4064 actually being tested. *PLOW and *PHIGH will be made of the same
4065 type as the returned expression. If EXP is not a comparison, we
4066 will most likely not be returning a useful value and range. Set
4067 *STRICT_OVERFLOW_P to true if the return value is only valid
4068 because signed overflow is undefined; otherwise, do not change
4069 *STRICT_OVERFLOW_P. */
4070
4071 tree
4072 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4073 bool *strict_overflow_p)
4074 {
4075 enum tree_code code;
4076 tree arg0, arg1 = NULL_TREE;
4077 tree exp_type, nexp;
4078 int in_p;
4079 tree low, high;
4080 location_t loc = EXPR_LOCATION (exp);
4081
4082 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4083 and see if we can refine the range. Some of the cases below may not
4084 happen, but it doesn't seem worth worrying about this. We loop for
4085 as long as make_range_step can refine the range, and stop as soon
4086 as it cannot. */
4087
4088 in_p = 0;
4089 low = high = build_int_cst (TREE_TYPE (exp), 0);
4090
4091 while (1)
4092 {
4093 code = TREE_CODE (exp);
4094 exp_type = TREE_TYPE (exp);
4095 arg0 = NULL_TREE;
4096
4097 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4098 {
4099 if (TREE_OPERAND_LENGTH (exp) > 0)
4100 arg0 = TREE_OPERAND (exp, 0);
4101 if (TREE_CODE_CLASS (code) == tcc_binary
4102 || TREE_CODE_CLASS (code) == tcc_comparison
4103 || (TREE_CODE_CLASS (code) == tcc_expression
4104 && TREE_OPERAND_LENGTH (exp) > 1))
4105 arg1 = TREE_OPERAND (exp, 1);
4106 }
4107 if (arg0 == NULL_TREE)
4108 break;
4109
4110 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4111 &high, &in_p, strict_overflow_p);
4112 if (nexp == NULL_TREE)
4113 break;
4114 exp = nexp;
4115 }
4116
4117 /* If EXP is a constant, we can evaluate whether this is true or false. */
4118 if (TREE_CODE (exp) == INTEGER_CST)
4119 {
4120 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4121 exp, 0, low, 0))
4122 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4123 exp, 1, high, 1)));
4124 low = high = 0;
4125 exp = 0;
4126 }
4127
4128 *pin_p = in_p, *plow = low, *phigh = high;
4129 return exp;
4130 }
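
/* Illustrative sketch of the "normalize" step in make_range_step
   (hypothetical, standalone): over an 8-bit unsigned domain, the
   wrapped range [250, 5] (high < low) is the complement of the
   proper range [6, 249].  */
static int
wrapped_range_demo (void)
{
  unsigned int x;
  for (x = 0; x <= 255; x++)
    {
      int in_wrapped = (x >= 250 || x <= 5);
      int out_normalized = ! (x >= 6 && x <= 249);
      if (in_wrapped != out_normalized)
	return 0;
    }
  return 1;
}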
4131 \f
4132 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4133 type, TYPE, return an expression to test if EXP is in (or out of, depending
4134 on IN_P) the range. Return 0 if the test couldn't be created. */
4135
4136 tree
4137 build_range_check (location_t loc, tree type, tree exp, int in_p,
4138 tree low, tree high)
4139 {
4140 tree etype = TREE_TYPE (exp), value;
4141
4142 #ifdef HAVE_canonicalize_funcptr_for_compare
4143 /* Disable this optimization for function pointer expressions
4144 on targets that require function pointer canonicalization. */
4145 if (HAVE_canonicalize_funcptr_for_compare
4146 && TREE_CODE (etype) == POINTER_TYPE
4147 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4148 return NULL_TREE;
4149 #endif
4150
4151 if (! in_p)
4152 {
4153 value = build_range_check (loc, type, exp, 1, low, high);
4154 if (value != 0)
4155 return invert_truthvalue_loc (loc, value);
4156
4157 return 0;
4158 }
4159
4160 if (low == 0 && high == 0)
4161 return build_int_cst (type, 1);
4162
4163 if (low == 0)
4164 return fold_build2_loc (loc, LE_EXPR, type, exp,
4165 fold_convert_loc (loc, etype, high));
4166
4167 if (high == 0)
4168 return fold_build2_loc (loc, GE_EXPR, type, exp,
4169 fold_convert_loc (loc, etype, low));
4170
4171 if (operand_equal_p (low, high, 0))
4172 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4173 fold_convert_loc (loc, etype, low));
4174
4175 if (integer_zerop (low))
4176 {
4177 if (! TYPE_UNSIGNED (etype))
4178 {
4179 etype = unsigned_type_for (etype);
4180 high = fold_convert_loc (loc, etype, high);
4181 exp = fold_convert_loc (loc, etype, exp);
4182 }
4183 return build_range_check (loc, type, exp, 1, 0, high);
4184 }
4185
4186 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4187 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4188 {
4189 unsigned HOST_WIDE_INT lo;
4190 HOST_WIDE_INT hi;
4191 int prec;
4192
4193 prec = TYPE_PRECISION (etype);
4194 if (prec <= HOST_BITS_PER_WIDE_INT)
4195 {
4196 hi = 0;
4197 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4198 }
4199 else
4200 {
4201 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4202 lo = (unsigned HOST_WIDE_INT) -1;
4203 }
4204
4205 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4206 {
4207 if (TYPE_UNSIGNED (etype))
4208 {
4209 tree signed_etype = signed_type_for (etype);
4210 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4211 etype
4212 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4213 else
4214 etype = signed_etype;
4215 exp = fold_convert_loc (loc, etype, exp);
4216 }
4217 return fold_build2_loc (loc, GT_EXPR, type, exp,
4218 build_int_cst (etype, 0));
4219 }
4220 }
4221
4222 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4223 This requires wrap-around arithmetic for the type of the expression.
4224 First make sure that arithmetic in this type is valid, then make sure
4225 that it wraps around. */
4226 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4227 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4228 TYPE_UNSIGNED (etype));
4229
4230 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4231 {
4232 tree utype, minv, maxv;
4233
4234 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4235 for the type in question, as we rely on this here. */
4236 utype = unsigned_type_for (etype);
4237 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4238 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4239 integer_one_node, 1);
4240 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4241
4242 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4243 minv, 1, maxv, 1)))
4244 etype = utype;
4245 else
4246 return 0;
4247 }
4248
4249 high = fold_convert_loc (loc, etype, high);
4250 low = fold_convert_loc (loc, etype, low);
4251 exp = fold_convert_loc (loc, etype, exp);
4252
4253 value = const_binop (MINUS_EXPR, high, low);
4254
4255
4256 if (POINTER_TYPE_P (etype))
4257 {
4258 if (value != 0 && !TREE_OVERFLOW (value))
4259 {
4260 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4261 return build_range_check (loc, type,
4262 fold_build_pointer_plus_loc (loc, exp, low),
4263 1, build_int_cst (etype, 0), value);
4264 }
4265 return 0;
4266 }
4267
4268 if (value != 0 && !TREE_OVERFLOW (value))
4269 return build_range_check (loc, type,
4270 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4271 1, build_int_cst (etype, 0), value);
4272
4273 return 0;
4274 }
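
/* Illustrative sketch of the checks build_range_check emits
   (hypothetical, standalone; assumes the usual two's-complement
   conversion to signed char): the generic form subtracts LOW and
   compares unsigned against HIGH - LOW, and [1, 127] becomes a
   sign test.  */
static int
range_check_forms_demo (void)
{
  unsigned int c;
  for (c = 0; c <= 255; c++)
    {
      /* Generic: 10 <= c && c <= 20  <=>  (unsigned char) (c - 10) <= 10.  */
      int direct = (c >= 10 && c <= 20);
      int rewritten = ((unsigned char) (c - 10) <= 10);
      /* Special case: 1 <= c && c <= 127  <=>  (signed char) c > 0.  */
      int direct2 = (c >= 1 && c <= 127);
      int rewritten2 = ((signed char) c > 0);
      if (direct != rewritten || direct2 != rewritten2)
	return 0;
    }
  return 1;
}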
4275 \f
4276 /* Return the predecessor of VAL in its type, handling the infinite case. */
4277
4278 static tree
4279 range_predecessor (tree val)
4280 {
4281 tree type = TREE_TYPE (val);
4282
4283 if (INTEGRAL_TYPE_P (type)
4284 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4285 return 0;
4286 else
4287 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4288 }
4289
4290 /* Return the successor of VAL in its type, handling the infinite case. */
4291
4292 static tree
4293 range_successor (tree val)
4294 {
4295 tree type = TREE_TYPE (val);
4296
4297 if (INTEGRAL_TYPE_P (type)
4298 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4299 return 0;
4300 else
4301 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4302 }
4303
4304 /* Given two ranges, see if we can merge them into one. Return 1 if we
4305 can, 0 if we can't. Set the output range into the specified parameters. */
4306
4307 bool
4308 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4309 tree high0, int in1_p, tree low1, tree high1)
4310 {
4311 int no_overlap;
4312 int subset;
4313 int temp;
4314 tree tem;
4315 int in_p;
4316 tree low, high;
4317 int lowequal = ((low0 == 0 && low1 == 0)
4318 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4319 low0, 0, low1, 0)));
4320 int highequal = ((high0 == 0 && high1 == 0)
4321 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4322 high0, 1, high1, 1)));
4323
4324 /* Make range 0 be the range that starts first, or ends last if they
4325 start at the same value. Swap them if it isn't. */
4326 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4327 low0, 0, low1, 0))
4328 || (lowequal
4329 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4330 high1, 1, high0, 1))))
4331 {
4332 temp = in0_p, in0_p = in1_p, in1_p = temp;
4333 tem = low0, low0 = low1, low1 = tem;
4334 tem = high0, high0 = high1, high1 = tem;
4335 }
4336
4337 /* Now flag two cases, whether the ranges are disjoint or whether the
4338 second range is totally subsumed in the first. Note that the tests
4339 below are simplified by the ones above. */
4340 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4341 high0, 1, low1, 0));
4342 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4343 high1, 1, high0, 1));
4344
4345 /* We now have four cases, depending on whether we are including or
4346 excluding the two ranges. */
4347 if (in0_p && in1_p)
4348 {
4349 /* If they don't overlap, the result is false. If the second range
4350 is a subset it is the result. Otherwise, the range is from the start
4351 of the second to the end of the first. */
4352 if (no_overlap)
4353 in_p = 0, low = high = 0;
4354 else if (subset)
4355 in_p = 1, low = low1, high = high1;
4356 else
4357 in_p = 1, low = low1, high = high0;
4358 }
4359
4360 else if (in0_p && ! in1_p)
4361 {
4362 /* If they don't overlap, the result is the first range. If they are
4363 equal, the result is false. If the second range is a subset of the
4364 first, and the ranges begin at the same place, we go from just after
4365 the end of the second range to the end of the first. If the second
4366 range is not a subset of the first, or if it is a subset and both
4367 ranges end at the same place, the range starts at the start of the
4368 first range and ends just before the second range.
4369 Otherwise, we can't describe this as a single range. */
4370 if (no_overlap)
4371 in_p = 1, low = low0, high = high0;
4372 else if (lowequal && highequal)
4373 in_p = 0, low = high = 0;
4374 else if (subset && lowequal)
4375 {
4376 low = range_successor (high1);
4377 high = high0;
4378 in_p = 1;
4379 if (low == 0)
4380 {
4381 /* We are in the weird situation where high0 > high1 but
4382 high1 has no successor. Punt. */
4383 return 0;
4384 }
4385 }
4386 else if (! subset || highequal)
4387 {
4388 low = low0;
4389 high = range_predecessor (low1);
4390 in_p = 1;
4391 if (high == 0)
4392 {
4393 /* low0 < low1 but low1 has no predecessor. Punt. */
4394 return 0;
4395 }
4396 }
4397 else
4398 return 0;
4399 }
4400
4401 else if (! in0_p && in1_p)
4402 {
4403 /* If they don't overlap, the result is the second range. If the second
4404 is a subset of the first, the result is false. Otherwise,
4405 the range starts just after the first range and ends at the
4406 end of the second. */
4407 if (no_overlap)
4408 in_p = 1, low = low1, high = high1;
4409 else if (subset || highequal)
4410 in_p = 0, low = high = 0;
4411 else
4412 {
4413 low = range_successor (high0);
4414 high = high1;
4415 in_p = 1;
4416 if (low == 0)
4417 {
4418 /* high1 > high0 but high0 has no successor. Punt. */
4419 return 0;
4420 }
4421 }
4422 }
4423
4424 else
4425 {
4426 /* The case where we are excluding both ranges. Here the complex case
4427 is if they don't overlap. In that case, the only time we have a
4428 range is if they are adjacent. If the second is a subset of the
4429 first, the result is the first. Otherwise, the range to exclude
4430 starts at the beginning of the first range and ends at the end of the
4431 second. */
4432 if (no_overlap)
4433 {
4434 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4435 range_successor (high0),
4436 1, low1, 0)))
4437 in_p = 0, low = low0, high = high1;
4438 else
4439 {
4440 /* Canonicalize - [min, x] into - [-, x]. */
4441 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4442 switch (TREE_CODE (TREE_TYPE (low0)))
4443 {
4444 case ENUMERAL_TYPE:
4445 if (TYPE_PRECISION (TREE_TYPE (low0))
4446 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4447 break;
4448 /* FALLTHROUGH */
4449 case INTEGER_TYPE:
4450 if (tree_int_cst_equal (low0,
4451 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4452 low0 = 0;
4453 break;
4454 case POINTER_TYPE:
4455 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4456 && integer_zerop (low0))
4457 low0 = 0;
4458 break;
4459 default:
4460 break;
4461 }
4462
4463 /* Canonicalize - [x, max] into - [x, -]. */
4464 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4465 switch (TREE_CODE (TREE_TYPE (high1)))
4466 {
4467 case ENUMERAL_TYPE:
4468 if (TYPE_PRECISION (TREE_TYPE (high1))
4469 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4470 break;
4471 /* FALLTHROUGH */
4472 case INTEGER_TYPE:
4473 if (tree_int_cst_equal (high1,
4474 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4475 high1 = 0;
4476 break;
4477 case POINTER_TYPE:
4478 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4479 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4480 high1, 1,
4481 integer_one_node, 1)))
4482 high1 = 0;
4483 break;
4484 default:
4485 break;
4486 }
4487
4488 /* The ranges might be also adjacent between the maximum and
4489 minimum values of the given type. For
4490 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4491 return + [x + 1, y - 1]. */
4492 if (low0 == 0 && high1 == 0)
4493 {
4494 low = range_successor (high0);
4495 high = range_predecessor (low1);
4496 if (low == 0 || high == 0)
4497 return 0;
4498
4499 in_p = 1;
4500 }
4501 else
4502 return 0;
4503 }
4504 }
4505 else if (subset)
4506 in_p = 0, low = low0, high = high0;
4507 else
4508 in_p = 0, low = low0, high = high1;
4509 }
4510
4511 *pin_p = in_p, *plow = low, *phigh = high;
4512 return 1;
4513 }
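
/* Illustrative sketch (hypothetical, standalone): merging the two
   "in" ranges [2, 5] and [4, 9] under a logical AND yields the single
   range [4, 5], as merge_ranges computes via the subset/overlap tests
   above.  */
static int
merge_ranges_demo (void)
{
  int x;
  for (x = -20; x <= 20; x++)
    {
      int both = (x >= 2 && x <= 5) && (x >= 4 && x <= 9);
      int merged = (x >= 4 && x <= 5);
      if (both != merged)
	return 0;
    }
  return 1;
}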
4514 \f
4515
4516 /* Subroutine of fold, looking inside expressions of the form
4517 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4518 of the COND_EXPR. This function is being used also to optimize
4519 A op B ? C : A, by reversing the comparison first.
4520
4521 Return a folded expression whose code is not a COND_EXPR
4522 anymore, or NULL_TREE if no folding opportunity is found. */
4523
4524 static tree
4525 fold_cond_expr_with_comparison (location_t loc, tree type,
4526 tree arg0, tree arg1, tree arg2)
4527 {
4528 enum tree_code comp_code = TREE_CODE (arg0);
4529 tree arg00 = TREE_OPERAND (arg0, 0);
4530 tree arg01 = TREE_OPERAND (arg0, 1);
4531 tree arg1_type = TREE_TYPE (arg1);
4532 tree tem;
4533
4534 STRIP_NOPS (arg1);
4535 STRIP_NOPS (arg2);
4536
4537 /* If we have A op 0 ? A : -A, consider applying the following
4538 transformations:
4539
4540 A == 0? A : -A same as -A
4541 A != 0? A : -A same as A
4542 A >= 0? A : -A same as abs (A)
4543 A > 0? A : -A same as abs (A)
4544 A <= 0? A : -A same as -abs (A)
4545 A < 0? A : -A same as -abs (A)
4546
4547 None of these transformations work for modes with signed
4548 zeros. If A is +/-0, the first two transformations will
4549 change the sign of the result (from +0 to -0, or vice
4550 versa). The last four will fix the sign of the result,
4551 even though the original expressions could be positive or
4552 negative, depending on the sign of A.
4553
4554 Note that all these transformations are correct if A is
4555 NaN, since the two alternatives (A and -A) are also NaNs. */
4556 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4557 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4558 ? real_zerop (arg01)
4559 : integer_zerop (arg01))
4560 && ((TREE_CODE (arg2) == NEGATE_EXPR
4561 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4562 /* In the case that A is of the form X-Y, '-A' (arg2) may
4563 have already been folded to Y-X, check for that. */
4564 || (TREE_CODE (arg1) == MINUS_EXPR
4565 && TREE_CODE (arg2) == MINUS_EXPR
4566 && operand_equal_p (TREE_OPERAND (arg1, 0),
4567 TREE_OPERAND (arg2, 1), 0)
4568 && operand_equal_p (TREE_OPERAND (arg1, 1),
4569 TREE_OPERAND (arg2, 0), 0))))
4570 switch (comp_code)
4571 {
4572 case EQ_EXPR:
4573 case UNEQ_EXPR:
4574 tem = fold_convert_loc (loc, arg1_type, arg1);
4575 return pedantic_non_lvalue_loc (loc,
4576 fold_convert_loc (loc, type,
4577 negate_expr (tem)));
4578 case NE_EXPR:
4579 case LTGT_EXPR:
4580 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4581 case UNGE_EXPR:
4582 case UNGT_EXPR:
4583 if (flag_trapping_math)
4584 break;
4585 /* Fall through. */
4586 case GE_EXPR:
4587 case GT_EXPR:
4588 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4589 arg1 = fold_convert_loc (loc, signed_type_for
4590 (TREE_TYPE (arg1)), arg1);
4591 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4592 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4593 case UNLE_EXPR:
4594 case UNLT_EXPR:
4595 if (flag_trapping_math)
4596 break;
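/* Fall through.  */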
4597 case LE_EXPR:
4598 case LT_EXPR:
4599 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4600 arg1 = fold_convert_loc (loc, signed_type_for
4601 (TREE_TYPE (arg1)), arg1);
4602 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4603 return negate_expr (fold_convert_loc (loc, type, tem));
4604 default:
4605 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4606 break;
4607 }
4608
4609 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4610 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4611 both transformations are correct when A is NaN: A != 0
4612 is then true, and A == 0 is false. */
4613
4614 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4615 && integer_zerop (arg01) && integer_zerop (arg2))
4616 {
4617 if (comp_code == NE_EXPR)
4618 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4619 else if (comp_code == EQ_EXPR)
4620 return build_int_cst (type, 0);
4621 }
4622
4623 /* Try some transformations of A op B ? A : B.
4624
4625 A == B? A : B same as B
4626 A != B? A : B same as A
4627 A >= B? A : B same as max (A, B)
4628 A > B? A : B same as max (B, A)
4629 A <= B? A : B same as min (A, B)
4630 A < B? A : B same as min (B, A)
4631
4632 As above, these transformations don't work in the presence
4633 of signed zeros. For example, if A and B are zeros of
4634 opposite sign, the first two transformations will change
4635 the sign of the result. In the last four, the original
4636 expressions give different results for (A=+0, B=-0) and
4637 (A=-0, B=+0), but the transformed expressions do not.
4638
4639 The first two transformations are correct if either A or B
4640 is a NaN. In the first transformation, the condition will
4641 be false, and B will indeed be chosen. In the case of the
4642 second transformation, the condition A != B will be true,
4643 and A will be chosen.
4644
4645 The conversions to max() and min() are not correct if B is
4646 a number and A is not. The conditions in the original
4647 expressions will be false, so all four give B. The min()
4648 and max() versions would give a NaN instead. */
4649 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4650 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4651 /* Avoid these transformations if the COND_EXPR may be used
4652 as an lvalue in the C++ front-end. PR c++/19199. */
4653 && (in_gimple_form
4654 || (strcmp (lang_hooks.name, "GNU C++") != 0
4655 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4656 || ! maybe_lvalue_p (arg1)
4657 || ! maybe_lvalue_p (arg2)))
4658 {
4659 tree comp_op0 = arg00;
4660 tree comp_op1 = arg01;
4661 tree comp_type = TREE_TYPE (comp_op0);
4662
4663 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4664 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4665 {
4666 comp_type = type;
4667 comp_op0 = arg1;
4668 comp_op1 = arg2;
4669 }
4670
4671 switch (comp_code)
4672 {
4673 case EQ_EXPR:
4674 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4675 case NE_EXPR:
4676 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4677 case LE_EXPR:
4678 case LT_EXPR:
4679 case UNLE_EXPR:
4680 case UNLT_EXPR:
4681 /* In C++ a ?: expression can be an lvalue, so put the
4682 operand which will be used if they are equal first
4683 so that we can convert this back to the
4684 corresponding COND_EXPR. */
4685 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4686 {
4687 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4688 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4689 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4690 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4691 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4692 comp_op1, comp_op0);
4693 return pedantic_non_lvalue_loc (loc,
4694 fold_convert_loc (loc, type, tem));
4695 }
4696 break;
4697 case GE_EXPR:
4698 case GT_EXPR:
4699 case UNGE_EXPR:
4700 case UNGT_EXPR:
4701 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4702 {
4703 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4704 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4705 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4706 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4707 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4708 comp_op1, comp_op0);
4709 return pedantic_non_lvalue_loc (loc,
4710 fold_convert_loc (loc, type, tem));
4711 }
4712 break;
4713 case UNEQ_EXPR:
4714 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4715 return pedantic_non_lvalue_loc (loc,
4716 fold_convert_loc (loc, type, arg2));
4717 break;
4718 case LTGT_EXPR:
4719 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4720 return pedantic_non_lvalue_loc (loc,
4721 fold_convert_loc (loc, type, arg1));
4722 break;
4723 default:
4724 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4725 break;
4726 }
4727 }
4728
4729 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4730 we might still be able to simplify this. For example,
4731 if C1 is one less or one more than C2, this might have started
4732 out as a MIN or MAX and been transformed by this function.
4733 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4734
4735 if (INTEGRAL_TYPE_P (type)
4736 && TREE_CODE (arg01) == INTEGER_CST
4737 && TREE_CODE (arg2) == INTEGER_CST)
4738 switch (comp_code)
4739 {
4740 case EQ_EXPR:
4741 if (TREE_CODE (arg1) == INTEGER_CST)
4742 break;
4743 /* We can replace A with C1 in this case. */
4744 arg1 = fold_convert_loc (loc, type, arg01);
4745 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4746
4747 case LT_EXPR:
4748 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4749 MIN_EXPR, to preserve the signedness of the comparison. */
4750 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4751 OEP_ONLY_CONST)
4752 && operand_equal_p (arg01,
4753 const_binop (PLUS_EXPR, arg2,
4754 build_int_cst (type, 1)),
4755 OEP_ONLY_CONST))
4756 {
4757 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4758 fold_convert_loc (loc, TREE_TYPE (arg00),
4759 arg2));
4760 return pedantic_non_lvalue_loc (loc,
4761 fold_convert_loc (loc, type, tem));
4762 }
4763 break;
4764
4765 case LE_EXPR:
4766 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4767 as above. */
4768 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4769 OEP_ONLY_CONST)
4770 && operand_equal_p (arg01,
4771 const_binop (MINUS_EXPR, arg2,
4772 build_int_cst (type, 1)),
4773 OEP_ONLY_CONST))
4774 {
4775 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4776 fold_convert_loc (loc, TREE_TYPE (arg00),
4777 arg2));
4778 return pedantic_non_lvalue_loc (loc,
4779 fold_convert_loc (loc, type, tem));
4780 }
4781 break;
4782
4783 case GT_EXPR:
4784 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4785 MAX_EXPR, to preserve the signedness of the comparison. */
4786 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4787 OEP_ONLY_CONST)
4788 && operand_equal_p (arg01,
4789 const_binop (MINUS_EXPR, arg2,
4790 build_int_cst (type, 1)),
4791 OEP_ONLY_CONST))
4792 {
4793 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4794 fold_convert_loc (loc, TREE_TYPE (arg00),
4795 arg2));
4796 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4797 }
4798 break;
4799
4800 case GE_EXPR:
4801 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4802 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4803 OEP_ONLY_CONST)
4804 && operand_equal_p (arg01,
4805 const_binop (PLUS_EXPR, arg2,
4806 build_int_cst (type, 1)),
4807 OEP_ONLY_CONST))
4808 {
4809 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4810 fold_convert_loc (loc, TREE_TYPE (arg00),
4811 arg2));
4812 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4813 }
4814 break;
4815 case NE_EXPR:
4816 break;
4817 default:
4818 gcc_unreachable ();
4819 }
4820
4821 return NULL_TREE;
4822 }
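
/* Illustrative sketch of the A op 0 ? A : -A table in the function
   above (hypothetical, standalone), over an integer domain where
   signed zeros and NaNs do not arise: A >= 0 ? A : -A behaves as
   abs (A), and A <= 0 ? A : -A as -abs (A).  */
static int
cond_abs_demo (void)
{
  int a;
  for (a = -50; a <= 50; a++)
    {
      int abs_a = a < 0 ? -a : a;
      if ((a >= 0 ? a : -a) != abs_a)
	return 0;
      if ((a <= 0 ? a : -a) != -abs_a)
	return 0;
    }
  return 1;
}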
4823
4824
4825 \f
4826 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4827 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4828 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4829 false) >= 2)
4830 #endif
4831
4832 /* EXP is some logical combination of boolean tests. See if we can
4833 merge it into some range test. Return the new tree if so. */
4834
4835 static tree
4836 fold_range_test (location_t loc, enum tree_code code, tree type,
4837 tree op0, tree op1)
4838 {
4839 int or_op = (code == TRUTH_ORIF_EXPR
4840 || code == TRUTH_OR_EXPR);
4841 int in0_p, in1_p, in_p;
4842 tree low0, low1, low, high0, high1, high;
4843 bool strict_overflow_p = false;
4844 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4845 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4846 tree tem;
4847 const char * const warnmsg = G_("assuming signed overflow does not occur "
4848 "when simplifying range test");
4849
4850 /* If this is an OR operation, invert both sides; we will invert
4851 again at the end. */
4852 if (or_op)
4853 in0_p = ! in0_p, in1_p = ! in1_p;
4854
4855 /* If both expressions are the same, if we can merge the ranges, and we
4856 can build the range test, return it or its inverse. If one of the
4857 ranges is always true or always false, consider it to be the same
4858 expression as the other. */
4859 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4860 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4861 in1_p, low1, high1)
4862 && 0 != (tem = (build_range_check (loc, type,
4863 lhs != 0 ? lhs
4864 : rhs != 0 ? rhs : integer_zero_node,
4865 in_p, low, high))))
4866 {
4867 if (strict_overflow_p)
4868 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4869 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4870 }
4871
4872 /* On machines where the branch cost is expensive, if this is a
4873 short-circuited branch and the underlying object on both sides
4874 is the same, make a non-short-circuit operation. */
4875 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4876 && lhs != 0 && rhs != 0
4877 && (code == TRUTH_ANDIF_EXPR
4878 || code == TRUTH_ORIF_EXPR)
4879 && operand_equal_p (lhs, rhs, 0))
4880 {
4881 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4882 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4883 which cases we can't do this. */
4884 if (simple_operand_p (lhs))
4885 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4886 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4887 type, op0, op1);
4888
4889 else if (!lang_hooks.decls.global_bindings_p ()
4890 && !CONTAINS_PLACEHOLDER_P (lhs))
4891 {
4892 tree common = save_expr (lhs);
4893
4894 if (0 != (lhs = build_range_check (loc, type, common,
4895 or_op ? ! in0_p : in0_p,
4896 low0, high0))
4897 && (0 != (rhs = build_range_check (loc, type, common,
4898 or_op ? ! in1_p : in1_p,
4899 low1, high1))))
4900 {
4901 if (strict_overflow_p)
4902 fold_overflow_warning (warnmsg,
4903 WARN_STRICT_OVERFLOW_COMPARISON);
4904 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4905 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4906 type, lhs, rhs);
4907 }
4908 }
4909 }
4910
4911 return 0;
4912 }
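
/* Illustrative sketch (hypothetical, standalone): the classic range
   test this function aims to produce, checked exhaustively.  */
static int
digit_range_demo (void)
{
  int ch;
  for (ch = -256; ch <= 256; ch++)
    if ((ch >= '0' && ch <= '9') != ((unsigned) (ch - '0') <= 9))
      return 0;
  return 1;
}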
4913 \f
4914 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4915 bit value. Arrange things so the extra bits will be set to zero if and
4916 only if C is signed-extended to its full width. If MASK is nonzero,
4917 only if C is sign-extended to its full width. If MASK is nonzero,
4918
4919 static tree
4920 unextend (tree c, int p, int unsignedp, tree mask)
4921 {
4922 tree type = TREE_TYPE (c);
4923 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4924 tree temp;
4925
4926 if (p == modesize || unsignedp)
4927 return c;
4928
4929 /* We work by getting just the sign bit into the low-order bit, then
4930 into the high-order bit, then sign-extend. We then XOR that value
4931 with C. */
4932 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4933 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4934
4935 /* We must use a signed type in order to get an arithmetic right shift.
4936 However, we must also avoid introducing accidental overflows, so that
4937 a subsequent call to integer_zerop will work. Hence we must
4938 do the type conversion here. At this point, the constant is either
4939 zero or one, and the conversion to a signed type can never overflow.
4940 We could get an overflow if this conversion is done anywhere else. */
4941 if (TYPE_UNSIGNED (type))
4942 temp = fold_convert (signed_type_for (type), temp);
4943
4944 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4945 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4946 if (mask != 0)
4947 temp = const_binop (BIT_AND_EXPR, temp,
4948 fold_convert (TREE_TYPE (c), mask));
4949 /* If necessary, convert the type back to match the type of C. */
4950 if (TYPE_UNSIGNED (type))
4951 temp = fold_convert (type, temp);
4952
4953 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4954 }
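
/* Illustrative sketch of the shift trick above (hypothetical,
   standalone; assumes 1 <= P < the width of int and the usual
   two's-complement arithmetic right shift): the result has zero
   extra bits exactly when C was sign-extended from P bits.  */
static int
unextend_demo (int c, int p)
{
  int modesize = (int) sizeof (int) * 8;
  unsigned int temp = ((unsigned int) c >> (p - 1)) & 1;  /* sign bit */
  /* A signed type gives the arithmetic right shift that smears the
     sign bit across every position above the P-bit field.  */
  int smear = (int) (temp << (modesize - 1)) >> (modesize - p - 1);
  return c ^ smear;
}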
4955 \f
4956 /* For an expression that has the form
4957 (A && B) || ~B
4958 or
4959 (A || B) && ~B,
4960 we can drop one of the inner expressions and simplify to
4961 A || ~B
4962 or
4963 A && ~B
4964 LOC is the location of the resulting expression. OP is the inner
4965 logical operation; the left-hand side in the examples above, while CMPOP
4966 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4967 removing a condition that guards another, as in
4968 (A != NULL && A->...) || A == NULL
4969 which we must not transform. If RHS_ONLY is true, only eliminate the
4970 right-most operand of the inner logical operation. */
4971
4972 static tree
4973 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4974 bool rhs_only)
4975 {
4976 tree type = TREE_TYPE (cmpop);
4977 enum tree_code code = TREE_CODE (cmpop);
4978 enum tree_code truthop_code = TREE_CODE (op);
4979 tree lhs = TREE_OPERAND (op, 0);
4980 tree rhs = TREE_OPERAND (op, 1);
4981 tree orig_lhs = lhs, orig_rhs = rhs;
4982 enum tree_code rhs_code = TREE_CODE (rhs);
4983 enum tree_code lhs_code = TREE_CODE (lhs);
4984 enum tree_code inv_code;
4985
4986 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4987 return NULL_TREE;
4988
4989 if (TREE_CODE_CLASS (code) != tcc_comparison)
4990 return NULL_TREE;
4991
4992 if (rhs_code == truthop_code)
4993 {
4994 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
4995 if (newrhs != NULL_TREE)
4996 {
4997 rhs = newrhs;
4998 rhs_code = TREE_CODE (rhs);
4999 }
5000 }
5001 if (lhs_code == truthop_code && !rhs_only)
5002 {
5003 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5004 if (newlhs != NULL_TREE)
5005 {
5006 lhs = newlhs;
5007 lhs_code = TREE_CODE (lhs);
5008 }
5009 }
5010
5011 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5012 if (inv_code == rhs_code
5013 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5014 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5015 return lhs;
5016 if (!rhs_only && inv_code == lhs_code
5017 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5018 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5019 return rhs;
5020 if (rhs != orig_rhs || lhs != orig_lhs)
5021 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5022 lhs, rhs);
5023 return NULL_TREE;
5024 }
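
/* Illustrative sketch (standalone truth table): dropping the inner B
   from (A && B) || !B is sound, as is the dual form; the RHS_ONLY
   restriction above exists only because the left-hand rewrite can
   remove a condition that guards evaluation of another.  */
static int
opposite_arm_demo (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
	if (((a && b) || !b) != (a || !b))
	  return 0;
	if (((a || b) && !b) != (a && !b))
	  return 0;
      }
  return 1;
}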
5025
5026 /* Find ways of folding logical expressions of LHS and RHS:
5027 Try to merge two comparisons to the same innermost item.
5028 Look for range tests like "ch >= '0' && ch <= '9'".
5029 Look for combinations of simple terms on machines with expensive branches
5030 and evaluate the RHS unconditionally.
5031
5032 For example, if we have p->a == 2 && p->b == 4 and we can make an
5033 object large enough to span both A and B, we can do this with a comparison
5034 against the object ANDed with a mask.
5035
5036 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5037 operations to do this with one comparison.
5038
5039 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5040 function and the one above.
5041
5042 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5043 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5044
5045 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5046 two operands.
5047
5048 We return the simplified tree or 0 if no optimization is possible. */
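
/* Illustrative sketch of the merged comparison (hypothetical struct
   and names; assumes both fields land in one little-endian 16-bit
   unit): p->a == 2 && p->b == 4 becomes a single masked compare of
   the containing word, which is what the code below builds in tree
   form.  */
struct demo_pair { unsigned char a, b; };

static int
merged_compare_demo (const struct demo_pair *p)
{
  /* Reassemble the 16-bit unit the way a little-endian load would.  */
  unsigned int word = (unsigned int) p->a | ((unsigned int) p->b << 8);
  /* The mask covers both fields; the constant packs 2 and 4 the same
     way.  Equivalent to p->a == 2 && p->b == 4.  */
  return (word & 0xffff) == (2u | (4u << 8));
}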
5049
5050 static tree
5051 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5052 tree lhs, tree rhs)
5053 {
5054 /* If this is the "or" of two comparisons, we can do something if
5055 the comparisons are NE_EXPR. If this is the "and", we can do something
5056 if the comparisons are EQ_EXPR. I.e.,
5057 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5058
5059 WANTED_CODE is this operation code. For single bit fields, we can
5060 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5061 comparison for one-bit fields. */
5062
5063 enum tree_code wanted_code;
5064 enum tree_code lcode, rcode;
5065 tree ll_arg, lr_arg, rl_arg, rr_arg;
5066 tree ll_inner, lr_inner, rl_inner, rr_inner;
5067 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5068 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5069 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5070 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5071 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5072 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5073 enum machine_mode lnmode, rnmode;
5074 tree ll_mask, lr_mask, rl_mask, rr_mask;
5075 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5076 tree l_const, r_const;
5077 tree lntype, rntype, result;
5078 HOST_WIDE_INT first_bit, end_bit;
5079 int volatilep;
5080
5081 /* Start by getting the comparison codes. Fail if anything is volatile.
5082 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5083 it were surrounded with a NE_EXPR. */
5084
5085 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5086 return 0;
5087
5088 lcode = TREE_CODE (lhs);
5089 rcode = TREE_CODE (rhs);
5090
5091 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5092 {
5093 lhs = build2 (NE_EXPR, truth_type, lhs,
5094 build_int_cst (TREE_TYPE (lhs), 0));
5095 lcode = NE_EXPR;
5096 }
5097
5098 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5099 {
5100 rhs = build2 (NE_EXPR, truth_type, rhs,
5101 build_int_cst (TREE_TYPE (rhs), 0));
5102 rcode = NE_EXPR;
5103 }
5104
5105 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5106 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5107 return 0;
5108
5109 ll_arg = TREE_OPERAND (lhs, 0);
5110 lr_arg = TREE_OPERAND (lhs, 1);
5111 rl_arg = TREE_OPERAND (rhs, 0);
5112 rr_arg = TREE_OPERAND (rhs, 1);
5113
5114 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5115 if (simple_operand_p (ll_arg)
5116 && simple_operand_p (lr_arg))
5117 {
5118 if (operand_equal_p (ll_arg, rl_arg, 0)
5119 && operand_equal_p (lr_arg, rr_arg, 0))
5120 {
5121 result = combine_comparisons (loc, code, lcode, rcode,
5122 truth_type, ll_arg, lr_arg);
5123 if (result)
5124 return result;
5125 }
5126 else if (operand_equal_p (ll_arg, rr_arg, 0)
5127 && operand_equal_p (lr_arg, rl_arg, 0))
5128 {
5129 result = combine_comparisons (loc, code, lcode,
5130 swap_tree_comparison (rcode),
5131 truth_type, ll_arg, lr_arg);
5132 if (result)
5133 return result;
5134 }
5135 }
5136
5137 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5138 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5139
5140 /* If the RHS can be evaluated unconditionally and its operands are
5141 simple, it wins to evaluate the RHS unconditionally on machines
5142 with expensive branches. In this case, this isn't a comparison
5143 that can be merged. */
5144
5145 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5146 false) >= 2
5147 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5148 && simple_operand_p (rl_arg)
5149 && simple_operand_p (rr_arg))
5150 {
5151 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5152 if (code == TRUTH_OR_EXPR
5153 && lcode == NE_EXPR && integer_zerop (lr_arg)
5154 && rcode == NE_EXPR && integer_zerop (rr_arg)
5155 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5156 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5157 return build2_loc (loc, NE_EXPR, truth_type,
5158 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5159 ll_arg, rl_arg),
5160 build_int_cst (TREE_TYPE (ll_arg), 0));
5161
5162 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5163 if (code == TRUTH_AND_EXPR
5164 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5165 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5166 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5167 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5168 return build2_loc (loc, EQ_EXPR, truth_type,
5169 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5170 ll_arg, rl_arg),
5171 build_int_cst (TREE_TYPE (ll_arg), 0));
5172 }
5173
5174 /* See if the comparisons can be merged. Then get all the parameters for
5175 each side. */
5176
5177 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5178 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5179 return 0;
5180
5181 volatilep = 0;
5182 ll_inner = decode_field_reference (loc, ll_arg,
5183 &ll_bitsize, &ll_bitpos, &ll_mode,
5184 &ll_unsignedp, &volatilep, &ll_mask,
5185 &ll_and_mask);
5186 lr_inner = decode_field_reference (loc, lr_arg,
5187 &lr_bitsize, &lr_bitpos, &lr_mode,
5188 &lr_unsignedp, &volatilep, &lr_mask,
5189 &lr_and_mask);
5190 rl_inner = decode_field_reference (loc, rl_arg,
5191 &rl_bitsize, &rl_bitpos, &rl_mode,
5192 &rl_unsignedp, &volatilep, &rl_mask,
5193 &rl_and_mask);
5194 rr_inner = decode_field_reference (loc, rr_arg,
5195 &rr_bitsize, &rr_bitpos, &rr_mode,
5196 &rr_unsignedp, &volatilep, &rr_mask,
5197 &rr_and_mask);
5198
5199 /* The inner operation on the lhs of each comparison must be the
5200 same if we are to be able to do anything.
5201 Then see if we have constants. If not, the same must be true for
5202 the rhs's. */
5203 if (volatilep || ll_inner == 0 || rl_inner == 0
5204 || ! operand_equal_p (ll_inner, rl_inner, 0))
5205 return 0;
5206
5207 if (TREE_CODE (lr_arg) == INTEGER_CST
5208 && TREE_CODE (rr_arg) == INTEGER_CST)
5209 l_const = lr_arg, r_const = rr_arg;
5210 else if (lr_inner == 0 || rr_inner == 0
5211 || ! operand_equal_p (lr_inner, rr_inner, 0))
5212 return 0;
5213 else
5214 l_const = r_const = 0;
5215
5216 /* If either comparison code is not correct for our logical operation,
5217 fail. However, we can convert a one-bit comparison against zero into
5218 the opposite comparison against that bit being set in the field. */
5219
5220 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5221 if (lcode != wanted_code)
5222 {
5223 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5224 {
5225 /* Make the left operand unsigned, since we are only interested
5226 in the value of one bit. Otherwise we are doing the wrong
5227 thing below. */
5228 ll_unsignedp = 1;
5229 l_const = ll_mask;
5230 }
5231 else
5232 return 0;
5233 }
5234
5235 /* This is analogous to the code for l_const above. */
5236 if (rcode != wanted_code)
5237 {
5238 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5239 {
5240 rl_unsignedp = 1;
5241 r_const = rl_mask;
5242 }
5243 else
5244 return 0;
5245 }
5246
5247 /* See if we can find a mode that contains both fields being compared on
5248 the left. If we can't, fail. Otherwise, update all constants and masks
5249 to be relative to a field of that size. */
5250 first_bit = MIN (ll_bitpos, rl_bitpos);
5251 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5252 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5253 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5254 volatilep);
5255 if (lnmode == VOIDmode)
5256 return 0;
5257
5258 lnbitsize = GET_MODE_BITSIZE (lnmode);
5259 lnbitpos = first_bit & ~ (lnbitsize - 1);
5260 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5261 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5262
5263 if (BYTES_BIG_ENDIAN)
5264 {
5265 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5266 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5267 }
5268
5269 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5270 size_int (xll_bitpos));
5271 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5272 size_int (xrl_bitpos));
5273
5274 if (l_const)
5275 {
5276 l_const = fold_convert_loc (loc, lntype, l_const);
5277 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5278 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5279 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5280 fold_build1_loc (loc, BIT_NOT_EXPR,
5281 lntype, ll_mask))))
5282 {
5283 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5284
5285 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5286 }
5287 }
5288 if (r_const)
5289 {
5290 r_const = fold_convert_loc (loc, lntype, r_const);
5291 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5292 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5293 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5294 fold_build1_loc (loc, BIT_NOT_EXPR,
5295 lntype, rl_mask))))
5296 {
5297 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5298
5299 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5300 }
5301 }
5302
5303 /* If the right sides are not constant, do the same for them. Also,
5304 disallow this optimization if a size or signedness mismatch occurs
5305 between the left and right sides. */
5306 if (l_const == 0)
5307 {
5308 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5309 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5310 /* Make sure the two fields on the right
5311 correspond to the left without being swapped. */
5312 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5313 return 0;
5314
5315 first_bit = MIN (lr_bitpos, rr_bitpos);
5316 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5317 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5318 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5319 volatilep);
5320 if (rnmode == VOIDmode)
5321 return 0;
5322
5323 rnbitsize = GET_MODE_BITSIZE (rnmode);
5324 rnbitpos = first_bit & ~ (rnbitsize - 1);
5325 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5326 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5327
5328 if (BYTES_BIG_ENDIAN)
5329 {
5330 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5331 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5332 }
5333
5334 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5335 rntype, lr_mask),
5336 size_int (xlr_bitpos));
5337 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5338 rntype, rr_mask),
5339 size_int (xrr_bitpos));
5340
5341 /* Make a mask that corresponds to both fields being compared.
5342 Do this for both items being compared. If the operands are the
5343 same size and the bits being compared are in the same position
5344 then we can do this by masking both and comparing the masked
5345 results. */
5346 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5347 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5348 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5349 {
5350 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5351 ll_unsignedp || rl_unsignedp);
5352 if (! all_ones_mask_p (ll_mask, lnbitsize))
5353 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5354
5355 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5356 lr_unsignedp || rr_unsignedp);
5357 if (! all_ones_mask_p (lr_mask, rnbitsize))
5358 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5359
5360 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5361 }
5362
5363 /* There is still another way we can do something: If both pairs of
5364 fields being compared are adjacent, we may be able to make a wider
5365 field containing them both.
5366
5367 Note that we still must mask the lhs/rhs expressions. Furthermore,
5368 the mask must be shifted to account for the shift done by
5369 make_bit_field_ref. */
5370 if ((ll_bitsize + ll_bitpos == rl_bitpos
5371 && lr_bitsize + lr_bitpos == rr_bitpos)
5372 || (ll_bitpos == rl_bitpos + rl_bitsize
5373 && lr_bitpos == rr_bitpos + rr_bitsize))
5374 {
5375 tree type;
5376
5377 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5378 ll_bitsize + rl_bitsize,
5379 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5380 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5381 lr_bitsize + rr_bitsize,
5382 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5383
5384 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5385 size_int (MIN (xll_bitpos, xrl_bitpos)));
5386 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5387 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5388
5389 /* Convert to the smaller type before masking out unwanted bits. */
5390 type = lntype;
5391 if (lntype != rntype)
5392 {
5393 if (lnbitsize > rnbitsize)
5394 {
5395 lhs = fold_convert_loc (loc, rntype, lhs);
5396 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5397 type = rntype;
5398 }
5399 else if (lnbitsize < rnbitsize)
5400 {
5401 rhs = fold_convert_loc (loc, lntype, rhs);
5402 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5403 type = lntype;
5404 }
5405 }
5406
5407 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5408 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5409
5410 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5411 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5412
5413 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5414 }
5415
5416 return 0;
5417 }
5418
5419 /* Handle the case of comparisons with constants. If there is something in
5420 common between the masks, those bits of the constants must be the same.
5421 If not, the condition is always false. Test for this to avoid generating
5422 incorrect code below. */
5423 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5424 if (! integer_zerop (result)
5425 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5426 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5427 {
5428 if (wanted_code == NE_EXPR)
5429 {
5430 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5431 return constant_boolean_node (true, truth_type);
5432 }
5433 else
5434 {
5435 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5436 return constant_boolean_node (false, truth_type);
5437 }
5438 }
5439
5440 /* Construct the expression we will return. First get the component
5441 reference we will make. Unless the mask is all ones across the
5442 width of that field, perform the mask operation. Then compare with the
5443 merged constant. */
5444 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5445 ll_unsignedp || rl_unsignedp);
5446
5447 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5448 if (! all_ones_mask_p (ll_mask, lnbitsize))
5449 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5450
5451 return build2_loc (loc, wanted_code, truth_type, result,
5452 const_binop (BIT_IOR_EXPR, l_const, r_const));
5453 }
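/* End-to-end sketch of the merging above (hypothetical layout,
   assuming A occupies bits 0-3 and B bits 4-7 of one byte): for
   struct S { unsigned a : 4, b : 4; }, the test s.a == 3 && s.b == 5
   can become a single byte load compared against the merged
   constant (3 | (5 << 4)), i.e. 0x53.  */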
5454 \f
5455 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5456 constant. */
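/* A hedged walk-through of the recursion below: MAX (X, 0) < 5 is
   inverted to MAX (X, 0) >= 5, split into
   MAX (X, 0) == 5 || MAX (X, 0) > 5, folded to X == 5 || X > 5,
   and inverted back, yielding X < 5 for integral X.  */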
5457
5458 static tree
5459 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5460 tree op0, tree op1)
5461 {
5462 tree arg0 = op0;
5463 enum tree_code op_code;
5464 tree comp_const;
5465 tree minmax_const;
5466 int consts_equal, consts_lt;
5467 tree inner;
5468
5469 STRIP_SIGN_NOPS (arg0);
5470
5471 op_code = TREE_CODE (arg0);
5472 minmax_const = TREE_OPERAND (arg0, 1);
5473 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5474 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5475 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5476 inner = TREE_OPERAND (arg0, 0);
5477
5478 /* If something does not permit us to optimize, return the original tree. */
5479 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5480 || TREE_CODE (comp_const) != INTEGER_CST
5481 || TREE_OVERFLOW (comp_const)
5482 || TREE_CODE (minmax_const) != INTEGER_CST
5483 || TREE_OVERFLOW (minmax_const))
5484 return NULL_TREE;
5485
5486 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5487 and GT_EXPR, doing the rest with recursive calls using logical
5488 simplifications. */
5489 switch (code)
5490 {
5491 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5492 {
5493 tree tem
5494 = optimize_minmax_comparison (loc,
5495 invert_tree_comparison (code, false),
5496 type, op0, op1);
5497 if (tem)
5498 return invert_truthvalue_loc (loc, tem);
5499 return NULL_TREE;
5500 }
5501
5502 case GE_EXPR:
5503 return
5504 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5505 optimize_minmax_comparison
5506 (loc, EQ_EXPR, type, arg0, comp_const),
5507 optimize_minmax_comparison
5508 (loc, GT_EXPR, type, arg0, comp_const));
5509
5510 case EQ_EXPR:
5511 if (op_code == MAX_EXPR && consts_equal)
5512 /* MAX (X, 0) == 0 -> X <= 0 */
5513 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5514
5515 else if (op_code == MAX_EXPR && consts_lt)
5516 /* MAX (X, 0) == 5 -> X == 5 */
5517 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5518
5519 else if (op_code == MAX_EXPR)
5520 /* MAX (X, 0) == -1 -> false */
5521 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5522
5523 else if (consts_equal)
5524 /* MIN (X, 0) == 0 -> X >= 0 */
5525 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5526
5527 else if (consts_lt)
5528 /* MIN (X, 0) == 5 -> false */
5529 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5530
5531 else
5532 /* MIN (X, 0) == -1 -> X == -1 */
5533 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5534
5535 case GT_EXPR:
5536 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5537 /* MAX (X, 0) > 0 -> X > 0
5538 MAX (X, 0) > 5 -> X > 5 */
5539 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5540
5541 else if (op_code == MAX_EXPR)
5542 /* MAX (X, 0) > -1 -> true */
5543 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5544
5545 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5546 /* MIN (X, 0) > 0 -> false
5547 MIN (X, 0) > 5 -> false */
5548 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5549
5550 else
5551 /* MIN (X, 0) > -1 -> X > -1 */
5552 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5553
5554 default:
5555 return NULL_TREE;
5556 }
5557 }
5558 \f
5559 /* T is an integer expression that is being multiplied, divided, or taken
5560 modulo a constant C (CODE says which, and what kind of divide or
5561 modulus). See if we can eliminate that operation by folding it with
5562 other operations already in T. WIDE_TYPE, if non-null, is a type that
5563 should be used for the computation if wider than our type.
5564
5565 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5566 (X * 2) + (Y * 4). We must, however, be assured that either the original
5567 expression would not overflow or that overflow is undefined for the type
5568 in the language in question.
5569
5570 If we return a non-null expression, it is an equivalent form of the
5571 original computation, but need not be in the original type.
5572
5573 We set *STRICT_OVERFLOW_P to true if the return value depends on
5574 signed overflow being undefined. Otherwise we do not change
5575 *STRICT_OVERFLOW_P. */
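/* For illustration: (X * 8) / 8 may fold to X only when the original
   multiplication could not have wrapped, e.g. for signed X where
   overflow is undefined; for an unsigned X the fold is refused,
   since X * 8 may legitimately wrap.  */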
5576
5577 static tree
5578 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5579 bool *strict_overflow_p)
5580 {
5581 /* To avoid exponential search depth, refuse to allow recursion past
5582 three levels. Beyond that (1) it's highly unlikely that we'll find
5583 something interesting and (2) we've probably processed it before
5584 when we built the inner expression. */
5585
5586 static int depth;
5587 tree ret;
5588
5589 if (depth > 3)
5590 return NULL;
5591
5592 depth++;
5593 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5594 depth--;
5595
5596 return ret;
5597 }
5598
5599 static tree
5600 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5601 bool *strict_overflow_p)
5602 {
5603 tree type = TREE_TYPE (t);
5604 enum tree_code tcode = TREE_CODE (t);
5605 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5606 > GET_MODE_SIZE (TYPE_MODE (type)))
5607 ? wide_type : type);
5608 tree t1, t2;
5609 int same_p = tcode == code;
5610 tree op0 = NULL_TREE, op1 = NULL_TREE;
5611 bool sub_strict_overflow_p;
5612
5613 /* Don't deal with constants of zero here; they confuse the code below. */
5614 if (integer_zerop (c))
5615 return NULL_TREE;
5616
5617 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5618 op0 = TREE_OPERAND (t, 0);
5619
5620 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5621 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5622
5623 /* Note that we need not handle conditional operations here since fold
5624 already handles those cases. So just do arithmetic here. */
5625 switch (tcode)
5626 {
5627 case INTEGER_CST:
5628 /* For a constant, we can always simplify if we are a multiply
5629 or (for divide and modulus) if it is a multiple of our constant. */
5630 if (code == MULT_EXPR
5631 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5632 return const_binop (code, fold_convert (ctype, t),
5633 fold_convert (ctype, c));
5634 break;
5635
5636 CASE_CONVERT: case NON_LVALUE_EXPR:
5637 /* If op0 is an expression ... */
5638 if ((COMPARISON_CLASS_P (op0)
5639 || UNARY_CLASS_P (op0)
5640 || BINARY_CLASS_P (op0)
5641 || VL_EXP_CLASS_P (op0)
5642 || EXPRESSION_CLASS_P (op0))
5643 /* ... and has wrapping overflow, and its type is smaller
5644 than ctype, then we cannot pass through as widening. */
5645 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5646 && (TYPE_PRECISION (ctype)
5647 > TYPE_PRECISION (TREE_TYPE (op0))))
5648 /* ... or this is a truncation (t is narrower than op0),
5649 then we cannot pass through this narrowing. */
5650 || (TYPE_PRECISION (type)
5651 < TYPE_PRECISION (TREE_TYPE (op0)))
5652 /* ... or signedness changes for division or modulus,
5653 then we cannot pass through this conversion. */
5654 || (code != MULT_EXPR
5655 && (TYPE_UNSIGNED (ctype)
5656 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5657 /* ... or has undefined overflow while the converted to
5658 type has not, we cannot do the operation in the inner type
5659 as that would introduce undefined overflow. */
5660 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5661 && !TYPE_OVERFLOW_UNDEFINED (type))))
5662 break;
5663
5664 /* Pass the constant down and see if we can make a simplification. If
5665 we can, replace this expression with the inner simplification for
5666 possible later conversion to our or some other type. */
5667 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5668 && TREE_CODE (t2) == INTEGER_CST
5669 && !TREE_OVERFLOW (t2)
5670 && (0 != (t1 = extract_muldiv (op0, t2, code,
5671 code == MULT_EXPR
5672 ? ctype : NULL_TREE,
5673 strict_overflow_p))))
5674 return t1;
5675 break;
5676
5677 case ABS_EXPR:
5678 /* If widening the type changes it from signed to unsigned, then we
5679 must avoid building ABS_EXPR itself as unsigned. */
5680 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5681 {
5682 tree cstype = (*signed_type_for) (ctype);
5683 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5684 != 0)
5685 {
5686 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5687 return fold_convert (ctype, t1);
5688 }
5689 break;
5690 }
5691 /* If the constant is negative, we cannot simplify this. */
5692 if (tree_int_cst_sgn (c) == -1)
5693 break;
5694 /* FALLTHROUGH */
5695 case NEGATE_EXPR:
5696 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5697 != 0)
5698 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5699 break;
5700
5701 case MIN_EXPR: case MAX_EXPR:
5702 /* If widening the type changes the signedness, then we can't perform
5703 this optimization as that changes the result. */
5704 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5705 break;
5706
5707 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5708 sub_strict_overflow_p = false;
5709 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5710 &sub_strict_overflow_p)) != 0
5711 && (t2 = extract_muldiv (op1, c, code, wide_type,
5712 &sub_strict_overflow_p)) != 0)
5713 {
5714 if (tree_int_cst_sgn (c) < 0)
5715 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5716 if (sub_strict_overflow_p)
5717 *strict_overflow_p = true;
5718 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5719 fold_convert (ctype, t2));
5720 }
5721 break;
5722
5723 case LSHIFT_EXPR: case RSHIFT_EXPR:
5724 /* If the second operand is constant, this is a multiplication
5725 or floor division, by a power of two, so we can treat it that
5726 way unless the multiplier or divisor overflows. Signed
5727 left-shift overflow is implementation-defined rather than
5728 undefined in C90, so do not convert signed left shift into
5729 multiplication. */
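/* e.g. an unsigned X << 3 is re-expressed here as X * 8, so a
   surrounding (X << 3) * 2 can associate to X * 16 on the
   recursive call.  */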
5730 if (TREE_CODE (op1) == INTEGER_CST
5731 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5732 /* const_binop may not detect overflow correctly,
5733 so check for it explicitly here. */
5734 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5735 && TREE_INT_CST_HIGH (op1) == 0
5736 && 0 != (t1 = fold_convert (ctype,
5737 const_binop (LSHIFT_EXPR,
5738 size_one_node,
5739 op1)))
5740 && !TREE_OVERFLOW (t1))
5741 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5742 ? MULT_EXPR : FLOOR_DIV_EXPR,
5743 ctype,
5744 fold_convert (ctype, op0),
5745 t1),
5746 c, code, wide_type, strict_overflow_p);
5747 break;
5748
5749 case PLUS_EXPR: case MINUS_EXPR:
5750 /* See if we can eliminate the operation on both sides. If we can, we
5751 can return a new PLUS or MINUS. If we can't, the only remaining
5752 cases where we can do anything are if the second operand is a
5753 constant. */
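/* Two hedged examples of this case: (X + 4) * 3 distributes to
   X * 3 + 12 via the MULT_EXPR path at the bottom, and
   (X * 8 + Y * 4) / 4 becomes X * 2 + Y when both terms divide
   evenly (for signed operands with undefined overflow).  */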
5754 sub_strict_overflow_p = false;
5755 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5756 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5757 if (t1 != 0 && t2 != 0
5758 && (code == MULT_EXPR
5759 /* If not multiplication, we can only do this if both operands
5760 are divisible by c. */
5761 || (multiple_of_p (ctype, op0, c)
5762 && multiple_of_p (ctype, op1, c))))
5763 {
5764 if (sub_strict_overflow_p)
5765 *strict_overflow_p = true;
5766 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5767 fold_convert (ctype, t2));
5768 }
5769
5770 /* If this was a subtraction, negate OP1 and set it to be an addition.
5771 This simplifies the logic below. */
5772 if (tcode == MINUS_EXPR)
5773 {
5774 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5775 /* If OP1 was not easily negatable, the constant may be OP0. */
5776 if (TREE_CODE (op0) == INTEGER_CST)
5777 {
5778 tree tem = op0;
5779 op0 = op1;
5780 op1 = tem;
5781 tem = t1;
5782 t1 = t2;
5783 t2 = tem;
5784 }
5785 }
5786
5787 if (TREE_CODE (op1) != INTEGER_CST)
5788 break;
5789
5790 /* If either OP1 or C is negative, this optimization is not safe for
5791 some of the division and remainder types while for others we need
5792 to change the code. */
5793 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5794 {
5795 if (code == CEIL_DIV_EXPR)
5796 code = FLOOR_DIV_EXPR;
5797 else if (code == FLOOR_DIV_EXPR)
5798 code = CEIL_DIV_EXPR;
5799 else if (code != MULT_EXPR
5800 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5801 break;
5802 }
5803
5804 /* If it's a multiply, or a division/modulus whose second operand is
5805 a multiple of our constant, do the operation and verify it doesn't overflow. */
5806 if (code == MULT_EXPR
5807 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5808 {
5809 op1 = const_binop (code, fold_convert (ctype, op1),
5810 fold_convert (ctype, c));
5811 /* We allow the constant to overflow with wrapping semantics. */
5812 if (op1 == 0
5813 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5814 break;
5815 }
5816 else
5817 break;
5818
5819 /* If we have an unsigned type, we cannot widen the operation since it
5820 will change the result if the original computation overflowed. */
5821 if (TYPE_UNSIGNED (ctype) && ctype != type)
5822 break;
5823
5824 /* If we were able to eliminate our operation from the first side,
5825 apply our operation to the second side and reform the PLUS. */
5826 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5827 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5828
5829 /* The last case is if we are a multiply. In that case, we can
5830 apply the distributive law to commute the multiply and addition
5831 if the multiplication of the constants doesn't overflow. */
5832 if (code == MULT_EXPR)
5833 return fold_build2 (tcode, ctype,
5834 fold_build2 (code, ctype,
5835 fold_convert (ctype, op0),
5836 fold_convert (ctype, c)),
5837 op1);
5838
5839 break;
5840
5841 case MULT_EXPR:
5842 /* We have a special case here if we are doing something like
5843 (C * 8) % 4 since we know that's zero. */
5844 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5845 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5846 /* If the multiplication can overflow we cannot optimize this. */
5847 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5848 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5849 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5850 {
5851 *strict_overflow_p = true;
5852 return omit_one_operand (type, integer_zero_node, op0);
5853 }
5854
5855 /* ... fall through ... */
5856
5857 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5858 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5859 /* If we can extract our operation from the LHS, do so and return a
5860 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5861 do something only if the second operand is a constant. */
5862 if (same_p
5863 && (t1 = extract_muldiv (op0, c, code, wide_type,
5864 strict_overflow_p)) != 0)
5865 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5866 fold_convert (ctype, op1));
5867 else if (tcode == MULT_EXPR && code == MULT_EXPR
5868 && (t1 = extract_muldiv (op1, c, code, wide_type,
5869 strict_overflow_p)) != 0)
5870 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5871 fold_convert (ctype, t1));
5872 else if (TREE_CODE (op1) != INTEGER_CST)
5873 return 0;
5874
5875 /* If these are the same operation types, we can associate them
5876 assuming no overflow. */
5877 if (tcode == code)
5878 {
5879 double_int mul;
5880 bool overflow_p;
5881 unsigned prec = TYPE_PRECISION (ctype);
5882 bool uns = TYPE_UNSIGNED (ctype);
5883 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5884 double_int dic = tree_to_double_int (c).ext (prec, uns);
5885 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5886 overflow_p = ((!uns && overflow_p)
5887 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5888 if (!double_int_fits_to_tree_p (ctype, mul)
5889 && ((uns && tcode != MULT_EXPR) || !uns))
5890 overflow_p = 1;
5891 if (!overflow_p)
5892 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5893 double_int_to_tree (ctype, mul));
5894 }
5895
5896 /* If these operations "cancel" each other, we have the main
5897 optimizations of this pass, which occur when either constant is a
5898 multiple of the other, in which case we replace this with either an
5899 operation of CODE or TCODE.
5900
5901 If we have an unsigned type, we cannot do this since it will change
5902 the result if the original computation overflowed. */
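/* Example of the cancellation (signed X, undefined overflow
   assumed): t = X * 12 with c = 4 and code TRUNC_DIV_EXPR folds to
   X * 3, the quotient of the two constants.  */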
5903 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5904 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5905 || (tcode == MULT_EXPR
5906 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5907 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5908 && code != MULT_EXPR)))
5909 {
5910 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5911 {
5912 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5913 *strict_overflow_p = true;
5914 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5915 fold_convert (ctype,
5916 const_binop (TRUNC_DIV_EXPR,
5917 op1, c)));
5918 }
5919 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5920 {
5921 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5922 *strict_overflow_p = true;
5923 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5924 fold_convert (ctype,
5925 const_binop (TRUNC_DIV_EXPR,
5926 c, op1)));
5927 }
5928 }
5929 break;
5930
5931 default:
5932 break;
5933 }
5934
5935 return 0;
5936 }
5937 \f
5938 /* Return a node which has the indicated constant VALUE (either 0 or
5939 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5940 and is of the indicated TYPE. */
5941
5942 tree
5943 constant_boolean_node (bool value, tree type)
5944 {
5945 if (type == integer_type_node)
5946 return value ? integer_one_node : integer_zero_node;
5947 else if (type == boolean_type_node)
5948 return value ? boolean_true_node : boolean_false_node;
5949 else if (TREE_CODE (type) == VECTOR_TYPE)
5950 return build_vector_from_val (type,
5951 build_int_cst (TREE_TYPE (type),
5952 value ? -1 : 0));
5953 else
5954 return fold_convert (type, value ? integer_one_node : integer_zero_node);
5955 }
5956
5957
5958 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5959 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5960 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5961 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5962 COND is the first argument to CODE; otherwise (as in the example
5963 given here), it is the second argument. TYPE is the type of the
5964 original expression. Return NULL_TREE if no simplification is
5965 possible. */
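/* Worked example: 4 + (b ? 1 : 2) becomes b ? 5 : 6; both branches
   fold to constants, satisfying the "simplified at least one
   branch" check below.  */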
5966
5967 static tree
5968 fold_binary_op_with_conditional_arg (location_t loc,
5969 enum tree_code code,
5970 tree type, tree op0, tree op1,
5971 tree cond, tree arg, int cond_first_p)
5972 {
5973 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5974 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5975 tree test, true_value, false_value;
5976 tree lhs = NULL_TREE;
5977 tree rhs = NULL_TREE;
5978 enum tree_code cond_code = COND_EXPR;
5979
5980 if (TREE_CODE (cond) == COND_EXPR
5981 || TREE_CODE (cond) == VEC_COND_EXPR)
5982 {
5983 test = TREE_OPERAND (cond, 0);
5984 true_value = TREE_OPERAND (cond, 1);
5985 false_value = TREE_OPERAND (cond, 2);
5986 /* If this branch is an expression that throws (and so has void
5987 type), it does not make sense to try to perform a logical or
5988 arithmetic operation involving it. */
5989 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5990 lhs = true_value;
5991 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5992 rhs = false_value;
5993 }
5994 else
5995 {
5996 tree testtype = TREE_TYPE (cond);
5997 test = cond;
5998 true_value = constant_boolean_node (true, testtype);
5999 false_value = constant_boolean_node (false, testtype);
6000 }
6001
6002 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6003 cond_code = VEC_COND_EXPR;
6004
6005 /* This transformation is only worthwhile if we don't have to wrap ARG
6006 in a SAVE_EXPR and the operation can be simplified without recursing
6007 on at least one of the branches once it's pushed inside the COND_EXPR. */
6008 if (!TREE_CONSTANT (arg)
6009 && (TREE_SIDE_EFFECTS (arg)
6010 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6011 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6012 return NULL_TREE;
6013
6014 arg = fold_convert_loc (loc, arg_type, arg);
6015 if (lhs == 0)
6016 {
6017 true_value = fold_convert_loc (loc, cond_type, true_value);
6018 if (cond_first_p)
6019 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6020 else
6021 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6022 }
6023 if (rhs == 0)
6024 {
6025 false_value = fold_convert_loc (loc, cond_type, false_value);
6026 if (cond_first_p)
6027 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6028 else
6029 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6030 }
6031
6032 /* Check that we have simplified at least one of the branches. */
6033 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6034 return NULL_TREE;
6035
6036 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6037 }
6038
6039 \f
6040 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6041
6042 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6043 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6044 ADDEND is the same as X.
6045
6046 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6047 and finite. The problematic cases are when X is zero, and its mode
6048 has signed zeros. In the case of rounding towards -infinity,
6049 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6050 modes, X + 0 is not the same as X because -0 + 0 is 0. */
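/* Numeric sketch: under the default rounding mode (-0.0) + 0.0 is
   +0.0, so X + 0.0 is not an identity when X may be -0.0, whereas
   X - 0.0 leaves -0.0 intact; hence only the negate case can
   succeed once signed zeros must be honored.  */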
6051
6052 bool
6053 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6054 {
6055 if (!real_zerop (addend))
6056 return false;
6057
6058 /* Don't allow the fold with -fsignaling-nans. */
6059 if (HONOR_SNANS (TYPE_MODE (type)))
6060 return false;
6061
6062 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6063 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6064 return true;
6065
6066 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6067 if (TREE_CODE (addend) == REAL_CST
6068 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6069 negate = !negate;
6070
6071 /* The mode has signed zeros, and we have to honor their sign.
6072 In this situation, there is only one case we can return true for.
6073 X - 0 is the same as X unless rounding towards -infinity is
6074 supported. */
6075 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6076 }
6077
6078 /* Subroutine of fold() that checks comparisons of built-in math
6079 functions against real constants.
6080
6081 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6082 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6083 is the type of the result and ARG0 and ARG1 are the operands of the
6084 comparison. ARG1 must be a TREE_REAL_CST.
6085
6086 The function returns the constant folded tree if a simplification
6087 can be made, and NULL_TREE otherwise. */
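/* Hedged examples of the sqrt cases below: sqrt (x) > 3.0 folds to
   x > 9.0 since 9.0 is finite, while sqrt (x) >= -1.0, having a
   negative bound, folds to x >= 0.0 when NaNs are honored.  */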
6088
6089 static tree
6090 fold_mathfn_compare (location_t loc,
6091 enum built_in_function fcode, enum tree_code code,
6092 tree type, tree arg0, tree arg1)
6093 {
6094 REAL_VALUE_TYPE c;
6095
6096 if (BUILTIN_SQRT_P (fcode))
6097 {
6098 tree arg = CALL_EXPR_ARG (arg0, 0);
6099 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6100
6101 c = TREE_REAL_CST (arg1);
6102 if (REAL_VALUE_NEGATIVE (c))
6103 {
6104 /* sqrt(x) < y is always false, if y is negative. */
6105 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6106 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6107
6108 /* sqrt(x) > y is always true, if y is negative and we
6109 don't care about NaNs, i.e. negative values of x. */
6110 if (code == NE_EXPR || !HONOR_NANS (mode))
6111 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6112
6113 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6114 return fold_build2_loc (loc, GE_EXPR, type, arg,
6115 build_real (TREE_TYPE (arg), dconst0));
6116 }
6117 else if (code == GT_EXPR || code == GE_EXPR)
6118 {
6119 REAL_VALUE_TYPE c2;
6120
6121 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6122 real_convert (&c2, mode, &c2);
6123
6124 if (REAL_VALUE_ISINF (c2))
6125 {
6126 /* sqrt(x) > y is x == +Inf, when y is very large. */
6127 if (HONOR_INFINITIES (mode))
6128 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6129 build_real (TREE_TYPE (arg), c2));
6130
6131 /* sqrt(x) > y is always false, when y is very large
6132 and we don't care about infinities. */
6133 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6134 }
6135
6136 /* sqrt(x) > c is the same as x > c*c. */
6137 return fold_build2_loc (loc, code, type, arg,
6138 build_real (TREE_TYPE (arg), c2));
6139 }
6140 else if (code == LT_EXPR || code == LE_EXPR)
6141 {
6142 REAL_VALUE_TYPE c2;
6143
6144 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6145 real_convert (&c2, mode, &c2);
6146
6147 if (REAL_VALUE_ISINF (c2))
6148 {
6149 /* sqrt(x) < y is always true, when y is a very large
6150 value and we don't care about NaNs or Infinities. */
6151 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6152 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6153
6154 /* sqrt(x) < y is x != +Inf when y is very large and we
6155 don't care about NaNs. */
6156 if (! HONOR_NANS (mode))
6157 return fold_build2_loc (loc, NE_EXPR, type, arg,
6158 build_real (TREE_TYPE (arg), c2));
6159
6160 /* sqrt(x) < y is x >= 0 when y is very large and we
6161 don't care about Infinities. */
6162 if (! HONOR_INFINITIES (mode))
6163 return fold_build2_loc (loc, GE_EXPR, type, arg,
6164 build_real (TREE_TYPE (arg), dconst0));
6165
6166 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6167 arg = save_expr (arg);
6168 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6169 fold_build2_loc (loc, GE_EXPR, type, arg,
6170 build_real (TREE_TYPE (arg),
6171 dconst0)),
6172 fold_build2_loc (loc, NE_EXPR, type, arg,
6173 build_real (TREE_TYPE (arg),
6174 c2)));
6175 }
6176
6177 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6178 if (! HONOR_NANS (mode))
6179 return fold_build2_loc (loc, code, type, arg,
6180 build_real (TREE_TYPE (arg), c2));
6181
6182 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6183 arg = save_expr (arg);
6184 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6185 fold_build2_loc (loc, GE_EXPR, type, arg,
6186 build_real (TREE_TYPE (arg),
6187 dconst0)),
6188 fold_build2_loc (loc, code, type, arg,
6189 build_real (TREE_TYPE (arg),
6190 c2)));
6191 }
6192 }
6193
6194 return NULL_TREE;
6195 }
6196
6197 /* Subroutine of fold() that optimizes comparisons against Infinities,
6198 either +Inf or -Inf.
6199
6200 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6201 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6202 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6203
6204 The function returns the constant folded tree if a simplification
6205 can be made, and NULL_TREE otherwise. */
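/* For illustration (double, +Inf on the right): x < +Inf becomes
   x <= DBL_MAX, and x >= +Inf becomes x > DBL_MAX; for -Inf the
   sense of the comparison is first swapped.  */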
6206
6207 static tree
6208 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6209 tree arg0, tree arg1)
6210 {
6211 enum machine_mode mode;
6212 REAL_VALUE_TYPE max;
6213 tree temp;
6214 bool neg;
6215
6216 mode = TYPE_MODE (TREE_TYPE (arg0));
6217
6218 /* For negative infinity swap the sense of the comparison. */
6219 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6220 if (neg)
6221 code = swap_tree_comparison (code);
6222
6223 switch (code)
6224 {
6225 case GT_EXPR:
6226 /* x > +Inf is always false, if we ignore sNaNs. */
6227 if (HONOR_SNANS (mode))
6228 return NULL_TREE;
6229 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6230
6231 case LE_EXPR:
6232 /* x <= +Inf is always true, if we don't care about NaNs. */
6233 if (! HONOR_NANS (mode))
6234 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6235
6236 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6237 arg0 = save_expr (arg0);
6238 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6239
6240 case EQ_EXPR:
6241 case GE_EXPR:
6242 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6243 real_maxval (&max, neg, mode);
6244 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6245 arg0, build_real (TREE_TYPE (arg0), max));
6246
6247 case LT_EXPR:
6248 /* x < +Inf is always equal to x <= DBL_MAX. */
6249 real_maxval (&max, neg, mode);
6250 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6251 arg0, build_real (TREE_TYPE (arg0), max));
6252
6253 case NE_EXPR:
6254 /* x != +Inf is always equal to !(x > DBL_MAX). */
6255 real_maxval (&max, neg, mode);
6256 if (! HONOR_NANS (mode))
6257 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6258 arg0, build_real (TREE_TYPE (arg0), max));
6259
6260 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6261 arg0, build_real (TREE_TYPE (arg0), max));
6262 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6263
6264 default:
6265 break;
6266 }
6267
6268 return NULL_TREE;
6269 }
6270
6271 /* Subroutine of fold() that optimizes comparisons of a division by
6272 a nonzero integer constant against an integer constant, i.e.
6273 X/C1 op C2.
6274
6275 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6276 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6277 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6278
6279 The function returns the constant folded tree if a simplification
6280 can be made, and NULL_TREE otherwise. */
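/* Worked example: X / 3 == 2 holds exactly for X in [6, 8], since
   truncating division by 3 yields 2 only there, so the comparison
   becomes the range check 6 <= X && X <= 8.  */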
6281
6282 static tree
6283 fold_div_compare (location_t loc,
6284 enum tree_code code, tree type, tree arg0, tree arg1)
6285 {
6286 tree prod, tmp, hi, lo;
6287 tree arg00 = TREE_OPERAND (arg0, 0);
6288 tree arg01 = TREE_OPERAND (arg0, 1);
6289 double_int val;
6290 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6291 bool neg_overflow;
6292 bool overflow;
6293
6294 /* We have to do this the hard way to detect unsigned overflow.
6295 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6296 val = TREE_INT_CST (arg01)
6297 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6298 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6299 neg_overflow = false;
6300
6301 if (unsigned_p)
6302 {
6303 tmp = int_const_binop (MINUS_EXPR, arg01,
6304 build_int_cst (TREE_TYPE (arg01), 1));
6305 lo = prod;
6306
6307 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6308 val = TREE_INT_CST (prod)
6309 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6310 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6311 -1, overflow | TREE_OVERFLOW (prod));
6312 }
6313 else if (tree_int_cst_sgn (arg01) >= 0)
6314 {
6315 tmp = int_const_binop (MINUS_EXPR, arg01,
6316 build_int_cst (TREE_TYPE (arg01), 1));
6317 switch (tree_int_cst_sgn (arg1))
6318 {
6319 case -1:
6320 neg_overflow = true;
6321 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6322 hi = prod;
6323 break;
6324
6325 case 0:
6326 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6327 hi = tmp;
6328 break;
6329
6330 case 1:
6331 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6332 lo = prod;
6333 break;
6334
6335 default:
6336 gcc_unreachable ();
6337 }
6338 }
6339 else
6340 {
6341 /* A negative divisor reverses the relational operators. */
6342 code = swap_tree_comparison (code);
6343
6344 tmp = int_const_binop (PLUS_EXPR, arg01,
6345 build_int_cst (TREE_TYPE (arg01), 1));
6346 switch (tree_int_cst_sgn (arg1))
6347 {
6348 case -1:
6349 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6350 lo = prod;
6351 break;
6352
6353 case 0:
6354 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6355 lo = tmp;
6356 break;
6357
6358 case 1:
6359 neg_overflow = true;
6360 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6361 hi = prod;
6362 break;
6363
6364 default:
6365 gcc_unreachable ();
6366 }
6367 }
6368
6369 switch (code)
6370 {
6371 case EQ_EXPR:
6372 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6373 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6374 if (TREE_OVERFLOW (hi))
6375 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6376 if (TREE_OVERFLOW (lo))
6377 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6378 return build_range_check (loc, type, arg00, 1, lo, hi);
6379
6380 case NE_EXPR:
6381 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6382 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6383 if (TREE_OVERFLOW (hi))
6384 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6385 if (TREE_OVERFLOW (lo))
6386 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6387 return build_range_check (loc, type, arg00, 0, lo, hi);
6388
6389 case LT_EXPR:
6390 if (TREE_OVERFLOW (lo))
6391 {
6392 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6393 return omit_one_operand_loc (loc, type, tmp, arg00);
6394 }
6395 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6396
6397 case LE_EXPR:
6398 if (TREE_OVERFLOW (hi))
6399 {
6400 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6401 return omit_one_operand_loc (loc, type, tmp, arg00);
6402 }
6403 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6404
6405 case GT_EXPR:
6406 if (TREE_OVERFLOW (hi))
6407 {
6408 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6409 return omit_one_operand_loc (loc, type, tmp, arg00);
6410 }
6411 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6412
6413 case GE_EXPR:
6414 if (TREE_OVERFLOW (lo))
6415 {
6416 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6417 return omit_one_operand_loc (loc, type, tmp, arg00);
6418 }
6419 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6420
6421 default:
6422 break;
6423 }
6424
6425 return NULL_TREE;
6426 }
6427
6428
6429 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6430 equality/inequality test, then return a simplified form of the test
6431 using a sign test. Otherwise return NULL. TYPE is the desired
6432 result type. */
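/* Example (assuming 32-bit int): for unsigned x, the test
   (x & 0x80000000) != 0 isolates the sign bit and becomes
   (int) x < 0; the == 0 form becomes (int) x >= 0.  */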
6433
6434 static tree
6435 fold_single_bit_test_into_sign_test (location_t loc,
6436 enum tree_code code, tree arg0, tree arg1,
6437 tree result_type)
6438 {
6439 /* If this is testing a single bit, we can optimize the test. */
6440 if ((code == NE_EXPR || code == EQ_EXPR)
6441 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6442 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6443 {
6444 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6445 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6446 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6447
6448 if (arg00 != NULL_TREE
6449 /* This is only a win if casting to a signed type is cheap,
6450 i.e. when arg00's type is not a partial mode. */
6451 && TYPE_PRECISION (TREE_TYPE (arg00))
6452 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6453 {
6454 tree stype = signed_type_for (TREE_TYPE (arg00));
6455 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6456 result_type,
6457 fold_convert_loc (loc, stype, arg00),
6458 build_int_cst (stype, 0));
6459 }
6460 }
6461
6462 return NULL_TREE;
6463 }
6464
6465 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6466 equality/inequality test, then return a simplified form of
6467 the test using shifts and logical operations. Otherwise return
6468 NULL. TYPE is the desired result type. */
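/* Example: (x & 8) != 0 becomes (x >> 3) & 1; the EQ_EXPR form
   XORs with 1 before the final AND, giving ((x >> 3) ^ 1) & 1.  */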
6469
6470 tree
6471 fold_single_bit_test (location_t loc, enum tree_code code,
6472 tree arg0, tree arg1, tree result_type)
6473 {
6474 /* If this is testing a single bit, we can optimize the test. */
6475 if ((code == NE_EXPR || code == EQ_EXPR)
6476 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6477 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6478 {
6479 tree inner = TREE_OPERAND (arg0, 0);
6480 tree type = TREE_TYPE (arg0);
6481 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6482 enum machine_mode operand_mode = TYPE_MODE (type);
6483 int ops_unsigned;
6484 tree signed_type, unsigned_type, intermediate_type;
6485 tree tem, one;
6486
6487 /* First, see if we can fold the single bit test into a sign-bit
6488 test. */
6489 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6490 result_type);
6491 if (tem)
6492 return tem;
6493
6494 /* Otherwise we have (A & C) != 0 where C is a single bit,
6495 convert that into ((A >> C2) & 1), where C2 = log2(C).
6496 Similarly for (A & C) == 0. */
6497
6498 /* If INNER is a right shift by a constant and it plus BITNUM does
6499 not overflow, adjust BITNUM and INNER. */
6500 if (TREE_CODE (inner) == RSHIFT_EXPR
6501 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6502 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6503 && bitnum < TYPE_PRECISION (type)
6504 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6505 bitnum - TYPE_PRECISION (type)))
6506 {
6507 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6508 inner = TREE_OPERAND (inner, 0);
6509 }
6510
6511 /* If we are going to be able to omit the AND below, we must do our
6512 operations as unsigned. If we must use the AND, we have a choice.
6513 Normally unsigned is faster, but for some machines signed is. */
6514 #ifdef LOAD_EXTEND_OP
6515 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6516 && !flag_syntax_only) ? 0 : 1;
6517 #else
6518 ops_unsigned = 1;
6519 #endif
6520
6521 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6522 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6523 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6524 inner = fold_convert_loc (loc, intermediate_type, inner);
6525
6526 if (bitnum != 0)
6527 inner = build2 (RSHIFT_EXPR, intermediate_type,
6528 inner, size_int (bitnum));
6529
6530 one = build_int_cst (intermediate_type, 1);
6531
6532 if (code == EQ_EXPR)
6533 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6534
6535 /* Put the AND last so it can combine with more things. */
6536 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6537
6538 /* Make sure to return the proper type. */
6539 inner = fold_convert_loc (loc, result_type, inner);
6540
6541 return inner;
6542 }
6543 return NULL_TREE;
6544 }
6545
6546 /* Check whether we are allowed to reorder operands arg0 and arg1,
6547 such that the evaluation of arg1 occurs before arg0. */
6548
6549 static bool
6550 reorder_operands_p (const_tree arg0, const_tree arg1)
6551 {
6552 if (! flag_evaluation_order)
6553 return true;
6554 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6555 return true;
6556 return ! TREE_SIDE_EFFECTS (arg0)
6557 && ! TREE_SIDE_EFFECTS (arg1);
6558 }
6559
6560 /* Test whether it is preferable to swap two operands, ARG0 and
6561 ARG1, for example because ARG0 is an integer constant and ARG1
6562 isn't. If REORDER is true, only recommend swapping if we can
6563 evaluate the operands in reverse order. */
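/* e.g. callers use this to canonicalize 5 + x into x + 5: arg0
   being an INTEGER_CST while arg1 is not recommends a swap, so
   constants end up as the second operand of commutative nodes.  */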
6564
6565 bool
6566 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6567 {
6568 STRIP_SIGN_NOPS (arg0);
6569 STRIP_SIGN_NOPS (arg1);
6570
6571 if (TREE_CODE (arg1) == INTEGER_CST)
6572 return 0;
6573 if (TREE_CODE (arg0) == INTEGER_CST)
6574 return 1;
6575
6576 if (TREE_CODE (arg1) == REAL_CST)
6577 return 0;
6578 if (TREE_CODE (arg0) == REAL_CST)
6579 return 1;
6580
6581 if (TREE_CODE (arg1) == FIXED_CST)
6582 return 0;
6583 if (TREE_CODE (arg0) == FIXED_CST)
6584 return 1;
6585
6586 if (TREE_CODE (arg1) == COMPLEX_CST)
6587 return 0;
6588 if (TREE_CODE (arg0) == COMPLEX_CST)
6589 return 1;
6590
6591 if (TREE_CONSTANT (arg1))
6592 return 0;
6593 if (TREE_CONSTANT (arg0))
6594 return 1;
6595
6596 if (optimize_function_for_size_p (cfun))
6597 return 0;
6598
6599 if (reorder && flag_evaluation_order
6600 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6601 return 0;
6602
6603 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6604 for commutative and comparison operators. Ensuring a canonical
6605 form allows the optimizers to find additional redundancies without
6606 having to explicitly check for both orderings. */
6607 if (TREE_CODE (arg0) == SSA_NAME
6608 && TREE_CODE (arg1) == SSA_NAME
6609 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6610 return 1;
6611
6612 /* Put SSA_NAMEs last. */
6613 if (TREE_CODE (arg1) == SSA_NAME)
6614 return 0;
6615 if (TREE_CODE (arg0) == SSA_NAME)
6616 return 1;
6617
6618 /* Put variables last. */
6619 if (DECL_P (arg1))
6620 return 0;
6621 if (DECL_P (arg0))
6622 return 1;
6623
6624 return 0;
6625 }
6626
6627 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6628 ARG0 is extended to a wider type. */
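/* Hedged examples (16-bit short, 32-bit int assumed): for short s,
   (int) s < 1000 can be done as s < (short) 1000 since 1000 fits
   the narrow type, while (int) s == 100000 is known false because
   100000 exceeds the range of short.  */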
6629
6630 static tree
6631 fold_widened_comparison (location_t loc, enum tree_code code,
6632 tree type, tree arg0, tree arg1)
6633 {
6634 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6635 tree arg1_unw;
6636 tree shorter_type, outer_type;
6637 tree min, max;
6638 bool above, below;
6639
6640 if (arg0_unw == arg0)
6641 return NULL_TREE;
6642 shorter_type = TREE_TYPE (arg0_unw);
6643
6644 #ifdef HAVE_canonicalize_funcptr_for_compare
6645 /* Disable this optimization if we're casting a function pointer
6646 type on targets that require function pointer canonicalization. */
6647 if (HAVE_canonicalize_funcptr_for_compare
6648 && TREE_CODE (shorter_type) == POINTER_TYPE
6649 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6650 return NULL_TREE;
6651 #endif
6652
6653 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6654 return NULL_TREE;
6655
6656 arg1_unw = get_unwidened (arg1, NULL_TREE);
6657
6658 /* If possible, express the comparison in the shorter mode. */
6659 if ((code == EQ_EXPR || code == NE_EXPR
6660 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6661 && (TREE_TYPE (arg1_unw) == shorter_type
6662 || ((TYPE_PRECISION (shorter_type)
6663 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6664 && (TYPE_UNSIGNED (shorter_type)
6665 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6666 || (TREE_CODE (arg1_unw) == INTEGER_CST
6667 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6668 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6669 && int_fits_type_p (arg1_unw, shorter_type))))
6670 return fold_build2_loc (loc, code, type, arg0_unw,
6671 fold_convert_loc (loc, shorter_type, arg1_unw));
6672
6673 if (TREE_CODE (arg1_unw) != INTEGER_CST
6674 || TREE_CODE (shorter_type) != INTEGER_TYPE
6675 || !int_fits_type_p (arg1_unw, shorter_type))
6676 return NULL_TREE;
6677
6678 /* If we are comparing with an integer that does not fit into the range
6679 of the shorter type, the result is known. */
6680 outer_type = TREE_TYPE (arg1_unw);
6681 min = lower_bound_in_type (outer_type, shorter_type);
6682 max = upper_bound_in_type (outer_type, shorter_type);
6683
6684 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6685 max, arg1_unw));
6686 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6687 arg1_unw, min));
6688
6689 switch (code)
6690 {
6691 case EQ_EXPR:
6692 if (above || below)
6693 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6694 break;
6695
6696 case NE_EXPR:
6697 if (above || below)
6698 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6699 break;
6700
6701 case LT_EXPR:
6702 case LE_EXPR:
6703 if (above)
6704 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6705 else if (below)
6706 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6707 break;
6708 case GT_EXPR:
6709 case GE_EXPR:
6710 if (above)
6711 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6712 else if (below)
6713 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6714 break;
6715 default:
6716 break;
6717 }
6718
6719 return NULL_TREE;
6720 }
6721
6722 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6723 ARG0 just the signedness is changed. */
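/* For illustration: with int x, (unsigned) x == 3U has equal
   precision on both sides of the cast and an equality code, so it
   folds to x == 3; ordered comparisons such as < are refused when
   the cast changes signedness.  */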
6724
6725 static tree
6726 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6727 tree arg0, tree arg1)
6728 {
6729 tree arg0_inner;
6730 tree inner_type, outer_type;
6731
6732 if (!CONVERT_EXPR_P (arg0))
6733 return NULL_TREE;
6734
6735 outer_type = TREE_TYPE (arg0);
6736 arg0_inner = TREE_OPERAND (arg0, 0);
6737 inner_type = TREE_TYPE (arg0_inner);
6738
6739 #ifdef HAVE_canonicalize_funcptr_for_compare
6740 /* Disable this optimization if we're casting a function pointer
6741 type on targets that require function pointer canonicalization. */
6742 if (HAVE_canonicalize_funcptr_for_compare
6743 && TREE_CODE (inner_type) == POINTER_TYPE
6744 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6745 return NULL_TREE;
6746 #endif
6747
6748 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6749 return NULL_TREE;
6750
6751 if (TREE_CODE (arg1) != INTEGER_CST
6752 && !(CONVERT_EXPR_P (arg1)
6753 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6754 return NULL_TREE;
6755
6756 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6757 && code != NE_EXPR
6758 && code != EQ_EXPR)
6759 return NULL_TREE;
6760
6761 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6762 return NULL_TREE;
6763
6764 if (TREE_CODE (arg1) == INTEGER_CST)
6765 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6766 0, TREE_OVERFLOW (arg1));
6767 else
6768 arg1 = fold_convert_loc (loc, inner_type, arg1);
6769
6770 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6771 }
6772
6773 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6774 the step of the array. Reconstructs s and delta in the case of s *
6775 delta being an integer constant (and thus already folded). ADDR is
6776 the address. OP1 is the multiplicative expression. If the
6777 function succeeds, the new address expression is returned.
6778 Otherwise NULL_TREE is returned. LOC is the location of the
6779 resulting expression. */
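/* Worked example (4-byte int elements assumed): for int a[10],
   &a[1] p+ 8 becomes &a[3], because the constant offset 8 is
   recognized as delta 2 times the element size 4.  */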
6780
6781 static tree
6782 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6783 {
6784 tree s, delta, step;
6785 tree ref = TREE_OPERAND (addr, 0), pref;
6786 tree ret, pos;
6787 tree itype;
6788 bool mdim = false;
6789
6790 /* Strip the nops that might be added when converting op1 to sizetype. */
6791 STRIP_NOPS (op1);
6792
6793 /* Canonicalize op1 into a possibly non-constant delta
6794 and an INTEGER_CST s. */
6795 if (TREE_CODE (op1) == MULT_EXPR)
6796 {
6797 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6798
6799 STRIP_NOPS (arg0);
6800 STRIP_NOPS (arg1);
6801
6802 if (TREE_CODE (arg0) == INTEGER_CST)
6803 {
6804 s = arg0;
6805 delta = arg1;
6806 }
6807 else if (TREE_CODE (arg1) == INTEGER_CST)
6808 {
6809 s = arg1;
6810 delta = arg0;
6811 }
6812 else
6813 return NULL_TREE;
6814 }
6815 else if (TREE_CODE (op1) == INTEGER_CST)
6816 {
6817 delta = op1;
6818 s = NULL_TREE;
6819 }
6820 else
6821 {
6822 /* Treat op1 as delta * 1. */
6823 delta = op1;
6824 s = integer_one_node;
6825 }
6826
6827 /* Handle &x.array the same as we would handle &x.array[0]. */
6828 if (TREE_CODE (ref) == COMPONENT_REF
6829 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6830 {
6831 tree domain;
6832
6833 /* Remember if this was a multi-dimensional array. */
6834 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6835 mdim = true;
6836
6837 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6838 if (! domain)
6839 goto cont;
6840 itype = TREE_TYPE (domain);
6841
6842 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6843 if (TREE_CODE (step) != INTEGER_CST)
6844 goto cont;
6845
6846 if (s)
6847 {
6848 if (! tree_int_cst_equal (step, s))
6849 goto cont;
6850 }
6851 else
6852 {
6853 /* Check whether delta is a multiple of step. */
6854 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6855 if (! tmp)
6856 goto cont;
6857 delta = tmp;
6858 }
6859
6860 /* Only fold here if we can verify we do not overflow one
6861 dimension of a multi-dimensional array. */
6862 if (mdim)
6863 {
6864 tree tmp;
6865
6866 if (!TYPE_MIN_VALUE (domain)
6867 || !TYPE_MAX_VALUE (domain)
6868 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6869 goto cont;
6870
6871 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6872 fold_convert_loc (loc, itype,
6873 TYPE_MIN_VALUE (domain)),
6874 fold_convert_loc (loc, itype, delta));
6875 if (TREE_CODE (tmp) != INTEGER_CST
6876 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6877 goto cont;
6878 }
6879
6880 /* We found a suitable component reference. */
6881
6882 pref = TREE_OPERAND (addr, 0);
6883 ret = copy_node (pref);
6884 SET_EXPR_LOCATION (ret, loc);
6885
6886 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6887 fold_build2_loc
6888 (loc, PLUS_EXPR, itype,
6889 fold_convert_loc (loc, itype,
6890 TYPE_MIN_VALUE
6891 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6892 fold_convert_loc (loc, itype, delta)),
6893 NULL_TREE, NULL_TREE);
6894 return build_fold_addr_expr_loc (loc, ret);
6895 }
6896
6897 cont:
6898
6899 for (;; ref = TREE_OPERAND (ref, 0))
6900 {
6901 if (TREE_CODE (ref) == ARRAY_REF)
6902 {
6903 tree domain;
6904
6905 /* Remember if this was a multi-dimensional array. */
6906 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6907 mdim = true;
6908
6909 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6910 if (! domain)
6911 continue;
6912 itype = TREE_TYPE (domain);
6913
6914 step = array_ref_element_size (ref);
6915 if (TREE_CODE (step) != INTEGER_CST)
6916 continue;
6917
6918 if (s)
6919 {
6920 if (! tree_int_cst_equal (step, s))
6921 continue;
6922 }
6923 else
6924 {
6925 /* Check whether delta is a multiple of step. */
6926 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6927 if (! tmp)
6928 continue;
6929 delta = tmp;
6930 }
6931
6932 /* Only fold here if we can verify we do not overflow one
6933 dimension of a multi-dimensional array. */
6934 if (mdim)
6935 {
6936 tree tmp;
6937
6938 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6939 || !TYPE_MAX_VALUE (domain)
6940 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6941 continue;
6942
6943 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6944 fold_convert_loc (loc, itype,
6945 TREE_OPERAND (ref, 1)),
6946 fold_convert_loc (loc, itype, delta));
6947 if (!tmp
6948 || TREE_CODE (tmp) != INTEGER_CST
6949 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6950 continue;
6951 }
6952
6953 break;
6954 }
6955 else
6956 mdim = false;
6957
6958 if (!handled_component_p (ref))
6959 return NULL_TREE;
6960 }
6961
6962 /* We found a suitable array reference. So copy everything up to it,
6963 and replace the index. */
6964
6965 pref = TREE_OPERAND (addr, 0);
6966 ret = copy_node (pref);
6967 SET_EXPR_LOCATION (ret, loc);
6968 pos = ret;
6969
6970 while (pref != ref)
6971 {
6972 pref = TREE_OPERAND (pref, 0);
6973 TREE_OPERAND (pos, 0) = copy_node (pref);
6974 pos = TREE_OPERAND (pos, 0);
6975 }
6976
6977 TREE_OPERAND (pos, 1)
6978 = fold_build2_loc (loc, PLUS_EXPR, itype,
6979 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
6980 fold_convert_loc (loc, itype, delta));
6981 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
6982 }
6983
6984
6985 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6986 means A >= Y && A != MAX, but in this case we know that
6987 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
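   For example, i < n && i + 1 > m folds to i < n && i >= m here,
   since the bound i < n rules out i being the maximal value.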
6988
6989 static tree
6990 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6991 {
6992 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6993
6994 if (TREE_CODE (bound) == LT_EXPR)
6995 a = TREE_OPERAND (bound, 0);
6996 else if (TREE_CODE (bound) == GT_EXPR)
6997 a = TREE_OPERAND (bound, 1);
6998 else
6999 return NULL_TREE;
7000
7001 typea = TREE_TYPE (a);
7002 if (!INTEGRAL_TYPE_P (typea)
7003 && !POINTER_TYPE_P (typea))
7004 return NULL_TREE;
7005
7006 if (TREE_CODE (ineq) == LT_EXPR)
7007 {
7008 a1 = TREE_OPERAND (ineq, 1);
7009 y = TREE_OPERAND (ineq, 0);
7010 }
7011 else if (TREE_CODE (ineq) == GT_EXPR)
7012 {
7013 a1 = TREE_OPERAND (ineq, 0);
7014 y = TREE_OPERAND (ineq, 1);
7015 }
7016 else
7017 return NULL_TREE;
7018
7019 if (TREE_TYPE (a1) != typea)
7020 return NULL_TREE;
7021
7022 if (POINTER_TYPE_P (typea))
7023 {
7024 /* Convert the pointers to integers before taking the difference. */
7025 tree ta = fold_convert_loc (loc, ssizetype, a);
7026 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7027 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7028 }
7029 else
7030 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7031
7032 if (!diff || !integer_onep (diff))
7033 return NULL_TREE;
7034
7035 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7036 }
7037
7038 /* Fold a sum or difference of at least one multiplication.
7039 Returns the folded tree or NULL if no simplification could be made. */
7040
7041 static tree
7042 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7043 tree arg0, tree arg1)
7044 {
7045 tree arg00, arg01, arg10, arg11;
7046 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7047
7048 /* (A * C) +- (B * C) -> (A+-B) * C.
7049 (A * C) +- A -> A * (C+-1).
7050 We are most concerned about the case where C is a constant,
7051 but other combinations show up during loop reduction. Since
7052 it is not difficult, try all four possibilities. */
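   E.g. x * 3 + y * 3 folds to (x + y) * 3, and x * 3 + x folds to
   x * (3 + 1) by way of the implicit factor 1 built below.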
7053
7054 if (TREE_CODE (arg0) == MULT_EXPR)
7055 {
7056 arg00 = TREE_OPERAND (arg0, 0);
7057 arg01 = TREE_OPERAND (arg0, 1);
7058 }
7059 else if (TREE_CODE (arg0) == INTEGER_CST)
7060 {
7061 arg00 = build_one_cst (type);
7062 arg01 = arg0;
7063 }
7064 else
7065 {
7066 /* We cannot generate constant 1 for fract. */
7067 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7068 return NULL_TREE;
7069 arg00 = arg0;
7070 arg01 = build_one_cst (type);
7071 }
7072 if (TREE_CODE (arg1) == MULT_EXPR)
7073 {
7074 arg10 = TREE_OPERAND (arg1, 0);
7075 arg11 = TREE_OPERAND (arg1, 1);
7076 }
7077 else if (TREE_CODE (arg1) == INTEGER_CST)
7078 {
7079 arg10 = build_one_cst (type);
7080 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7081 the purpose of this canonicalization. */
7082 if (TREE_INT_CST_HIGH (arg1) == -1
7083 && negate_expr_p (arg1)
7084 && code == PLUS_EXPR)
7085 {
7086 arg11 = negate_expr (arg1);
7087 code = MINUS_EXPR;
7088 }
7089 else
7090 arg11 = arg1;
7091 }
7092 else
7093 {
7094 /* We cannot generate constant 1 for fract. */
7095 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7096 return NULL_TREE;
7097 arg10 = arg1;
7098 arg11 = build_one_cst (type);
7099 }
7100 same = NULL_TREE;
7101
7102 if (operand_equal_p (arg01, arg11, 0))
7103 same = arg01, alt0 = arg00, alt1 = arg10;
7104 else if (operand_equal_p (arg00, arg10, 0))
7105 same = arg00, alt0 = arg01, alt1 = arg11;
7106 else if (operand_equal_p (arg00, arg11, 0))
7107 same = arg00, alt0 = arg01, alt1 = arg10;
7108 else if (operand_equal_p (arg01, arg10, 0))
7109 same = arg01, alt0 = arg00, alt1 = arg11;
7110
7111 /* No identical multiplicands; see if we can find a common
7112 power-of-two factor in non-power-of-two multiplies. This
7113 can help in multi-dimensional array access. */
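   An illustration: i * 12 + j * 4 shares no operand, but 4 is a power
   of two dividing 12, so it can be rewritten as (i * 3 + j) * 4.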
7114 else if (host_integerp (arg01, 0)
7115 && host_integerp (arg11, 0))
7116 {
7117 HOST_WIDE_INT int01, int11, tmp;
7118 bool swap = false;
7119 tree maybe_same;
7120 int01 = TREE_INT_CST_LOW (arg01);
7121 int11 = TREE_INT_CST_LOW (arg11);
7122
7123 /* Move min of absolute values to int11. */
7124 if (absu_hwi (int01) < absu_hwi (int11))
7125 {
7126 tmp = int01, int01 = int11, int11 = tmp;
7127 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7128 maybe_same = arg01;
7129 swap = true;
7130 }
7131 else
7132 maybe_same = arg11;
7133
7134 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7135 /* The remainder should not be a constant, otherwise we
7136 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7137 increase the number of multiplications necessary. */
7138 && TREE_CODE (arg10) != INTEGER_CST)
7139 {
7140 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7141 build_int_cst (TREE_TYPE (arg00),
7142 int01 / int11));
7143 alt1 = arg10;
7144 same = maybe_same;
7145 if (swap)
7146 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7147 }
7148 }
7149
7150 if (same)
7151 return fold_build2_loc (loc, MULT_EXPR, type,
7152 fold_build2_loc (loc, code, type,
7153 fold_convert_loc (loc, type, alt0),
7154 fold_convert_loc (loc, type, alt1)),
7155 fold_convert_loc (loc, type, same));
7156
7157 return NULL_TREE;
7158 }
7159
7160 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7161 specified by EXPR into the buffer PTR of length LEN bytes.
7162 Return the number of bytes placed in the buffer, or zero
7163 upon failure. */
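   Sketch of the layout (assuming 8-bit bytes and a 32-bit type): the
   INTEGER_CST 0x01020304 is emitted as the bytes 04 03 02 01 on a
   little-endian target and 01 02 03 04 on a big-endian one.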
7164
7165 static int
7166 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7167 {
7168 tree type = TREE_TYPE (expr);
7169 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7170 int byte, offset, word, words;
7171 unsigned char value;
7172
7173 if (total_bytes > len)
7174 return 0;
7175 words = total_bytes / UNITS_PER_WORD;
7176
7177 for (byte = 0; byte < total_bytes; byte++)
7178 {
7179 int bitpos = byte * BITS_PER_UNIT;
7180 if (bitpos < HOST_BITS_PER_WIDE_INT)
7181 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7182 else
7183 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7184 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7185
7186 if (total_bytes > UNITS_PER_WORD)
7187 {
7188 word = byte / UNITS_PER_WORD;
7189 if (WORDS_BIG_ENDIAN)
7190 word = (words - 1) - word;
7191 offset = word * UNITS_PER_WORD;
7192 if (BYTES_BIG_ENDIAN)
7193 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7194 else
7195 offset += byte % UNITS_PER_WORD;
7196 }
7197 else
7198 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7199 ptr[offset] = value;
7200 }
7201 return total_bytes;
7202 }
7203
7204
7205 /* Subroutine of native_encode_expr. Encode the REAL_CST
7206 specified by EXPR into the buffer PTR of length LEN bytes.
7207 Return the number of bytes placed in the buffer, or zero
7208 upon failure. */
7209
7210 static int
7211 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7212 {
7213 tree type = TREE_TYPE (expr);
7214 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7215 int byte, offset, word, words, bitpos;
7216 unsigned char value;
7217
7218 /* There are always 32 bits in each long, no matter the size of
7219 the host's long. We handle floating point representations with
7220 up to 192 bits. */
7221 long tmp[6];
7222
7223 if (total_bytes > len)
7224 return 0;
7225 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7226
7227 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7228
7229 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7230 bitpos += BITS_PER_UNIT)
7231 {
7232 byte = (bitpos / BITS_PER_UNIT) & 3;
7233 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7234
7235 if (UNITS_PER_WORD < 4)
7236 {
7237 word = byte / UNITS_PER_WORD;
7238 if (WORDS_BIG_ENDIAN)
7239 word = (words - 1) - word;
7240 offset = word * UNITS_PER_WORD;
7241 if (BYTES_BIG_ENDIAN)
7242 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7243 else
7244 offset += byte % UNITS_PER_WORD;
7245 }
7246 else
7247 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7248 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7249 }
7250 return total_bytes;
7251 }
7252
7253 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7254 specified by EXPR into the buffer PTR of length LEN bytes.
7255 Return the number of bytes placed in the buffer, or zero
7256 upon failure. */
7257
7258 static int
7259 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7260 {
7261 int rsize, isize;
7262 tree part;
7263
7264 part = TREE_REALPART (expr);
7265 rsize = native_encode_expr (part, ptr, len);
7266 if (rsize == 0)
7267 return 0;
7268 part = TREE_IMAGPART (expr);
7269 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7270 if (isize != rsize)
7271 return 0;
7272 return rsize + isize;
7273 }
7274
7275
7276 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7277 specified by EXPR into the buffer PTR of length LEN bytes.
7278 Return the number of bytes placed in the buffer, or zero
7279 upon failure. */
7280
7281 static int
7282 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7283 {
7284 unsigned i, count;
7285 int size, offset;
7286 tree itype, elem;
7287
7288 offset = 0;
7289 count = VECTOR_CST_NELTS (expr);
7290 itype = TREE_TYPE (TREE_TYPE (expr));
7291 size = GET_MODE_SIZE (TYPE_MODE (itype));
7292 for (i = 0; i < count; i++)
7293 {
7294 elem = VECTOR_CST_ELT (expr, i);
7295 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7296 return 0;
7297 offset += size;
7298 }
7299 return offset;
7300 }
7301
7302
7303 /* Subroutine of native_encode_expr. Encode the STRING_CST
7304 specified by EXPR into the buffer PTR of length LEN bytes.
7305 Return the number of bytes placed in the buffer, or zero
7306 upon failure. */
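   E.g. the STRING_CST "ab" stored in a char[4] is emitted as the
   bytes 'a' 'b' 0 0, with the trailing bytes zero-filled.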
7307
7308 static int
7309 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7310 {
7311 tree type = TREE_TYPE (expr);
7312 HOST_WIDE_INT total_bytes;
7313
7314 if (TREE_CODE (type) != ARRAY_TYPE
7315 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7316 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7317 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7318 return 0;
7319 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7320 if (total_bytes > len)
7321 return 0;
7322 if (TREE_STRING_LENGTH (expr) < total_bytes)
7323 {
7324 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7325 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7326 total_bytes - TREE_STRING_LENGTH (expr));
7327 }
7328 else
7329 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7330 return total_bytes;
7331 }
7332
7333
7334 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7335 REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7336 buffer PTR of length LEN bytes. Return the number of bytes
7337 placed in the buffer, or zero upon failure. */
7338
7339 int
7340 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7341 {
7342 switch (TREE_CODE (expr))
7343 {
7344 case INTEGER_CST:
7345 return native_encode_int (expr, ptr, len);
7346
7347 case REAL_CST:
7348 return native_encode_real (expr, ptr, len);
7349
7350 case COMPLEX_CST:
7351 return native_encode_complex (expr, ptr, len);
7352
7353 case VECTOR_CST:
7354 return native_encode_vector (expr, ptr, len);
7355
7356 case STRING_CST:
7357 return native_encode_string (expr, ptr, len);
7358
7359 default:
7360 return 0;
7361 }
7362 }
7363
7364
7365 /* Subroutine of native_interpret_expr. Interpret the contents of
7366 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7367 If the buffer cannot be interpreted, return NULL_TREE. */
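   This is the inverse of native_encode_int: e.g. the little-endian
   byte sequence 04 03 02 01 reads back as the INTEGER_CST 0x01020304
   (assuming 8-bit bytes and a 32-bit type).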
7368
7369 static tree
7370 native_interpret_int (tree type, const unsigned char *ptr, int len)
7371 {
7372 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7373 int byte, offset, word, words;
7374 unsigned char value;
7375 double_int result;
7376
7377 if (total_bytes > len)
7378 return NULL_TREE;
7379 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7380 return NULL_TREE;
7381
7382 result = double_int_zero;
7383 words = total_bytes / UNITS_PER_WORD;
7384
7385 for (byte = 0; byte < total_bytes; byte++)
7386 {
7387 int bitpos = byte * BITS_PER_UNIT;
7388 if (total_bytes > UNITS_PER_WORD)
7389 {
7390 word = byte / UNITS_PER_WORD;
7391 if (WORDS_BIG_ENDIAN)
7392 word = (words - 1) - word;
7393 offset = word * UNITS_PER_WORD;
7394 if (BYTES_BIG_ENDIAN)
7395 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7396 else
7397 offset += byte % UNITS_PER_WORD;
7398 }
7399 else
7400 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7401 value = ptr[offset];
7402
7403 if (bitpos < HOST_BITS_PER_WIDE_INT)
7404 result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7405 else
7406 result.high |= (unsigned HOST_WIDE_INT) value
7407 << (bitpos - HOST_BITS_PER_WIDE_INT);
7408 }
7409
7410 return double_int_to_tree (type, result);
7411 }
7412
7413
7414 /* Subroutine of native_interpret_expr. Interpret the contents of
7415 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7416 If the buffer cannot be interpreted, return NULL_TREE. */
7417
7418 static tree
7419 native_interpret_real (tree type, const unsigned char *ptr, int len)
7420 {
7421 enum machine_mode mode = TYPE_MODE (type);
7422 int total_bytes = GET_MODE_SIZE (mode);
7423 int byte, offset, word, words, bitpos;
7424 unsigned char value;
7425 /* There are always 32 bits in each long, no matter the size of
7426 the host's long. We handle floating point representations with
7427 up to 192 bits. */
7428 REAL_VALUE_TYPE r;
7429 long tmp[6];
7430
7431 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7432 if (total_bytes > len || total_bytes > 24)
7433 return NULL_TREE;
7434 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7435
7436 memset (tmp, 0, sizeof (tmp));
7437 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7438 bitpos += BITS_PER_UNIT)
7439 {
7440 byte = (bitpos / BITS_PER_UNIT) & 3;
7441 if (UNITS_PER_WORD < 4)
7442 {
7443 word = byte / UNITS_PER_WORD;
7444 if (WORDS_BIG_ENDIAN)
7445 word = (words - 1) - word;
7446 offset = word * UNITS_PER_WORD;
7447 if (BYTES_BIG_ENDIAN)
7448 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7449 else
7450 offset += byte % UNITS_PER_WORD;
7451 }
7452 else
7453 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7454 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7455
7456 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7457 }
7458
7459 real_from_target (&r, tmp, mode);
7460 return build_real (type, r);
7461 }
7462
7463
7464 /* Subroutine of native_interpret_expr. Interpret the contents of
7465 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7466 If the buffer cannot be interpreted, return NULL_TREE. */
7467
7468 static tree
7469 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7470 {
7471 tree etype, rpart, ipart;
7472 int size;
7473
7474 etype = TREE_TYPE (type);
7475 size = GET_MODE_SIZE (TYPE_MODE (etype));
7476 if (size * 2 > len)
7477 return NULL_TREE;
7478 rpart = native_interpret_expr (etype, ptr, size);
7479 if (!rpart)
7480 return NULL_TREE;
7481 ipart = native_interpret_expr (etype, ptr+size, size);
7482 if (!ipart)
7483 return NULL_TREE;
7484 return build_complex (type, rpart, ipart);
7485 }
7486
7487
7488 /* Subroutine of native_interpret_expr. Interpret the contents of
7489 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7490 If the buffer cannot be interpreted, return NULL_TREE. */
7491
7492 static tree
7493 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7494 {
7495 tree etype, elem;
7496 int i, size, count;
7497 tree *elements;
7498
7499 etype = TREE_TYPE (type);
7500 size = GET_MODE_SIZE (TYPE_MODE (etype));
7501 count = TYPE_VECTOR_SUBPARTS (type);
7502 if (size * count > len)
7503 return NULL_TREE;
7504
7505 elements = XALLOCAVEC (tree, count);
7506 for (i = count - 1; i >= 0; i--)
7507 {
7508 elem = native_interpret_expr (etype, ptr+(i*size), size);
7509 if (!elem)
7510 return NULL_TREE;
7511 elements[i] = elem;
7512 }
7513 return build_vector (type, elements);
7514 }
7515
7516
7517 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7518 the buffer PTR of length LEN as a constant of type TYPE. For
7519 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7520 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7521 return NULL_TREE. */
7522
7523 tree
7524 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7525 {
7526 switch (TREE_CODE (type))
7527 {
7528 case INTEGER_TYPE:
7529 case ENUMERAL_TYPE:
7530 case BOOLEAN_TYPE:
7531 case POINTER_TYPE:
7532 case REFERENCE_TYPE:
7533 return native_interpret_int (type, ptr, len);
7534
7535 case REAL_TYPE:
7536 return native_interpret_real (type, ptr, len);
7537
7538 case COMPLEX_TYPE:
7539 return native_interpret_complex (type, ptr, len);
7540
7541 case VECTOR_TYPE:
7542 return native_interpret_vector (type, ptr, len);
7543
7544 default:
7545 return NULL_TREE;
7546 }
7547 }
7548
7549 /* Returns true if we can interpret the contents of a native encoding
7550 as TYPE. */
7551
7552 static bool
7553 can_native_interpret_type_p (tree type)
7554 {
7555 switch (TREE_CODE (type))
7556 {
7557 case INTEGER_TYPE:
7558 case ENUMERAL_TYPE:
7559 case BOOLEAN_TYPE:
7560 case POINTER_TYPE:
7561 case REFERENCE_TYPE:
7562 case REAL_TYPE:
7563 case COMPLEX_TYPE:
7564 case VECTOR_TYPE:
7565 return true;
7566 default:
7567 return false;
7568 }
7569 }
7570
7571 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7572 TYPE at compile-time. If we're unable to perform the conversion
7573 return NULL_TREE. */
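   For instance, VIEW_CONVERT_EXPR<int>(1.0f) goes through the native
   byte encoding and, for IEEE single precision with matching 32-bit
   modes, comes back as the INTEGER_CST 0x3f800000.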
7574
7575 static tree
7576 fold_view_convert_expr (tree type, tree expr)
7577 {
7578 /* We support up to 512-bit values (for V8DFmode). */
7579 unsigned char buffer[64];
7580 int len;
7581
7582 /* Check that the host and target are sane. */
7583 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7584 return NULL_TREE;
7585
7586 len = native_encode_expr (expr, buffer, sizeof (buffer));
7587 if (len == 0)
7588 return NULL_TREE;
7589
7590 return native_interpret_expr (type, buffer, len);
7591 }
7592
7593 /* Build an expression for the address of T. Folds away INDIRECT_REF
7594 to avoid confusing the gimplify process. */
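   E.g. &*p simplifies to p, and &MEM[p + 0] likewise to p, so no
   ADDR_EXPR is left wrapped around an indirection.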
7595
7596 tree
7597 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7598 {
7599 /* The size of the object is not relevant when talking about its address. */
7600 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7601 t = TREE_OPERAND (t, 0);
7602
7603 if (TREE_CODE (t) == INDIRECT_REF)
7604 {
7605 t = TREE_OPERAND (t, 0);
7606
7607 if (TREE_TYPE (t) != ptrtype)
7608 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7609 }
7610 else if (TREE_CODE (t) == MEM_REF
7611 && integer_zerop (TREE_OPERAND (t, 1)))
7612 return TREE_OPERAND (t, 0);
7613 else if (TREE_CODE (t) == MEM_REF
7614 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7615 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7616 TREE_OPERAND (t, 0),
7617 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7618 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7619 {
7620 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7621
7622 if (TREE_TYPE (t) != ptrtype)
7623 t = fold_convert_loc (loc, ptrtype, t);
7624 }
7625 else
7626 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7627
7628 return t;
7629 }
7630
7631 /* Build an expression for the address of T. */
7632
7633 tree
7634 build_fold_addr_expr_loc (location_t loc, tree t)
7635 {
7636 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7637
7638 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7639 }
7640
7641 static bool vec_cst_ctor_to_array (tree, tree *);
7642
7643 /* Fold a unary expression of code CODE and type TYPE with operand
7644 OP0. Return the folded expression if folding is successful.
7645 Otherwise, return NULL_TREE. */
7646
7647 tree
7648 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7649 {
7650 tree tem;
7651 tree arg0;
7652 enum tree_code_class kind = TREE_CODE_CLASS (code);
7653
7654 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7655 && TREE_CODE_LENGTH (code) == 1);
7656
7657 arg0 = op0;
7658 if (arg0)
7659 {
7660 if (CONVERT_EXPR_CODE_P (code)
7661 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7662 {
7663 /* Don't use STRIP_NOPS, because signedness of argument type
7664 matters. */
7665 STRIP_SIGN_NOPS (arg0);
7666 }
7667 else
7668 {
7669 /* Strip any conversions that don't change the mode. This
7670 is safe for every expression, except for a comparison
7671 expression because its signedness is derived from its
7672 operands.
7673
7674 Note that this is done as an internal manipulation within
7675 the constant folder, in order to find the simplest
7676 representation of the arguments so that their form can be
7677 studied. In any case, the appropriate type conversions
7678 should be put back in the tree that comes out of the
7679 constant folder. */
7680 STRIP_NOPS (arg0);
7681 }
7682 }
7683
7684 if (TREE_CODE_CLASS (code) == tcc_unary)
7685 {
7686 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7687 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7688 fold_build1_loc (loc, code, type,
7689 fold_convert_loc (loc, TREE_TYPE (op0),
7690 TREE_OPERAND (arg0, 1))));
7691 else if (TREE_CODE (arg0) == COND_EXPR)
7692 {
7693 tree arg01 = TREE_OPERAND (arg0, 1);
7694 tree arg02 = TREE_OPERAND (arg0, 2);
7695 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7696 arg01 = fold_build1_loc (loc, code, type,
7697 fold_convert_loc (loc,
7698 TREE_TYPE (op0), arg01));
7699 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7700 arg02 = fold_build1_loc (loc, code, type,
7701 fold_convert_loc (loc,
7702 TREE_TYPE (op0), arg02));
7703 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7704 arg01, arg02);
7705
7706 /* If this was a conversion, and all we did was to move it
7707 inside the COND_EXPR, bring it back out. But leave it if
7708 it is a conversion from integer to integer and the
7709 result precision is no wider than a word since such a
7710 conversion is cheap and may be optimized away by combine,
7711 while it couldn't if it were outside the COND_EXPR. Then return
7712 so we don't get into an infinite recursion loop taking the
7713 conversion out and then back in. */
7714
7715 if ((CONVERT_EXPR_CODE_P (code)
7716 || code == NON_LVALUE_EXPR)
7717 && TREE_CODE (tem) == COND_EXPR
7718 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7719 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7720 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7721 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7722 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7723 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7724 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7725 && (INTEGRAL_TYPE_P
7726 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7727 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7728 || flag_syntax_only))
7729 tem = build1_loc (loc, code, type,
7730 build3 (COND_EXPR,
7731 TREE_TYPE (TREE_OPERAND
7732 (TREE_OPERAND (tem, 1), 0)),
7733 TREE_OPERAND (tem, 0),
7734 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7735 TREE_OPERAND (TREE_OPERAND (tem, 2),
7736 0)));
7737 return tem;
7738 }
7739 }
7740
7741 switch (code)
7742 {
7743 case PAREN_EXPR:
7744 /* Re-association barriers around constants and other re-association
7745 barriers can be removed. */
7746 if (CONSTANT_CLASS_P (op0)
7747 || TREE_CODE (op0) == PAREN_EXPR)
7748 return fold_convert_loc (loc, type, op0);
7749 return NULL_TREE;
7750
7751 CASE_CONVERT:
7752 case FLOAT_EXPR:
7753 case FIX_TRUNC_EXPR:
7754 if (TREE_TYPE (op0) == type)
7755 return op0;
7756
7757 if (COMPARISON_CLASS_P (op0))
7758 {
7759 /* If we have (type) (a CMP b) and type is an integral type, return
7760 new expression involving the new type. Canonicalize
7761 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7762 non-integral type.
7763 Do not fold the result as that would not simplify further; also,
7764 folding again results in recursion. */
7765 if (TREE_CODE (type) == BOOLEAN_TYPE)
7766 return build2_loc (loc, TREE_CODE (op0), type,
7767 TREE_OPERAND (op0, 0),
7768 TREE_OPERAND (op0, 1));
7769 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7770 && TREE_CODE (type) != VECTOR_TYPE)
7771 return build3_loc (loc, COND_EXPR, type, op0,
7772 constant_boolean_node (true, type),
7773 constant_boolean_node (false, type));
7774 }
7775
7776 /* Handle cases of two conversions in a row. */
7777 if (CONVERT_EXPR_P (op0))
7778 {
7779 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7780 tree inter_type = TREE_TYPE (op0);
7781 int inside_int = INTEGRAL_TYPE_P (inside_type);
7782 int inside_ptr = POINTER_TYPE_P (inside_type);
7783 int inside_float = FLOAT_TYPE_P (inside_type);
7784 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7785 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7786 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7787 int inter_int = INTEGRAL_TYPE_P (inter_type);
7788 int inter_ptr = POINTER_TYPE_P (inter_type);
7789 int inter_float = FLOAT_TYPE_P (inter_type);
7790 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7791 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7792 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7793 int final_int = INTEGRAL_TYPE_P (type);
7794 int final_ptr = POINTER_TYPE_P (type);
7795 int final_float = FLOAT_TYPE_P (type);
7796 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7797 unsigned int final_prec = TYPE_PRECISION (type);
7798 int final_unsignedp = TYPE_UNSIGNED (type);
7799
7800 /* In addition to the cases of two conversions in a row
7801 handled below, if we are converting something to its own
7802 type via an object of identical or wider precision, neither
7803 conversion is needed. */
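   E.g. (int)(long)x folds back to x for an int x, assuming the
   usual 32-bit int and 64-bit long.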
7804 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7805 && (((inter_int || inter_ptr) && final_int)
7806 || (inter_float && final_float))
7807 && inter_prec >= final_prec)
7808 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7809
7810 /* Likewise, if the intermediate and initial types are either both
7811 float or both integer, we don't need the middle conversion if the
7812 former is wider than the latter and doesn't change the signedness
7813 (for integers). Avoid this if the final type is a pointer since
7814 then we sometimes need the middle conversion. Likewise if the
7815 final type has a precision not equal to the size of its mode. */
7816 if (((inter_int && inside_int)
7817 || (inter_float && inside_float)
7818 || (inter_vec && inside_vec))
7819 && inter_prec >= inside_prec
7820 && (inter_float || inter_vec
7821 || inter_unsignedp == inside_unsignedp)
7822 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7823 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7824 && ! final_ptr
7825 && (! final_vec || inter_prec == inside_prec))
7826 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7827
7828 /* If we have a sign-extension of a zero-extended value, we can
7829 replace that by a single zero-extension. Likewise if the
7830 final conversion does not change precision we can drop the
7831 intermediate conversion. */
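   E.g. in (long)(int)(unsigned short)x the intermediate (int) cast
   can be dropped: the value is already zero-extended (assuming the
   usual 16/32/64-bit widths).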
7832 if (inside_int && inter_int && final_int
7833 && ((inside_prec < inter_prec && inter_prec < final_prec
7834 && inside_unsignedp && !inter_unsignedp)
7835 || final_prec == inter_prec))
7836 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7837
7838 /* Two conversions in a row are not needed unless:
7839 - some conversion is floating-point (overstrict for now), or
7840 - some conversion is a vector (overstrict for now), or
7841 - the intermediate type is narrower than both initial and
7842 final, or
7843 - the intermediate type and innermost type differ in signedness,
7844 and the outermost type is wider than the intermediate, or
7845 - the initial type is a pointer type and the precisions of the
7846 intermediate and final types differ, or
7847 - the final type is a pointer type and the precisions of the
7848 initial and intermediate types differ. */
7849 if (! inside_float && ! inter_float && ! final_float
7850 && ! inside_vec && ! inter_vec && ! final_vec
7851 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7852 && ! (inside_int && inter_int
7853 && inter_unsignedp != inside_unsignedp
7854 && inter_prec < final_prec)
7855 && ((inter_unsignedp && inter_prec > inside_prec)
7856 == (final_unsignedp && final_prec > inter_prec))
7857 && ! (inside_ptr && inter_prec != final_prec)
7858 && ! (final_ptr && inside_prec != inter_prec)
7859 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7860 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7861 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7862 }
7863
7864 /* Handle (T *)&A.B.C for A being of type T and B and C
7865 living at offset zero. This occurs frequently in
7866 C++ upcasting and then accessing the base. */
7867 if (TREE_CODE (op0) == ADDR_EXPR
7868 && POINTER_TYPE_P (type)
7869 && handled_component_p (TREE_OPERAND (op0, 0)))
7870 {
7871 HOST_WIDE_INT bitsize, bitpos;
7872 tree offset;
7873 enum machine_mode mode;
7874 int unsignedp, volatilep;
7875 tree base = TREE_OPERAND (op0, 0);
7876 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7877 &mode, &unsignedp, &volatilep, false);
7878 /* If the reference was to a (constant) zero offset, we can use
7879 the address of the base if it has the same base type
7880 as the result type and the pointer type is unqualified. */
7881 if (! offset && bitpos == 0
7882 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7883 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7884 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7885 return fold_convert_loc (loc, type,
7886 build_fold_addr_expr_loc (loc, base));
7887 }
7888
7889 if (TREE_CODE (op0) == MODIFY_EXPR
7890 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7891 /* Detect assigning a bitfield. */
7892 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7893 && DECL_BIT_FIELD
7894 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7895 {
7896 /* Don't leave an assignment inside a conversion
7897 unless assigning a bitfield. */
7898 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7899 /* First do the assignment, then return the converted constant. */
7900 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7901 TREE_NO_WARNING (tem) = 1;
7902 TREE_USED (tem) = 1;
7903 return tem;
7904 }
7905
7906 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7907 constant (if x has signed type, the sign bit cannot be set
7908 in c). This folds extension into the BIT_AND_EXPR.
7909 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7910 very likely don't have maximal range for their precision and this
7911 transformation effectively doesn't preserve non-maximal ranges. */
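   An illustrative case: (long)(x & 0xff) for a signed int x becomes
   (long)x & 0xff, as 0xff leaves the sign bit of x's type clear.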
7912 if (TREE_CODE (type) == INTEGER_TYPE
7913 && TREE_CODE (op0) == BIT_AND_EXPR
7914 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7915 {
7916 tree and_expr = op0;
7917 tree and0 = TREE_OPERAND (and_expr, 0);
7918 tree and1 = TREE_OPERAND (and_expr, 1);
7919 int change = 0;
7920
7921 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7922 || (TYPE_PRECISION (type)
7923 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7924 change = 1;
7925 else if (TYPE_PRECISION (TREE_TYPE (and1))
7926 <= HOST_BITS_PER_WIDE_INT
7927 && host_integerp (and1, 1))
7928 {
7929 unsigned HOST_WIDE_INT cst;
7930
7931 cst = tree_low_cst (and1, 1);
7932 cst &= (HOST_WIDE_INT) -1
7933 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7934 change = (cst == 0);
7935 #ifdef LOAD_EXTEND_OP
7936 if (change
7937 && !flag_syntax_only
7938 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7939 == ZERO_EXTEND))
7940 {
7941 tree uns = unsigned_type_for (TREE_TYPE (and0));
7942 and0 = fold_convert_loc (loc, uns, and0);
7943 and1 = fold_convert_loc (loc, uns, and1);
7944 }
7945 #endif
7946 }
7947 if (change)
7948 {
7949 tem = force_fit_type_double (type, tree_to_double_int (and1),
7950 0, TREE_OVERFLOW (and1));
7951 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7952 fold_convert_loc (loc, type, and0), tem);
7953 }
7954 }
7955
7956 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7957 when one of the new casts will fold away. Conservatively we assume
7958 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7959 if (POINTER_TYPE_P (type)
7960 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7961 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7962 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7963 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7964 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7965 {
7966 tree arg00 = TREE_OPERAND (arg0, 0);
7967 tree arg01 = TREE_OPERAND (arg0, 1);
7968
7969 return fold_build_pointer_plus_loc
7970 (loc, fold_convert_loc (loc, type, arg00), arg01);
7971 }
7972
7973 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7974 of the same precision, and X has an integer type not narrower than
7975 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7976 if (INTEGRAL_TYPE_P (type)
7977 && TREE_CODE (op0) == BIT_NOT_EXPR
7978 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7979 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7980 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7981 {
7982 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7983 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7984 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7985 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7986 fold_convert_loc (loc, type, tem));
7987 }
7988
7989 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7990 type of X and Y (integer types only). */
7991 if (INTEGRAL_TYPE_P (type)
7992 && TREE_CODE (op0) == MULT_EXPR
7993 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7994 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7995 {
7996 /* Be careful not to introduce new overflows. */
7997 tree mult_type;
7998 if (TYPE_OVERFLOW_WRAPS (type))
7999 mult_type = type;
8000 else
8001 mult_type = unsigned_type_for (type);
8002
8003 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8004 {
8005 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8006 fold_convert_loc (loc, mult_type,
8007 TREE_OPERAND (op0, 0)),
8008 fold_convert_loc (loc, mult_type,
8009 TREE_OPERAND (op0, 1)));
8010 return fold_convert_loc (loc, type, tem);
8011 }
8012 }
8013
8014 tem = fold_convert_const (code, type, op0);
8015 return tem ? tem : NULL_TREE;
8016
8017 case ADDR_SPACE_CONVERT_EXPR:
8018 if (integer_zerop (arg0))
8019 return fold_convert_const (code, type, arg0);
8020 return NULL_TREE;
8021
8022 case FIXED_CONVERT_EXPR:
8023 tem = fold_convert_const (code, type, arg0);
8024 return tem ? tem : NULL_TREE;
8025
8026 case VIEW_CONVERT_EXPR:
8027 if (TREE_TYPE (op0) == type)
8028 return op0;
8029 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8030 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8031 type, TREE_OPERAND (op0, 0));
8032 if (TREE_CODE (op0) == MEM_REF)
8033 return fold_build2_loc (loc, MEM_REF, type,
8034 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8035
8036 /* For integral conversions with the same precision or pointer
8037 conversions use a NOP_EXPR instead. */
8038 if ((INTEGRAL_TYPE_P (type)
8039 || POINTER_TYPE_P (type))
8040 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8041 || POINTER_TYPE_P (TREE_TYPE (op0)))
8042 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8043 return fold_convert_loc (loc, type, op0);
8044
8045 /* Strip inner integral conversions that do not change the precision. */
8046 if (CONVERT_EXPR_P (op0)
8047 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8048 || POINTER_TYPE_P (TREE_TYPE (op0)))
8049 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8050 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8051 && (TYPE_PRECISION (TREE_TYPE (op0))
8052 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8053 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8054 type, TREE_OPERAND (op0, 0));
8055
8056 return fold_view_convert_expr (type, op0);
8057
8058 case NEGATE_EXPR:
8059 tem = fold_negate_expr (loc, arg0);
8060 if (tem)
8061 return fold_convert_loc (loc, type, tem);
8062 return NULL_TREE;
8063
8064 case ABS_EXPR:
8065 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8066 return fold_abs_const (arg0, type);
8067 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8068 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8069 /* Convert fabs((double)float) into (double)fabsf(float). */
8070 else if (TREE_CODE (arg0) == NOP_EXPR
8071 && TREE_CODE (type) == REAL_TYPE)
8072 {
8073 tree targ0 = strip_float_extensions (arg0);
8074 if (targ0 != arg0)
8075 return fold_convert_loc (loc, type,
8076 fold_build1_loc (loc, ABS_EXPR,
8077 TREE_TYPE (targ0),
8078 targ0));
8079 }
8080 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8081 else if (TREE_CODE (arg0) == ABS_EXPR)
8082 return arg0;
8083 else if (tree_expr_nonnegative_p (arg0))
8084 return arg0;
8085
8086 /* Strip sign ops from argument. */
8087 if (TREE_CODE (type) == REAL_TYPE)
8088 {
8089 tem = fold_strip_sign_ops (arg0);
8090 if (tem)
8091 return fold_build1_loc (loc, ABS_EXPR, type,
8092 fold_convert_loc (loc, type, tem));
8093 }
8094 return NULL_TREE;
8095
8096 case CONJ_EXPR:
8097 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8098 return fold_convert_loc (loc, type, arg0);
8099 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8100 {
8101 tree itype = TREE_TYPE (type);
8102 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8103 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8104 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8105 negate_expr (ipart));
8106 }
8107 if (TREE_CODE (arg0) == COMPLEX_CST)
8108 {
8109 tree itype = TREE_TYPE (type);
8110 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8111 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8112 return build_complex (type, rpart, negate_expr (ipart));
8113 }
8114 if (TREE_CODE (arg0) == CONJ_EXPR)
8115 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8116 return NULL_TREE;
8117
8118 case BIT_NOT_EXPR:
8119 if (TREE_CODE (arg0) == INTEGER_CST)
8120 return fold_not_const (arg0, type);
8121 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8122 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8123 /* Convert ~ (-A) to A - 1. */
8124 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8125 return fold_build2_loc (loc, MINUS_EXPR, type,
8126 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8127 build_int_cst (type, 1));
8128 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8129 else if (INTEGRAL_TYPE_P (type)
8130 && ((TREE_CODE (arg0) == MINUS_EXPR
8131 && integer_onep (TREE_OPERAND (arg0, 1)))
8132 || (TREE_CODE (arg0) == PLUS_EXPR
8133 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8134 return fold_build1_loc (loc, NEGATE_EXPR, type,
8135 fold_convert_loc (loc, type,
8136 TREE_OPERAND (arg0, 0)));
8137 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8138 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8139 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8140 fold_convert_loc (loc, type,
8141 TREE_OPERAND (arg0, 0)))))
8142 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8143 fold_convert_loc (loc, type,
8144 TREE_OPERAND (arg0, 1)));
8145 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8146 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8147 fold_convert_loc (loc, type,
8148 TREE_OPERAND (arg0, 1)))))
8149 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8150 fold_convert_loc (loc, type,
8151 TREE_OPERAND (arg0, 0)), tem);
8152 /* Perform BIT_NOT_EXPR on each element individually. */
8153 else if (TREE_CODE (arg0) == VECTOR_CST)
8154 {
8155 tree *elements;
8156 tree elem;
8157 unsigned count = VECTOR_CST_NELTS (arg0), i;
8158
8159 elements = XALLOCAVEC (tree, count);
8160 for (i = 0; i < count; i++)
8161 {
8162 elem = VECTOR_CST_ELT (arg0, i);
8163 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8164 if (elem == NULL_TREE)
8165 break;
8166 elements[i] = elem;
8167 }
8168 if (i == count)
8169 return build_vector (type, elements);
8170 }
8171
8172 return NULL_TREE;
8173
8174 case TRUTH_NOT_EXPR:
8175 /* The argument to invert_truthvalue must have Boolean type. */
8176 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8177 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8178
8179 /* Note that the operand of this must be an int
8180 and its values must be 0 or 1.
8181 ("true" is a fixed value perhaps depending on the language,
8182 but we don't handle values other than 1 correctly yet.) */
8183 tem = fold_truth_not_expr (loc, arg0);
8184 if (!tem)
8185 return NULL_TREE;
8186 return fold_convert_loc (loc, type, tem);
8187
8188 case REALPART_EXPR:
8189 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8190 return fold_convert_loc (loc, type, arg0);
8191 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8192 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8193 TREE_OPERAND (arg0, 1));
8194 if (TREE_CODE (arg0) == COMPLEX_CST)
8195 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8196 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8197 {
8198 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8199 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8200 fold_build1_loc (loc, REALPART_EXPR, itype,
8201 TREE_OPERAND (arg0, 0)),
8202 fold_build1_loc (loc, REALPART_EXPR, itype,
8203 TREE_OPERAND (arg0, 1)));
8204 return fold_convert_loc (loc, type, tem);
8205 }
8206 if (TREE_CODE (arg0) == CONJ_EXPR)
8207 {
8208 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8209 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8210 TREE_OPERAND (arg0, 0));
8211 return fold_convert_loc (loc, type, tem);
8212 }
8213 if (TREE_CODE (arg0) == CALL_EXPR)
8214 {
8215 tree fn = get_callee_fndecl (arg0);
8216 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8217 switch (DECL_FUNCTION_CODE (fn))
8218 {
8219 CASE_FLT_FN (BUILT_IN_CEXPI):
8220 fn = mathfn_built_in (type, BUILT_IN_COS);
8221 if (fn)
8222 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8223 break;
8224
8225 default:
8226 break;
8227 }
8228 }
8229 return NULL_TREE;
8230
8231 case IMAGPART_EXPR:
8232 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8233 return build_zero_cst (type);
8234 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8235 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8236 TREE_OPERAND (arg0, 0));
8237 if (TREE_CODE (arg0) == COMPLEX_CST)
8238 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8239 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8240 {
8241 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8242 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8243 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8244 TREE_OPERAND (arg0, 0)),
8245 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8246 TREE_OPERAND (arg0, 1)));
8247 return fold_convert_loc (loc, type, tem);
8248 }
8249 if (TREE_CODE (arg0) == CONJ_EXPR)
8250 {
8251 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8252 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8253 return fold_convert_loc (loc, type, negate_expr (tem));
8254 }
8255 if (TREE_CODE (arg0) == CALL_EXPR)
8256 {
8257 tree fn = get_callee_fndecl (arg0);
8258 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8259 switch (DECL_FUNCTION_CODE (fn))
8260 {
8261 CASE_FLT_FN (BUILT_IN_CEXPI):
8262 fn = mathfn_built_in (type, BUILT_IN_SIN);
8263 if (fn)
8264 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8265 break;
8266
8267 default:
8268 break;
8269 }
8270 }
8271 return NULL_TREE;
8272
8273 case INDIRECT_REF:
8274 /* Fold *&X to X if X is an lvalue. */
8275 if (TREE_CODE (op0) == ADDR_EXPR)
8276 {
8277 tree op00 = TREE_OPERAND (op0, 0);
8278 if ((TREE_CODE (op00) == VAR_DECL
8279 || TREE_CODE (op00) == PARM_DECL
8280 || TREE_CODE (op00) == RESULT_DECL)
8281 && !TREE_READONLY (op00))
8282 return op00;
8283 }
8284 return NULL_TREE;
8285
8286 case VEC_UNPACK_LO_EXPR:
8287 case VEC_UNPACK_HI_EXPR:
8288 case VEC_UNPACK_FLOAT_LO_EXPR:
8289 case VEC_UNPACK_FLOAT_HI_EXPR:
8290 {
8291 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8292 tree *elts;
8293 enum tree_code subcode;
8294
8295 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8296 if (TREE_CODE (arg0) != VECTOR_CST)
8297 return NULL_TREE;
8298
8299 elts = XALLOCAVEC (tree, nelts * 2);
8300 if (!vec_cst_ctor_to_array (arg0, elts))
8301 return NULL_TREE;
8302
8303 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8304 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8305 elts += nelts;
8306
8307 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8308 subcode = NOP_EXPR;
8309 else
8310 subcode = FLOAT_EXPR;
8311
8312 for (i = 0; i < nelts; i++)
8313 {
8314 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8315 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8316 return NULL_TREE;
8317 }
8318
8319 return build_vector (type, elts);
8320 }
8321
8322 case REDUC_MIN_EXPR:
8323 case REDUC_MAX_EXPR:
8324 case REDUC_PLUS_EXPR:
8325 {
8326 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8327 tree *elts;
8328 enum tree_code subcode;
8329
8330 if (TREE_CODE (op0) != VECTOR_CST)
8331 return NULL_TREE;
8332
8333 elts = XALLOCAVEC (tree, nelts);
8334 if (!vec_cst_ctor_to_array (op0, elts))
8335 return NULL_TREE;
8336
8337 switch (code)
8338 {
8339 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8340 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8341 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8342 default: gcc_unreachable ();
8343 }
8344
8345 for (i = 1; i < nelts; i++)
8346 {
8347 elts[0] = const_binop (subcode, elts[0], elts[i]);
8348 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8349 return NULL_TREE;
8350 elts[i] = build_zero_cst (TREE_TYPE (type));
8351 }
8352
8353 return build_vector (type, elts);
8354 }
8355
8356 default:
8357 return NULL_TREE;
8358 } /* switch (code) */
8359 }
8360
8361
8362 /* If the operation was a conversion do _not_ mark a resulting constant
8363 with TREE_OVERFLOW if the original constant was not. These conversions
8364 have implementation defined behavior and retaining the TREE_OVERFLOW
8365 flag here would confuse later passes such as VRP. */
8366 tree
8367 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8368 tree type, tree op0)
8369 {
8370 tree res = fold_unary_loc (loc, code, type, op0);
8371 if (res
8372 && TREE_CODE (res) == INTEGER_CST
8373 && TREE_CODE (op0) == INTEGER_CST
8374 && CONVERT_EXPR_CODE_P (code))
8375 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8376
8377 return res;
8378 }
8379
8380 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8381 operands OP0 and OP1. LOC is the location of the resulting expression.
8382 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8383 Return the folded expression if folding is successful. Otherwise,
8384 return NULL_TREE. */
8385 static tree
8386 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8387 tree arg0, tree arg1, tree op0, tree op1)
8388 {
8389 tree tem;
8390
8391 /* We only do these simplifications if we are optimizing. */
8392 if (!optimize)
8393 return NULL_TREE;
8394
8395 /* Check for things like (A || B) && (A || C). We can convert this
8396 to A || (B && C). Note that either operator can be any of the four
8397 truth and/or operations and the transformation will still be
8398 valid. Also note that we only care about order for the
8399 ANDIF and ORIF operators. If B contains side effects, this
8400 might change the truth-value of A. */
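   E.g. (a || b) && (a || c) folds to a || (b && c), provided b has
   no side effects.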
8401 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8402 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8403 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8404 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8405 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8406 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8407 {
8408 tree a00 = TREE_OPERAND (arg0, 0);
8409 tree a01 = TREE_OPERAND (arg0, 1);
8410 tree a10 = TREE_OPERAND (arg1, 0);
8411 tree a11 = TREE_OPERAND (arg1, 1);
8412 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8413 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8414 && (code == TRUTH_AND_EXPR
8415 || code == TRUTH_OR_EXPR));
8416
8417 if (operand_equal_p (a00, a10, 0))
8418 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8419 fold_build2_loc (loc, code, type, a01, a11));
8420 else if (commutative && operand_equal_p (a00, a11, 0))
8421 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8422 fold_build2_loc (loc, code, type, a01, a10));
8423 else if (commutative && operand_equal_p (a01, a10, 0))
8424 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8425 fold_build2_loc (loc, code, type, a00, a11));
8426
8427 /* This case is tricky because we must either have commutative
8428 operators or else A10 must not have side-effects. */
8429
8430 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8431 && operand_equal_p (a01, a11, 0))
8432 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8433 fold_build2_loc (loc, code, type, a00, a10),
8434 a01);
8435 }
8436
8437 /* See if we can build a range comparison. */
8438 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8439 return tem;
8440
8441 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8442 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8443 {
8444 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8445 if (tem)
8446 return fold_build2_loc (loc, code, type, tem, arg1);
8447 }
8448
8449 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8450 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8451 {
8452 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8453 if (tem)
8454 return fold_build2_loc (loc, code, type, arg0, tem);
8455 }
8456
8457 /* Check for the possibility of merging component references. If our
8458 lhs is another similar operation, try to merge its rhs with our
8459 rhs. Then try to merge our lhs and rhs. */
8460 if (TREE_CODE (arg0) == code
8461 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8462 TREE_OPERAND (arg0, 1), arg1)))
8463 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8464
8465 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8466 return tem;
8467
8468 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8469 && (code == TRUTH_AND_EXPR
8470 || code == TRUTH_ANDIF_EXPR
8471 || code == TRUTH_OR_EXPR
8472 || code == TRUTH_ORIF_EXPR))
8473 {
8474 enum tree_code ncode, icode;
8475
8476 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8477 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8478 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8479
8480 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8481 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8482 We don't want to pack more than two leaves into a non-IF AND/OR
8483 expression.
8484 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8485 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8486 If the inner right-hand side of the left-hand operand has
8487 side effects, or isn't simple, then we can't add to it,
8488 as otherwise we might destroy the if-sequence. */
8489 if (TREE_CODE (arg0) == icode
8490 && simple_operand_p_2 (arg1)
8491 /* Needed for sequence points to handle trapping and
8492 side effects. */
8493 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8494 {
8495 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8496 arg1);
8497 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8498 tem);
8499 }
8500 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8501 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8502 else if (TREE_CODE (arg1) == icode
8503 && simple_operand_p_2 (arg0)
8504 /* Needed for sequence points to handle trapping and
8505 side effects. */
8506 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8507 {
8508 tem = fold_build2_loc (loc, ncode, type,
8509 arg0, TREE_OPERAND (arg1, 0));
8510 return fold_build2_loc (loc, icode, type, tem,
8511 TREE_OPERAND (arg1, 1));
8512 }
8513 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8514 into (A OR B).
8515 For sequence point consistency, we need to check for trapping
8516 and side effects. */
8517 else if (code == icode && simple_operand_p_2 (arg0)
8518 && simple_operand_p_2 (arg1))
8519 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8520 }
8521
8522 return NULL_TREE;
8523 }
8524
8525 /* Fold a binary expression of code CODE and type TYPE with operands
8526 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8527 Return the folded expression if folding is successful. Otherwise,
8528 return NULL_TREE. */
8529
8530 static tree
8531 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8532 {
8533 enum tree_code compl_code;
8534
8535 if (code == MIN_EXPR)
8536 compl_code = MAX_EXPR;
8537 else if (code == MAX_EXPR)
8538 compl_code = MIN_EXPR;
8539 else
8540 gcc_unreachable ();
8541
8542 /* MIN (MAX (a, b), b) == b. */
8543 if (TREE_CODE (op0) == compl_code
8544 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8545 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8546
8547 /* MIN (MAX (b, a), b) == b. */
8548 if (TREE_CODE (op0) == compl_code
8549 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8550 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8551 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8552
8553 /* MIN (a, MAX (a, b)) == a. */
8554 if (TREE_CODE (op1) == compl_code
8555 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8556 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8557 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8558
8559 /* MIN (a, MAX (b, a)) == a. */
8560 if (TREE_CODE (op1) == compl_code
8561 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8562 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8563 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8564
8565 return NULL_TREE;
8566 }
8567
8568 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8569 by changing CODE to reduce the magnitude of constants involved in
8570 ARG0 of the comparison.
8571 Returns a canonicalized comparison tree if a simplification was
8572 possible, otherwise returns NULL_TREE.
8573 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8574 valid if signed overflow is undefined. */
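   For example (valid only because signed overflow is undefined):
   a + 2 > b is canonicalized to a + 1 >= b, and the constant-only
   form 3 <= b becomes the swapped b > 2.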
8575
8576 static tree
8577 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8578 tree arg0, tree arg1,
8579 bool *strict_overflow_p)
8580 {
8581 enum tree_code code0 = TREE_CODE (arg0);
8582 tree t, cst0 = NULL_TREE;
8583 int sgn0;
8584 bool swap = false;
8585
8586 /* Match A +- CST code arg1 and CST code arg1. We can change the
8587 first form only if overflow is undefined. */
8588 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8589 /* In principle pointers also have undefined overflow behavior,
8590 but that causes problems elsewhere. */
8591 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8592 && (code0 == MINUS_EXPR
8593 || code0 == PLUS_EXPR)
8594 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8595 || code0 == INTEGER_CST))
8596 return NULL_TREE;
8597
8598 /* Identify the constant in arg0 and its sign. */
8599 if (code0 == INTEGER_CST)
8600 cst0 = arg0;
8601 else
8602 cst0 = TREE_OPERAND (arg0, 1);
8603 sgn0 = tree_int_cst_sgn (cst0);
8604
8605 /* Overflowed constants and zero will cause problems. */
8606 if (integer_zerop (cst0)
8607 || TREE_OVERFLOW (cst0))
8608 return NULL_TREE;
8609
8610 /* See if we can reduce the magnitude of the constant in
8611 arg0 by changing the comparison code. */
8612 if (code0 == INTEGER_CST)
8613 {
8614 /* CST <= arg1 -> CST-1 < arg1. */
8615 if (code == LE_EXPR && sgn0 == 1)
8616 code = LT_EXPR;
8617 /* -CST < arg1 -> -CST-1 <= arg1. */
8618 else if (code == LT_EXPR && sgn0 == -1)
8619 code = LE_EXPR;
8620 /* CST > arg1 -> CST-1 >= arg1. */
8621 else if (code == GT_EXPR && sgn0 == 1)
8622 code = GE_EXPR;
8623 /* -CST >= arg1 -> -CST-1 > arg1. */
8624 else if (code == GE_EXPR && sgn0 == -1)
8625 code = GT_EXPR;
8626 else
8627 return NULL_TREE;
8628 /* arg1 code' CST' might be more canonical. */
8629 swap = true;
8630 }
8631 else
8632 {
8633 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8634 if (code == LT_EXPR
8635 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8636 code = LE_EXPR;
8637 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8638 else if (code == GT_EXPR
8639 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8640 code = GE_EXPR;
8641 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8642 else if (code == LE_EXPR
8643 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8644 code = LT_EXPR;
8645 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8646 else if (code == GE_EXPR
8647 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8648 code = GT_EXPR;
8649 else
8650 return NULL_TREE;
8651 *strict_overflow_p = true;
8652 }
8653
8654 /* Now build the constant reduced in magnitude. But not if that
8655 would produce one outside of its type's range. */
8656 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8657 && ((sgn0 == 1
8658 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8659 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8660 || (sgn0 == -1
8661 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8662 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8663 /* We cannot swap the comparison here as that would cause us to
8664 endlessly recurse. */
8665 return NULL_TREE;
8666
8667 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8668 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8669 if (code0 != INTEGER_CST)
8670 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8671 t = fold_convert (TREE_TYPE (arg1), t);
8672
8673 /* If swapping might yield a more canonical form, do so. */
8674 if (swap)
8675 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8676 else
8677 return fold_build2_loc (loc, code, type, t, arg1);
8678 }
8679
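/* Illustrative examples (assuming signed overflow is undefined,
   e.g. with -fstrict-overflow; operand names are hypothetical):

       x - 4 < y   ->  x - 3 <= y   (A - CST < arg1 -> A - CST-1 <= arg1)
       3 <= y      ->  y > 2        (CST <= arg1 -> CST-1 < arg1, then
                                     swapped to put the constant last)

   The second form needs no overflow assumption since no arithmetic
   on a variable is involved; the first is recorded via
   *STRICT_OVERFLOW_P.  */
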
8680 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8681 overflow further. Try to decrease the magnitude of constants involved
8682 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8683 and put sole constants at the second argument position.
8684 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8685
8686 static tree
8687 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8688 tree arg0, tree arg1)
8689 {
8690 tree t;
8691 bool strict_overflow_p;
8692 const char * const warnmsg = G_("assuming signed overflow does not occur "
8693 "when reducing constant in comparison");
8694
8695 /* Try canonicalization by simplifying arg0. */
8696 strict_overflow_p = false;
8697 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8698 &strict_overflow_p);
8699 if (t)
8700 {
8701 if (strict_overflow_p)
8702 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8703 return t;
8704 }
8705
8706 /* Try canonicalization by simplifying arg1 using the swapped
8707 comparison. */
8708 code = swap_tree_comparison (code);
8709 strict_overflow_p = false;
8710 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8711 &strict_overflow_p);
8712 if (t && strict_overflow_p)
8713 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8714 return t;
8715 }
8716
8717 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8718 space. This is used to avoid issuing overflow warnings for
8719 expressions like &p->x which cannot wrap. */
8720
8721 static bool
8722 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8723 {
8724 double_int di_offset, total;
8725
8726 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8727 return true;
8728
8729 if (bitpos < 0)
8730 return true;
8731
8732 if (offset == NULL_TREE)
8733 di_offset = double_int_zero;
8734 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8735 return true;
8736 else
8737 di_offset = TREE_INT_CST (offset);
8738
8739 bool overflow;
8740 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8741 total = di_offset.add_with_sign (units, true, &overflow);
8742 if (overflow)
8743 return true;
8744
8745 if (total.high != 0)
8746 return true;
8747
8748 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8749 if (size <= 0)
8750 return true;
8751
8752 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8753 array. */
8754 if (TREE_CODE (base) == ADDR_EXPR)
8755 {
8756 HOST_WIDE_INT base_size;
8757
8758 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8759 if (base_size > 0 && size < base_size)
8760 size = base_size;
8761 }
8762
8763 return total.low > (unsigned HOST_WIDE_INT) size;
8764 }
8765
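/* Sketch of the check above (names hypothetical): for a pointer P
   of type struct S * with sizeof (struct S) == 8, an access with
   offset 4 and bit position 0 gives total == 4 <= 8, so the
   function returns false and callers may assume the address
   arithmetic behind &P->x does not wrap.  A negative bit position,
   a non-constant offset, or a total larger than the object size
   conservatively returns true.  */
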
8766 /* Subroutine of fold_binary. This routine performs all of the
8767 transformations that are common to the equality/inequality
8768 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8769 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8770 fold_binary should call fold_binary. Fold a comparison with
8771 tree code CODE and type TYPE with operands OP0 and OP1. Return
8772 the folded comparison or NULL_TREE. */
8773
8774 static tree
8775 fold_comparison (location_t loc, enum tree_code code, tree type,
8776 tree op0, tree op1)
8777 {
8778 tree arg0, arg1, tem;
8779
8780 arg0 = op0;
8781 arg1 = op1;
8782
8783 STRIP_SIGN_NOPS (arg0);
8784 STRIP_SIGN_NOPS (arg1);
8785
8786 tem = fold_relational_const (code, type, arg0, arg1);
8787 if (tem != NULL_TREE)
8788 return tem;
8789
8790 /* If one arg is a real or integer constant, put it last. */
8791 if (tree_swap_operands_p (arg0, arg1, true))
8792 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8793
8794 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8795 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8796 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8797 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8798 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8799 && (TREE_CODE (arg1) == INTEGER_CST
8800 && !TREE_OVERFLOW (arg1)))
8801 {
8802 tree const1 = TREE_OPERAND (arg0, 1);
8803 tree const2 = arg1;
8804 tree variable = TREE_OPERAND (arg0, 0);
8805 tree lhs;
8806 int lhs_add;
8807 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8808
8809 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8810 TREE_TYPE (arg1), const2, const1);
8811
8812 /* If the constant operation overflowed this can be
8813 simplified as a comparison against INT_MAX/INT_MIN. */
8814 if (TREE_CODE (lhs) == INTEGER_CST
8815 && TREE_OVERFLOW (lhs))
8816 {
8817 int const1_sgn = tree_int_cst_sgn (const1);
8818 enum tree_code code2 = code;
8819
8820 /* Get the sign of the constant on the lhs if the
8821 operation were VARIABLE + CONST1. */
8822 if (TREE_CODE (arg0) == MINUS_EXPR)
8823 const1_sgn = -const1_sgn;
8824
8825 /* The sign of the constant determines if we overflowed
8826 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8827 Canonicalize to the INT_MIN overflow by swapping the comparison
8828 if necessary. */
8829 if (const1_sgn == -1)
8830 code2 = swap_tree_comparison (code);
8831
8832 /* We now can look at the canonicalized case
8833 VARIABLE + 1 CODE2 INT_MIN
8834 and decide on the result. */
8835 if (code2 == LT_EXPR
8836 || code2 == LE_EXPR
8837 || code2 == EQ_EXPR)
8838 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8839 else if (code2 == NE_EXPR
8840 || code2 == GE_EXPR
8841 || code2 == GT_EXPR)
8842 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8843 }
8844
8845 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8846 && (TREE_CODE (lhs) != INTEGER_CST
8847 || !TREE_OVERFLOW (lhs)))
8848 {
8849 if (code != EQ_EXPR && code != NE_EXPR)
8850 fold_overflow_warning ("assuming signed overflow does not occur "
8851 "when changing X +- C1 cmp C2 to "
8852 "X cmp C1 +- C2",
8853 WARN_STRICT_OVERFLOW_COMPARISON);
8854 return fold_build2_loc (loc, code, type, variable, lhs);
8855 }
8856 }
8857
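/* For example (hypothetical operands, signed arithmetic):

       x + 10 < 20   ->  x < 10

   and when the recomputed constant overflows, the comparison
   degenerates into a constant; e.g. with 32-bit int,

       x - 1 > INT_MAX   ->  false

   since x - 1 can never exceed INT_MAX when signed overflow is
   undefined.  */
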
8858 /* For comparisons of pointers we can decompose it to a compile time
8859 comparison of the base objects and the offsets into the object.
8860 This requires at least one operand being an ADDR_EXPR or a
8861 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8862 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8863 && (TREE_CODE (arg0) == ADDR_EXPR
8864 || TREE_CODE (arg1) == ADDR_EXPR
8865 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8866 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8867 {
8868 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8869 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8870 enum machine_mode mode;
8871 int volatilep, unsignedp;
8872 bool indirect_base0 = false, indirect_base1 = false;
8873
8874 /* Get base and offset for the access. Strip ADDR_EXPR for
8875 get_inner_reference, but put it back by stripping INDIRECT_REF
8876 off the base object if possible. indirect_baseN will be true
8877 if baseN is not an address but refers to the object itself. */
8878 base0 = arg0;
8879 if (TREE_CODE (arg0) == ADDR_EXPR)
8880 {
8881 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8882 &bitsize, &bitpos0, &offset0, &mode,
8883 &unsignedp, &volatilep, false);
8884 if (TREE_CODE (base0) == INDIRECT_REF)
8885 base0 = TREE_OPERAND (base0, 0);
8886 else
8887 indirect_base0 = true;
8888 }
8889 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8890 {
8891 base0 = TREE_OPERAND (arg0, 0);
8892 STRIP_SIGN_NOPS (base0);
8893 if (TREE_CODE (base0) == ADDR_EXPR)
8894 {
8895 base0 = TREE_OPERAND (base0, 0);
8896 indirect_base0 = true;
8897 }
8898 offset0 = TREE_OPERAND (arg0, 1);
8899 if (host_integerp (offset0, 0))
8900 {
8901 HOST_WIDE_INT off = size_low_cst (offset0);
8902 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8903 * BITS_PER_UNIT)
8904 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8905 {
8906 bitpos0 = off * BITS_PER_UNIT;
8907 offset0 = NULL_TREE;
8908 }
8909 }
8910 }
8911
8912 base1 = arg1;
8913 if (TREE_CODE (arg1) == ADDR_EXPR)
8914 {
8915 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8916 &bitsize, &bitpos1, &offset1, &mode,
8917 &unsignedp, &volatilep, false);
8918 if (TREE_CODE (base1) == INDIRECT_REF)
8919 base1 = TREE_OPERAND (base1, 0);
8920 else
8921 indirect_base1 = true;
8922 }
8923 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8924 {
8925 base1 = TREE_OPERAND (arg1, 0);
8926 STRIP_SIGN_NOPS (base1);
8927 if (TREE_CODE (base1) == ADDR_EXPR)
8928 {
8929 base1 = TREE_OPERAND (base1, 0);
8930 indirect_base1 = true;
8931 }
8932 offset1 = TREE_OPERAND (arg1, 1);
8933 if (host_integerp (offset1, 0))
8934 {
8935 HOST_WIDE_INT off = size_low_cst (offset1);
8936 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8937 * BITS_PER_UNIT)
8938 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8939 {
8940 bitpos1 = off * BITS_PER_UNIT;
8941 offset1 = NULL_TREE;
8942 }
8943 }
8944 }
8945
8946 /* A local variable can never be pointed to by
8947 the default SSA name of an incoming parameter. */
8948 if ((TREE_CODE (arg0) == ADDR_EXPR
8949 && indirect_base0
8950 && TREE_CODE (base0) == VAR_DECL
8951 && auto_var_in_fn_p (base0, current_function_decl)
8952 && !indirect_base1
8953 && TREE_CODE (base1) == SSA_NAME
8954 && SSA_NAME_IS_DEFAULT_DEF (base1)
8955 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8956 || (TREE_CODE (arg1) == ADDR_EXPR
8957 && indirect_base1
8958 && TREE_CODE (base1) == VAR_DECL
8959 && auto_var_in_fn_p (base1, current_function_decl)
8960 && !indirect_base0
8961 && TREE_CODE (base0) == SSA_NAME
8962 && SSA_NAME_IS_DEFAULT_DEF (base0)
8963 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8964 {
8965 if (code == NE_EXPR)
8966 return constant_boolean_node (1, type);
8967 else if (code == EQ_EXPR)
8968 return constant_boolean_node (0, type);
8969 }
8970 /* If we have equivalent bases we might be able to simplify. */
8971 else if (indirect_base0 == indirect_base1
8972 && operand_equal_p (base0, base1, 0))
8973 {
8974 /* We can fold this expression to a constant if the non-constant
8975 offset parts are equal. */
8976 if ((offset0 == offset1
8977 || (offset0 && offset1
8978 && operand_equal_p (offset0, offset1, 0)))
8979 && (code == EQ_EXPR
8980 || code == NE_EXPR
8981 || (indirect_base0 && DECL_P (base0))
8982 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8983
8984 {
8985 if (code != EQ_EXPR
8986 && code != NE_EXPR
8987 && bitpos0 != bitpos1
8988 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8989 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8990 fold_overflow_warning (("assuming pointer wraparound does not "
8991 "occur when comparing P +- C1 with "
8992 "P +- C2"),
8993 WARN_STRICT_OVERFLOW_CONDITIONAL);
8994
8995 switch (code)
8996 {
8997 case EQ_EXPR:
8998 return constant_boolean_node (bitpos0 == bitpos1, type);
8999 case NE_EXPR:
9000 return constant_boolean_node (bitpos0 != bitpos1, type);
9001 case LT_EXPR:
9002 return constant_boolean_node (bitpos0 < bitpos1, type);
9003 case LE_EXPR:
9004 return constant_boolean_node (bitpos0 <= bitpos1, type);
9005 case GE_EXPR:
9006 return constant_boolean_node (bitpos0 >= bitpos1, type);
9007 case GT_EXPR:
9008 return constant_boolean_node (bitpos0 > bitpos1, type);
9009 default:;
9010 }
9011 }
9012 /* We can simplify the comparison to a comparison of the variable
9013 offset parts if the constant offset parts are equal.
9014 Be careful to use signed sizetype here because otherwise we
9015 mess with array offsets in the wrong way. This is possible
9016 because pointer arithmetic is restricted to remain within an
9017 object and overflow on pointer differences is undefined as of
9018 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9019 else if (bitpos0 == bitpos1
9020 && ((code == EQ_EXPR || code == NE_EXPR)
9021 || (indirect_base0 && DECL_P (base0))
9022 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9023 {
9024 /* By converting to signed sizetype we cover middle-end pointer
9025 arithmetic which operates on unsigned pointer types of sizetype
9026 size and ARRAY_REF offsets which are properly sign or
9027 zero extended from their type in case it is narrower than
9028 sizetype. */
9029 if (offset0 == NULL_TREE)
9030 offset0 = build_int_cst (ssizetype, 0);
9031 else
9032 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9033 if (offset1 == NULL_TREE)
9034 offset1 = build_int_cst (ssizetype, 0);
9035 else
9036 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9037
9038 if (code != EQ_EXPR
9039 && code != NE_EXPR
9040 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9041 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9042 fold_overflow_warning (("assuming pointer wraparound does not "
9043 "occur when comparing P +- C1 with "
9044 "P +- C2"),
9045 WARN_STRICT_OVERFLOW_COMPARISON);
9046
9047 return fold_build2_loc (loc, code, type, offset0, offset1);
9048 }
9049 }
9050 /* For non-equal bases we can simplify if they are addresses
9051 of local binding decls or constants. */
9052 else if (indirect_base0 && indirect_base1
9053 /* We know that !operand_equal_p (base0, base1, 0)
9054 because the if condition was false. But make
9055 sure two decls are not the same. */
9056 && base0 != base1
9057 && TREE_CODE (arg0) == ADDR_EXPR
9058 && TREE_CODE (arg1) == ADDR_EXPR
9059 && (((TREE_CODE (base0) == VAR_DECL
9060 || TREE_CODE (base0) == PARM_DECL)
9061 && (targetm.binds_local_p (base0)
9062 || CONSTANT_CLASS_P (base1)))
9063 || CONSTANT_CLASS_P (base0))
9064 && (((TREE_CODE (base1) == VAR_DECL
9065 || TREE_CODE (base1) == PARM_DECL)
9066 && (targetm.binds_local_p (base1)
9067 || CONSTANT_CLASS_P (base0)))
9068 || CONSTANT_CLASS_P (base1)))
9069 {
9070 if (code == EQ_EXPR)
9071 return omit_two_operands_loc (loc, type, boolean_false_node,
9072 arg0, arg1);
9073 else if (code == NE_EXPR)
9074 return omit_two_operands_loc (loc, type, boolean_true_node,
9075 arg0, arg1);
9076 }
9077 /* For equal offsets we can simplify to a comparison of the
9078 base addresses. */
9079 else if (bitpos0 == bitpos1
9080 && (indirect_base0
9081 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9082 && (indirect_base1
9083 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9084 && ((offset0 == offset1)
9085 || (offset0 && offset1
9086 && operand_equal_p (offset0, offset1, 0))))
9087 {
9088 if (indirect_base0)
9089 base0 = build_fold_addr_expr_loc (loc, base0);
9090 if (indirect_base1)
9091 base1 = build_fold_addr_expr_loc (loc, base1);
9092 return fold_build2_loc (loc, code, type, base0, base1);
9093 }
9094 }
9095
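/* Worked examples of the pointer decomposition above (names
   hypothetical):

       &a[4] <  &a[7]   ->  true    (equal bases; bit positions
                                     4*32 < 7*32 for an int array)
       &x   ==  &y      ->  false   (distinct local decls as bases)
       &p->f == &q->f   ->  p == q  (equal bit positions and offsets,
                                     so compare the bases)  */
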
9096 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9097 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9098 the resulting offset is smaller in absolute value than the
9099 original one. */
9100 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9101 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9102 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9103 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9104 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9105 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9106 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9107 {
9108 tree const1 = TREE_OPERAND (arg0, 1);
9109 tree const2 = TREE_OPERAND (arg1, 1);
9110 tree variable1 = TREE_OPERAND (arg0, 0);
9111 tree variable2 = TREE_OPERAND (arg1, 0);
9112 tree cst;
9113 const char * const warnmsg = G_("assuming signed overflow does not "
9114 "occur when combining constants around "
9115 "a comparison");
9116
9117 /* Put the constant on the side where it doesn't overflow and is
9118 of lower absolute value than before. */
9119 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9120 ? MINUS_EXPR : PLUS_EXPR,
9121 const2, const1);
9122 if (!TREE_OVERFLOW (cst)
9123 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9124 {
9125 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9126 return fold_build2_loc (loc, code, type,
9127 variable1,
9128 fold_build2_loc (loc,
9129 TREE_CODE (arg1), TREE_TYPE (arg1),
9130 variable2, cst));
9131 }
9132
9133 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9134 ? MINUS_EXPR : PLUS_EXPR,
9135 const1, const2);
9136 if (!TREE_OVERFLOW (cst)
9137 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9138 {
9139 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9140 return fold_build2_loc (loc, code, type,
9141 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9142 variable1, cst),
9143 variable2);
9144 }
9145 }
9146
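/* For example (hypothetical operands, signed, overflow undefined):

       x + 5 < y + 2   ->  x < y + (-3)

   The resulting combined offset 3 is smaller in absolute value than
   the original 5, so the transformation cannot itself introduce an
   overflow that was not already present.  */
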
9147 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9148 signed arithmetic case. That form is created by the compiler
9149 often enough for folding it to be of value. One example is in
9150 computing loop trip counts after Operator Strength Reduction. */
9151 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9152 && TREE_CODE (arg0) == MULT_EXPR
9153 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9154 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9155 && integer_zerop (arg1))
9156 {
9157 tree const1 = TREE_OPERAND (arg0, 1);
9158 tree const2 = arg1; /* zero */
9159 tree variable1 = TREE_OPERAND (arg0, 0);
9160 enum tree_code cmp_code = code;
9161
9162 /* Handle unfolded multiplication by zero. */
9163 if (integer_zerop (const1))
9164 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9165
9166 fold_overflow_warning (("assuming signed overflow does not occur when "
9167 "eliminating multiplication in comparison "
9168 "with zero"),
9169 WARN_STRICT_OVERFLOW_COMPARISON);
9170
9171 /* If const1 is negative we swap the sense of the comparison. */
9172 if (tree_int_cst_sgn (const1) < 0)
9173 cmp_code = swap_tree_comparison (cmp_code);
9174
9175 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9176 }
9177
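/* For example (hypothetical operand x, signed, overflow undefined):

       x * 4  > 0   ->  x > 0
       x * -2 > 0   ->  x < 0   (a negative factor swaps the sense)  */
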
9178 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9179 if (tem)
9180 return tem;
9181
9182 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9183 {
9184 tree targ0 = strip_float_extensions (arg0);
9185 tree targ1 = strip_float_extensions (arg1);
9186 tree newtype = TREE_TYPE (targ0);
9187
9188 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9189 newtype = TREE_TYPE (targ1);
9190
9191 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9192 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9193 return fold_build2_loc (loc, code, type,
9194 fold_convert_loc (loc, newtype, targ0),
9195 fold_convert_loc (loc, newtype, targ1));
9196
9197 /* (-a) CMP (-b) -> b CMP a */
9198 if (TREE_CODE (arg0) == NEGATE_EXPR
9199 && TREE_CODE (arg1) == NEGATE_EXPR)
9200 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9201 TREE_OPERAND (arg0, 0));
9202
9203 if (TREE_CODE (arg1) == REAL_CST)
9204 {
9205 REAL_VALUE_TYPE cst;
9206 cst = TREE_REAL_CST (arg1);
9207
9208 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9209 if (TREE_CODE (arg0) == NEGATE_EXPR)
9210 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9211 TREE_OPERAND (arg0, 0),
9212 build_real (TREE_TYPE (arg1),
9213 real_value_negate (&cst)));
9214
9215 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9216 /* a CMP (-0) -> a CMP 0 */
9217 if (REAL_VALUE_MINUS_ZERO (cst))
9218 return fold_build2_loc (loc, code, type, arg0,
9219 build_real (TREE_TYPE (arg1), dconst0));
9220
9221 /* x != NaN is always true, other ops are always false. */
9222 if (REAL_VALUE_ISNAN (cst)
9223 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9224 {
9225 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9226 return omit_one_operand_loc (loc, type, tem, arg0);
9227 }
9228
9229 /* Fold comparisons against infinity. */
9230 if (REAL_VALUE_ISINF (cst)
9231 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9232 {
9233 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9234 if (tem != NULL_TREE)
9235 return tem;
9236 }
9237 }
9238
9239 /* If this is a comparison of a real constant with a PLUS_EXPR
9240 or a MINUS_EXPR of a real constant, we can convert it into a
9241 comparison with a revised real constant as long as no overflow
9242 occurs when unsafe_math_optimizations are enabled. */
9243 if (flag_unsafe_math_optimizations
9244 && TREE_CODE (arg1) == REAL_CST
9245 && (TREE_CODE (arg0) == PLUS_EXPR
9246 || TREE_CODE (arg0) == MINUS_EXPR)
9247 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9248 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9249 ? MINUS_EXPR : PLUS_EXPR,
9250 arg1, TREE_OPERAND (arg0, 1)))
9251 && !TREE_OVERFLOW (tem))
9252 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9253
9254 /* Likewise, we can simplify a comparison of a real constant with
9255 a MINUS_EXPR whose first operand is also a real constant, i.e.
9256 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9257 floating-point types only if -fassociative-math is set. */
9258 if (flag_associative_math
9259 && TREE_CODE (arg1) == REAL_CST
9260 && TREE_CODE (arg0) == MINUS_EXPR
9261 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9262 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9263 arg1))
9264 && !TREE_OVERFLOW (tem))
9265 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9266 TREE_OPERAND (arg0, 1), tem);
9267
9268 /* Fold comparisons against built-in math functions. */
9269 if (TREE_CODE (arg1) == REAL_CST
9270 && flag_unsafe_math_optimizations
9271 && ! flag_errno_math)
9272 {
9273 enum built_in_function fcode = builtin_mathfn_code (arg0);
9274
9275 if (fcode != END_BUILTINS)
9276 {
9277 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9278 if (tem != NULL_TREE)
9279 return tem;
9280 }
9281 }
9282 }
9283
9284 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9285 && CONVERT_EXPR_P (arg0))
9286 {
9287 /* If we are widening one operand of an integer comparison,
9288 see if the other operand is similarly being widened. Perhaps we
9289 can do the comparison in the narrower type. */
9290 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9291 if (tem)
9292 return tem;
9293
9294 /* Or if we are changing signedness. */
9295 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9296 if (tem)
9297 return tem;
9298 }
9299
9300 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9301 constant, we can simplify it. */
9302 if (TREE_CODE (arg1) == INTEGER_CST
9303 && (TREE_CODE (arg0) == MIN_EXPR
9304 || TREE_CODE (arg0) == MAX_EXPR)
9305 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9306 {
9307 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9308 if (tem)
9309 return tem;
9310 }
9311
9312 /* Simplify comparison of something with itself. (For IEEE
9313 floating-point, we can only do some of these simplifications.) */
9314 if (operand_equal_p (arg0, arg1, 0))
9315 {
9316 switch (code)
9317 {
9318 case EQ_EXPR:
9319 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9320 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9321 return constant_boolean_node (1, type);
9322 break;
9323
9324 case GE_EXPR:
9325 case LE_EXPR:
9326 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9327 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9328 return constant_boolean_node (1, type);
9329 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9330
9331 case NE_EXPR:
9332 /* For NE, we can only do this simplification if integer
9333 or we don't honor IEEE floating point NaNs. */
9334 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9335 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9336 break;
9337 /* ... fall through ... */
9338 case GT_EXPR:
9339 case LT_EXPR:
9340 return constant_boolean_node (0, type);
9341 default:
9342 gcc_unreachable ();
9343 }
9344 }
9345
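/* Examples for the self-comparison folds (hypothetical operand x):

       x == x   ->  true    (always for integers; for IEEE floats only
                             when NaNs need not be honored, since
                             NaN == NaN is false)
       x <  x   ->  false   (safe even with NaNs: LT is an ordered
                             comparison and is false for NaN operands)  */
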
9346 /* If we are comparing an expression that just has comparisons
9347 of two integer values, arithmetic expressions of those comparisons,
9348 and constants, we can simplify it. There are only three cases
9349 to check: the two values can either be equal, the first can be
9350 greater, or the second can be greater. Fold the expression for
9351 those three values. Since each value must be 0 or 1, we have
9352 eight possibilities, each of which corresponds to the constant 0
9353 or 1 or one of the six possible comparisons.
9354
9355 This handles common cases like (a > b) == 0 but also handles
9356 expressions like ((x > y) - (y > x)) > 0, which supposedly
9357 occur in macroized code. */
9358
9359 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9360 {
9361 tree cval1 = 0, cval2 = 0;
9362 int save_p = 0;
9363
9364 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9365 /* Don't handle degenerate cases here; they should already
9366 have been handled anyway. */
9367 && cval1 != 0 && cval2 != 0
9368 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9369 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9370 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9371 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9372 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9373 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9374 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9375 {
9376 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9377 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9378
9379 /* We can't just pass T to eval_subst in case cval1 or cval2
9380 was the same as ARG1. */
9381
9382 tree high_result
9383 = fold_build2_loc (loc, code, type,
9384 eval_subst (loc, arg0, cval1, maxval,
9385 cval2, minval),
9386 arg1);
9387 tree equal_result
9388 = fold_build2_loc (loc, code, type,
9389 eval_subst (loc, arg0, cval1, maxval,
9390 cval2, maxval),
9391 arg1);
9392 tree low_result
9393 = fold_build2_loc (loc, code, type,
9394 eval_subst (loc, arg0, cval1, minval,
9395 cval2, maxval),
9396 arg1);
9397
9398 /* All three of these results should be 0 or 1. Confirm they are.
9399 Then use those values to select the proper code to use. */
9400
9401 if (TREE_CODE (high_result) == INTEGER_CST
9402 && TREE_CODE (equal_result) == INTEGER_CST
9403 && TREE_CODE (low_result) == INTEGER_CST)
9404 {
9405 /* Make a 3-bit mask with the high-order bit being the
9406 value for `>', the next for '=', and the low for '<'. */
9407 switch ((integer_onep (high_result) * 4)
9408 + (integer_onep (equal_result) * 2)
9409 + integer_onep (low_result))
9410 {
9411 case 0:
9412 /* Always false. */
9413 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9414 case 1:
9415 code = LT_EXPR;
9416 break;
9417 case 2:
9418 code = EQ_EXPR;
9419 break;
9420 case 3:
9421 code = LE_EXPR;
9422 break;
9423 case 4:
9424 code = GT_EXPR;
9425 break;
9426 case 5:
9427 code = NE_EXPR;
9428 break;
9429 case 6:
9430 code = GE_EXPR;
9431 break;
9432 case 7:
9433 /* Always true. */
9434 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9435 }
9436
9437 if (save_p)
9438 {
9439 tem = save_expr (build2 (code, type, cval1, cval2));
9440 SET_EXPR_LOCATION (tem, loc);
9441 return tem;
9442 }
9443 return fold_build2_loc (loc, code, type, cval1, cval2);
9444 }
9445 }
9446 }
9447
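/* A worked instance of the three-value analysis above (hypothetical
   operands): for (a > b) == 0, substituting the three orderings of
   a and b yields the results 0, 1, 1 for "greater", "equal" and
   "less", i.e. mask 0*4 + 1*2 + 1 == 3, which selects LE_EXPR, so
   the whole expression folds to a <= b.  */
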
9448 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9449 into a single range test. */
9450 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9451 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9452 && TREE_CODE (arg1) == INTEGER_CST
9453 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9454 && !integer_zerop (TREE_OPERAND (arg0, 1))
9455 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9456 && !TREE_OVERFLOW (arg1))
9457 {
9458 tem = fold_div_compare (loc, code, type, arg0, arg1);
9459 if (tem != NULL_TREE)
9460 return tem;
9461 }
9462
9463 /* Fold ~X op ~Y as Y op X. */
9464 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9465 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9466 {
9467 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9468 return fold_build2_loc (loc, code, type,
9469 fold_convert_loc (loc, cmp_type,
9470 TREE_OPERAND (arg1, 0)),
9471 TREE_OPERAND (arg0, 0));
9472 }
9473
9474 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9475 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9476 && TREE_CODE (arg1) == INTEGER_CST)
9477 {
9478 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9479 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9480 TREE_OPERAND (arg0, 0),
9481 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9482 fold_convert_loc (loc, cmp_type, arg1)));
9483 }
9484
9485 return NULL_TREE;
9486 }
9487
9488
9489 /* Subroutine of fold_binary. Optimize complex multiplications of the
9490 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9491 argument EXPR represents the expression "z" of type TYPE. */
9492
9493 static tree
9494 fold_mult_zconjz (location_t loc, tree type, tree expr)
9495 {
9496 tree itype = TREE_TYPE (type);
9497 tree rpart, ipart, tem;
9498
9499 if (TREE_CODE (expr) == COMPLEX_EXPR)
9500 {
9501 rpart = TREE_OPERAND (expr, 0);
9502 ipart = TREE_OPERAND (expr, 1);
9503 }
9504 else if (TREE_CODE (expr) == COMPLEX_CST)
9505 {
9506 rpart = TREE_REALPART (expr);
9507 ipart = TREE_IMAGPART (expr);
9508 }
9509 else
9510 {
9511 expr = save_expr (expr);
9512 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9513 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9514 }
9515
9516 rpart = save_expr (rpart);
9517 ipart = save_expr (ipart);
9518 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9519 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9520 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9521 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9522 build_zero_cst (itype));
9523 }
9524
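/* For z = a + b*i this computes

       z * conj(z)   ->  COMPLEX_EXPR (a*a + b*b, 0)

   i.e. the squared magnitude in the real part and a zero imaginary
   part, with a and b wrapped in SAVE_EXPRs so each is evaluated
   only once.  */
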
9525
9526 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9527 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9528 guarantees that P and N have the same least significant log2(M) bits.
9529 N is not otherwise constrained. In particular, N is not normalized to
9530 0 <= N < M as is common. In general, the precise value of P is unknown.
9531 M is chosen as large as possible such that constant N can be determined.
9532
9533 Returns M and sets *RESIDUE to N.
9534
9535 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9536 account. This is not always possible due to PR 35705.
9537 */
9538
9539 static unsigned HOST_WIDE_INT
9540 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9541 bool allow_func_align)
9542 {
9543 enum tree_code code;
9544
9545 *residue = 0;
9546
9547 code = TREE_CODE (expr);
9548 if (code == ADDR_EXPR)
9549 {
9550 unsigned int bitalign;
9551 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9552 *residue /= BITS_PER_UNIT;
9553 return bitalign / BITS_PER_UNIT;
9554 }
9555 else if (code == POINTER_PLUS_EXPR)
9556 {
9557 tree op0, op1;
9558 unsigned HOST_WIDE_INT modulus;
9559 enum tree_code inner_code;
9560
9561 op0 = TREE_OPERAND (expr, 0);
9562 STRIP_NOPS (op0);
9563 modulus = get_pointer_modulus_and_residue (op0, residue,
9564 allow_func_align);
9565
9566 op1 = TREE_OPERAND (expr, 1);
9567 STRIP_NOPS (op1);
9568 inner_code = TREE_CODE (op1);
9569 if (inner_code == INTEGER_CST)
9570 {
9571 *residue += TREE_INT_CST_LOW (op1);
9572 return modulus;
9573 }
9574 else if (inner_code == MULT_EXPR)
9575 {
9576 op1 = TREE_OPERAND (op1, 1);
9577 if (TREE_CODE (op1) == INTEGER_CST)
9578 {
9579 unsigned HOST_WIDE_INT align;
9580
9581 /* Compute the greatest power-of-2 divisor of op1. */
9582 align = TREE_INT_CST_LOW (op1);
9583 align &= -align;
9584
9585 /* If align is non-zero and less than modulus, replace
9586 modulus with align. If align is 0, then either op1 is 0
9587 or the greatest power-of-2 divisor of op1 doesn't fit in an
9588 unsigned HOST_WIDE_INT. In either case, no additional
9589 constraint is imposed. */
9590 if (align)
9591 modulus = MIN (modulus, align);
9592
9593 return modulus;
9594 }
9595 }
9596 }
9597
9598 /* If we get here, we were unable to determine anything useful about the
9599 expression. */
9600 return 1;
9601 }
9602
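/* Example (hypothetical): for the expression &var + 6, where VAR is
   known to be 8-byte aligned, the recursion yields modulus 8 and
   residue 6, i.e. the low three bits of the pointer value are known
   to be 110.  The fallback return of modulus 1 with residue 0
   constrains nothing, since 1 divides P - 0 for every P.  */
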
9603 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9604 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9605
9606 static bool
9607 vec_cst_ctor_to_array (tree arg, tree *elts)
9608 {
9609 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9610
9611 if (TREE_CODE (arg) == VECTOR_CST)
9612 {
9613 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9614 elts[i] = VECTOR_CST_ELT (arg, i);
9615 }
9616 else if (TREE_CODE (arg) == CONSTRUCTOR)
9617 {
9618 constructor_elt *elt;
9619
9620 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9621 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9622 return false;
9623 else
9624 elts[i] = elt->value;
9625 }
9626 else
9627 return false;
9628 for (; i < nelts; i++)
9629 elts[i]
9630 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9631 return true;
9632 }
9633
9634 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9635 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9636 NULL_TREE otherwise. */
9637
9638 static tree
9639 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9640 {
9641 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9642 tree *elts;
9643 bool need_ctor = false;
9644
9645 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9646 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9647 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9648 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9649 return NULL_TREE;
9650
9651 elts = XALLOCAVEC (tree, nelts * 3);
9652 if (!vec_cst_ctor_to_array (arg0, elts)
9653 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9654 return NULL_TREE;
9655
9656 for (i = 0; i < nelts; i++)
9657 {
9658 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9659 need_ctor = true;
9660 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9661 }
9662
9663 if (need_ctor)
9664 {
9665 vec<constructor_elt, va_gc> *v;
9666 vec_alloc (v, nelts);
9667 for (i = 0; i < nelts; i++)
9668 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9669 return build_constructor (type, v);
9670 }
9671 else
9672 return build_vector (type, &elts[2 * nelts]);
9673 }
9674
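/* Example (hypothetical constants): with nelts == 4,

       arg0 = { 1, 2, 3, 4 },  arg1 = { 5, 6, 7, 8 },
       sel  = { 0, 5, 2, 7 }

   selectors 0..3 pick from ARG0 and 4..7 from ARG1, giving the
   folded vector { 1, 6, 3, 8 }.  A CONSTRUCTOR instead of a
   VECTOR_CST is built only if some selected element is not a
   constant.  */
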
9675 /* Try to fold a pointer difference of type TYPE between two address
9676 expressions of array references AREF0 and AREF1 using location LOC. Return a
9677 simplified expression for the difference or NULL_TREE. */
9678
9679 static tree
9680 fold_addr_of_array_ref_difference (location_t loc, tree type,
9681 tree aref0, tree aref1)
9682 {
9683 tree base0 = TREE_OPERAND (aref0, 0);
9684 tree base1 = TREE_OPERAND (aref1, 0);
9685 tree base_offset = build_int_cst (type, 0);
9686
9687 /* If the bases are array references as well, recurse. If the bases
9688 are pointer indirections compute the difference of the pointers.
9689 If the bases are equal, we are set. */
9690 if ((TREE_CODE (base0) == ARRAY_REF
9691 && TREE_CODE (base1) == ARRAY_REF
9692 && (base_offset
9693 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9694 || (INDIRECT_REF_P (base0)
9695 && INDIRECT_REF_P (base1)
9696 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9697 TREE_OPERAND (base0, 0),
9698 TREE_OPERAND (base1, 0))))
9699 || operand_equal_p (base0, base1, 0))
9700 {
9701 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9702 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9703 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9704 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9705 return fold_build2_loc (loc, PLUS_EXPR, type,
9706 base_offset,
9707 fold_build2_loc (loc, MULT_EXPR, type,
9708 diff, esz));
9709 }
9710 return NULL_TREE;
9711 }
9712
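/* Example (hypothetical): for int a[], the byte-level difference

       &a[i] - &a[j]   ->  (i - j) * 4

   and for nested references like &a[i][k] - &a[j][k] the recursion
   first folds the outer index difference and then adds the inner
   one scaled by its element size.  */
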
9713 /* If the real or vector real constant CST of type TYPE has an exact
9714 inverse, return it, else return NULL. */
9715
9716 static tree
9717 exact_inverse (tree type, tree cst)
9718 {
9719 REAL_VALUE_TYPE r;
9720 tree unit_type, *elts;
9721 enum machine_mode mode;
9722 unsigned vec_nelts, i;
9723
9724 switch (TREE_CODE (cst))
9725 {
9726 case REAL_CST:
9727 r = TREE_REAL_CST (cst);
9728
9729 if (exact_real_inverse (TYPE_MODE (type), &r))
9730 return build_real (type, r);
9731
9732 return NULL_TREE;
9733
9734 case VECTOR_CST:
9735 vec_nelts = VECTOR_CST_NELTS (cst);
9736 elts = XALLOCAVEC (tree, vec_nelts);
9737 unit_type = TREE_TYPE (type);
9738 mode = TYPE_MODE (unit_type);
9739
9740 for (i = 0; i < vec_nelts; i++)
9741 {
9742 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9743 if (!exact_real_inverse (mode, &r))
9744 return NULL_TREE;
9745 elts[i] = build_real (unit_type, r);
9746 }
9747
9748 return build_vector (type, elts);
9749
9750 default:
9751 return NULL_TREE;
9752 }
9753 }
9754
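/* Examples (hypothetical constants): 4.0 has the exact binary
   inverse 0.25, so x / 4.0 can later be rewritten as x * 0.25;
   3.0 has no exact inverse (0.333... is not representable in
   binary floating point), so NULL_TREE is returned and the
   division is left alone.  */
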
9755 /* Fold a binary expression of code CODE and type TYPE with operands
9756 OP0 and OP1. LOC is the location of the resulting expression.
9757 Return the folded expression if folding is successful. Otherwise,
9758 return NULL_TREE. */
9759
9760 tree
9761 fold_binary_loc (location_t loc,
9762 enum tree_code code, tree type, tree op0, tree op1)
9763 {
9764 enum tree_code_class kind = TREE_CODE_CLASS (code);
9765 tree arg0, arg1, tem;
9766 tree t1 = NULL_TREE;
9767 bool strict_overflow_p;
9768
9769 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9770 && TREE_CODE_LENGTH (code) == 2
9771 && op0 != NULL_TREE
9772 && op1 != NULL_TREE);
9773
9774 arg0 = op0;
9775 arg1 = op1;
9776
9777 /* Strip any conversions that don't change the mode. This is
9778 safe for every expression, except for a comparison expression
9779 because its signedness is derived from its operands. So, in
9780 the latter case, only strip conversions that don't change the
9781 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9782 preserved.
9783
9784 Note that this is done as an internal manipulation within the
9785 constant folder, in order to find the simplest representation
9786 of the arguments so that their form can be studied. In any
9787 cases, the appropriate type conversions should be put back in
9788 the tree that will get out of the constant folder. */
9789
9790 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9791 {
9792 STRIP_SIGN_NOPS (arg0);
9793 STRIP_SIGN_NOPS (arg1);
9794 }
9795 else
9796 {
9797 STRIP_NOPS (arg0);
9798 STRIP_NOPS (arg1);
9799 }
9800
9801 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9802 constant but we can't do arithmetic on them. */
9803 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9804 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9805 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9806 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9807 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9808 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9809 {
9810 if (kind == tcc_binary)
9811 {
9812 /* Make sure type and arg0 have the same saturating flag. */
9813 gcc_assert (TYPE_SATURATING (type)
9814 == TYPE_SATURATING (TREE_TYPE (arg0)));
9815 tem = const_binop (code, arg0, arg1);
9816 }
9817 else if (kind == tcc_comparison)
9818 tem = fold_relational_const (code, type, arg0, arg1);
9819 else
9820 tem = NULL_TREE;
9821
9822 if (tem != NULL_TREE)
9823 {
9824 if (TREE_TYPE (tem) != type)
9825 tem = fold_convert_loc (loc, type, tem);
9826 return tem;
9827 }
9828 }
9829
9830 /* If this is a commutative operation, and ARG0 is a constant, move it
9831 to ARG1 to reduce the number of tests below. */
9832 if (commutative_tree_code (code)
9833 && tree_swap_operands_p (arg0, arg1, true))
9834 return fold_build2_loc (loc, code, type, op1, op0);
9835
9836 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9837
9838 First check for cases where an arithmetic operation is applied to a
9839 compound, conditional, or comparison operation. Push the arithmetic
9840 operation inside the compound or conditional to see if any folding
9841 can then be done. Convert comparison to conditional for this purpose.
9842 This also optimizes non-constant cases that used to be done in
9843 expand_expr.
9844
9845 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9846 one of the operands is a comparison and the other is a comparison, a
9847 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9848 code below would make the expression more complex. Change it to a
9849 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9850 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9851
9852 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9853 || code == EQ_EXPR || code == NE_EXPR)
9854 && TREE_CODE (type) != VECTOR_TYPE
9855 && ((truth_value_p (TREE_CODE (arg0))
9856 && (truth_value_p (TREE_CODE (arg1))
9857 || (TREE_CODE (arg1) == BIT_AND_EXPR
9858 && integer_onep (TREE_OPERAND (arg1, 1)))))
9859 || (truth_value_p (TREE_CODE (arg1))
9860 && (truth_value_p (TREE_CODE (arg0))
9861 || (TREE_CODE (arg0) == BIT_AND_EXPR
9862 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9863 {
9864 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9865 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9866 : TRUTH_XOR_EXPR,
9867 boolean_type_node,
9868 fold_convert_loc (loc, boolean_type_node, arg0),
9869 fold_convert_loc (loc, boolean_type_node, arg1));
9870
9871 if (code == EQ_EXPR)
9872 tem = invert_truthvalue_loc (loc, tem);
9873
9874 return fold_convert_loc (loc, type, tem);
9875 }
9876
9877 if (TREE_CODE_CLASS (code) == tcc_binary
9878 || TREE_CODE_CLASS (code) == tcc_comparison)
9879 {
9880 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9881 {
9882 tem = fold_build2_loc (loc, code, type,
9883 fold_convert_loc (loc, TREE_TYPE (op0),
9884 TREE_OPERAND (arg0, 1)), op1);
9885 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9886 tem);
9887 }
9888 if (TREE_CODE (arg1) == COMPOUND_EXPR
9889 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9890 {
9891 tem = fold_build2_loc (loc, code, type, op0,
9892 fold_convert_loc (loc, TREE_TYPE (op1),
9893 TREE_OPERAND (arg1, 1)));
9894 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9895 tem);
9896 }
9897
9898 if (TREE_CODE (arg0) == COND_EXPR
9899 || TREE_CODE (arg0) == VEC_COND_EXPR
9900 || COMPARISON_CLASS_P (arg0))
9901 {
9902 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9903 arg0, arg1,
9904 /*cond_first_p=*/1);
9905 if (tem != NULL_TREE)
9906 return tem;
9907 }
9908
9909 if (TREE_CODE (arg1) == COND_EXPR
9910 || TREE_CODE (arg1) == VEC_COND_EXPR
9911 || COMPARISON_CLASS_P (arg1))
9912 {
9913 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9914 arg1, arg0,
9915 /*cond_first_p=*/0);
9916 if (tem != NULL_TREE)
9917 return tem;
9918 }
9919 }
9920
9921 switch (code)
9922 {
9923 case MEM_REF:
9924 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9925 if (TREE_CODE (arg0) == ADDR_EXPR
9926 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9927 {
9928 tree iref = TREE_OPERAND (arg0, 0);
9929 return fold_build2 (MEM_REF, type,
9930 TREE_OPERAND (iref, 0),
9931 int_const_binop (PLUS_EXPR, arg1,
9932 TREE_OPERAND (iref, 1)));
9933 }
9934
9935 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9936 if (TREE_CODE (arg0) == ADDR_EXPR
9937 && handled_component_p (TREE_OPERAND (arg0, 0)))
9938 {
9939 tree base;
9940 HOST_WIDE_INT coffset;
9941 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9942 &coffset);
9943 if (!base)
9944 return NULL_TREE;
9945 return fold_build2 (MEM_REF, type,
9946 build_fold_addr_expr (base),
9947 int_const_binop (PLUS_EXPR, arg1,
9948 size_int (coffset)));
9949 }
9950
9951 return NULL_TREE;
9952
9953 case POINTER_PLUS_EXPR:
9954 /* 0 +p index -> (type)index */
9955 if (integer_zerop (arg0))
9956 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9957
9958 /* PTR +p 0 -> PTR */
9959 if (integer_zerop (arg1))
9960 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9961
9962 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9963 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9964 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9965 return fold_convert_loc (loc, type,
9966 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9967 fold_convert_loc (loc, sizetype,
9968 arg1),
9969 fold_convert_loc (loc, sizetype,
9970 arg0)));
9971
9972 /* (PTR +p B) +p A -> PTR +p (B + A) */
9973 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9974 {
9975 tree inner;
9976 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9977 tree arg00 = TREE_OPERAND (arg0, 0);
9978 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9979 arg01, fold_convert_loc (loc, sizetype, arg1));
9980 return fold_convert_loc (loc, type,
9981 fold_build_pointer_plus_loc (loc,
9982 arg00, inner));
9983 }
9984
9985 /* PTR_CST +p CST -> CST1 */
9986 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9987 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9988 fold_convert_loc (loc, type, arg1));
9989
9990 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9991 of the array. The loop optimizer sometimes produces this type of
9992 expression. */
9993 if (TREE_CODE (arg0) == ADDR_EXPR)
9994 {
9995 tem = try_move_mult_to_index (loc, arg0,
9996 fold_convert_loc (loc,
9997 ssizetype, arg1));
9998 if (tem)
9999 return fold_convert_loc (loc, type, tem);
10000 }
10001
10002 return NULL_TREE;
10003
10004 case PLUS_EXPR:
10005 /* A + (-B) -> A - B */
10006 if (TREE_CODE (arg1) == NEGATE_EXPR)
10007 return fold_build2_loc (loc, MINUS_EXPR, type,
10008 fold_convert_loc (loc, type, arg0),
10009 fold_convert_loc (loc, type,
10010 TREE_OPERAND (arg1, 0)));
10011 /* (-A) + B -> B - A */
10012 if (TREE_CODE (arg0) == NEGATE_EXPR
10013 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10014 return fold_build2_loc (loc, MINUS_EXPR, type,
10015 fold_convert_loc (loc, type, arg1),
10016 fold_convert_loc (loc, type,
10017 TREE_OPERAND (arg0, 0)));
10018
10019 if (INTEGRAL_TYPE_P (type))
10020 {
10021 /* Convert ~A + 1 to -A. */
10022 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10023 && integer_onep (arg1))
10024 return fold_build1_loc (loc, NEGATE_EXPR, type,
10025 fold_convert_loc (loc, type,
10026 TREE_OPERAND (arg0, 0)));
10027
10028 /* ~X + X is -1. */
10029 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10030 && !TYPE_OVERFLOW_TRAPS (type))
10031 {
10032 tree tem = TREE_OPERAND (arg0, 0);
10033
10034 STRIP_NOPS (tem);
10035 if (operand_equal_p (tem, arg1, 0))
10036 {
10037 t1 = build_int_cst_type (type, -1);
10038 return omit_one_operand_loc (loc, type, t1, arg1);
10039 }
10040 }
10041
10042 /* X + ~X is -1. */
10043 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10044 && !TYPE_OVERFLOW_TRAPS (type))
10045 {
10046 tree tem = TREE_OPERAND (arg1, 0);
10047
10048 STRIP_NOPS (tem);
10049 if (operand_equal_p (arg0, tem, 0))
10050 {
10051 t1 = build_int_cst_type (type, -1);
10052 return omit_one_operand_loc (loc, type, t1, arg0);
10053 }
10054 }
10055
10056 /* X + (X / CST) * -CST is X % CST. */
10057 if (TREE_CODE (arg1) == MULT_EXPR
10058 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10059 && operand_equal_p (arg0,
10060 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10061 {
10062 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10063 tree cst1 = TREE_OPERAND (arg1, 1);
10064 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10065 cst1, cst0);
10066 if (sum && integer_zerop (sum))
10067 return fold_convert_loc (loc, type,
10068 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10069 TREE_TYPE (arg0), arg0,
10070 cst0));
10071 }
10072 }
10073
10074 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10075 one. Make sure the type is not saturating and has the signedness of
10076 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10077 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10078 if ((TREE_CODE (arg0) == MULT_EXPR
10079 || TREE_CODE (arg1) == MULT_EXPR)
10080 && !TYPE_SATURATING (type)
10081 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10082 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10083 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10084 {
10085 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10086 if (tem)
10087 return tem;
10088 }
10089
10090 if (! FLOAT_TYPE_P (type))
10091 {
10092 if (integer_zerop (arg1))
10093 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10094
10095 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10096 with a constant, and the two constants have no bits in common,
10097 we should treat this as a BIT_IOR_EXPR since this may produce more
10098 simplifications. */
10099 if (TREE_CODE (arg0) == BIT_AND_EXPR
10100 && TREE_CODE (arg1) == BIT_AND_EXPR
10101 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10102 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10103 && integer_zerop (const_binop (BIT_AND_EXPR,
10104 TREE_OPERAND (arg0, 1),
10105 TREE_OPERAND (arg1, 1))))
10106 {
10107 code = BIT_IOR_EXPR;
10108 goto bit_ior;
10109 }
10110
10111 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10112 (plus (plus (mult) (mult)) (foo)) so that we can
10113 take advantage of the factoring cases below. */
10114 if (TYPE_OVERFLOW_WRAPS (type)
10115 && (((TREE_CODE (arg0) == PLUS_EXPR
10116 || TREE_CODE (arg0) == MINUS_EXPR)
10117 && TREE_CODE (arg1) == MULT_EXPR)
10118 || ((TREE_CODE (arg1) == PLUS_EXPR
10119 || TREE_CODE (arg1) == MINUS_EXPR)
10120 && TREE_CODE (arg0) == MULT_EXPR)))
10121 {
10122 tree parg0, parg1, parg, marg;
10123 enum tree_code pcode;
10124
10125 if (TREE_CODE (arg1) == MULT_EXPR)
10126 parg = arg0, marg = arg1;
10127 else
10128 parg = arg1, marg = arg0;
10129 pcode = TREE_CODE (parg);
10130 parg0 = TREE_OPERAND (parg, 0);
10131 parg1 = TREE_OPERAND (parg, 1);
10132 STRIP_NOPS (parg0);
10133 STRIP_NOPS (parg1);
10134
10135 if (TREE_CODE (parg0) == MULT_EXPR
10136 && TREE_CODE (parg1) != MULT_EXPR)
10137 return fold_build2_loc (loc, pcode, type,
10138 fold_build2_loc (loc, PLUS_EXPR, type,
10139 fold_convert_loc (loc, type,
10140 parg0),
10141 fold_convert_loc (loc, type,
10142 marg)),
10143 fold_convert_loc (loc, type, parg1));
10144 if (TREE_CODE (parg0) != MULT_EXPR
10145 && TREE_CODE (parg1) == MULT_EXPR)
10146 return
10147 fold_build2_loc (loc, PLUS_EXPR, type,
10148 fold_convert_loc (loc, type, parg0),
10149 fold_build2_loc (loc, pcode, type,
10150 fold_convert_loc (loc, type, marg),
10151 fold_convert_loc (loc, type,
10152 parg1)));
10153 }
10154 }
10155 else
10156 {
10157 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10158 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10159 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10160
10161 /* Likewise if the operands are reversed. */
10162 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10163 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10164
10165 /* Convert X + -C into X - C. */
10166 if (TREE_CODE (arg1) == REAL_CST
10167 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10168 {
10169 tem = fold_negate_const (arg1, type);
10170 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10171 return fold_build2_loc (loc, MINUS_EXPR, type,
10172 fold_convert_loc (loc, type, arg0),
10173 fold_convert_loc (loc, type, tem));
10174 }
10175
10176 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10177 to __complex__ ( x, y ). This is not the same for SNaNs or
10178 if signed zeros are involved. */
10179 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10180 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10181 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10182 {
10183 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10184 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10185 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10186 bool arg0rz = false, arg0iz = false;
10187 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10188 || (arg0i && (arg0iz = real_zerop (arg0i))))
10189 {
10190 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10191 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10192 if (arg0rz && arg1i && real_zerop (arg1i))
10193 {
10194 tree rp = arg1r ? arg1r
10195 : build1 (REALPART_EXPR, rtype, arg1);
10196 tree ip = arg0i ? arg0i
10197 : build1 (IMAGPART_EXPR, rtype, arg0);
10198 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10199 }
10200 else if (arg0iz && arg1r && real_zerop (arg1r))
10201 {
10202 tree rp = arg0r ? arg0r
10203 : build1 (REALPART_EXPR, rtype, arg0);
10204 tree ip = arg1i ? arg1i
10205 : build1 (IMAGPART_EXPR, rtype, arg1);
10206 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10207 }
10208 }
10209 }
10210
10211 if (flag_unsafe_math_optimizations
10212 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10213 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10214 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10215 return tem;
10216
10217 /* Convert x+x into x*2.0. */
10218 if (operand_equal_p (arg0, arg1, 0)
10219 && SCALAR_FLOAT_TYPE_P (type))
10220 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10221 build_real (type, dconst2));
10222
10223 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10224 We associate floats only if the user has specified
10225 -fassociative-math. */
10226 if (flag_associative_math
10227 && TREE_CODE (arg1) == PLUS_EXPR
10228 && TREE_CODE (arg0) != MULT_EXPR)
10229 {
10230 tree tree10 = TREE_OPERAND (arg1, 0);
10231 tree tree11 = TREE_OPERAND (arg1, 1);
10232 if (TREE_CODE (tree11) == MULT_EXPR
10233 && TREE_CODE (tree10) == MULT_EXPR)
10234 {
10235 tree tree0;
10236 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10237 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10238 }
10239 }
10240 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10241 We associate floats only if the user has specified
10242 -fassociative-math. */
10243 if (flag_associative_math
10244 && TREE_CODE (arg0) == PLUS_EXPR
10245 && TREE_CODE (arg1) != MULT_EXPR)
10246 {
10247 tree tree00 = TREE_OPERAND (arg0, 0);
10248 tree tree01 = TREE_OPERAND (arg0, 1);
10249 if (TREE_CODE (tree01) == MULT_EXPR
10250 && TREE_CODE (tree00) == MULT_EXPR)
10251 {
10252 tree tree0;
10253 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10254 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10255 }
10256 }
10257 }
10258
10259 bit_rotate:
10260 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10261 is a rotate of A by C1 bits. */
10262 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10263 is a rotate of A by B bits. */
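/* Illustrative example, assuming a 32-bit unsigned A: both
   (A << 8) + (A >> 24) and (A << B) + (A >> (32 - B)) fold to a
   left-rotate of A, by 8 and by B bits respectively.  */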
10264 {
10265 enum tree_code code0, code1;
10266 tree rtype;
10267 code0 = TREE_CODE (arg0);
10268 code1 = TREE_CODE (arg1);
10269 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10270 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10271 && operand_equal_p (TREE_OPERAND (arg0, 0),
10272 TREE_OPERAND (arg1, 0), 0)
10273 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10274 TYPE_UNSIGNED (rtype))
10275 /* Only create rotates in complete modes. Other cases are not
10276 expanded properly. */
10277 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10278 {
10279 tree tree01, tree11;
10280 enum tree_code code01, code11;
10281
10282 tree01 = TREE_OPERAND (arg0, 1);
10283 tree11 = TREE_OPERAND (arg1, 1);
10284 STRIP_NOPS (tree01);
10285 STRIP_NOPS (tree11);
10286 code01 = TREE_CODE (tree01);
10287 code11 = TREE_CODE (tree11);
10288 if (code01 == INTEGER_CST
10289 && code11 == INTEGER_CST
10290 && TREE_INT_CST_HIGH (tree01) == 0
10291 && TREE_INT_CST_HIGH (tree11) == 0
10292 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10293 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10294 {
10295 tem = build2_loc (loc, LROTATE_EXPR,
10296 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10297 TREE_OPERAND (arg0, 0),
10298 code0 == LSHIFT_EXPR ? tree01 : tree11);
10299 return fold_convert_loc (loc, type, tem);
10300 }
10301 else if (code11 == MINUS_EXPR)
10302 {
10303 tree tree110, tree111;
10304 tree110 = TREE_OPERAND (tree11, 0);
10305 tree111 = TREE_OPERAND (tree11, 1);
10306 STRIP_NOPS (tree110);
10307 STRIP_NOPS (tree111);
10308 if (TREE_CODE (tree110) == INTEGER_CST
10309 && 0 == compare_tree_int (tree110,
10310 TYPE_PRECISION
10311 (TREE_TYPE (TREE_OPERAND
10312 (arg0, 0))))
10313 && operand_equal_p (tree01, tree111, 0))
10314 return
10315 fold_convert_loc (loc, type,
10316 build2 ((code0 == LSHIFT_EXPR
10317 ? LROTATE_EXPR
10318 : RROTATE_EXPR),
10319 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10320 TREE_OPERAND (arg0, 0), tree01));
10321 }
10322 else if (code01 == MINUS_EXPR)
10323 {
10324 tree tree010, tree011;
10325 tree010 = TREE_OPERAND (tree01, 0);
10326 tree011 = TREE_OPERAND (tree01, 1);
10327 STRIP_NOPS (tree010);
10328 STRIP_NOPS (tree011);
10329 if (TREE_CODE (tree010) == INTEGER_CST
10330 && 0 == compare_tree_int (tree010,
10331 TYPE_PRECISION
10332 (TREE_TYPE (TREE_OPERAND
10333 (arg0, 0))))
10334 && operand_equal_p (tree11, tree011, 0))
10335 return fold_convert_loc
10336 (loc, type,
10337 build2 ((code0 != LSHIFT_EXPR
10338 ? LROTATE_EXPR
10339 : RROTATE_EXPR),
10340 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10341 TREE_OPERAND (arg0, 0), tree11));
10342 }
10343 }
10344 }
10345
10346 associate:
10347 /* In most languages, we can't associate operations on floats
10348 through parentheses. Rather than remember where the parentheses
10349 were, we don't associate floats at all, unless the user has
10350 specified -fassociative-math.
10351 Also, we need to make sure the type is not saturating. */
10352
10353 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10354 && !TYPE_SATURATING (type))
10355 {
10356 tree var0, con0, lit0, minus_lit0;
10357 tree var1, con1, lit1, minus_lit1;
10358 tree atype = type;
10359 bool ok = true;
10360
10361 /* Split both trees into variables, constants, and literals. Then
10362 associate each group together, the constants with literals,
10363 then the result with variables. This increases the chances of
10364 literals being recombined later and of generating relocatable
10365 expressions for the sum of a constant and literal. */
10366 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10367 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10368 code == MINUS_EXPR);
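/* Illustrative example: for (x + 5) + (y + 3), the variable parts
   x and y and the literal parts 5 and 3 are regrouped so that the
   literals can be combined, yielding (x + y) + 8.  */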
10369
10370 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10371 if (code == MINUS_EXPR)
10372 code = PLUS_EXPR;
10373
10374 /* With undefined overflow prefer doing association in a type
10375 which wraps on overflow, if that is one of the operand types. */
10376 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10377 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10378 {
10379 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10380 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10381 atype = TREE_TYPE (arg0);
10382 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10383 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10384 atype = TREE_TYPE (arg1);
10385 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10386 }
10387
10388 /* With undefined overflow we can only associate constants with one
10389 variable, and constants whose association doesn't overflow. */
10390 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10391 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10392 {
10393 if (var0 && var1)
10394 {
10395 tree tmp0 = var0;
10396 tree tmp1 = var1;
10397
10398 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10399 tmp0 = TREE_OPERAND (tmp0, 0);
10400 if (CONVERT_EXPR_P (tmp0)
10401 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10402 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10403 <= TYPE_PRECISION (atype)))
10404 tmp0 = TREE_OPERAND (tmp0, 0);
10405 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10406 tmp1 = TREE_OPERAND (tmp1, 0);
10407 if (CONVERT_EXPR_P (tmp1)
10408 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10409 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10410 <= TYPE_PRECISION (atype)))
10411 tmp1 = TREE_OPERAND (tmp1, 0);
10412 /* The only case we can still associate with two variables
10413 is if they are the same, modulo negation and bit-pattern
10414 preserving conversions. */
10415 if (!operand_equal_p (tmp0, tmp1, 0))
10416 ok = false;
10417 }
10418 }
10419
10420 /* Only do something if we found more than two objects. Otherwise,
10421 nothing has changed and we risk infinite recursion. */
10422 if (ok
10423 && (2 < ((var0 != 0) + (var1 != 0)
10424 + (con0 != 0) + (con1 != 0)
10425 + (lit0 != 0) + (lit1 != 0)
10426 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10427 {
10428 bool any_overflows = false;
10429 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10430 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10431 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10432 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10433 var0 = associate_trees (loc, var0, var1, code, atype);
10434 con0 = associate_trees (loc, con0, con1, code, atype);
10435 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10436 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10437 code, atype);
10438
10439 /* Preserve the MINUS_EXPR if the negative part of the literal is
10440 greater than the positive part. Otherwise, the multiplicative
10441 folding code (i.e. extract_muldiv) may be fooled in case
10442 unsigned constants are subtracted, like in the following
10443 example: ((X*2 + 4) - 8U)/2. */
10444 if (minus_lit0 && lit0)
10445 {
10446 if (TREE_CODE (lit0) == INTEGER_CST
10447 && TREE_CODE (minus_lit0) == INTEGER_CST
10448 && tree_int_cst_lt (lit0, minus_lit0))
10449 {
10450 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10451 MINUS_EXPR, atype);
10452 lit0 = 0;
10453 }
10454 else
10455 {
10456 lit0 = associate_trees (loc, lit0, minus_lit0,
10457 MINUS_EXPR, atype);
10458 minus_lit0 = 0;
10459 }
10460 }
10461
10462 /* Don't introduce overflows through reassociation. */
10463 if (!any_overflows
10464 && ((lit0 && TREE_OVERFLOW (lit0))
10465 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10466 return NULL_TREE;
10467
10468 if (minus_lit0)
10469 {
10470 if (con0 == 0)
10471 return
10472 fold_convert_loc (loc, type,
10473 associate_trees (loc, var0, minus_lit0,
10474 MINUS_EXPR, atype));
10475 else
10476 {
10477 con0 = associate_trees (loc, con0, minus_lit0,
10478 MINUS_EXPR, atype);
10479 return
10480 fold_convert_loc (loc, type,
10481 associate_trees (loc, var0, con0,
10482 PLUS_EXPR, atype));
10483 }
10484 }
10485
10486 con0 = associate_trees (loc, con0, lit0, code, atype);
10487 return
10488 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10489 code, atype));
10490 }
10491 }
10492
10493 return NULL_TREE;
10494
10495 case MINUS_EXPR:
10496 /* Pointer simplifications for subtraction, simple reassociations. */
10497 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10498 {
10499 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10500 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10501 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10502 {
10503 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10504 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10505 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10506 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10507 return fold_build2_loc (loc, PLUS_EXPR, type,
10508 fold_build2_loc (loc, MINUS_EXPR, type,
10509 arg00, arg10),
10510 fold_build2_loc (loc, MINUS_EXPR, type,
10511 arg01, arg11));
10512 }
10513 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10514 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10515 {
10516 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10517 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10518 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10519 fold_convert_loc (loc, type, arg1));
10520 if (tmp)
10521 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10522 }
10523 }
10524 /* A - (-B) -> A + B */
10525 if (TREE_CODE (arg1) == NEGATE_EXPR)
10526 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10527 fold_convert_loc (loc, type,
10528 TREE_OPERAND (arg1, 0)));
10529 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10530 if (TREE_CODE (arg0) == NEGATE_EXPR
10531 && (FLOAT_TYPE_P (type)
10532 || INTEGRAL_TYPE_P (type))
10533 && negate_expr_p (arg1)
10534 && reorder_operands_p (arg0, arg1))
10535 return fold_build2_loc (loc, MINUS_EXPR, type,
10536 fold_convert_loc (loc, type,
10537 negate_expr (arg1)),
10538 fold_convert_loc (loc, type,
10539 TREE_OPERAND (arg0, 0)));
10540 /* Convert -A - 1 to ~A. */
10541 if (INTEGRAL_TYPE_P (type)
10542 && TREE_CODE (arg0) == NEGATE_EXPR
10543 && integer_onep (arg1)
10544 && !TYPE_OVERFLOW_TRAPS (type))
10545 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10546 fold_convert_loc (loc, type,
10547 TREE_OPERAND (arg0, 0)));
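/* This fold, and the -1 - A fold below, rely on the two's complement
   identity ~A == -A - 1; e.g. A == 5 gives -5 - 1 == -6 == ~5.  */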
10548
10549 /* Convert -1 - A to ~A. */
10550 if (INTEGRAL_TYPE_P (type)
10551 && integer_all_onesp (arg0))
10552 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10553
10554
10555 /* X - (X / CST) * CST is X % CST. */
10556 if (INTEGRAL_TYPE_P (type)
10557 && TREE_CODE (arg1) == MULT_EXPR
10558 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10559 && operand_equal_p (arg0,
10560 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10561 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10562 TREE_OPERAND (arg1, 1), 0))
10563 return
10564 fold_convert_loc (loc, type,
10565 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10566 arg0, TREE_OPERAND (arg1, 1)));
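/* Illustrative example: x - (x / 8) * 8 folds to x % 8.  With C's
   truncating integer division this also holds for negative x, e.g.
   x == -13 gives -13 - (-1 * 8) == -5 == -13 % 8.  */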
10567
10568 if (! FLOAT_TYPE_P (type))
10569 {
10570 if (integer_zerop (arg0))
10571 return negate_expr (fold_convert_loc (loc, type, arg1));
10572 if (integer_zerop (arg1))
10573 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10574
10575 /* Fold A - (A & B) into ~B & A. */
10576 if (!TREE_SIDE_EFFECTS (arg0)
10577 && TREE_CODE (arg1) == BIT_AND_EXPR)
10578 {
10579 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10580 {
10581 tree arg10 = fold_convert_loc (loc, type,
10582 TREE_OPERAND (arg1, 0));
10583 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10584 fold_build1_loc (loc, BIT_NOT_EXPR,
10585 type, arg10),
10586 fold_convert_loc (loc, type, arg0));
10587 }
10588 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10589 {
10590 tree arg11 = fold_convert_loc (loc,
10591 type, TREE_OPERAND (arg1, 1));
10592 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10593 fold_build1_loc (loc, BIT_NOT_EXPR,
10594 type, arg11),
10595 fold_convert_loc (loc, type, arg0));
10596 }
10597 }
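/* Worked example for the fold above: A == 0b1101, B == 0b1011 gives
   A - (A & B) == 13 - 9 == 4 and ~B & A == 0b0100 == 4.  The identity
   holds because A & B consists solely of bits already set in A, so
   the subtraction can never borrow.  */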
10598
10599 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10600 any power of 2 minus 1. */
10601 if (TREE_CODE (arg0) == BIT_AND_EXPR
10602 && TREE_CODE (arg1) == BIT_AND_EXPR
10603 && operand_equal_p (TREE_OPERAND (arg0, 0),
10604 TREE_OPERAND (arg1, 0), 0))
10605 {
10606 tree mask0 = TREE_OPERAND (arg0, 1);
10607 tree mask1 = TREE_OPERAND (arg1, 1);
10608 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10609
10610 if (operand_equal_p (tem, mask1, 0))
10611 {
10612 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10613 TREE_OPERAND (arg0, 0), mask1);
10614 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10615 }
10616 }
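/* Worked example for the fold above, with B == 7 and A == 0b101101:
   (A & ~B) - (A & B) == 40 - 5 == 35 and (A ^ B) - B == 42 - 7 == 35.
   The identity holds because A ^ B == (A & ~B) + (B - (A & B))
   whenever B is a power of 2 minus 1.  */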
10617 }
10618
10619 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10620 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10621 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10622
10623 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10624 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10625 (-ARG1 + ARG0) reduces to -ARG1. */
10626 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10627 return negate_expr (fold_convert_loc (loc, type, arg1));
10628
10629 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10630 __complex__ ( x, -y ). This is not the same for SNaNs or if
10631 signed zeros are involved. */
10632 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10633 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10634 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10635 {
10636 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10637 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10638 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10639 bool arg0rz = false, arg0iz = false;
10640 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10641 || (arg0i && (arg0iz = real_zerop (arg0i))))
10642 {
10643 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10644 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10645 if (arg0rz && arg1i && real_zerop (arg1i))
10646 {
10647 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10648 arg1r ? arg1r
10649 : build1 (REALPART_EXPR, rtype, arg1));
10650 tree ip = arg0i ? arg0i
10651 : build1 (IMAGPART_EXPR, rtype, arg0);
10652 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10653 }
10654 else if (arg0iz && arg1r && real_zerop (arg1r))
10655 {
10656 tree rp = arg0r ? arg0r
10657 : build1 (REALPART_EXPR, rtype, arg0);
10658 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10659 arg1i ? arg1i
10660 : build1 (IMAGPART_EXPR, rtype, arg1));
10661 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10662 }
10663 }
10664 }
10665
10666 /* Fold &x - &x. This can happen from &x.foo - &x.
10667 This is unsafe for certain floats even in non-IEEE formats.
10668 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10669 Also note that operand_equal_p is always false if an operand
10670 is volatile. */
10671
10672 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10673 && operand_equal_p (arg0, arg1, 0))
10674 return build_zero_cst (type);
10675
10676 /* A - B -> A + (-B) if B is easily negatable. */
10677 if (negate_expr_p (arg1)
10678 && ((FLOAT_TYPE_P (type)
10679 /* Avoid this transformation if B is a positive REAL_CST. */
10680 && (TREE_CODE (arg1) != REAL_CST
10681 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10682 || INTEGRAL_TYPE_P (type)))
10683 return fold_build2_loc (loc, PLUS_EXPR, type,
10684 fold_convert_loc (loc, type, arg0),
10685 fold_convert_loc (loc, type,
10686 negate_expr (arg1)));
10687
10688 /* Try folding difference of addresses. */
10689 {
10690 HOST_WIDE_INT diff;
10691
10692 if ((TREE_CODE (arg0) == ADDR_EXPR
10693 || TREE_CODE (arg1) == ADDR_EXPR)
10694 && ptr_difference_const (arg0, arg1, &diff))
10695 return build_int_cst_type (type, diff);
10696 }
10697
10698 /* Fold &a[i] - &a[j] to i-j. */
10699 if (TREE_CODE (arg0) == ADDR_EXPR
10700 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10701 && TREE_CODE (arg1) == ADDR_EXPR
10702 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10703 {
10704 tree tem = fold_addr_of_array_ref_difference (loc, type,
10705 TREE_OPERAND (arg0, 0),
10706 TREE_OPERAND (arg1, 0));
10707 if (tem)
10708 return tem;
10709 }
10710
10711 if (FLOAT_TYPE_P (type)
10712 && flag_unsafe_math_optimizations
10713 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10714 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10715 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10716 return tem;
10717
10718 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10719 one. Make sure the type is not saturating and has the signedness of
10720 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10721 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10722 if ((TREE_CODE (arg0) == MULT_EXPR
10723 || TREE_CODE (arg1) == MULT_EXPR)
10724 && !TYPE_SATURATING (type)
10725 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10726 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10727 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10728 {
10729 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10730 if (tem)
10731 return tem;
10732 }
10733
10734 goto associate;
10735
10736 case MULT_EXPR:
10737 /* (-A) * (-B) -> A * B */
10738 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10739 return fold_build2_loc (loc, MULT_EXPR, type,
10740 fold_convert_loc (loc, type,
10741 TREE_OPERAND (arg0, 0)),
10742 fold_convert_loc (loc, type,
10743 negate_expr (arg1)));
10744 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10745 return fold_build2_loc (loc, MULT_EXPR, type,
10746 fold_convert_loc (loc, type,
10747 negate_expr (arg0)),
10748 fold_convert_loc (loc, type,
10749 TREE_OPERAND (arg1, 0)));
10750
10751 if (! FLOAT_TYPE_P (type))
10752 {
10753 if (integer_zerop (arg1))
10754 return omit_one_operand_loc (loc, type, arg1, arg0);
10755 if (integer_onep (arg1))
10756 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10757 /* Transform x * -1 into -x. Make sure to do the negation
10758 on the original operand with conversions not stripped
10759 because we can only strip non-sign-changing conversions. */
10760 if (integer_all_onesp (arg1))
10761 return fold_convert_loc (loc, type, negate_expr (op0));
10762 /* Transform x * -C into -x * C if x is easily negatable. */
10763 if (TREE_CODE (arg1) == INTEGER_CST
10764 && tree_int_cst_sgn (arg1) == -1
10765 && negate_expr_p (arg0)
10766 && (tem = negate_expr (arg1)) != arg1
10767 && !TREE_OVERFLOW (tem))
10768 return fold_build2_loc (loc, MULT_EXPR, type,
10769 fold_convert_loc (loc, type,
10770 negate_expr (arg0)),
10771 tem);
10772
10773 /* (a * (1 << b)) is (a << b) */
10774 if (TREE_CODE (arg1) == LSHIFT_EXPR
10775 && integer_onep (TREE_OPERAND (arg1, 0)))
10776 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10777 TREE_OPERAND (arg1, 1));
10778 if (TREE_CODE (arg0) == LSHIFT_EXPR
10779 && integer_onep (TREE_OPERAND (arg0, 0)))
10780 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10781 TREE_OPERAND (arg0, 1));
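/* Illustrative example: a * (1 << 3) and (1 << 3) * a both fold to
   a << 3, i.e. a * 8.  */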
10782
10783 /* (A + A) * C -> A * 2 * C */
10784 if (TREE_CODE (arg0) == PLUS_EXPR
10785 && TREE_CODE (arg1) == INTEGER_CST
10786 && operand_equal_p (TREE_OPERAND (arg0, 0),
10787 TREE_OPERAND (arg0, 1), 0))
10788 return fold_build2_loc (loc, MULT_EXPR, type,
10789 omit_one_operand_loc (loc, type,
10790 TREE_OPERAND (arg0, 0),
10791 TREE_OPERAND (arg0, 1)),
10792 fold_build2_loc (loc, MULT_EXPR, type,
10793 build_int_cst (type, 2) , arg1));
10794
10795 strict_overflow_p = false;
10796 if (TREE_CODE (arg1) == INTEGER_CST
10797 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10798 &strict_overflow_p)))
10799 {
10800 if (strict_overflow_p)
10801 fold_overflow_warning (("assuming signed overflow does not "
10802 "occur when simplifying "
10803 "multiplication"),
10804 WARN_STRICT_OVERFLOW_MISC);
10805 return fold_convert_loc (loc, type, tem);
10806 }
10807
10808 /* Optimize z * conj(z) for integer complex numbers. */
10809 if (TREE_CODE (arg0) == CONJ_EXPR
10810 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10811 return fold_mult_zconjz (loc, type, arg1);
10812 if (TREE_CODE (arg1) == CONJ_EXPR
10813 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10814 return fold_mult_zconjz (loc, type, arg0);
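/* For z == a + bi, z * conj(z) == (a + bi) * (a - bi)
   == a*a + b*b with a zero imaginary part, which is the form
   fold_mult_zconjz builds.  */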
10815 }
10816 else
10817 {
10818 /* Maybe fold x * 0 to 0. The expressions aren't the same
10819 when x is NaN, since x * 0 is also NaN. Nor are they the
10820 same in modes with signed zeros, since multiplying a
10821 negative value by 0 gives -0, not +0. */
10822 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10823 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10824 && real_zerop (arg1))
10825 return omit_one_operand_loc (loc, type, arg1, arg0);
10826 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10827 Likewise for complex arithmetic with signed zeros. */
10828 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10829 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10830 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10831 && real_onep (arg1))
10832 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10833
10834 /* Transform x * -1.0 into -x. */
10835 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10836 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10837 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10838 && real_minus_onep (arg1))
10839 return fold_convert_loc (loc, type, negate_expr (arg0));
10840
10841 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10842 the result for floating point types due to rounding so it is applied
10843 only if -fassociative-math was specified. */
10844 if (flag_associative_math
10845 && TREE_CODE (arg0) == RDIV_EXPR
10846 && TREE_CODE (arg1) == REAL_CST
10847 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10848 {
10849 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10850 arg1);
10851 if (tem)
10852 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10853 TREE_OPERAND (arg0, 1));
10854 }
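/* Illustrative example: with -fassociative-math, (2.0 / x) * 3.0
   folds to 6.0 / x; the constants are combined by const_binop and
   only the division remains.  */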
10855
10856 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10857 if (operand_equal_p (arg0, arg1, 0))
10858 {
10859 tree tem = fold_strip_sign_ops (arg0);
10860 if (tem != NULL_TREE)
10861 {
10862 tem = fold_convert_loc (loc, type, tem);
10863 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10864 }
10865 }
10866
10867 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10868 This is not the same for NaNs or if signed zeros are
10869 involved. */
10870 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10871 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10872 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10873 && TREE_CODE (arg1) == COMPLEX_CST
10874 && real_zerop (TREE_REALPART (arg1)))
10875 {
10876 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10877 if (real_onep (TREE_IMAGPART (arg1)))
10878 return
10879 fold_build2_loc (loc, COMPLEX_EXPR, type,
10880 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10881 rtype, arg0)),
10882 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10883 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10884 return
10885 fold_build2_loc (loc, COMPLEX_EXPR, type,
10886 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10887 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10888 rtype, arg0)));
10889 }
10890
10891 /* Optimize z * conj(z) for floating point complex numbers.
10892 Guarded by flag_unsafe_math_optimizations as non-finite
10893 imaginary components don't produce scalar results. */
10894 if (flag_unsafe_math_optimizations
10895 && TREE_CODE (arg0) == CONJ_EXPR
10896 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10897 return fold_mult_zconjz (loc, type, arg1);
10898 if (flag_unsafe_math_optimizations
10899 && TREE_CODE (arg1) == CONJ_EXPR
10900 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10901 return fold_mult_zconjz (loc, type, arg0);
10902
10903 if (flag_unsafe_math_optimizations)
10904 {
10905 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10906 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10907
10908 /* Optimizations of root(...)*root(...). */
10909 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10910 {
10911 tree rootfn, arg;
10912 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10913 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10914
10915 /* Optimize sqrt(x)*sqrt(x) as x. */
10916 if (BUILTIN_SQRT_P (fcode0)
10917 && operand_equal_p (arg00, arg10, 0)
10918 && ! HONOR_SNANS (TYPE_MODE (type)))
10919 return arg00;
10920
10921 /* Optimize root(x)*root(y) as root(x*y). */
10922 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10923 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10924 return build_call_expr_loc (loc, rootfn, 1, arg);
10925 }
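/* The guard matters here: for negative x, sqrt(x)*sqrt(x) is
   NaN*NaN rather than x, and sqrt(-4.0)*sqrt(-9.0) is NaN while
   sqrt(-4.0 * -9.0) is 6.0, so both folds are only valid under
   -funsafe-math-optimizations.  */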
10926
10927 /* Optimize expN(x)*expN(y) as expN(x+y). */
10928 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10929 {
10930 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10931 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10932 CALL_EXPR_ARG (arg0, 0),
10933 CALL_EXPR_ARG (arg1, 0));
10934 return build_call_expr_loc (loc, expfn, 1, arg);
10935 }
10936
10937 /* Optimizations of pow(...)*pow(...). */
10938 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10939 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10940 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10941 {
10942 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10943 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10944 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10945 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10946
10947 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10948 if (operand_equal_p (arg01, arg11, 0))
10949 {
10950 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10951 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10952 arg00, arg10);
10953 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10954 }
10955
10956 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10957 if (operand_equal_p (arg00, arg10, 0))
10958 {
10959 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10960 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10961 arg01, arg11);
10962 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10963 }
10964 }
10965
10966 /* Optimize tan(x)*cos(x) as sin(x). */
10967 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10968 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10969 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10970 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10971 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10972 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10973 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10974 CALL_EXPR_ARG (arg1, 0), 0))
10975 {
10976 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10977
10978 if (sinfn != NULL_TREE)
10979 return build_call_expr_loc (loc, sinfn, 1,
10980 CALL_EXPR_ARG (arg0, 0));
10981 }
10982
10983 /* Optimize x*pow(x,c) as pow(x,c+1). */
10984 if (fcode1 == BUILT_IN_POW
10985 || fcode1 == BUILT_IN_POWF
10986 || fcode1 == BUILT_IN_POWL)
10987 {
10988 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10989 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10990 if (TREE_CODE (arg11) == REAL_CST
10991 && !TREE_OVERFLOW (arg11)
10992 && operand_equal_p (arg0, arg10, 0))
10993 {
10994 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10995 REAL_VALUE_TYPE c;
10996 tree arg;
10997
10998 c = TREE_REAL_CST (arg11);
10999 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11000 arg = build_real (type, c);
11001 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11002 }
11003 }
11004
11005 /* Optimize pow(x,c)*x as pow(x,c+1). */
11006 if (fcode0 == BUILT_IN_POW
11007 || fcode0 == BUILT_IN_POWF
11008 || fcode0 == BUILT_IN_POWL)
11009 {
11010 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11011 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11012 if (TREE_CODE (arg01) == REAL_CST
11013 && !TREE_OVERFLOW (arg01)
11014 && operand_equal_p (arg1, arg00, 0))
11015 {
11016 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11017 REAL_VALUE_TYPE c;
11018 tree arg;
11019
11020 c = TREE_REAL_CST (arg01);
11021 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11022 arg = build_real (type, c);
11023 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11024 }
11025 }
11026
11027 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11028 if (!in_gimple_form
11029 && optimize
11030 && operand_equal_p (arg0, arg1, 0))
11031 {
11032 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11033
11034 if (powfn)
11035 {
11036 tree arg = build_real (type, dconst2);
11037 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11038 }
11039 }
11040 }
11041 }
11042 goto associate;
11043
11044 case BIT_IOR_EXPR:
11045 bit_ior:
11046 if (integer_all_onesp (arg1))
11047 return omit_one_operand_loc (loc, type, arg1, arg0);
11048 if (integer_zerop (arg1))
11049 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11050 if (operand_equal_p (arg0, arg1, 0))
11051 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11052
11053 /* ~X | X is -1. */
11054 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11055 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11056 {
11057 t1 = build_zero_cst (type);
11058 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11059 return omit_one_operand_loc (loc, type, t1, arg1);
11060 }
11061
11062 /* X | ~X is -1. */
11063 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11064 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11065 {
11066 t1 = build_zero_cst (type);
11067 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11068 return omit_one_operand_loc (loc, type, t1, arg0);
11069 }
11070
11071 /* Canonicalize (X & C1) | C2. */
11072 if (TREE_CODE (arg0) == BIT_AND_EXPR
11073 && TREE_CODE (arg1) == INTEGER_CST
11074 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11075 {
11076 double_int c1, c2, c3, msk;
11077 int width = TYPE_PRECISION (type), w;
11078 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11079 c2 = tree_to_double_int (arg1);
11080
11081 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11082 if ((c1 & c2) == c1)
11083 return omit_one_operand_loc (loc, type, arg1,
11084 TREE_OPERAND (arg0, 0));
11085
11086 msk = double_int::mask (width);
11087
11088 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11089 if (msk.and_not (c1 | c2).is_zero ())
11090 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11091 TREE_OPERAND (arg0, 0), arg1);
11092
11093 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11094 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11095 mode which allows further optimizations. */
11096 c1 &= msk;
11097 c2 &= msk;
11098 c3 = c1.and_not (c2);
11099 for (w = BITS_PER_UNIT;
11100 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11101 w <<= 1)
11102 {
11103 unsigned HOST_WIDE_INT mask
11104 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11105 if (((c1.low | c2.low) & mask) == mask
11106 && (c1.low & ~mask) == 0 && c1.high == 0)
11107 {
11108 c3 = double_int::from_uhwi (mask);
11109 break;
11110 }
11111 }
11112 if (c3 != c1)
11113 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11114 fold_build2_loc (loc, BIT_AND_EXPR, type,
11115 TREE_OPERAND (arg0, 0),
11116 double_int_to_tree (type,
11117 c3)),
11118 arg1);
11119 }
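/* Illustrative examples: (X & 0x0F) | 0x0C becomes (X & 0x03) | 0x0C
   by clearing the C1 bits already covered by C2, while
   (X & 0xF0) | 0x0F instead widens C1 to the byte mask, giving
   (X & 0xFF) | 0x0F, because a mode-sized mask optimizes better.  */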
11120
11121 /* (X & Y) | Y is (X, Y). */
11122 if (TREE_CODE (arg0) == BIT_AND_EXPR
11123 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11124 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11125 /* (X & Y) | X is (Y, X). */
11126 if (TREE_CODE (arg0) == BIT_AND_EXPR
11127 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11128 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11129 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11130 /* X | (X & Y) is (Y, X). */
11131 if (TREE_CODE (arg1) == BIT_AND_EXPR
11132 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11133 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11134 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11135 /* X | (Y & X) is (Y, X). */
11136 if (TREE_CODE (arg1) == BIT_AND_EXPR
11137 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11138 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11139 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11140
11141 /* (X & ~Y) | (~X & Y) is X ^ Y */
11142 if (TREE_CODE (arg0) == BIT_AND_EXPR
11143 && TREE_CODE (arg1) == BIT_AND_EXPR)
11144 {
11145 tree a0, a1, l0, l1, n0, n1;
11146
11147 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11148 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11149
11150 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11151 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11152
11153 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11154 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11155
11156 if ((operand_equal_p (n0, a0, 0)
11157 && operand_equal_p (n1, a1, 0))
11158 || (operand_equal_p (n0, a1, 0)
11159 && operand_equal_p (n1, a0, 0)))
11160 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11161 }
11162
11163 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11164 if (t1 != NULL_TREE)
11165 return t1;
11166
11167 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11168
11169 This results in more efficient code for machines without a NAND
11170 instruction. Combine will canonicalize to the first form
11171 which will allow use of NAND instructions provided by the
11172 backend if they exist. */
11173 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11174 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11175 {
11176 return
11177 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11178 build2 (BIT_AND_EXPR, type,
11179 fold_convert_loc (loc, type,
11180 TREE_OPERAND (arg0, 0)),
11181 fold_convert_loc (loc, type,
11182 TREE_OPERAND (arg1, 0))));
11183 }
11184
11185 /* See if this can be simplified into a rotate first. If that
11186 is unsuccessful continue in the association code. */
11187 goto bit_rotate;
11188
11189 case BIT_XOR_EXPR:
11190 if (integer_zerop (arg1))
11191 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11192 if (integer_all_onesp (arg1))
11193 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11194 if (operand_equal_p (arg0, arg1, 0))
11195 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11196
11197 /* ~X ^ X is -1. */
11198 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11199 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11200 {
11201 t1 = build_zero_cst (type);
11202 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11203 return omit_one_operand_loc (loc, type, t1, arg1);
11204 }
11205
11206 /* X ^ ~X is -1. */
11207 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11208 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11209 {
11210 t1 = build_zero_cst (type);
11211 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11212 return omit_one_operand_loc (loc, type, t1, arg0);
11213 }
11214
11215 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11216 with a constant, and the two constants have no bits in common,
11217 we should treat this as a BIT_IOR_EXPR since this may produce more
11218 simplifications. */
11219 if (TREE_CODE (arg0) == BIT_AND_EXPR
11220 && TREE_CODE (arg1) == BIT_AND_EXPR
11221 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11222 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11223 && integer_zerop (const_binop (BIT_AND_EXPR,
11224 TREE_OPERAND (arg0, 1),
11225 TREE_OPERAND (arg1, 1))))
11226 {
11227 code = BIT_IOR_EXPR;
11228 goto bit_ior;
11229 }
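/* Illustrative example: in (a & 0xF0) ^ (b & 0x0F) the two mask
   constants share no bits, so the expression is handled as
   (a & 0xF0) | (b & 0x0F) from here on.  */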
11230
11231 /* (X | Y) ^ X -> Y & ~X */
11232 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11233 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11234 {
11235 tree t2 = TREE_OPERAND (arg0, 1);
11236 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11237 arg1);
11238 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11239 fold_convert_loc (loc, type, t2),
11240 fold_convert_loc (loc, type, t1));
11241 return t1;
11242 }
11243
11244 /* (Y | X) ^ X -> Y & ~X */
11245 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11246 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11247 {
11248 tree t2 = TREE_OPERAND (arg0, 0);
11249 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11250 arg1);
11251 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11252 fold_convert_loc (loc, type, t2),
11253 fold_convert_loc (loc, type, t1));
11254 return t1;
11255 }
11256
11257 /* X ^ (X | Y) -> Y & ~X */
11258 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11259 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11260 {
11261 tree t2 = TREE_OPERAND (arg1, 1);
11262 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11263 arg0);
11264 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11265 fold_convert_loc (loc, type, t2),
11266 fold_convert_loc (loc, type, t1));
11267 return t1;
11268 }
11269
11270 /* X ^ (Y | X) -> Y & ~X */
11271 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11272 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11273 {
11274 tree t2 = TREE_OPERAND (arg1, 0);
11275 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11276 arg0);
11277 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11278 fold_convert_loc (loc, type, t2),
11279 fold_convert_loc (loc, type, t1));
11280 return t1;
11281 }
11282
11283 /* Convert ~X ^ ~Y to X ^ Y. */
11284 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11285 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11286 return fold_build2_loc (loc, code, type,
11287 fold_convert_loc (loc, type,
11288 TREE_OPERAND (arg0, 0)),
11289 fold_convert_loc (loc, type,
11290 TREE_OPERAND (arg1, 0)));
11291
11292 /* Convert ~X ^ C to X ^ ~C. */
11293 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11294 && TREE_CODE (arg1) == INTEGER_CST)
11295 return fold_build2_loc (loc, code, type,
11296 fold_convert_loc (loc, type,
11297 TREE_OPERAND (arg0, 0)),
11298 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11299
11300 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11301 if (TREE_CODE (arg0) == BIT_AND_EXPR
11302 && integer_onep (TREE_OPERAND (arg0, 1))
11303 && integer_onep (arg1))
11304 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11305 build_zero_cst (TREE_TYPE (arg0)));
11306
11307 /* Fold (X & Y) ^ Y as ~X & Y. */
11308 if (TREE_CODE (arg0) == BIT_AND_EXPR
11309 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11310 {
11311 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11312 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11313 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11314 fold_convert_loc (loc, type, arg1));
11315 }
11316 /* Fold (X & Y) ^ X as ~Y & X. */
11317 if (TREE_CODE (arg0) == BIT_AND_EXPR
11318 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11319 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11320 {
11321 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11322 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11323 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11324 fold_convert_loc (loc, type, arg1));
11325 }
11326 /* Fold X ^ (X & Y) as X & ~Y. */
11327 if (TREE_CODE (arg1) == BIT_AND_EXPR
11328 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11329 {
11330 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11331 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11332 fold_convert_loc (loc, type, arg0),
11333 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11334 }
11335 /* Fold X ^ (Y & X) as ~Y & X. */
11336 if (TREE_CODE (arg1) == BIT_AND_EXPR
11337 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11338 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11339 {
11340 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11341 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11342 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11343 fold_convert_loc (loc, type, arg0));
11344 }
11345
11346 /* See if this can be simplified into a rotate first. If that
11347 is unsuccessful continue in the association code. */
11348 goto bit_rotate;
11349
11350 case BIT_AND_EXPR:
11351 if (integer_all_onesp (arg1))
11352 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11353 if (integer_zerop (arg1))
11354 return omit_one_operand_loc (loc, type, arg1, arg0);
11355 if (operand_equal_p (arg0, arg1, 0))
11356 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11357
11358 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11359 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11360 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11361 || (TREE_CODE (arg0) == EQ_EXPR
11362 && integer_zerop (TREE_OPERAND (arg0, 1))))
11363 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11364 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11365
11366 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11367 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11368 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11369 || (TREE_CODE (arg1) == EQ_EXPR
11370 && integer_zerop (TREE_OPERAND (arg1, 1))))
11371 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11372 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11373
11374 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11375 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11376 && TREE_CODE (arg1) == INTEGER_CST
11377 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11378 {
11379 tree tmp1 = fold_convert_loc (loc, type, arg1);
11380 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11381 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11382 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11383 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11384 return
11385 fold_convert_loc (loc, type,
11386 fold_build2_loc (loc, BIT_IOR_EXPR,
11387 type, tmp2, tmp3));
11388 }
11389
11390 /* (X | Y) & Y is (X, Y). */
11391 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11392 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11393 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11394 /* (X | Y) & X is (Y, X). */
11395 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11396 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11397 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11398 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11399 /* X & (X | Y) is (Y, X). */
11400 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11401 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11402 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11403 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11404 /* X & (Y | X) is (Y, X). */
11405 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11406 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11407 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11408 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11409
11410 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11411 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11412 && integer_onep (TREE_OPERAND (arg0, 1))
11413 && integer_onep (arg1))
11414 {
11415 tree tem2;
11416 tem = TREE_OPERAND (arg0, 0);
11417 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11418 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11419 tem, tem2);
11420 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11421 build_zero_cst (TREE_TYPE (tem)));
11422 }
11423 /* Fold ~X & 1 as (X & 1) == 0. */
11424 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11425 && integer_onep (arg1))
11426 {
11427 tree tem2;
11428 tem = TREE_OPERAND (arg0, 0);
11429 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11430 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11431 tem, tem2);
11432 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11433 build_zero_cst (TREE_TYPE (tem)));
11434 }
11435 /* Fold !X & 1 as X == 0. */
11436 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11437 && integer_onep (arg1))
11438 {
11439 tem = TREE_OPERAND (arg0, 0);
11440 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11441 build_zero_cst (TREE_TYPE (tem)));
11442 }
11443
11444 /* Fold (X ^ Y) & Y as ~X & Y. */
11445 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11446 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11447 {
11448 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11449 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11450 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11451 fold_convert_loc (loc, type, arg1));
11452 }
11453 /* Fold (X ^ Y) & X as ~Y & X. */
11454 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11455 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11456 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11457 {
11458 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11459 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11460 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11461 fold_convert_loc (loc, type, arg1));
11462 }
11463 /* Fold X & (X ^ Y) as X & ~Y. */
11464 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11465 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11466 {
11467 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11468 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11469 fold_convert_loc (loc, type, arg0),
11470 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11471 }
11472 /* Fold X & (Y ^ X) as ~Y & X. */
11473 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11474 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11475 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11476 {
11477 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11478 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11479 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11480 fold_convert_loc (loc, type, arg0));
11481 }
11482
11483 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11484 multiple of 1 << CST. */
11485 if (TREE_CODE (arg1) == INTEGER_CST)
11486 {
11487 double_int cst1 = tree_to_double_int (arg1);
11488 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11489 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11490 if ((cst1 & ncst1) == ncst1
11491 && multiple_of_p (type, arg0,
11492 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11493 return fold_convert_loc (loc, type, arg0);
11494 }
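/* Illustrative example: (x * 8) & -8 folds to x * 8, because a
   multiple of 8 already has its three low bits clear.  */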
11495
11496 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11497 bits from CST2. */
11498 if (TREE_CODE (arg1) == INTEGER_CST
11499 && TREE_CODE (arg0) == MULT_EXPR
11500 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11501 {
11502 int arg1tz
11503 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11504 if (arg1tz > 0)
11505 {
11506 double_int arg1mask, masked;
11507 arg1mask = ~double_int::mask (arg1tz);
11508 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11509 TYPE_UNSIGNED (type));
11510 masked = arg1mask & tree_to_double_int (arg1);
11511 if (masked.is_zero ())
11512 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11513 arg0, arg1);
11514 else if (masked != tree_to_double_int (arg1))
11515 return fold_build2_loc (loc, code, type, op0,
11516 double_int_to_tree (type, masked));
11517 }
11518 }
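/* Illustrative examples: (x * 4) & 3 folds to 0, and (x * 4) & 7
   drops the known-zero low bits to become (x * 4) & 4.  */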
11519
11520 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11521 ((A & N) + B) & M -> (A + B) & M
11522 Similarly if (N & M) == 0,
11523 ((A | N) + B) & M -> (A + B) & M
11524 and for - instead of + (or unary - instead of +)
11525 and/or ^ instead of |.
11526 If B is constant and (B & M) == 0, fold into A & M. */
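/* Illustrative examples with M == 0xFF: ((a & 0x1FF) + b) & 0xFF
   becomes (a + b) & 0xFF since N & M == M, and
   ((a ^ 0x100) + b) & 0xFF becomes (a + b) & 0xFF since
   N & M == 0.  */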
11527 if (host_integerp (arg1, 1))
11528 {
11529 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11530 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11531 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11532 && (TREE_CODE (arg0) == PLUS_EXPR
11533 || TREE_CODE (arg0) == MINUS_EXPR
11534 || TREE_CODE (arg0) == NEGATE_EXPR)
11535 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11536 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11537 {
11538 tree pmop[2];
11539 int which = 0;
11540 unsigned HOST_WIDE_INT cst0;
11541
11542 /* Now we know that arg0 is (C + D) or (C - D) or
11543 -C and arg1 (M) == (1LL << cst) - 1.
11544 Store C into PMOP[0] and D into PMOP[1]. */
11545 pmop[0] = TREE_OPERAND (arg0, 0);
11546 pmop[1] = NULL;
11547 if (TREE_CODE (arg0) != NEGATE_EXPR)
11548 {
11549 pmop[1] = TREE_OPERAND (arg0, 1);
11550 which = 1;
11551 }
11552
11553 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11554 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11555 & cst1) != cst1)
11556 which = -1;
11557
11558 for (; which >= 0; which--)
11559 switch (TREE_CODE (pmop[which]))
11560 {
11561 case BIT_AND_EXPR:
11562 case BIT_IOR_EXPR:
11563 case BIT_XOR_EXPR:
11564 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11565 != INTEGER_CST)
11566 break;
11567 /* tree_low_cst not used, because we don't care about
11568 the upper bits. */
11569 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11570 cst0 &= cst1;
11571 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11572 {
11573 if (cst0 != cst1)
11574 break;
11575 }
11576 else if (cst0 != 0)
11577 break;
11578 /* If C or D is of the form (A & N) where
11579 (N & M) == M, or of the form (A | N) or
11580 (A ^ N) where (N & M) == 0, replace it with A. */
11581 pmop[which] = TREE_OPERAND (pmop[which], 0);
11582 break;
11583 case INTEGER_CST:
11584 /* If C or D is a constant N with (N & M) == 0, it can be
11585 omitted (assumed 0). */
11586 if ((TREE_CODE (arg0) == PLUS_EXPR
11587 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11588 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11589 pmop[which] = NULL;
11590 break;
11591 default:
11592 break;
11593 }
11594
11595 /* Only build anything new if we optimized one or both arguments
11596 above. */
11597 if (pmop[0] != TREE_OPERAND (arg0, 0)
11598 || (TREE_CODE (arg0) != NEGATE_EXPR
11599 && pmop[1] != TREE_OPERAND (arg0, 1)))
11600 {
11601 tree utype = TREE_TYPE (arg0);
11602 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11603 {
11604 /* Perform the operations in a type that has defined
11605 overflow behavior. */
11606 utype = unsigned_type_for (TREE_TYPE (arg0));
11607 if (pmop[0] != NULL)
11608 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11609 if (pmop[1] != NULL)
11610 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11611 }
11612
11613 if (TREE_CODE (arg0) == NEGATE_EXPR)
11614 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11615 else if (TREE_CODE (arg0) == PLUS_EXPR)
11616 {
11617 if (pmop[0] != NULL && pmop[1] != NULL)
11618 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11619 pmop[0], pmop[1]);
11620 else if (pmop[0] != NULL)
11621 tem = pmop[0];
11622 else if (pmop[1] != NULL)
11623 tem = pmop[1];
11624 else
11625 return build_int_cst (type, 0);
11626 }
11627 else if (pmop[0] == NULL)
11628 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11629 else
11630 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11631 pmop[0], pmop[1]);
11632 /* TEM is now the new binary +, - or unary - replacement. */
11633 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11634 fold_convert_loc (loc, utype, arg1));
11635 return fold_convert_loc (loc, type, tem);
11636 }
11637 }
11638 }
11639
11640 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11641 if (t1 != NULL_TREE)
11642 return t1;
11643 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11644 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11645 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11646 {
11647 unsigned int prec
11648 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11649
11650 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11651 && (~TREE_INT_CST_LOW (arg1)
11652 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11653 return
11654 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11655 }
11656
11657 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11658
11659 This results in more efficient code for machines without a NOR
11660 instruction. Combine will canonicalize to the first form
11661 which will allow use of NOR instructions provided by the
11662 backend if they exist. */
11663 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11664 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11665 {
11666 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11667 build2 (BIT_IOR_EXPR, type,
11668 fold_convert_loc (loc, type,
11669 TREE_OPERAND (arg0, 0)),
11670 fold_convert_loc (loc, type,
11671 TREE_OPERAND (arg1, 0))));
11672 }
11673
11674 /* If arg0 is derived from the address of an object or function, we may
11675 be able to fold this expression using the object or function's
11676 alignment. */
11677 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11678 {
11679 unsigned HOST_WIDE_INT modulus, residue;
11680 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11681
11682 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11683 integer_onep (arg1));
11684
11685 /* This works because modulus is a power of 2. If this weren't the
11686 case, we'd have to replace it by its greatest power-of-2
11687 divisor: modulus & -modulus. */
11688 if (low < modulus)
11689 return build_int_cst (type, residue & low);
11690 }
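/* Illustrative example: if arg0 is the address of an object with
   known 8-byte alignment, modulus is 8 and residue is 0, so an
   expression such as that address & 7 folds to 0.  */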
11691
11692 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11693 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11694 if the new mask might be further optimized. */
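/* Illustrative example of widening the mask: ((x << 3) & 0xF9)
   becomes ((x << 3) & 0xFF), since bits 0-2 are known zero after the
   shift and 0xFF is a QImode-sized mask that may combine with other
   simplifications.  */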
11695 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11696 || TREE_CODE (arg0) == RSHIFT_EXPR)
11697 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11698 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11699 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11700 < TYPE_PRECISION (TREE_TYPE (arg0))
11701 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11702 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11703 {
11704 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11705 unsigned HOST_WIDE_INT mask
11706 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11707 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11708 tree shift_type = TREE_TYPE (arg0);
11709
11710 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11711 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11712 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11713 && TYPE_PRECISION (TREE_TYPE (arg0))
11714 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11715 {
11716 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11717 tree arg00 = TREE_OPERAND (arg0, 0);
11718 /* See if more bits can be proven as zero because of
11719 zero extension. */
11720 if (TREE_CODE (arg00) == NOP_EXPR
11721 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11722 {
11723 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11724 if (TYPE_PRECISION (inner_type)
11725 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11726 && TYPE_PRECISION (inner_type) < prec)
11727 {
11728 prec = TYPE_PRECISION (inner_type);
11729 /* See if we can shorten the right shift. */
11730 if (shiftc < prec)
11731 shift_type = inner_type;
11732 }
11733 }
11734 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11735 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11736 zerobits <<= prec - shiftc;
11737 /* For an arithmetic shift, if the sign bit could be set, zerobits
11738 may actually contain sign bits, so no transformation is
11739 possible unless MASK masks them all away. In that
11740 case the shift needs to be converted into a logical shift. */
11741 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11742 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11743 {
11744 if ((mask & zerobits) == 0)
11745 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11746 else
11747 zerobits = 0;
11748 }
11749 }
11750
11751 /* ((X << 16) & 0xff00) is (X, 0). */
11752 if ((mask & zerobits) == mask)
11753 return omit_one_operand_loc (loc, type,
11754 build_int_cst (type, 0), arg0);
11755
11756 newmask = mask | zerobits;
11757 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11758 {
11759 unsigned int prec;
11760
11761 /* Only do the transformation if NEWMASK is some integer
11762 mode's mask. */
11763 for (prec = BITS_PER_UNIT;
11764 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11765 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11766 break;
11767 if (prec < HOST_BITS_PER_WIDE_INT
11768 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11769 {
11770 tree newmaskt;
11771
11772 if (shift_type != TREE_TYPE (arg0))
11773 {
11774 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11775 fold_convert_loc (loc, shift_type,
11776 TREE_OPERAND (arg0, 0)),
11777 TREE_OPERAND (arg0, 1));
11778 tem = fold_convert_loc (loc, type, tem);
11779 }
11780 else
11781 tem = op0;
11782 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11783 if (!tree_int_cst_equal (newmaskt, arg1))
11784 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11785 }
11786 }
11787 }
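      /* Worked example of the mask widening above, assuming a 32-bit
	 int type and 64-bit HOST_WIDE_INT: for (X << 8) & 0xff00,
	 ZEROBITS is 0xff, so NEWMASK is 0xffff, a 16-bit (HImode) mask;
	 the result (X << 8) & 0xffff may later be recognized as a
	 zero-extension.  For (X << 16) & 0xff00 every mask bit is known
	 zero and the whole expression folds to 0.  */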
11788
11789 goto associate;
11790
11791 case RDIV_EXPR:
11792 /* Don't touch a floating-point divide by zero unless the mode
11793 of the constant can represent infinity. */
11794 if (TREE_CODE (arg1) == REAL_CST
11795 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11796 && real_zerop (arg1))
11797 return NULL_TREE;
11798
11799 /* Optimize A / A to 1.0 if we don't care about
11800 NaNs or Infinities. Skip the transformation
11801 for non-real operands. */
11802 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11803 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11804 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11805 && operand_equal_p (arg0, arg1, 0))
11806 {
11807 tree r = build_real (TREE_TYPE (arg0), dconst1);
11808
11809 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11810 }
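      /* The guards above matter: x / x is 1.0 except when x is 0.0, an
	 infinity or a NaN, in which case the exact result is NaN, so
	 the fold is only safe when NaNs and infinities can be
	 ignored.  */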
11811
11812 /* The complex version of the above A / A optimization. */
11813 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11814 && operand_equal_p (arg0, arg1, 0))
11815 {
11816 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11817 if (! HONOR_NANS (TYPE_MODE (elem_type))
11818 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11819 {
11820 tree r = build_real (elem_type, dconst1);
11821 /* omit_two_operands will call fold_convert for us. */
11822 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11823 }
11824 }
11825
11826 /* (-A) / (-B) -> A / B */
11827 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11828 return fold_build2_loc (loc, RDIV_EXPR, type,
11829 TREE_OPERAND (arg0, 0),
11830 negate_expr (arg1));
11831 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11832 return fold_build2_loc (loc, RDIV_EXPR, type,
11833 negate_expr (arg0),
11834 TREE_OPERAND (arg1, 0));
11835
11836 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11837 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11838 && real_onep (arg1))
11839 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11840
11841 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11842 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11843 && real_minus_onep (arg1))
11844 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11845 negate_expr (arg0)));
11846
11847 /* If ARG1 is a constant, we can convert this to a multiply by the
11848 reciprocal. This does not have the same rounding properties,
11849 so only do this if -freciprocal-math. We can actually
11850 always safely do it if ARG1 is a power of two, but it's hard to
11851 tell if it is or not in a portable manner. */
11852 if (optimize
11853 && (TREE_CODE (arg1) == REAL_CST
11854 || (TREE_CODE (arg1) == COMPLEX_CST
11855 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11856 || (TREE_CODE (arg1) == VECTOR_CST
11857 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11858 {
11859 if (flag_reciprocal_math
11860 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11861 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11862 /* Find the reciprocal if optimizing and the result is exact.
11863 TODO: Complex reciprocal not implemented. */
11864 if (TREE_CODE (arg1) != COMPLEX_CST)
11865 {
11866 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11867
11868 if (inverse)
11869 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11870 }
11871 }
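      /* For example, with -freciprocal-math, x / 5.0 becomes x * 0.2
	 (0.2 is not exact in binary, hence the flag).  x / 4.0 becomes
	 x * 0.25 whenever optimizing, because 0.25 is an exact
	 inverse.  */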
11872 /* Convert A/B/C to A/(B*C). */
11873 if (flag_reciprocal_math
11874 && TREE_CODE (arg0) == RDIV_EXPR)
11875 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11876 fold_build2_loc (loc, MULT_EXPR, type,
11877 TREE_OPERAND (arg0, 1), arg1));
11878
11879 /* Convert A/(B/C) to (A/B)*C. */
11880 if (flag_reciprocal_math
11881 && TREE_CODE (arg1) == RDIV_EXPR)
11882 return fold_build2_loc (loc, MULT_EXPR, type,
11883 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11884 TREE_OPERAND (arg1, 0)),
11885 TREE_OPERAND (arg1, 1));
11886
11887 /* Convert C1/(X*C2) into (C1/C2)/X. */
11888 if (flag_reciprocal_math
11889 && TREE_CODE (arg1) == MULT_EXPR
11890 && TREE_CODE (arg0) == REAL_CST
11891 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11892 {
11893 tree tem = const_binop (RDIV_EXPR, arg0,
11894 TREE_OPERAND (arg1, 1));
11895 if (tem)
11896 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11897 TREE_OPERAND (arg1, 0));
11898 }
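      /* For example, 10.0 / (x * 5.0) becomes (10.0 / 5.0) / x,
	 i.e. 2.0 / x.  */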
11899
11900 if (flag_unsafe_math_optimizations)
11901 {
11902 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11903 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11904
11905 /* Optimize sin(x)/cos(x) as tan(x). */
11906 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11907 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11908 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11909 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11910 CALL_EXPR_ARG (arg1, 0), 0))
11911 {
11912 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11913
11914 if (tanfn != NULL_TREE)
11915 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11916 }
11917
11918 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11919 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11920 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11921 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11922 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11923 CALL_EXPR_ARG (arg1, 0), 0))
11924 {
11925 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11926
11927 if (tanfn != NULL_TREE)
11928 {
11929 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11930 CALL_EXPR_ARG (arg0, 0));
11931 return fold_build2_loc (loc, RDIV_EXPR, type,
11932 build_real (type, dconst1), tmp);
11933 }
11934 }
11935
11936 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11937 NaNs or Infinities. */
11938 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11939 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11940 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11941 {
11942 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11943 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11944
11945 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11946 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11947 && operand_equal_p (arg00, arg01, 0))
11948 {
11949 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11950
11951 if (cosfn != NULL_TREE)
11952 return build_call_expr_loc (loc, cosfn, 1, arg00);
11953 }
11954 }
11955
11956 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11957 NaNs or Infinities. */
11958 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11959 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11960 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11961 {
11962 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11963 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11964
11965 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11966 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11967 && operand_equal_p (arg00, arg01, 0))
11968 {
11969 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11970
11971 if (cosfn != NULL_TREE)
11972 {
11973 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11974 return fold_build2_loc (loc, RDIV_EXPR, type,
11975 build_real (type, dconst1),
11976 tmp);
11977 }
11978 }
11979 }
11980
11981 /* Optimize pow(x,c)/x as pow(x,c-1). */
11982 if (fcode0 == BUILT_IN_POW
11983 || fcode0 == BUILT_IN_POWF
11984 || fcode0 == BUILT_IN_POWL)
11985 {
11986 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11987 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11988 if (TREE_CODE (arg01) == REAL_CST
11989 && !TREE_OVERFLOW (arg01)
11990 && operand_equal_p (arg1, arg00, 0))
11991 {
11992 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11993 REAL_VALUE_TYPE c;
11994 tree arg;
11995
11996 c = TREE_REAL_CST (arg01);
11997 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11998 arg = build_real (type, c);
11999 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12000 }
12001 }
12002
12003 /* Optimize a/root(b/c) into a*root(c/b). */
12004 if (BUILTIN_ROOT_P (fcode1))
12005 {
12006 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12007
12008 if (TREE_CODE (rootarg) == RDIV_EXPR)
12009 {
12010 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12011 tree b = TREE_OPERAND (rootarg, 0);
12012 tree c = TREE_OPERAND (rootarg, 1);
12013
12014 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12015
12016 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12017 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12018 }
12019 }
12020
12021 /* Optimize x/expN(y) into x*expN(-y). */
12022 if (BUILTIN_EXPONENT_P (fcode1))
12023 {
12024 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12025 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12026 arg1 = build_call_expr_loc (loc,
12027 expfn, 1,
12028 fold_convert_loc (loc, type, arg));
12029 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12030 }
12031
12032 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12033 if (fcode1 == BUILT_IN_POW
12034 || fcode1 == BUILT_IN_POWF
12035 || fcode1 == BUILT_IN_POWL)
12036 {
12037 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12038 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12039 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12040 tree neg11 = fold_convert_loc (loc, type,
12041 negate_expr (arg11));
12042 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12043 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12044 }
12045 }
12046 return NULL_TREE;
12047
12048 case TRUNC_DIV_EXPR:
12049 /* Optimize (X & (-A)) / A where A is a power of 2,
12050 to X >> log2(A) */
12051 if (TREE_CODE (arg0) == BIT_AND_EXPR
12052 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12053 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12054 {
12055 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12056 arg1, TREE_OPERAND (arg0, 1));
12057 	  if (sum && integer_zerop (sum))
12058 	    {
12059 	      unsigned long pow2;
12060 
12061 	      if (TREE_INT_CST_LOW (arg1))
12062 		pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12063 	      else
12064 		pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12065 		       + HOST_BITS_PER_WIDE_INT;
12066 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
12067 				      TREE_OPERAND (arg0, 0),
12068 				      build_int_cst (integer_type_node, pow2));
12069 	    }
12070 }
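      /* For example, for signed X, (X & -8) / 8 becomes X >> 3:
	 X & -8 is a multiple of 8, so the truncating division is exact
	 and equals the arithmetic right shift, even for negative X.  */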
12071
12072 /* Fall through */
12073
12074 case FLOOR_DIV_EXPR:
12075 /* Simplify A / (B << N) where A and B are positive and B is
12076 a power of 2, to A >> (N + log2(B)). */
12077 strict_overflow_p = false;
12078 if (TREE_CODE (arg1) == LSHIFT_EXPR
12079 && (TYPE_UNSIGNED (type)
12080 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12081 {
12082 tree sval = TREE_OPERAND (arg1, 0);
12083 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12084 {
12085 tree sh_cnt = TREE_OPERAND (arg1, 1);
12086 unsigned long pow2;
12087
12088 if (TREE_INT_CST_LOW (sval))
12089 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12090 else
12091 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12092 + HOST_BITS_PER_WIDE_INT;
12093
12094 if (strict_overflow_p)
12095 fold_overflow_warning (("assuming signed overflow does not "
12096 "occur when simplifying A / (B << N)"),
12097 WARN_STRICT_OVERFLOW_MISC);
12098
12099 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12100 sh_cnt,
12101 build_int_cst (TREE_TYPE (sh_cnt),
12102 pow2));
12103 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12104 fold_convert_loc (loc, type, arg0), sh_cnt);
12105 }
12106 }
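      /* For example, for unsigned A, A / (4 << N) becomes
	 A >> (N + 2), since log2 (4) == 2.  */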
12107
12108 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12109 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12110 if (INTEGRAL_TYPE_P (type)
12111 && TYPE_UNSIGNED (type)
12112 && code == FLOOR_DIV_EXPR)
12113 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12114
12115 /* Fall through */
12116
12117 case ROUND_DIV_EXPR:
12118 case CEIL_DIV_EXPR:
12119 case EXACT_DIV_EXPR:
12120 if (integer_onep (arg1))
12121 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12122 if (integer_zerop (arg1))
12123 return NULL_TREE;
12124 /* X / -1 is -X. */
12125 if (!TYPE_UNSIGNED (type)
12126 && TREE_CODE (arg1) == INTEGER_CST
12127 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12128 && TREE_INT_CST_HIGH (arg1) == -1)
12129 return fold_convert_loc (loc, type, negate_expr (arg0));
12130
12131 /* Convert -A / -B to A / B when the type is signed and overflow is
12132 undefined. */
12133 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12134 && TREE_CODE (arg0) == NEGATE_EXPR
12135 && negate_expr_p (arg1))
12136 {
12137 if (INTEGRAL_TYPE_P (type))
12138 fold_overflow_warning (("assuming signed overflow does not occur "
12139 "when distributing negation across "
12140 "division"),
12141 WARN_STRICT_OVERFLOW_MISC);
12142 return fold_build2_loc (loc, code, type,
12143 fold_convert_loc (loc, type,
12144 TREE_OPERAND (arg0, 0)),
12145 fold_convert_loc (loc, type,
12146 negate_expr (arg1)));
12147 }
12148 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12149 && TREE_CODE (arg1) == NEGATE_EXPR
12150 && negate_expr_p (arg0))
12151 {
12152 if (INTEGRAL_TYPE_P (type))
12153 fold_overflow_warning (("assuming signed overflow does not occur "
12154 "when distributing negation across "
12155 "division"),
12156 WARN_STRICT_OVERFLOW_MISC);
12157 return fold_build2_loc (loc, code, type,
12158 fold_convert_loc (loc, type,
12159 negate_expr (arg0)),
12160 fold_convert_loc (loc, type,
12161 TREE_OPERAND (arg1, 0)));
12162 }
12163
12164 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12165 operation, EXACT_DIV_EXPR.
12166
12167 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12168 	 At one time others generated faster code, but it's not clear if they
12169 	 still do after the last round of changes to the DIV code in expmed.c.  */
12170 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12171 && multiple_of_p (type, arg0, arg1))
12172 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12173
12174 strict_overflow_p = false;
12175 if (TREE_CODE (arg1) == INTEGER_CST
12176 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12177 &strict_overflow_p)))
12178 {
12179 if (strict_overflow_p)
12180 fold_overflow_warning (("assuming signed overflow does not occur "
12181 "when simplifying division"),
12182 WARN_STRICT_OVERFLOW_MISC);
12183 return fold_convert_loc (loc, type, tem);
12184 }
12185
12186 return NULL_TREE;
12187
12188 case CEIL_MOD_EXPR:
12189 case FLOOR_MOD_EXPR:
12190 case ROUND_MOD_EXPR:
12191 case TRUNC_MOD_EXPR:
12192 /* X % 1 is always zero, but be sure to preserve any side
12193 effects in X. */
12194 if (integer_onep (arg1))
12195 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12196
12197       /* For X % 0, return X % 0 unchanged so that we can emit the
12198 	 proper warnings and errors.  */
12199 if (integer_zerop (arg1))
12200 return NULL_TREE;
12201
12202 /* 0 % X is always zero, but be sure to preserve any side
12203 effects in X. Place this after checking for X == 0. */
12204 if (integer_zerop (arg0))
12205 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12206
12207 /* X % -1 is zero. */
12208 if (!TYPE_UNSIGNED (type)
12209 && TREE_CODE (arg1) == INTEGER_CST
12210 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12211 && TREE_INT_CST_HIGH (arg1) == -1)
12212 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12213
12214 /* X % -C is the same as X % C. */
12215 if (code == TRUNC_MOD_EXPR
12216 && !TYPE_UNSIGNED (type)
12217 && TREE_CODE (arg1) == INTEGER_CST
12218 && !TREE_OVERFLOW (arg1)
12219 && TREE_INT_CST_HIGH (arg1) < 0
12220 && !TYPE_OVERFLOW_TRAPS (type)
12221 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12222 && !sign_bit_p (arg1, arg1))
12223 return fold_build2_loc (loc, code, type,
12224 fold_convert_loc (loc, type, arg0),
12225 fold_convert_loc (loc, type,
12226 negate_expr (arg1)));
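      /* For example, for signed X, X % -16 becomes X % 16; with
	 truncating division the result depends only on the divisor's
	 magnitude.  */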
12227
12228 /* X % -Y is the same as X % Y. */
12229 if (code == TRUNC_MOD_EXPR
12230 && !TYPE_UNSIGNED (type)
12231 && TREE_CODE (arg1) == NEGATE_EXPR
12232 && !TYPE_OVERFLOW_TRAPS (type))
12233 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12234 fold_convert_loc (loc, type,
12235 TREE_OPERAND (arg1, 0)));
12236
12237 strict_overflow_p = false;
12238 if (TREE_CODE (arg1) == INTEGER_CST
12239 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12240 &strict_overflow_p)))
12241 {
12242 if (strict_overflow_p)
12243 fold_overflow_warning (("assuming signed overflow does not occur "
12244 "when simplifying modulus"),
12245 WARN_STRICT_OVERFLOW_MISC);
12246 return fold_convert_loc (loc, type, tem);
12247 }
12248
12249 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12250 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12251 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12252 && (TYPE_UNSIGNED (type)
12253 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12254 {
12255 tree c = arg1;
12256 /* Also optimize A % (C << N) where C is a power of 2,
12257 to A & ((C << N) - 1). */
12258 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12259 c = TREE_OPERAND (arg1, 0);
12260
12261 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12262 {
12263 tree mask
12264 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12265 build_int_cst (TREE_TYPE (arg1), 1));
12266 if (strict_overflow_p)
12267 fold_overflow_warning (("assuming signed overflow does not "
12268 "occur when simplifying "
12269 "X % (power of two)"),
12270 WARN_STRICT_OVERFLOW_MISC);
12271 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12272 fold_convert_loc (loc, type, arg0),
12273 fold_convert_loc (loc, type, mask));
12274 }
12275 }
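      /* For example, for unsigned X, X % 8 becomes X & 7, and
	 X % (2 << N) becomes X & ((2 << N) - 1).  */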
12276
12277 return NULL_TREE;
12278
12279 case LROTATE_EXPR:
12280 case RROTATE_EXPR:
12281 if (integer_all_onesp (arg0))
12282 return omit_one_operand_loc (loc, type, arg0, arg1);
12283 goto shift;
12284
12285 case RSHIFT_EXPR:
12286 /* Optimize -1 >> x for arithmetic right shifts. */
12287 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12288 && tree_expr_nonnegative_p (arg1))
12289 return omit_one_operand_loc (loc, type, arg0, arg1);
12290 /* ... fall through ... */
12291
12292 case LSHIFT_EXPR:
12293 shift:
12294 if (integer_zerop (arg1))
12295 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12296 if (integer_zerop (arg0))
12297 return omit_one_operand_loc (loc, type, arg0, arg1);
12298
12299       /* Since a negative shift count is not well-defined,
12300 	 don't try to compute the result in the compiler.  */
12301 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12302 return NULL_TREE;
12303
12304 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12305 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12306 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12307 && host_integerp (TREE_OPERAND (arg0, 1), false)
12308 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12309 {
12310 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12311 + TREE_INT_CST_LOW (arg1));
12312
12313 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12314 being well defined. */
12315 if (low >= TYPE_PRECISION (type))
12316 {
12317 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12318 low = low % TYPE_PRECISION (type);
12319 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12320 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12321 TREE_OPERAND (arg0, 0));
12322 else
12323 low = TYPE_PRECISION (type) - 1;
12324 }
12325
12326 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12327 build_int_cst (type, low));
12328 }
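      /* For example, (X >> 3) >> 5 becomes X >> 8.  If the combined
	 count reaches or exceeds the precision (assuming 32-bit int):
	 rotate counts wrap modulo 32; unsigned right shifts and left
	 shifts become 0; and a signed right shift is clamped to
	 X >> 31.  */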
12329
12330 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12331 into x & ((unsigned)-1 >> c) for unsigned types. */
12332 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12333 || (TYPE_UNSIGNED (type)
12334 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12335 && host_integerp (arg1, false)
12336 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12337 && host_integerp (TREE_OPERAND (arg0, 1), false)
12338 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12339 {
12340 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12341 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12342 tree lshift;
12343 tree arg00;
12344
12345 if (low0 == low1)
12346 {
12347 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12348
12349 lshift = build_int_cst (type, -1);
12350 lshift = int_const_binop (code, lshift, arg1);
12351
12352 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12353 }
12354 }
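      /* For example, assuming a 32-bit type, (X >> 4) << 4 becomes
	 X & 0xfffffff0, and for unsigned X, (X << 4) >> 4 becomes
	 X & 0x0fffffff.  */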
12355
12356 /* Rewrite an LROTATE_EXPR by a constant into an
12357 RROTATE_EXPR by a new constant. */
12358 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12359 {
12360 tree tem = build_int_cst (TREE_TYPE (arg1),
12361 TYPE_PRECISION (type));
12362 tem = const_binop (MINUS_EXPR, tem, arg1);
12363 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12364 }
12365
12366 /* If we have a rotate of a bit operation with the rotate count and
12367 the second operand of the bit operation both constant,
12368 permute the two operations. */
12369 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12370 && (TREE_CODE (arg0) == BIT_AND_EXPR
12371 || TREE_CODE (arg0) == BIT_IOR_EXPR
12372 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12373 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12374 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12375 fold_build2_loc (loc, code, type,
12376 TREE_OPERAND (arg0, 0), arg1),
12377 fold_build2_loc (loc, code, type,
12378 TREE_OPERAND (arg0, 1), arg1));
12379
12380 /* Two consecutive rotates adding up to the precision of the
12381 type can be ignored. */
12382 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12383 && TREE_CODE (arg0) == RROTATE_EXPR
12384 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12385 && TREE_INT_CST_HIGH (arg1) == 0
12386 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12387 && ((TREE_INT_CST_LOW (arg1)
12388 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12389 == (unsigned int) TYPE_PRECISION (type)))
12390 return TREE_OPERAND (arg0, 0);
12391
12392 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12393 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12394 if the latter can be further optimized. */
12395 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12396 && TREE_CODE (arg0) == BIT_AND_EXPR
12397 && TREE_CODE (arg1) == INTEGER_CST
12398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12399 {
12400 tree mask = fold_build2_loc (loc, code, type,
12401 fold_convert_loc (loc, type,
12402 TREE_OPERAND (arg0, 1)),
12403 arg1);
12404 tree shift = fold_build2_loc (loc, code, type,
12405 fold_convert_loc (loc, type,
12406 TREE_OPERAND (arg0, 0)),
12407 arg1);
12408 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12409 if (tem)
12410 return tem;
12411 }
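      /* For example, assuming a 32-bit int type, (X & 0xff) << 8 first
	 becomes (X << 8) & 0xff00, which the BIT_AND_EXPR mask-widening
	 rule then turns into (X << 8) & 0xffff; if the resulting AND
	 does not fold further, the transformation is not applied.  */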
12412
12413 return NULL_TREE;
12414
12415 case MIN_EXPR:
12416 if (operand_equal_p (arg0, arg1, 0))
12417 return omit_one_operand_loc (loc, type, arg0, arg1);
12418 if (INTEGRAL_TYPE_P (type)
12419 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12420 return omit_one_operand_loc (loc, type, arg1, arg0);
12421 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12422 if (tem)
12423 return tem;
12424 goto associate;
12425
12426 case MAX_EXPR:
12427 if (operand_equal_p (arg0, arg1, 0))
12428 return omit_one_operand_loc (loc, type, arg0, arg1);
12429 if (INTEGRAL_TYPE_P (type)
12430 && TYPE_MAX_VALUE (type)
12431 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12432 return omit_one_operand_loc (loc, type, arg1, arg0);
12433 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12434 if (tem)
12435 return tem;
12436 goto associate;
12437
12438 case TRUTH_ANDIF_EXPR:
12439 /* Note that the operands of this must be ints
12440 and their values must be 0 or 1.
12441 ("true" is a fixed value perhaps depending on the language.) */
12442 /* If first arg is constant zero, return it. */
12443 if (integer_zerop (arg0))
12444 return fold_convert_loc (loc, type, arg0);
12445 case TRUTH_AND_EXPR:
12446 /* If either arg is constant true, drop it. */
12447 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12448 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12449 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12450 /* Preserve sequence points. */
12451 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12452 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12453 /* If second arg is constant zero, result is zero, but first arg
12454 must be evaluated. */
12455 if (integer_zerop (arg1))
12456 return omit_one_operand_loc (loc, type, arg1, arg0);
12457 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12458 case will be handled here. */
12459 if (integer_zerop (arg0))
12460 return omit_one_operand_loc (loc, type, arg0, arg1);
12461
12462 /* !X && X is always false. */
12463 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12464 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12465 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12466 /* X && !X is always false. */
12467 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12468 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12469 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12470
12471 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12472 means A >= Y && A != MAX, but in this case we know that
12473 A < X <= MAX. */
12474
12475 if (!TREE_SIDE_EFFECTS (arg0)
12476 && !TREE_SIDE_EFFECTS (arg1))
12477 {
12478 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12479 if (tem && !operand_equal_p (tem, arg0, 0))
12480 return fold_build2_loc (loc, code, type, tem, arg1);
12481
12482 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12483 if (tem && !operand_equal_p (tem, arg1, 0))
12484 return fold_build2_loc (loc, code, type, arg0, tem);
12485 }
12486
12487 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12488 != NULL_TREE)
12489 return tem;
12490
12491 return NULL_TREE;
12492
12493 case TRUTH_ORIF_EXPR:
12494 /* Note that the operands of this must be ints
12495 and their values must be 0 or true.
12496 ("true" is a fixed value perhaps depending on the language.) */
12497 /* If first arg is constant true, return it. */
12498 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12499 return fold_convert_loc (loc, type, arg0);
12500 case TRUTH_OR_EXPR:
12501 /* If either arg is constant zero, drop it. */
12502 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12503 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12504 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12505 /* Preserve sequence points. */
12506 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12507 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12508 /* If second arg is constant true, result is true, but we must
12509 evaluate first arg. */
12510 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12511 return omit_one_operand_loc (loc, type, arg1, arg0);
12512 /* Likewise for first arg, but note this only occurs here for
12513 TRUTH_OR_EXPR. */
12514 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12515 return omit_one_operand_loc (loc, type, arg0, arg1);
12516
12517 /* !X || X is always true. */
12518 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12519 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12520 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12521 /* X || !X is always true. */
12522 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12523 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12524 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12525
12526 /* (X && !Y) || (!X && Y) is X ^ Y */
12527 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12528 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12529 {
12530 tree a0, a1, l0, l1, n0, n1;
12531
12532 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12533 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12534
12535 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12536 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12537
12538 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12539 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12540
12541 if ((operand_equal_p (n0, a0, 0)
12542 && operand_equal_p (n1, a1, 0))
12543 || (operand_equal_p (n0, a1, 0)
12544 && operand_equal_p (n1, a0, 0)))
12545 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12546 }
12547
12548 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12549 != NULL_TREE)
12550 return tem;
12551
12552 return NULL_TREE;
12553
12554 case TRUTH_XOR_EXPR:
12555 /* If the second arg is constant zero, drop it. */
12556 if (integer_zerop (arg1))
12557 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12558 /* If the second arg is constant true, this is a logical inversion. */
12559 if (integer_onep (arg1))
12560 {
12561 /* Only call invert_truthvalue if operand is a truth value. */
12562 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12563 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12564 else
12565 tem = invert_truthvalue_loc (loc, arg0);
12566 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12567 }
12568 /* Identical arguments cancel to zero. */
12569 if (operand_equal_p (arg0, arg1, 0))
12570 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12571
12572 /* !X ^ X is always true. */
12573 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12574 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12575 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12576
12577 /* X ^ !X is always true. */
12578 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12579 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12580 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12581
12582 return NULL_TREE;
12583
12584 case EQ_EXPR:
12585 case NE_EXPR:
12586 STRIP_NOPS (arg0);
12587 STRIP_NOPS (arg1);
12588
12589 tem = fold_comparison (loc, code, type, op0, op1);
12590 if (tem != NULL_TREE)
12591 return tem;
12592
12593 /* bool_var != 0 becomes bool_var. */
12594 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12595 && code == NE_EXPR)
12596 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12597
12598 /* bool_var == 1 becomes bool_var. */
12599 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12600 && code == EQ_EXPR)
12601 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12602
12603 /* bool_var != 1 becomes !bool_var. */
12604 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12605 && code == NE_EXPR)
12606 return fold_convert_loc (loc, type,
12607 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12608 TREE_TYPE (arg0), arg0));
12609
12610 /* bool_var == 0 becomes !bool_var. */
12611 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12612 && code == EQ_EXPR)
12613 return fold_convert_loc (loc, type,
12614 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12615 TREE_TYPE (arg0), arg0));
12616
12617 /* !exp != 0 becomes !exp */
12618 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12619 && code == NE_EXPR)
12620 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12621
12622 /* If this is an equality comparison of the address of two non-weak,
12623 unaliased symbols neither of which are extern (since we do not
12624 have access to attributes for externs), then we know the result. */
12625 if (TREE_CODE (arg0) == ADDR_EXPR
12626 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12627 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12628 && ! lookup_attribute ("alias",
12629 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12630 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12631 && TREE_CODE (arg1) == ADDR_EXPR
12632 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12633 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12634 && ! lookup_attribute ("alias",
12635 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12636 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12637 {
12638 /* We know that we're looking at the address of two
12639 non-weak, unaliased, static _DECL nodes.
12640
12641 It is both wasteful and incorrect to call operand_equal_p
12642 to compare the two ADDR_EXPR nodes. It is wasteful in that
12643 all we need to do is test pointer equality for the arguments
12644 to the two ADDR_EXPR nodes. It is incorrect to use
12645 operand_equal_p as that function is NOT equivalent to a
12646 C equality test. It can in fact return false for two
12647 objects which would test as equal using the C equality
12648 operator. */
12649 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12650 return constant_boolean_node (equal
12651 ? code == EQ_EXPR : code != EQ_EXPR,
12652 type);
12653 }
12654
12655 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12656 a MINUS_EXPR of a constant, we can convert it into a comparison with
12657 a revised constant as long as no overflow occurs. */
12658 if (TREE_CODE (arg1) == INTEGER_CST
12659 && (TREE_CODE (arg0) == PLUS_EXPR
12660 || TREE_CODE (arg0) == MINUS_EXPR)
12661 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12662 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12663 ? MINUS_EXPR : PLUS_EXPR,
12664 fold_convert_loc (loc, TREE_TYPE (arg0),
12665 arg1),
12666 TREE_OPERAND (arg0, 1)))
12667 && !TREE_OVERFLOW (tem))
12668 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
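      /* For example, X + 10 == 30 becomes X == 20, since
	 const_binop (MINUS_EXPR, 30, 10) is 20 with no overflow.  */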
12669
12670 /* Similarly for a NEGATE_EXPR. */
12671 if (TREE_CODE (arg0) == NEGATE_EXPR
12672 && TREE_CODE (arg1) == INTEGER_CST
12673 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12674 arg1)))
12675 && TREE_CODE (tem) == INTEGER_CST
12676 && !TREE_OVERFLOW (tem))
12677 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12678
12679 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12680 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12681 && TREE_CODE (arg1) == INTEGER_CST
12682 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12683 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12684 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12685 fold_convert_loc (loc,
12686 TREE_TYPE (arg0),
12687 arg1),
12688 TREE_OPERAND (arg0, 1)));
12689
12690 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12691 if ((TREE_CODE (arg0) == PLUS_EXPR
12692 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12693 || TREE_CODE (arg0) == MINUS_EXPR)
12694 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12695 0)),
12696 arg1, 0)
12697 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12698 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12699 {
12700 tree val = TREE_OPERAND (arg0, 1);
12701 return omit_two_operands_loc (loc, type,
12702 fold_build2_loc (loc, code, type,
12703 val,
12704 build_int_cst (TREE_TYPE (val),
12705 0)),
12706 TREE_OPERAND (arg0, 0), arg1);
12707 }
12708
12709 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12710 if (TREE_CODE (arg0) == MINUS_EXPR
12711 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12712 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12713 1)),
12714 arg1, 0)
12715 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12716 {
12717 return omit_two_operands_loc (loc, type,
12718 code == NE_EXPR
12719 ? boolean_true_node : boolean_false_node,
12720 TREE_OPERAND (arg0, 1), arg1);
12721 }
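      /* The parity argument: C - X == X would require C == 2*X in the
	 type's modular arithmetic, but 2*X is always even while C is
	 odd, so e.g. 7 - X == X is always false and 7 - X != X is
	 always true.  */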
12722
12723 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12724 for !=. Don't do this for ordered comparisons due to overflow. */
12725 if (TREE_CODE (arg0) == MINUS_EXPR
12726 && integer_zerop (arg1))
12727 return fold_build2_loc (loc, code, type,
12728 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12729
12730 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12731 if (TREE_CODE (arg0) == ABS_EXPR
12732 && (integer_zerop (arg1) || real_zerop (arg1)))
12733 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12734
12735 /* If this is an EQ or NE comparison with zero and ARG0 is
12736 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12737 two operations, but the latter can be done in one less insn
12738 on machines that have only two-operand insns or on which a
12739 constant cannot be the first operand. */
12740 if (TREE_CODE (arg0) == BIT_AND_EXPR
12741 && integer_zerop (arg1))
12742 {
12743 tree arg00 = TREE_OPERAND (arg0, 0);
12744 tree arg01 = TREE_OPERAND (arg0, 1);
12745 if (TREE_CODE (arg00) == LSHIFT_EXPR
12746 && integer_onep (TREE_OPERAND (arg00, 0)))
12747 {
12748 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12749 arg01, TREE_OPERAND (arg00, 1));
12750 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12751 build_int_cst (TREE_TYPE (arg0), 1));
12752 return fold_build2_loc (loc, code, type,
12753 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12754 arg1);
12755 }
12756 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12757 && integer_onep (TREE_OPERAND (arg01, 0)))
12758 {
12759 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12760 arg00, TREE_OPERAND (arg01, 1));
12761 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12762 build_int_cst (TREE_TYPE (arg0), 1));
12763 return fold_build2_loc (loc, code, type,
12764 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12765 arg1);
12766 }
12767 }
12768
12769 /* If this is an NE or EQ comparison of zero against the result of a
12770 signed MOD operation whose second operand is a power of 2, make
12771 the MOD operation unsigned since it is simpler and equivalent. */
12772 if (integer_zerop (arg1)
12773 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12774 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12775 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12776 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12777 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12778 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12779 {
12780 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12781 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12782 fold_convert_loc (loc, newtype,
12783 TREE_OPERAND (arg0, 0)),
12784 fold_convert_loc (loc, newtype,
12785 TREE_OPERAND (arg0, 1)));
12786
12787 return fold_build2_loc (loc, code, type, newmod,
12788 fold_convert_loc (loc, newtype, arg1));
12789 }
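      /* For example, for signed X, X % 16 == 0 becomes
	 (unsigned) X % 16 == 0, which the TRUNC_MOD_EXPR rules above
	 immediately reduce to ((unsigned) X & 15) == 0.  */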
12790
12791 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12792 C1 is a valid shift constant, and C2 is a power of two, i.e.
12793 a single bit. */
12794 if (TREE_CODE (arg0) == BIT_AND_EXPR
12795 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12796 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12797 == INTEGER_CST
12798 && integer_pow2p (TREE_OPERAND (arg0, 1))
12799 && integer_zerop (arg1))
12800 {
12801 tree itype = TREE_TYPE (arg0);
12802 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12803 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12804
12805 /* Check for a valid shift count. */
12806 if (TREE_INT_CST_HIGH (arg001) == 0
12807 && TREE_INT_CST_LOW (arg001) < prec)
12808 {
12809 tree arg01 = TREE_OPERAND (arg0, 1);
12810 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12811 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12812 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12813 can be rewritten as (X & (C2 << C1)) != 0. */
12814 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12815 {
12816 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12817 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12818 return fold_build2_loc (loc, code, type, tem,
12819 fold_convert_loc (loc, itype, arg1));
12820 }
12821 /* Otherwise, for signed (arithmetic) shifts,
12822 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12823 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12824 else if (!TYPE_UNSIGNED (itype))
12825 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12826 arg000, build_int_cst (itype, 0));
12827 	      /* Otherwise, for unsigned (logical) shifts,
12828 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12829 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12830 else
12831 return omit_one_operand_loc (loc, type,
12832 code == EQ_EXPR ? integer_one_node
12833 : integer_zero_node,
12834 arg000);
12835 }
12836 }
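      /* For example, ((X >> 3) & 4) != 0 becomes (X & 32) != 0, since
	 4 << 3 == 32 stays within the precision.  When C2 << C1 would
	 overflow, the tested bit is the sign bit: for signed 32-bit X,
	 ((X >> 2) & 0x40000000) != 0 becomes X < 0.  */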
12837
12838 /* If we have (A & C) == C where C is a power of 2, convert this into
12839 (A & C) != 0. Similarly for NE_EXPR. */
12840 if (TREE_CODE (arg0) == BIT_AND_EXPR
12841 && integer_pow2p (TREE_OPERAND (arg0, 1))
12842 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12843 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12844 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12845 integer_zero_node));
12846
12847 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12848 bit, then fold the expression into A < 0 or A >= 0. */
12849 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12850 if (tem)
12851 return tem;
12852
12853 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12854 Similarly for NE_EXPR. */
12855 if (TREE_CODE (arg0) == BIT_AND_EXPR
12856 && TREE_CODE (arg1) == INTEGER_CST
12857 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12858 {
12859 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12860 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12861 TREE_OPERAND (arg0, 1));
12862 tree dandnotc
12863 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12864 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12865 notc);
12866 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12867 if (integer_nonzerop (dandnotc))
12868 return omit_one_operand_loc (loc, type, rslt, arg0);
12869 }
12870
12871 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12872 Similarly for NE_EXPR. */
12873 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12874 && TREE_CODE (arg1) == INTEGER_CST
12875 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12876 {
12877 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12878 tree candnotd
12879 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12880 TREE_OPERAND (arg0, 1),
12881 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12882 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12883 if (integer_nonzerop (candnotd))
12884 return omit_one_operand_loc (loc, type, rslt, arg0);
12885 }
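      /* For example, (X & 3) == 4 folds to 0, since bit 2 of the AND's
	 result is always clear; likewise (X | 4) == 3 folds to 0, since
	 bit 2 of the OR's result is always set.  */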
12886
12887 /* If this is a comparison of a field, we may be able to simplify it. */
12888 if ((TREE_CODE (arg0) == COMPONENT_REF
12889 || TREE_CODE (arg0) == BIT_FIELD_REF)
12890 /* Handle the constant case even without -O
12891 to make sure the warnings are given. */
12892 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12893 {
12894 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12895 if (t1)
12896 return t1;
12897 }
12898
12899 /* Optimize comparisons of strlen vs zero to a compare of the
12900 first character of the string vs zero. To wit,
12901 strlen(ptr) == 0 => *ptr == 0
12902 strlen(ptr) != 0 => *ptr != 0
12903 Other cases should reduce to one of these two (or a constant)
12904 due to the return value of strlen being unsigned. */
12905 if (TREE_CODE (arg0) == CALL_EXPR
12906 && integer_zerop (arg1))
12907 {
12908 tree fndecl = get_callee_fndecl (arg0);
12909
12910 if (fndecl
12911 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12912 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12913 && call_expr_nargs (arg0) == 1
12914 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12915 {
12916 tree iref = build_fold_indirect_ref_loc (loc,
12917 CALL_EXPR_ARG (arg0, 0));
12918 return fold_build2_loc (loc, code, type, iref,
12919 build_int_cst (TREE_TYPE (iref), 0));
12920 }
12921 }
12922
12923 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12924 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12925 if (TREE_CODE (arg0) == RSHIFT_EXPR
12926 && integer_zerop (arg1)
12927 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12928 {
12929 tree arg00 = TREE_OPERAND (arg0, 0);
12930 tree arg01 = TREE_OPERAND (arg0, 1);
12931 tree itype = TREE_TYPE (arg00);
12932 if (TREE_INT_CST_HIGH (arg01) == 0
12933 && TREE_INT_CST_LOW (arg01)
12934 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12935 {
12936 if (TYPE_UNSIGNED (itype))
12937 {
12938 itype = signed_type_for (itype);
12939 arg00 = fold_convert_loc (loc, itype, arg00);
12940 }
12941 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12942 type, arg00, build_zero_cst (itype));
12943 }
12944 }
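      /* For example, for 32-bit int X, (X >> 31) != 0 becomes X < 0 and
	 (X >> 31) == 0 becomes X >= 0; an unsigned X is first converted
	 to the signed type.  */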
12945
12946 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12947 if (integer_zerop (arg1)
12948 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12949 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12950 TREE_OPERAND (arg0, 1));
12951
12952 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12953 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12954 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12955 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12956 build_zero_cst (TREE_TYPE (arg0)));
12957 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12958 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12959 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12960 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12961 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12962 build_zero_cst (TREE_TYPE (arg0)));
12963
12964 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12965 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12966 && TREE_CODE (arg1) == INTEGER_CST
12967 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12968 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12969 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12970 TREE_OPERAND (arg0, 1), arg1));
12971
12972 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12973 (X & C) == 0 when C is a single bit. */
12974 if (TREE_CODE (arg0) == BIT_AND_EXPR
12975 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12976 && integer_zerop (arg1)
12977 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12978 {
12979 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12980 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12981 TREE_OPERAND (arg0, 1));
12982 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12983 type, tem,
12984 fold_convert_loc (loc, TREE_TYPE (arg0),
12985 arg1));
12986 }
12987
12988 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12989 constant C is a power of two, i.e. a single bit. */
12990 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12991 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12992 && integer_zerop (arg1)
12993 && integer_pow2p (TREE_OPERAND (arg0, 1))
12994 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12995 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12996 {
12997 tree arg00 = TREE_OPERAND (arg0, 0);
12998 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12999 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13000 }
13001
13002 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13003 	 when C is a power of two, i.e. a single bit.  */
13004 if (TREE_CODE (arg0) == BIT_AND_EXPR
13005 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13006 && integer_zerop (arg1)
13007 && integer_pow2p (TREE_OPERAND (arg0, 1))
13008 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13009 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13010 {
13011 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13012 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13013 arg000, TREE_OPERAND (arg0, 1));
13014 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13015 tem, build_int_cst (TREE_TYPE (tem), 0));
13016 }
13017
13018 if (integer_zerop (arg1)
13019 && tree_expr_nonzero_p (arg0))
13020 {
13021 	  tree res = constant_boolean_node (code == NE_EXPR, type);
13022 return omit_one_operand_loc (loc, type, res, arg0);
13023 }
13024
13025 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13026 if (TREE_CODE (arg0) == NEGATE_EXPR
13027 && TREE_CODE (arg1) == NEGATE_EXPR)
13028 return fold_build2_loc (loc, code, type,
13029 TREE_OPERAND (arg0, 0),
13030 fold_convert_loc (loc, TREE_TYPE (arg0),
13031 TREE_OPERAND (arg1, 0)));
13032
13033       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
13034 if (TREE_CODE (arg0) == BIT_AND_EXPR
13035 && TREE_CODE (arg1) == BIT_AND_EXPR)
13036 {
13037 tree arg00 = TREE_OPERAND (arg0, 0);
13038 tree arg01 = TREE_OPERAND (arg0, 1);
13039 tree arg10 = TREE_OPERAND (arg1, 0);
13040 tree arg11 = TREE_OPERAND (arg1, 1);
13041 tree itype = TREE_TYPE (arg0);
13042
13043 if (operand_equal_p (arg01, arg11, 0))
13044 return fold_build2_loc (loc, code, type,
13045 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13046 fold_build2_loc (loc,
13047 BIT_XOR_EXPR, itype,
13048 arg00, arg10),
13049 arg01),
13050 build_zero_cst (itype));
13051
13052 if (operand_equal_p (arg01, arg10, 0))
13053 return fold_build2_loc (loc, code, type,
13054 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13055 fold_build2_loc (loc,
13056 BIT_XOR_EXPR, itype,
13057 arg00, arg11),
13058 arg01),
13059 build_zero_cst (itype));
13060
13061 if (operand_equal_p (arg00, arg11, 0))
13062 return fold_build2_loc (loc, code, type,
13063 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13064 fold_build2_loc (loc,
13065 BIT_XOR_EXPR, itype,
13066 arg01, arg10),
13067 arg00),
13068 build_zero_cst (itype));
13069
13070 if (operand_equal_p (arg00, arg10, 0))
13071 return fold_build2_loc (loc, code, type,
13072 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13073 fold_build2_loc (loc,
13074 BIT_XOR_EXPR, itype,
13075 arg01, arg11),
13076 arg00),
13077 build_zero_cst (itype));
13078 }
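      /* For example, (X & 7) == (Y & 7) becomes ((X ^ Y) & 7) == 0:
	 the masked values are equal exactly when X and Y do not differ
	 in any of the mask bits.  */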
13079
13080 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13081 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13082 {
13083 tree arg00 = TREE_OPERAND (arg0, 0);
13084 tree arg01 = TREE_OPERAND (arg0, 1);
13085 tree arg10 = TREE_OPERAND (arg1, 0);
13086 tree arg11 = TREE_OPERAND (arg1, 1);
13087 tree itype = TREE_TYPE (arg0);
13088
13089 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13090 operand_equal_p guarantees no side-effects so we don't need
13091 to use omit_one_operand on Z. */
13092 if (operand_equal_p (arg01, arg11, 0))
13093 return fold_build2_loc (loc, code, type, arg00,
13094 fold_convert_loc (loc, TREE_TYPE (arg00),
13095 arg10));
13096 if (operand_equal_p (arg01, arg10, 0))
13097 return fold_build2_loc (loc, code, type, arg00,
13098 fold_convert_loc (loc, TREE_TYPE (arg00),
13099 arg11));
13100 if (operand_equal_p (arg00, arg11, 0))
13101 return fold_build2_loc (loc, code, type, arg01,
13102 fold_convert_loc (loc, TREE_TYPE (arg01),
13103 arg10));
13104 if (operand_equal_p (arg00, arg10, 0))
13105 return fold_build2_loc (loc, code, type, arg01,
13106 fold_convert_loc (loc, TREE_TYPE (arg01),
13107 arg11));
13108
13109 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13110 if (TREE_CODE (arg01) == INTEGER_CST
13111 && TREE_CODE (arg11) == INTEGER_CST)
13112 {
13113 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13114 fold_convert_loc (loc, itype, arg11));
13115 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13116 return fold_build2_loc (loc, code, type, tem,
13117 fold_convert_loc (loc, itype, arg10));
13118 }
13119 }
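      /* For example, (X ^ Z) == (Y ^ Z) becomes X == Y, since XOR by
	 the same value is a bijection, and (X ^ 5) == (Y ^ 3) becomes
	 (X ^ 6) == Y.  */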
13120
13121 /* Attempt to simplify equality/inequality comparisons of complex
13122 values. Only lower the comparison if the result is known or
13123 can be simplified to a single scalar comparison. */
13124 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13125 || TREE_CODE (arg0) == COMPLEX_CST)
13126 && (TREE_CODE (arg1) == COMPLEX_EXPR
13127 || TREE_CODE (arg1) == COMPLEX_CST))
13128 {
13129 tree real0, imag0, real1, imag1;
13130 tree rcond, icond;
13131
13132 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13133 {
13134 real0 = TREE_OPERAND (arg0, 0);
13135 imag0 = TREE_OPERAND (arg0, 1);
13136 }
13137 else
13138 {
13139 real0 = TREE_REALPART (arg0);
13140 imag0 = TREE_IMAGPART (arg0);
13141 }
13142
13143 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13144 {
13145 real1 = TREE_OPERAND (arg1, 0);
13146 imag1 = TREE_OPERAND (arg1, 1);
13147 }
13148 else
13149 {
13150 real1 = TREE_REALPART (arg1);
13151 imag1 = TREE_IMAGPART (arg1);
13152 }
13153
13154 rcond = fold_binary_loc (loc, code, type, real0, real1);
13155 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13156 {
13157 if (integer_zerop (rcond))
13158 {
13159 if (code == EQ_EXPR)
13160 return omit_two_operands_loc (loc, type, boolean_false_node,
13161 imag0, imag1);
13162 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13163 }
13164 else
13165 {
13166 if (code == NE_EXPR)
13167 return omit_two_operands_loc (loc, type, boolean_true_node,
13168 imag0, imag1);
13169 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13170 }
13171 }
13172
13173 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13174 if (icond && TREE_CODE (icond) == INTEGER_CST)
13175 {
13176 if (integer_zerop (icond))
13177 {
13178 if (code == EQ_EXPR)
13179 return omit_two_operands_loc (loc, type, boolean_false_node,
13180 real0, real1);
13181 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13182 }
13183 else
13184 {
13185 if (code == NE_EXPR)
13186 return omit_two_operands_loc (loc, type, boolean_true_node,
13187 real0, real1);
13188 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13189 }
13190 }
13191 }
13192
13193 return NULL_TREE;
13194
13195 case LT_EXPR:
13196 case GT_EXPR:
13197 case LE_EXPR:
13198 case GE_EXPR:
13199 tem = fold_comparison (loc, code, type, op0, op1);
13200 if (tem != NULL_TREE)
13201 return tem;
13202
13203 /* Transform comparisons of the form X +- C CMP X. */
13204 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13205 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13206 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13207 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13208 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13209 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13210 {
13211 tree arg01 = TREE_OPERAND (arg0, 1);
13212 enum tree_code code0 = TREE_CODE (arg0);
13213 int is_positive;
13214
13215 if (TREE_CODE (arg01) == REAL_CST)
13216 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13217 else
13218 is_positive = tree_int_cst_sgn (arg01);
13219
13220 /* (X - c) > X becomes false. */
13221 if (code == GT_EXPR
13222 && ((code0 == MINUS_EXPR && is_positive >= 0)
13223 || (code0 == PLUS_EXPR && is_positive <= 0)))
13224 {
13225 if (TREE_CODE (arg01) == INTEGER_CST
13226 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13227 fold_overflow_warning (("assuming signed overflow does not "
13228 "occur when assuming that (X - c) > X "
13229 "is always false"),
13230 WARN_STRICT_OVERFLOW_ALL);
13231 return constant_boolean_node (0, type);
13232 }
13233
13234 /* Likewise (X + c) < X becomes false. */
13235 if (code == LT_EXPR
13236 && ((code0 == PLUS_EXPR && is_positive >= 0)
13237 || (code0 == MINUS_EXPR && is_positive <= 0)))
13238 {
13239 if (TREE_CODE (arg01) == INTEGER_CST
13240 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13241 fold_overflow_warning (("assuming signed overflow does not "
13242 "occur when assuming that "
13243 "(X + c) < X is always false"),
13244 WARN_STRICT_OVERFLOW_ALL);
13245 return constant_boolean_node (0, type);
13246 }
13247
13248 /* Convert (X - c) <= X to true. */
13249 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13250 && code == LE_EXPR
13251 && ((code0 == MINUS_EXPR && is_positive >= 0)
13252 || (code0 == PLUS_EXPR && is_positive <= 0)))
13253 {
13254 if (TREE_CODE (arg01) == INTEGER_CST
13255 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13256 fold_overflow_warning (("assuming signed overflow does not "
13257 "occur when assuming that "
13258 "(X - c) <= X is always true"),
13259 WARN_STRICT_OVERFLOW_ALL);
13260 return constant_boolean_node (1, type);
13261 }
13262
13263 /* Convert (X + c) >= X to true. */
13264 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13265 && code == GE_EXPR
13266 && ((code0 == PLUS_EXPR && is_positive >= 0)
13267 || (code0 == MINUS_EXPR && is_positive <= 0)))
13268 {
13269 if (TREE_CODE (arg01) == INTEGER_CST
13270 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13271 fold_overflow_warning (("assuming signed overflow does not "
13272 "occur when assuming that "
13273 "(X + c) >= X is always true"),
13274 WARN_STRICT_OVERFLOW_ALL);
13275 return constant_boolean_node (1, type);
13276 }
13277
13278 if (TREE_CODE (arg01) == INTEGER_CST)
13279 {
13280 /* Convert X + c > X and X - c < X to true for integers. */
13281 if (code == GT_EXPR
13282 && ((code0 == PLUS_EXPR && is_positive > 0)
13283 || (code0 == MINUS_EXPR && is_positive < 0)))
13284 {
13285 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13286 fold_overflow_warning (("assuming signed overflow does "
13287 "not occur when assuming that "
13288 "(X + c) > X is always true"),
13289 WARN_STRICT_OVERFLOW_ALL);
13290 return constant_boolean_node (1, type);
13291 }
13292
13293 if (code == LT_EXPR
13294 && ((code0 == MINUS_EXPR && is_positive > 0)
13295 || (code0 == PLUS_EXPR && is_positive < 0)))
13296 {
13297 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13298 fold_overflow_warning (("assuming signed overflow does "
13299 "not occur when assuming that "
13300 "(X - c) < X is always true"),
13301 WARN_STRICT_OVERFLOW_ALL);
13302 return constant_boolean_node (1, type);
13303 }
13304
13305 /* Convert X + c <= X and X - c >= X to false for integers. */
13306 if (code == LE_EXPR
13307 && ((code0 == PLUS_EXPR && is_positive > 0)
13308 || (code0 == MINUS_EXPR && is_positive < 0)))
13309 {
13310 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13311 fold_overflow_warning (("assuming signed overflow does "
13312 "not occur when assuming that "
13313 "(X + c) <= X is always false"),
13314 WARN_STRICT_OVERFLOW_ALL);
13315 return constant_boolean_node (0, type);
13316 }
13317
13318 if (code == GE_EXPR
13319 && ((code0 == MINUS_EXPR && is_positive > 0)
13320 || (code0 == PLUS_EXPR && is_positive < 0)))
13321 {
13322 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13323 fold_overflow_warning (("assuming signed overflow does "
13324 "not occur when assuming that "
13325 "(X - c) >= X is always false"),
13326 WARN_STRICT_OVERFLOW_ALL);
13327 return constant_boolean_node (0, type);
13328 }
13329 }
13330 }
13331
13332 /* Comparisons with the highest or lowest possible integer of
13333 the specified precision will have known values. */
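      /* For instance, for unsigned char X (an illustrative sketch):
	   X >  255  folds to false	   X <= 255  folds to true
	   X >= 255  folds to X == 255	   X <  255  folds to X != 255
	 and the minimum value, as well as max - 1 and min + 1, are
	 handled likewise below.  */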
13334 {
13335 tree arg1_type = TREE_TYPE (arg1);
13336 unsigned int width = TYPE_PRECISION (arg1_type);
13337
13338 if (TREE_CODE (arg1) == INTEGER_CST
13339 && width <= HOST_BITS_PER_DOUBLE_INT
13340 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13341 {
13342 HOST_WIDE_INT signed_max_hi;
13343 unsigned HOST_WIDE_INT signed_max_lo;
13344 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13345
13346 if (width <= HOST_BITS_PER_WIDE_INT)
13347 {
13348 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13349 - 1;
13350 signed_max_hi = 0;
13351 max_hi = 0;
13352
13353 if (TYPE_UNSIGNED (arg1_type))
13354 {
13355 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13356 min_lo = 0;
13357 min_hi = 0;
13358 }
13359 else
13360 {
13361 max_lo = signed_max_lo;
13362 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13363 min_hi = -1;
13364 }
13365 }
13366 else
13367 {
13368 width -= HOST_BITS_PER_WIDE_INT;
13369 signed_max_lo = -1;
13370 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13371 - 1;
13372 max_lo = -1;
13373 min_lo = 0;
13374
13375 if (TYPE_UNSIGNED (arg1_type))
13376 {
13377 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13378 min_hi = 0;
13379 }
13380 else
13381 {
13382 max_hi = signed_max_hi;
13383 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13384 }
13385 }
13386
13387 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13388 && TREE_INT_CST_LOW (arg1) == max_lo)
13389 switch (code)
13390 {
13391 case GT_EXPR:
13392 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13393
13394 case GE_EXPR:
13395 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13396
13397 case LE_EXPR:
13398 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13399
13400 case LT_EXPR:
13401 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13402
13403 /* The GE_EXPR and LT_EXPR cases above are not normally
13404 reached because of previous transformations. */
13405
13406 default:
13407 break;
13408 }
13409 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13410 == max_hi
13411 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13412 switch (code)
13413 {
13414 case GT_EXPR:
13415 arg1 = const_binop (PLUS_EXPR, arg1,
13416 build_int_cst (TREE_TYPE (arg1), 1));
13417 return fold_build2_loc (loc, EQ_EXPR, type,
13418 fold_convert_loc (loc,
13419 TREE_TYPE (arg1), arg0),
13420 arg1);
13421 case LE_EXPR:
13422 arg1 = const_binop (PLUS_EXPR, arg1,
13423 build_int_cst (TREE_TYPE (arg1), 1));
13424 return fold_build2_loc (loc, NE_EXPR, type,
13425 fold_convert_loc (loc, TREE_TYPE (arg1),
13426 arg0),
13427 arg1);
13428 default:
13429 break;
13430 }
13431 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13432 == min_hi
13433 && TREE_INT_CST_LOW (arg1) == min_lo)
13434 switch (code)
13435 {
13436 case LT_EXPR:
13437 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13438
13439 case LE_EXPR:
13440 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13441
13442 case GE_EXPR:
13443 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13444
13445 case GT_EXPR:
13446 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13447
13448 default:
13449 break;
13450 }
13451 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13452 == min_hi
13453 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13454 switch (code)
13455 {
13456 case GE_EXPR:
13457 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13458 return fold_build2_loc (loc, NE_EXPR, type,
13459 fold_convert_loc (loc,
13460 TREE_TYPE (arg1), arg0),
13461 arg1);
13462 case LT_EXPR:
13463 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13464 return fold_build2_loc (loc, EQ_EXPR, type,
13465 fold_convert_loc (loc, TREE_TYPE (arg1),
13466 arg0),
13467 arg1);
13468 default:
13469 break;
13470 }
13471
13472 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13473 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13474 && TYPE_UNSIGNED (arg1_type)
13475 /* We will flip the signedness of the comparison operator
13476 associated with the mode of arg1, so the sign bit is
13477 specified by this mode. Check that arg1 is the signed
13478 max associated with this sign bit. */
13479 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13480 /* signed_type does not work on pointer types. */
13481 && INTEGRAL_TYPE_P (arg1_type))
13482 {
13483 /* The following case also applies to X < signed_max+1
13484 and X >= signed_max+1 because of previous transformations. */
13485 if (code == LE_EXPR || code == GT_EXPR)
13486 {
13487 tree st;
13488 st = signed_type_for (TREE_TYPE (arg1));
13489 return fold_build2_loc (loc,
13490 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13491 type, fold_convert_loc (loc, st, arg0),
13492 build_int_cst (st, 0));
13493 }
13494 }
13495 }
13496 }
13497
13498 /* If we are comparing an ABS_EXPR with a constant, we can
13499 convert all the cases into explicit comparisons, but they may
13500 well not be faster than doing the ABS and one comparison.
13501 But ABS (X) <= C is a range comparison, which becomes a subtraction
13502 and a comparison, and is probably faster. */
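      /* e.g. ABS (X) <= 3 folds to X >= -3 && X <= 3; later folding can
	 turn such a range test into a form like (unsigned) (X + 3) <= 6
	 (a sketch of the motivation, not a transformation done here).  */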
13503 if (code == LE_EXPR
13504 && TREE_CODE (arg1) == INTEGER_CST
13505 && TREE_CODE (arg0) == ABS_EXPR
13506 && ! TREE_SIDE_EFFECTS (arg0)
13507 && (0 != (tem = negate_expr (arg1)))
13508 && TREE_CODE (tem) == INTEGER_CST
13509 && !TREE_OVERFLOW (tem))
13510 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13511 build2 (GE_EXPR, type,
13512 TREE_OPERAND (arg0, 0), tem),
13513 build2 (LE_EXPR, type,
13514 TREE_OPERAND (arg0, 0), arg1));
13515
13516 /* Convert ABS_EXPR<x> >= 0 to true. */
13517 strict_overflow_p = false;
13518 if (code == GE_EXPR
13519 && (integer_zerop (arg1)
13520 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13521 && real_zerop (arg1)))
13522 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13523 {
13524 if (strict_overflow_p)
13525 fold_overflow_warning (("assuming signed overflow does not occur "
13526 "when simplifying comparison of "
13527 "absolute value and zero"),
13528 WARN_STRICT_OVERFLOW_CONDITIONAL);
13529 return omit_one_operand_loc (loc, type,
13530 constant_boolean_node (true, type),
13531 arg0);
13532 }
13533
13534 /* Convert ABS_EXPR<x> < 0 to false. */
13535 strict_overflow_p = false;
13536 if (code == LT_EXPR
13537 && (integer_zerop (arg1) || real_zerop (arg1))
13538 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13539 {
13540 if (strict_overflow_p)
13541 fold_overflow_warning (("assuming signed overflow does not occur "
13542 "when simplifying comparison of "
13543 "absolute value and zero"),
13544 WARN_STRICT_OVERFLOW_CONDITIONAL);
13545 return omit_one_operand_loc (loc, type,
13546 constant_boolean_node (false, type),
13547 arg0);
13548 }
13549
13550 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13551 and similarly for >= into !=. */
13552 if ((code == LT_EXPR || code == GE_EXPR)
13553 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13554 && TREE_CODE (arg1) == LSHIFT_EXPR
13555 && integer_onep (TREE_OPERAND (arg1, 0)))
13556 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13557 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13558 TREE_OPERAND (arg1, 1)),
13559 build_zero_cst (TREE_TYPE (arg0)));
13560
13561 if ((code == LT_EXPR || code == GE_EXPR)
13562 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13563 && CONVERT_EXPR_P (arg1)
13564 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13565 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13566 {
13567 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13568 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13569 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13570 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13571 build_zero_cst (TREE_TYPE (arg0)));
13572 }
13573
13574 return NULL_TREE;
13575
13576 case UNORDERED_EXPR:
13577 case ORDERED_EXPR:
13578 case UNLT_EXPR:
13579 case UNLE_EXPR:
13580 case UNGT_EXPR:
13581 case UNGE_EXPR:
13582 case UNEQ_EXPR:
13583 case LTGT_EXPR:
13584 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13585 {
13586 t1 = fold_relational_const (code, type, arg0, arg1);
13587 if (t1 != NULL_TREE)
13588 return t1;
13589 }
13590
13591 /* If the first operand is NaN, the result is constant. */
13592 if (TREE_CODE (arg0) == REAL_CST
13593 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13594 && (code != LTGT_EXPR || ! flag_trapping_math))
13595 {
13596 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13597 ? integer_zero_node
13598 : integer_one_node;
13599 return omit_one_operand_loc (loc, type, t1, arg1);
13600 }
13601
13602 /* If the second operand is NaN, the result is constant. */
13603 if (TREE_CODE (arg1) == REAL_CST
13604 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13605 && (code != LTGT_EXPR || ! flag_trapping_math))
13606 {
13607 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13608 ? integer_zero_node
13609 : integer_one_node;
13610 return omit_one_operand_loc (loc, type, t1, arg0);
13611 }
13612
13613 /* Simplify unordered comparison of something with itself. */
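      /* X UNEQ X is always true: either X is NaN, making the comparison
	 unordered, or X equals itself.  Dually, X LTGT X is false, but
	 LTGT may raise an exception on NaN, hence the !flag_trapping_math
	 guard below.  */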
13614 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13615 && operand_equal_p (arg0, arg1, 0))
13616 return constant_boolean_node (1, type);
13617
13618 if (code == LTGT_EXPR
13619 && !flag_trapping_math
13620 && operand_equal_p (arg0, arg1, 0))
13621 return constant_boolean_node (0, type);
13622
13623 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
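      /* This is safe because widening a value to a wider floating-point
	 format is exact, so comparing in the narrowest common type gives
	 the same result; e.g. (double) f1 < (double) f2 with float
	 operands f1 and f2 folds to f1 < f2.  */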
13624 {
13625 tree targ0 = strip_float_extensions (arg0);
13626 tree targ1 = strip_float_extensions (arg1);
13627 tree newtype = TREE_TYPE (targ0);
13628
13629 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13630 newtype = TREE_TYPE (targ1);
13631
13632 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13633 return fold_build2_loc (loc, code, type,
13634 fold_convert_loc (loc, newtype, targ0),
13635 fold_convert_loc (loc, newtype, targ1));
13636 }
13637
13638 return NULL_TREE;
13639
13640 case COMPOUND_EXPR:
13641 /* When pedantic, a compound expression can be neither an lvalue
13642 nor an integer constant expression. */
13643 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13644 return NULL_TREE;
13645 /* Don't let (0, 0) be a null pointer constant. */
13646 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13647 : fold_convert_loc (loc, type, arg1);
13648 return pedantic_non_lvalue_loc (loc, tem);
13649
13650 case COMPLEX_EXPR:
13651 if ((TREE_CODE (arg0) == REAL_CST
13652 && TREE_CODE (arg1) == REAL_CST)
13653 || (TREE_CODE (arg0) == INTEGER_CST
13654 && TREE_CODE (arg1) == INTEGER_CST))
13655 return build_complex (type, arg0, arg1);
13656 if (TREE_CODE (arg0) == REALPART_EXPR
13657 && TREE_CODE (arg1) == IMAGPART_EXPR
13658 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13659 && operand_equal_p (TREE_OPERAND (arg0, 0),
13660 TREE_OPERAND (arg1, 0), 0))
13661 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13662 TREE_OPERAND (arg1, 0));
13663 return NULL_TREE;
13664
13665 case ASSERT_EXPR:
13666 /* An ASSERT_EXPR should never be passed to fold_binary. */
13667 gcc_unreachable ();
13668
13669 case VEC_PACK_TRUNC_EXPR:
13670 case VEC_PACK_FIX_TRUNC_EXPR:
13671 {
13672 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13673 tree *elts;
13674
13675 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13676 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13677 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13678 return NULL_TREE;
13679
13680 elts = XALLOCAVEC (tree, nelts);
13681 if (!vec_cst_ctor_to_array (arg0, elts)
13682 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13683 return NULL_TREE;
13684
13685 for (i = 0; i < nelts; i++)
13686 {
13687 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13688 ? NOP_EXPR : FIX_TRUNC_EXPR,
13689 TREE_TYPE (type), elts[i]);
13690 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13691 return NULL_TREE;
13692 }
13693
13694 return build_vector (type, elts);
13695 }
13696
13697 case VEC_WIDEN_MULT_LO_EXPR:
13698 case VEC_WIDEN_MULT_HI_EXPR:
13699 case VEC_WIDEN_MULT_EVEN_EXPR:
13700 case VEC_WIDEN_MULT_ODD_EXPR:
13701 {
13702 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13703 unsigned int out, ofs, scale;
13704 tree *elts;
13705
13706 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13707 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13708 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13709 return NULL_TREE;
13710
13711 elts = XALLOCAVEC (tree, nelts * 4);
13712 if (!vec_cst_ctor_to_array (arg0, elts)
13713 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13714 return NULL_TREE;
13715
13716 if (code == VEC_WIDEN_MULT_LO_EXPR)
13717 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13718 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13719 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13720 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13721 scale = 1, ofs = 0;
13722 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13723 scale = 1, ofs = 1;
13724
13725 for (out = 0; out < nelts; out++)
13726 {
13727 unsigned int in1 = (out << scale) + ofs;
13728 unsigned int in2 = in1 + nelts * 2;
13729 tree t1, t2;
13730
13731 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13732 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13733
13734 if (t1 == NULL_TREE || t2 == NULL_TREE)
13735 return NULL_TREE;
13736 elts[out] = const_binop (MULT_EXPR, t1, t2);
13737 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13738 return NULL_TREE;
13739 }
13740
13741 return build_vector (type, elts);
13742 }
13743
13744 default:
13745 return NULL_TREE;
13746 } /* switch (code) */
13747 }
13748
13749 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13750 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13751 of GOTO_EXPR. */
13752
13753 static tree
13754 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13755 {
13756 switch (TREE_CODE (*tp))
13757 {
13758 case LABEL_EXPR:
13759 return *tp;
13760
13761 case GOTO_EXPR:
13762 *walk_subtrees = 0;
13763
13764 /* ... fall through ... */
13765
13766 default:
13767 return NULL_TREE;
13768 }
13769 }
13770
13771 /* Return whether the sub-tree ST contains a label which is accessible from
13772 outside the sub-tree. */
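/* A GOTO_EXPR only refers to a label, it does not define one, so its
   operand need not be examined by contains_label_1 above.  */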
13773
13774 static bool
13775 contains_label_p (tree st)
13776 {
13777 return
13778 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13779 }
13780
13781 /* Fold a ternary expression of code CODE and type TYPE with operands
13782 OP0, OP1, and OP2. Return the folded expression if folding is
13783 successful. Otherwise, return NULL_TREE. */
13784
13785 tree
13786 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13787 tree op0, tree op1, tree op2)
13788 {
13789 tree tem;
13790 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13791 enum tree_code_class kind = TREE_CODE_CLASS (code);
13792
13793 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13794 && TREE_CODE_LENGTH (code) == 3);
13795
13796 /* Strip any conversions that don't change the mode. This is safe
13797 for every expression, except for a comparison expression because
13798 its signedness is derived from its operands. So, in the latter
13799 case, only strip conversions that don't change the signedness.
13800
13801 Note that this is done as an internal manipulation within the
13802 constant folder, in order to find the simplest representation of
13803 the arguments so that their form can be studied. In any case,
13804 the appropriate type conversions should be put back in the tree
13805 that will get out of the constant folder. */
13806 if (op0)
13807 {
13808 arg0 = op0;
13809 STRIP_NOPS (arg0);
13810 }
13811
13812 if (op1)
13813 {
13814 arg1 = op1;
13815 STRIP_NOPS (arg1);
13816 }
13817
13818 if (op2)
13819 {
13820 arg2 = op2;
13821 STRIP_NOPS (arg2);
13822 }
13823
13824 switch (code)
13825 {
13826 case COMPONENT_REF:
13827 if (TREE_CODE (arg0) == CONSTRUCTOR
13828 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13829 {
13830 unsigned HOST_WIDE_INT idx;
13831 tree field, value;
13832 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13833 if (field == arg1)
13834 return value;
13835 }
13836 return NULL_TREE;
13837
13838 case COND_EXPR:
13839 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13840 so all simple results must be passed through pedantic_non_lvalue. */
13841 if (TREE_CODE (arg0) == INTEGER_CST)
13842 {
13843 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13844 tem = integer_zerop (arg0) ? op2 : op1;
13845 /* Only optimize constant conditions when the selected branch
13846 has the same type as the COND_EXPR. This avoids optimizing
13847 away "c ? x : throw", where the throw has a void type.
13848 Avoid throwing away an operand that contains a label. */
13849 if ((!TREE_SIDE_EFFECTS (unused_op)
13850 || !contains_label_p (unused_op))
13851 && (! VOID_TYPE_P (TREE_TYPE (tem))
13852 || VOID_TYPE_P (type)))
13853 return pedantic_non_lvalue_loc (loc, tem);
13854 return NULL_TREE;
13855 }
13856 if (operand_equal_p (arg1, op2, 0))
13857 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13858
13859 /* If we have A op B ? A : C, we may be able to convert this to a
13860 simpler expression, depending on the operation and the values
13861 of B and C. Signed zeros prevent all of these transformations,
13862 for reasons given above each one.
13863
13864 Also try swapping the arguments and inverting the conditional. */
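      /* For instance, X < Y ? X : Y may become MIN_EXPR <X, Y> and
	 X > Y ? X : Y may become MAX_EXPR <X, Y>, sketches of what
	 fold_cond_expr_with_comparison can produce.  */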
13865 if (COMPARISON_CLASS_P (arg0)
13866 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13867 arg1, TREE_OPERAND (arg0, 1))
13868 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13869 {
13870 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13871 if (tem)
13872 return tem;
13873 }
13874
13875 if (COMPARISON_CLASS_P (arg0)
13876 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13877 op2,
13878 TREE_OPERAND (arg0, 1))
13879 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13880 {
13881 location_t loc0 = expr_location_or (arg0, loc);
13882 tem = fold_truth_not_expr (loc0, arg0);
13883 if (tem && COMPARISON_CLASS_P (tem))
13884 {
13885 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13886 if (tem)
13887 return tem;
13888 }
13889 }
13890
13891 /* If the second operand is simpler than the third, swap them
13892 since that produces better jump optimization results. */
13893 if (truth_value_p (TREE_CODE (arg0))
13894 && tree_swap_operands_p (op1, op2, false))
13895 {
13896 location_t loc0 = expr_location_or (arg0, loc);
13897 /* See if this can be inverted. If it can't, possibly because
13898 it was a floating-point inequality comparison, don't do
13899 anything. */
13900 tem = fold_truth_not_expr (loc0, arg0);
13901 if (tem)
13902 return fold_build3_loc (loc, code, type, tem, op2, op1);
13903 }
13904
13905 /* Convert A ? 1 : 0 to simply A. */
13906 if (integer_onep (op1)
13907 && integer_zerop (op2)
13908 /* If we try to convert OP0 to our type, the
13909 call to fold will try to move the conversion inside
13910 a COND, which will recurse. In that case, the COND_EXPR
13911 is probably the best choice, so leave it alone. */
13912 && type == TREE_TYPE (arg0))
13913 return pedantic_non_lvalue_loc (loc, arg0);
13914
13915 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13916 over COND_EXPR in cases such as floating point comparisons. */
13917 if (integer_zerop (op1)
13918 && integer_onep (op2)
13919 && truth_value_p (TREE_CODE (arg0)))
13920 return pedantic_non_lvalue_loc (loc,
13921 fold_convert_loc (loc, type,
13922 invert_truthvalue_loc (loc,
13923 arg0)));
13924
13925 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
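      /* e.g. for a 32-bit signed A, A < 0 ? 0x80000000 : 0 folds to
	 A & 0x80000000; the code below additionally checks the bits
	 outside A's precision when the types differ.  */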
13926 if (TREE_CODE (arg0) == LT_EXPR
13927 && integer_zerop (TREE_OPERAND (arg0, 1))
13928 && integer_zerop (op2)
13929 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13930 {
13931 /* sign_bit_p only checks ARG1 bits within A's precision.
13932 If <sign bit of A> has wider type than A, bits outside
13933 of A's precision in <sign bit of A> need to be checked.
13934 If they are all 0, this optimization needs to be done
13935 in unsigned A's type; if they are all 1, in signed A's type;
13936 otherwise this can't be done. */
13937 if (TYPE_PRECISION (TREE_TYPE (tem))
13938 < TYPE_PRECISION (TREE_TYPE (arg1))
13939 && TYPE_PRECISION (TREE_TYPE (tem))
13940 < TYPE_PRECISION (type))
13941 {
13942 unsigned HOST_WIDE_INT mask_lo;
13943 HOST_WIDE_INT mask_hi;
13944 int inner_width, outer_width;
13945 tree tem_type;
13946
13947 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13948 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13949 if (outer_width > TYPE_PRECISION (type))
13950 outer_width = TYPE_PRECISION (type);
13951
13952 if (outer_width > HOST_BITS_PER_WIDE_INT)
13953 {
13954 mask_hi = ((unsigned HOST_WIDE_INT) -1
13955 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
13956 mask_lo = -1;
13957 }
13958 else
13959 {
13960 mask_hi = 0;
13961 mask_lo = ((unsigned HOST_WIDE_INT) -1
13962 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13963 }
13964 if (inner_width > HOST_BITS_PER_WIDE_INT)
13965 {
13966 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13967 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13968 mask_lo = 0;
13969 }
13970 else
13971 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13972 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13973
13974 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13975 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13976 {
13977 tem_type = signed_type_for (TREE_TYPE (tem));
13978 tem = fold_convert_loc (loc, tem_type, tem);
13979 }
13980 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13981 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13982 {
13983 tem_type = unsigned_type_for (TREE_TYPE (tem));
13984 tem = fold_convert_loc (loc, tem_type, tem);
13985 }
13986 else
13987 tem = NULL;
13988 }
13989
13990 if (tem)
13991 return
13992 fold_convert_loc (loc, type,
13993 fold_build2_loc (loc, BIT_AND_EXPR,
13994 TREE_TYPE (tem), tem,
13995 fold_convert_loc (loc,
13996 TREE_TYPE (tem),
13997 arg1)));
13998 }
13999
14000 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14001 already handled above. */
14002 if (TREE_CODE (arg0) == BIT_AND_EXPR
14003 && integer_onep (TREE_OPERAND (arg0, 1))
14004 && integer_zerop (op2)
14005 && integer_pow2p (arg1))
14006 {
14007 tree tem = TREE_OPERAND (arg0, 0);
14008 STRIP_NOPS (tem);
14009 if (TREE_CODE (tem) == RSHIFT_EXPR
14010 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14011 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14012 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14013 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14014 TREE_OPERAND (tem, 0), arg1);
14015 }
14016
14017 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14018 is probably obsolete because the first operand should be a
14019 truth value (that's why we have the two cases above), but let's
14020 leave it in until we can confirm this for all front-ends. */
14021 if (integer_zerop (op2)
14022 && TREE_CODE (arg0) == NE_EXPR
14023 && integer_zerop (TREE_OPERAND (arg0, 1))
14024 && integer_pow2p (arg1)
14025 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14026 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14027 arg1, OEP_ONLY_CONST))
14028 return pedantic_non_lvalue_loc (loc,
14029 fold_convert_loc (loc, type,
14030 TREE_OPERAND (arg0, 0)));
14031
14032 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14033 if (integer_zerop (op2)
14034 && truth_value_p (TREE_CODE (arg0))
14035 && truth_value_p (TREE_CODE (arg1)))
14036 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14037 fold_convert_loc (loc, type, arg0),
14038 arg1);
14039
14040 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14041 if (integer_onep (op2)
14042 && truth_value_p (TREE_CODE (arg0))
14043 && truth_value_p (TREE_CODE (arg1)))
14044 {
14045 location_t loc0 = expr_location_or (arg0, loc);
14046 /* Only perform transformation if ARG0 is easily inverted. */
14047 tem = fold_truth_not_expr (loc0, arg0);
14048 if (tem)
14049 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14050 fold_convert_loc (loc, type, tem),
14051 arg1);
14052 }
14053
14054 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14055 if (integer_zerop (arg1)
14056 && truth_value_p (TREE_CODE (arg0))
14057 && truth_value_p (TREE_CODE (op2)))
14058 {
14059 location_t loc0 = expr_location_or (arg0, loc);
14060 /* Only perform transformation if ARG0 is easily inverted. */
14061 tem = fold_truth_not_expr (loc0, arg0);
14062 if (tem)
14063 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14064 fold_convert_loc (loc, type, tem),
14065 op2);
14066 }
14067
14068 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14069 if (integer_onep (arg1)
14070 && truth_value_p (TREE_CODE (arg0))
14071 && truth_value_p (TREE_CODE (op2)))
14072 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14073 fold_convert_loc (loc, type, arg0),
14074 op2);
14075
14076 return NULL_TREE;
14077
14078 case VEC_COND_EXPR:
14079 if (TREE_CODE (arg0) == VECTOR_CST)
14080 {
14081 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14082 return pedantic_non_lvalue_loc (loc, op1);
14083 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14084 return pedantic_non_lvalue_loc (loc, op2);
14085 }
14086 return NULL_TREE;
14087
14088 case CALL_EXPR:
14089 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14090 of fold_ternary on them. */
14091 gcc_unreachable ();
14092
14093 case BIT_FIELD_REF:
14094 if ((TREE_CODE (arg0) == VECTOR_CST
14095 || (TREE_CODE (arg0) == CONSTRUCTOR
14096 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14097 && (type == TREE_TYPE (TREE_TYPE (arg0))
14098 || (TREE_CODE (type) == VECTOR_TYPE
14099 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14100 {
14101 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14102 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14103 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14104 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14105
14106 if (n != 0
14107 && (idx % width) == 0
14108 && (n % width) == 0
14109 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14110 {
14111 idx = idx / width;
14112 n = n / width;
14113
14114 if (TREE_CODE (arg0) == VECTOR_CST)
14115 {
14116 if (n == 1)
14117 return VECTOR_CST_ELT (arg0, idx);
14118
14119 tree *vals = XALLOCAVEC (tree, n);
14120 for (unsigned i = 0; i < n; ++i)
14121 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14122 return build_vector (type, vals);
14123 }
14124
14125 /* Constructor elements can be subvectors. */
14126 unsigned HOST_WIDE_INT k = 1;
14127 if (CONSTRUCTOR_NELTS (arg0) != 0)
14128 {
14129 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14130 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14131 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14132 }
14133
14134 /* We keep an exact subset of the constructor elements. */
14135 if ((idx % k) == 0 && (n % k) == 0)
14136 {
14137 if (CONSTRUCTOR_NELTS (arg0) == 0)
14138 return build_constructor (type, NULL);
14139 idx /= k;
14140 n /= k;
14141 if (n == 1)
14142 {
14143 if (idx < CONSTRUCTOR_NELTS (arg0))
14144 return CONSTRUCTOR_ELT (arg0, idx)->value;
14145 return build_zero_cst (type);
14146 }
14147
14148 vec<constructor_elt, va_gc> *vals;
14149 vec_alloc (vals, n);
14150 for (unsigned i = 0;
14151 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14152 ++i)
14153 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14154 CONSTRUCTOR_ELT
14155 (arg0, idx + i)->value);
14156 return build_constructor (type, vals);
14157 }
14158 /* The bitfield references a single constructor element. */
14159 else if (idx + n <= (idx / k + 1) * k)
14160 {
14161 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14162 return build_zero_cst (type);
14163 else if (n == k)
14164 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14165 else
14166 return fold_build3_loc (loc, code, type,
14167 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14168 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14169 }
14170 }
14171 }
14172
14173 /* A bit-field-ref that referenced the full argument can be stripped. */
14174 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14175 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14176 && integer_zerop (op2))
14177 return fold_convert_loc (loc, type, arg0);
14178
14179 /* On constants we can use native encode/interpret to constant
14180 fold (nearly) all BIT_FIELD_REFs. */
14181 if (CONSTANT_CLASS_P (arg0)
14182 && can_native_interpret_type_p (type)
14183 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14184 /* This limitation should not be necessary; we just need to
14185 round this up to mode size. */
14186 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14187 /* Need bit-shifting of the buffer to relax the following. */
14188 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14189 {
14190 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14191 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14192 unsigned HOST_WIDE_INT clen;
14193 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14194 /* ??? We cannot tell native_encode_expr to start at
14195 some random byte only. So limit ourselves to a reasonable amount
14196 of work. */
14197 if (clen <= 4096)
14198 {
14199 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14200 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14201 if (len > 0
14202 && len * BITS_PER_UNIT >= bitpos + bitsize)
14203 {
14204 tree v = native_interpret_expr (type,
14205 b + bitpos / BITS_PER_UNIT,
14206 bitsize / BITS_PER_UNIT);
14207 if (v)
14208 return v;
14209 }
14210 }
14211 }
14212
14213 return NULL_TREE;
14214
14215 case FMA_EXPR:
14216 /* For integers we can decompose the FMA if possible. */
14217 if (TREE_CODE (arg0) == INTEGER_CST
14218 && TREE_CODE (arg1) == INTEGER_CST)
14219 return fold_build2_loc (loc, PLUS_EXPR, type,
14220 const_binop (MULT_EXPR, arg0, arg1), arg2);
14221 if (integer_zerop (arg2))
14222 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14223
14224 return fold_fma (loc, type, arg0, arg1, arg2);
14225
14226 case VEC_PERM_EXPR:
14227 if (TREE_CODE (arg2) == VECTOR_CST)
14228 {
14229 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14230 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14231 tree t;
14232 bool need_mask_canon = false;
14233 bool all_in_vec0 = true;
14234 bool all_in_vec1 = true;
14235 bool maybe_identity = true;
14236 bool single_arg = (op0 == op1);
14237 bool changed = false;
14238
14239 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14240 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14241 for (i = 0; i < nelts; i++)
14242 {
14243 tree val = VECTOR_CST_ELT (arg2, i);
14244 if (TREE_CODE (val) != INTEGER_CST)
14245 return NULL_TREE;
14246
14247 sel[i] = TREE_INT_CST_LOW (val) & mask;
14248 if (TREE_INT_CST_HIGH (val)
14249 || ((unsigned HOST_WIDE_INT)
14250 TREE_INT_CST_LOW (val) != sel[i]))
14251 need_mask_canon = true;
14252
14253 if (sel[i] < nelts)
14254 all_in_vec1 = false;
14255 else
14256 all_in_vec0 = false;
14257
14258 if ((sel[i] & (nelts-1)) != i)
14259 maybe_identity = false;
14260 }
14261
14262 if (maybe_identity)
14263 {
14264 if (all_in_vec0)
14265 return op0;
14266 if (all_in_vec1)
14267 return op1;
14268 }
14269
14270 if (all_in_vec0)
14271 op1 = op0;
14272 else if (all_in_vec1)
14273 {
14274 op0 = op1;
14275 for (i = 0; i < nelts; i++)
14276 sel[i] -= nelts;
14277 need_mask_canon = true;
14278 }
14279
14280 if ((TREE_CODE (op0) == VECTOR_CST
14281 || TREE_CODE (op0) == CONSTRUCTOR)
14282 && (TREE_CODE (op1) == VECTOR_CST
14283 || TREE_CODE (op1) == CONSTRUCTOR))
14284 {
14285 t = fold_vec_perm (type, op0, op1, sel);
14286 if (t != NULL_TREE)
14287 return t;
14288 }
14289
14290 if (op0 == op1 && !single_arg)
14291 changed = true;
14292
14293 if (need_mask_canon && arg2 == op2)
14294 {
14295 tree *tsel = XALLOCAVEC (tree, nelts);
14296 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14297 for (i = 0; i < nelts; i++)
14298 tsel[i] = build_int_cst (eltype, sel[i]);
14299 op2 = build_vector (TREE_TYPE (arg2), tsel);
14300 changed = true;
14301 }
14302
14303 if (changed)
14304 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14305 }
14306 return NULL_TREE;
14307
14308 default:
14309 return NULL_TREE;
14310 } /* switch (code) */
14311 }
14312
14313 /* Perform constant folding and related simplification of EXPR.
14314 The related simplifications include x*1 => x, x*0 => 0, etc.,
14315 and application of the associative law.
14316 NOP_EXPR conversions may be removed freely (as long as we
14317 are careful not to change the type of the overall expression).
14318 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14319 but we can constant-fold them if they have constant operands. */
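
/* An illustrative use (a sketch, assuming the usual global type nodes):

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
			      build_int_cst (integer_type_node, 1),
			      build_int_cst (integer_type_node, 2)));

   returns the INTEGER_CST 3, while a tree that cannot be simplified is
   returned unchanged.  */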
14320
14321 #ifdef ENABLE_FOLD_CHECKING
14322 # define fold(x) fold_1 (x)
14323 static tree fold_1 (tree);
14324 static
14325 #endif
14326 tree
14327 fold (tree expr)
14328 {
14329 const tree t = expr;
14330 enum tree_code code = TREE_CODE (t);
14331 enum tree_code_class kind = TREE_CODE_CLASS (code);
14332 tree tem;
14333 location_t loc = EXPR_LOCATION (expr);
14334
14335 /* Return right away if a constant. */
14336 if (kind == tcc_constant)
14337 return t;
14338
14339 /* CALL_EXPR-like objects with variable numbers of operands are
14340 treated specially. */
14341 if (kind == tcc_vl_exp)
14342 {
14343 if (code == CALL_EXPR)
14344 {
14345 tem = fold_call_expr (loc, expr, false);
14346 return tem ? tem : expr;
14347 }
14348 return expr;
14349 }
14350
14351 if (IS_EXPR_CODE_CLASS (kind))
14352 {
14353 tree type = TREE_TYPE (t);
14354 tree op0, op1, op2;
14355
14356 switch (TREE_CODE_LENGTH (code))
14357 {
14358 case 1:
14359 op0 = TREE_OPERAND (t, 0);
14360 tem = fold_unary_loc (loc, code, type, op0);
14361 return tem ? tem : expr;
14362 case 2:
14363 op0 = TREE_OPERAND (t, 0);
14364 op1 = TREE_OPERAND (t, 1);
14365 tem = fold_binary_loc (loc, code, type, op0, op1);
14366 return tem ? tem : expr;
14367 case 3:
14368 op0 = TREE_OPERAND (t, 0);
14369 op1 = TREE_OPERAND (t, 1);
14370 op2 = TREE_OPERAND (t, 2);
14371 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14372 return tem ? tem : expr;
14373 default:
14374 break;
14375 }
14376 }
14377
14378 switch (code)
14379 {
14380 case ARRAY_REF:
14381 {
14382 tree op0 = TREE_OPERAND (t, 0);
14383 tree op1 = TREE_OPERAND (t, 1);
14384
14385 if (TREE_CODE (op1) == INTEGER_CST
14386 && TREE_CODE (op0) == CONSTRUCTOR
14387 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14388 {
14389 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14390 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14391 unsigned HOST_WIDE_INT begin = 0;
14392
14393 /* Find a matching index by means of a binary search. */
14394 while (begin != end)
14395 {
14396 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14397 tree index = (*elts)[middle].index;
14398
14399 if (TREE_CODE (index) == INTEGER_CST
14400 && tree_int_cst_lt (index, op1))
14401 begin = middle + 1;
14402 else if (TREE_CODE (index) == INTEGER_CST
14403 && tree_int_cst_lt (op1, index))
14404 end = middle;
14405 else if (TREE_CODE (index) == RANGE_EXPR
14406 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14407 begin = middle + 1;
14408 else if (TREE_CODE (index) == RANGE_EXPR
14409 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14410 end = middle;
14411 else
14412 return (*elts)[middle].value;
14413 }
14414 }
14415
14416 return t;
14417 }
14418
14419 /* Return a VECTOR_CST if possible. */
14420 case CONSTRUCTOR:
14421 {
14422 tree type = TREE_TYPE (t);
14423 if (TREE_CODE (type) != VECTOR_TYPE)
14424 return t;
14425
14426 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14427 unsigned HOST_WIDE_INT idx, pos = 0;
14428 tree value;
14429
14430 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14431 {
14432 if (!CONSTANT_CLASS_P (value))
14433 return t;
14434 if (TREE_CODE (value) == VECTOR_CST)
14435 {
14436 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14437 vec[pos++] = VECTOR_CST_ELT (value, i);
14438 }
14439 else
14440 vec[pos++] = value;
14441 }
14442 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14443 vec[pos] = build_zero_cst (TREE_TYPE (type));
14444
14445 return build_vector (type, vec);
14446 }
14447
14448 case CONST_DECL:
14449 return fold (DECL_INITIAL (t));
14450
14451 default:
14452 return t;
14453 } /* switch (code) */
14454 }
14455
14456 #ifdef ENABLE_FOLD_CHECKING
14457 #undef fold
14458
14459 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14460 hash_table <pointer_hash <tree_node> >);
14461 static void fold_check_failed (const_tree, const_tree);
14462 void print_fold_checksum (const_tree);
14463
14464 /* When --enable-checking=fold, compute a digest of expr before
14465 and after the actual fold call to verify that fold did not
14466 accidentally change the original expr. */
14467
14468 tree
14469 fold (tree expr)
14470 {
14471 tree ret;
14472 struct md5_ctx ctx;
14473 unsigned char checksum_before[16], checksum_after[16];
14474 hash_table <pointer_hash <tree_node> > ht;
14475
14476 ht.create (32);
14477 md5_init_ctx (&ctx);
14478 fold_checksum_tree (expr, &ctx, ht);
14479 md5_finish_ctx (&ctx, checksum_before);
14480 ht.empty ();
14481
14482 ret = fold_1 (expr);
14483
14484 md5_init_ctx (&ctx);
14485 fold_checksum_tree (expr, &ctx, ht);
14486 md5_finish_ctx (&ctx, checksum_after);
14487 ht.dispose ();
14488
14489 if (memcmp (checksum_before, checksum_after, 16))
14490 fold_check_failed (expr, ret);
14491
14492 return ret;
14493 }
14494
14495 void
14496 print_fold_checksum (const_tree expr)
14497 {
14498 struct md5_ctx ctx;
14499 unsigned char checksum[16], cnt;
14500 hash_table <pointer_hash <tree_node> > ht;
14501
14502 ht.create (32);
14503 md5_init_ctx (&ctx);
14504 fold_checksum_tree (expr, &ctx, ht);
14505 md5_finish_ctx (&ctx, checksum);
14506 ht.dispose ();
14507 for (cnt = 0; cnt < 16; ++cnt)
14508 fprintf (stderr, "%02x", checksum[cnt]);
14509 putc ('\n', stderr);
14510 }
14511
14512 static void
14513 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14514 {
14515 internal_error ("fold check: original tree changed by fold");
14516 }
14517
14518 static void
14519 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14520 hash_table <pointer_hash <tree_node> > ht)
14521 {
14522 tree_node **slot;
14523 enum tree_code code;
14524 union tree_node buf;
14525 int i, len;
14526
14527 recursive_label:
14528 if (expr == NULL)
14529 return;
14530 slot = ht.find_slot (expr, INSERT);
14531 if (*slot != NULL)
14532 return;
14533 *slot = CONST_CAST_TREE (expr);
14534 code = TREE_CODE (expr);
14535 if (TREE_CODE_CLASS (code) == tcc_declaration
14536 && DECL_ASSEMBLER_NAME_SET_P (expr))
14537 {
14538 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14539 memcpy ((char *) &buf, expr, tree_size (expr));
14540 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14541 expr = (tree) &buf;
14542 }
14543 else if (TREE_CODE_CLASS (code) == tcc_type
14544 && (TYPE_POINTER_TO (expr)
14545 || TYPE_REFERENCE_TO (expr)
14546 || TYPE_CACHED_VALUES_P (expr)
14547 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14548 || TYPE_NEXT_VARIANT (expr)))
14549 {
14550 /* Allow these fields to be modified. */
14551 tree tmp;
14552 memcpy ((char *) &buf, expr, tree_size (expr));
14553 expr = tmp = (tree) &buf;
14554 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14555 TYPE_POINTER_TO (tmp) = NULL;
14556 TYPE_REFERENCE_TO (tmp) = NULL;
14557 TYPE_NEXT_VARIANT (tmp) = NULL;
14558 if (TYPE_CACHED_VALUES_P (tmp))
14559 {
14560 TYPE_CACHED_VALUES_P (tmp) = 0;
14561 TYPE_CACHED_VALUES (tmp) = NULL;
14562 }
14563 }
14564 md5_process_bytes (expr, tree_size (expr), ctx);
14565 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14566 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14567 if (TREE_CODE_CLASS (code) != tcc_type
14568 && TREE_CODE_CLASS (code) != tcc_declaration
14569 && code != TREE_LIST
14570 && code != SSA_NAME
14571 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14572 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14573 switch (TREE_CODE_CLASS (code))
14574 {
14575 case tcc_constant:
14576 switch (code)
14577 {
14578 case STRING_CST:
14579 md5_process_bytes (TREE_STRING_POINTER (expr),
14580 TREE_STRING_LENGTH (expr), ctx);
14581 break;
14582 case COMPLEX_CST:
14583 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14584 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14585 break;
14586 case VECTOR_CST:
14587 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14588 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14589 break;
14590 default:
14591 break;
14592 }
14593 break;
14594 case tcc_exceptional:
14595 switch (code)
14596 {
14597 case TREE_LIST:
14598 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14599 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14600 expr = TREE_CHAIN (expr);
14601 goto recursive_label;
14602 break;
14603 case TREE_VEC:
14604 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14605 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14606 break;
14607 default:
14608 break;
14609 }
14610 break;
14611 case tcc_expression:
14612 case tcc_reference:
14613 case tcc_comparison:
14614 case tcc_unary:
14615 case tcc_binary:
14616 case tcc_statement:
14617 case tcc_vl_exp:
14618 len = TREE_OPERAND_LENGTH (expr);
14619 for (i = 0; i < len; ++i)
14620 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14621 break;
14622 case tcc_declaration:
14623 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14624 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14625 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14626 {
14627 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14628 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14629 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14630 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14631 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14632 }
14633 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14634 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14635
14636 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14637 {
14638 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14639 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14640 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14641 }
14642 break;
14643 case tcc_type:
14644 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14645 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14646 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14647 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14648 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14649 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14650 if (INTEGRAL_TYPE_P (expr)
14651 || SCALAR_FLOAT_TYPE_P (expr))
14652 {
14653 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14654 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14655 }
14656 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14657 if (TREE_CODE (expr) == RECORD_TYPE
14658 || TREE_CODE (expr) == UNION_TYPE
14659 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14660 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14661 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14662 break;
14663 default:
14664 break;
14665 }
14666 }
14667
14668 /* Helper function for outputting the checksum of a tree T. When
14669 debugging with gdb, you can "define mynext" to be "next" followed
14670 by "call debug_fold_checksum (op0)", then just trace down till the
14671 outputs differ. */
14672
14673 DEBUG_FUNCTION void
14674 debug_fold_checksum (const_tree t)
14675 {
14676 int i;
14677 unsigned char checksum[16];
14678 struct md5_ctx ctx;
14679 hash_table <pointer_hash <tree_node> > ht;
14680 ht.create (32);
14681
14682 md5_init_ctx (&ctx);
14683 fold_checksum_tree (t, &ctx, ht);
14684 md5_finish_ctx (&ctx, checksum);
14685 ht.empty ();
14686
14687 for (i = 0; i < 16; i++)
14688 fprintf (stderr, "%d ", checksum[i]);
14689
14690 fprintf (stderr, "\n");
14691 }
14692
14693 #endif
14694
14695 /* Fold a unary tree expression with code CODE of type TYPE with an
14696 operand OP0. LOC is the location of the resulting expression.
14697 Return a folded expression if successful. Otherwise, return a tree
14698 expression with code CODE of type TYPE with an operand OP0. */
14699
14700 tree
14701 fold_build1_stat_loc (location_t loc,
14702 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14703 {
14704 tree tem;
14705 #ifdef ENABLE_FOLD_CHECKING
14706 unsigned char checksum_before[16], checksum_after[16];
14707 struct md5_ctx ctx;
14708 hash_table <pointer_hash <tree_node> > ht;
14709
14710 ht.create (32);
14711 md5_init_ctx (&ctx);
14712 fold_checksum_tree (op0, &ctx, ht);
14713 md5_finish_ctx (&ctx, checksum_before);
14714 ht.empty ();
14715 #endif
14716
14717 tem = fold_unary_loc (loc, code, type, op0);
14718 if (!tem)
14719 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14720
14721 #ifdef ENABLE_FOLD_CHECKING
14722 md5_init_ctx (&ctx);
14723 fold_checksum_tree (op0, &ctx, ht);
14724 md5_finish_ctx (&ctx, checksum_after);
14725 ht.dispose ();
14726
14727 if (memcmp (checksum_before, checksum_after, 16))
14728 fold_check_failed (op0, tem);
14729 #endif
14730 return tem;
14731 }
14732
14733 /* Fold a binary tree expression with code CODE of type TYPE with
14734 operands OP0 and OP1. LOC is the location of the resulting
14735 expression. Return a folded expression if successful. Otherwise,
14736 return a tree expression with code CODE of type TYPE with operands
14737 OP0 and OP1. */
14738
14739 tree
14740 fold_build2_stat_loc (location_t loc,
14741 enum tree_code code, tree type, tree op0, tree op1
14742 MEM_STAT_DECL)
14743 {
14744 tree tem;
14745 #ifdef ENABLE_FOLD_CHECKING
14746 unsigned char checksum_before_op0[16],
14747 checksum_before_op1[16],
14748 checksum_after_op0[16],
14749 checksum_after_op1[16];
14750 struct md5_ctx ctx;
14751 hash_table <pointer_hash <tree_node> > ht;
14752
14753 ht.create (32);
14754 md5_init_ctx (&ctx);
14755 fold_checksum_tree (op0, &ctx, ht);
14756 md5_finish_ctx (&ctx, checksum_before_op0);
14757 ht.empty ();
14758
14759 md5_init_ctx (&ctx);
14760 fold_checksum_tree (op1, &ctx, ht);
14761 md5_finish_ctx (&ctx, checksum_before_op1);
14762 ht.empty ();
14763 #endif
14764
14765 tem = fold_binary_loc (loc, code, type, op0, op1);
14766 if (!tem)
14767 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14768
14769 #ifdef ENABLE_FOLD_CHECKING
14770 md5_init_ctx (&ctx);
14771 fold_checksum_tree (op0, &ctx, ht);
14772 md5_finish_ctx (&ctx, checksum_after_op0);
14773 ht.empty ();
14774
14775 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14776 fold_check_failed (op0, tem);
14777
14778 md5_init_ctx (&ctx);
14779 fold_checksum_tree (op1, &ctx, ht);
14780 md5_finish_ctx (&ctx, checksum_after_op1);
14781 ht.dispose ();
14782
14783 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14784 fold_check_failed (op1, tem);
14785 #endif
14786 return tem;
14787 }
14788
14789 /* Fold a ternary tree expression with code CODE of type TYPE with
14790 operands OP0, OP1, and OP2. Return a folded expression if
14791 successful. Otherwise, return a tree expression with code CODE of
14792 type TYPE with operands OP0, OP1, and OP2. */
14793
14794 tree
14795 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14796 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14797 {
14798 tree tem;
14799 #ifdef ENABLE_FOLD_CHECKING
14800 unsigned char checksum_before_op0[16],
14801 checksum_before_op1[16],
14802 checksum_before_op2[16],
14803 checksum_after_op0[16],
14804 checksum_after_op1[16],
14805 checksum_after_op2[16];
14806 struct md5_ctx ctx;
14807 hash_table <pointer_hash <tree_node> > ht;
14808
14809 ht.create (32);
14810 md5_init_ctx (&ctx);
14811 fold_checksum_tree (op0, &ctx, ht);
14812 md5_finish_ctx (&ctx, checksum_before_op0);
14813 ht.empty ();
14814
14815 md5_init_ctx (&ctx);
14816 fold_checksum_tree (op1, &ctx, ht);
14817 md5_finish_ctx (&ctx, checksum_before_op1);
14818 ht.empty ();
14819
14820 md5_init_ctx (&ctx);
14821 fold_checksum_tree (op2, &ctx, ht);
14822 md5_finish_ctx (&ctx, checksum_before_op2);
14823 ht.empty ();
14824 #endif
14825
14826 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14827 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14828 if (!tem)
14829 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14830
14831 #ifdef ENABLE_FOLD_CHECKING
14832 md5_init_ctx (&ctx);
14833 fold_checksum_tree (op0, &ctx, ht);
14834 md5_finish_ctx (&ctx, checksum_after_op0);
14835 ht.empty ();
14836
14837 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14838 fold_check_failed (op0, tem);
14839
14840 md5_init_ctx (&ctx);
14841 fold_checksum_tree (op1, &ctx, ht);
14842 md5_finish_ctx (&ctx, checksum_after_op1);
14843 ht.empty ();
14844
14845 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14846 fold_check_failed (op1, tem);
14847
14848 md5_init_ctx (&ctx);
14849 fold_checksum_tree (op2, &ctx, ht);
14850 md5_finish_ctx (&ctx, checksum_after_op2);
14851 ht.dispose ();
14852
14853 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14854 fold_check_failed (op2, tem);
14855 #endif
14856 return tem;
14857 }
14858
14859 /* Fold a CALL_EXPR expression of type TYPE with function operand FN,
14860 NARGS arguments in ARGARRAY, and a null static chain.
14861 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14862 of type TYPE from the given operands as constructed by build_call_array. */
14863
14864 tree
14865 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14866 int nargs, tree *argarray)
14867 {
14868 tree tem;
14869 #ifdef ENABLE_FOLD_CHECKING
14870 unsigned char checksum_before_fn[16],
14871 checksum_before_arglist[16],
14872 checksum_after_fn[16],
14873 checksum_after_arglist[16];
14874 struct md5_ctx ctx;
14875 hash_table <pointer_hash <tree_node> > ht;
14876 int i;
14877
14878 ht.create (32);
14879 md5_init_ctx (&ctx);
14880 fold_checksum_tree (fn, &ctx, ht);
14881 md5_finish_ctx (&ctx, checksum_before_fn);
14882 ht.empty ();
14883
14884 md5_init_ctx (&ctx);
14885 for (i = 0; i < nargs; i++)
14886 fold_checksum_tree (argarray[i], &ctx, ht);
14887 md5_finish_ctx (&ctx, checksum_before_arglist);
14888 ht.empty ();
14889 #endif
14890
14891 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14892
14893 #ifdef ENABLE_FOLD_CHECKING
14894 md5_init_ctx (&ctx);
14895 fold_checksum_tree (fn, &ctx, ht);
14896 md5_finish_ctx (&ctx, checksum_after_fn);
14897 ht.empty ();
14898
14899 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14900 fold_check_failed (fn, tem);
14901
14902 md5_init_ctx (&ctx);
14903 for (i = 0; i < nargs; i++)
14904 fold_checksum_tree (argarray[i], &ctx, ht);
14905 md5_finish_ctx (&ctx, checksum_after_arglist);
14906 ht.dispose ();
14907
14908 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14909 fold_check_failed (NULL_TREE, tem);
14910 #endif
14911 return tem;
14912 }
14913
14914 /* Perform constant folding and related simplification of initializer
14915 expression EXPR. These behave identically to "fold_buildN" but ignore
14916 potential run-time traps and exceptions that fold must preserve. */
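
/* A sketch of the effect: with flag_trapping_math cleared, a constant
   floating-point operation that might raise an exception at run time
   may still be folded when it appears in a static initializer.  */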
14917
14918 #define START_FOLD_INIT \
14919 int saved_signaling_nans = flag_signaling_nans;\
14920 int saved_trapping_math = flag_trapping_math;\
14921 int saved_rounding_math = flag_rounding_math;\
14922 int saved_trapv = flag_trapv;\
14923 int saved_folding_initializer = folding_initializer;\
14924 flag_signaling_nans = 0;\
14925 flag_trapping_math = 0;\
14926 flag_rounding_math = 0;\
14927 flag_trapv = 0;\
14928 folding_initializer = 1;
14929
14930 #define END_FOLD_INIT \
14931 flag_signaling_nans = saved_signaling_nans;\
14932 flag_trapping_math = saved_trapping_math;\
14933 flag_rounding_math = saved_rounding_math;\
14934 flag_trapv = saved_trapv;\
14935 folding_initializer = saved_folding_initializer;
14936
14937 tree
14938 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14939 tree type, tree op)
14940 {
14941 tree result;
14942 START_FOLD_INIT;
14943
14944 result = fold_build1_loc (loc, code, type, op);
14945
14946 END_FOLD_INIT;
14947 return result;
14948 }
14949
14950 tree
14951 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14952 tree type, tree op0, tree op1)
14953 {
14954 tree result;
14955 START_FOLD_INIT;
14956
14957 result = fold_build2_loc (loc, code, type, op0, op1);
14958
14959 END_FOLD_INIT;
14960 return result;
14961 }
14962
14963 tree
14964 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14965 tree type, tree op0, tree op1, tree op2)
14966 {
14967 tree result;
14968 START_FOLD_INIT;
14969
14970 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14971
14972 END_FOLD_INIT;
14973 return result;
14974 }
14975
14976 tree
14977 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14978 int nargs, tree *argarray)
14979 {
14980 tree result;
14981 START_FOLD_INIT;
14982
14983 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14984
14985 END_FOLD_INIT;
14986 return result;
14987 }
14988
14989 #undef START_FOLD_INIT
14990 #undef END_FOLD_INIT
14991
14992 /* Determine if first argument is a multiple of second argument. Return 0 if
14993 it is not, or we cannot easily determine that it is.
14994
14995 An example of the sort of thing we care about (at this point; this routine
14996 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14997 fold cases do now) is discovering that
14998
14999 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15000
15001 is a multiple of
15002
15003 SAVE_EXPR (J * 8)
15004
15005 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15006
15007 This code also handles discovering that
15008
15009 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15010
15011 is a multiple of 8 so we don't have to worry about dealing with a
15012 possible remainder.
15013
15014 Note that we *look* inside a SAVE_EXPR only to determine how it was
15015 calculated; it is not safe for fold to do much of anything else with the
15016 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15017 at run time. For example, the latter example above *cannot* be implemented
15018 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15019 evaluation time of the original SAVE_EXPR is not necessarily the same at
15020 the time the new expression is evaluated. The only optimization of this
15021 sort that would be valid is changing
15022
15023 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15024
15025 divided by 8 to
15026
15027 SAVE_EXPR (I) * SAVE_EXPR (J)
15028
15029 (where the same SAVE_EXPR (J) is used in the original and the
15030 transformed version). */
15031
15032 int
15033 multiple_of_p (tree type, const_tree top, const_tree bottom)
15034 {
15035 if (operand_equal_p (top, bottom, 0))
15036 return 1;
15037
15038 if (TREE_CODE (type) != INTEGER_TYPE)
15039 return 0;
15040
15041 switch (TREE_CODE (top))
15042 {
15043 case BIT_AND_EXPR:
15044 /* A bitwise AND yields a power-of-two multiple: if BOTTOM is a power
15045 of two and either operand is a multiple of BOTTOM, so is TOP. */
15046 if (!integer_pow2p (bottom))
15047 return 0;
15048 /* FALLTHRU */
15049
15050 case MULT_EXPR:
15051 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15052 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15053
15054 case PLUS_EXPR:
15055 case MINUS_EXPR:
15056 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15057 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15058
15059 case LSHIFT_EXPR:
15060 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15061 {
15062 tree op1, t1;
15063
15064 op1 = TREE_OPERAND (top, 1);
15065 /* const_binop may not detect overflow correctly,
15066 so check for it explicitly here. */
15067 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15068 > TREE_INT_CST_LOW (op1)
15069 && TREE_INT_CST_HIGH (op1) == 0
15070 && 0 != (t1 = fold_convert (type,
15071 const_binop (LSHIFT_EXPR,
15072 size_one_node,
15073 op1)))
15074 && !TREE_OVERFLOW (t1))
15075 return multiple_of_p (type, t1, bottom);
15076 }
15077 return 0;
15078
15079 case NOP_EXPR:
15080 /* Can't handle conversions from non-integral or wider integral type. */
15081 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15082 || (TYPE_PRECISION (type)
15083 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15084 return 0;
15085
15086 /* ... fall through ... */
15087
15088 case SAVE_EXPR:
15089 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15090
15091 case COND_EXPR:
15092 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15093 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15094
15095 case INTEGER_CST:
15096 if (TREE_CODE (bottom) != INTEGER_CST
15097 || integer_zerop (bottom)
15098 || (TYPE_UNSIGNED (type)
15099 && (tree_int_cst_sgn (top) < 0
15100 || tree_int_cst_sgn (bottom) < 0)))
15101 return 0;
15102 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15103 top, bottom));
15104
15105 default:
15106 return 0;
15107 }
15108 }
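
/* A worked example of the recursion above (a sketch; I is assumed to
   be some expression of type sizetype):

     tree eight = build_int_cst (sizetype, 8);
     tree t = fold_build2 (PLUS_EXPR, sizetype,
                           fold_build2 (MULT_EXPR, sizetype, i, eight),
                           build_int_cst (sizetype, 16));

   multiple_of_p (sizetype, t, eight) returns 1: the PLUS_EXPR case
   demands that both operands be multiples, the MULT_EXPR case succeeds
   because one factor is BOTTOM itself, and the INTEGER_CST case
   divides 16 by 8 with remainder zero. */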
15109
15110 /* Return true if an expression of code CODE and type TYPE is known to be non-negative. */
15111
15112 static bool
15113 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15114 {
15115 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15116 && truth_value_p (code))
15117 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15118 have a signed:1 type (where the values are -1 and 0). */
15119 return true;
15120 return false;
15121 }
15122
15123 /* Return true if (CODE OP0) is known to be non-negative. If the return
15124 value is based on the assumption that signed overflow is undefined,
15125 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15126 *STRICT_OVERFLOW_P. */
15127
15128 bool
15129 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15130 bool *strict_overflow_p)
15131 {
15132 if (TYPE_UNSIGNED (type))
15133 return true;
15134
15135 switch (code)
15136 {
15137 case ABS_EXPR:
15138 /* We can't return true when wrapping overflow is in effect, because
15139 ABS_EXPR <INT_MIN> == INT_MIN. */
15140 if (!INTEGRAL_TYPE_P (type))
15141 return true;
15142 if (TYPE_OVERFLOW_UNDEFINED (type))
15143 {
15144 *strict_overflow_p = true;
15145 return true;
15146 }
15147 break;
15148
15149 case NON_LVALUE_EXPR:
15150 case FLOAT_EXPR:
15151 case FIX_TRUNC_EXPR:
15152 return tree_expr_nonnegative_warnv_p (op0,
15153 strict_overflow_p);
15154
15155 case NOP_EXPR:
15156 {
15157 tree inner_type = TREE_TYPE (op0);
15158 tree outer_type = type;
15159
15160 if (TREE_CODE (outer_type) == REAL_TYPE)
15161 {
15162 if (TREE_CODE (inner_type) == REAL_TYPE)
15163 return tree_expr_nonnegative_warnv_p (op0,
15164 strict_overflow_p);
15165 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15166 {
15167 if (TYPE_UNSIGNED (inner_type))
15168 return true;
15169 return tree_expr_nonnegative_warnv_p (op0,
15170 strict_overflow_p);
15171 }
15172 }
15173 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15174 {
15175 if (TREE_CODE (inner_type) == REAL_TYPE)
15176 return tree_expr_nonnegative_warnv_p (op0,
15177 strict_overflow_p);
15178 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15179 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15180 && TYPE_UNSIGNED (inner_type);
15181 }
15182 }
15183 break;
15184
15185 default:
15186 return tree_simple_nonnegative_warnv_p (code, type);
15187 }
15188
15189 /* We don't know the sign of the result, so be conservative and return false. */
15190 return false;
15191 }
15192
15193 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15194 value is based on the assumption that signed overflow is undefined,
15195 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15196 *STRICT_OVERFLOW_P. */
15197
15198 bool
15199 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15200 tree op1, bool *strict_overflow_p)
15201 {
15202 if (TYPE_UNSIGNED (type))
15203 return true;
15204
15205 switch (code)
15206 {
15207 case POINTER_PLUS_EXPR:
15208 case PLUS_EXPR:
15209 if (FLOAT_TYPE_P (type))
15210 return (tree_expr_nonnegative_warnv_p (op0,
15211 strict_overflow_p)
15212 && tree_expr_nonnegative_warnv_p (op1,
15213 strict_overflow_p));
15214
15215 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15216 both unsigned and at least 2 bits shorter than the result. */
15217 if (TREE_CODE (type) == INTEGER_TYPE
15218 && TREE_CODE (op0) == NOP_EXPR
15219 && TREE_CODE (op1) == NOP_EXPR)
15220 {
15221 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15222 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15223 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15224 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15225 {
15226 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15227 TYPE_PRECISION (inner2)) + 1;
15228 return prec < TYPE_PRECISION (type);
15229 }
15230 }
15231 break;
15232
15233 case MULT_EXPR:
15234 if (FLOAT_TYPE_P (type))
15235 {
15236 /* x * x for floating point x is always non-negative. */
15237 if (operand_equal_p (op0, op1, 0))
15238 return true;
15239 return (tree_expr_nonnegative_warnv_p (op0,
15240 strict_overflow_p)
15241 && tree_expr_nonnegative_warnv_p (op1,
15242 strict_overflow_p));
15243 }
15244
15245 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15246 both unsigned and their combined precision is less than that of the result. */
15247 if (TREE_CODE (type) == INTEGER_TYPE
15248 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15249 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15250 {
15251 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15252 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15253 : TREE_TYPE (op0);
15254 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15255 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15256 : TREE_TYPE (op1);
15257
15258 bool unsigned0 = TYPE_UNSIGNED (inner0);
15259 bool unsigned1 = TYPE_UNSIGNED (inner1);
15260
15261 if (TREE_CODE (op0) == INTEGER_CST)
15262 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15263
15264 if (TREE_CODE (op1) == INTEGER_CST)
15265 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15266
15267 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15268 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15269 {
15270 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15271 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15272 : TYPE_PRECISION (inner0);
15273
15274 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15275 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15276 : TYPE_PRECISION (inner1);
15277
15278 return precision0 + precision1 < TYPE_PRECISION (type);
15279 }
15280 }
15281 return false;
15282
15283 case BIT_AND_EXPR:
15284 case MAX_EXPR:
15285 return (tree_expr_nonnegative_warnv_p (op0,
15286 strict_overflow_p)
15287 || tree_expr_nonnegative_warnv_p (op1,
15288 strict_overflow_p));
15289
15290 case BIT_IOR_EXPR:
15291 case BIT_XOR_EXPR:
15292 case MIN_EXPR:
15293 case RDIV_EXPR:
15294 case TRUNC_DIV_EXPR:
15295 case CEIL_DIV_EXPR:
15296 case FLOOR_DIV_EXPR:
15297 case ROUND_DIV_EXPR:
15298 return (tree_expr_nonnegative_warnv_p (op0,
15299 strict_overflow_p)
15300 && tree_expr_nonnegative_warnv_p (op1,
15301 strict_overflow_p));
15302
15303 case TRUNC_MOD_EXPR:
15304 case CEIL_MOD_EXPR:
15305 case FLOOR_MOD_EXPR:
15306 case ROUND_MOD_EXPR:
15307 return tree_expr_nonnegative_warnv_p (op0,
15308 strict_overflow_p);
15309 default:
15310 return tree_simple_nonnegative_warnv_p (code, type);
15311 }
15312
15313 /* We don't know the sign of the result, so be conservative and return false. */
15314 return false;
15315 }
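
/* Worked example for the zero-extension cases above (illustrative):
   let OP0 and OP1 be NOP_EXPRs widening unsigned char operands to int.
   A PLUS_EXPR needs at most MAX (8, 8) + 1 = 9 bits and 9 < 32; a
   MULT_EXPR needs at most 8 + 8 = 16 bits and 16 < 32.  In both cases
   the result cannot reach the sign bit, so

     bool sop = false;
     tree_binary_nonnegative_warnv_p (PLUS_EXPR, integer_type_node,
                                      op0, op1, &sop);

   returns true and leaves SOP untouched: no overflow assumption is
   needed. */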
15316
15317 /* Return true if T is known to be non-negative. If the return
15318 value is based on the assumption that signed overflow is undefined,
15319 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15320 *STRICT_OVERFLOW_P. */
15321
15322 bool
15323 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15324 {
15325 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15326 return true;
15327
15328 switch (TREE_CODE (t))
15329 {
15330 case INTEGER_CST:
15331 return tree_int_cst_sgn (t) >= 0;
15332
15333 case REAL_CST:
15334 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15335
15336 case FIXED_CST:
15337 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15338
15339 case COND_EXPR:
15340 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15341 strict_overflow_p)
15342 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15343 strict_overflow_p));
15344 default:
15345 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15346 TREE_TYPE (t));
15347 }
15348 /* We don't know sign of `t', so be conservative and return false. */
15349 return false;
15350 }
15351
15352 /* Return true if T is known to be non-negative. If the return
15353 value is based on the assumption that signed overflow is undefined,
15354 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15355 *STRICT_OVERFLOW_P. */
15356
15357 bool
15358 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15359 tree arg0, tree arg1, bool *strict_overflow_p)
15360 {
15361 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15362 switch (DECL_FUNCTION_CODE (fndecl))
15363 {
15364 CASE_FLT_FN (BUILT_IN_ACOS):
15365 CASE_FLT_FN (BUILT_IN_ACOSH):
15366 CASE_FLT_FN (BUILT_IN_CABS):
15367 CASE_FLT_FN (BUILT_IN_COSH):
15368 CASE_FLT_FN (BUILT_IN_ERFC):
15369 CASE_FLT_FN (BUILT_IN_EXP):
15370 CASE_FLT_FN (BUILT_IN_EXP10):
15371 CASE_FLT_FN (BUILT_IN_EXP2):
15372 CASE_FLT_FN (BUILT_IN_FABS):
15373 CASE_FLT_FN (BUILT_IN_FDIM):
15374 CASE_FLT_FN (BUILT_IN_HYPOT):
15375 CASE_FLT_FN (BUILT_IN_POW10):
15376 CASE_INT_FN (BUILT_IN_FFS):
15377 CASE_INT_FN (BUILT_IN_PARITY):
15378 CASE_INT_FN (BUILT_IN_POPCOUNT):
15379 case BUILT_IN_BSWAP32:
15380 case BUILT_IN_BSWAP64:
15381 /* Always true. */
15382 return true;
15383
15384 CASE_FLT_FN (BUILT_IN_SQRT):
15385 /* sqrt(-0.0) is -0.0. */
15386 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15387 return true;
15388 return tree_expr_nonnegative_warnv_p (arg0,
15389 strict_overflow_p);
15390
15391 CASE_FLT_FN (BUILT_IN_ASINH):
15392 CASE_FLT_FN (BUILT_IN_ATAN):
15393 CASE_FLT_FN (BUILT_IN_ATANH):
15394 CASE_FLT_FN (BUILT_IN_CBRT):
15395 CASE_FLT_FN (BUILT_IN_CEIL):
15396 CASE_FLT_FN (BUILT_IN_ERF):
15397 CASE_FLT_FN (BUILT_IN_EXPM1):
15398 CASE_FLT_FN (BUILT_IN_FLOOR):
15399 CASE_FLT_FN (BUILT_IN_FMOD):
15400 CASE_FLT_FN (BUILT_IN_FREXP):
15401 CASE_FLT_FN (BUILT_IN_ICEIL):
15402 CASE_FLT_FN (BUILT_IN_IFLOOR):
15403 CASE_FLT_FN (BUILT_IN_IRINT):
15404 CASE_FLT_FN (BUILT_IN_IROUND):
15405 CASE_FLT_FN (BUILT_IN_LCEIL):
15406 CASE_FLT_FN (BUILT_IN_LDEXP):
15407 CASE_FLT_FN (BUILT_IN_LFLOOR):
15408 CASE_FLT_FN (BUILT_IN_LLCEIL):
15409 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15410 CASE_FLT_FN (BUILT_IN_LLRINT):
15411 CASE_FLT_FN (BUILT_IN_LLROUND):
15412 CASE_FLT_FN (BUILT_IN_LRINT):
15413 CASE_FLT_FN (BUILT_IN_LROUND):
15414 CASE_FLT_FN (BUILT_IN_MODF):
15415 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15416 CASE_FLT_FN (BUILT_IN_RINT):
15417 CASE_FLT_FN (BUILT_IN_ROUND):
15418 CASE_FLT_FN (BUILT_IN_SCALB):
15419 CASE_FLT_FN (BUILT_IN_SCALBLN):
15420 CASE_FLT_FN (BUILT_IN_SCALBN):
15421 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15422 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15423 CASE_FLT_FN (BUILT_IN_SINH):
15424 CASE_FLT_FN (BUILT_IN_TANH):
15425 CASE_FLT_FN (BUILT_IN_TRUNC):
15426 /* True if the 1st argument is nonnegative. */
15427 return tree_expr_nonnegative_warnv_p (arg0,
15428 strict_overflow_p);
15429
15430 CASE_FLT_FN (BUILT_IN_FMAX):
15431 /* True if the 1st or the 2nd argument is nonnegative. */
15432 return (tree_expr_nonnegative_warnv_p (arg0,
15433 strict_overflow_p)
15434 || (tree_expr_nonnegative_warnv_p (arg1,
15435 strict_overflow_p)));
15436
15437 CASE_FLT_FN (BUILT_IN_FMIN):
15438 /* True if the 1st AND 2nd arguments are nonnegative. */
15439 return (tree_expr_nonnegative_warnv_p (arg0,
15440 strict_overflow_p)
15441 && (tree_expr_nonnegative_warnv_p (arg1,
15442 strict_overflow_p)));
15443
15444 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15445 /* True if the 2nd argument is nonnegative. */
15446 return tree_expr_nonnegative_warnv_p (arg1,
15447 strict_overflow_p);
15448
15449 CASE_FLT_FN (BUILT_IN_POWI):
15450 /* True if the 1st argument is nonnegative or the second
15451 argument is an even integer. */
15452 if (TREE_CODE (arg1) == INTEGER_CST
15453 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15454 return true;
15455 return tree_expr_nonnegative_warnv_p (arg0,
15456 strict_overflow_p);
15457
15458 CASE_FLT_FN (BUILT_IN_POW):
15459 /* True if the 1st argument is nonnegative or the second
15460 argument is an even integer-valued real. */
15461 if (TREE_CODE (arg1) == REAL_CST)
15462 {
15463 REAL_VALUE_TYPE c;
15464 HOST_WIDE_INT n;
15465
15466 c = TREE_REAL_CST (arg1);
15467 n = real_to_integer (&c);
15468 if ((n & 1) == 0)
15469 {
15470 REAL_VALUE_TYPE cint;
15471 real_from_integer (&cint, VOIDmode, n,
15472 n < 0 ? -1 : 0, 0);
15473 if (real_identical (&c, &cint))
15474 return true;
15475 }
15476 }
15477 return tree_expr_nonnegative_warnv_p (arg0,
15478 strict_overflow_p);
15479
15480 default:
15481 break;
15482 }
15483 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15484 type);
15485 }
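
/* A sketch of the BUILT_IN_POW logic above: for pow (x, 2.0), ARG1 is
   the REAL_CST 2.0, real_to_integer yields n == 2, (n & 1) == 0, and
   real_identical confirms that 2.0 is exactly integral, so the call is
   nonnegative whatever the sign of X.  For pow (x, 2.5), n truncates
   to 2 but the real_identical check fails, so the answer falls back to
   whether X itself is nonnegative. */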
15486
15487 /* Return true if T is known to be non-negative. If the return
15488 value is based on the assumption that signed overflow is undefined,
15489 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15490 *STRICT_OVERFLOW_P. */
15491
15492 bool
15493 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15494 {
15495 enum tree_code code = TREE_CODE (t);
15496 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15497 return true;
15498
15499 switch (code)
15500 {
15501 case TARGET_EXPR:
15502 {
15503 tree temp = TARGET_EXPR_SLOT (t);
15504 t = TARGET_EXPR_INITIAL (t);
15505
15506 /* If the initializer is non-void, then it's a normal expression
15507 that will be assigned to the slot. */
15508 if (!VOID_TYPE_P (t))
15509 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15510
15511 /* Otherwise, the initializer sets the slot in some way. One common
15512 way is an assignment statement at the end of the initializer. */
15513 while (1)
15514 {
15515 if (TREE_CODE (t) == BIND_EXPR)
15516 t = expr_last (BIND_EXPR_BODY (t));
15517 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15518 || TREE_CODE (t) == TRY_CATCH_EXPR)
15519 t = expr_last (TREE_OPERAND (t, 0));
15520 else if (TREE_CODE (t) == STATEMENT_LIST)
15521 t = expr_last (t);
15522 else
15523 break;
15524 }
15525 if (TREE_CODE (t) == MODIFY_EXPR
15526 && TREE_OPERAND (t, 0) == temp)
15527 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15528 strict_overflow_p);
15529
15530 return false;
15531 }
15532
15533 case CALL_EXPR:
15534 {
15535 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15536 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15537
15538 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15539 get_callee_fndecl (t),
15540 arg0,
15541 arg1,
15542 strict_overflow_p);
15543 }
15544 case COMPOUND_EXPR:
15545 case MODIFY_EXPR:
15546 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15547 strict_overflow_p);
15548 case BIND_EXPR:
15549 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15550 strict_overflow_p);
15551 case SAVE_EXPR:
15552 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15553 strict_overflow_p);
15554
15555 default:
15556 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15557 TREE_TYPE (t));
15558 }
15559
15560 /* We don't know sign of `t', so be conservative and return false. */
15561 return false;
15562 }
15563
15564 /* Return true if T is known to be non-negative. If the return
15565 value is based on the assumption that signed overflow is undefined,
15566 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15567 *STRICT_OVERFLOW_P. */
15568
15569 bool
15570 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15571 {
15572 enum tree_code code;
15573 if (t == error_mark_node)
15574 return false;
15575
15576 code = TREE_CODE (t);
15577 switch (TREE_CODE_CLASS (code))
15578 {
15579 case tcc_binary:
15580 case tcc_comparison:
15581 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15582 TREE_TYPE (t),
15583 TREE_OPERAND (t, 0),
15584 TREE_OPERAND (t, 1),
15585 strict_overflow_p);
15586
15587 case tcc_unary:
15588 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15589 TREE_TYPE (t),
15590 TREE_OPERAND (t, 0),
15591 strict_overflow_p);
15592
15593 case tcc_constant:
15594 case tcc_declaration:
15595 case tcc_reference:
15596 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15597
15598 default:
15599 break;
15600 }
15601
15602 switch (code)
15603 {
15604 case TRUTH_AND_EXPR:
15605 case TRUTH_OR_EXPR:
15606 case TRUTH_XOR_EXPR:
15607 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15608 TREE_TYPE (t),
15609 TREE_OPERAND (t, 0),
15610 TREE_OPERAND (t, 1),
15611 strict_overflow_p);
15612 case TRUTH_NOT_EXPR:
15613 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15614 TREE_TYPE (t),
15615 TREE_OPERAND (t, 0),
15616 strict_overflow_p);
15617
15618 case COND_EXPR:
15619 case CONSTRUCTOR:
15620 case OBJ_TYPE_REF:
15621 case ASSERT_EXPR:
15622 case ADDR_EXPR:
15623 case WITH_SIZE_EXPR:
15624 case SSA_NAME:
15625 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15626
15627 default:
15628 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15629 }
15630 }
15631
15632 /* Return true if `t' is known to be non-negative. Handle warnings
15633 about undefined signed overflow. */
15634
15635 bool
15636 tree_expr_nonnegative_p (tree t)
15637 {
15638 bool ret, strict_overflow_p;
15639
15640 strict_overflow_p = false;
15641 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15642 if (strict_overflow_p)
15643 fold_overflow_warning (("assuming signed overflow does not occur when "
15644 "determining that expression is always "
15645 "non-negative"),
15646 WARN_STRICT_OVERFLOW_MISC);
15647 return ret;
15648 }
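
/* A sketch, assuming X holds the tree for a signed int variable and
   the default -fstrict-overflow semantics are in effect:

     tree a = fold_build1 (ABS_EXPR, integer_type_node, x);
     bool nn = tree_expr_nonnegative_p (a);

   yields nn == true, because ABS_EXPR <INT_MIN> would wrap back to
   INT_MIN and is therefore nonnegative only on the assumption that
   signed overflow is undefined; with -Wstrict-overflow enabled the
   "assuming signed overflow does not occur" note above is issued. */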
15649
15650
15651 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15652 For floating point we further ensure that OP0 is not denormal.
15653 Similar logic is present in nonzero_address in rtlanal.c.
15654
15655 If the return value is based on the assumption that signed overflow
15656 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15657 change *STRICT_OVERFLOW_P. */
15658
15659 bool
15660 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15661 bool *strict_overflow_p)
15662 {
15663 switch (code)
15664 {
15665 case ABS_EXPR:
15666 return tree_expr_nonzero_warnv_p (op0,
15667 strict_overflow_p);
15668
15669 case NOP_EXPR:
15670 {
15671 tree inner_type = TREE_TYPE (op0);
15672 tree outer_type = type;
15673
15674 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15675 && tree_expr_nonzero_warnv_p (op0,
15676 strict_overflow_p));
15677 }
15678 break;
15679
15680 case NON_LVALUE_EXPR:
15681 return tree_expr_nonzero_warnv_p (op0,
15682 strict_overflow_p);
15683
15684 default:
15685 break;
15686 }
15687
15688 return false;
15689 }
15690
15691 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15692 For floating point we further ensure that the operands are not denormal.
15693 Similar logic is present in nonzero_address in rtlanal.c.
15694
15695 If the return value is based on the assumption that signed overflow
15696 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15697 change *STRICT_OVERFLOW_P. */
15698
15699 bool
15700 tree_binary_nonzero_warnv_p (enum tree_code code,
15701 tree type,
15702 tree op0,
15703 tree op1, bool *strict_overflow_p)
15704 {
15705 bool sub_strict_overflow_p;
15706 switch (code)
15707 {
15708 case POINTER_PLUS_EXPR:
15709 case PLUS_EXPR:
15710 if (TYPE_OVERFLOW_UNDEFINED (type))
15711 {
15712 /* With the presence of negative values it is hard
15713 to say something. */
15714 sub_strict_overflow_p = false;
15715 if (!tree_expr_nonnegative_warnv_p (op0,
15716 &sub_strict_overflow_p)
15717 || !tree_expr_nonnegative_warnv_p (op1,
15718 &sub_strict_overflow_p))
15719 return false;
15720 /* One of the operands must be positive and the other non-negative. */
15721 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15722 overflows, on a two's-complement machine the sum of a positive
15723 and a nonnegative value can never wrap around to zero. */
15724 return (tree_expr_nonzero_warnv_p (op0,
15725 strict_overflow_p)
15726 || tree_expr_nonzero_warnv_p (op1,
15727 strict_overflow_p));
15728 }
15729 break;
15730
15731 case MULT_EXPR:
15732 if (TYPE_OVERFLOW_UNDEFINED (type))
15733 {
15734 if (tree_expr_nonzero_warnv_p (op0,
15735 strict_overflow_p)
15736 && tree_expr_nonzero_warnv_p (op1,
15737 strict_overflow_p))
15738 {
15739 *strict_overflow_p = true;
15740 return true;
15741 }
15742 }
15743 break;
15744
15745 case MIN_EXPR:
15746 sub_strict_overflow_p = false;
15747 if (tree_expr_nonzero_warnv_p (op0,
15748 &sub_strict_overflow_p)
15749 && tree_expr_nonzero_warnv_p (op1,
15750 &sub_strict_overflow_p))
15751 {
15752 if (sub_strict_overflow_p)
15753 *strict_overflow_p = true;
/* MIN evaluates to one of its operands, so when both are nonzero
   the minimum is necessarily nonzero as well. */
return true;
15754 }
15755 break;
15756
15757 case MAX_EXPR:
15758 sub_strict_overflow_p = false;
15759 if (tree_expr_nonzero_warnv_p (op0,
15760 &sub_strict_overflow_p))
15761 {
15762 if (sub_strict_overflow_p)
15763 *strict_overflow_p = true;
15764
15765 /* When both operands are nonzero, then MAX must be too. */
15766 if (tree_expr_nonzero_warnv_p (op1,
15767 strict_overflow_p))
15768 return true;
15769
15770 /* MAX where operand 0 is positive is positive. */
15771 return tree_expr_nonnegative_warnv_p (op0,
15772 strict_overflow_p);
15773 }
15774 /* MAX where operand 1 is positive is positive. */
15775 else if (tree_expr_nonzero_warnv_p (op1,
15776 &sub_strict_overflow_p)
15777 && tree_expr_nonnegative_warnv_p (op1,
15778 &sub_strict_overflow_p))
15779 {
15780 if (sub_strict_overflow_p)
15781 *strict_overflow_p = true;
15782 return true;
15783 }
15784 break;
15785
15786 case BIT_IOR_EXPR:
15787 return (tree_expr_nonzero_warnv_p (op1,
15788 strict_overflow_p)
15789 || tree_expr_nonzero_warnv_p (op0,
15790 strict_overflow_p));
15791
15792 default:
15793 break;
15794 }
15795
15796 return false;
15797 }
15798
15799 /* Return true when T is an address and is known to be nonzero.
15800 For floating point we further ensure that T is not denormal.
15801 Similar logic is present in nonzero_address in rtlanal.c.
15802
15803 If the return value is based on the assumption that signed overflow
15804 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15805 change *STRICT_OVERFLOW_P. */
15806
15807 bool
15808 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15809 {
15810 bool sub_strict_overflow_p;
15811 switch (TREE_CODE (t))
15812 {
15813 case INTEGER_CST:
15814 return !integer_zerop (t);
15815
15816 case ADDR_EXPR:
15817 {
15818 tree base = TREE_OPERAND (t, 0);
15819 if (!DECL_P (base))
15820 base = get_base_address (base);
15821
15822 if (!base)
15823 return false;
15824
15825 /* Weak declarations may link to NULL. Other decls may also resolve
15826 to NULL, so require -fdelete-null-pointer-checks; variables
15827 allocated on the stack, however, always have nonzero addresses. */
15828 if (DECL_P (base)
15829 && (flag_delete_null_pointer_checks
15830 || (DECL_CONTEXT (base)
15831 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15832 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15833 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15834
15835 /* Constants are never weak. */
15836 if (CONSTANT_CLASS_P (base))
15837 return true;
15838
15839 return false;
15840 }
15841
15842 case COND_EXPR:
15843 sub_strict_overflow_p = false;
15844 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15845 &sub_strict_overflow_p)
15846 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15847 &sub_strict_overflow_p))
15848 {
15849 if (sub_strict_overflow_p)
15850 *strict_overflow_p = true;
15851 return true;
15852 }
15853 break;
15854
15855 default:
15856 break;
15857 }
15858 return false;
15859 }
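
/* A sketch of the ADDR_EXPR case: &some_function or &global_var is
   known nonzero when -fdelete-null-pointer-checks is in force (the
   default on most targets) and the decl is not weak; the address of a
   local automatic variable is known nonzero unconditionally, since
   stack objects never live at address zero. */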
15860
15861 /* Return true when T is an address and is known to be nonzero.
15862 For floating point we further ensure that T is not denormal.
15863 Similar logic is present in nonzero_address in rtlanal.c.
15864
15865 If the return value is based on the assumption that signed overflow
15866 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15867 change *STRICT_OVERFLOW_P. */
15868
15869 bool
15870 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15871 {
15872 tree type = TREE_TYPE (t);
15873 enum tree_code code;
15874
15875 /* Doing something useful for floating point would need more work. */
15876 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15877 return false;
15878
15879 code = TREE_CODE (t);
15880 switch (TREE_CODE_CLASS (code))
15881 {
15882 case tcc_unary:
15883 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15884 strict_overflow_p);
15885 case tcc_binary:
15886 case tcc_comparison:
15887 return tree_binary_nonzero_warnv_p (code, type,
15888 TREE_OPERAND (t, 0),
15889 TREE_OPERAND (t, 1),
15890 strict_overflow_p);
15891 case tcc_constant:
15892 case tcc_declaration:
15893 case tcc_reference:
15894 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15895
15896 default:
15897 break;
15898 }
15899
15900 switch (code)
15901 {
15902 case TRUTH_NOT_EXPR:
15903 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15904 strict_overflow_p);
15905
15906 case TRUTH_AND_EXPR:
15907 case TRUTH_OR_EXPR:
15908 case TRUTH_XOR_EXPR:
15909 return tree_binary_nonzero_warnv_p (code, type,
15910 TREE_OPERAND (t, 0),
15911 TREE_OPERAND (t, 1),
15912 strict_overflow_p);
15913
15914 case COND_EXPR:
15915 case CONSTRUCTOR:
15916 case OBJ_TYPE_REF:
15917 case ASSERT_EXPR:
15918 case ADDR_EXPR:
15919 case WITH_SIZE_EXPR:
15920 case SSA_NAME:
15921 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15922
15923 case COMPOUND_EXPR:
15924 case MODIFY_EXPR:
15925 case BIND_EXPR:
15926 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15927 strict_overflow_p);
15928
15929 case SAVE_EXPR:
15930 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15931 strict_overflow_p);
15932
15933 case CALL_EXPR:
15934 return alloca_call_p (t);
15935
15936 default:
15937 break;
15938 }
15939 return false;
15940 }
15941
15942 /* Return true when T is an address and is known to be nonzero.
15943 Handle warnings about undefined signed overflow. */
15944
15945 bool
15946 tree_expr_nonzero_p (tree t)
15947 {
15948 bool ret, strict_overflow_p;
15949
15950 strict_overflow_p = false;
15951 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15952 if (strict_overflow_p)
15953 fold_overflow_warning (("assuming signed overflow does not occur when "
15954 "determining that expression is always "
15955 "non-zero"),
15956 WARN_STRICT_OVERFLOW_MISC);
15957 return ret;
15958 }
15959
15960 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15961 attempt to fold the expression to a constant without modifying TYPE,
15962 OP0 or OP1.
15963
15964 If the expression could be simplified to a constant, then return
15965 the constant. If the expression would not be simplified to a
15966 constant, then return NULL_TREE. */
15967
15968 tree
15969 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15970 {
15971 tree tem = fold_binary (code, type, op0, op1);
15972 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15973 }
15974
15975 /* Given the components of a unary expression CODE, TYPE and OP0,
15976 attempt to fold the expression to a constant without modifying
15977 TYPE or OP0.
15978
15979 If the expression could be simplified to a constant, then return
15980 the constant. If the expression would not be simplified to a
15981 constant, then return NULL_TREE. */
15982
15983 tree
15984 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15985 {
15986 tree tem = fold_unary (code, type, op0);
15987 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15988 }
15989
15990 /* If EXP represents referencing an element in a constant string
15991 (either via pointer arithmetic or array indexing), return the
15992 tree representing the value accessed, otherwise return NULL. */
15993
15994 tree
15995 fold_read_from_constant_string (tree exp)
15996 {
15997 if ((TREE_CODE (exp) == INDIRECT_REF
15998 || TREE_CODE (exp) == ARRAY_REF)
15999 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16000 {
16001 tree exp1 = TREE_OPERAND (exp, 0);
16002 tree index;
16003 tree string;
16004 location_t loc = EXPR_LOCATION (exp);
16005
16006 if (TREE_CODE (exp) == INDIRECT_REF)
16007 string = string_constant (exp1, &index);
16008 else
16009 {
16010 tree low_bound = array_ref_low_bound (exp);
16011 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16012
16013 /* Optimize the special case of a zero lower bound.
16014
16015 We convert the low_bound to sizetype to avoid some problems
16016 with constant folding. (E.g. suppose the lower bound is 1,
16017 and its mode is QI. Without the conversion, (ARRAY
16018 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16019 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16020 if (! integer_zerop (low_bound))
16021 index = size_diffop_loc (loc, index,
16022 fold_convert_loc (loc, sizetype, low_bound));
16023
16024 string = exp1;
16025 }
16026
16027 if (string
16028 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16029 && TREE_CODE (string) == STRING_CST
16030 && TREE_CODE (index) == INTEGER_CST
16031 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16032 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16033 == MODE_INT)
16034 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16035 return build_int_cst_type (TREE_TYPE (exp),
16036 (TREE_STRING_POINTER (string)
16037 [TREE_INT_CST_LOW (index)]));
16038 }
16039 return NULL;
16040 }
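
/* A sketch: given the tree for "abc"[1], an ARRAY_REF whose base is
   the STRING_CST "abc" and whose index is the INTEGER_CST 1, this
   returns the character constant 'b' in the type of the reference.
   An out-of-range index such as "abc"[7], or a variable index, fails
   the checks above and NULL is returned instead. */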
16041
16042 /* Return the tree for neg (ARG0) when ARG0 is known to be an
16043 integer, real, or fixed-point constant.
16044
16045 TYPE is the type of the result. */
16046
16047 static tree
16048 fold_negate_const (tree arg0, tree type)
16049 {
16050 tree t = NULL_TREE;
16051
16052 switch (TREE_CODE (arg0))
16053 {
16054 case INTEGER_CST:
16055 {
16056 double_int val = tree_to_double_int (arg0);
16057 bool overflow;
16058 val = val.neg_with_overflow (&overflow);
16059 t = force_fit_type_double (type, val, 1,
16060 (overflow | TREE_OVERFLOW (arg0))
16061 && !TYPE_UNSIGNED (type));
16062 break;
16063 }
16064
16065 case REAL_CST:
16066 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16067 break;
16068
16069 case FIXED_CST:
16070 {
16071 FIXED_VALUE_TYPE f;
16072 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16073 &(TREE_FIXED_CST (arg0)), NULL,
16074 TYPE_SATURATING (type));
16075 t = build_fixed (type, f);
16076 /* Propagate overflow flags. */
16077 if (overflow_p | TREE_OVERFLOW (arg0))
16078 TREE_OVERFLOW (t) = 1;
16079 break;
16080 }
16081
16082 default:
16083 gcc_unreachable ();
16084 }
16085
16086 return t;
16087 }
16088
16089 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16090 an integer constant or real constant.
16091
16092 TYPE is the type of the result. */
16093
16094 tree
16095 fold_abs_const (tree arg0, tree type)
16096 {
16097 tree t = NULL_TREE;
16098
16099 switch (TREE_CODE (arg0))
16100 {
16101 case INTEGER_CST:
16102 {
16103 double_int val = tree_to_double_int (arg0);
16104
16105 /* If the value is unsigned or non-negative, then the absolute value
16106 is the same as the ordinary value. */
16107 if (TYPE_UNSIGNED (type)
16108 || !val.is_negative ())
16109 t = arg0;
16110
16111 /* If the value is negative, then the absolute value is
16112 its negation. */
16113 else
16114 {
16115 bool overflow;
16116 val = val.neg_with_overflow (&overflow);
16117 t = force_fit_type_double (type, val, -1,
16118 overflow | TREE_OVERFLOW (arg0));
16119 }
16120 }
16121 break;
16122
16123 case REAL_CST:
16124 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16125 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16126 else
16127 t = arg0;
16128 break;
16129
16130 default:
16131 gcc_unreachable ();
16132 }
16133
16134 return t;
16135 }
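
/* A sketch of the overflow corner shared with fold_negate_const:
   applied to the INTEGER_CST INT_MIN of type int, the negation wraps,
   neg_with_overflow reports it, and force_fit_type_double hands back
   an INT_MIN constant with TREE_OVERFLOW set; callers must test that
   flag rather than trust the value. */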
16136
16137 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16138 constant. TYPE is the type of the result. */
16139
16140 static tree
16141 fold_not_const (const_tree arg0, tree type)
16142 {
16143 double_int val;
16144
16145 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16146
16147 val = ~tree_to_double_int (arg0);
16148 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16149 }
16150
16151 /* Given CODE, a relational operator, the target type, TYPE and two
16152 constant operands OP0 and OP1, return the result of the
16153 relational operation. If the result is not a compile time
16154 constant, then return NULL_TREE. */
16155
16156 static tree
16157 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16158 {
16159 int result, invert;
16160
16161 /* From here on, the only cases we handle are when the result is
16162 known to be a constant. */
16163
16164 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16165 {
16166 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16167 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16168
16169 /* Handle the cases where either operand is a NaN. */
16170 if (real_isnan (c0) || real_isnan (c1))
16171 {
16172 switch (code)
16173 {
16174 case EQ_EXPR:
16175 case ORDERED_EXPR:
16176 result = 0;
16177 break;
16178
16179 case NE_EXPR:
16180 case UNORDERED_EXPR:
16181 case UNLT_EXPR:
16182 case UNLE_EXPR:
16183 case UNGT_EXPR:
16184 case UNGE_EXPR:
16185 case UNEQ_EXPR:
16186 result = 1;
16187 break;
16188
16189 case LT_EXPR:
16190 case LE_EXPR:
16191 case GT_EXPR:
16192 case GE_EXPR:
16193 case LTGT_EXPR:
16194 if (flag_trapping_math)
16195 return NULL_TREE;
16196 result = 0;
16197 break;
16198
16199 default:
16200 gcc_unreachable ();
16201 }
16202
16203 return constant_boolean_node (result, type);
16204 }
16205
16206 return constant_boolean_node (real_compare (code, c0, c1), type);
16207 }
16208
16209 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16210 {
16211 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16212 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16213 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16214 }
16215
16216 /* Handle equality/inequality of complex constants. */
16217 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16218 {
16219 tree rcond = fold_relational_const (code, type,
16220 TREE_REALPART (op0),
16221 TREE_REALPART (op1));
16222 tree icond = fold_relational_const (code, type,
16223 TREE_IMAGPART (op0),
16224 TREE_IMAGPART (op1));
16225 if (code == EQ_EXPR)
16226 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16227 else if (code == NE_EXPR)
16228 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16229 else
16230 return NULL_TREE;
16231 }
16232
16233 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16234 {
16235 unsigned count = VECTOR_CST_NELTS (op0);
16236 tree *elts = XALLOCAVEC (tree, count);
16237 gcc_assert (VECTOR_CST_NELTS (op1) == count
16238 && TYPE_VECTOR_SUBPARTS (type) == count);
16239
16240 for (unsigned i = 0; i < count; i++)
16241 {
16242 tree elem_type = TREE_TYPE (type);
16243 tree elem0 = VECTOR_CST_ELT (op0, i);
16244 tree elem1 = VECTOR_CST_ELT (op1, i);
16245
16246 tree tem = fold_relational_const (code, elem_type,
16247 elem0, elem1);
16248
16249 if (tem == NULL_TREE)
16250 return NULL_TREE;
16251
16252 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16253 }
16254
16255 return build_vector (type, elts);
16256 }
16257
16258 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16259
16260 To compute GT, swap the arguments and do LT.
16261 To compute GE, do LT and invert the result.
16262 To compute LE, swap the arguments, do LT and invert the result.
16263 To compute NE, do EQ and invert the result.
16264
16265 Therefore, the code below must handle only EQ and LT. */
16266
16267 if (code == LE_EXPR || code == GT_EXPR)
16268 {
16269 tree tem = op0;
16270 op0 = op1;
16271 op1 = tem;
16272 code = swap_tree_comparison (code);
16273 }
16274
16275 /* Note that it is safe to invert for real values here because we
16276 have already handled the one case where it matters. */
16277
16278 invert = 0;
16279 if (code == NE_EXPR || code == GE_EXPR)
16280 {
16281 invert = 1;
16282 code = invert_tree_comparison (code, false);
16283 }
16284
16285 /* Compute a result for LT or EQ if args permit;
16286 otherwise return NULL_TREE. */
16287 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16288 {
16289 if (code == EQ_EXPR)
16290 result = tree_int_cst_equal (op0, op1);
16291 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16292 result = INT_CST_LT_UNSIGNED (op0, op1);
16293 else
16294 result = INT_CST_LT (op0, op1);
16295 }
16296 else
16297 return NULL_TREE;
16298
16299 if (invert)
16300 result ^= 1;
16301 return constant_boolean_node (result, type);
16302 }
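
/* A sketch of the NaN handling: with C0 a NaN REAL_CST and C1 the
   REAL_CST 1.0, fold_relational_const (LT_EXPR, boolean_type_node,
   c0, c1) returns NULL_TREE under the default -ftrapping-math, since
   the signaling comparison may raise FE_INVALID at run time, and
   boolean_false_node with trapping math disabled; the quiet UNLT_EXPR
   variant folds to boolean_true_node in either mode. */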
16303
16304 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16305 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16306 itself. */
16307
16308 tree
16309 fold_build_cleanup_point_expr (tree type, tree expr)
16310 {
16311 /* If the expression does not have side effects then we don't have to wrap
16312 it with a cleanup point expression. */
16313 if (!TREE_SIDE_EFFECTS (expr))
16314 return expr;
16315
16316 /* If the expression is a return, check whether the expression inside
16317 the return, or the right-hand side of the modify expression inside
16318 the return, has side effects. If either has none, we don't need to
16319 wrap the expression in a cleanup point expression. Note we don't check
16320 the left-hand side of the modify because it should always be a return decl. */
16321 if (TREE_CODE (expr) == RETURN_EXPR)
16322 {
16323 tree op = TREE_OPERAND (expr, 0);
16324 if (!op || !TREE_SIDE_EFFECTS (op))
16325 return expr;
16326 op = TREE_OPERAND (op, 1);
16327 if (!TREE_SIDE_EFFECTS (op))
16328 return expr;
16329 }
16330
16331 return build1 (CLEANUP_POINT_EXPR, type, expr);
16332 }
16333
16334 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16335 of an indirection through OP0, or NULL_TREE if no simplification is
16336 possible. */
16337
16338 tree
16339 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16340 {
16341 tree sub = op0;
16342 tree subtype;
16343
16344 STRIP_NOPS (sub);
16345 subtype = TREE_TYPE (sub);
16346 if (!POINTER_TYPE_P (subtype))
16347 return NULL_TREE;
16348
16349 if (TREE_CODE (sub) == ADDR_EXPR)
16350 {
16351 tree op = TREE_OPERAND (sub, 0);
16352 tree optype = TREE_TYPE (op);
16353 /* *&CONST_DECL folds to the value of the const decl. */
16354 if (TREE_CODE (op) == CONST_DECL)
16355 return DECL_INITIAL (op);
16356 /* *&p => p; make sure to handle *&"str"[cst] here. */
16357 if (type == optype)
16358 {
16359 tree fop = fold_read_from_constant_string (op);
16360 if (fop)
16361 return fop;
16362 else
16363 return op;
16364 }
16365 /* *(foo *)&fooarray => fooarray[0] */
16366 else if (TREE_CODE (optype) == ARRAY_TYPE
16367 && type == TREE_TYPE (optype)
16368 && (!in_gimple_form
16369 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16370 {
16371 tree type_domain = TYPE_DOMAIN (optype);
16372 tree min_val = size_zero_node;
16373 if (type_domain && TYPE_MIN_VALUE (type_domain))
16374 min_val = TYPE_MIN_VALUE (type_domain);
16375 if (in_gimple_form
16376 && TREE_CODE (min_val) != INTEGER_CST)
16377 return NULL_TREE;
16378 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16379 NULL_TREE, NULL_TREE);
16380 }
16381 /* *(foo *)&complexfoo => __real__ complexfoo */
16382 else if (TREE_CODE (optype) == COMPLEX_TYPE
16383 && type == TREE_TYPE (optype))
16384 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16385 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16386 else if (TREE_CODE (optype) == VECTOR_TYPE
16387 && type == TREE_TYPE (optype))
16388 {
16389 tree part_width = TYPE_SIZE (type);
16390 tree index = bitsize_int (0);
16391 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16392 }
16393 }
16394
16395 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16396 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16397 {
16398 tree op00 = TREE_OPERAND (sub, 0);
16399 tree op01 = TREE_OPERAND (sub, 1);
16400
16401 STRIP_NOPS (op00);
16402 if (TREE_CODE (op00) == ADDR_EXPR)
16403 {
16404 tree op00type;
16405 op00 = TREE_OPERAND (op00, 0);
16406 op00type = TREE_TYPE (op00);
16407
16408 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16409 if (TREE_CODE (op00type) == VECTOR_TYPE
16410 && type == TREE_TYPE (op00type))
16411 {
16412 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16413 tree part_width = TYPE_SIZE (type);
16414 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
16415 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16416 tree index = bitsize_int (indexi);
16417
/* An index equal to the number of elements is already one past the
   end of the vector, so require strictly less. */
16418 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16419 return fold_build3_loc (loc,
16420 BIT_FIELD_REF, type, op00,
16421 part_width, index);
16422
16423 }
16424 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16425 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16426 && type == TREE_TYPE (op00type))
16427 {
16428 tree size = TYPE_SIZE_UNIT (type);
16429 if (tree_int_cst_equal (size, op01))
16430 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16431 }
16432 /* ((foo *)&fooarray)[1] => fooarray[1] */
16433 else if (TREE_CODE (op00type) == ARRAY_TYPE
16434 && type == TREE_TYPE (op00type))
16435 {
16436 tree type_domain = TYPE_DOMAIN (op00type);
16437 tree min_val = size_zero_node;
16438 if (type_domain && TYPE_MIN_VALUE (type_domain))
16439 min_val = TYPE_MIN_VALUE (type_domain);
16440 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16441 TYPE_SIZE_UNIT (type));
16442 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16443 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16444 NULL_TREE, NULL_TREE);
16445 }
16446 }
16447 }
16448
16449 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16450 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16451 && type == TREE_TYPE (TREE_TYPE (subtype))
16452 && (!in_gimple_form
16453 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16454 {
16455 tree type_domain;
16456 tree min_val = size_zero_node;
16457 sub = build_fold_indirect_ref_loc (loc, sub);
16458 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16459 if (type_domain && TYPE_MIN_VALUE (type_domain))
16460 min_val = TYPE_MIN_VALUE (type_domain);
16461 if (in_gimple_form
16462 && TREE_CODE (min_val) != INTEGER_CST)
16463 return NULL_TREE;
16464 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16465 NULL_TREE);
16466 }
16467
16468 return NULL_TREE;
16469 }
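
/* A sketch, assuming a target where float occupies 32 bits: for a
   vector variable V of type v4sf, *(float *)&V folds to
   BIT_FIELD_REF <V, 32, 0> through the ADDR_EXPR vector case, and
   *((float *)&V + 4), a POINTER_PLUS_EXPR with constant offset 4,
   folds to BIT_FIELD_REF <V, 32, 32>, i.e. element 1.  Likewise
   *(float *)&CF for a complex float CF becomes __real__ CF, and the
   same indirection offset by sizeof (float) becomes __imag__ CF. */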
16470
16471 /* Builds an expression for an indirection through T, simplifying some
16472 cases. */
16473
16474 tree
16475 build_fold_indirect_ref_loc (location_t loc, tree t)
16476 {
16477 tree type = TREE_TYPE (TREE_TYPE (t));
16478 tree sub = fold_indirect_ref_1 (loc, type, t);
16479
16480 if (sub)
16481 return sub;
16482
16483 return build1_loc (loc, INDIRECT_REF, type, t);
16484 }
16485
16486 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16487
16488 tree
16489 fold_indirect_ref_loc (location_t loc, tree t)
16490 {
16491 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16492
16493 if (sub)
16494 return sub;
16495 else
16496 return t;
16497 }
16498
16499 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16500 whose result is ignored. The type of the returned tree need not be
16501 the same as the original expression. */
16502
16503 tree
16504 fold_ignored_result (tree t)
16505 {
16506 if (!TREE_SIDE_EFFECTS (t))
16507 return integer_zero_node;
16508
16509 for (;;)
16510 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16511 {
16512 case tcc_unary:
16513 t = TREE_OPERAND (t, 0);
16514 break;
16515
16516 case tcc_binary:
16517 case tcc_comparison:
16518 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16519 t = TREE_OPERAND (t, 0);
16520 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16521 t = TREE_OPERAND (t, 1);
16522 else
16523 return t;
16524 break;
16525
16526 case tcc_expression:
16527 switch (TREE_CODE (t))
16528 {
16529 case COMPOUND_EXPR:
16530 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16531 return t;
16532 t = TREE_OPERAND (t, 0);
16533 break;
16534
16535 case COND_EXPR:
16536 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16537 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16538 return t;
16539 t = TREE_OPERAND (t, 0);
16540 break;
16541
16542 default:
16543 return t;
16544 }
16545 break;
16546
16547 default:
16548 return t;
16549 }
16550 }
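
/* A sketch: if T is the COMPOUND_EXPR (x++, y + 1) and its value is
   unused, the y + 1 half has no side effects, so the loop strips it
   and returns just x++; a T with no side effects at all is replaced
   by integer_zero_node outright. */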
16551
16552 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16553 This can only be applied to objects of a sizetype. */
16554
16555 tree
16556 round_up_loc (location_t loc, tree value, int divisor)
16557 {
16558 tree div = NULL_TREE;
16559
16560 gcc_assert (divisor > 0);
16561 if (divisor == 1)
16562 return value;
16563
16564 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16565 have to do anything. Only do this when VALUE is not a constant,
16566 because for a constant this check is more expensive than simply
16567 performing the rounding. */
16568 if (TREE_CODE (value) != INTEGER_CST)
16569 {
16570 div = build_int_cst (TREE_TYPE (value), divisor);
16571
16572 if (multiple_of_p (TREE_TYPE (value), value, div))
16573 return value;
16574 }
16575
16576 /* If divisor is a power of two, simplify this to bit manipulation. */
16577 if (divisor == (divisor & -divisor))
16578 {
16579 if (TREE_CODE (value) == INTEGER_CST)
16580 {
16581 double_int val = tree_to_double_int (value);
16582 bool overflow_p;
16583
16584 if ((val.low & (divisor - 1)) == 0)
16585 return value;
16586
16587 overflow_p = TREE_OVERFLOW (value);
16588 val.low &= ~(divisor - 1);
16589 val.low += divisor;
16590 if (val.low == 0)
16591 {
16592 val.high++;
16593 if (val.high == 0)
16594 overflow_p = true;
16595 }
16596
16597 return force_fit_type_double (TREE_TYPE (value), val,
16598 -1, overflow_p);
16599 }
16600 else
16601 {
16602 tree t;
16603
16604 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16605 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16606 t = build_int_cst (TREE_TYPE (value), -divisor);
16607 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16608 }
16609 }
16610 else
16611 {
16612 if (!div)
16613 div = build_int_cst (TREE_TYPE (value), divisor);
16614 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16615 value = size_binop_loc (loc, MULT_EXPR, value, div);
16616 }
16617
16618 return value;
16619 }
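
/* Worked example (a sketch): round_up_loc (loc, size_int (13), 8)
   takes the power-of-two INTEGER_CST path: 13 & 7 is nonzero, so the
   low bits are cleared and DIVISOR is added, yielding 16.  For a
   non-constant VALUE the same rounding is emitted as the expression
   (VALUE + 7) & -8 by the two size_binop_loc calls. */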
16620
16621 /* Likewise, but round down. */
16622
16623 tree
16624 round_down_loc (location_t loc, tree value, int divisor)
16625 {
16626 tree div = NULL_TREE;
16627
16628 gcc_assert (divisor > 0);
16629 if (divisor == 1)
16630 return value;
16631
16632 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16633 have to do anything. Only do this when VALUE is not a constant,
16634 because for a constant this check is more expensive than simply
16635 performing the rounding. */
16636 if (TREE_CODE (value) != INTEGER_CST)
16637 {
16638 div = build_int_cst (TREE_TYPE (value), divisor);
16639
16640 if (multiple_of_p (TREE_TYPE (value), value, div))
16641 return value;
16642 }
16643
16644 /* If divisor is a power of two, simplify this to bit manipulation. */
16645 if (divisor == (divisor & -divisor))
16646 {
16647 tree t;
16648
16649 t = build_int_cst (TREE_TYPE (value), -divisor);
16650 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16651 }
16652 else
16653 {
16654 if (!div)
16655 div = build_int_cst (TREE_TYPE (value), divisor);
16656 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16657 value = size_binop_loc (loc, MULT_EXPR, value, div);
16658 }
16659
16660 return value;
16661 }
16662
16663 /* Returns the pointer to the base of the object addressed by EXP and
16664 extracts the information about the offset of the access, storing it
16665 in *PBITPOS and *POFFSET. */
16666
16667 static tree
16668 split_address_to_core_and_offset (tree exp,
16669 HOST_WIDE_INT *pbitpos, tree *poffset)
16670 {
16671 tree core;
16672 enum machine_mode mode;
16673 int unsignedp, volatilep;
16674 HOST_WIDE_INT bitsize;
16675 location_t loc = EXPR_LOCATION (exp);
16676
16677 if (TREE_CODE (exp) == ADDR_EXPR)
16678 {
16679 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16680 poffset, &mode, &unsignedp, &volatilep,
16681 false);
16682 core = build_fold_addr_expr_loc (loc, core);
16683 }
16684 else
16685 {
16686 core = exp;
16687 *pbitpos = 0;
16688 *poffset = NULL_TREE;
16689 }
16690
16691 return core;
16692 }
16693
16694 /* Returns true if addresses of E1 and E2 differ by a constant, false
16695 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16696
16697 bool
16698 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16699 {
16700 tree core1, core2;
16701 HOST_WIDE_INT bitpos1, bitpos2;
16702 tree toffset1, toffset2, tdiff, type;
16703
16704 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16705 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16706
16707 if (bitpos1 % BITS_PER_UNIT != 0
16708 || bitpos2 % BITS_PER_UNIT != 0
16709 || !operand_equal_p (core1, core2, 0))
16710 return false;
16711
16712 if (toffset1 && toffset2)
16713 {
16714 type = TREE_TYPE (toffset1);
16715 if (type != TREE_TYPE (toffset2))
16716 toffset2 = fold_convert (type, toffset2);
16717
16718 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16719 if (!cst_and_fits_in_hwi (tdiff))
16720 return false;
16721
16722 *diff = int_cst_value (tdiff);
16723 }
16724 else if (toffset1 || toffset2)
16725 {
16726 /* If only one of the offsets is non-constant, the difference cannot
16727 be a constant. */
16728 return false;
16729 }
16730 else
16731 *diff = 0;
16732
16733 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16734 return true;
16735 }
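
/* A sketch: for "int a[10];" on a target with 32-bit int, E1 = &a[3]
   and E2 = &a[1] share the core &a; get_inner_reference reports bit
   positions 96 and 32 with no variable offsets, so *DIFF becomes
   (96 - 32) / 8 = 8.  If exactly one side carried a variable offset,
   the difference could not be constant and false would be returned. */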
16736
16737 /* Simplify the floating point expression EXP when the sign of the
16738 result is not significant. Return NULL_TREE if no simplification
16739 is possible. */
16740
16741 tree
16742 fold_strip_sign_ops (tree exp)
16743 {
16744 tree arg0, arg1;
16745 location_t loc = EXPR_LOCATION (exp);
16746
16747 switch (TREE_CODE (exp))
16748 {
16749 case ABS_EXPR:
16750 case NEGATE_EXPR:
16751 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16752 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16753
16754 case MULT_EXPR:
16755 case RDIV_EXPR:
16756 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16757 return NULL_TREE;
16758 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16759 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16760 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16761 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16762 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16763 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16764 break;
16765
16766 case COMPOUND_EXPR:
16767 arg0 = TREE_OPERAND (exp, 0);
16768 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16769 if (arg1)
16770 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16771 break;
16772
16773 case COND_EXPR:
16774 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16775 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16776 if (arg0 || arg1)
16777 return fold_build3_loc (loc,
16778 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16779 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16780 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16781 break;
16782
16783 case CALL_EXPR:
16784 {
16785 const enum built_in_function fcode = builtin_mathfn_code (exp);
16786 switch (fcode)
16787 {
16788 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16789 /* Strip copysign function call, return the 1st argument. */
16790 arg0 = CALL_EXPR_ARG (exp, 0);
16791 arg1 = CALL_EXPR_ARG (exp, 1);
16792 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16793
16794 default:
16795 /* Strip sign ops from the argument of "odd" math functions. */
16796 if (negate_mathfn_p (fcode))
16797 {
16798 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16799 if (arg0)
16800 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16801 }
16802 break;
16803 }
16804 }
16805 break;
16806
16807 default:
16808 break;
16809 }
16810 return NULL_TREE;
16811 }
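
/* A sketch: with the sign of the result declared irrelevant,
   -x * sin (-y) strips to x * sin (y): the NEGATE_EXPR is dropped
   directly, and because sin satisfies negate_mathfn_p the negation of
   its argument is dropped too.  copysign (x, y) reduces to X, with
   omit_one_operand_loc keeping Y around only for its side effects.
   Callers such as the pow folders in builtins.c rely on this when
   only the magnitude of the result matters. */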