/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

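/* Illustrative example: because both operands below are constants,
   the result folds at compile time:

     tree sum = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   leaves SUM as the sizetype constant 12 rather than a PLUS_EXPR
   node.  */
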
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-ssa.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

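/* Illustrative reading of the encoding: bit 0 means "less than",
   bit 1 "equal", bit 2 "greater than" and bit 3 "unordered".  So,
   for example,

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)

   and combining two codes with | or & yields the code of the
   disjunction or conjunction of the corresponding predicates.  */
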
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two; that does
     the correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
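
/* Illustrative example: with CODE == TRUNC_DIV_EXPR,
   div_if_zero_remainder returns the constant 3 for arguments 12 and 4,
   but NULL_TREE for arguments 12 and 5, since 5 does not divide 12
   with zero remainder.  */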
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
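
/* A typical usage pattern for the deferral machinery above, shown
   for illustration (the names USED_P, EXPR and STMT are hypothetical):

     fold_defer_overflow_warnings ();
     t = fold (expr);
     ...decide whether the folded result T is actually used...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   Any strict-overflow warning triggered while folding is queued and
   emitted only if USED_P is true.  */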
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

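/* Illustrative example: for a 32-bit signed type the only value this
   function rejects is INT_MIN (-2147483648), whose negation is not
   representable; every other value, including INT_MAX, can be negated
   without overflow.  */
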
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

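/* Illustrative examples: negate_expr_p is true for the INTEGER_CST 5,
   false for INT_MIN when signed overflow is undefined (its negation
   would overflow), and true for x - y in a signed integer type, since
   -(x - y) can be cheaply rewritten as y - x.  */
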
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

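/* Illustrative example: for an integral ~x, fold_negate_expr builds
   x + 1 via the BIT_NOT_EXPR case above, since - (~x) == x + 1 in
   two's complement arithmetic.  */
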
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

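/* Illustrative example: splitting IN == a + 5 with CODE == PLUS_EXPR
   stores 5 in *LITP, leaves *CONP and *MINUS_LITP null, and returns
   the variable part a.  With IN == a - 5 and CODE == PLUS_EXPR, the
   subtracted literal 5 is stored in *MINUS_LITP instead.  */
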
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
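
/* Illustrative example: with CODE == PLUS_EXPR, T1 == a - b and
   T2 == -y (a NEGATE_EXPR), the function takes the non-folding path
   (T1 is a MINUS_EXPR, so re-folding could recurse) and builds
   (a - b) - y rather than (a - b) + (-y).  */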
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of the arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

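/* Illustrative examples: int_const_binop (PLUS_EXPR, 7, 9) on two
   INTEGER_CSTs yields the constant 16, while adding 1 to the maximum
   value of a signed type yields a wrapped constant that is typically
   marked with TREE_OVERFLOW, since the result no longer fits the
   type.  */
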
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!host_integerp (arg2, 1))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
          unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
          unsigned HOST_WIDE_INT innerc
            = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, the compiler always emits VEC_RSHIFT_EXPR;
             for !BYTES_BIG_ENDIAN it picks the first vector element, but
             for BYTES_BIG_ENDIAN the last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
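
/* Worked example for the straightforward complex division above:
   (a + bi) / (c + di) == ((a*c + b*d) + (b*c - a*d)i) / (c*c + d*d),
   so for instance (1 + 2i) / (3 + 4i) folds to (11 + 2i) / 25,
   i.e. 0.44 + 0.08i.  */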

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
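
/* Illustrative example: for sizetype constants 4 and 8,
   size_diffop_loc computes in ssizetype and yields -4; per the code
   above, it subtracts the other way (8 - 4) and negates, so no
   unsigned wraparound is involved.  */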
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

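/* Illustrative examples of the saturating semantics above, for a
   32-bit int target type: (int) NaN folds to 0 with TREE_OVERFLOW
   set, 1e30 folds to INT_MAX, and -1e30 folds to INT_MIN.  */
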
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not all zero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent one.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

1834 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1835 type TYPE. If no simplification can be done return NULL_TREE. */
1836
1837 static tree
1838 fold_convert_const (enum tree_code code, tree type, tree arg1)
1839 {
1840 if (TREE_TYPE (arg1) == type)
1841 return arg1;
1842
1843 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1844 || TREE_CODE (type) == OFFSET_TYPE)
1845 {
1846 if (TREE_CODE (arg1) == INTEGER_CST)
1847 return fold_convert_const_int_from_int (type, arg1);
1848 else if (TREE_CODE (arg1) == REAL_CST)
1849 return fold_convert_const_int_from_real (code, type, arg1);
1850 else if (TREE_CODE (arg1) == FIXED_CST)
1851 return fold_convert_const_int_from_fixed (type, arg1);
1852 }
1853 else if (TREE_CODE (type) == REAL_TYPE)
1854 {
1855 if (TREE_CODE (arg1) == INTEGER_CST)
1856 return build_real_from_int_cst (type, arg1);
1857 else if (TREE_CODE (arg1) == REAL_CST)
1858 return fold_convert_const_real_from_real (type, arg1);
1859 else if (TREE_CODE (arg1) == FIXED_CST)
1860 return fold_convert_const_real_from_fixed (type, arg1);
1861 }
1862 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1863 {
1864 if (TREE_CODE (arg1) == FIXED_CST)
1865 return fold_convert_const_fixed_from_fixed (type, arg1);
1866 else if (TREE_CODE (arg1) == INTEGER_CST)
1867 return fold_convert_const_fixed_from_int (type, arg1);
1868 else if (TREE_CODE (arg1) == REAL_CST)
1869 return fold_convert_const_fixed_from_real (type, arg1);
1870 }
1871 return NULL_TREE;
1872 }
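
/* Usage sketch (hypothetical caller, not a call made here):
   fold_convert_const only folds when ARG1 is already a constant, e.g.

     tree five = build_int_cst (integer_type_node, 5);
     tree folded = fold_convert_const (FLOAT_EXPR, double_type_node, five);

   leaves FOLDED a REAL_CST holding 5.0, while a non-constant ARG1
   makes the function return NULL_TREE so the caller must build the
   conversion expression itself, as fold_convert_loc below does.  */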
1873
1874 /* Construct a vector of zero elements of vector type TYPE. */
1875
1876 static tree
1877 build_zero_vector (tree type)
1878 {
1879 tree t;
1880
1881 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1882 return build_vector_from_val (type, t);
1883 }
1884
1885 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1886
1887 bool
1888 fold_convertible_p (const_tree type, const_tree arg)
1889 {
1890 tree orig = TREE_TYPE (arg);
1891
1892 if (type == orig)
1893 return true;
1894
1895 if (TREE_CODE (arg) == ERROR_MARK
1896 || TREE_CODE (type) == ERROR_MARK
1897 || TREE_CODE (orig) == ERROR_MARK)
1898 return false;
1899
1900 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1901 return true;
1902
1903 switch (TREE_CODE (type))
1904 {
1905 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1906 case POINTER_TYPE: case REFERENCE_TYPE:
1907 case OFFSET_TYPE:
1908 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1909 || TREE_CODE (orig) == OFFSET_TYPE)
1910 return true;
1911 return (TREE_CODE (orig) == VECTOR_TYPE
1912 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1913
1914 case REAL_TYPE:
1915 case FIXED_POINT_TYPE:
1916 case COMPLEX_TYPE:
1917 case VECTOR_TYPE:
1918 case VOID_TYPE:
1919 return TREE_CODE (type) == TREE_CODE (orig);
1920
1921 default:
1922 return false;
1923 }
1924 }
1925
1926 /* Convert expression ARG to type TYPE. Used by the middle-end for
1927 simple conversions in preference to calling the front-end's convert. */
1928
1929 tree
1930 fold_convert_loc (location_t loc, tree type, tree arg)
1931 {
1932 tree orig = TREE_TYPE (arg);
1933 tree tem;
1934
1935 if (type == orig)
1936 return arg;
1937
1938 if (TREE_CODE (arg) == ERROR_MARK
1939 || TREE_CODE (type) == ERROR_MARK
1940 || TREE_CODE (orig) == ERROR_MARK)
1941 return error_mark_node;
1942
1943 switch (TREE_CODE (type))
1944 {
1945 case POINTER_TYPE:
1946 case REFERENCE_TYPE:
1947 /* Handle conversions between pointers to different address spaces. */
1948 if (POINTER_TYPE_P (orig)
1949 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1950 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1951 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1952 /* fall through */
1953
1954 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1955 case OFFSET_TYPE:
1956 if (TREE_CODE (arg) == INTEGER_CST)
1957 {
1958 tem = fold_convert_const (NOP_EXPR, type, arg);
1959 if (tem != NULL_TREE)
1960 return tem;
1961 }
1962 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1963 || TREE_CODE (orig) == OFFSET_TYPE)
1964 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1965 if (TREE_CODE (orig) == COMPLEX_TYPE)
1966 return fold_convert_loc (loc, type,
1967 fold_build1_loc (loc, REALPART_EXPR,
1968 TREE_TYPE (orig), arg));
1969 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1970 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1971 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1972
1973 case REAL_TYPE:
1974 if (TREE_CODE (arg) == INTEGER_CST)
1975 {
1976 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1977 if (tem != NULL_TREE)
1978 return tem;
1979 }
1980 else if (TREE_CODE (arg) == REAL_CST)
1981 {
1982 tem = fold_convert_const (NOP_EXPR, type, arg);
1983 if (tem != NULL_TREE)
1984 return tem;
1985 }
1986 else if (TREE_CODE (arg) == FIXED_CST)
1987 {
1988 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1989 if (tem != NULL_TREE)
1990 return tem;
1991 }
1992
1993 switch (TREE_CODE (orig))
1994 {
1995 case INTEGER_TYPE:
1996 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1997 case POINTER_TYPE: case REFERENCE_TYPE:
1998 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1999
2000 case REAL_TYPE:
2001 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2002
2003 case FIXED_POINT_TYPE:
2004 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2005
2006 case COMPLEX_TYPE:
2007 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2008 return fold_convert_loc (loc, type, tem);
2009
2010 default:
2011 gcc_unreachable ();
2012 }
2013
2014 case FIXED_POINT_TYPE:
2015 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2016 || TREE_CODE (arg) == REAL_CST)
2017 {
2018 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2019 if (tem != NULL_TREE)
2020 goto fold_convert_exit;
2021 }
2022
2023 switch (TREE_CODE (orig))
2024 {
2025 case FIXED_POINT_TYPE:
2026 case INTEGER_TYPE:
2027 case ENUMERAL_TYPE:
2028 case BOOLEAN_TYPE:
2029 case REAL_TYPE:
2030 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2031
2032 case COMPLEX_TYPE:
2033 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2034 return fold_convert_loc (loc, type, tem);
2035
2036 default:
2037 gcc_unreachable ();
2038 }
2039
2040 case COMPLEX_TYPE:
2041 switch (TREE_CODE (orig))
2042 {
2043 case INTEGER_TYPE:
2044 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2045 case POINTER_TYPE: case REFERENCE_TYPE:
2046 case REAL_TYPE:
2047 case FIXED_POINT_TYPE:
2048 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2049 fold_convert_loc (loc, TREE_TYPE (type), arg),
2050 fold_convert_loc (loc, TREE_TYPE (type),
2051 integer_zero_node));
2052 case COMPLEX_TYPE:
2053 {
2054 tree rpart, ipart;
2055
2056 if (TREE_CODE (arg) == COMPLEX_EXPR)
2057 {
2058 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2059 TREE_OPERAND (arg, 0));
2060 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2061 TREE_OPERAND (arg, 1));
2062 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2063 }
2064
2065 arg = save_expr (arg);
2066 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2067 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2068 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2069 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2070 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2071 }
2072
2073 default:
2074 gcc_unreachable ();
2075 }
2076
2077 case VECTOR_TYPE:
2078 if (integer_zerop (arg))
2079 return build_zero_vector (type);
2080 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2081 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2082 || TREE_CODE (orig) == VECTOR_TYPE);
2083 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2084
2085 case VOID_TYPE:
2086 tem = fold_ignored_result (arg);
2087 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2088
2089 default:
2090 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2091 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2092 gcc_unreachable ();
2093 }
2094 fold_convert_exit:
2095 protected_set_expr_location_unshare (tem, loc);
2096 return tem;
2097 }
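
/* Illustrative example: for a scalar-to-complex conversion the
   COMPLEX_TYPE arm above synthesizes a zero imaginary part, so
   converting an int N to complex double builds, in effect,

     COMPLEX_EXPR <(double) n, 0.0>

   with both parts themselves run back through fold_convert_loc.  */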
2098 \f
2099 /* Return false if expr can be assumed not to be an lvalue, true
2100 otherwise. */
2101
2102 static bool
2103 maybe_lvalue_p (const_tree x)
2104 {
2105 /* We only need to wrap lvalue tree codes. */
2106 switch (TREE_CODE (x))
2107 {
2108 case VAR_DECL:
2109 case PARM_DECL:
2110 case RESULT_DECL:
2111 case LABEL_DECL:
2112 case FUNCTION_DECL:
2113 case SSA_NAME:
2114
2115 case COMPONENT_REF:
2116 case MEM_REF:
2117 case INDIRECT_REF:
2118 case ARRAY_REF:
2119 case ARRAY_RANGE_REF:
2120 case BIT_FIELD_REF:
2121 case OBJ_TYPE_REF:
2122
2123 case REALPART_EXPR:
2124 case IMAGPART_EXPR:
2125 case PREINCREMENT_EXPR:
2126 case PREDECREMENT_EXPR:
2127 case SAVE_EXPR:
2128 case TRY_CATCH_EXPR:
2129 case WITH_CLEANUP_EXPR:
2130 case COMPOUND_EXPR:
2131 case MODIFY_EXPR:
2132 case TARGET_EXPR:
2133 case COND_EXPR:
2134 case BIND_EXPR:
2135 break;
2136
2137 default:
2138 /* Assume the worst for front-end tree codes. */
2139 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2140 break;
2141 return false;
2142 }
2143
2144 return true;
2145 }
2146
2147 /* Return an expr equal to X but certainly not valid as an lvalue. */
2148
2149 tree
2150 non_lvalue_loc (location_t loc, tree x)
2151 {
2152 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2153 us. */
2154 if (in_gimple_form)
2155 return x;
2156
2157 if (! maybe_lvalue_p (x))
2158 return x;
2159 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2160 }
2161
2162 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2163 Zero means allow extended lvalues. */
2164
2165 int pedantic_lvalues;
2166
2167 /* When pedantic, return an expr equal to X but certainly not valid as a
2168 pedantic lvalue. Otherwise, return X. */
2169
2170 static tree
2171 pedantic_non_lvalue_loc (location_t loc, tree x)
2172 {
2173 if (pedantic_lvalues)
2174 return non_lvalue_loc (loc, x);
2175
2176 return protected_set_expr_location_unshare (x, loc);
2177 }
2178 \f
2179 /* Given a tree comparison code, return the code that is the logical inverse.
2180 It is generally not safe to do this for floating-point comparisons, except
2181 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2182 ERROR_MARK in this case. */
2183
2184 enum tree_code
2185 invert_tree_comparison (enum tree_code code, bool honor_nans)
2186 {
2187 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2188 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2189 return ERROR_MARK;
2190
2191 switch (code)
2192 {
2193 case EQ_EXPR:
2194 return NE_EXPR;
2195 case NE_EXPR:
2196 return EQ_EXPR;
2197 case GT_EXPR:
2198 return honor_nans ? UNLE_EXPR : LE_EXPR;
2199 case GE_EXPR:
2200 return honor_nans ? UNLT_EXPR : LT_EXPR;
2201 case LT_EXPR:
2202 return honor_nans ? UNGE_EXPR : GE_EXPR;
2203 case LE_EXPR:
2204 return honor_nans ? UNGT_EXPR : GT_EXPR;
2205 case LTGT_EXPR:
2206 return UNEQ_EXPR;
2207 case UNEQ_EXPR:
2208 return LTGT_EXPR;
2209 case UNGT_EXPR:
2210 return LE_EXPR;
2211 case UNGE_EXPR:
2212 return LT_EXPR;
2213 case UNLT_EXPR:
2214 return GE_EXPR;
2215 case UNLE_EXPR:
2216 return GT_EXPR;
2217 case ORDERED_EXPR:
2218 return UNORDERED_EXPR;
2219 case UNORDERED_EXPR:
2220 return ORDERED_EXPR;
2221 default:
2222 gcc_unreachable ();
2223 }
2224 }
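
/* Illustrative example: with IEEE NaNs honored, the logical inverse of
   x < y is not x >= y, since both are false on NaN operands.  Hence

     invert_tree_comparison (LT_EXPR, false) == GE_EXPR
     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR

   the latter only without -ftrapping-math; with trapping math the
   function returns ERROR_MARK instead, because UNGE does not trap on
   NaN operands the way LT does.  */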
2225
2226 /* Similar, but return the comparison that results if the operands are
2227 swapped. This is safe for floating-point. */
2228
2229 enum tree_code
2230 swap_tree_comparison (enum tree_code code)
2231 {
2232 switch (code)
2233 {
2234 case EQ_EXPR:
2235 case NE_EXPR:
2236 case ORDERED_EXPR:
2237 case UNORDERED_EXPR:
2238 case LTGT_EXPR:
2239 case UNEQ_EXPR:
2240 return code;
2241 case GT_EXPR:
2242 return LT_EXPR;
2243 case GE_EXPR:
2244 return LE_EXPR;
2245 case LT_EXPR:
2246 return GT_EXPR;
2247 case LE_EXPR:
2248 return GE_EXPR;
2249 case UNGT_EXPR:
2250 return UNLT_EXPR;
2251 case UNGE_EXPR:
2252 return UNLE_EXPR;
2253 case UNLT_EXPR:
2254 return UNGT_EXPR;
2255 case UNLE_EXPR:
2256 return UNGE_EXPR;
2257 default:
2258 gcc_unreachable ();
2259 }
2260 }
2261
2262
2263 /* Convert a comparison tree code from an enum tree_code representation
2264 into a compcode bit-based encoding. This function is the inverse of
2265 compcode_to_comparison. */
2266
2267 static enum comparison_code
2268 comparison_to_compcode (enum tree_code code)
2269 {
2270 switch (code)
2271 {
2272 case LT_EXPR:
2273 return COMPCODE_LT;
2274 case EQ_EXPR:
2275 return COMPCODE_EQ;
2276 case LE_EXPR:
2277 return COMPCODE_LE;
2278 case GT_EXPR:
2279 return COMPCODE_GT;
2280 case NE_EXPR:
2281 return COMPCODE_NE;
2282 case GE_EXPR:
2283 return COMPCODE_GE;
2284 case ORDERED_EXPR:
2285 return COMPCODE_ORD;
2286 case UNORDERED_EXPR:
2287 return COMPCODE_UNORD;
2288 case UNLT_EXPR:
2289 return COMPCODE_UNLT;
2290 case UNEQ_EXPR:
2291 return COMPCODE_UNEQ;
2292 case UNLE_EXPR:
2293 return COMPCODE_UNLE;
2294 case UNGT_EXPR:
2295 return COMPCODE_UNGT;
2296 case LTGT_EXPR:
2297 return COMPCODE_LTGT;
2298 case UNGE_EXPR:
2299 return COMPCODE_UNGE;
2300 default:
2301 gcc_unreachable ();
2302 }
2303 }
2304
2305 /* Convert a compcode bit-based encoding of a comparison operator back
2306 to GCC's enum tree_code representation. This function is the
2307 inverse of comparison_to_compcode. */
2308
2309 static enum tree_code
2310 compcode_to_comparison (enum comparison_code code)
2311 {
2312 switch (code)
2313 {
2314 case COMPCODE_LT:
2315 return LT_EXPR;
2316 case COMPCODE_EQ:
2317 return EQ_EXPR;
2318 case COMPCODE_LE:
2319 return LE_EXPR;
2320 case COMPCODE_GT:
2321 return GT_EXPR;
2322 case COMPCODE_NE:
2323 return NE_EXPR;
2324 case COMPCODE_GE:
2325 return GE_EXPR;
2326 case COMPCODE_ORD:
2327 return ORDERED_EXPR;
2328 case COMPCODE_UNORD:
2329 return UNORDERED_EXPR;
2330 case COMPCODE_UNLT:
2331 return UNLT_EXPR;
2332 case COMPCODE_UNEQ:
2333 return UNEQ_EXPR;
2334 case COMPCODE_UNLE:
2335 return UNLE_EXPR;
2336 case COMPCODE_UNGT:
2337 return UNGT_EXPR;
2338 case COMPCODE_LTGT:
2339 return LTGT_EXPR;
2340 case COMPCODE_UNGE:
2341 return UNGE_EXPR;
2342 default:
2343 gcc_unreachable ();
2344 }
2345 }
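
/* Illustrative example: the encoding makes compound codes the bitwise
   OR of their parts, e.g.

     COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ                   (3 == 1 | 2)
     COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD  (13)

   which is what lets combine_comparisons below merge two comparisons
   with plain & and |.  */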
2346
2347 /* Return a tree for the comparison which is the combination of
2348 doing the AND or OR (depending on CODE) of the two operations LCODE
2349 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2350 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2351 if this makes the transformation invalid. */
2352
2353 tree
2354 combine_comparisons (location_t loc,
2355 enum tree_code code, enum tree_code lcode,
2356 enum tree_code rcode, tree truth_type,
2357 tree ll_arg, tree lr_arg)
2358 {
2359 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2360 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2361 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2362 int compcode;
2363
2364 switch (code)
2365 {
2366 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2367 compcode = lcompcode & rcompcode;
2368 break;
2369
2370 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2371 compcode = lcompcode | rcompcode;
2372 break;
2373
2374 default:
2375 return NULL_TREE;
2376 }
2377
2378 if (!honor_nans)
2379 {
2380 /* Eliminate unordered comparisons, as well as LTGT and ORD
2381 which are not used unless the mode has NaNs. */
2382 compcode &= ~COMPCODE_UNORD;
2383 if (compcode == COMPCODE_LTGT)
2384 compcode = COMPCODE_NE;
2385 else if (compcode == COMPCODE_ORD)
2386 compcode = COMPCODE_TRUE;
2387 }
2388 else if (flag_trapping_math)
2389 {
2390 /* Check that the original operation and the optimized ones will trap
2391 under the same condition. */
2392 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2393 && (lcompcode != COMPCODE_EQ)
2394 && (lcompcode != COMPCODE_ORD);
2395 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2396 && (rcompcode != COMPCODE_EQ)
2397 && (rcompcode != COMPCODE_ORD);
2398 bool trap = (compcode & COMPCODE_UNORD) == 0
2399 && (compcode != COMPCODE_EQ)
2400 && (compcode != COMPCODE_ORD);
2401
2402 /* In a short-circuited boolean expression the LHS might be
2403 such that the RHS, if evaluated, will never trap. For
2404 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2405 if neither x nor y is NaN. (This is a mixed blessing: for
2406 example, the expression above will never trap, hence
2407 optimizing it to x < y would be invalid). */
2408 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2409 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2410 rtrap = false;
2411
2412 /* If the comparison was short-circuited, and only the RHS
2413 trapped, we may now generate a spurious trap. */
2414 if (rtrap && !ltrap
2415 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2416 return NULL_TREE;
2417
2418 /* If we changed the conditions that cause a trap, we lose. */
2419 if ((ltrap || rtrap) != trap)
2420 return NULL_TREE;
2421 }
2422
2423 if (compcode == COMPCODE_TRUE)
2424 return constant_boolean_node (true, truth_type);
2425 else if (compcode == COMPCODE_FALSE)
2426 return constant_boolean_node (false, truth_type);
2427 else
2428 {
2429 enum tree_code tcode;
2430
2431 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2432 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2433 }
2434 }
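
/* Usage sketch (hypothetical caller): merging x < y || x == y for
   integer operands X and Y,

     tree t = combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR,
                                   EQ_EXPR, boolean_type_node, x, y);

   computes COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and returns
   x <= y, while an AND of contradictory codes such as LT and GT
   yields COMPCODE_FALSE and folds to a constant false node.  */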
2435 \f
2436 /* Return nonzero if two operands (typically of the same tree node)
2437 are necessarily equal. If either argument has side-effects this
2438 function returns zero. FLAGS modifies behavior as follows:
2439
2440 If OEP_ONLY_CONST is set, only return nonzero for constants.
2441 This function tests whether the operands are indistinguishable;
2442 it does not test whether they are equal using C's == operation.
2443 The distinction is important for IEEE floating point, because
2444 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2445 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2446
2447 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2448 even though it may hold multiple values during a function.
2449 This is because a GCC tree node guarantees that nothing else is
2450 executed between the evaluation of its "operands" (which may often
2451 be evaluated in arbitrary order). Hence if the operands themselves
2452 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2453 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2454 unset means assuming isochronic (or instantaneous) tree equivalence.
2455 Unless comparing arbitrary expression trees, such as from different
2456 statements, this flag can usually be left unset.
2457
2458 If OEP_PURE_SAME is set, then pure functions with identical arguments
2459 are considered the same. It is used when the caller has other ways
2460 to ensure that global memory is unchanged in between. */
2461
2462 int
2463 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2464 {
2465 /* If either is ERROR_MARK, they aren't equal. */
2466 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2467 || TREE_TYPE (arg0) == error_mark_node
2468 || TREE_TYPE (arg1) == error_mark_node)
2469 return 0;
2470
2471 /* Similar, if either does not have a type (like a released SSA name),
2472 they aren't equal. */
2473 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2474 return 0;
2475
2476 /* Check equality of integer constants before bailing out due to
2477 precision differences. */
2478 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2479 return tree_int_cst_equal (arg0, arg1);
2480
2481 /* If both types don't have the same signedness, then we can't consider
2482 them equal. We must check this before the STRIP_NOPS calls
2483 because they may change the signedness of the arguments. As pointers
2484 strictly don't have a signedness, require either two pointers or
2485 two non-pointers as well. */
2486 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2487 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2488 return 0;
2489
2490 /* We cannot consider pointers to different address space equal. */
2491 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2492 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2493 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2494 return 0;
2495
2496 /* If both types don't have the same precision, then it is not safe
2497 to strip NOPs. */
2498 if (element_precision (TREE_TYPE (arg0))
2499 != element_precision (TREE_TYPE (arg1)))
2500 return 0;
2501
2502 STRIP_NOPS (arg0);
2503 STRIP_NOPS (arg1);
2504
2505 /* In case both args are comparisons but with different comparison
2506 code, try to swap the comparison operands of one arg to produce
2507 a match and compare that variant. */
2508 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2509 && COMPARISON_CLASS_P (arg0)
2510 && COMPARISON_CLASS_P (arg1))
2511 {
2512 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2513
2514 if (TREE_CODE (arg0) == swap_code)
2515 return operand_equal_p (TREE_OPERAND (arg0, 0),
2516 TREE_OPERAND (arg1, 1), flags)
2517 && operand_equal_p (TREE_OPERAND (arg0, 1),
2518 TREE_OPERAND (arg1, 0), flags);
2519 }
2520
2521 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2522 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2523 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2524 return 0;
2525
2526 /* This is needed for conversions and for COMPONENT_REF.
2527 Might as well play it safe and always test this. */
2528 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2529 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2530 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2531 return 0;
2532
2533 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2534 We don't care about side effects in that case because the SAVE_EXPR
2535 takes care of that for us. In all other cases, two expressions are
2536 equal if they have no side effects. If we have two identical
2537 expressions with side effects that should be treated the same due
2538 to the only side effects being identical SAVE_EXPR's, that will
2539 be detected in the recursive calls below.
2540 If we are taking an invariant address of two identical objects
2541 they are necessarily equal as well. */
2542 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2543 && (TREE_CODE (arg0) == SAVE_EXPR
2544 || (flags & OEP_CONSTANT_ADDRESS_OF)
2545 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2546 return 1;
2547
2548 /* Next handle constant cases, those for which we can return 1 even
2549 if ONLY_CONST is set. */
2550 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2551 switch (TREE_CODE (arg0))
2552 {
2553 case INTEGER_CST:
2554 return tree_int_cst_equal (arg0, arg1);
2555
2556 case FIXED_CST:
2557 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2558 TREE_FIXED_CST (arg1));
2559
2560 case REAL_CST:
2561 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2562 TREE_REAL_CST (arg1)))
2563 return 1;
2564
2565
2566 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2567 {
2568 /* If we do not distinguish between positive and negative zero,
2569 consider them equal. */
2570 if (real_zerop (arg0) && real_zerop (arg1))
2571 return 1;
2572 }
2573 return 0;
2574
2575 case VECTOR_CST:
2576 {
2577 unsigned i;
2578
2579 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2580 return 0;
2581
2582 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2583 {
2584 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2585 VECTOR_CST_ELT (arg1, i), flags))
2586 return 0;
2587 }
2588 return 1;
2589 }
2590
2591 case COMPLEX_CST:
2592 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2593 flags)
2594 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2595 flags));
2596
2597 case STRING_CST:
2598 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2599 && ! memcmp (TREE_STRING_POINTER (arg0),
2600 TREE_STRING_POINTER (arg1),
2601 TREE_STRING_LENGTH (arg0)));
2602
2603 case ADDR_EXPR:
2604 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2605 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2606 ? OEP_CONSTANT_ADDRESS_OF : 0);
2607 default:
2608 break;
2609 }
2610
2611 if (flags & OEP_ONLY_CONST)
2612 return 0;
2613
2614 /* Define macros to test an operand from arg0 and arg1 for equality and a
2615 variant that allows null and views null as being different from any
2616 non-null value. In the latter case, if either is null, then both
2617 must be; otherwise, do the normal comparison. */
2618 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2619 TREE_OPERAND (arg1, N), flags)
2620
2621 #define OP_SAME_WITH_NULL(N) \
2622 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2623 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2624
2625 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2626 {
2627 case tcc_unary:
2628 /* Two conversions are equal only if signedness and modes match. */
2629 switch (TREE_CODE (arg0))
2630 {
2631 CASE_CONVERT:
2632 case FIX_TRUNC_EXPR:
2633 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2634 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2635 return 0;
2636 break;
2637 default:
2638 break;
2639 }
2640
2641 return OP_SAME (0);
2642
2643
2644 case tcc_comparison:
2645 case tcc_binary:
2646 if (OP_SAME (0) && OP_SAME (1))
2647 return 1;
2648
2649 /* For commutative ops, allow the other order. */
2650 return (commutative_tree_code (TREE_CODE (arg0))
2651 && operand_equal_p (TREE_OPERAND (arg0, 0),
2652 TREE_OPERAND (arg1, 1), flags)
2653 && operand_equal_p (TREE_OPERAND (arg0, 1),
2654 TREE_OPERAND (arg1, 0), flags));
2655
2656 case tcc_reference:
2657 /* If either of the pointer (or reference) expressions we are
2658 dereferencing contain a side effect, these cannot be equal,
2659 but their addresses can be. */
2660 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2661 && (TREE_SIDE_EFFECTS (arg0)
2662 || TREE_SIDE_EFFECTS (arg1)))
2663 return 0;
2664
2665 switch (TREE_CODE (arg0))
2666 {
2667 case INDIRECT_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 return OP_SAME (0);
2670
2671 case REALPART_EXPR:
2672 case IMAGPART_EXPR:
2673 return OP_SAME (0);
2674
2675 case TARGET_MEM_REF:
2676 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2677 /* Require equal extra operands and then fall through to MEM_REF
2678 handling of the two common operands. */
2679 if (!OP_SAME_WITH_NULL (2)
2680 || !OP_SAME_WITH_NULL (3)
2681 || !OP_SAME_WITH_NULL (4))
2682 return 0;
2683 /* Fallthru. */
2684 case MEM_REF:
2685 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2686 /* Require equal access sizes, and similar pointer types.
2687 We can have incomplete types for array references of
2688 variable-sized arrays from the Fortran frontend
2689 though. Also verify the types are compatible. */
2690 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2691 || (TYPE_SIZE (TREE_TYPE (arg0))
2692 && TYPE_SIZE (TREE_TYPE (arg1))
2693 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2694 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2695 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2696 && alias_ptr_types_compatible_p
2697 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2698 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2699 && OP_SAME (0) && OP_SAME (1));
2700
2701 case ARRAY_REF:
2702 case ARRAY_RANGE_REF:
2703 /* Operands 2 and 3 may be null.
2704 Compare the array index by value if it is constant first as we
2705 may have different types but same value here. */
2706 if (!OP_SAME (0))
2707 return 0;
2708 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2709 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2710 TREE_OPERAND (arg1, 1))
2711 || OP_SAME (1))
2712 && OP_SAME_WITH_NULL (2)
2713 && OP_SAME_WITH_NULL (3));
2714
2715 case COMPONENT_REF:
2716 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2717 may be NULL when we're called to compare MEM_EXPRs. */
2718 if (!OP_SAME_WITH_NULL (0))
2719 return 0;
2720 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2721 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2722
2723 case BIT_FIELD_REF:
2724 if (!OP_SAME (0))
2725 return 0;
2726 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2727 return OP_SAME (1) && OP_SAME (2);
2728
2729 default:
2730 return 0;
2731 }
2732
2733 case tcc_expression:
2734 switch (TREE_CODE (arg0))
2735 {
2736 case ADDR_EXPR:
2737 case TRUTH_NOT_EXPR:
2738 return OP_SAME (0);
2739
2740 case TRUTH_ANDIF_EXPR:
2741 case TRUTH_ORIF_EXPR:
2742 return OP_SAME (0) && OP_SAME (1);
2743
2744 case FMA_EXPR:
2745 case WIDEN_MULT_PLUS_EXPR:
2746 case WIDEN_MULT_MINUS_EXPR:
2747 if (!OP_SAME (2))
2748 return 0;
2749 /* The multiplication operands are commutative. */
2750 /* FALLTHRU */
2751
2752 case TRUTH_AND_EXPR:
2753 case TRUTH_OR_EXPR:
2754 case TRUTH_XOR_EXPR:
2755 if (OP_SAME (0) && OP_SAME (1))
2756 return 1;
2757
2758 /* Otherwise take into account this is a commutative operation. */
2759 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2760 TREE_OPERAND (arg1, 1), flags)
2761 && operand_equal_p (TREE_OPERAND (arg0, 1),
2762 TREE_OPERAND (arg1, 0), flags));
2763
2764 case COND_EXPR:
2765 case VEC_COND_EXPR:
2766 case DOT_PROD_EXPR:
2767 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2768
2769 default:
2770 return 0;
2771 }
2772
2773 case tcc_vl_exp:
2774 switch (TREE_CODE (arg0))
2775 {
2776 case CALL_EXPR:
2777 /* If the CALL_EXPRs call different functions, then they
2778 clearly cannot be equal. */
2779 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2780 flags))
2781 return 0;
2782
2783 {
2784 unsigned int cef = call_expr_flags (arg0);
2785 if (flags & OEP_PURE_SAME)
2786 cef &= ECF_CONST | ECF_PURE;
2787 else
2788 cef &= ECF_CONST;
2789 if (!cef)
2790 return 0;
2791 }
2792
2793 /* Now see if all the arguments are the same. */
2794 {
2795 const_call_expr_arg_iterator iter0, iter1;
2796 const_tree a0, a1;
2797 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2798 a1 = first_const_call_expr_arg (arg1, &iter1);
2799 a0 && a1;
2800 a0 = next_const_call_expr_arg (&iter0),
2801 a1 = next_const_call_expr_arg (&iter1))
2802 if (! operand_equal_p (a0, a1, flags))
2803 return 0;
2804
2805 /* If we get here and both argument lists are exhausted
2806 then the CALL_EXPRs are equal. */
2807 return ! (a0 || a1);
2808 }
2809 default:
2810 return 0;
2811 }
2812
2813 case tcc_declaration:
2814 /* Consider __builtin_sqrt equal to sqrt. */
2815 return (TREE_CODE (arg0) == FUNCTION_DECL
2816 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2817 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2818 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2819
2820 default:
2821 return 0;
2822 }
2823
2824 #undef OP_SAME
2825 #undef OP_SAME_WITH_NULL
2826 }
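
/* Illustrative example: the IEEE caveats above show up in the REAL_CST
   case.  Given trees MINUS_ZERO and PLUS_ZERO for -0.0 and 0.0,

     operand_equal_p (minus_zero, plus_zero, 0)

   is nonzero only when the mode does not honor signed zeros: the
   values are not bit-identical, so the fallback requires
   !HONOR_SIGNED_ZEROS and real_zerop on both operands.  */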
2827 \f
2828 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2829 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2830
2831 When in doubt, return 0. */
2832
2833 static int
2834 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2835 {
2836 int unsignedp1, unsignedpo;
2837 tree primarg0, primarg1, primother;
2838 unsigned int correct_width;
2839
2840 if (operand_equal_p (arg0, arg1, 0))
2841 return 1;
2842
2843 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2844 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2845 return 0;
2846
2847 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2848 and see if the inner values are the same. This removes any
2849 signedness comparison, which doesn't matter here. */
2850 primarg0 = arg0, primarg1 = arg1;
2851 STRIP_NOPS (primarg0);
2852 STRIP_NOPS (primarg1);
2853 if (operand_equal_p (primarg0, primarg1, 0))
2854 return 1;
2855
2856 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2857 actual comparison operand, ARG0.
2858
2859 First throw away any conversions to wider types
2860 already present in the operands. */
2861
2862 primarg1 = get_narrower (arg1, &unsignedp1);
2863 primother = get_narrower (other, &unsignedpo);
2864
2865 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2866 if (unsignedp1 == unsignedpo
2867 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2868 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2869 {
2870 tree type = TREE_TYPE (arg0);
2871
2872 /* Make sure shorter operand is extended the right way
2873 to match the longer operand. */
2874 primarg1 = fold_convert (signed_or_unsigned_type_for
2875 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2876
2877 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2878 return 1;
2879 }
2880
2881 return 0;
2882 }
2883 \f
2884 /* See if ARG is an expression that is either a comparison or is performing
2885 arithmetic on comparisons. The comparisons must only be comparing
2886 two different values, which will be stored in *CVAL1 and *CVAL2; if
2887 they are nonzero it means that some operands have already been found.
2888 No variables may be used anywhere else in the expression except in the
2889 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2890 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2891
2892 If this is true, return 1. Otherwise, return zero. */
2893
2894 static int
2895 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2896 {
2897 enum tree_code code = TREE_CODE (arg);
2898 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2899
2900 /* We can handle some of the tcc_expression cases here. */
2901 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2902 tclass = tcc_unary;
2903 else if (tclass == tcc_expression
2904 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2905 || code == COMPOUND_EXPR))
2906 tclass = tcc_binary;
2907
2908 else if (tclass == tcc_expression && code == SAVE_EXPR
2909 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2910 {
2911 /* If we've already found a CVAL1 or CVAL2, this expression is
2912 too complex to handle. */
2913 if (*cval1 || *cval2)
2914 return 0;
2915
2916 tclass = tcc_unary;
2917 *save_p = 1;
2918 }
2919
2920 switch (tclass)
2921 {
2922 case tcc_unary:
2923 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2924
2925 case tcc_binary:
2926 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2927 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2928 cval1, cval2, save_p));
2929
2930 case tcc_constant:
2931 return 1;
2932
2933 case tcc_expression:
2934 if (code == COND_EXPR)
2935 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2936 cval1, cval2, save_p)
2937 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2938 cval1, cval2, save_p)
2939 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2940 cval1, cval2, save_p));
2941 return 0;
2942
2943 case tcc_comparison:
2944 /* First see if we can handle the first operand, then the second. For
2945 the second operand, we know *CVAL1 can't be zero. It must be that
2946 one side of the comparison is each of the values; test for the
2947 case where this isn't true by failing if the two operands
2948 are the same. */
2949
2950 if (operand_equal_p (TREE_OPERAND (arg, 0),
2951 TREE_OPERAND (arg, 1), 0))
2952 return 0;
2953
2954 if (*cval1 == 0)
2955 *cval1 = TREE_OPERAND (arg, 0);
2956 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2957 ;
2958 else if (*cval2 == 0)
2959 *cval2 = TREE_OPERAND (arg, 0);
2960 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2961 ;
2962 else
2963 return 0;
2964
2965 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2966 ;
2967 else if (*cval2 == 0)
2968 *cval2 = TREE_OPERAND (arg, 1);
2969 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2970 ;
2971 else
2972 return 0;
2973
2974 return 1;
2975
2976 default:
2977 return 0;
2978 }
2979 }
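
/* Illustrative example: for ARG = (a < b) | (a == b) with *CVAL1 and
   *CVAL2 initially null, twoval_comparison_p records A in *CVAL1 and
   B in *CVAL2 and returns 1, whereas (a < b) | (c == d) fails because
   it mentions more than two distinct comparison operands.  */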
2980 \f
2981 /* ARG is a tree that is known to contain just arithmetic operations and
2982 comparisons. Evaluate the operations in the tree substituting NEW0 for
2983 any occurrence of OLD0 as an operand of a comparison and likewise for
2984 NEW1 and OLD1. */
2985
2986 static tree
2987 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2988 tree old1, tree new1)
2989 {
2990 tree type = TREE_TYPE (arg);
2991 enum tree_code code = TREE_CODE (arg);
2992 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2993
2994 /* We can handle some of the tcc_expression cases here. */
2995 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2996 tclass = tcc_unary;
2997 else if (tclass == tcc_expression
2998 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2999 tclass = tcc_binary;
3000
3001 switch (tclass)
3002 {
3003 case tcc_unary:
3004 return fold_build1_loc (loc, code, type,
3005 eval_subst (loc, TREE_OPERAND (arg, 0),
3006 old0, new0, old1, new1));
3007
3008 case tcc_binary:
3009 return fold_build2_loc (loc, code, type,
3010 eval_subst (loc, TREE_OPERAND (arg, 0),
3011 old0, new0, old1, new1),
3012 eval_subst (loc, TREE_OPERAND (arg, 1),
3013 old0, new0, old1, new1));
3014
3015 case tcc_expression:
3016 switch (code)
3017 {
3018 case SAVE_EXPR:
3019 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3020 old1, new1);
3021
3022 case COMPOUND_EXPR:
3023 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3024 old1, new1);
3025
3026 case COND_EXPR:
3027 return fold_build3_loc (loc, code, type,
3028 eval_subst (loc, TREE_OPERAND (arg, 0),
3029 old0, new0, old1, new1),
3030 eval_subst (loc, TREE_OPERAND (arg, 1),
3031 old0, new0, old1, new1),
3032 eval_subst (loc, TREE_OPERAND (arg, 2),
3033 old0, new0, old1, new1));
3034 default:
3035 break;
3036 }
3037 /* Fall through - ??? */
3038
3039 case tcc_comparison:
3040 {
3041 tree arg0 = TREE_OPERAND (arg, 0);
3042 tree arg1 = TREE_OPERAND (arg, 1);
3043
3044 /* We need to check both for exact equality and tree equality. The
3045 former will be true if the operand has a side-effect. In that
3046 case, we know the operand occurred exactly once. */
3047
3048 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3049 arg0 = new0;
3050 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3051 arg0 = new1;
3052
3053 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3054 arg1 = new0;
3055 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3056 arg1 = new1;
3057
3058 return fold_build2_loc (loc, code, type, arg0, arg1);
3059 }
3060
3061 default:
3062 return arg;
3063 }
3064 }
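
/* Illustrative example: for ARG = (x < y) && (x == z), the call

     eval_subst (loc, arg, x, a, y, b)

   replaces every comparison operand equal to X with A and every one
   equal to Y with B, rebuilding the tree as (a < b) && (a == z).  */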
3065 \f
3066 /* Return a tree for the case when the result of an expression is RESULT
3067 converted to TYPE and OMITTED was previously an operand of the expression
3068 but is now not needed (e.g., we folded OMITTED * 0).
3069
3070 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3071 the conversion of RESULT to TYPE. */
3072
3073 tree
3074 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3075 {
3076 tree t = fold_convert_loc (loc, type, result);
3077
3078 /* If the resulting operand is an empty statement, just return the omitted
3079 statement cast to void. */
3080 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3081 return build1_loc (loc, NOP_EXPR, void_type_node,
3082 fold_ignored_result (omitted));
3083
3084 if (TREE_SIDE_EFFECTS (omitted))
3085 return build2_loc (loc, COMPOUND_EXPR, type,
3086 fold_ignored_result (omitted), t);
3087
3088 return non_lvalue_loc (loc, t);
3089 }
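
/* Usage sketch (hypothetical caller): when folding f () * 0 to 0, the
   call to F cannot simply be dropped, so one would write

     omit_one_operand_loc (loc, type, integer_zero_node, call);

   producing the COMPOUND_EXPR (f (), 0), which still evaluates the
   side effects before yielding the constant.  */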
3090
3091 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3092
3093 static tree
3094 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3095 tree omitted)
3096 {
3097 tree t = fold_convert_loc (loc, type, result);
3098
3099 /* If the resulting operand is an empty statement, just return the omitted
3100 statement cast to void. */
3101 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3102 return build1_loc (loc, NOP_EXPR, void_type_node,
3103 fold_ignored_result (omitted));
3104
3105 if (TREE_SIDE_EFFECTS (omitted))
3106 return build2_loc (loc, COMPOUND_EXPR, type,
3107 fold_ignored_result (omitted), t);
3108
3109 return pedantic_non_lvalue_loc (loc, t);
3110 }
3111
3112 /* Return a tree for the case when the result of an expression is RESULT
3113 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3114 of the expression but are now not needed.
3115
3116 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3117 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3118 evaluated before OMITTED2. Otherwise, if neither has side effects,
3119 just do the conversion of RESULT to TYPE. */
3120
3121 tree
3122 omit_two_operands_loc (location_t loc, tree type, tree result,
3123 tree omitted1, tree omitted2)
3124 {
3125 tree t = fold_convert_loc (loc, type, result);
3126
3127 if (TREE_SIDE_EFFECTS (omitted2))
3128 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3129 if (TREE_SIDE_EFFECTS (omitted1))
3130 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3131
3132 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3133 }
3134
3135 \f
3136 /* Return a simplified tree node for the truth-negation of ARG. This
3137 never alters ARG itself. We assume that ARG is an operation that
3138 returns a truth value (0 or 1).
3139
3140 FIXME: one would think we would fold the result, but it causes
3141 problems with the dominator optimizer. */
3142
3143 static tree
3144 fold_truth_not_expr (location_t loc, tree arg)
3145 {
3146 tree type = TREE_TYPE (arg);
3147 enum tree_code code = TREE_CODE (arg);
3148 location_t loc1, loc2;
3149
3150 /* If this is a comparison, we can simply invert it, except for
3151 floating-point non-equality comparisons, in which case we just
3152 enclose a TRUTH_NOT_EXPR around what we have. */
3153
3154 if (TREE_CODE_CLASS (code) == tcc_comparison)
3155 {
3156 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3157 if (FLOAT_TYPE_P (op_type)
3158 && flag_trapping_math
3159 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3160 && code != NE_EXPR && code != EQ_EXPR)
3161 return NULL_TREE;
3162
3163 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3164 if (code == ERROR_MARK)
3165 return NULL_TREE;
3166
3167 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3168 TREE_OPERAND (arg, 1));
3169 }
3170
3171 switch (code)
3172 {
3173 case INTEGER_CST:
3174 return constant_boolean_node (integer_zerop (arg), type);
3175
3176 case TRUTH_AND_EXPR:
3177 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3178 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3179 return build2_loc (loc, TRUTH_OR_EXPR, type,
3180 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3181 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3182
3183 case TRUTH_OR_EXPR:
3184 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3185 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3186 return build2_loc (loc, TRUTH_AND_EXPR, type,
3187 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3188 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3189
3190 case TRUTH_XOR_EXPR:
3191 /* Here we can invert either operand. We invert the first operand
3192 unless the second operand is a TRUTH_NOT_EXPR in which case our
3193 result is the XOR of the first operand with the inside of the
3194 negation of the second operand. */
3195
3196 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3197 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3198 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3199 else
3200 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3201 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3202 TREE_OPERAND (arg, 1));
3203
3204 case TRUTH_ANDIF_EXPR:
3205 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3206 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3207 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3208 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3209 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3210
3211 case TRUTH_ORIF_EXPR:
3212 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3213 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3214 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3215 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3216 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3217
3218 case TRUTH_NOT_EXPR:
3219 return TREE_OPERAND (arg, 0);
3220
3221 case COND_EXPR:
3222 {
3223 tree arg1 = TREE_OPERAND (arg, 1);
3224 tree arg2 = TREE_OPERAND (arg, 2);
3225
3226 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3227 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3228
3229 /* A COND_EXPR may have a throw as one operand, which
3230 then has void type. Just leave void operands
3231 as they are. */
3232 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3233 VOID_TYPE_P (TREE_TYPE (arg1))
3234 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3235 VOID_TYPE_P (TREE_TYPE (arg2))
3236 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3237 }
3238
3239 case COMPOUND_EXPR:
3240 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3241 return build2_loc (loc, COMPOUND_EXPR, type,
3242 TREE_OPERAND (arg, 0),
3243 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3244
3245 case NON_LVALUE_EXPR:
3246 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3247 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3248
3249 CASE_CONVERT:
3250 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3251 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3252
3253 /* ... fall through ... */
3254
3255 case FLOAT_EXPR:
3256 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3257 return build1_loc (loc, TREE_CODE (arg), type,
3258 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3259
3260 case BIT_AND_EXPR:
3261 if (!integer_onep (TREE_OPERAND (arg, 1)))
3262 return NULL_TREE;
3263 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3264
3265 case SAVE_EXPR:
3266 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3267
3268 case CLEANUP_POINT_EXPR:
3269 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3270 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3271 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3272
3273 default:
3274 return NULL_TREE;
3275 }
3276 }
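
/* Illustrative example: the TRUTH_AND_EXPR and TRUTH_OR_EXPR arms
   above are De Morgan's laws in tree form,

     !(a && b)  ->  !a || !b
     !(a || b)  ->  !a && !b

   while a floating-point a < b under -ftrapping-math makes the
   function return NULL_TREE rather than build an inexact inverse.  */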
3277
3278 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3279 assume that ARG is an operation that returns a truth value (0 or 1
3280 for scalars, 0 or -1 for vectors). Return the folded expression if
3281 folding is successful. Otherwise, return NULL_TREE. */
3282
3283 static tree
3284 fold_invert_truthvalue (location_t loc, tree arg)
3285 {
3286 tree type = TREE_TYPE (arg);
3287 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3288 ? BIT_NOT_EXPR
3289 : TRUTH_NOT_EXPR,
3290 type, arg);
3291 }
3292
3293 /* Return a simplified tree node for the truth-negation of ARG. This
3294 never alters ARG itself. We assume that ARG is an operation that
3295 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3296
3297 tree
3298 invert_truthvalue_loc (location_t loc, tree arg)
3299 {
3300 if (TREE_CODE (arg) == ERROR_MARK)
3301 return arg;
3302
3303 tree type = TREE_TYPE (arg);
3304 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3305 ? BIT_NOT_EXPR
3306 : TRUTH_NOT_EXPR,
3307 type, arg);
3308 }
3309
3310 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3311 operands are another bit-wise operation with a common input. If so,
3312 distribute the bit operations to save an operation and possibly two if
3313 constants are involved. For example, convert
3314 (A | B) & (A | C) into A | (B & C)
3315 Further simplification will occur if B and C are constants.
3316
3317 If this optimization cannot be done, 0 will be returned. */
3318
3319 static tree
3320 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3321 tree arg0, tree arg1)
3322 {
3323 tree common;
3324 tree left, right;
3325
3326 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3327 || TREE_CODE (arg0) == code
3328 || (TREE_CODE (arg0) != BIT_AND_EXPR
3329 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3330 return 0;
3331
3332 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3333 {
3334 common = TREE_OPERAND (arg0, 0);
3335 left = TREE_OPERAND (arg0, 1);
3336 right = TREE_OPERAND (arg1, 1);
3337 }
3338 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3339 {
3340 common = TREE_OPERAND (arg0, 0);
3341 left = TREE_OPERAND (arg0, 1);
3342 right = TREE_OPERAND (arg1, 0);
3343 }
3344 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3345 {
3346 common = TREE_OPERAND (arg0, 1);
3347 left = TREE_OPERAND (arg0, 0);
3348 right = TREE_OPERAND (arg1, 1);
3349 }
3350 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3351 {
3352 common = TREE_OPERAND (arg0, 1);
3353 left = TREE_OPERAND (arg0, 0);
3354 right = TREE_OPERAND (arg1, 0);
3355 }
3356 else
3357 return 0;
3358
3359 common = fold_convert_loc (loc, type, common);
3360 left = fold_convert_loc (loc, type, left);
3361 right = fold_convert_loc (loc, type, right);
3362 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3363 fold_build2_loc (loc, code, type, left, right));
3364 }
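
/* Illustrative example: with constant operands the distribution above
   enables further folding,

     (x | 3) & (x | 5)  ->  x | (3 & 5)  ->  x | 1

   saving one bitwise operation outright and letting constant folding
   collapse the remaining pair.  */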
3365
3366 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3367 with code CODE. This optimization is unsafe. */
3368 static tree
3369 distribute_real_division (location_t loc, enum tree_code code, tree type,
3370 tree arg0, tree arg1)
3371 {
3372 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3373 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3374
3375 /* (A / C) +- (B / C) -> (A +- B) / C. */
3376 if (mul0 == mul1
3377 && operand_equal_p (TREE_OPERAND (arg0, 1),
3378 TREE_OPERAND (arg1, 1), 0))
3379 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3380 fold_build2_loc (loc, code, type,
3381 TREE_OPERAND (arg0, 0),
3382 TREE_OPERAND (arg1, 0)),
3383 TREE_OPERAND (arg0, 1));
3384
3385 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3386 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3387 TREE_OPERAND (arg1, 0), 0)
3388 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3389 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3390 {
3391 REAL_VALUE_TYPE r0, r1;
3392 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3393 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3394 if (!mul0)
3395 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3396 if (!mul1)
3397 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3398 real_arithmetic (&r0, code, &r0, &r1);
3399 return fold_build2_loc (loc, MULT_EXPR, type,
3400 TREE_OPERAND (arg0, 0),
3401 build_real (type, r0));
3402 }
3403
3404 return NULL_TREE;
3405 }
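
/* Illustrative example: both transformations above can change rounding,
   hence "unsafe".  For instance

     x / 4.0 + x / 5.0  ->  x * (1/4.0 + 1/5.0)  ->  x * 0.45

   evaluates the reciprocals and their sum at compile time via
   real_arithmetic.  */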
3406 \f
3407 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3408 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3409
3410 static tree
3411 make_bit_field_ref (location_t loc, tree inner, tree type,
3412 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3413 {
3414 tree result, bftype;
3415
3416 if (bitpos == 0)
3417 {
3418 tree size = TYPE_SIZE (TREE_TYPE (inner));
3419 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3420 || POINTER_TYPE_P (TREE_TYPE (inner)))
3421 && host_integerp (size, 0)
3422 && tree_low_cst (size, 0) == bitsize)
3423 return fold_convert_loc (loc, type, inner);
3424 }
3425
3426 bftype = type;
3427 if (TYPE_PRECISION (bftype) != bitsize
3428 || TYPE_UNSIGNED (bftype) == !unsignedp)
3429 bftype = build_nonstandard_integer_type (bitsize, 0);
3430
3431 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3432 size_int (bitsize), bitsize_int (bitpos));
3433
3434 if (bftype != type)
3435 result = fold_convert_loc (loc, type, result);
3436
3437 return result;
3438 }
3439
3440 /* Optimize a bit-field compare.
3441
3442 There are two cases: First is a compare against a constant and the
3443 second is a comparison of two items where the fields are at the same
3444 bit position relative to the start of a chunk (byte, halfword, word)
3445 large enough to contain it. In these cases we can avoid the shift
3446 implicit in bitfield extractions.
3447
3448 For constants, we emit a compare of the shifted constant with the
3449 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3450 compared. For two fields at the same position, we do the ANDs with the
3451 similar mask and compare the result of the ANDs.
3452
3453 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3454 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3455 are the left and right operands of the comparison, respectively.
3456
3457 If the optimization described above can be done, we return the resulting
3458 tree. Otherwise we return zero. */
3459
3460 static tree
3461 optimize_bit_field_compare (location_t loc, enum tree_code code,
3462 tree compare_type, tree lhs, tree rhs)
3463 {
3464 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3465 tree type = TREE_TYPE (lhs);
3466 tree signed_type, unsigned_type;
3467 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3468 enum machine_mode lmode, rmode, nmode;
3469 int lunsignedp, runsignedp;
3470 int lvolatilep = 0, rvolatilep = 0;
3471 tree linner, rinner = NULL_TREE;
3472 tree mask;
3473 tree offset;
3474
3475 /* In the strict volatile bitfields case, doing code changes here may prevent
3476 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3477 if (flag_strict_volatile_bitfields > 0)
3478 return 0;
3479
3480 /* Get all the information about the extractions being done. If the bit size
3481 is the same as the size of the underlying object, we aren't doing an
3482 extraction at all and so can do nothing. We also don't want to
3483 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3484 then will no longer be able to replace it. */
3485 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3486 &lunsignedp, &lvolatilep, false);
3487 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3488 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3489 return 0;
3490
3491 if (!const_p)
3492 {
3493 /* If this is not a constant, we can only do something if bit positions,
3494 sizes, and signedness are the same. */
3495 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3496 &runsignedp, &rvolatilep, false);
3497
3498 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3499 || lunsignedp != runsignedp || offset != 0
3500 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3501 return 0;
3502 }
3503
3504 /* See if we can find a mode to refer to this field. We should be able to,
3505 but fail if we can't. */
3506 if (lvolatilep
3507 && GET_MODE_BITSIZE (lmode) > 0
3508 && flag_strict_volatile_bitfields > 0)
3509 nmode = lmode;
3510 else
3511 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3512 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3513 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3514 TYPE_ALIGN (TREE_TYPE (rinner))),
3515 word_mode, lvolatilep || rvolatilep);
3516 if (nmode == VOIDmode)
3517 return 0;
3518
3519 /* Set signed and unsigned types of the precision of this mode for the
3520 shifts below. */
3521 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3522 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3523
3524 /* Compute the bit position and size for the new reference and our offset
3525 within it. If the new reference is the same size as the original, we
3526 won't optimize anything, so return zero. */
3527 nbitsize = GET_MODE_BITSIZE (nmode);
3528 nbitpos = lbitpos & ~ (nbitsize - 1);
3529 lbitpos -= nbitpos;
3530 if (nbitsize == lbitsize)
3531 return 0;
3532
3533 if (BYTES_BIG_ENDIAN)
3534 lbitpos = nbitsize - lbitsize - lbitpos;
3535
3536 /* Make the mask to be used against the extracted field. */
3537 mask = build_int_cst_type (unsigned_type, -1);
3538 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3539 mask = const_binop (RSHIFT_EXPR, mask,
3540 size_int (nbitsize - lbitsize - lbitpos));
3541
3542 if (! const_p)
3543 /* If not comparing with constant, just rework the comparison
3544 and return. */
3545 return fold_build2_loc (loc, code, compare_type,
3546 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3547 make_bit_field_ref (loc, linner,
3548 unsigned_type,
3549 nbitsize, nbitpos,
3550 1),
3551 mask),
3552 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3553 make_bit_field_ref (loc, rinner,
3554 unsigned_type,
3555 nbitsize, nbitpos,
3556 1),
3557 mask));
3558
3559 /* Otherwise, we are handling the constant case. See if the constant is too
3560 big for the field. Warn and return a tree for 0 (false) if so. We do
3561 this not only for its own sake, but to avoid having to test for this
3562 error case below. If we didn't, we might generate wrong code.
3563
3564 For unsigned fields, the constant shifted right by the field length should
3565 be all zero. For signed fields, the high-order bits should agree with
3566 the sign bit. */
3567
3568 if (lunsignedp)
3569 {
3570 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3571 fold_convert_loc (loc,
3572 unsigned_type, rhs),
3573 size_int (lbitsize))))
3574 {
3575 warning (0, "comparison is always %d due to width of bit-field",
3576 code == NE_EXPR);
3577 return constant_boolean_node (code == NE_EXPR, compare_type);
3578 }
3579 }
3580 else
3581 {
3582 tree tem = const_binop (RSHIFT_EXPR,
3583 fold_convert_loc (loc, signed_type, rhs),
3584 size_int (lbitsize - 1));
3585 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3586 {
3587 warning (0, "comparison is always %d due to width of bit-field",
3588 code == NE_EXPR);
3589 return constant_boolean_node (code == NE_EXPR, compare_type);
3590 }
3591 }
3592
3593 /* Single-bit compares should always be against zero. */
3594 if (lbitsize == 1 && ! integer_zerop (rhs))
3595 {
3596 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3597 rhs = build_int_cst (type, 0);
3598 }
3599
3600 /* Make a new bitfield reference, shift the constant over the
3601 appropriate number of bits and mask it with the computed mask
3602 (in case this was a signed field). If we changed it, make a new one. */
3603 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3604 if (lvolatilep)
3605 {
3606 TREE_SIDE_EFFECTS (lhs) = 1;
3607 TREE_THIS_VOLATILE (lhs) = 1;
3608 }
3609
3610 rhs = const_binop (BIT_AND_EXPR,
3611 const_binop (LSHIFT_EXPR,
3612 fold_convert_loc (loc, unsigned_type, rhs),
3613 size_int (lbitpos)),
3614 mask);
3615
3616 lhs = build2_loc (loc, code, compare_type,
3617 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3618 return lhs;
3619 }
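
#if 0
/* Illustrative sketch (editorial, not compiled; names hypothetical): the
   source-level pattern the non-constant path above handles.  Both operands
   are bit-fields with identical position, size and signedness, so the
   comparison is rewritten as one masked word compare instead of two
   field extractions.  */
struct bf { unsigned a : 5; unsigned b : 5; };

static int
bf_equal (const struct bf *p, const struct bf *q)
{
  return p->a == q->a;
}
#endif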
3620 \f
3621 /* Subroutine for fold_truth_andor_1: decode a field reference.
3622
3623 If EXP is a comparison reference, we return the innermost reference.
3624
3625 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3626 set to the starting bit number.
3627
3628 If the innermost field can be completely contained in a mode-sized
3629 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3630
3631 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3632 otherwise it is not changed.
3633
3634 *PUNSIGNEDP is set to the signedness of the field.
3635
3636 *PMASK is set to the mask used. This is either contained in a
3637 BIT_AND_EXPR or derived from the width of the field.
3638
3639 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3640
3641 Return 0 if this is not a component reference or is one that we can't
3642 do anything with. */
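/* Illustrative example (editorial): for EXP == (s.f & 5), where F is a
   4-bit unsigned field starting at bit 8, this returns the object
   containing F and sets *PBITSIZE to 4, *PBITPOS to 8, *PAND_MASK to 5
   and *PMASK to 5 & 0xf, i.e. 5.  */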
3643
3644 static tree
3645 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3646 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3647 int *punsignedp, int *pvolatilep,
3648 tree *pmask, tree *pand_mask)
3649 {
3650 tree outer_type = 0;
3651 tree and_mask = 0;
3652 tree mask, inner, offset;
3653 tree unsigned_type;
3654 unsigned int precision;
3655
3656 /* All the optimizations using this function assume integer fields.
3657 There are problems with FP fields since the type_for_size call
3658 below can fail for, e.g., XFmode. */
3659 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3660 return 0;
3661
3662 /* We are interested in the bare arrangement of bits, so strip everything
3663 that doesn't affect the machine mode. However, record the type of the
3664 outermost expression if it may matter below. */
3665 if (CONVERT_EXPR_P (exp)
3666 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3667 outer_type = TREE_TYPE (exp);
3668 STRIP_NOPS (exp);
3669
3670 if (TREE_CODE (exp) == BIT_AND_EXPR)
3671 {
3672 and_mask = TREE_OPERAND (exp, 1);
3673 exp = TREE_OPERAND (exp, 0);
3674 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3675 if (TREE_CODE (and_mask) != INTEGER_CST)
3676 return 0;
3677 }
3678
3679 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3680 punsignedp, pvolatilep, false);
3681 if ((inner == exp && and_mask == 0)
3682 || *pbitsize < 0 || offset != 0
3683 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3684 return 0;
3685
3686 /* If the number of bits in the reference is the same as the bitsize of
3687 the outer type, then the outer type gives the signedness. Otherwise
3688 (in case of a small bitfield) the signedness is unchanged. */
3689 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3690 *punsignedp = TYPE_UNSIGNED (outer_type);
3691
3692 /* Compute the mask to access the bitfield. */
3693 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3694 precision = TYPE_PRECISION (unsigned_type);
3695
3696 mask = build_int_cst_type (unsigned_type, -1);
3697
3698 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3699 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3700
3701 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3702 if (and_mask != 0)
3703 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3704 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3705
3706 *pmask = mask;
3707 *pand_mask = and_mask;
3708 return inner;
3709 }
3710
3711 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3712 bit positions. */
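/* Illustrative example (editorial): a MASK of 0x3f is an all-ones mask for
   SIZE == 6, but not for SIZE == 5 or SIZE == 7.  */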
3713
3714 static int
3715 all_ones_mask_p (const_tree mask, int size)
3716 {
3717 tree type = TREE_TYPE (mask);
3718 unsigned int precision = TYPE_PRECISION (type);
3719 tree tmask;
3720
3721 tmask = build_int_cst_type (signed_type_for (type), -1);
3722
3723 return
3724 tree_int_cst_equal (mask,
3725 const_binop (RSHIFT_EXPR,
3726 const_binop (LSHIFT_EXPR, tmask,
3727 size_int (precision - size)),
3728 size_int (precision - size)));
3729 }
3730
3731 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3732 represents the sign bit of EXP's type. If EXP represents a sign
3733 or zero extension, also test VAL against the unextended type.
3734 The return value is the (sub)expression whose sign bit is VAL,
3735 or NULL_TREE otherwise. */
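/* Illustrative example (editorial): for a 32-bit int EXP, a VAL of
   0x80000000 is the sign bit and EXP itself is returned; for
   EXP == (int) c with C a signed char, a VAL of 0x80 matches the sign
   bit of the narrower type and C is returned.  */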
3736
3737 static tree
3738 sign_bit_p (tree exp, const_tree val)
3739 {
3740 unsigned HOST_WIDE_INT mask_lo, lo;
3741 HOST_WIDE_INT mask_hi, hi;
3742 int width;
3743 tree t;
3744
3745 /* Tree EXP must have an integral type. */
3746 t = TREE_TYPE (exp);
3747 if (! INTEGRAL_TYPE_P (t))
3748 return NULL_TREE;
3749
3750 /* Tree VAL must be an integer constant. */
3751 if (TREE_CODE (val) != INTEGER_CST
3752 || TREE_OVERFLOW (val))
3753 return NULL_TREE;
3754
3755 width = TYPE_PRECISION (t);
3756 if (width > HOST_BITS_PER_WIDE_INT)
3757 {
3758 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3759 lo = 0;
3760
3761 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3762 mask_lo = -1;
3763 }
3764 else
3765 {
3766 hi = 0;
3767 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3768
3769 mask_hi = 0;
3770 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3771 }
3772
3773 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3774 treat VAL as if it were unsigned. */
3775 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3776 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3777 return exp;
3778
3779 /* Handle extension from a narrower type. */
3780 if (TREE_CODE (exp) == NOP_EXPR
3781 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3782 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3783
3784 return NULL_TREE;
3785 }
3786
3787 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3788 to be evaluated unconditionally. */
3789
3790 static int
3791 simple_operand_p (const_tree exp)
3792 {
3793 /* Strip any conversions that don't change the machine mode. */
3794 STRIP_NOPS (exp);
3795
3796 return (CONSTANT_CLASS_P (exp)
3797 || TREE_CODE (exp) == SSA_NAME
3798 || (DECL_P (exp)
3799 && ! TREE_ADDRESSABLE (exp)
3800 && ! TREE_THIS_VOLATILE (exp)
3801 && ! DECL_NONLOCAL (exp)
3802 /* Don't regard global variables as simple. They may be
3803 allocated in ways unknown to the compiler (shared memory,
3804 #pragma weak, etc). */
3805 && ! TREE_PUBLIC (exp)
3806 && ! DECL_EXTERNAL (exp)
3807 /* Weakrefs are not safe to be read, since they can be NULL.
3808 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3809 have DECL_WEAK flag set. */
3810 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3811 /* Loading a static variable is unduly expensive, but global
3812 registers aren't expensive. */
3813 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3814 }
3815
3816 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3817 to be evaluated unconditionally.
3818 In addition to simple_operand_p, we assume that comparisons, conversions,
3819 and logic-not operations are simple, if their operands are simple, too. */
3820
3821 static bool
3822 simple_operand_p_2 (tree exp)
3823 {
3824 enum tree_code code;
3825
3826 if (TREE_SIDE_EFFECTS (exp)
3827 || tree_could_trap_p (exp))
3828 return false;
3829
3830 while (CONVERT_EXPR_P (exp))
3831 exp = TREE_OPERAND (exp, 0);
3832
3833 code = TREE_CODE (exp);
3834
3835 if (TREE_CODE_CLASS (code) == tcc_comparison)
3836 return (simple_operand_p (TREE_OPERAND (exp, 0))
3837 && simple_operand_p (TREE_OPERAND (exp, 1)));
3838
3839 if (code == TRUTH_NOT_EXPR)
3840 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3841
3842 return simple_operand_p (exp);
3843 }
3844
3845 \f
3846 /* The following functions are subroutines to fold_range_test and allow it to
3847 try to change a logical combination of comparisons into a range test.
3848
3849 For example, both
3850 X == 2 || X == 3 || X == 4 || X == 5
3851 and
3852 X >= 2 && X <= 5
3853 are converted to
3854 (unsigned) (X - 2) <= 3
3855
3856 We describe each set of comparisons as being either inside or outside
3857 a range, using a variable named like IN_P, and then describe the
3858 range with a lower and upper bound. If one of the bounds is omitted,
3859 it represents either the highest or lowest value of the type.
3860
3861 In the comments below, we represent a range by two numbers in brackets
3862 preceded by a "+" to designate being inside that range, or a "-" to
3863 designate being outside that range, so the condition can be inverted by
3864 flipping the prefix. An omitted bound is represented by a "-". For
3865 example, "- [-, 10]" means being outside the range starting at the lowest
3866 possible value and ending at 10, in other words, being greater than 10.
3867 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3868 always false.
3869
3870 We set up things so that the missing bounds are handled in a consistent
3871 manner so neither a missing bound nor "true" and "false" need to be
3872 handled using a special case. */
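
#if 0
/* Illustrative sketch (editorial, not compiled; names hypothetical): the
   conversion above written out by hand for a 32-bit int.  */
static int
in_range_2_to_5 (int x)
{
  /* Both "x == 2 || x == 3 || x == 4 || x == 5" and "x >= 2 && x <= 5"
     reduce to one unsigned comparison.  */
  return (unsigned) (x - 2) <= 3;
}
#endif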
3873
3874 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3875 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3876 and UPPER1_P are nonzero if the respective argument is an upper bound
3877 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3878 must be specified for a comparison. ARG1 will be converted to ARG0's
3879 type if both are specified. */
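/* Illustrative example (editorial): comparing a finite bound against an
   omitted upper bound with LT_EXPR yields true, since an omitted upper
   bound behaves as a value above every representable number.  */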
3880
3881 static tree
3882 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3883 tree arg1, int upper1_p)
3884 {
3885 tree tem;
3886 int result;
3887 int sgn0, sgn1;
3888
3889 /* If neither arg represents infinity, do the normal operation.
3890 Else, if not a comparison, return infinity. Else handle the special
3891 comparison rules. Note that most of the cases below won't occur, but
3892 are handled for consistency. */
3893
3894 if (arg0 != 0 && arg1 != 0)
3895 {
3896 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3897 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3898 STRIP_NOPS (tem);
3899 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3900 }
3901
3902 if (TREE_CODE_CLASS (code) != tcc_comparison)
3903 return 0;
3904
3905 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3906 for neither. In real maths, we cannot assume open ended ranges are
3907 the same. But, this is computer arithmetic, where numbers are finite.
3908 We can therefore represent any unbounded end of a range by a value Z
3909 greater than any representable number. This permits
3910 us to treat unbounded ranges as equal. */
3911 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3912 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3913 switch (code)
3914 {
3915 case EQ_EXPR:
3916 result = sgn0 == sgn1;
3917 break;
3918 case NE_EXPR:
3919 result = sgn0 != sgn1;
3920 break;
3921 case LT_EXPR:
3922 result = sgn0 < sgn1;
3923 break;
3924 case LE_EXPR:
3925 result = sgn0 <= sgn1;
3926 break;
3927 case GT_EXPR:
3928 result = sgn0 > sgn1;
3929 break;
3930 case GE_EXPR:
3931 result = sgn0 >= sgn1;
3932 break;
3933 default:
3934 gcc_unreachable ();
3935 }
3936
3937 return constant_boolean_node (result, type);
3938 }
3939 \f
3940 /* Helper routine for make_range. Perform one step for it, return
3941 new expression if the loop should continue or NULL_TREE if it should
3942 stop. */
3943
3944 tree
3945 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3946 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3947 bool *strict_overflow_p)
3948 {
3949 tree arg0_type = TREE_TYPE (arg0);
3950 tree n_low, n_high, low = *p_low, high = *p_high;
3951 int in_p = *p_in_p, n_in_p;
3952
3953 switch (code)
3954 {
3955 case TRUTH_NOT_EXPR:
3956 /* We can only do something if the range is testing for zero. */
3957 if (low == NULL_TREE || high == NULL_TREE
3958 || ! integer_zerop (low) || ! integer_zerop (high))
3959 return NULL_TREE;
3960 *p_in_p = ! in_p;
3961 return arg0;
3962
3963 case EQ_EXPR: case NE_EXPR:
3964 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3965 /* We can only do something if the range is testing for zero
3966 and if the second operand is an integer constant. Note that
3967 saying something is "in" the range we make is done by
3968 complementing IN_P since it will be set in the initial case of
3969 being not equal to zero; "out" is leaving it alone. */
3970 if (low == NULL_TREE || high == NULL_TREE
3971 || ! integer_zerop (low) || ! integer_zerop (high)
3972 || TREE_CODE (arg1) != INTEGER_CST)
3973 return NULL_TREE;
3974
3975 switch (code)
3976 {
3977 case NE_EXPR: /* - [c, c] */
3978 low = high = arg1;
3979 break;
3980 case EQ_EXPR: /* + [c, c] */
3981 in_p = ! in_p, low = high = arg1;
3982 break;
3983 case GT_EXPR: /* - [-, c] */
3984 low = 0, high = arg1;
3985 break;
3986 case GE_EXPR: /* + [c, -] */
3987 in_p = ! in_p, low = arg1, high = 0;
3988 break;
3989 case LT_EXPR: /* - [c, -] */
3990 low = arg1, high = 0;
3991 break;
3992 case LE_EXPR: /* + [-, c] */
3993 in_p = ! in_p, low = 0, high = arg1;
3994 break;
3995 default:
3996 gcc_unreachable ();
3997 }
3998
3999 /* If this is an unsigned comparison, we also know that EXP is
4000 greater than or equal to zero. We base the range tests we make
4001 on that fact, so we record it here so we can parse existing
4002 range tests. We test arg0_type since often the return type
4003 of, e.g. EQ_EXPR, is boolean. */
4004 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4005 {
4006 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4007 in_p, low, high, 1,
4008 build_int_cst (arg0_type, 0),
4009 NULL_TREE))
4010 return NULL_TREE;
4011
4012 in_p = n_in_p, low = n_low, high = n_high;
4013
4014 /* If the high bound is missing, but we have a nonzero low
4015 bound, reverse the range so it goes from zero to the low bound
4016 minus 1. */
4017 if (high == 0 && low && ! integer_zerop (low))
4018 {
4019 in_p = ! in_p;
4020 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4021 integer_one_node, 0);
4022 low = build_int_cst (arg0_type, 0);
4023 }
4024 }
4025
4026 *p_low = low;
4027 *p_high = high;
4028 *p_in_p = in_p;
4029 return arg0;
4030
4031 case NEGATE_EXPR:
4032 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4033 low and high are non-NULL, then normalize will DTRT. */
4034 if (!TYPE_UNSIGNED (arg0_type)
4035 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4036 {
4037 if (low == NULL_TREE)
4038 low = TYPE_MIN_VALUE (arg0_type);
4039 if (high == NULL_TREE)
4040 high = TYPE_MAX_VALUE (arg0_type);
4041 }
4042
4043 /* (-x) IN [a,b] -> x in [-b, -a] */
4044 n_low = range_binop (MINUS_EXPR, exp_type,
4045 build_int_cst (exp_type, 0),
4046 0, high, 1);
4047 n_high = range_binop (MINUS_EXPR, exp_type,
4048 build_int_cst (exp_type, 0),
4049 0, low, 0);
4050 if (n_high != 0 && TREE_OVERFLOW (n_high))
4051 return NULL_TREE;
4052 goto normalize;
4053
4054 case BIT_NOT_EXPR:
4055 /* ~ X -> -X - 1 */
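/* E.g. in two's complement, ~5 == -6 == -5 - 1, so the range of ~X
   follows from the range of -X - 1 (editorial example).  */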
4056 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4057 build_int_cst (exp_type, 1));
4058
4059 case PLUS_EXPR:
4060 case MINUS_EXPR:
4061 if (TREE_CODE (arg1) != INTEGER_CST)
4062 return NULL_TREE;
4063
4064 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4065 move a constant to the other side. */
4066 if (!TYPE_UNSIGNED (arg0_type)
4067 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4068 return NULL_TREE;
4069
4070 /* If EXP is signed, any overflow in the computation is undefined,
4071 so we don't worry about it so long as our computations on
4072 the bounds don't overflow. For unsigned, overflow is defined
4073 and this is exactly the right thing. */
4074 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4075 arg0_type, low, 0, arg1, 0);
4076 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4077 arg0_type, high, 1, arg1, 0);
4078 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4079 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4080 return NULL_TREE;
4081
4082 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4083 *strict_overflow_p = true;
4084
4085 normalize:
4086 /* Check for an unsigned range which has wrapped around the maximum
4087 value thus making n_high < n_low, and normalize it. */
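/* Illustrative example (editorial): for an unsigned char, the wrapped
   range + [250, 5] is normalized to - [6, 249].  */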
4088 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4089 {
4090 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4091 integer_one_node, 0);
4092 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4093 integer_one_node, 0);
4094
4095 /* If the range is of the form +/- [ x+1, x ], we won't
4096 be able to normalize it. But then, it represents the
4097 whole range or the empty set, so make it
4098 +/- [ -, - ]. */
4099 if (tree_int_cst_equal (n_low, low)
4100 && tree_int_cst_equal (n_high, high))
4101 low = high = 0;
4102 else
4103 in_p = ! in_p;
4104 }
4105 else
4106 low = n_low, high = n_high;
4107
4108 *p_low = low;
4109 *p_high = high;
4110 *p_in_p = in_p;
4111 return arg0;
4112
4113 CASE_CONVERT:
4114 case NON_LVALUE_EXPR:
4115 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4116 return NULL_TREE;
4117
4118 if (! INTEGRAL_TYPE_P (arg0_type)
4119 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4120 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4121 return NULL_TREE;
4122
4123 n_low = low, n_high = high;
4124
4125 if (n_low != 0)
4126 n_low = fold_convert_loc (loc, arg0_type, n_low);
4127
4128 if (n_high != 0)
4129 n_high = fold_convert_loc (loc, arg0_type, n_high);
4130
4131 /* If we're converting arg0 from an unsigned type to exp's
4132 signed type, we will be doing the comparison as unsigned.
4133 The tests above have already verified that LOW and HIGH
4134 are both positive.
4135
4136 So we have to ensure that we will handle large unsigned
4137 values the same way that the current signed bounds treat
4138 negative values. */
4139
4140 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4141 {
4142 tree high_positive;
4143 tree equiv_type;
4144 /* For fixed-point modes, we need to pass the saturating flag
4145 as the 2nd parameter. */
4146 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4147 equiv_type
4148 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4149 TYPE_SATURATING (arg0_type));
4150 else
4151 equiv_type
4152 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4153
4154 /* A range without an upper bound is, naturally, unbounded.
4155 Since convert would have cropped a very large value, use
4156 the max value for the destination type. */
4157 high_positive
4158 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4159 : TYPE_MAX_VALUE (arg0_type);
4160
4161 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4162 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4163 fold_convert_loc (loc, arg0_type,
4164 high_positive),
4165 build_int_cst (arg0_type, 1));
4166
4167 /* If the low bound is specified, "and" the range with the
4168 range for which the original unsigned value will be
4169 positive. */
4170 if (low != 0)
4171 {
4172 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4173 1, fold_convert_loc (loc, arg0_type,
4174 integer_zero_node),
4175 high_positive))
4176 return NULL_TREE;
4177
4178 in_p = (n_in_p == in_p);
4179 }
4180 else
4181 {
4182 /* Otherwise, "or" the range with the range of the input
4183 that will be interpreted as negative. */
4184 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4185 1, fold_convert_loc (loc, arg0_type,
4186 integer_zero_node),
4187 high_positive))
4188 return NULL_TREE;
4189
4190 in_p = (in_p != n_in_p);
4191 }
4192 }
4193
4194 *p_low = n_low;
4195 *p_high = n_high;
4196 *p_in_p = in_p;
4197 return arg0;
4198
4199 default:
4200 return NULL_TREE;
4201 }
4202 }
4203
4204 /* Given EXP, a logical expression, set the range it is testing into
4205 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4206 actually being tested. *PLOW and *PHIGH will be made of the same
4207 type as the returned expression. If EXP is not a comparison, we
4208 will most likely not be returning a useful value and range. Set
4209 *STRICT_OVERFLOW_P to true if the return value is only valid
4210 because signed overflow is undefined; otherwise, do not change
4211 *STRICT_OVERFLOW_P. */
4212
4213 tree
4214 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4215 bool *strict_overflow_p)
4216 {
4217 enum tree_code code;
4218 tree arg0, arg1 = NULL_TREE;
4219 tree exp_type, nexp;
4220 int in_p;
4221 tree low, high;
4222 location_t loc = EXPR_LOCATION (exp);
4223
4224 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4225 and see if we can refine the range. Some of the cases below may not
4226 happen, but it doesn't seem worth worrying about this. We "continue"
4227 the outer loop when we've changed something; otherwise we "break"
4228 the switch, which will "break" the while. */
4229
4230 in_p = 0;
4231 low = high = build_int_cst (TREE_TYPE (exp), 0);
4232
4233 while (1)
4234 {
4235 code = TREE_CODE (exp);
4236 exp_type = TREE_TYPE (exp);
4237 arg0 = NULL_TREE;
4238
4239 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4240 {
4241 if (TREE_OPERAND_LENGTH (exp) > 0)
4242 arg0 = TREE_OPERAND (exp, 0);
4243 if (TREE_CODE_CLASS (code) == tcc_binary
4244 || TREE_CODE_CLASS (code) == tcc_comparison
4245 || (TREE_CODE_CLASS (code) == tcc_expression
4246 && TREE_OPERAND_LENGTH (exp) > 1))
4247 arg1 = TREE_OPERAND (exp, 1);
4248 }
4249 if (arg0 == NULL_TREE)
4250 break;
4251
4252 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4253 &high, &in_p, strict_overflow_p);
4254 if (nexp == NULL_TREE)
4255 break;
4256 exp = nexp;
4257 }
4258
4259 /* If EXP is a constant, we can evaluate whether this is true or false. */
4260 if (TREE_CODE (exp) == INTEGER_CST)
4261 {
4262 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4263 exp, 0, low, 0))
4264 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4265 exp, 1, high, 1)));
4266 low = high = 0;
4267 exp = 0;
4268 }
4269
4270 *pin_p = in_p, *plow = low, *phigh = high;
4271 return exp;
4272 }
4273 \f
4274 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4275 type, TYPE, return an expression to test if EXP is in (or out of, depending
4276 on IN_P) the range. Return 0 if the test couldn't be created. */
4277
4278 tree
4279 build_range_check (location_t loc, tree type, tree exp, int in_p,
4280 tree low, tree high)
4281 {
4282 tree etype = TREE_TYPE (exp), value;
4283
4284 #ifdef HAVE_canonicalize_funcptr_for_compare
4285 /* Disable this optimization for function pointer expressions
4286 on targets that require function pointer canonicalization. */
4287 if (HAVE_canonicalize_funcptr_for_compare
4288 && TREE_CODE (etype) == POINTER_TYPE
4289 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4290 return NULL_TREE;
4291 #endif
4292
4293 if (! in_p)
4294 {
4295 value = build_range_check (loc, type, exp, 1, low, high);
4296 if (value != 0)
4297 return invert_truthvalue_loc (loc, value);
4298
4299 return 0;
4300 }
4301
4302 if (low == 0 && high == 0)
4303 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4304
4305 if (low == 0)
4306 return fold_build2_loc (loc, LE_EXPR, type, exp,
4307 fold_convert_loc (loc, etype, high));
4308
4309 if (high == 0)
4310 return fold_build2_loc (loc, GE_EXPR, type, exp,
4311 fold_convert_loc (loc, etype, low));
4312
4313 if (operand_equal_p (low, high, 0))
4314 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4315 fold_convert_loc (loc, etype, low));
4316
4317 if (integer_zerop (low))
4318 {
4319 if (! TYPE_UNSIGNED (etype))
4320 {
4321 etype = unsigned_type_for (etype);
4322 high = fold_convert_loc (loc, etype, high);
4323 exp = fold_convert_loc (loc, etype, exp);
4324 }
4325 return build_range_check (loc, type, exp, 1, 0, high);
4326 }
4327
4328 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4329 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4330 {
4331 unsigned HOST_WIDE_INT lo;
4332 HOST_WIDE_INT hi;
4333 int prec;
4334
4335 prec = TYPE_PRECISION (etype);
4336 if (prec <= HOST_BITS_PER_WIDE_INT)
4337 {
4338 hi = 0;
4339 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4340 }
4341 else
4342 {
4343 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4344 lo = HOST_WIDE_INT_M1U;
4345 }
4346
4347 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4348 {
4349 if (TYPE_UNSIGNED (etype))
4350 {
4351 tree signed_etype = signed_type_for (etype);
4352 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4353 etype
4354 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4355 else
4356 etype = signed_etype;
4357 exp = fold_convert_loc (loc, etype, exp);
4358 }
4359 return fold_build2_loc (loc, GT_EXPR, type, exp,
4360 build_int_cst (etype, 0));
4361 }
4362 }
4363
4364 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4365 This requires wrap-around arithmetic for the type of the expression.
4366 First make sure that arithmetic in this type is valid, then make sure
4367 that it wraps around. */
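/* Illustrative example (editorial): for LOW == 10 and HIGH == 20 the check
   becomes (unsigned) (c - 10) <= 10.  */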
4368 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4369 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4370 TYPE_UNSIGNED (etype));
4371
4372 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4373 {
4374 tree utype, minv, maxv;
4375
4376 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4377 for the type in question, as we rely on this here. */
4378 utype = unsigned_type_for (etype);
4379 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4380 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4381 integer_one_node, 1);
4382 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4383
4384 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4385 minv, 1, maxv, 1)))
4386 etype = utype;
4387 else
4388 return 0;
4389 }
4390
4391 high = fold_convert_loc (loc, etype, high);
4392 low = fold_convert_loc (loc, etype, low);
4393 exp = fold_convert_loc (loc, etype, exp);
4394
4395 value = const_binop (MINUS_EXPR, high, low);
4396
4397
4398 if (POINTER_TYPE_P (etype))
4399 {
4400 if (value != 0 && !TREE_OVERFLOW (value))
4401 {
4402 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4403 return build_range_check (loc, type,
4404 fold_build_pointer_plus_loc (loc, exp, low),
4405 1, build_int_cst (etype, 0), value);
4406 }
4407 return 0;
4408 }
4409
4410 if (value != 0 && !TREE_OVERFLOW (value))
4411 return build_range_check (loc, type,
4412 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4413 1, build_int_cst (etype, 0), value);
4414
4415 return 0;
4416 }
4417 \f
4418 /* Return the predecessor of VAL in its type, handling the infinite case. */
4419
4420 static tree
4421 range_predecessor (tree val)
4422 {
4423 tree type = TREE_TYPE (val);
4424
4425 if (INTEGRAL_TYPE_P (type)
4426 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4427 return 0;
4428 else
4429 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4430 }
4431
4432 /* Return the successor of VAL in its type, handling the infinite case. */
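/* Illustrative example (editorial): the successor of TYPE_MAX_VALUE has no
   representable value, so 0 is returned for it.  */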
4433
4434 static tree
4435 range_successor (tree val)
4436 {
4437 tree type = TREE_TYPE (val);
4438
4439 if (INTEGRAL_TYPE_P (type)
4440 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4441 return 0;
4442 else
4443 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4444 }
4445
4446 /* Given two ranges, see if we can merge them into one. Return 1 if we
4447 can, 0 if we can't. Set the output range into the specified parameters. */
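/* Illustrative example (editorial): merging + [2, 5] with + [4, 9] (both
   included) yields + [4, 5]; merging + [2, 5] with - [4, 9] yields
   + [2, 3].  */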
4448
4449 bool
4450 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4451 tree high0, int in1_p, tree low1, tree high1)
4452 {
4453 int no_overlap;
4454 int subset;
4455 int temp;
4456 tree tem;
4457 int in_p;
4458 tree low, high;
4459 int lowequal = ((low0 == 0 && low1 == 0)
4460 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4461 low0, 0, low1, 0)));
4462 int highequal = ((high0 == 0 && high1 == 0)
4463 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4464 high0, 1, high1, 1)));
4465
4466 /* Make range 0 be the range that starts first, or ends last if they
4467 start at the same value. Swap them if it isn't. */
4468 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4469 low0, 0, low1, 0))
4470 || (lowequal
4471 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4472 high1, 1, high0, 1))))
4473 {
4474 temp = in0_p, in0_p = in1_p, in1_p = temp;
4475 tem = low0, low0 = low1, low1 = tem;
4476 tem = high0, high0 = high1, high1 = tem;
4477 }
4478
4479 /* Now flag two cases, whether the ranges are disjoint or whether the
4480 second range is totally subsumed in the first. Note that the tests
4481 below are simplified by the ones above. */
4482 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4483 high0, 1, low1, 0));
4484 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4485 high1, 1, high0, 1));
4486
4487 /* We now have four cases, depending on whether we are including or
4488 excluding the two ranges. */
4489 if (in0_p && in1_p)
4490 {
4491 /* If they don't overlap, the result is false. If the second range
4492 is a subset it is the result. Otherwise, the range is from the start
4493 of the second to the end of the first. */
4494 if (no_overlap)
4495 in_p = 0, low = high = 0;
4496 else if (subset)
4497 in_p = 1, low = low1, high = high1;
4498 else
4499 in_p = 1, low = low1, high = high0;
4500 }
4501
4502 else if (in0_p && ! in1_p)
4503 {
4504 /* If they don't overlap, the result is the first range. If they are
4505 equal, the result is false. If the second range is a subset of the
4506 first, and the ranges begin at the same place, we go from just after
4507 the end of the second range to the end of the first. If the second
4508 range is not a subset of the first, or if it is a subset and both
4509 ranges end at the same place, the range starts at the start of the
4510 first range and ends just before the second range.
4511 Otherwise, we can't describe this as a single range. */
4512 if (no_overlap)
4513 in_p = 1, low = low0, high = high0;
4514 else if (lowequal && highequal)
4515 in_p = 0, low = high = 0;
4516 else if (subset && lowequal)
4517 {
4518 low = range_successor (high1);
4519 high = high0;
4520 in_p = 1;
4521 if (low == 0)
4522 {
4523 /* We are in the weird situation where high0 > high1 but
4524 high1 has no successor. Punt. */
4525 return 0;
4526 }
4527 }
4528 else if (! subset || highequal)
4529 {
4530 low = low0;
4531 high = range_predecessor (low1);
4532 in_p = 1;
4533 if (high == 0)
4534 {
4535 /* low0 < low1 but low1 has no predecessor. Punt. */
4536 return 0;
4537 }
4538 }
4539 else
4540 return 0;
4541 }
4542
4543 else if (! in0_p && in1_p)
4544 {
4545 /* If they don't overlap, the result is the second range. If the second
4546 is a subset of the first, the result is false. Otherwise,
4547 the range starts just after the first range and ends at the
4548 end of the second. */
4549 if (no_overlap)
4550 in_p = 1, low = low1, high = high1;
4551 else if (subset || highequal)
4552 in_p = 0, low = high = 0;
4553 else
4554 {
4555 low = range_successor (high0);
4556 high = high1;
4557 in_p = 1;
4558 if (low == 0)
4559 {
4560 /* high1 > high0 but high0 has no successor. Punt. */
4561 return 0;
4562 }
4563 }
4564 }
4565
4566 else
4567 {
4568 /* The case where we are excluding both ranges. Here the complex case
4569 is if they don't overlap. In that case, the only time we have a
4570 range is if they are adjacent. If the second is a subset of the
4571 first, the result is the first. Otherwise, the range to exclude
4572 starts at the beginning of the first range and ends at the end of the
4573 second. */
4574 if (no_overlap)
4575 {
4576 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4577 range_successor (high0),
4578 1, low1, 0)))
4579 in_p = 0, low = low0, high = high1;
4580 else
4581 {
4582 /* Canonicalize - [min, x] into - [-, x]. */
4583 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4584 switch (TREE_CODE (TREE_TYPE (low0)))
4585 {
4586 case ENUMERAL_TYPE:
4587 if (TYPE_PRECISION (TREE_TYPE (low0))
4588 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4589 break;
4590 /* FALLTHROUGH */
4591 case INTEGER_TYPE:
4592 if (tree_int_cst_equal (low0,
4593 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4594 low0 = 0;
4595 break;
4596 case POINTER_TYPE:
4597 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4598 && integer_zerop (low0))
4599 low0 = 0;
4600 break;
4601 default:
4602 break;
4603 }
4604
4605 /* Canonicalize - [x, max] into - [x, -]. */
4606 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4607 switch (TREE_CODE (TREE_TYPE (high1)))
4608 {
4609 case ENUMERAL_TYPE:
4610 if (TYPE_PRECISION (TREE_TYPE (high1))
4611 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4612 break;
4613 /* FALLTHROUGH */
4614 case INTEGER_TYPE:
4615 if (tree_int_cst_equal (high1,
4616 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4617 high1 = 0;
4618 break;
4619 case POINTER_TYPE:
4620 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4621 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4622 high1, 1,
4623 integer_one_node, 1)))
4624 high1 = 0;
4625 break;
4626 default:
4627 break;
4628 }
4629
4630 /* The ranges might be also adjacent between the maximum and
4631 minimum values of the given type. For
4632 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4633 return + [x + 1, y - 1]. */
4634 if (low0 == 0 && high1 == 0)
4635 {
4636 low = range_successor (high0);
4637 high = range_predecessor (low1);
4638 if (low == 0 || high == 0)
4639 return 0;
4640
4641 in_p = 1;
4642 }
4643 else
4644 return 0;
4645 }
4646 }
4647 else if (subset)
4648 in_p = 0, low = low0, high = high0;
4649 else
4650 in_p = 0, low = low0, high = high1;
4651 }
4652
4653 *pin_p = in_p, *plow = low, *phigh = high;
4654 return 1;
4655 }
4656 \f
4657
4658 /* Subroutine of fold, looking inside expressions of the form
4659 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4660 of the COND_EXPR. This function is being used also to optimize
4661 A op B ? C : A, by reversing the comparison first.
4662
4663 Return a folded expression whose code is not a COND_EXPR
4664 anymore, or NULL_TREE if no folding opportunity is found. */
4665
4666 static tree
4667 fold_cond_expr_with_comparison (location_t loc, tree type,
4668 tree arg0, tree arg1, tree arg2)
4669 {
4670 enum tree_code comp_code = TREE_CODE (arg0);
4671 tree arg00 = TREE_OPERAND (arg0, 0);
4672 tree arg01 = TREE_OPERAND (arg0, 1);
4673 tree arg1_type = TREE_TYPE (arg1);
4674 tree tem;
4675
4676 STRIP_NOPS (arg1);
4677 STRIP_NOPS (arg2);
4678
4679 /* If we have A op 0 ? A : -A, consider applying the following
4680 transformations:
4681
4682 A == 0? A : -A same as -A
4683 A != 0? A : -A same as A
4684 A >= 0? A : -A same as abs (A)
4685 A > 0? A : -A same as abs (A)
4686 A <= 0? A : -A same as -abs (A)
4687 A < 0? A : -A same as -abs (A)
4688
4689 None of these transformations work for modes with signed
4690 zeros. If A is +/-0, the first two transformations will
4691 change the sign of the result (from +0 to -0, or vice
4692 versa). The last four will fix the sign of the result,
4693 even though the original expressions could be positive or
4694 negative, depending on the sign of A.
4695
4696 Note that all these transformations are correct if A is
4697 NaN, since the two alternatives (A and -A) are also NaNs. */
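/* Illustrative example (editorial): x > 0 ? x : -x becomes ABS_EXPR <x>,
   and x <= 0 ? x : -x becomes -ABS_EXPR <x>, provided signed zeros need
   not be honored.  */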
4698 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4699 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4700 ? real_zerop (arg01)
4701 : integer_zerop (arg01))
4702 && ((TREE_CODE (arg2) == NEGATE_EXPR
4703 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4704 /* In the case that A is of the form X-Y, '-A' (arg2) may
4705 have already been folded to Y-X, check for that. */
4706 || (TREE_CODE (arg1) == MINUS_EXPR
4707 && TREE_CODE (arg2) == MINUS_EXPR
4708 && operand_equal_p (TREE_OPERAND (arg1, 0),
4709 TREE_OPERAND (arg2, 1), 0)
4710 && operand_equal_p (TREE_OPERAND (arg1, 1),
4711 TREE_OPERAND (arg2, 0), 0))))
4712 switch (comp_code)
4713 {
4714 case EQ_EXPR:
4715 case UNEQ_EXPR:
4716 tem = fold_convert_loc (loc, arg1_type, arg1);
4717 return pedantic_non_lvalue_loc (loc,
4718 fold_convert_loc (loc, type,
4719 negate_expr (tem)));
4720 case NE_EXPR:
4721 case LTGT_EXPR:
4722 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4723 case UNGE_EXPR:
4724 case UNGT_EXPR:
4725 if (flag_trapping_math)
4726 break;
4727 /* Fall through. */
4728 case GE_EXPR:
4729 case GT_EXPR:
4730 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4731 arg1 = fold_convert_loc (loc, signed_type_for
4732 (TREE_TYPE (arg1)), arg1);
4733 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4734 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4735 case UNLE_EXPR:
4736 case UNLT_EXPR:
4737 if (flag_trapping_math)
4738 break;
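/* Fall through. */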
4739 case LE_EXPR:
4740 case LT_EXPR:
4741 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4742 arg1 = fold_convert_loc (loc, signed_type_for
4743 (TREE_TYPE (arg1)), arg1);
4744 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4745 return negate_expr (fold_convert_loc (loc, type, tem));
4746 default:
4747 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4748 break;
4749 }
4750
4751 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4752 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4753 both transformations are correct when A is NaN: A != 0
4754 is then true, and A == 0 is false. */
4755
4756 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4757 && integer_zerop (arg01) && integer_zerop (arg2))
4758 {
4759 if (comp_code == NE_EXPR)
4760 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4761 else if (comp_code == EQ_EXPR)
4762 return build_zero_cst (type);
4763 }
4764
4765 /* Try some transformations of A op B ? A : B.
4766
4767 A == B? A : B same as B
4768 A != B? A : B same as A
4769 A >= B? A : B same as max (A, B)
4770 A > B? A : B same as max (B, A)
4771 A <= B? A : B same as min (A, B)
4772 A < B? A : B same as min (B, A)
4773
4774 As above, these transformations don't work in the presence
4775 of signed zeros. For example, if A and B are zeros of
4776 opposite sign, the first two transformations will change
4777 the sign of the result. In the last four, the original
4778 expressions give different results for (A=+0, B=-0) and
4779 (A=-0, B=+0), but the transformed expressions do not.
4780
4781 The first two transformations are correct if either A or B
4782 is a NaN. In the first transformation, the condition will
4783 be false, and B will indeed be chosen. In the case of the
4784 second transformation, the condition A != B will be true,
4785 and A will be chosen.
4786
4787 The conversions to max() and min() are not correct if B is
4788 a number and A is not. The conditions in the original
4789 expressions will be false, so all four give B. The min()
4790 and max() versions would give a NaN instead. */
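/* Illustrative example (editorial): when NaNs need not be honored,
   a < b ? a : b becomes MIN_EXPR <b, a> and a > b ? a : b becomes
   MAX_EXPR <b, a>.  */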
4791 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4792 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4793 /* Avoid these transformations if the COND_EXPR may be used
4794 as an lvalue in the C++ front-end. PR c++/19199. */
4795 && (in_gimple_form
4796 || VECTOR_TYPE_P (type)
4797 || (strcmp (lang_hooks.name, "GNU C++") != 0
4798 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4799 || ! maybe_lvalue_p (arg1)
4800 || ! maybe_lvalue_p (arg2)))
4801 {
4802 tree comp_op0 = arg00;
4803 tree comp_op1 = arg01;
4804 tree comp_type = TREE_TYPE (comp_op0);
4805
4806 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4807 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4808 {
4809 comp_type = type;
4810 comp_op0 = arg1;
4811 comp_op1 = arg2;
4812 }
4813
4814 switch (comp_code)
4815 {
4816 case EQ_EXPR:
4817 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4818 case NE_EXPR:
4819 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4820 case LE_EXPR:
4821 case LT_EXPR:
4822 case UNLE_EXPR:
4823 case UNLT_EXPR:
4824 /* In C++ a ?: expression can be an lvalue, so put the
4825 operand which will be used if they are equal first
4826 so that we can convert this back to the
4827 corresponding COND_EXPR. */
4828 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4829 {
4830 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4831 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4832 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4833 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4834 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4835 comp_op1, comp_op0);
4836 return pedantic_non_lvalue_loc (loc,
4837 fold_convert_loc (loc, type, tem));
4838 }
4839 break;
4840 case GE_EXPR:
4841 case GT_EXPR:
4842 case UNGE_EXPR:
4843 case UNGT_EXPR:
4844 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4845 {
4846 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4847 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4848 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4849 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4850 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4851 comp_op1, comp_op0);
4852 return pedantic_non_lvalue_loc (loc,
4853 fold_convert_loc (loc, type, tem));
4854 }
4855 break;
4856 case UNEQ_EXPR:
4857 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4858 return pedantic_non_lvalue_loc (loc,
4859 fold_convert_loc (loc, type, arg2));
4860 break;
4861 case LTGT_EXPR:
4862 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4863 return pedantic_non_lvalue_loc (loc,
4864 fold_convert_loc (loc, type, arg1));
4865 break;
4866 default:
4867 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4868 break;
4869 }
4870 }
4871
4872 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4873 we might still be able to simplify this. For example,
4874 if C1 is one less or one more than C2, this might have started
4875 out as a MIN or MAX and been transformed by this function.
4876 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4877
4878 if (INTEGRAL_TYPE_P (type)
4879 && TREE_CODE (arg01) == INTEGER_CST
4880 && TREE_CODE (arg2) == INTEGER_CST)
4881 switch (comp_code)
4882 {
4883 case EQ_EXPR:
4884 if (TREE_CODE (arg1) == INTEGER_CST)
4885 break;
4886 /* We can replace A with C1 in this case. */
4887 arg1 = fold_convert_loc (loc, type, arg01);
4888 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4889
4890 case LT_EXPR:
4891 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4892 MIN_EXPR, to preserve the signedness of the comparison. */
4893 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4894 OEP_ONLY_CONST)
4895 && operand_equal_p (arg01,
4896 const_binop (PLUS_EXPR, arg2,
4897 build_int_cst (type, 1)),
4898 OEP_ONLY_CONST))
4899 {
4900 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4901 fold_convert_loc (loc, TREE_TYPE (arg00),
4902 arg2));
4903 return pedantic_non_lvalue_loc (loc,
4904 fold_convert_loc (loc, type, tem));
4905 }
4906 break;
4907
4908 case LE_EXPR:
4909 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4910 as above. */
4911 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4912 OEP_ONLY_CONST)
4913 && operand_equal_p (arg01,
4914 const_binop (MINUS_EXPR, arg2,
4915 build_int_cst (type, 1)),
4916 OEP_ONLY_CONST))
4917 {
4918 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4919 fold_convert_loc (loc, TREE_TYPE (arg00),
4920 arg2));
4921 return pedantic_non_lvalue_loc (loc,
4922 fold_convert_loc (loc, type, tem));
4923 }
4924 break;
4925
4926 case GT_EXPR:
4927 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4928 MAX_EXPR, to preserve the signedness of the comparison. */
4929 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4930 OEP_ONLY_CONST)
4931 && operand_equal_p (arg01,
4932 const_binop (MINUS_EXPR, arg2,
4933 build_int_cst (type, 1)),
4934 OEP_ONLY_CONST))
4935 {
4936 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4937 fold_convert_loc (loc, TREE_TYPE (arg00),
4938 arg2));
4939 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4940 }
4941 break;
4942
4943 case GE_EXPR:
4944 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4945 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4946 OEP_ONLY_CONST)
4947 && operand_equal_p (arg01,
4948 const_binop (PLUS_EXPR, arg2,
4949 build_int_cst (type, 1)),
4950 OEP_ONLY_CONST))
4951 {
4952 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4953 fold_convert_loc (loc, TREE_TYPE (arg00),
4954 arg2));
4955 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4956 }
4957 break;
4958 case NE_EXPR:
4959 break;
4960 default:
4961 gcc_unreachable ();
4962 }
4963
4964 return NULL_TREE;
4965 }
4966
4967
4968 \f
4969 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4970 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4971 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4972 false) >= 2)
4973 #endif
4974
4975 /* EXP is some logical combination of boolean tests. See if we can
4976 merge it into some range test. Return the new tree if so. */
4977
4978 static tree
4979 fold_range_test (location_t loc, enum tree_code code, tree type,
4980 tree op0, tree op1)
4981 {
4982 int or_op = (code == TRUTH_ORIF_EXPR
4983 || code == TRUTH_OR_EXPR);
4984 int in0_p, in1_p, in_p;
4985 tree low0, low1, low, high0, high1, high;
4986 bool strict_overflow_p = false;
4987 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4988 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4989 tree tem;
4990 const char * const warnmsg = G_("assuming signed overflow does not occur "
4991 "when simplifying range test");
4992
4993 /* If this is an OR operation, invert both sides; we will invert
4994 again at the end. */
4995 if (or_op)
4996 in0_p = ! in0_p, in1_p = ! in1_p;
4997
4998 /* If both expressions are the same, if we can merge the ranges, and we
4999 can build the range test, return it or it inverted. If one of the
5000 ranges is always true or always false, consider it to be the same
5001 expression as the other. */
5002 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5003 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5004 in1_p, low1, high1)
5005 && 0 != (tem = (build_range_check (loc, type,
5006 lhs != 0 ? lhs
5007 : rhs != 0 ? rhs : integer_zero_node,
5008 in_p, low, high))))
5009 {
5010 if (strict_overflow_p)
5011 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5012 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5013 }
5014
5015 /* On machines where the branch cost is expensive, if this is a
5016 short-circuited branch and the underlying object on both sides
5017 is the same, make a non-short-circuit operation. */
5018 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5019 && lhs != 0 && rhs != 0
5020 && (code == TRUTH_ANDIF_EXPR
5021 || code == TRUTH_ORIF_EXPR)
5022 && operand_equal_p (lhs, rhs, 0))
5023 {
5024 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5025 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5026 which cases we can't do this. */
5027 if (simple_operand_p (lhs))
5028 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5029 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5030 type, op0, op1);
5031
5032 else if (!lang_hooks.decls.global_bindings_p ()
5033 && !CONTAINS_PLACEHOLDER_P (lhs))
5034 {
5035 tree common = save_expr (lhs);
5036
5037 if (0 != (lhs = build_range_check (loc, type, common,
5038 or_op ? ! in0_p : in0_p,
5039 low0, high0))
5040 && (0 != (rhs = build_range_check (loc, type, common,
5041 or_op ? ! in1_p : in1_p,
5042 low1, high1))))
5043 {
5044 if (strict_overflow_p)
5045 fold_overflow_warning (warnmsg,
5046 WARN_STRICT_OVERFLOW_COMPARISON);
5047 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5048 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5049 type, lhs, rhs);
5050 }
5051 }
5052 }
5053
5054 return 0;
5055 }
5056 \f
5057 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5058 bit value. Arrange things so the extra bits will be set to zero if and
5059 only if C is sign-extended to its full width. If MASK is nonzero,
5060 it is an INTEGER_CST that should be AND'ed with the extra bits. */
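/* Illustrative worked example (editorial): with MODESIZE == 8 and P == 4,
   a sign-extended C of 0xfa (the 4-bit value -6) has its upper bits
   cleared, giving 0x0a, whereas a non-sign-extended C of 0x0a becomes
   0xfa.  */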
5061
5062 static tree
5063 unextend (tree c, int p, int unsignedp, tree mask)
5064 {
5065 tree type = TREE_TYPE (c);
5066 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5067 tree temp;
5068
5069 if (p == modesize || unsignedp)
5070 return c;
5071
5072 /* We work by getting just the sign bit into the low-order bit, then
5073 into the high-order bit, then sign-extend. We then XOR that value
5074 with C. */
5075 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5076 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5077
5078 /* We must use a signed type in order to get an arithmetic right shift.
5079 However, we must also avoid introducing accidental overflows, so that
5080 a subsequent call to integer_zerop will work. Hence we must
5081 do the type conversion here. At this point, the constant is either
5082 zero or one, and the conversion to a signed type can never overflow.
5083 We could get an overflow if this conversion is done anywhere else. */
5084 if (TYPE_UNSIGNED (type))
5085 temp = fold_convert (signed_type_for (type), temp);
5086
5087 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5088 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5089 if (mask != 0)
5090 temp = const_binop (BIT_AND_EXPR, temp,
5091 fold_convert (TREE_TYPE (c), mask));
5092 /* If necessary, convert the type back to match the type of C. */
5093 if (TYPE_UNSIGNED (type))
5094 temp = fold_convert (type, temp);
5095
5096 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5097 }
5098 \f
5099 /* For an expression that has the form
5100 (A && B) || ~B
5101 or
5102 (A || B) && ~B,
5103 we can drop one of the inner expressions and simplify to
5104 A || ~B
5105 or
5106 A && ~B
5107 LOC is the location of the resulting expression. OP is the inner
5108 logical operation; the left-hand side in the examples above, while CMPOP
5109 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5110 removing a condition that guards another, as in
5111 (A != NULL && A->...) || A == NULL
5112 which we must not transform. If RHS_ONLY is true, only eliminate the
5113 right-most operand of the inner logical operation. */
5114
5115 static tree
5116 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5117 bool rhs_only)
5118 {
5119 tree type = TREE_TYPE (cmpop);
5120 enum tree_code code = TREE_CODE (cmpop);
5121 enum tree_code truthop_code = TREE_CODE (op);
5122 tree lhs = TREE_OPERAND (op, 0);
5123 tree rhs = TREE_OPERAND (op, 1);
5124 tree orig_lhs = lhs, orig_rhs = rhs;
5125 enum tree_code rhs_code = TREE_CODE (rhs);
5126 enum tree_code lhs_code = TREE_CODE (lhs);
5127 enum tree_code inv_code;
5128
5129 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5130 return NULL_TREE;
5131
5132 if (TREE_CODE_CLASS (code) != tcc_comparison)
5133 return NULL_TREE;
5134
5135 if (rhs_code == truthop_code)
5136 {
5137 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5138 if (newrhs != NULL_TREE)
5139 {
5140 rhs = newrhs;
5141 rhs_code = TREE_CODE (rhs);
5142 }
5143 }
5144 if (lhs_code == truthop_code && !rhs_only)
5145 {
5146 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5147 if (newlhs != NULL_TREE)
5148 {
5149 lhs = newlhs;
5150 lhs_code = TREE_CODE (lhs);
5151 }
5152 }
5153
5154 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5155 if (inv_code == rhs_code
5156 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5157 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5158 return lhs;
5159 if (!rhs_only && inv_code == lhs_code
5160 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5161 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5162 return rhs;
5163 if (rhs != orig_rhs || lhs != orig_lhs)
5164 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5165 lhs, rhs);
5166 return NULL_TREE;
5167 }
5168
5169 /* Find ways of folding logical expressions of LHS and RHS:
5170 Try to merge two comparisons to the same innermost item.
5171 Look for range tests like "ch >= '0' && ch <= '9'".
5172 Look for combinations of simple terms on machines with expensive branches
5173 and evaluate the RHS unconditionally.
5174
5175 For example, if we have p->a == 2 && p->b == 4 and we can make an
5176 object large enough to span both A and B, we can do this with a comparison
5177 against the object ANDed with a mask.
5178
5179 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5180 operations to do this with one comparison.
5181
5182 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5183 function and the one above.
5184
5185 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5186 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5187
5188 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5189 two operands.
5190
5191 We return the simplified tree or 0 if no optimization is possible. */
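
#if 0
/* Illustrative sketch (editorial, not compiled; names hypothetical): the
   p->a == 2 && p->b == 4 example above.  With A and B adjacent bit-fields
   of one word, the two tests merge into a single load, mask and compare.  */
struct two_fields { unsigned a : 4; unsigned b : 4; };

static int
both_match (const struct two_fields *p)
{
  return p->a == 2 && p->b == 4;
}
#endif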
5192
5193 static tree
5194 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5195 tree lhs, tree rhs)
5196 {
5197 /* If this is the "or" of two comparisons, we can do something if
5198 the comparisons are NE_EXPR. If this is the "and", we can do something
5199 if the comparisons are EQ_EXPR. I.e.,
5200 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5201
5202 WANTED_CODE is this operation code. For single bit fields, we can
5203 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5204 comparison for one-bit fields. */
5205
5206 enum tree_code wanted_code;
5207 enum tree_code lcode, rcode;
5208 tree ll_arg, lr_arg, rl_arg, rr_arg;
5209 tree ll_inner, lr_inner, rl_inner, rr_inner;
5210 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5211 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5212 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5213 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5214 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5215 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5216 enum machine_mode lnmode, rnmode;
5217 tree ll_mask, lr_mask, rl_mask, rr_mask;
5218 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5219 tree l_const, r_const;
5220 tree lntype, rntype, result;
5221 HOST_WIDE_INT first_bit, end_bit;
5222 int volatilep;
5223
5224 /* Start by getting the comparison codes. Fail if anything is volatile.
5225 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5226 it were surrounded with a NE_EXPR. */
5227
5228 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5229 return 0;
5230
5231 lcode = TREE_CODE (lhs);
5232 rcode = TREE_CODE (rhs);
5233
5234 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5235 {
5236 lhs = build2 (NE_EXPR, truth_type, lhs,
5237 build_int_cst (TREE_TYPE (lhs), 0));
5238 lcode = NE_EXPR;
5239 }
5240
5241 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5242 {
5243 rhs = build2 (NE_EXPR, truth_type, rhs,
5244 build_int_cst (TREE_TYPE (rhs), 0));
5245 rcode = NE_EXPR;
5246 }
5247
5248 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5249 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5250 return 0;
5251
5252 ll_arg = TREE_OPERAND (lhs, 0);
5253 lr_arg = TREE_OPERAND (lhs, 1);
5254 rl_arg = TREE_OPERAND (rhs, 0);
5255 rr_arg = TREE_OPERAND (rhs, 1);
5256
5257 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5258 if (simple_operand_p (ll_arg)
5259 && simple_operand_p (lr_arg))
5260 {
5261 if (operand_equal_p (ll_arg, rl_arg, 0)
5262 && operand_equal_p (lr_arg, rr_arg, 0))
5263 {
5264 result = combine_comparisons (loc, code, lcode, rcode,
5265 truth_type, ll_arg, lr_arg);
5266 if (result)
5267 return result;
5268 }
5269 else if (operand_equal_p (ll_arg, rr_arg, 0)
5270 && operand_equal_p (lr_arg, rl_arg, 0))
5271 {
5272 result = combine_comparisons (loc, code, lcode,
5273 swap_tree_comparison (rcode),
5274 truth_type, ll_arg, lr_arg);
5275 if (result)
5276 return result;
5277 }
5278 }
5279
5280 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5281 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5282
5283 /* If the RHS can be evaluated unconditionally and its operands are
5284 simple, it wins to evaluate the RHS unconditionally on machines
5285 with expensive branches. In this case, this isn't a comparison
5286 that can be merged. */
5287
5288 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5289 false) >= 2
5290 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5291 && simple_operand_p (rl_arg)
5292 && simple_operand_p (rr_arg))
5293 {
5294 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5295 if (code == TRUTH_OR_EXPR
5296 && lcode == NE_EXPR && integer_zerop (lr_arg)
5297 && rcode == NE_EXPR && integer_zerop (rr_arg)
5298 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5299 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5300 return build2_loc (loc, NE_EXPR, truth_type,
5301 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5302 ll_arg, rl_arg),
5303 build_int_cst (TREE_TYPE (ll_arg), 0));
5304
5305 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5306 if (code == TRUTH_AND_EXPR
5307 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5308 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5309 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5310 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5311 return build2_loc (loc, EQ_EXPR, truth_type,
5312 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5313 ll_arg, rl_arg),
5314 build_int_cst (TREE_TYPE (ll_arg), 0));
5315 }
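  /* Editorial aside (not in the original source): at the C level the
     two rewrites above are

	(a != 0) || (b != 0)   -->   (a | b) != 0
	(a == 0) && (b == 0)   -->   (a | b) == 0

     trading the second test and branch for one cheap bitwise OR.  */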
5316
5317 /* See if the comparisons can be merged. Then get all the parameters for
5318 each side. */
5319
5320 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5321 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5322 return 0;
5323
5324 volatilep = 0;
5325 ll_inner = decode_field_reference (loc, ll_arg,
5326 &ll_bitsize, &ll_bitpos, &ll_mode,
5327 &ll_unsignedp, &volatilep, &ll_mask,
5328 &ll_and_mask);
5329 lr_inner = decode_field_reference (loc, lr_arg,
5330 &lr_bitsize, &lr_bitpos, &lr_mode,
5331 &lr_unsignedp, &volatilep, &lr_mask,
5332 &lr_and_mask);
5333 rl_inner = decode_field_reference (loc, rl_arg,
5334 &rl_bitsize, &rl_bitpos, &rl_mode,
5335 &rl_unsignedp, &volatilep, &rl_mask,
5336 &rl_and_mask);
5337 rr_inner = decode_field_reference (loc, rr_arg,
5338 &rr_bitsize, &rr_bitpos, &rr_mode,
5339 &rr_unsignedp, &volatilep, &rr_mask,
5340 &rr_and_mask);
5341
5342 /* The inner operation on the lhs of each comparison must be the
5343 same if we are to be able to do anything.
5344 Then see if we have constants. If not, the same must be true for
5345 the rhs's. */
5346 if (volatilep || ll_inner == 0 || rl_inner == 0
5347 || ! operand_equal_p (ll_inner, rl_inner, 0))
5348 return 0;
5349
5350 if (TREE_CODE (lr_arg) == INTEGER_CST
5351 && TREE_CODE (rr_arg) == INTEGER_CST)
5352 l_const = lr_arg, r_const = rr_arg;
5353 else if (lr_inner == 0 || rr_inner == 0
5354 || ! operand_equal_p (lr_inner, rr_inner, 0))
5355 return 0;
5356 else
5357 l_const = r_const = 0;
5358
5359 /* If either comparison code is not correct for our logical operation,
5360 fail. However, we can convert a one-bit comparison against zero into
5361 the opposite comparison against that bit being set in the field. */
5362
5363 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5364 if (lcode != wanted_code)
5365 {
5366 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5367 {
5368 /* Make the left operand unsigned, since we are only interested
5369 in the value of one bit. Otherwise we are doing the wrong
5370 thing below. */
5371 ll_unsignedp = 1;
5372 l_const = ll_mask;
5373 }
5374 else
5375 return 0;
5376 }
5377
5378 /* This is analogous to the code for l_const above. */
5379 if (rcode != wanted_code)
5380 {
5381 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5382 {
5383 rl_unsignedp = 1;
5384 r_const = rl_mask;
5385 }
5386 else
5387 return 0;
5388 }
5389
5390 /* See if we can find a mode that contains both fields being compared on
5391 the left. If we can't, fail. Otherwise, update all constants and masks
5392 to be relative to a field of that size. */
5393 first_bit = MIN (ll_bitpos, rl_bitpos);
5394 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5395 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5396 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5397 volatilep);
5398 if (lnmode == VOIDmode)
5399 return 0;
5400
5401 lnbitsize = GET_MODE_BITSIZE (lnmode);
5402 lnbitpos = first_bit & ~ (lnbitsize - 1);
5403 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5404 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5405
5406 if (BYTES_BIG_ENDIAN)
5407 {
5408 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5409 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5410 }
5411
5412 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5413 size_int (xll_bitpos));
5414 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5415 size_int (xrl_bitpos));
5416
5417 if (l_const)
5418 {
5419 l_const = fold_convert_loc (loc, lntype, l_const);
5420 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5421 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5422 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5423 fold_build1_loc (loc, BIT_NOT_EXPR,
5424 lntype, ll_mask))))
5425 {
5426 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5427
5428 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5429 }
5430 }
5431 if (r_const)
5432 {
5433 r_const = fold_convert_loc (loc, lntype, r_const);
5434 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5435 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5436 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5437 fold_build1_loc (loc, BIT_NOT_EXPR,
5438 lntype, rl_mask))))
5439 {
5440 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5441
5442 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5443 }
5444 }
5445
5446 /* If the right sides are not constant, do the same for them. Also,
5447 disallow this optimization if a size or signedness mismatch occurs
5448 between the left and right sides. */
5449 if (l_const == 0)
5450 {
5451 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5452 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5453 /* Make sure the two fields on the right
5454 correspond to the left without being swapped. */
5455 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5456 return 0;
5457
5458 first_bit = MIN (lr_bitpos, rr_bitpos);
5459 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5460 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5461 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5462 volatilep);
5463 if (rnmode == VOIDmode)
5464 return 0;
5465
5466 rnbitsize = GET_MODE_BITSIZE (rnmode);
5467 rnbitpos = first_bit & ~ (rnbitsize - 1);
5468 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5469 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5470
5471 if (BYTES_BIG_ENDIAN)
5472 {
5473 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5474 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5475 }
5476
5477 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5478 rntype, lr_mask),
5479 size_int (xlr_bitpos));
5480 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5481 rntype, rr_mask),
5482 size_int (xrr_bitpos));
5483
5484 /* Make a mask that corresponds to both fields being compared.
5485 Do this for both items being compared. If the operands are the
5486 same size and the bits being compared are in the same position
5487 then we can do this by masking both and comparing the masked
5488 results. */
5489 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5490 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5491 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5492 {
5493 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5494 ll_unsignedp || rl_unsignedp);
5495 if (! all_ones_mask_p (ll_mask, lnbitsize))
5496 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5497
5498 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5499 lr_unsignedp || rr_unsignedp);
5500 if (! all_ones_mask_p (lr_mask, rnbitsize))
5501 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5502
5503 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5504 }
5505
5506 /* There is still another way we can do something: If both pairs of
5507 fields being compared are adjacent, we may be able to make a wider
5508 field containing them both.
5509
5510 Note that we still must mask the lhs/rhs expressions. Furthermore,
5511 the mask must be shifted to account for the shift done by
5512 make_bit_field_ref. */
5513 if ((ll_bitsize + ll_bitpos == rl_bitpos
5514 && lr_bitsize + lr_bitpos == rr_bitpos)
5515 || (ll_bitpos == rl_bitpos + rl_bitsize
5516 && lr_bitpos == rr_bitpos + rr_bitsize))
5517 {
5518 tree type;
5519
5520 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5521 ll_bitsize + rl_bitsize,
5522 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5523 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5524 lr_bitsize + rr_bitsize,
5525 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5526
5527 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5528 size_int (MIN (xll_bitpos, xrl_bitpos)));
5529 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5530 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5531
5532 /* Convert to the smaller type before masking out unwanted bits. */
5533 type = lntype;
5534 if (lntype != rntype)
5535 {
5536 if (lnbitsize > rnbitsize)
5537 {
5538 lhs = fold_convert_loc (loc, rntype, lhs);
5539 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5540 type = rntype;
5541 }
5542 else if (lnbitsize < rnbitsize)
5543 {
5544 rhs = fold_convert_loc (loc, lntype, rhs);
5545 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5546 type = lntype;
5547 }
5548 }
5549
5550 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5551 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5552
5553 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5554 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5555
5556 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5557 }
5558
5559 return 0;
5560 }
5561
5562 /* Handle the case of comparisons with constants. If there is something in
5563 common between the masks, those bits of the constants must be the same.
5564 If not, the condition is always false. Test for this to avoid generating
5565 incorrect code below. */
5566 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5567 if (! integer_zerop (result)
5568 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5569 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5570 {
5571 if (wanted_code == NE_EXPR)
5572 {
5573 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5574 return constant_boolean_node (true, truth_type);
5575 }
5576 else
5577 {
5578 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5579 return constant_boolean_node (false, truth_type);
5580 }
5581 }
5582
5583 /* Construct the expression we will return. First get the component
5584 reference we will make. Unless the mask is all ones the width of
5585 that field, perform the mask operation. Then compare with the
5586 merged constant. */
5587 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5588 ll_unsignedp || rl_unsignedp);
5589
5590 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5591 if (! all_ones_mask_p (ll_mask, lnbitsize))
5592 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5593
5594 return build2_loc (loc, wanted_code, truth_type, result,
5595 const_binop (BIT_IOR_EXPR, l_const, r_const));
5596 }
5597 \f
5598 /* Optimize OP0 CODE OP1, where OP0 is a MIN_EXPR or MAX_EXPR and
5599 OP1 is a constant; TYPE is the result type. */
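/* Editorial sketch (not part of the original source): the identities
   exploited below, checked in plain C for a sample value X.  The
   EX_MIN/EX_MAX macro names are hypothetical.  */

#include <assert.h>

#define EX_MAX(a, b) ((a) > (b) ? (a) : (b))
#define EX_MIN(a, b) ((a) < (b) ? (a) : (b))

static void
check_minmax_folds (int x)
{
  assert ((EX_MAX (x, 0) == 0) == (x <= 0));	/* MAX (X, 0) == 0  ->  X <= 0 */
  assert ((EX_MIN (x, 0) == 0) == (x >= 0));	/* MIN (X, 0) == 0  ->  X >= 0 */
  assert ((EX_MAX (x, 0) > 0) == (x > 0));	/* MAX (X, 0) > 0   ->  X > 0  */
  assert (!(EX_MIN (x, 0) > 5));		/* MIN (X, 0) > 5   ->  false  */
}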
5600
5601 static tree
5602 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5603 tree op0, tree op1)
5604 {
5605 tree arg0 = op0;
5606 enum tree_code op_code;
5607 tree comp_const;
5608 tree minmax_const;
5609 int consts_equal, consts_lt;
5610 tree inner;
5611
5612 STRIP_SIGN_NOPS (arg0);
5613
5614 op_code = TREE_CODE (arg0);
5615 minmax_const = TREE_OPERAND (arg0, 1);
5616 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5617 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5618 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5619 inner = TREE_OPERAND (arg0, 0);
5620
5621 /* If something does not permit us to optimize, return NULL_TREE. */
5622 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5623 || TREE_CODE (comp_const) != INTEGER_CST
5624 || TREE_OVERFLOW (comp_const)
5625 || TREE_CODE (minmax_const) != INTEGER_CST
5626 || TREE_OVERFLOW (minmax_const))
5627 return NULL_TREE;
5628
5629 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5630 and GT_EXPR, doing the rest with recursive calls using logical
5631 simplifications. */
5632 switch (code)
5633 {
5634 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5635 {
5636 tree tem
5637 = optimize_minmax_comparison (loc,
5638 invert_tree_comparison (code, false),
5639 type, op0, op1);
5640 if (tem)
5641 return invert_truthvalue_loc (loc, tem);
5642 return NULL_TREE;
5643 }
5644
5645 case GE_EXPR:
5646 return
5647 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5648 optimize_minmax_comparison
5649 (loc, EQ_EXPR, type, arg0, comp_const),
5650 optimize_minmax_comparison
5651 (loc, GT_EXPR, type, arg0, comp_const));
5652
5653 case EQ_EXPR:
5654 if (op_code == MAX_EXPR && consts_equal)
5655 /* MAX (X, 0) == 0 -> X <= 0 */
5656 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5657
5658 else if (op_code == MAX_EXPR && consts_lt)
5659 /* MAX (X, 0) == 5 -> X == 5 */
5660 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5661
5662 else if (op_code == MAX_EXPR)
5663 /* MAX (X, 0) == -1 -> false */
5664 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5665
5666 else if (consts_equal)
5667 /* MIN (X, 0) == 0 -> X >= 0 */
5668 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5669
5670 else if (consts_lt)
5671 /* MIN (X, 0) == 5 -> false */
5672 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5673
5674 else
5675 /* MIN (X, 0) == -1 -> X == -1 */
5676 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5677
5678 case GT_EXPR:
5679 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5680 /* MAX (X, 0) > 0 -> X > 0
5681 MAX (X, 0) > 5 -> X > 5 */
5682 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5683
5684 else if (op_code == MAX_EXPR)
5685 /* MAX (X, 0) > -1 -> true */
5686 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5687
5688 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5689 /* MIN (X, 0) > 0 -> false
5690 MIN (X, 0) > 5 -> false */
5691 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5692
5693 else
5694 /* MIN (X, 0) > -1 -> X > -1 */
5695 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5696
5697 default:
5698 return NULL_TREE;
5699 }
5700 }
5701 \f
5702 /* T is an integer expression that is being multiplied by, divided by, or
5703 taken modulo a constant C (CODE says which, and what kind of divide or
5704 modulus). See if we can eliminate that operation by folding it with
5705 other operations already in T. WIDE_TYPE, if non-null, is a type that
5706 should be used for the computation if wider than our type.
5707
5708 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5709 (X * 2) + (Y * 4). We must, however, be assured that either the original
5710 expression would not overflow or that overflow is undefined for the type
5711 in the language in question.
5712
5713 If we return a non-null expression, it is an equivalent form of the
5714 original computation, but need not be in the original type.
5715
5716 We set *STRICT_OVERFLOW_P to true if the return value depends on
5717 signed overflow being undefined. Otherwise we do not change
5718 *STRICT_OVERFLOW_P. */
5719
5720 static tree
5721 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5722 bool *strict_overflow_p)
5723 {
5724 /* To avoid exponential search depth, refuse to allow recursion past
5725 three levels. Beyond that (1) it's highly unlikely that we'll find
5726 something interesting and (2) we've probably processed it before
5727 when we built the inner expression. */
5728
5729 static int depth;
5730 tree ret;
5731
5732 if (depth > 3)
5733 return NULL;
5734
5735 depth++;
5736 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5737 depth--;
5738
5739 return ret;
5740 }
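/* Editorial sketch (not part of the original source): the example from
   the comment above, written out.  Folding (X * 8 + Y * 16) / 4 into
   X * 2 + Y * 4 is justified only because signed overflow is undefined
   here, so the original sum may be assumed not to wrap.  */

static int
divide_through (int x, int y)
{
  /* (x * 8 + y * 16) / 4 folds to:  */
  return x * 2 + y * 4;
}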
5741
5742 static tree
5743 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5744 bool *strict_overflow_p)
5745 {
5746 tree type = TREE_TYPE (t);
5747 enum tree_code tcode = TREE_CODE (t);
5748 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5749 > GET_MODE_SIZE (TYPE_MODE (type)))
5750 ? wide_type : type);
5751 tree t1, t2;
5752 int same_p = tcode == code;
5753 tree op0 = NULL_TREE, op1 = NULL_TREE;
5754 bool sub_strict_overflow_p;
5755
5756 /* Don't deal with constants of zero here; they confuse the code below. */
5757 if (integer_zerop (c))
5758 return NULL_TREE;
5759
5760 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5761 op0 = TREE_OPERAND (t, 0);
5762
5763 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5764 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5765
5766 /* Note that we need not handle conditional operations here since fold
5767 already handles those cases. So just do arithmetic here. */
5768 switch (tcode)
5769 {
5770 case INTEGER_CST:
5771 /* For a constant, we can always simplify if we are a multiply
5772 or (for divide and modulus) if it is a multiple of our constant. */
5773 if (code == MULT_EXPR
5774 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5775 return const_binop (code, fold_convert (ctype, t),
5776 fold_convert (ctype, c));
5777 break;
5778
5779 CASE_CONVERT: case NON_LVALUE_EXPR:
5780 /* If op0 is an expression ... */
5781 if ((COMPARISON_CLASS_P (op0)
5782 || UNARY_CLASS_P (op0)
5783 || BINARY_CLASS_P (op0)
5784 || VL_EXP_CLASS_P (op0)
5785 || EXPRESSION_CLASS_P (op0))
5786 /* ... and has wrapping overflow, and its type is smaller
5787 than ctype, then we cannot pass through as widening. */
5788 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5789 && (TYPE_PRECISION (ctype)
5790 > TYPE_PRECISION (TREE_TYPE (op0))))
5791 /* ... or this is a truncation (t is narrower than op0),
5792 then we cannot pass through this narrowing. */
5793 || (TYPE_PRECISION (type)
5794 < TYPE_PRECISION (TREE_TYPE (op0)))
5795 /* ... or signedness changes for division or modulus,
5796 then we cannot pass through this conversion. */
5797 || (code != MULT_EXPR
5798 && (TYPE_UNSIGNED (ctype)
5799 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5800 /* ... or has undefined overflow while the converted to
5801 type has not, we cannot do the operation in the inner type
5802 as that would introduce undefined overflow. */
5803 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5804 && !TYPE_OVERFLOW_UNDEFINED (type))))
5805 break;
5806
5807 /* Pass the constant down and see if we can make a simplification. If
5808 we can, replace this expression with the inner simplification for
5809 possible later conversion to our or some other type. */
5810 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5811 && TREE_CODE (t2) == INTEGER_CST
5812 && !TREE_OVERFLOW (t2)
5813 && (0 != (t1 = extract_muldiv (op0, t2, code,
5814 code == MULT_EXPR
5815 ? ctype : NULL_TREE,
5816 strict_overflow_p))))
5817 return t1;
5818 break;
5819
5820 case ABS_EXPR:
5821 /* If widening the type changes it from signed to unsigned, then we
5822 must avoid building ABS_EXPR itself as unsigned. */
5823 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5824 {
5825 tree cstype = (*signed_type_for) (ctype);
5826 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5827 != 0)
5828 {
5829 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5830 return fold_convert (ctype, t1);
5831 }
5832 break;
5833 }
5834 /* If the constant is negative, we cannot simplify this. */
5835 if (tree_int_cst_sgn (c) == -1)
5836 break;
5837 /* FALLTHROUGH */
5838 case NEGATE_EXPR:
5839 /* For division and modulus, type can't be unsigned, as e.g.
5840 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5841 For signed types, even with wrapping overflow, this is fine. */
5842 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5843 break;
5844 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5845 != 0)
5846 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5847 break;
5848
5849 case MIN_EXPR: case MAX_EXPR:
5850 /* If widening the type changes the signedness, then we can't perform
5851 this optimization as that changes the result. */
5852 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5853 break;
5854
5855 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5856 sub_strict_overflow_p = false;
5857 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5858 &sub_strict_overflow_p)) != 0
5859 && (t2 = extract_muldiv (op1, c, code, wide_type,
5860 &sub_strict_overflow_p)) != 0)
5861 {
5862 if (tree_int_cst_sgn (c) < 0)
5863 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5864 if (sub_strict_overflow_p)
5865 *strict_overflow_p = true;
5866 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5867 fold_convert (ctype, t2));
5868 }
5869 break;
5870
5871 case LSHIFT_EXPR: case RSHIFT_EXPR:
5872 /* If the second operand is constant, this is a multiplication
5873 or floor division by a power of two, so we can treat it that
5874 way unless the multiplier or divisor overflows. Signed
5875 left-shift overflow is implementation-defined rather than
5876 undefined in C90, so do not convert signed left shift into
5877 multiplication. */
5878 if (TREE_CODE (op1) == INTEGER_CST
5879 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5880 /* const_binop may not detect overflow correctly,
5881 so check for it explicitly here. */
5882 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5883 && TREE_INT_CST_HIGH (op1) == 0
5884 && 0 != (t1 = fold_convert (ctype,
5885 const_binop (LSHIFT_EXPR,
5886 size_one_node,
5887 op1)))
5888 && !TREE_OVERFLOW (t1))
5889 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5890 ? MULT_EXPR : FLOOR_DIV_EXPR,
5891 ctype,
5892 fold_convert (ctype, op0),
5893 t1),
5894 c, code, wide_type, strict_overflow_p);
5895 break;
5896
5897 case PLUS_EXPR: case MINUS_EXPR:
5898 /* See if we can eliminate the operation on both sides. If we can, we
5899 can return a new PLUS or MINUS. If we can't, the only remaining
5900 cases where we can do anything are if the second operand is a
5901 constant. */
5902 sub_strict_overflow_p = false;
5903 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5904 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5905 if (t1 != 0 && t2 != 0
5906 && (code == MULT_EXPR
5907 /* If not multiplication, we can only do this if both operands
5908 are divisible by c. */
5909 || (multiple_of_p (ctype, op0, c)
5910 && multiple_of_p (ctype, op1, c))))
5911 {
5912 if (sub_strict_overflow_p)
5913 *strict_overflow_p = true;
5914 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5915 fold_convert (ctype, t2));
5916 }
5917
5918 /* If this was a subtraction, negate OP1 and set it to be an addition.
5919 This simplifies the logic below. */
5920 if (tcode == MINUS_EXPR)
5921 {
5922 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5923 /* If OP1 was not easily negatable, the constant may be OP0. */
5924 if (TREE_CODE (op0) == INTEGER_CST)
5925 {
5926 tree tem = op0;
5927 op0 = op1;
5928 op1 = tem;
5929 tem = t1;
5930 t1 = t2;
5931 t2 = tem;
5932 }
5933 }
5934
5935 if (TREE_CODE (op1) != INTEGER_CST)
5936 break;
5937
5938 /* If either OP1 or C are negative, this optimization is not safe for
5939 some of the division and remainder types while for others we need
5940 to change the code. */
5941 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5942 {
5943 if (code == CEIL_DIV_EXPR)
5944 code = FLOOR_DIV_EXPR;
5945 else if (code == FLOOR_DIV_EXPR)
5946 code = CEIL_DIV_EXPR;
5947 else if (code != MULT_EXPR
5948 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5949 break;
5950 }
5951
5952 /* If it's a multiply or a division/modulus operation of a multiple
5953 of our constant, do the operation and verify it doesn't overflow. */
5954 if (code == MULT_EXPR
5955 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5956 {
5957 op1 = const_binop (code, fold_convert (ctype, op1),
5958 fold_convert (ctype, c));
5959 /* We allow the constant to overflow with wrapping semantics. */
5960 if (op1 == 0
5961 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5962 break;
5963 }
5964 else
5965 break;
5966
5967 /* If we have an unsigned type, we cannot widen the operation since it
5968 will change the result if the original computation overflowed. */
5969 if (TYPE_UNSIGNED (ctype) && ctype != type)
5970 break;
5971
5972 /* If we were able to eliminate our operation from the first side,
5973 apply our operation to the second side and reform the PLUS. */
5974 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5975 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5976
5977 /* The last case is if we are a multiply. In that case, we can
5978 apply the distributive law to commute the multiply and addition
5979 if the multiplication of the constants doesn't overflow
5980 and overflow is defined. With undefined overflow
5981 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5982 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5983 return fold_build2 (tcode, ctype,
5984 fold_build2 (code, ctype,
5985 fold_convert (ctype, op0),
5986 fold_convert (ctype, c)),
5987 op1);
5988
5989 break;
5990
5991 case MULT_EXPR:
5992 /* We have a special case here if we are doing something like
5993 (C * 8) % 4 since we know that's zero. */
5994 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5995 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5996 /* If the multiplication can overflow we cannot optimize this. */
5997 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5998 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5999 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6000 {
6001 *strict_overflow_p = true;
6002 return omit_one_operand (type, integer_zero_node, op0);
6003 }
6004
6005 /* ... fall through ... */
6006
6007 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6008 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6009 /* If we can extract our operation from the LHS, do so and return a
6010 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6011 do something only if the second operand is a constant. */
6012 if (same_p
6013 && (t1 = extract_muldiv (op0, c, code, wide_type,
6014 strict_overflow_p)) != 0)
6015 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6016 fold_convert (ctype, op1));
6017 else if (tcode == MULT_EXPR && code == MULT_EXPR
6018 && (t1 = extract_muldiv (op1, c, code, wide_type,
6019 strict_overflow_p)) != 0)
6020 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6021 fold_convert (ctype, t1));
6022 else if (TREE_CODE (op1) != INTEGER_CST)
6023 return 0;
6024
6025 /* If these are the same operation types, we can associate them
6026 assuming no overflow. */
6027 if (tcode == code)
6028 {
6029 double_int mul;
6030 bool overflow_p;
6031 unsigned prec = TYPE_PRECISION (ctype);
6032 bool uns = TYPE_UNSIGNED (ctype);
6033 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6034 double_int dic = tree_to_double_int (c).ext (prec, uns);
6035 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6036 overflow_p = ((!uns && overflow_p)
6037 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6038 if (!double_int_fits_to_tree_p (ctype, mul)
6039 && ((uns && tcode != MULT_EXPR) || !uns))
6040 overflow_p = 1;
6041 if (!overflow_p)
6042 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6043 double_int_to_tree (ctype, mul));
6044 }
6045
6046 /* If these operations "cancel" each other, we have the main
6047 optimizations of this pass, which occur when either constant is a
6048 multiple of the other, in which case we replace this with either an
6049 operation or CODE or TCODE.
6050
6051 If we have an unsigned type, we cannot do this since it will change
6052 the result if the original computation overflowed. */
6053 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6054 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6055 || (tcode == MULT_EXPR
6056 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6057 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6058 && code != MULT_EXPR)))
6059 {
6060 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6061 {
6062 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6063 *strict_overflow_p = true;
6064 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6065 fold_convert (ctype,
6066 const_binop (TRUNC_DIV_EXPR,
6067 op1, c)));
6068 }
6069 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6070 {
6071 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6072 *strict_overflow_p = true;
6073 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6074 fold_convert (ctype,
6075 const_binop (TRUNC_DIV_EXPR,
6076 c, op1)));
6077 }
6078 }
6079 break;
6080
6081 default:
6082 break;
6083 }
6084
6085 return 0;
6086 }
6087 \f
6088 /* Return a node which has the indicated constant VALUE (either 0 or
6089 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6090 and is of the indicated TYPE. */
6091
6092 tree
6093 constant_boolean_node (bool value, tree type)
6094 {
6095 if (type == integer_type_node)
6096 return value ? integer_one_node : integer_zero_node;
6097 else if (type == boolean_type_node)
6098 return value ? boolean_true_node : boolean_false_node;
6099 else if (TREE_CODE (type) == VECTOR_TYPE)
6100 return build_vector_from_val (type,
6101 build_int_cst (TREE_TYPE (type),
6102 value ? -1 : 0));
6103 else
6104 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6105 }
6106
6107
6108 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6109 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6110 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6111 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6112 COND is the first argument to CODE; otherwise (as in the example
6113 given here), it is the second argument. TYPE is the type of the
6114 original expression. Return NULL_TREE if no simplification is
6115 possible. */
6116
6117 static tree
6118 fold_binary_op_with_conditional_arg (location_t loc,
6119 enum tree_code code,
6120 tree type, tree op0, tree op1,
6121 tree cond, tree arg, int cond_first_p)
6122 {
6123 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6124 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6125 tree test, true_value, false_value;
6126 tree lhs = NULL_TREE;
6127 tree rhs = NULL_TREE;
6128 enum tree_code cond_code = COND_EXPR;
6129
6130 if (TREE_CODE (cond) == COND_EXPR
6131 || TREE_CODE (cond) == VEC_COND_EXPR)
6132 {
6133 test = TREE_OPERAND (cond, 0);
6134 true_value = TREE_OPERAND (cond, 1);
6135 false_value = TREE_OPERAND (cond, 2);
6136 /* If this operand throws an exception, then it does not make
6137 sense to try to perform a logical or arithmetic operation
6138 involving it. */
6139 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6140 lhs = true_value;
6141 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6142 rhs = false_value;
6143 }
6144 else
6145 {
6146 tree testtype = TREE_TYPE (cond);
6147 test = cond;
6148 true_value = constant_boolean_node (true, testtype);
6149 false_value = constant_boolean_node (false, testtype);
6150 }
6151
6152 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6153 cond_code = VEC_COND_EXPR;
6154
6155 /* This transformation is only worthwhile if we don't have to wrap ARG
6156 in a SAVE_EXPR and the operation can be simplified without recursing
6157 on at least one of the branches once it is pushed inside the COND_EXPR. */
6158 if (!TREE_CONSTANT (arg)
6159 && (TREE_SIDE_EFFECTS (arg)
6160 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6161 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6162 return NULL_TREE;
6163
6164 arg = fold_convert_loc (loc, arg_type, arg);
6165 if (lhs == 0)
6166 {
6167 true_value = fold_convert_loc (loc, cond_type, true_value);
6168 if (cond_first_p)
6169 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6170 else
6171 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6172 }
6173 if (rhs == 0)
6174 {
6175 false_value = fold_convert_loc (loc, cond_type, false_value);
6176 if (cond_first_p)
6177 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6178 else
6179 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6180 }
6181
6182 /* Check that we have simplified at least one of the branches. */
6183 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6184 return NULL_TREE;
6185
6186 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6187 }
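/* Editorial sketch (not part of the original source): the distribution
   performed above, in plain C.  It pays off when at least one arm then
   folds further.  */

static int
add_over_cond (int a, int b)
{
  /* a + (b ? 1 : 0) becomes b ? (a + 1) : (a + 0), and the false arm
     then simplifies to plain a.  */
  return b ? (a + 1) : a;
}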
6188
6189 \f
6190 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6191
6192 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6193 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6194 ADDEND is the same as X.
6195
6196 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6197 and finite. The problematic cases are when X is zero, and its mode
6198 has signed zeros. In the case of rounding towards -infinity,
6199 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6200 modes, X + 0 is not the same as X because -0 + 0 is 0. */
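/* Editorial sketch (not part of the original source): why the sign of
   zero blocks the fold.  Under round-to-nearest, -0.0 + 0.0 is +0.0,
   so "x + 0.0" is not an identity when x may be -0.0; the difference
   is observable with copysign.  */

#include <math.h>

static double
add_pos_zero (double x)
{
  return x + 0.0;	/* maps -0.0 to +0.0 */
}

/* copysign (1.0, add_pos_zero (-0.0)) is +1.0, whereas
   copysign (1.0, -0.0) is -1.0.  */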
6201
6202 bool
6203 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6204 {
6205 if (!real_zerop (addend))
6206 return false;
6207
6208 /* Don't allow the fold with -fsignaling-nans. */
6209 if (HONOR_SNANS (TYPE_MODE (type)))
6210 return false;
6211
6212 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6213 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6214 return true;
6215
6216 /* In a vector or complex, we would need to check the sign of all zeros. */
6217 if (TREE_CODE (addend) != REAL_CST)
6218 return false;
6219
6220 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6221 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6222 negate = !negate;
6223
6224 /* The mode has signed zeros, and we have to honor their sign.
6225 In this situation, there is only one case we can return true for.
6226 X - 0 is the same as X unless rounding towards -infinity is
6227 supported. */
6228 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6229 }
6230
6231 /* Subroutine of fold() that checks comparisons of built-in math
6232 functions against real constants.
6233
6234 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6235 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6236 is the type of the result and ARG0 and ARG1 are the operands of the
6237 comparison. ARG1 must be a TREE_REAL_CST.
6238
6239 The function returns the constant folded tree if a simplification
6240 can be made, and NULL_TREE otherwise. */
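/* Editorial sketch (not part of the original source): the central
   sqrt rewrite below, assuming c >= 0, no NaN inputs, and that c * c
   neither overflows to infinity nor loses a decisive bit in
   rounding.  */

static int
sqrt_above (double x, double c)
{
  /* sqrt (x) > c folds to:  */
  return x > c * c;
}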
6241
6242 static tree
6243 fold_mathfn_compare (location_t loc,
6244 enum built_in_function fcode, enum tree_code code,
6245 tree type, tree arg0, tree arg1)
6246 {
6247 REAL_VALUE_TYPE c;
6248
6249 if (BUILTIN_SQRT_P (fcode))
6250 {
6251 tree arg = CALL_EXPR_ARG (arg0, 0);
6252 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6253
6254 c = TREE_REAL_CST (arg1);
6255 if (REAL_VALUE_NEGATIVE (c))
6256 {
6257 /* sqrt(x) < y is always false, if y is negative. */
6258 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6259 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6260
6261 /* sqrt(x) > y is always true, if y is negative and we
6262 don't care about NaNs, i.e. negative values of x. */
6263 if (code == NE_EXPR || !HONOR_NANS (mode))
6264 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6265
6266 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6267 return fold_build2_loc (loc, GE_EXPR, type, arg,
6268 build_real (TREE_TYPE (arg), dconst0));
6269 }
6270 else if (code == GT_EXPR || code == GE_EXPR)
6271 {
6272 REAL_VALUE_TYPE c2;
6273
6274 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6275 real_convert (&c2, mode, &c2);
6276
6277 if (REAL_VALUE_ISINF (c2))
6278 {
6279 /* sqrt(x) > y is x == +Inf, when y is very large. */
6280 if (HONOR_INFINITIES (mode))
6281 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6282 build_real (TREE_TYPE (arg), c2));
6283
6284 /* sqrt(x) > y is always false, when y is very large
6285 and we don't care about infinities. */
6286 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6287 }
6288
6289 /* sqrt(x) > c is the same as x > c*c. */
6290 return fold_build2_loc (loc, code, type, arg,
6291 build_real (TREE_TYPE (arg), c2));
6292 }
6293 else if (code == LT_EXPR || code == LE_EXPR)
6294 {
6295 REAL_VALUE_TYPE c2;
6296
6297 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6298 real_convert (&c2, mode, &c2);
6299
6300 if (REAL_VALUE_ISINF (c2))
6301 {
6302 /* sqrt(x) < y is always true, when y is a very large
6303 value and we don't care about NaNs or Infinities. */
6304 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6305 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6306
6307 /* sqrt(x) < y is x != +Inf when y is very large and we
6308 don't care about NaNs. */
6309 if (! HONOR_NANS (mode))
6310 return fold_build2_loc (loc, NE_EXPR, type, arg,
6311 build_real (TREE_TYPE (arg), c2));
6312
6313 /* sqrt(x) < y is x >= 0 when y is very large and we
6314 don't care about Infinities. */
6315 if (! HONOR_INFINITIES (mode))
6316 return fold_build2_loc (loc, GE_EXPR, type, arg,
6317 build_real (TREE_TYPE (arg), dconst0));
6318
6319 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6320 arg = save_expr (arg);
6321 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6322 fold_build2_loc (loc, GE_EXPR, type, arg,
6323 build_real (TREE_TYPE (arg),
6324 dconst0)),
6325 fold_build2_loc (loc, NE_EXPR, type, arg,
6326 build_real (TREE_TYPE (arg),
6327 c2)));
6328 }
6329
6330 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6331 if (! HONOR_NANS (mode))
6332 return fold_build2_loc (loc, code, type, arg,
6333 build_real (TREE_TYPE (arg), c2));
6334
6335 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6336 arg = save_expr (arg);
6337 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6338 fold_build2_loc (loc, GE_EXPR, type, arg,
6339 build_real (TREE_TYPE (arg),
6340 dconst0)),
6341 fold_build2_loc (loc, code, type, arg,
6342 build_real (TREE_TYPE (arg),
6343 c2)));
6344 }
6345 }
6346
6347 return NULL_TREE;
6348 }
6349
6350 /* Subroutine of fold() that optimizes comparisons against Infinities,
6351 either +Inf or -Inf.
6352
6353 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6354 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6355 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6356
6357 The function returns the constant folded tree if a simplification
6358 can be made, and NULL_TREE otherwise. */
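/* Editorial sketch (not part of the original source): the DBL_MAX
   rewrites produced below, for double operands.  NaN operands make
   both sides of each equivalence false, so they agree there too.  */

#include <float.h>

static int
eq_pos_inf (double x)
{
  /* x == +Inf (and x >= +Inf) become:  */
  return x > DBL_MAX;
}

static int
lt_pos_inf (double x)
{
  /* x < +Inf becomes:  */
  return x <= DBL_MAX;
}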
6359
6360 static tree
6361 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6362 tree arg0, tree arg1)
6363 {
6364 enum machine_mode mode;
6365 REAL_VALUE_TYPE max;
6366 tree temp;
6367 bool neg;
6368
6369 mode = TYPE_MODE (TREE_TYPE (arg0));
6370
6371 /* For negative infinity swap the sense of the comparison. */
6372 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6373 if (neg)
6374 code = swap_tree_comparison (code);
6375
6376 switch (code)
6377 {
6378 case GT_EXPR:
6379 /* x > +Inf is always false, if we ignore sNaNs. */
6380 if (HONOR_SNANS (mode))
6381 return NULL_TREE;
6382 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6383
6384 case LE_EXPR:
6385 /* x <= +Inf is always true, if we don't care about NaNs. */
6386 if (! HONOR_NANS (mode))
6387 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6388
6389 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6390 arg0 = save_expr (arg0);
6391 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6392
6393 case EQ_EXPR:
6394 case GE_EXPR:
6395 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6396 real_maxval (&max, neg, mode);
6397 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6398 arg0, build_real (TREE_TYPE (arg0), max));
6399
6400 case LT_EXPR:
6401 /* x < +Inf is always equal to x <= DBL_MAX. */
6402 real_maxval (&max, neg, mode);
6403 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6404 arg0, build_real (TREE_TYPE (arg0), max));
6405
6406 case NE_EXPR:
6407 /* x != +Inf is always equal to !(x > DBL_MAX). */
6408 real_maxval (&max, neg, mode);
6409 if (! HONOR_NANS (mode))
6410 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6411 arg0, build_real (TREE_TYPE (arg0), max));
6412
6413 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6414 arg0, build_real (TREE_TYPE (arg0), max));
6415 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6416
6417 default:
6418 break;
6419 }
6420
6421 return NULL_TREE;
6422 }
6423
6424 /* Subroutine of fold() that optimizes comparisons of a division by
6425 a nonzero integer constant against an integer constant, i.e.
6426 X/C1 op C2.
6427
6428 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6429 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6430 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6431
6432 The function returns the constant folded tree if a simplification
6433 can be made, and NULL_TREE otherwise. */
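/* Editorial sketch (not part of the original source): the range check
   that replaces a division compare.  With truncating signed division
   and C1 = 3, C2 = 2, we get lo = 3 * 2 and hi = lo + (3 - 1), so
   x / 3 == 2 holds exactly for x in [6, 8].  */

static int
div_is_two (int x)
{
  /* x / 3 == 2 folds to:  */
  return x >= 6 && x <= 8;
}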
6434
6435 static tree
6436 fold_div_compare (location_t loc,
6437 enum tree_code code, tree type, tree arg0, tree arg1)
6438 {
6439 tree prod, tmp, hi, lo;
6440 tree arg00 = TREE_OPERAND (arg0, 0);
6441 tree arg01 = TREE_OPERAND (arg0, 1);
6442 double_int val;
6443 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6444 bool neg_overflow;
6445 bool overflow;
6446
6447 /* We have to do this the hard way to detect unsigned overflow.
6448 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6449 val = TREE_INT_CST (arg01)
6450 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6451 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6452 neg_overflow = false;
6453
6454 if (unsigned_p)
6455 {
6456 tmp = int_const_binop (MINUS_EXPR, arg01,
6457 build_int_cst (TREE_TYPE (arg01), 1));
6458 lo = prod;
6459
6460 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6461 val = TREE_INT_CST (prod)
6462 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6463 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6464 -1, overflow | TREE_OVERFLOW (prod));
6465 }
6466 else if (tree_int_cst_sgn (arg01) >= 0)
6467 {
6468 tmp = int_const_binop (MINUS_EXPR, arg01,
6469 build_int_cst (TREE_TYPE (arg01), 1));
6470 switch (tree_int_cst_sgn (arg1))
6471 {
6472 case -1:
6473 neg_overflow = true;
6474 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6475 hi = prod;
6476 break;
6477
6478 case 0:
6479 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6480 hi = tmp;
6481 break;
6482
6483 case 1:
6484 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6485 lo = prod;
6486 break;
6487
6488 default:
6489 gcc_unreachable ();
6490 }
6491 }
6492 else
6493 {
6494 /* A negative divisor reverses the relational operators. */
6495 code = swap_tree_comparison (code);
6496
6497 tmp = int_const_binop (PLUS_EXPR, arg01,
6498 build_int_cst (TREE_TYPE (arg01), 1));
6499 switch (tree_int_cst_sgn (arg1))
6500 {
6501 case -1:
6502 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6503 lo = prod;
6504 break;
6505
6506 case 0:
6507 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6508 lo = tmp;
6509 break;
6510
6511 case 1:
6512 neg_overflow = true;
6513 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6514 hi = prod;
6515 break;
6516
6517 default:
6518 gcc_unreachable ();
6519 }
6520 }
6521
6522 switch (code)
6523 {
6524 case EQ_EXPR:
6525 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6526 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6527 if (TREE_OVERFLOW (hi))
6528 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6529 if (TREE_OVERFLOW (lo))
6530 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6531 return build_range_check (loc, type, arg00, 1, lo, hi);
6532
6533 case NE_EXPR:
6534 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6535 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6536 if (TREE_OVERFLOW (hi))
6537 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6538 if (TREE_OVERFLOW (lo))
6539 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6540 return build_range_check (loc, type, arg00, 0, lo, hi);
6541
6542 case LT_EXPR:
6543 if (TREE_OVERFLOW (lo))
6544 {
6545 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6546 return omit_one_operand_loc (loc, type, tmp, arg00);
6547 }
6548 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6549
6550 case LE_EXPR:
6551 if (TREE_OVERFLOW (hi))
6552 {
6553 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6554 return omit_one_operand_loc (loc, type, tmp, arg00);
6555 }
6556 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6557
6558 case GT_EXPR:
6559 if (TREE_OVERFLOW (hi))
6560 {
6561 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6562 return omit_one_operand_loc (loc, type, tmp, arg00);
6563 }
6564 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6565
6566 case GE_EXPR:
6567 if (TREE_OVERFLOW (lo))
6568 {
6569 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6570 return omit_one_operand_loc (loc, type, tmp, arg00);
6571 }
6572 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6573
6574 default:
6575 break;
6576 }
6577
6578 return NULL_TREE;
6579 }
6580
6581
6582 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6583 equality/inequality test, then return a simplified form of the test
6584 using a sign test. Otherwise return NULL. RESULT_TYPE is the
6585 desired result type. */
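/* Editorial sketch (not part of the original source): the sign-bit
   special case, assuming a 32-bit int and GCC's modulo semantics for
   the unsigned-to-signed conversion.  */

static int
top_bit_set (unsigned int a)
{
  /* (a & 0x80000000u) != 0 becomes a signed comparison:  */
  return (int) a < 0;
}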
6586
6587 static tree
6588 fold_single_bit_test_into_sign_test (location_t loc,
6589 enum tree_code code, tree arg0, tree arg1,
6590 tree result_type)
6591 {
6592 /* If this is testing a single bit, we can optimize the test. */
6593 if ((code == NE_EXPR || code == EQ_EXPR)
6594 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6595 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6596 {
6597 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6598 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6599 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6600
6601 if (arg00 != NULL_TREE
6602 /* This is only a win if casting to a signed type is cheap,
6603 i.e. when arg00's type is not a partial mode. */
6604 && TYPE_PRECISION (TREE_TYPE (arg00))
6605 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6606 {
6607 tree stype = signed_type_for (TREE_TYPE (arg00));
6608 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6609 result_type,
6610 fold_convert_loc (loc, stype, arg00),
6611 build_int_cst (stype, 0));
6612 }
6613 }
6614
6615 return NULL_TREE;
6616 }
6617
6618 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6619 equality/inequality test, then return a simplified form of
6620 the test using shifts and logical operations. Otherwise return
6621 NULL. RESULT_TYPE is the desired result type. */
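/* Editorial sketch (not part of the original source): the shift form
   built below for a single-bit test with C = 1 << 5.  */

static unsigned int
bit5_set (unsigned int a)
{
  /* (a & 32) != 0 becomes:  */
  return (a >> 5) & 1;
}

static unsigned int
bit5_clear (unsigned int a)
{
  /* (a & 32) == 0 additionally XORs in a 1 before the final AND:  */
  return ((a >> 5) ^ 1) & 1;
}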
6622
6623 tree
6624 fold_single_bit_test (location_t loc, enum tree_code code,
6625 tree arg0, tree arg1, tree result_type)
6626 {
6627 /* If this is testing a single bit, we can optimize the test. */
6628 if ((code == NE_EXPR || code == EQ_EXPR)
6629 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6630 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6631 {
6632 tree inner = TREE_OPERAND (arg0, 0);
6633 tree type = TREE_TYPE (arg0);
6634 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6635 enum machine_mode operand_mode = TYPE_MODE (type);
6636 int ops_unsigned;
6637 tree signed_type, unsigned_type, intermediate_type;
6638 tree tem, one;
6639
6640 /* First, see if we can fold the single bit test into a sign-bit
6641 test. */
6642 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6643 result_type);
6644 if (tem)
6645 return tem;
6646
6647 /* Otherwise we have (A & C) != 0 where C is a single bit,
6648 convert that into ((A >> C2) & 1), where C2 = log2(C).
6649 Similarly for (A & C) == 0. */
6650
6651 /* If INNER is a right shift of a constant and it plus BITNUM does
6652 not overflow, adjust BITNUM and INNER. */
6653 if (TREE_CODE (inner) == RSHIFT_EXPR
6654 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6655 && host_integerp (TREE_OPERAND (inner, 1), 1)
6656 && bitnum < TYPE_PRECISION (type)
6657 && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
6658 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6659 {
6660 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6661 inner = TREE_OPERAND (inner, 0);
6662 }
6663
6664 /* If we are going to be able to omit the AND below, we must do our
6665 operations as unsigned. If we must use the AND, we have a choice.
6666 Normally unsigned is faster, but for some machines signed is. */
6667 #ifdef LOAD_EXTEND_OP
6668 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6669 && !flag_syntax_only) ? 0 : 1;
6670 #else
6671 ops_unsigned = 1;
6672 #endif
6673
6674 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6675 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6676 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6677 inner = fold_convert_loc (loc, intermediate_type, inner);
6678
6679 if (bitnum != 0)
6680 inner = build2 (RSHIFT_EXPR, intermediate_type,
6681 inner, size_int (bitnum));
6682
6683 one = build_int_cst (intermediate_type, 1);
6684
6685 if (code == EQ_EXPR)
6686 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6687
6688 /* Put the AND last so it can combine with more things. */
6689 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6690
6691 /* Make sure to return the proper type. */
6692 inner = fold_convert_loc (loc, result_type, inner);
6693
6694 return inner;
6695 }
6696 return NULL_TREE;
6697 }
6698
6699 /* Check whether we are allowed to reorder operands arg0 and arg1,
6700 such that the evaluation of arg1 occurs before arg0. */
6701
6702 static bool
6703 reorder_operands_p (const_tree arg0, const_tree arg1)
6704 {
6705 if (! flag_evaluation_order)
6706 return true;
6707 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6708 return true;
6709 return ! TREE_SIDE_EFFECTS (arg0)
6710 && ! TREE_SIDE_EFFECTS (arg1);
6711 }
6712
6713 /* Test whether it is preferable to swap two operands, ARG0 and
6714 ARG1, for example because ARG0 is an integer constant and ARG1
6715 isn't. If REORDER is true, only recommend swapping if we can
6716 evaluate the operands in reverse order. */
6717
6718 bool
6719 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6720 {
6721 STRIP_SIGN_NOPS (arg0);
6722 STRIP_SIGN_NOPS (arg1);
6723
6724 if (TREE_CODE (arg1) == INTEGER_CST)
6725 return 0;
6726 if (TREE_CODE (arg0) == INTEGER_CST)
6727 return 1;
6728
6729 if (TREE_CODE (arg1) == REAL_CST)
6730 return 0;
6731 if (TREE_CODE (arg0) == REAL_CST)
6732 return 1;
6733
6734 if (TREE_CODE (arg1) == FIXED_CST)
6735 return 0;
6736 if (TREE_CODE (arg0) == FIXED_CST)
6737 return 1;
6738
6739 if (TREE_CODE (arg1) == COMPLEX_CST)
6740 return 0;
6741 if (TREE_CODE (arg0) == COMPLEX_CST)
6742 return 1;
6743
6744 if (TREE_CONSTANT (arg1))
6745 return 0;
6746 if (TREE_CONSTANT (arg0))
6747 return 1;
6748
6749 if (optimize_function_for_size_p (cfun))
6750 return 0;
6751
6752 if (reorder && flag_evaluation_order
6753 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6754 return 0;
6755
6756 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6757 for commutative and comparison operators. Ensuring a canonical
6758 form allows the optimizers to find additional redundancies without
6759 having to explicitly check for both orderings. */
6760 if (TREE_CODE (arg0) == SSA_NAME
6761 && TREE_CODE (arg1) == SSA_NAME
6762 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6763 return 1;
6764
6765 /* Put SSA_NAMEs last. */
6766 if (TREE_CODE (arg1) == SSA_NAME)
6767 return 0;
6768 if (TREE_CODE (arg0) == SSA_NAME)
6769 return 1;
6770
6771 /* Put variables last. */
6772 if (DECL_P (arg1))
6773 return 0;
6774 if (DECL_P (arg0))
6775 return 1;
6776
6777 return 0;
6778 }
6779
6780 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6781 ARG0 is extended to a wider type. */
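/* Editorial sketch (not part of the original source): two outcomes of
   the widened-comparison fold, assuming an 8-bit unsigned char.  */

static int
widened_eq (unsigned char c)
{
  /* (int) c == 65 can be evaluated in the narrow type:  */
  return c == 65;
}

static int
widened_known (unsigned char c)
{
  /* (int) c == 300 is known false: 300 does not fit in the range of
     unsigned char.  */
  (void) c;
  return 0;
}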
6782
6783 static tree
6784 fold_widened_comparison (location_t loc, enum tree_code code,
6785 tree type, tree arg0, tree arg1)
6786 {
6787 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6788 tree arg1_unw;
6789 tree shorter_type, outer_type;
6790 tree min, max;
6791 bool above, below;
6792
6793 if (arg0_unw == arg0)
6794 return NULL_TREE;
6795 shorter_type = TREE_TYPE (arg0_unw);
6796
6797 #ifdef HAVE_canonicalize_funcptr_for_compare
6798 /* Disable this optimization if we're casting a function pointer
6799 type on targets that require function pointer canonicalization. */
6800 if (HAVE_canonicalize_funcptr_for_compare
6801 && TREE_CODE (shorter_type) == POINTER_TYPE
6802 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6803 return NULL_TREE;
6804 #endif
6805
6806 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6807 return NULL_TREE;
6808
6809 arg1_unw = get_unwidened (arg1, NULL_TREE);
6810
6811 /* If possible, express the comparison in the shorter mode. */
6812 if ((code == EQ_EXPR || code == NE_EXPR
6813 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6814 && (TREE_TYPE (arg1_unw) == shorter_type
6815 || ((TYPE_PRECISION (shorter_type)
6816 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6817 && (TYPE_UNSIGNED (shorter_type)
6818 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6819 || (TREE_CODE (arg1_unw) == INTEGER_CST
6820 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6821 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6822 && int_fits_type_p (arg1_unw, shorter_type))))
6823 return fold_build2_loc (loc, code, type, arg0_unw,
6824 fold_convert_loc (loc, shorter_type, arg1_unw));
6825
6826 if (TREE_CODE (arg1_unw) != INTEGER_CST
6827 || TREE_CODE (shorter_type) != INTEGER_TYPE
6828 || !int_fits_type_p (arg1_unw, shorter_type))
6829 return NULL_TREE;
6830
6831 /* If we are comparing with an integer that does not fit into the range
6832 of the shorter type, the result is known. */
6833 outer_type = TREE_TYPE (arg1_unw);
6834 min = lower_bound_in_type (outer_type, shorter_type);
6835 max = upper_bound_in_type (outer_type, shorter_type);
6836
6837 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6838 max, arg1_unw));
6839 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6840 arg1_unw, min));
6841
6842 switch (code)
6843 {
6844 case EQ_EXPR:
6845 if (above || below)
6846 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6847 break;
6848
6849 case NE_EXPR:
6850 if (above || below)
6851 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6852 break;
6853
6854 case LT_EXPR:
6855 case LE_EXPR:
6856 if (above)
6857 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6858 else if (below)
6859 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6860 break;
6861 case GT_EXPR:
6862 case GE_EXPR:
6863 if (above)
6864 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6865 else if (below)
6866 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6867 break;
6868 default:
6869 break;
6870 }
6871
6872 return NULL_TREE;
6873 }
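/* Illustrative example (hypothetical, not from the sources): for
   "unsigned char c", the widened comparison (int) c == 300 is folded
   to constant 0 here, since 300 lies above upper_bound_in_type for
   unsigned char, while (int) c < 300 is folded to constant 1.  */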
6874
6875 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6876 ARG0 just the signedness is changed. */
6877
6878 static tree
6879 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6880 tree arg0, tree arg1)
6881 {
6882 tree arg0_inner;
6883 tree inner_type, outer_type;
6884
6885 if (!CONVERT_EXPR_P (arg0))
6886 return NULL_TREE;
6887
6888 outer_type = TREE_TYPE (arg0);
6889 arg0_inner = TREE_OPERAND (arg0, 0);
6890 inner_type = TREE_TYPE (arg0_inner);
6891
6892 #ifdef HAVE_canonicalize_funcptr_for_compare
6893 /* Disable this optimization if we're casting a function pointer
6894 type on targets that require function pointer canonicalization. */
6895 if (HAVE_canonicalize_funcptr_for_compare
6896 && TREE_CODE (inner_type) == POINTER_TYPE
6897 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6898 return NULL_TREE;
6899 #endif
6900
6901 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6902 return NULL_TREE;
6903
6904 if (TREE_CODE (arg1) != INTEGER_CST
6905 && !(CONVERT_EXPR_P (arg1)
6906 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6907 return NULL_TREE;
6908
6909 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6910 && code != NE_EXPR
6911 && code != EQ_EXPR)
6912 return NULL_TREE;
6913
6914 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6915 return NULL_TREE;
6916
6917 if (TREE_CODE (arg1) == INTEGER_CST)
6918 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6919 0, TREE_OVERFLOW (arg1));
6920 else
6921 arg1 = fold_convert_loc (loc, inner_type, arg1);
6922
6923 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6924 }
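/* Illustrative example (hypothetical): for "unsigned int u", the
   comparison (int) u == 5 becomes u == 5U here; the cast changes only
   the signedness, the precision is unchanged, and the INTEGER_CST is
   refitted into the inner type.  */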
6925
6926 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6927 the step of the array. Reconstructs s and delta in the case of s *
6928 delta being an integer constant (and thus already folded). ADDR is
6929 the address. OP1 is the multiplicative expression. If the
6930 function succeeds, the new address expression is returned.
6931 Otherwise NULL_TREE is returned. LOC is the location of the
6932 resulting expression. */
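/* A worked example (hypothetical, assuming 4-byte int): given
   "int a[10]", ADDR = &a[1] and OP1 = 8, the element size (step)
   is 4, so delta = 8 / 4 = 2 and the result is &a[1 + 2].  */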
6933
6934 static tree
6935 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6936 {
6937 tree s, delta, step;
6938 tree ref = TREE_OPERAND (addr, 0), pref;
6939 tree ret, pos;
6940 tree itype;
6941 bool mdim = false;
6942
6943 /* Strip the nops that might be added when converting op1 to sizetype. */
6944 STRIP_NOPS (op1);
6945
6946 /* Canonicalize op1 into a possibly non-constant delta
6947 and an INTEGER_CST s. */
6948 if (TREE_CODE (op1) == MULT_EXPR)
6949 {
6950 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6951
6952 STRIP_NOPS (arg0);
6953 STRIP_NOPS (arg1);
6954
6955 if (TREE_CODE (arg0) == INTEGER_CST)
6956 {
6957 s = arg0;
6958 delta = arg1;
6959 }
6960 else if (TREE_CODE (arg1) == INTEGER_CST)
6961 {
6962 s = arg1;
6963 delta = arg0;
6964 }
6965 else
6966 return NULL_TREE;
6967 }
6968 else if (TREE_CODE (op1) == INTEGER_CST)
6969 {
6970 delta = op1;
6971 s = NULL_TREE;
6972 }
6973 else
6974 {
6975 /* Treat op1 as delta * 1. */
6976 delta = op1;
6977 s = integer_one_node;
6978 }
6979
6980 /* Handle &x.array the same as we would handle &x.array[0]. */
6981 if (TREE_CODE (ref) == COMPONENT_REF
6982 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6983 {
6984 tree domain;
6985
6986 /* Remember if this was a multi-dimensional array. */
6987 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6988 mdim = true;
6989
6990 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6991 if (! domain)
6992 goto cont;
6993 itype = TREE_TYPE (domain);
6994
6995 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6996 if (TREE_CODE (step) != INTEGER_CST)
6997 goto cont;
6998
6999 if (s)
7000 {
7001 if (! tree_int_cst_equal (step, s))
7002 goto cont;
7003 }
7004 else
7005 {
7006 /* Check whether delta is a multiple of step. */
7007 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7008 if (! tmp)
7009 goto cont;
7010 delta = tmp;
7011 }
7012
7013 /* Only fold here if we can verify we do not overflow one
7014 dimension of a multi-dimensional array. */
7015 if (mdim)
7016 {
7017 tree tmp;
7018
7019 if (!TYPE_MIN_VALUE (domain)
7020 || !TYPE_MAX_VALUE (domain)
7021 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7022 goto cont;
7023
7024 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7025 fold_convert_loc (loc, itype,
7026 TYPE_MIN_VALUE (domain)),
7027 fold_convert_loc (loc, itype, delta));
7028 if (TREE_CODE (tmp) != INTEGER_CST
7029 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7030 goto cont;
7031 }
7032
7033 /* We found a suitable component reference. */
7034
7035 pref = TREE_OPERAND (addr, 0);
7036 ret = copy_node (pref);
7037 SET_EXPR_LOCATION (ret, loc);
7038
7039 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7040 fold_build2_loc
7041 (loc, PLUS_EXPR, itype,
7042 fold_convert_loc (loc, itype,
7043 TYPE_MIN_VALUE
7044 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7045 fold_convert_loc (loc, itype, delta)),
7046 NULL_TREE, NULL_TREE);
7047 return build_fold_addr_expr_loc (loc, ret);
7048 }
7049
7050 cont:
7051
7052 for (;; ref = TREE_OPERAND (ref, 0))
7053 {
7054 if (TREE_CODE (ref) == ARRAY_REF)
7055 {
7056 tree domain;
7057
7058 /* Remember if this was a multi-dimensional array. */
7059 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7060 mdim = true;
7061
7062 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7063 if (! domain)
7064 continue;
7065 itype = TREE_TYPE (domain);
7066
7067 step = array_ref_element_size (ref);
7068 if (TREE_CODE (step) != INTEGER_CST)
7069 continue;
7070
7071 if (s)
7072 {
7073 if (! tree_int_cst_equal (step, s))
7074 continue;
7075 }
7076 else
7077 {
7078 /* Check whether delta is a multiple of step. */
7079 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7080 if (! tmp)
7081 continue;
7082 delta = tmp;
7083 }
7084
7085 /* Only fold here if we can verify we do not overflow one
7086 dimension of a multi-dimensional array. */
7087 if (mdim)
7088 {
7089 tree tmp;
7090
7091 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7092 || !TYPE_MAX_VALUE (domain)
7093 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7094 continue;
7095
7096 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7097 fold_convert_loc (loc, itype,
7098 TREE_OPERAND (ref, 1)),
7099 fold_convert_loc (loc, itype, delta));
7100 if (!tmp
7101 || TREE_CODE (tmp) != INTEGER_CST
7102 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7103 continue;
7104 }
7105
7106 break;
7107 }
7108 else
7109 mdim = false;
7110
7111 if (!handled_component_p (ref))
7112 return NULL_TREE;
7113 }
7114
7115 /* We found a suitable array reference. Copy everything up to it,
7116 and replace the index. */
7117
7118 pref = TREE_OPERAND (addr, 0);
7119 ret = copy_node (pref);
7120 SET_EXPR_LOCATION (ret, loc);
7121 pos = ret;
7122
7123 while (pref != ref)
7124 {
7125 pref = TREE_OPERAND (pref, 0);
7126 TREE_OPERAND (pos, 0) = copy_node (pref);
7127 pos = TREE_OPERAND (pos, 0);
7128 }
7129
7130 TREE_OPERAND (pos, 1)
7131 = fold_build2_loc (loc, PLUS_EXPR, itype,
7132 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7133 fold_convert_loc (loc, itype, delta));
7134 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7135 }
7136
7137
7138 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7139 means A >= Y && A != MAX, but in this case we know that
7140 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
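/* Sketch of the reasoning: A + 1 > Y is equivalent to A >= Y only
   when A + 1 does not wrap around.  The bound A < X implies A < MAX,
   so the increment cannot overflow; hence e.g. "a < n && a + 1 > m"
   can safely become "a < n && a >= m".  */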
7141
7142 static tree
7143 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7144 {
7145 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7146
7147 if (TREE_CODE (bound) == LT_EXPR)
7148 a = TREE_OPERAND (bound, 0);
7149 else if (TREE_CODE (bound) == GT_EXPR)
7150 a = TREE_OPERAND (bound, 1);
7151 else
7152 return NULL_TREE;
7153
7154 typea = TREE_TYPE (a);
7155 if (!INTEGRAL_TYPE_P (typea)
7156 && !POINTER_TYPE_P (typea))
7157 return NULL_TREE;
7158
7159 if (TREE_CODE (ineq) == LT_EXPR)
7160 {
7161 a1 = TREE_OPERAND (ineq, 1);
7162 y = TREE_OPERAND (ineq, 0);
7163 }
7164 else if (TREE_CODE (ineq) == GT_EXPR)
7165 {
7166 a1 = TREE_OPERAND (ineq, 0);
7167 y = TREE_OPERAND (ineq, 1);
7168 }
7169 else
7170 return NULL_TREE;
7171
7172 if (TREE_TYPE (a1) != typea)
7173 return NULL_TREE;
7174
7175 if (POINTER_TYPE_P (typea))
7176 {
7177 /* Convert the pointers to signed integers before taking the difference. */
7178 tree ta = fold_convert_loc (loc, ssizetype, a);
7179 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7180 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7181 }
7182 else
7183 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7184
7185 if (!diff || !integer_onep (diff))
7186 return NULL_TREE;
7187
7188 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7189 }
7190
7191 /* Fold a sum or difference of at least one multiplication.
7192 Returns the folded tree or NULL if no simplification could be made. */
7193
7194 static tree
7195 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7196 tree arg0, tree arg1)
7197 {
7198 tree arg00, arg01, arg10, arg11;
7199 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7200
7201 /* (A * C) +- (B * C) -> (A+-B) * C.
7202 (A * C) +- A -> A * (C+-1).
7203 We are most concerned about the case where C is a constant,
7204 but other combinations show up during loop reduction. Since
7205 it is not difficult, try all four possibilities. */
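/* Illustrative cases (hypothetical values): x*3 + y*3 becomes
   (x + y) * 3, and x*7 - x becomes x * 6.  The power-of-two case
   below turns e.g. i*12 + j*4 into (i*3 + j) * 4.  */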
7206
7207 if (TREE_CODE (arg0) == MULT_EXPR)
7208 {
7209 arg00 = TREE_OPERAND (arg0, 0);
7210 arg01 = TREE_OPERAND (arg0, 1);
7211 }
7212 else if (TREE_CODE (arg0) == INTEGER_CST)
7213 {
7214 arg00 = build_one_cst (type);
7215 arg01 = arg0;
7216 }
7217 else
7218 {
7219 /* We cannot generate constant 1 for fract. */
7220 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7221 return NULL_TREE;
7222 arg00 = arg0;
7223 arg01 = build_one_cst (type);
7224 }
7225 if (TREE_CODE (arg1) == MULT_EXPR)
7226 {
7227 arg10 = TREE_OPERAND (arg1, 0);
7228 arg11 = TREE_OPERAND (arg1, 1);
7229 }
7230 else if (TREE_CODE (arg1) == INTEGER_CST)
7231 {
7232 arg10 = build_one_cst (type);
7233 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7234 the purpose of this canonicalization. */
7235 if (TREE_INT_CST_HIGH (arg1) == -1
7236 && negate_expr_p (arg1)
7237 && code == PLUS_EXPR)
7238 {
7239 arg11 = negate_expr (arg1);
7240 code = MINUS_EXPR;
7241 }
7242 else
7243 arg11 = arg1;
7244 }
7245 else
7246 {
7247 /* We cannot generate constant 1 for fract. */
7248 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7249 return NULL_TREE;
7250 arg10 = arg1;
7251 arg11 = build_one_cst (type);
7252 }
7253 same = NULL_TREE;
7254
7255 if (operand_equal_p (arg01, arg11, 0))
7256 same = arg01, alt0 = arg00, alt1 = arg10;
7257 else if (operand_equal_p (arg00, arg10, 0))
7258 same = arg00, alt0 = arg01, alt1 = arg11;
7259 else if (operand_equal_p (arg00, arg11, 0))
7260 same = arg00, alt0 = arg01, alt1 = arg10;
7261 else if (operand_equal_p (arg01, arg10, 0))
7262 same = arg01, alt0 = arg00, alt1 = arg11;
7263
7264 /* No identical multiplicands; see if we can find a common
7265 power-of-two factor in non-power-of-two multiplies. This
7266 can help in multi-dimensional array access. */
7267 else if (host_integerp (arg01, 0)
7268 && host_integerp (arg11, 0))
7269 {
7270 HOST_WIDE_INT int01, int11, tmp;
7271 bool swap = false;
7272 tree maybe_same;
7273 int01 = TREE_INT_CST_LOW (arg01);
7274 int11 = TREE_INT_CST_LOW (arg11);
7275
7276 /* Move min of absolute values to int11. */
7277 if (absu_hwi (int01) < absu_hwi (int11))
7278 {
7279 tmp = int01, int01 = int11, int11 = tmp;
7280 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7281 maybe_same = arg01;
7282 swap = true;
7283 }
7284 else
7285 maybe_same = arg11;
7286
7287 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7288 /* The remainder should not be a constant, otherwise we
7289 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7290 would increase the number of multiplications necessary. */
7291 && TREE_CODE (arg10) != INTEGER_CST)
7292 {
7293 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7294 build_int_cst (TREE_TYPE (arg00),
7295 int01 / int11));
7296 alt1 = arg10;
7297 same = maybe_same;
7298 if (swap)
7299 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7300 }
7301 }
7302
7303 if (same)
7304 return fold_build2_loc (loc, MULT_EXPR, type,
7305 fold_build2_loc (loc, code, type,
7306 fold_convert_loc (loc, type, alt0),
7307 fold_convert_loc (loc, type, alt1)),
7308 fold_convert_loc (loc, type, same));
7309
7310 return NULL_TREE;
7311 }
7312
7313 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7314 specified by EXPR into the buffer PTR of length LEN bytes.
7315 Return the number of bytes placed in the buffer, or zero
7316 upon failure. */
7317
7318 static int
7319 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7320 {
7321 tree type = TREE_TYPE (expr);
7322 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7323 int byte, offset, word, words;
7324 unsigned char value;
7325
7326 if (total_bytes > len)
7327 return 0;
7328 words = total_bytes / UNITS_PER_WORD;
7329
7330 for (byte = 0; byte < total_bytes; byte++)
7331 {
7332 int bitpos = byte * BITS_PER_UNIT;
7333 if (bitpos < HOST_BITS_PER_WIDE_INT)
7334 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7335 else
7336 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7337 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7338
7339 if (total_bytes > UNITS_PER_WORD)
7340 {
7341 word = byte / UNITS_PER_WORD;
7342 if (WORDS_BIG_ENDIAN)
7343 word = (words - 1) - word;
7344 offset = word * UNITS_PER_WORD;
7345 if (BYTES_BIG_ENDIAN)
7346 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7347 else
7348 offset += byte % UNITS_PER_WORD;
7349 }
7350 else
7351 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7352 ptr[offset] = value;
7353 }
7354 return total_bytes;
7355 }
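/* Example (a sketch assuming a 32-bit int, 8-bit bytes and a
   little-endian target whose word size is at least 4 bytes):
   encoding the INTEGER_CST 0x11223344 stores the bytes
   0x44 0x33 0x22 0x11 into PTR and returns 4.  */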
7356
7357
7358 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7359 specified by EXPR into the buffer PTR of length LEN bytes.
7360 Return the number of bytes placed in the buffer, or zero
7361 upon failure. */
7362
7363 static int
7364 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7365 {
7366 tree type = TREE_TYPE (expr);
7367 enum machine_mode mode = TYPE_MODE (type);
7368 int total_bytes = GET_MODE_SIZE (mode);
7369 FIXED_VALUE_TYPE value;
7370 tree i_value, i_type;
7371
7372 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7373 return 0;
7374
7375 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7376
7377 if (NULL_TREE == i_type
7378 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7379 return 0;
7380
7381 value = TREE_FIXED_CST (expr);
7382 i_value = double_int_to_tree (i_type, value.data);
7383
7384 return native_encode_int (i_value, ptr, len);
7385 }
7386
7387
7388 /* Subroutine of native_encode_expr. Encode the REAL_CST
7389 specified by EXPR into the buffer PTR of length LEN bytes.
7390 Return the number of bytes placed in the buffer, or zero
7391 upon failure. */
7392
7393 static int
7394 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7395 {
7396 tree type = TREE_TYPE (expr);
7397 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7398 int byte, offset, word, words, bitpos;
7399 unsigned char value;
7400
7401 /* There are always 32 bits in each long, no matter the size of
7402 the host's long. We handle floating point representations with
7403 up to 192 bits. */
7404 long tmp[6];
7405
7406 if (total_bytes > len)
7407 return 0;
7408 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7409
7410 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7411
7412 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7413 bitpos += BITS_PER_UNIT)
7414 {
7415 byte = (bitpos / BITS_PER_UNIT) & 3;
7416 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7417
7418 if (UNITS_PER_WORD < 4)
7419 {
7420 word = byte / UNITS_PER_WORD;
7421 if (WORDS_BIG_ENDIAN)
7422 word = (words - 1) - word;
7423 offset = word * UNITS_PER_WORD;
7424 if (BYTES_BIG_ENDIAN)
7425 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7426 else
7427 offset += byte % UNITS_PER_WORD;
7428 }
7429 else
7430 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7431 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7432 }
7433 return total_bytes;
7434 }
7435
7436 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7437 specified by EXPR into the buffer PTR of length LEN bytes.
7438 Return the number of bytes placed in the buffer, or zero
7439 upon failure. */
7440
7441 static int
7442 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7443 {
7444 int rsize, isize;
7445 tree part;
7446
7447 part = TREE_REALPART (expr);
7448 rsize = native_encode_expr (part, ptr, len);
7449 if (rsize == 0)
7450 return 0;
7451 part = TREE_IMAGPART (expr);
7452 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7453 if (isize != rsize)
7454 return 0;
7455 return rsize + isize;
7456 }
7457
7458
7459 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7460 specified by EXPR into the buffer PTR of length LEN bytes.
7461 Return the number of bytes placed in the buffer, or zero
7462 upon failure. */
7463
7464 static int
7465 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7466 {
7467 unsigned i, count;
7468 int size, offset;
7469 tree itype, elem;
7470
7471 offset = 0;
7472 count = VECTOR_CST_NELTS (expr);
7473 itype = TREE_TYPE (TREE_TYPE (expr));
7474 size = GET_MODE_SIZE (TYPE_MODE (itype));
7475 for (i = 0; i < count; i++)
7476 {
7477 elem = VECTOR_CST_ELT (expr, i);
7478 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7479 return 0;
7480 offset += size;
7481 }
7482 return offset;
7483 }
7484
7485
7486 /* Subroutine of native_encode_expr. Encode the STRING_CST
7487 specified by EXPR into the buffer PTR of length LEN bytes.
7488 Return the number of bytes placed in the buffer, or zero
7489 upon failure. */
7490
7491 static int
7492 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7493 {
7494 tree type = TREE_TYPE (expr);
7495 HOST_WIDE_INT total_bytes;
7496
7497 if (TREE_CODE (type) != ARRAY_TYPE
7498 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7499 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7500 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7501 return 0;
7502 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7503 if (total_bytes > len)
7504 return 0;
7505 if (TREE_STRING_LENGTH (expr) < total_bytes)
7506 {
7507 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7508 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7509 total_bytes - TREE_STRING_LENGTH (expr));
7510 }
7511 else
7512 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7513 return total_bytes;
7514 }
7515
7516
7517 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7518 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7519 buffer PTR of length LEN bytes. Return the number of bytes
7520 placed in the buffer, or zero upon failure. */
7521
7522 int
7523 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7524 {
7525 switch (TREE_CODE (expr))
7526 {
7527 case INTEGER_CST:
7528 return native_encode_int (expr, ptr, len);
7529
7530 case REAL_CST:
7531 return native_encode_real (expr, ptr, len);
7532
7533 case FIXED_CST:
7534 return native_encode_fixed (expr, ptr, len);
7535
7536 case COMPLEX_CST:
7537 return native_encode_complex (expr, ptr, len);
7538
7539 case VECTOR_CST:
7540 return native_encode_vector (expr, ptr, len);
7541
7542 case STRING_CST:
7543 return native_encode_string (expr, ptr, len);
7544
7545 default:
7546 return 0;
7547 }
7548 }
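/* Typical usage (an illustrative sketch; "cst" is a placeholder for
   some constant tree, and fold_view_convert_expr below is the real
   call site following this pattern):

     unsigned char buf[64];
     int len = native_encode_expr (cst, buf, sizeof (buf));
     if (len == 0)
       return NULL_TREE;

   A zero return must always be treated as "cannot encode".  */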
7549
7550
7551 /* Subroutine of native_interpret_expr. Interpret the contents of
7552 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7553 If the buffer cannot be interpreted, return NULL_TREE. */
7554
7555 static tree
7556 native_interpret_int (tree type, const unsigned char *ptr, int len)
7557 {
7558 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7559 double_int result;
7560
7561 if (total_bytes > len
7562 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7563 return NULL_TREE;
7564
7565 result = double_int::from_buffer (ptr, total_bytes);
7566
7567 return double_int_to_tree (type, result);
7568 }
7569
7570
7571 /* Subroutine of native_interpret_expr. Interpret the contents of
7572 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7573 If the buffer cannot be interpreted, return NULL_TREE. */
7574
7575 static tree
7576 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7577 {
7578 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7579 double_int result;
7580 FIXED_VALUE_TYPE fixed_value;
7581
7582 if (total_bytes > len
7583 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7584 return NULL_TREE;
7585
7586 result = double_int::from_buffer (ptr, total_bytes);
7587 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7588
7589 return build_fixed (type, fixed_value);
7590 }
7591
7592
7593 /* Subroutine of native_interpret_expr. Interpret the contents of
7594 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7595 If the buffer cannot be interpreted, return NULL_TREE. */
7596
7597 static tree
7598 native_interpret_real (tree type, const unsigned char *ptr, int len)
7599 {
7600 enum machine_mode mode = TYPE_MODE (type);
7601 int total_bytes = GET_MODE_SIZE (mode);
7602 int byte, offset, word, words, bitpos;
7603 unsigned char value;
7604 /* There are always 32 bits in each long, no matter the size of
7605 the host's long. We handle floating point representations with
7606 up to 192 bits. */
7607 REAL_VALUE_TYPE r;
7608 long tmp[6];
7609
7610 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7611 if (total_bytes > len || total_bytes > 24)
7612 return NULL_TREE;
7613 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7614
7615 memset (tmp, 0, sizeof (tmp));
7616 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7617 bitpos += BITS_PER_UNIT)
7618 {
7619 byte = (bitpos / BITS_PER_UNIT) & 3;
7620 if (UNITS_PER_WORD < 4)
7621 {
7622 word = byte / UNITS_PER_WORD;
7623 if (WORDS_BIG_ENDIAN)
7624 word = (words - 1) - word;
7625 offset = word * UNITS_PER_WORD;
7626 if (BYTES_BIG_ENDIAN)
7627 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7628 else
7629 offset += byte % UNITS_PER_WORD;
7630 }
7631 else
7632 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7633 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7634
7635 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7636 }
7637
7638 real_from_target (&r, tmp, mode);
7639 return build_real (type, r);
7640 }
7641
7642
7643 /* Subroutine of native_interpret_expr. Interpret the contents of
7644 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7645 If the buffer cannot be interpreted, return NULL_TREE. */
7646
7647 static tree
7648 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7649 {
7650 tree etype, rpart, ipart;
7651 int size;
7652
7653 etype = TREE_TYPE (type);
7654 size = GET_MODE_SIZE (TYPE_MODE (etype));
7655 if (size * 2 > len)
7656 return NULL_TREE;
7657 rpart = native_interpret_expr (etype, ptr, size);
7658 if (!rpart)
7659 return NULL_TREE;
7660 ipart = native_interpret_expr (etype, ptr+size, size);
7661 if (!ipart)
7662 return NULL_TREE;
7663 return build_complex (type, rpart, ipart);
7664 }
7665
7666
7667 /* Subroutine of native_interpret_expr. Interpret the contents of
7668 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7669 If the buffer cannot be interpreted, return NULL_TREE. */
7670
7671 static tree
7672 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7673 {
7674 tree etype, elem;
7675 int i, size, count;
7676 tree *elements;
7677
7678 etype = TREE_TYPE (type);
7679 size = GET_MODE_SIZE (TYPE_MODE (etype));
7680 count = TYPE_VECTOR_SUBPARTS (type);
7681 if (size * count > len)
7682 return NULL_TREE;
7683
7684 elements = XALLOCAVEC (tree, count);
7685 for (i = count - 1; i >= 0; i--)
7686 {
7687 elem = native_interpret_expr (etype, ptr+(i*size), size);
7688 if (!elem)
7689 return NULL_TREE;
7690 elements[i] = elem;
7691 }
7692 return build_vector (type, elements);
7693 }
7694
7695
7696 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7697 the buffer PTR of length LEN as a constant of type TYPE. For
7698 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7699 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7700 return NULL_TREE. */
7701
7702 tree
7703 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7704 {
7705 switch (TREE_CODE (type))
7706 {
7707 case INTEGER_TYPE:
7708 case ENUMERAL_TYPE:
7709 case BOOLEAN_TYPE:
7710 case POINTER_TYPE:
7711 case REFERENCE_TYPE:
7712 return native_interpret_int (type, ptr, len);
7713
7714 case REAL_TYPE:
7715 return native_interpret_real (type, ptr, len);
7716
7717 case FIXED_POINT_TYPE:
7718 return native_interpret_fixed (type, ptr, len);
7719
7720 case COMPLEX_TYPE:
7721 return native_interpret_complex (type, ptr, len);
7722
7723 case VECTOR_TYPE:
7724 return native_interpret_vector (type, ptr, len);
7725
7726 default:
7727 return NULL_TREE;
7728 }
7729 }
7730
7731 /* Returns true if we can interpret the contents of a native encoding
7732 as TYPE. */
7733
7734 static bool
7735 can_native_interpret_type_p (tree type)
7736 {
7737 switch (TREE_CODE (type))
7738 {
7739 case INTEGER_TYPE:
7740 case ENUMERAL_TYPE:
7741 case BOOLEAN_TYPE:
7742 case POINTER_TYPE:
7743 case REFERENCE_TYPE:
7744 case FIXED_POINT_TYPE:
7745 case REAL_TYPE:
7746 case COMPLEX_TYPE:
7747 case VECTOR_TYPE:
7748 return true;
7749 default:
7750 return false;
7751 }
7752 }
7753
7754 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7755 TYPE at compile-time. If we're unable to perform the conversion
7756 return NULL_TREE. */
7757
7758 static tree
7759 fold_view_convert_expr (tree type, tree expr)
7760 {
7761 /* We support up to 512-bit values (for V8DFmode). */
7762 unsigned char buffer[64];
7763 int len;
7764
7765 /* Check that the host and target are sane. */
7766 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7767 return NULL_TREE;
7768
7769 len = native_encode_expr (expr, buffer, sizeof (buffer));
7770 if (len == 0)
7771 return NULL_TREE;
7772
7773 return native_interpret_expr (type, buffer, len);
7774 }
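/* Example (hypothetical, assuming IEEE single precision):
   VIEW_CONVERT_EXPR<int>(1.0f) is folded here to the INTEGER_CST
   0x3f800000 by encoding the REAL_CST into the buffer and
   re-interpreting those bytes as an INTEGER_TYPE.  */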
7775
7776 /* Build an expression for the address of T. Folds away INDIRECT_REF
7777 to avoid confusing the gimplify process. */
7778
7779 tree
7780 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7781 {
7782 /* The size of the object is not relevant when talking about its address. */
7783 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7784 t = TREE_OPERAND (t, 0);
7785
7786 if (TREE_CODE (t) == INDIRECT_REF)
7787 {
7788 t = TREE_OPERAND (t, 0);
7789
7790 if (TREE_TYPE (t) != ptrtype)
7791 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7792 }
7793 else if (TREE_CODE (t) == MEM_REF
7794 && integer_zerop (TREE_OPERAND (t, 1)))
7795 return TREE_OPERAND (t, 0);
7796 else if (TREE_CODE (t) == MEM_REF
7797 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7798 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7799 TREE_OPERAND (t, 0),
7800 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7801 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7802 {
7803 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7804
7805 if (TREE_TYPE (t) != ptrtype)
7806 t = fold_convert_loc (loc, ptrtype, t);
7807 }
7808 else
7809 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7810
7811 return t;
7812 }
7813
7814 /* Build an expression for the address of T. */
7815
7816 tree
7817 build_fold_addr_expr_loc (location_t loc, tree t)
7818 {
7819 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7820
7821 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7822 }
7823
7824 static bool vec_cst_ctor_to_array (tree, tree *);
7825
7826 /* Fold a unary expression of code CODE and type TYPE with operand
7827 OP0. Return the folded expression if folding is successful.
7828 Otherwise, return NULL_TREE. */
7829
7830 tree
7831 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7832 {
7833 tree tem;
7834 tree arg0;
7835 enum tree_code_class kind = TREE_CODE_CLASS (code);
7836
7837 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7838 && TREE_CODE_LENGTH (code) == 1);
7839
7840 arg0 = op0;
7841 if (arg0)
7842 {
7843 if (CONVERT_EXPR_CODE_P (code)
7844 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7845 {
7846 /* Don't use STRIP_NOPS, because signedness of argument type
7847 matters. */
7848 STRIP_SIGN_NOPS (arg0);
7849 }
7850 else
7851 {
7852 /* Strip any conversions that don't change the mode. This
7853 is safe for every expression, except for a comparison
7854 expression because its signedness is derived from its
7855 operands.
7856
7857 Note that this is done as an internal manipulation within
7858 the constant folder, in order to find the simplest
7859 representation of the arguments so that their form can be
7860 studied. In any case, the appropriate type conversions
7861 should be put back in the tree that will get out of the
7862 constant folder. */
7863 STRIP_NOPS (arg0);
7864 }
7865 }
7866
7867 if (TREE_CODE_CLASS (code) == tcc_unary)
7868 {
7869 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7870 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7871 fold_build1_loc (loc, code, type,
7872 fold_convert_loc (loc, TREE_TYPE (op0),
7873 TREE_OPERAND (arg0, 1))));
7874 else if (TREE_CODE (arg0) == COND_EXPR)
7875 {
7876 tree arg01 = TREE_OPERAND (arg0, 1);
7877 tree arg02 = TREE_OPERAND (arg0, 2);
7878 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7879 arg01 = fold_build1_loc (loc, code, type,
7880 fold_convert_loc (loc,
7881 TREE_TYPE (op0), arg01));
7882 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7883 arg02 = fold_build1_loc (loc, code, type,
7884 fold_convert_loc (loc,
7885 TREE_TYPE (op0), arg02));
7886 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7887 arg01, arg02);
7888
7889 /* If this was a conversion, and all we did was to move it
7890 inside the COND_EXPR, bring it back out. But leave it if
7891 it is a conversion from integer to integer and the
7892 result precision is no wider than a word since such a
7893 conversion is cheap and may be optimized away by combine,
7894 while it couldn't if it were outside the COND_EXPR. Then return
7895 so we don't get into an infinite recursion loop taking the
7896 conversion out and then back in. */
7897
7898 if ((CONVERT_EXPR_CODE_P (code)
7899 || code == NON_LVALUE_EXPR)
7900 && TREE_CODE (tem) == COND_EXPR
7901 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7902 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7903 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7904 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7905 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7906 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7907 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7908 && (INTEGRAL_TYPE_P
7909 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7910 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7911 || flag_syntax_only))
7912 tem = build1_loc (loc, code, type,
7913 build3 (COND_EXPR,
7914 TREE_TYPE (TREE_OPERAND
7915 (TREE_OPERAND (tem, 1), 0)),
7916 TREE_OPERAND (tem, 0),
7917 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7918 TREE_OPERAND (TREE_OPERAND (tem, 2),
7919 0)));
7920 return tem;
7921 }
7922 }
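/* Summary of the block above (an illustrative sketch): a unary
   operation applied to a COND_EXPR is distributed into both arms,
   e.g. ABS_EXPR <c ? x : y> becomes c ? ABS_EXPR <x> : ABS_EXPR <y>,
   with cheap integral conversions hoisted back out afterwards.  */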
7923
7924 switch (code)
7925 {
7926 case PAREN_EXPR:
7927 /* Re-association barriers around constants and other re-association
7928 barriers can be removed. */
7929 if (CONSTANT_CLASS_P (op0)
7930 || TREE_CODE (op0) == PAREN_EXPR)
7931 return fold_convert_loc (loc, type, op0);
7932 return NULL_TREE;
7933
7934 CASE_CONVERT:
7935 case FLOAT_EXPR:
7936 case FIX_TRUNC_EXPR:
7937 if (TREE_TYPE (op0) == type)
7938 return op0;
7939
7940 if (COMPARISON_CLASS_P (op0))
7941 {
7942 /* If we have (type) (a CMP b) and type is an integral type, return
7943 a new expression involving the new type. Canonicalize
7944 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7945 a non-integral type.
7946 Do not fold the result, as that would not simplify further;
7947 folding it again would also lead to infinite recursion. */
7948 if (TREE_CODE (type) == BOOLEAN_TYPE)
7949 return build2_loc (loc, TREE_CODE (op0), type,
7950 TREE_OPERAND (op0, 0),
7951 TREE_OPERAND (op0, 1));
7952 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7953 && TREE_CODE (type) != VECTOR_TYPE)
7954 return build3_loc (loc, COND_EXPR, type, op0,
7955 constant_boolean_node (true, type),
7956 constant_boolean_node (false, type));
7957 }
7958
7959 /* Handle cases of two conversions in a row. */
7960 if (CONVERT_EXPR_P (op0))
7961 {
7962 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7963 tree inter_type = TREE_TYPE (op0);
7964 int inside_int = INTEGRAL_TYPE_P (inside_type);
7965 int inside_ptr = POINTER_TYPE_P (inside_type);
7966 int inside_float = FLOAT_TYPE_P (inside_type);
7967 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7968 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7969 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7970 int inter_int = INTEGRAL_TYPE_P (inter_type);
7971 int inter_ptr = POINTER_TYPE_P (inter_type);
7972 int inter_float = FLOAT_TYPE_P (inter_type);
7973 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7974 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7975 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7976 int final_int = INTEGRAL_TYPE_P (type);
7977 int final_ptr = POINTER_TYPE_P (type);
7978 int final_float = FLOAT_TYPE_P (type);
7979 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7980 unsigned int final_prec = TYPE_PRECISION (type);
7981 int final_unsignedp = TYPE_UNSIGNED (type);
7982
7983 /* In addition to the cases of two conversions in a row
7984 handled below, if we are converting something to its own
7985 type via an object of identical or wider precision, neither
7986 conversion is needed. */
7987 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7988 && (((inter_int || inter_ptr) && final_int)
7989 || (inter_float && final_float))
7990 && inter_prec >= final_prec)
7991 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7992
7993 /* Likewise, if the intermediate and initial types are either both
7994 float or both integer, we don't need the middle conversion if the
7995 former is wider than the latter and doesn't change the signedness
7996 (for integers). Avoid this if the final type is a pointer since
7997 then we sometimes need the middle conversion. Likewise if the
7998 final type has a precision not equal to the size of its mode. */
7999 if (((inter_int && inside_int)
8000 || (inter_float && inside_float)
8001 || (inter_vec && inside_vec))
8002 && inter_prec >= inside_prec
8003 && (inter_float || inter_vec
8004 || inter_unsignedp == inside_unsignedp)
8005 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8006 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8007 && ! final_ptr
8008 && (! final_vec || inter_prec == inside_prec))
8009 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8010
8011 /* If we have a sign-extension of a zero-extended value, we can
8012 replace that by a single zero-extension. Likewise if the
8013 final conversion does not change precision we can drop the
8014 intermediate conversion. */
8015 if (inside_int && inter_int && final_int
8016 && ((inside_prec < inter_prec && inter_prec < final_prec
8017 && inside_unsignedp && !inter_unsignedp)
8018 || final_prec == inter_prec))
8019 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8020
8021 /* Two conversions in a row are not needed unless:
8022 - some conversion is floating-point (overstrict for now), or
8023 - some conversion is a vector (overstrict for now), or
8024 - the intermediate type is narrower than both initial and
8025 final, or
8026 - the intermediate type and innermost type differ in signedness,
8027 and the outermost type is wider than the intermediate, or
8028 - the initial type is a pointer type and the precisions of the
8029 intermediate and final types differ, or
8030 - the final type is a pointer type and the precisions of the
8031 initial and intermediate types differ. */
8032 if (! inside_float && ! inter_float && ! final_float
8033 && ! inside_vec && ! inter_vec && ! final_vec
8034 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8035 && ! (inside_int && inter_int
8036 && inter_unsignedp != inside_unsignedp
8037 && inter_prec < final_prec)
8038 && ((inter_unsignedp && inter_prec > inside_prec)
8039 == (final_unsignedp && final_prec > inter_prec))
8040 && ! (inside_ptr && inter_prec != final_prec)
8041 && ! (final_ptr && inside_prec != inter_prec)
8042 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8043 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8044 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8045 }
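/* Example of the rules above (hypothetical types): for x of type
   unsigned char, (int) (short) x is a sign-extension of a
   zero-extended value and collapses to the single zero-extension
   (int) x.  */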
8046
8047 /* Handle (T *)&A.B.C for A being of type T and B and C
8048 living at offset zero. This occurs frequently in
8049 C++ upcasting and then accessing the base. */
8050 if (TREE_CODE (op0) == ADDR_EXPR
8051 && POINTER_TYPE_P (type)
8052 && handled_component_p (TREE_OPERAND (op0, 0)))
8053 {
8054 HOST_WIDE_INT bitsize, bitpos;
8055 tree offset;
8056 enum machine_mode mode;
8057 int unsignedp, volatilep;
8058 tree base = TREE_OPERAND (op0, 0);
8059 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8060 &mode, &unsignedp, &volatilep, false);
8061 /* If the reference was to a (constant) zero offset, we can use
8062 the address of the base if it has the same base type
8063 as the result type and the pointer type is unqualified. */
8064 if (! offset && bitpos == 0
8065 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8066 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8067 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8068 return fold_convert_loc (loc, type,
8069 build_fold_addr_expr_loc (loc, base));
8070 }
8071
8072 if (TREE_CODE (op0) == MODIFY_EXPR
8073 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8074 /* Detect assigning a bitfield. */
8075 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8076 && DECL_BIT_FIELD
8077 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8078 {
8079 /* Don't leave an assignment inside a conversion
8080 unless assigning a bitfield. */
8081 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8082 /* First do the assignment, then return converted constant. */
8083 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8084 TREE_NO_WARNING (tem) = 1;
8085 TREE_USED (tem) = 1;
8086 return tem;
8087 }
8088
8089 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8090 constant (if x has signed type, the sign bit cannot be set
8091 in c). This folds the extension into the BIT_AND_EXPR.
8092 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8093 very likely don't have maximal range for their precision and this
8094 transformation effectively doesn't preserve non-maximal ranges. */
8095 if (TREE_CODE (type) == INTEGER_TYPE
8096 && TREE_CODE (op0) == BIT_AND_EXPR
8097 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8098 {
8099 tree and_expr = op0;
8100 tree and0 = TREE_OPERAND (and_expr, 0);
8101 tree and1 = TREE_OPERAND (and_expr, 1);
8102 int change = 0;
8103
8104 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8105 || (TYPE_PRECISION (type)
8106 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8107 change = 1;
8108 else if (TYPE_PRECISION (TREE_TYPE (and1))
8109 <= HOST_BITS_PER_WIDE_INT
8110 && host_integerp (and1, 1))
8111 {
8112 unsigned HOST_WIDE_INT cst;
8113
8114 cst = tree_low_cst (and1, 1);
8115 cst &= HOST_WIDE_INT_M1U
8116 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8117 change = (cst == 0);
8118 #ifdef LOAD_EXTEND_OP
8119 if (change
8120 && !flag_syntax_only
8121 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8122 == ZERO_EXTEND))
8123 {
8124 tree uns = unsigned_type_for (TREE_TYPE (and0));
8125 and0 = fold_convert_loc (loc, uns, and0);
8126 and1 = fold_convert_loc (loc, uns, and1);
8127 }
8128 #endif
8129 }
8130 if (change)
8131 {
8132 tem = force_fit_type_double (type, tree_to_double_int (and1),
8133 0, TREE_OVERFLOW (and1));
8134 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8135 fold_convert_loc (loc, type, and0), tem);
8136 }
8137 }
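/* Illustrative case (hypothetical): for "signed char x",
   (unsigned int) (x & 0x3f) becomes (unsigned int) x & 0x3f here;
   the mask clears the sign bit, so extending before or after the
   BIT_AND_EXPR yields the same value.  */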
8138
8139 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8140 when one of the new casts will fold away. Conservatively we assume
8141 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8142 if (POINTER_TYPE_P (type)
8143 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8144 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8145 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8146 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8147 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8148 {
8149 tree arg00 = TREE_OPERAND (arg0, 0);
8150 tree arg01 = TREE_OPERAND (arg0, 1);
8151
8152 return fold_build_pointer_plus_loc
8153 (loc, fold_convert_loc (loc, type, arg00), arg01);
8154 }
8155
8156 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8157 of the same precision, and X is an integer type not narrower than
8158 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8159 if (INTEGRAL_TYPE_P (type)
8160 && TREE_CODE (op0) == BIT_NOT_EXPR
8161 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8162 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8163 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8164 {
8165 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8166 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8167 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8168 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8169 fold_convert_loc (loc, type, tem));
8170 }
8171
8172 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8173 type of X and Y (integer types only). */
8174 if (INTEGRAL_TYPE_P (type)
8175 && TREE_CODE (op0) == MULT_EXPR
8176 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8177 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8178 {
8179 /* Be careful not to introduce new overflows. */
8180 tree mult_type;
8181 if (TYPE_OVERFLOW_WRAPS (type))
8182 mult_type = type;
8183 else
8184 mult_type = unsigned_type_for (type);
8185
8186 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8187 {
8188 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8189 fold_convert_loc (loc, mult_type,
8190 TREE_OPERAND (op0, 0)),
8191 fold_convert_loc (loc, mult_type,
8192 TREE_OPERAND (op0, 1)));
8193 return fold_convert_loc (loc, type, tem);
8194 }
8195 }
8196
8197 tem = fold_convert_const (code, type, op0);
8198 return tem ? tem : NULL_TREE;
8199
8200 case ADDR_SPACE_CONVERT_EXPR:
8201 if (integer_zerop (arg0))
8202 return fold_convert_const (code, type, arg0);
8203 return NULL_TREE;
8204
8205 case FIXED_CONVERT_EXPR:
8206 tem = fold_convert_const (code, type, arg0);
8207 return tem ? tem : NULL_TREE;
8208
8209 case VIEW_CONVERT_EXPR:
8210 if (TREE_TYPE (op0) == type)
8211 return op0;
8212 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8213 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8214 type, TREE_OPERAND (op0, 0));
8215 if (TREE_CODE (op0) == MEM_REF)
8216 return fold_build2_loc (loc, MEM_REF, type,
8217 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8218
8219 /* For integral conversions with the same precision or pointer
8220 conversions use a NOP_EXPR instead. */
8221 if ((INTEGRAL_TYPE_P (type)
8222 || POINTER_TYPE_P (type))
8223 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8224 || POINTER_TYPE_P (TREE_TYPE (op0)))
8225 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8226 return fold_convert_loc (loc, type, op0);
8227
8228 /* Strip inner integral conversions that do not change the precision. */
8229 if (CONVERT_EXPR_P (op0)
8230 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8231 || POINTER_TYPE_P (TREE_TYPE (op0)))
8232 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8233 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8234 && (TYPE_PRECISION (TREE_TYPE (op0))
8235 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8236 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8237 type, TREE_OPERAND (op0, 0));
8238
8239 return fold_view_convert_expr (type, op0);
8240
8241 case NEGATE_EXPR:
8242 tem = fold_negate_expr (loc, arg0);
8243 if (tem)
8244 return fold_convert_loc (loc, type, tem);
8245 return NULL_TREE;
8246
8247 case ABS_EXPR:
8248 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8249 return fold_abs_const (arg0, type);
8250 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8251 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8252 /* Convert fabs((double)float) into (double)fabsf(float). */
8253 else if (TREE_CODE (arg0) == NOP_EXPR
8254 && TREE_CODE (type) == REAL_TYPE)
8255 {
8256 tree targ0 = strip_float_extensions (arg0);
8257 if (targ0 != arg0)
8258 return fold_convert_loc (loc, type,
8259 fold_build1_loc (loc, ABS_EXPR,
8260 TREE_TYPE (targ0),
8261 targ0));
8262 }
8263 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8264 else if (TREE_CODE (arg0) == ABS_EXPR)
8265 return arg0;
8266 else if (tree_expr_nonnegative_p (arg0))
8267 return arg0;
8268
8269 /* Strip sign ops from argument. */
8270 if (TREE_CODE (type) == REAL_TYPE)
8271 {
8272 tem = fold_strip_sign_ops (arg0);
8273 if (tem)
8274 return fold_build1_loc (loc, ABS_EXPR, type,
8275 fold_convert_loc (loc, type, tem));
8276 }
8277 return NULL_TREE;
8278
8279 case CONJ_EXPR:
8280 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8281 return fold_convert_loc (loc, type, arg0);
8282 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8283 {
8284 tree itype = TREE_TYPE (type);
8285 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8286 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8287 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8288 negate_expr (ipart));
8289 }
8290 if (TREE_CODE (arg0) == COMPLEX_CST)
8291 {
8292 tree itype = TREE_TYPE (type);
8293 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8294 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8295 return build_complex (type, rpart, negate_expr (ipart));
8296 }
8297 if (TREE_CODE (arg0) == CONJ_EXPR)
8298 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8299 return NULL_TREE;
8300
8301 case BIT_NOT_EXPR:
8302 if (TREE_CODE (arg0) == INTEGER_CST)
8303 return fold_not_const (arg0, type);
8304 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8305 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8306 /* Convert ~ (-A) to A - 1. */
8307 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8308 return fold_build2_loc (loc, MINUS_EXPR, type,
8309 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8310 build_int_cst (type, 1));
8311 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8312 else if (INTEGRAL_TYPE_P (type)
8313 && ((TREE_CODE (arg0) == MINUS_EXPR
8314 && integer_onep (TREE_OPERAND (arg0, 1)))
8315 || (TREE_CODE (arg0) == PLUS_EXPR
8316 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8317 return fold_build1_loc (loc, NEGATE_EXPR, type,
8318 fold_convert_loc (loc, type,
8319 TREE_OPERAND (arg0, 0)));
8320 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8321 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8322 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8323 fold_convert_loc (loc, type,
8324 TREE_OPERAND (arg0, 0)))))
8325 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8326 fold_convert_loc (loc, type,
8327 TREE_OPERAND (arg0, 1)));
8328 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8329 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8330 fold_convert_loc (loc, type,
8331 TREE_OPERAND (arg0, 1)))))
8332 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8333 fold_convert_loc (loc, type,
8334 TREE_OPERAND (arg0, 0)), tem);
8335 /* Perform BIT_NOT_EXPR on each element individually. */
8336 else if (TREE_CODE (arg0) == VECTOR_CST)
8337 {
8338 tree *elements;
8339 tree elem;
8340 unsigned count = VECTOR_CST_NELTS (arg0), i;
8341
8342 elements = XALLOCAVEC (tree, count);
8343 for (i = 0; i < count; i++)
8344 {
8345 elem = VECTOR_CST_ELT (arg0, i);
8346 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8347 if (elem == NULL_TREE)
8348 break;
8349 elements[i] = elem;
8350 }
8351 if (i == count)
8352 return build_vector (type, elements);
8353 }
8354 else if (COMPARISON_CLASS_P (arg0)
8355 && (VECTOR_TYPE_P (type)
8356 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8357 {
8358 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8359 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8360 HONOR_NANS (TYPE_MODE (op_type)));
8361 if (subcode != ERROR_MARK)
8362 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8363 TREE_OPERAND (arg0, 1));
8364 }
8365
8366
8367 return NULL_TREE;
8368
8369 case TRUTH_NOT_EXPR:
8370 /* Note that the operand of this must be an int
8371 and its values must be 0 or 1.
8372 ("true" is a fixed value perhaps depending on the language,
8373 but we don't handle values other than 1 correctly yet.) */
8374 tem = fold_truth_not_expr (loc, arg0);
8375 if (!tem)
8376 return NULL_TREE;
8377 return fold_convert_loc (loc, type, tem);
8378
8379 case REALPART_EXPR:
8380 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8381 return fold_convert_loc (loc, type, arg0);
8382 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8383 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8384 TREE_OPERAND (arg0, 1));
8385 if (TREE_CODE (arg0) == COMPLEX_CST)
8386 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8387 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8388 {
8389 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8390 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8391 fold_build1_loc (loc, REALPART_EXPR, itype,
8392 TREE_OPERAND (arg0, 0)),
8393 fold_build1_loc (loc, REALPART_EXPR, itype,
8394 TREE_OPERAND (arg0, 1)));
8395 return fold_convert_loc (loc, type, tem);
8396 }
8397 if (TREE_CODE (arg0) == CONJ_EXPR)
8398 {
8399 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8400 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8401 TREE_OPERAND (arg0, 0));
8402 return fold_convert_loc (loc, type, tem);
8403 }
8404 if (TREE_CODE (arg0) == CALL_EXPR)
8405 {
8406 tree fn = get_callee_fndecl (arg0);
8407 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8408 switch (DECL_FUNCTION_CODE (fn))
8409 {
8410 CASE_FLT_FN (BUILT_IN_CEXPI):
8411 fn = mathfn_built_in (type, BUILT_IN_COS);
8412 if (fn)
8413 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8414 break;
8415
8416 default:
8417 break;
8418 }
8419 }
8420 return NULL_TREE;
8421
8422 case IMAGPART_EXPR:
8423 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8424 return build_zero_cst (type);
8425 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8426 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8427 TREE_OPERAND (arg0, 0));
8428 if (TREE_CODE (arg0) == COMPLEX_CST)
8429 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8430 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8431 {
8432 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8433 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8434 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8435 TREE_OPERAND (arg0, 0)),
8436 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8437 TREE_OPERAND (arg0, 1)));
8438 return fold_convert_loc (loc, type, tem);
8439 }
8440 if (TREE_CODE (arg0) == CONJ_EXPR)
8441 {
8442 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8443 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8444 return fold_convert_loc (loc, type, negate_expr (tem));
8445 }
8446 if (TREE_CODE (arg0) == CALL_EXPR)
8447 {
8448 tree fn = get_callee_fndecl (arg0);
8449 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8450 switch (DECL_FUNCTION_CODE (fn))
8451 {
8452 CASE_FLT_FN (BUILT_IN_CEXPI):
8453 fn = mathfn_built_in (type, BUILT_IN_SIN);
8454 if (fn)
8455 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8456 break;
8457
8458 default:
8459 break;
8460 }
8461 }
8462 return NULL_TREE;
8463
8464 case INDIRECT_REF:
8465 /* Fold *&X to X if X is an lvalue. */
8466 if (TREE_CODE (op0) == ADDR_EXPR)
8467 {
8468 tree op00 = TREE_OPERAND (op0, 0);
8469 if ((TREE_CODE (op00) == VAR_DECL
8470 || TREE_CODE (op00) == PARM_DECL
8471 || TREE_CODE (op00) == RESULT_DECL)
8472 && !TREE_READONLY (op00))
8473 return op00;
8474 }
8475 return NULL_TREE;
8476
8477 case VEC_UNPACK_LO_EXPR:
8478 case VEC_UNPACK_HI_EXPR:
8479 case VEC_UNPACK_FLOAT_LO_EXPR:
8480 case VEC_UNPACK_FLOAT_HI_EXPR:
8481 {
8482 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8483 tree *elts;
8484 enum tree_code subcode;
8485
8486 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8487 if (TREE_CODE (arg0) != VECTOR_CST)
8488 return NULL_TREE;
8489
8490 elts = XALLOCAVEC (tree, nelts * 2);
8491 if (!vec_cst_ctor_to_array (arg0, elts))
8492 return NULL_TREE;
8493
8494 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8495 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8496 elts += nelts;
8497
8498 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8499 subcode = NOP_EXPR;
8500 else
8501 subcode = FLOAT_EXPR;
8502
8503 for (i = 0; i < nelts; i++)
8504 {
8505 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8506 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8507 return NULL_TREE;
8508 }
8509
8510 return build_vector (type, elts);
8511 }
8512
8513 case REDUC_MIN_EXPR:
8514 case REDUC_MAX_EXPR:
8515 case REDUC_PLUS_EXPR:
8516 {
8517 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8518 tree *elts;
8519 enum tree_code subcode;
8520
8521 if (TREE_CODE (op0) != VECTOR_CST)
8522 return NULL_TREE;
8523
8524 elts = XALLOCAVEC (tree, nelts);
8525 if (!vec_cst_ctor_to_array (op0, elts))
8526 return NULL_TREE;
8527
8528 switch (code)
8529 {
8530 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8531 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8532 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8533 default: gcc_unreachable ();
8534 }
8535
8536 for (i = 1; i < nelts; i++)
8537 {
8538 elts[0] = const_binop (subcode, elts[0], elts[i]);
8539 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8540 return NULL_TREE;
8541 elts[i] = build_zero_cst (TREE_TYPE (type));
8542 }
8543
8544 return build_vector (type, elts);
8545 }
8546
8547 default:
8548 return NULL_TREE;
8549 } /* switch (code) */
8550 }
8551
8552
8553 /* If the operation was a conversion do _not_ mark a resulting constant
8554 with TREE_OVERFLOW if the original constant was not. These conversions
8555 have implementation defined behavior and retaining the TREE_OVERFLOW
8556 flag here would confuse later passes such as VRP. */
8557 tree
8558 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8559 tree type, tree op0)
8560 {
8561 tree res = fold_unary_loc (loc, code, type, op0);
8562 if (res
8563 && TREE_CODE (res) == INTEGER_CST
8564 && TREE_CODE (op0) == INTEGER_CST
8565 && CONVERT_EXPR_CODE_P (code))
8566 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8567
8568 return res;
8569 }
8570
8571 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8572 operands OP0 and OP1. LOC is the location of the resulting expression.
8573 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8574 Return the folded expression if folding is successful. Otherwise,
8575 return NULL_TREE. */
8576 static tree
8577 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8578 tree arg0, tree arg1, tree op0, tree op1)
8579 {
8580 tree tem;
8581
8582 /* We only do these simplifications if we are optimizing. */
8583 if (!optimize)
8584 return NULL_TREE;
8585
8586 /* Check for things like (A || B) && (A || C). We can convert this
8587 to A || (B && C). Note that either operator can be any of the four
8588 truth and/or operations and the transformation will still be
8589 valid. Also note that we only care about order for the
8590 ANDIF and ORIF operators. If B contains side effects, this
8591 might change the truth-value of A. */
8592 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8593 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8594 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8595 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8596 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8597 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8598 {
8599 tree a00 = TREE_OPERAND (arg0, 0);
8600 tree a01 = TREE_OPERAND (arg0, 1);
8601 tree a10 = TREE_OPERAND (arg1, 0);
8602 tree a11 = TREE_OPERAND (arg1, 1);
8603 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8604 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8605 && (code == TRUTH_AND_EXPR
8606 || code == TRUTH_OR_EXPR));
8607
8608 if (operand_equal_p (a00, a10, 0))
8609 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8610 fold_build2_loc (loc, code, type, a01, a11));
8611 else if (commutative && operand_equal_p (a00, a11, 0))
8612 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8613 fold_build2_loc (loc, code, type, a01, a10));
8614 else if (commutative && operand_equal_p (a01, a10, 0))
8615 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8616 fold_build2_loc (loc, code, type, a00, a11));
8617
8618 /* This case is tricky because we must either have commutative
8619 operators or else A10 must not have side-effects. */
8620
8621 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8622 && operand_equal_p (a01, a11, 0))
8623 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8624 fold_build2_loc (loc, code, type, a00, a10),
8625 a01);
8626 }
8627
8628 /* See if we can build a range comparison. */
8629 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8630 return tem;
8631
8632 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8633 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8634 {
8635 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8636 if (tem)
8637 return fold_build2_loc (loc, code, type, tem, arg1);
8638 }
8639
8640 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8641 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8642 {
8643 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8644 if (tem)
8645 return fold_build2_loc (loc, code, type, arg0, tem);
8646 }
8647
8648 /* Check for the possibility of merging component references. If our
8649 lhs is another similar operation, try to merge its rhs with our
8650 rhs. Then try to merge our lhs and rhs. */
8651 if (TREE_CODE (arg0) == code
8652 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8653 TREE_OPERAND (arg0, 1), arg1)))
8654 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8655
8656 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8657 return tem;
8658
8659 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8660 && (code == TRUTH_AND_EXPR
8661 || code == TRUTH_ANDIF_EXPR
8662 || code == TRUTH_OR_EXPR
8663 || code == TRUTH_ORIF_EXPR))
8664 {
8665 enum tree_code ncode, icode;
8666
8667 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8668 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8669 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8670
8671 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8672 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8673 We don't want to pack more than two leaves into a non-IF AND/OR
8674 expression.
8675 If the tree code of the left-hand operand isn't an AND/OR-IF code
8676 and isn't equal to IF-CODE, then we don't want to add the
8677 right-hand operand.  If the inner right-hand side of the
8678 left-hand operand has side effects, or isn't simple, then we
8679 can't add to it, as otherwise we might destroy the if-sequence. */
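/* For example, with simple (non-trapping, side-effect-free) b and c,
   ((a ANDIF b) AND c) becomes (a ANDIF (b AND c)): a still guards the
   evaluation, and b AND c may be evaluated unconditionally precisely
   because both of its operands are simple. */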
8680 if (TREE_CODE (arg0) == icode
8681 && simple_operand_p_2 (arg1)
8682 /* Needed for sequence points to handle trapping
8683 operations and side effects. */
8684 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8685 {
8686 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8687 arg1);
8688 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8689 tem);
8690 }
8691 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8692 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8693 else if (TREE_CODE (arg1) == icode
8694 && simple_operand_p_2 (arg0)
8695 /* Needed for sequence points to handle trapping
8696 operations and side effects. */
8697 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8698 {
8699 tem = fold_build2_loc (loc, ncode, type,
8700 arg0, TREE_OPERAND (arg1, 0));
8701 return fold_build2_loc (loc, icode, type, tem,
8702 TREE_OPERAND (arg1, 1));
8703 }
8704 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8705 into (A OR B).
8706 For sequence point consistency, we need to check for trapping,
8707 and side-effects. */
8708 else if (code == icode && simple_operand_p_2 (arg0)
8709 && simple_operand_p_2 (arg1))
8710 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8711 }
8712
8713 return NULL_TREE;
8714 }
8715
8716 /* Fold a binary expression of code CODE and type TYPE with operands
8717 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8718 Return the folded expression if folding is successful. Otherwise,
8719 return NULL_TREE. */
8720
8721 static tree
8722 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8723 {
8724 enum tree_code compl_code;
8725
8726 if (code == MIN_EXPR)
8727 compl_code = MAX_EXPR;
8728 else if (code == MAX_EXPR)
8729 compl_code = MIN_EXPR;
8730 else
8731 gcc_unreachable ();
8732
8733 /* MIN (MAX (a, b), b) == b. */
8734 if (TREE_CODE (op0) == compl_code
8735 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8736 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8737
8738 /* MIN (MAX (b, a), b) == b. */
8739 if (TREE_CODE (op0) == compl_code
8740 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8741 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8742 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8743
8744 /* MIN (a, MAX (a, b)) == a. */
8745 if (TREE_CODE (op1) == compl_code
8746 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8747 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8748 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8749
8750 /* MIN (a, MAX (b, a)) == a. */
8751 if (TREE_CODE (op1) == compl_code
8752 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8753 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8754 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8755
8756 return NULL_TREE;
8757 }
8758
8759 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8760 by changing CODE to reduce the magnitude of constants involved in
8761 ARG0 of the comparison.
8762 Returns a canonicalized comparison tree if a simplification was
8763 possible, otherwise returns NULL_TREE.
8764 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8765 valid if signed overflow is undefined. */
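/* A worked example, assuming int A with undefined signed overflow:
   A + 2 > arg1 is rewritten as A + 1 >= arg1, and 5 <= arg1 is
   rewritten (and swapped) as arg1 > 4. */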
8766
8767 static tree
8768 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8769 tree arg0, tree arg1,
8770 bool *strict_overflow_p)
8771 {
8772 enum tree_code code0 = TREE_CODE (arg0);
8773 tree t, cst0 = NULL_TREE;
8774 int sgn0;
8775 bool swap = false;
8776
8777 /* Match A +- CST code arg1 and CST code arg1. We can change the
8778 first form only if overflow is undefined. */
8779 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8780 /* In principle pointers also have undefined overflow behavior,
8781 but that causes problems elsewhere. */
8782 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8783 && (code0 == MINUS_EXPR
8784 || code0 == PLUS_EXPR)
8785 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8786 || code0 == INTEGER_CST))
8787 return NULL_TREE;
8788
8789 /* Identify the constant in arg0 and its sign. */
8790 if (code0 == INTEGER_CST)
8791 cst0 = arg0;
8792 else
8793 cst0 = TREE_OPERAND (arg0, 1);
8794 sgn0 = tree_int_cst_sgn (cst0);
8795
8796 /* Overflowed constants and zero will cause problems. */
8797 if (integer_zerop (cst0)
8798 || TREE_OVERFLOW (cst0))
8799 return NULL_TREE;
8800
8801 /* See if we can reduce the magnitude of the constant in
8802 arg0 by changing the comparison code. */
8803 if (code0 == INTEGER_CST)
8804 {
8805 /* CST <= arg1 -> CST-1 < arg1. */
8806 if (code == LE_EXPR && sgn0 == 1)
8807 code = LT_EXPR;
8808 /* -CST < arg1 -> -CST-1 <= arg1. */
8809 else if (code == LT_EXPR && sgn0 == -1)
8810 code = LE_EXPR;
8811 /* CST > arg1 -> CST-1 >= arg1. */
8812 else if (code == GT_EXPR && sgn0 == 1)
8813 code = GE_EXPR;
8814 /* -CST >= arg1 -> -CST-1 > arg1. */
8815 else if (code == GE_EXPR && sgn0 == -1)
8816 code = GT_EXPR;
8817 else
8818 return NULL_TREE;
8819 /* arg1 code' CST' might be more canonical. */
8820 swap = true;
8821 }
8822 else
8823 {
8824 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8825 if (code == LT_EXPR
8826 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8827 code = LE_EXPR;
8828 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8829 else if (code == GT_EXPR
8830 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8831 code = GE_EXPR;
8832 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8833 else if (code == LE_EXPR
8834 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8835 code = LT_EXPR;
8836 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8837 else if (code == GE_EXPR
8838 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8839 code = GT_EXPR;
8840 else
8841 return NULL_TREE;
8842 *strict_overflow_p = true;
8843 }
8844
8845 /* Now build the constant reduced in magnitude. But not if that
8846 would produce one outside of its type's range. */
8847 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8848 && ((sgn0 == 1
8849 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8850 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8851 || (sgn0 == -1
8852 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8853 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8854 /* We cannot swap the comparison here as that would cause us to
8855 endlessly recurse. */
8856 return NULL_TREE;
8857
8858 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8859 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8860 if (code0 != INTEGER_CST)
8861 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8862 t = fold_convert (TREE_TYPE (arg1), t);
8863
8864 /* If swapping might yield a more canonical form, do so. */
8865 if (swap)
8866 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8867 else
8868 return fold_build2_loc (loc, code, type, t, arg1);
8869 }
8870
8871 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8872 overflow further. Try to decrease the magnitude of constants involved
8873 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8874 and put sole constants at the second argument position.
8875 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8876
8877 static tree
8878 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8879 tree arg0, tree arg1)
8880 {
8881 tree t;
8882 bool strict_overflow_p;
8883 const char * const warnmsg = G_("assuming signed overflow does not occur "
8884 "when reducing constant in comparison");
8885
8886 /* Try canonicalization by simplifying arg0. */
8887 strict_overflow_p = false;
8888 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8889 &strict_overflow_p);
8890 if (t)
8891 {
8892 if (strict_overflow_p)
8893 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8894 return t;
8895 }
8896
8897 /* Try canonicalization by simplifying arg1 using the swapped
8898 comparison. */
8899 code = swap_tree_comparison (code);
8900 strict_overflow_p = false;
8901 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8902 &strict_overflow_p);
8903 if (t && strict_overflow_p)
8904 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8905 return t;
8906 }
8907
8908 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8909 space. This is used to avoid issuing overflow warnings for
8910 expressions like &p->x which cannot wrap. */
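/* Illustrative example: for BASE == &s where s is a 16-byte object,
   OFFSET == NULL_TREE and BITPOS == 64, the access lies 8 bytes into
   the object and this returns false; a total offset beyond the
   object's size would return true. */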
8911
8912 static bool
8913 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8914 {
8915 double_int di_offset, total;
8916
8917 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8918 return true;
8919
8920 if (bitpos < 0)
8921 return true;
8922
8923 if (offset == NULL_TREE)
8924 di_offset = double_int_zero;
8925 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8926 return true;
8927 else
8928 di_offset = TREE_INT_CST (offset);
8929
8930 bool overflow;
8931 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8932 total = di_offset.add_with_sign (units, true, &overflow);
8933 if (overflow)
8934 return true;
8935
8936 if (total.high != 0)
8937 return true;
8938
8939 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8940 if (size <= 0)
8941 return true;
8942
8943 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8944 array. */
8945 if (TREE_CODE (base) == ADDR_EXPR)
8946 {
8947 HOST_WIDE_INT base_size;
8948
8949 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8950 if (base_size > 0 && size < base_size)
8951 size = base_size;
8952 }
8953
8954 return total.low > (unsigned HOST_WIDE_INT) size;
8955 }
8956
8957 /* Subroutine of fold_binary. This routine performs all of the
8958 transformations that are common to the equality/inequality
8959 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8960 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8961 fold_binary should call fold_binary instead. Fold a comparison with
8962 tree code CODE and type TYPE with operands OP0 and OP1. Return
8963 the folded comparison or NULL_TREE. */
8964
8965 static tree
8966 fold_comparison (location_t loc, enum tree_code code, tree type,
8967 tree op0, tree op1)
8968 {
8969 tree arg0, arg1, tem;
8970
8971 arg0 = op0;
8972 arg1 = op1;
8973
8974 STRIP_SIGN_NOPS (arg0);
8975 STRIP_SIGN_NOPS (arg1);
8976
8977 tem = fold_relational_const (code, type, arg0, arg1);
8978 if (tem != NULL_TREE)
8979 return tem;
8980
8981 /* If one arg is a real or integer constant, put it last. */
8982 if (tree_swap_operands_p (arg0, arg1, true))
8983 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8984
8985 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
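/* E.g., assuming undefined signed overflow on int x:
   x + 5 < 10 --> x < 5
   x - 3 == 7 --> x == 10 */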
8986 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8987 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8988 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8989 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8990 && (TREE_CODE (arg1) == INTEGER_CST
8991 && !TREE_OVERFLOW (arg1)))
8992 {
8993 tree const1 = TREE_OPERAND (arg0, 1);
8994 tree const2 = arg1;
8995 tree variable = TREE_OPERAND (arg0, 0);
8996 tree lhs;
8997 int lhs_add;
8998 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8999
9000 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9001 TREE_TYPE (arg1), const2, const1);
9002
9003 /* If the constant operation overflowed, this can be
9004 simplified as a comparison against INT_MAX/INT_MIN. */
9005 if (TREE_CODE (lhs) == INTEGER_CST
9006 && TREE_OVERFLOW (lhs))
9007 {
9008 int const1_sgn = tree_int_cst_sgn (const1);
9009 enum tree_code code2 = code;
9010
9011 /* Get the sign of the constant on the lhs as if the
9012 operation were VARIABLE + CONST1. */
9013 if (TREE_CODE (arg0) == MINUS_EXPR)
9014 const1_sgn = -const1_sgn;
9015
9016 /* The sign of the constant determines if we overflowed
9017 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9018 Canonicalize to the INT_MIN overflow by swapping the comparison
9019 if necessary. */
9020 if (const1_sgn == -1)
9021 code2 = swap_tree_comparison (code);
9022
9023 /* We now can look at the canonicalized case
9024 VARIABLE + 1 CODE2 INT_MIN
9025 and decide on the result. */
9026 if (code2 == LT_EXPR
9027 || code2 == LE_EXPR
9028 || code2 == EQ_EXPR)
9029 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9030 else if (code2 == NE_EXPR
9031 || code2 == GE_EXPR
9032 || code2 == GT_EXPR)
9033 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9034 }
9035
9036 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9037 && (TREE_CODE (lhs) != INTEGER_CST
9038 || !TREE_OVERFLOW (lhs)))
9039 {
9040 if (code != EQ_EXPR && code != NE_EXPR)
9041 fold_overflow_warning ("assuming signed overflow does not occur "
9042 "when changing X +- C1 cmp C2 to "
9043 "X cmp C1 +- C2",
9044 WARN_STRICT_OVERFLOW_COMPARISON);
9045 return fold_build2_loc (loc, code, type, variable, lhs);
9046 }
9047 }
9048
9049 /* For comparisons of pointers we can decompose them into a compile-time
9050 comparison of the base objects and the offsets into the object.
9051 This requires at least one operand being an ADDR_EXPR or a
9052 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9053 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9054 && (TREE_CODE (arg0) == ADDR_EXPR
9055 || TREE_CODE (arg1) == ADDR_EXPR
9056 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9057 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9058 {
9059 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9060 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9061 enum machine_mode mode;
9062 int volatilep, unsignedp;
9063 bool indirect_base0 = false, indirect_base1 = false;
9064
9065 /* Get base and offset for the access. Strip ADDR_EXPR for
9066 get_inner_reference, but put it back by stripping INDIRECT_REF
9067 off the base object if possible. indirect_baseN will be true
9068 if baseN is not an address but refers to the object itself. */
9069 base0 = arg0;
9070 if (TREE_CODE (arg0) == ADDR_EXPR)
9071 {
9072 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9073 &bitsize, &bitpos0, &offset0, &mode,
9074 &unsignedp, &volatilep, false);
9075 if (TREE_CODE (base0) == INDIRECT_REF)
9076 base0 = TREE_OPERAND (base0, 0);
9077 else
9078 indirect_base0 = true;
9079 }
9080 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9081 {
9082 base0 = TREE_OPERAND (arg0, 0);
9083 STRIP_SIGN_NOPS (base0);
9084 if (TREE_CODE (base0) == ADDR_EXPR)
9085 {
9086 base0 = TREE_OPERAND (base0, 0);
9087 indirect_base0 = true;
9088 }
9089 offset0 = TREE_OPERAND (arg0, 1);
9090 if (host_integerp (offset0, 0))
9091 {
9092 HOST_WIDE_INT off = size_low_cst (offset0);
9093 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9094 * BITS_PER_UNIT)
9095 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9096 {
9097 bitpos0 = off * BITS_PER_UNIT;
9098 offset0 = NULL_TREE;
9099 }
9100 }
9101 }
9102
9103 base1 = arg1;
9104 if (TREE_CODE (arg1) == ADDR_EXPR)
9105 {
9106 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9107 &bitsize, &bitpos1, &offset1, &mode,
9108 &unsignedp, &volatilep, false);
9109 if (TREE_CODE (base1) == INDIRECT_REF)
9110 base1 = TREE_OPERAND (base1, 0);
9111 else
9112 indirect_base1 = true;
9113 }
9114 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9115 {
9116 base1 = TREE_OPERAND (arg1, 0);
9117 STRIP_SIGN_NOPS (base1);
9118 if (TREE_CODE (base1) == ADDR_EXPR)
9119 {
9120 base1 = TREE_OPERAND (base1, 0);
9121 indirect_base1 = true;
9122 }
9123 offset1 = TREE_OPERAND (arg1, 1);
9124 if (host_integerp (offset1, 0))
9125 {
9126 HOST_WIDE_INT off = size_low_cst (offset1);
9127 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9128 * BITS_PER_UNIT)
9129 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9130 {
9131 bitpos1 = off * BITS_PER_UNIT;
9132 offset1 = NULL_TREE;
9133 }
9134 }
9135 }
9136
9137 /* A local variable can never be pointed to by
9138 the default SSA name of an incoming parameter. */
9139 if ((TREE_CODE (arg0) == ADDR_EXPR
9140 && indirect_base0
9141 && TREE_CODE (base0) == VAR_DECL
9142 && auto_var_in_fn_p (base0, current_function_decl)
9143 && !indirect_base1
9144 && TREE_CODE (base1) == SSA_NAME
9145 && SSA_NAME_IS_DEFAULT_DEF (base1)
9146 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9147 || (TREE_CODE (arg1) == ADDR_EXPR
9148 && indirect_base1
9149 && TREE_CODE (base1) == VAR_DECL
9150 && auto_var_in_fn_p (base1, current_function_decl)
9151 && !indirect_base0
9152 && TREE_CODE (base0) == SSA_NAME
9153 && SSA_NAME_IS_DEFAULT_DEF (base0)
9154 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9155 {
9156 if (code == NE_EXPR)
9157 return constant_boolean_node (1, type);
9158 else if (code == EQ_EXPR)
9159 return constant_boolean_node (0, type);
9160 }
9161 /* If we have equivalent bases we might be able to simplify. */
9162 else if (indirect_base0 == indirect_base1
9163 && operand_equal_p (base0, base1, 0))
9164 {
9165 /* We can fold this expression to a constant if the non-constant
9166 offset parts are equal. */
9167 if ((offset0 == offset1
9168 || (offset0 && offset1
9169 && operand_equal_p (offset0, offset1, 0)))
9170 && (code == EQ_EXPR
9171 || code == NE_EXPR
9172 || (indirect_base0 && DECL_P (base0))
9173 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9174
9175 {
9176 if (code != EQ_EXPR
9177 && code != NE_EXPR
9178 && bitpos0 != bitpos1
9179 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9180 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9181 fold_overflow_warning (("assuming pointer wraparound does not "
9182 "occur when comparing P +- C1 with "
9183 "P +- C2"),
9184 WARN_STRICT_OVERFLOW_CONDITIONAL);
9185
9186 switch (code)
9187 {
9188 case EQ_EXPR:
9189 return constant_boolean_node (bitpos0 == bitpos1, type);
9190 case NE_EXPR:
9191 return constant_boolean_node (bitpos0 != bitpos1, type);
9192 case LT_EXPR:
9193 return constant_boolean_node (bitpos0 < bitpos1, type);
9194 case LE_EXPR:
9195 return constant_boolean_node (bitpos0 <= bitpos1, type);
9196 case GE_EXPR:
9197 return constant_boolean_node (bitpos0 >= bitpos1, type);
9198 case GT_EXPR:
9199 return constant_boolean_node (bitpos0 > bitpos1, type);
9200 default:;
9201 }
9202 }
9203 /* We can simplify the comparison to a comparison of the variable
9204 offset parts if the constant offset parts are equal.
9205 Be careful to use signed sizetype here because otherwise we
9206 mess with array offsets in the wrong way. This is possible
9207 because pointer arithmetic is restricted to remain within an
9208 object and overflow on pointer differences is undefined as of
9209 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9210 else if (bitpos0 == bitpos1
9211 && ((code == EQ_EXPR || code == NE_EXPR)
9212 || (indirect_base0 && DECL_P (base0))
9213 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9214 {
9215 /* By converting to signed sizetype we cover middle-end pointer
9216 arithmetic which operates on unsigned pointer types of size
9217 type size and ARRAY_REF offsets which are properly sign or
9218 zero extended from their type in case it is narrower than
9219 sizetype. */
9220 if (offset0 == NULL_TREE)
9221 offset0 = build_int_cst (ssizetype, 0);
9222 else
9223 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9224 if (offset1 == NULL_TREE)
9225 offset1 = build_int_cst (ssizetype, 0);
9226 else
9227 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9228
9229 if (code != EQ_EXPR
9230 && code != NE_EXPR
9231 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9232 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9233 fold_overflow_warning (("assuming pointer wraparound does not "
9234 "occur when comparing P +- C1 with "
9235 "P +- C2"),
9236 WARN_STRICT_OVERFLOW_COMPARISON);
9237
9238 return fold_build2_loc (loc, code, type, offset0, offset1);
9239 }
9240 }
9241 /* For non-equal bases we can simplify if they are addresses
9242 of local binding decls or constants. */
9243 else if (indirect_base0 && indirect_base1
9244 /* We know that !operand_equal_p (base0, base1, 0)
9245 because the if condition was false. But make
9246 sure two decls are not the same. */
9247 && base0 != base1
9248 && TREE_CODE (arg0) == ADDR_EXPR
9249 && TREE_CODE (arg1) == ADDR_EXPR
9250 && (((TREE_CODE (base0) == VAR_DECL
9251 || TREE_CODE (base0) == PARM_DECL)
9252 && (targetm.binds_local_p (base0)
9253 || CONSTANT_CLASS_P (base1)))
9254 || CONSTANT_CLASS_P (base0))
9255 && (((TREE_CODE (base1) == VAR_DECL
9256 || TREE_CODE (base1) == PARM_DECL)
9257 && (targetm.binds_local_p (base1)
9258 || CONSTANT_CLASS_P (base0)))
9259 || CONSTANT_CLASS_P (base1)))
9260 {
9261 if (code == EQ_EXPR)
9262 return omit_two_operands_loc (loc, type, boolean_false_node,
9263 arg0, arg1);
9264 else if (code == NE_EXPR)
9265 return omit_two_operands_loc (loc, type, boolean_true_node,
9266 arg0, arg1);
9267 }
9268 /* For equal offsets we can simplify to a comparison of the
9269 base addresses. */
9270 else if (bitpos0 == bitpos1
9271 && (indirect_base0
9272 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9273 && (indirect_base1
9274 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9275 && ((offset0 == offset1)
9276 || (offset0 && offset1
9277 && operand_equal_p (offset0, offset1, 0))))
9278 {
9279 if (indirect_base0)
9280 base0 = build_fold_addr_expr_loc (loc, base0);
9281 if (indirect_base1)
9282 base1 = build_fold_addr_expr_loc (loc, base1);
9283 return fold_build2_loc (loc, code, type, base0, base1);
9284 }
9285 }
9286
9287 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9288 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9289 the resulting offset is smaller in absolute value than the
9290 original one. */
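/* Illustrative example with hypothetical int operands:
   x + 3 < y + 5 --> x < y + 2, since |2| < |5| and no overflow
   occurs when combining the constants. */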
9291 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9292 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9293 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9294 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9295 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9296 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9297 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9298 {
9299 tree const1 = TREE_OPERAND (arg0, 1);
9300 tree const2 = TREE_OPERAND (arg1, 1);
9301 tree variable1 = TREE_OPERAND (arg0, 0);
9302 tree variable2 = TREE_OPERAND (arg1, 0);
9303 tree cst;
9304 const char * const warnmsg = G_("assuming signed overflow does not "
9305 "occur when combining constants around "
9306 "a comparison");
9307
9308 /* Put the constant on the side where it doesn't overflow and is
9309 of lower absolute value than before. */
9310 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9311 ? MINUS_EXPR : PLUS_EXPR,
9312 const2, const1);
9313 if (!TREE_OVERFLOW (cst)
9314 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9315 {
9316 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9317 return fold_build2_loc (loc, code, type,
9318 variable1,
9319 fold_build2_loc (loc,
9320 TREE_CODE (arg1), TREE_TYPE (arg1),
9321 variable2, cst));
9322 }
9323
9324 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9325 ? MINUS_EXPR : PLUS_EXPR,
9326 const1, const2);
9327 if (!TREE_OVERFLOW (cst)
9328 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9329 {
9330 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9331 return fold_build2_loc (loc, code, type,
9332 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9333 variable1, cst),
9334 variable2);
9335 }
9336 }
9337
9338 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9339 signed arithmetic case. That form is created by the compiler
9340 often enough for folding it to be of value. One example is in
9341 computing loop trip counts after Operator Strength Reduction. */
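/* E.g., assuming undefined signed overflow on int x:
   x * 4 > 0 --> x > 0, and x * -2 > 0 --> x < 0 (the comparison
   is swapped because the constant is negative). */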
9342 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9343 && TREE_CODE (arg0) == MULT_EXPR
9344 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9345 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9346 && integer_zerop (arg1))
9347 {
9348 tree const1 = TREE_OPERAND (arg0, 1);
9349 tree const2 = arg1; /* zero */
9350 tree variable1 = TREE_OPERAND (arg0, 0);
9351 enum tree_code cmp_code = code;
9352
9353 /* Handle unfolded multiplication by zero. */
9354 if (integer_zerop (const1))
9355 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9356
9357 fold_overflow_warning (("assuming signed overflow does not occur when "
9358 "eliminating multiplication in comparison "
9359 "with zero"),
9360 WARN_STRICT_OVERFLOW_COMPARISON);
9361
9362 /* If const1 is negative we swap the sense of the comparison. */
9363 if (tree_int_cst_sgn (const1) < 0)
9364 cmp_code = swap_tree_comparison (cmp_code);
9365
9366 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9367 }
9368
9369 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9370 if (tem)
9371 return tem;
9372
9373 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9374 {
9375 tree targ0 = strip_float_extensions (arg0);
9376 tree targ1 = strip_float_extensions (arg1);
9377 tree newtype = TREE_TYPE (targ0);
9378
9379 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9380 newtype = TREE_TYPE (targ1);
9381
9382 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9383 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9384 return fold_build2_loc (loc, code, type,
9385 fold_convert_loc (loc, newtype, targ0),
9386 fold_convert_loc (loc, newtype, targ1));
9387
9388 /* (-a) CMP (-b) -> b CMP a */
9389 if (TREE_CODE (arg0) == NEGATE_EXPR
9390 && TREE_CODE (arg1) == NEGATE_EXPR)
9391 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9392 TREE_OPERAND (arg0, 0));
9393
9394 if (TREE_CODE (arg1) == REAL_CST)
9395 {
9396 REAL_VALUE_TYPE cst;
9397 cst = TREE_REAL_CST (arg1);
9398
9399 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9400 if (TREE_CODE (arg0) == NEGATE_EXPR)
9401 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9402 TREE_OPERAND (arg0, 0),
9403 build_real (TREE_TYPE (arg1),
9404 real_value_negate (&cst)));
9405
9406 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9407 /* a CMP (-0) -> a CMP 0 */
9408 if (REAL_VALUE_MINUS_ZERO (cst))
9409 return fold_build2_loc (loc, code, type, arg0,
9410 build_real (TREE_TYPE (arg1), dconst0));
9411
9412 /* x != NaN is always true, other ops are always false. */
9413 if (REAL_VALUE_ISNAN (cst)
9414 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9415 {
9416 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9417 return omit_one_operand_loc (loc, type, tem, arg0);
9418 }
9419
9420 /* Fold comparisons against infinity. */
9421 if (REAL_VALUE_ISINF (cst)
9422 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9423 {
9424 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9425 if (tem != NULL_TREE)
9426 return tem;
9427 }
9428 }
9429
9430 /* If this is a comparison of a real constant with a PLUS_EXPR
9431 or a MINUS_EXPR of a real constant, we can convert it into a
9432 comparison with a revised real constant as long as no overflow
9433 occurs when unsafe_math_optimizations are enabled. */
9434 if (flag_unsafe_math_optimizations
9435 && TREE_CODE (arg1) == REAL_CST
9436 && (TREE_CODE (arg0) == PLUS_EXPR
9437 || TREE_CODE (arg0) == MINUS_EXPR)
9438 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9439 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9440 ? MINUS_EXPR : PLUS_EXPR,
9441 arg1, TREE_OPERAND (arg0, 1)))
9442 && !TREE_OVERFLOW (tem))
9443 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9444
9445 /* Likewise, we can simplify a comparison of a real constant with
9446 a MINUS_EXPR whose first operand is also a real constant, i.e.
9447 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9448 floating-point types only if -fassociative-math is set. */
9449 if (flag_associative_math
9450 && TREE_CODE (arg1) == REAL_CST
9451 && TREE_CODE (arg0) == MINUS_EXPR
9452 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9453 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9454 arg1))
9455 && !TREE_OVERFLOW (tem))
9456 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9457 TREE_OPERAND (arg0, 1), tem);
9458
9459 /* Fold comparisons against built-in math functions. */
9460 if (TREE_CODE (arg1) == REAL_CST
9461 && flag_unsafe_math_optimizations
9462 && ! flag_errno_math)
9463 {
9464 enum built_in_function fcode = builtin_mathfn_code (arg0);
9465
9466 if (fcode != END_BUILTINS)
9467 {
9468 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9469 if (tem != NULL_TREE)
9470 return tem;
9471 }
9472 }
9473 }
9474
9475 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9476 && CONVERT_EXPR_P (arg0))
9477 {
9478 /* If we are widening one operand of an integer comparison,
9479 see if the other operand is similarly being widened. Perhaps we
9480 can do the comparison in the narrower type. */
9481 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9482 if (tem)
9483 return tem;
9484
9485 /* Or if we are changing signedness. */
9486 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9487 if (tem)
9488 return tem;
9489 }
9490
9491 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9492 constant, we can simplify it. */
9493 if (TREE_CODE (arg1) == INTEGER_CST
9494 && (TREE_CODE (arg0) == MIN_EXPR
9495 || TREE_CODE (arg0) == MAX_EXPR)
9496 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9497 {
9498 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9499 if (tem)
9500 return tem;
9501 }
9502
9503 /* Simplify comparison of something with itself. (For IEEE
9504 floating-point, we can only do some of these simplifications.) */
9505 if (operand_equal_p (arg0, arg1, 0))
9506 {
9507 switch (code)
9508 {
9509 case EQ_EXPR:
9510 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9511 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9512 return constant_boolean_node (1, type);
9513 break;
9514
9515 case GE_EXPR:
9516 case LE_EXPR:
9517 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9518 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9519 return constant_boolean_node (1, type);
9520 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9521
9522 case NE_EXPR:
9523 /* For NE, we can only do this simplification if the type is integer
9524 or we don't honor IEEE floating-point NaNs. */
9525 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9526 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9527 break;
9528 /* ... fall through ... */
9529 case GT_EXPR:
9530 case LT_EXPR:
9531 return constant_boolean_node (0, type);
9532 default:
9533 gcc_unreachable ();
9534 }
9535 }
9536
9537 /* If we are comparing an expression that just has comparisons
9538 of two integer values, arithmetic expressions of those comparisons,
9539 and constants, we can simplify it. There are only three cases
9540 to check: the two values can either be equal, the first can be
9541 greater, or the second can be greater. Fold the expression for
9542 those three values. Since each value must be 0 or 1, we have
9543 eight possibilities, each of which corresponds to the constant 0
9544 or 1 or one of the six possible comparisons.
9545
9546 This handles common cases like (a > b) == 0 but also handles
9547 expressions like ((x > y) - (y > x)) > 0, which supposedly
9548 occur in macroized code. */
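/* A worked example: for (a > b) == 0 the three foldings give
   high_result = 0, equal_result = 1 and low_result = 1, producing
   the mask 3 (011) and hence code LE_EXPR, i.e. a <= b. */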
9549
9550 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9551 {
9552 tree cval1 = 0, cval2 = 0;
9553 int save_p = 0;
9554
9555 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9556 /* Don't handle degenerate cases here; they should already
9557 have been handled anyway. */
9558 && cval1 != 0 && cval2 != 0
9559 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9560 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9561 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9562 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9563 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9564 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9565 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9566 {
9567 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9568 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9569
9570 /* We can't just pass T to eval_subst in case cval1 or cval2
9571 was the same as ARG1. */
9572
9573 tree high_result
9574 = fold_build2_loc (loc, code, type,
9575 eval_subst (loc, arg0, cval1, maxval,
9576 cval2, minval),
9577 arg1);
9578 tree equal_result
9579 = fold_build2_loc (loc, code, type,
9580 eval_subst (loc, arg0, cval1, maxval,
9581 cval2, maxval),
9582 arg1);
9583 tree low_result
9584 = fold_build2_loc (loc, code, type,
9585 eval_subst (loc, arg0, cval1, minval,
9586 cval2, maxval),
9587 arg1);
9588
9589 /* All three of these results should be 0 or 1. Confirm they are.
9590 Then use those values to select the proper code to use. */
9591
9592 if (TREE_CODE (high_result) == INTEGER_CST
9593 && TREE_CODE (equal_result) == INTEGER_CST
9594 && TREE_CODE (low_result) == INTEGER_CST)
9595 {
9596 /* Make a 3-bit mask with the high-order bit being the
9597 value for '>', the next for '=', and the low for '<'. */
9598 switch ((integer_onep (high_result) * 4)
9599 + (integer_onep (equal_result) * 2)
9600 + integer_onep (low_result))
9601 {
9602 case 0:
9603 /* Always false. */
9604 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9605 case 1:
9606 code = LT_EXPR;
9607 break;
9608 case 2:
9609 code = EQ_EXPR;
9610 break;
9611 case 3:
9612 code = LE_EXPR;
9613 break;
9614 case 4:
9615 code = GT_EXPR;
9616 break;
9617 case 5:
9618 code = NE_EXPR;
9619 break;
9620 case 6:
9621 code = GE_EXPR;
9622 break;
9623 case 7:
9624 /* Always true. */
9625 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9626 }
9627
9628 if (save_p)
9629 {
9630 tem = save_expr (build2 (code, type, cval1, cval2));
9631 SET_EXPR_LOCATION (tem, loc);
9632 return tem;
9633 }
9634 return fold_build2_loc (loc, code, type, cval1, cval2);
9635 }
9636 }
9637 }
9638
9639 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9640 into a single range test. */
9641 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9642 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9643 && TREE_CODE (arg1) == INTEGER_CST
9644 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9645 && !integer_zerop (TREE_OPERAND (arg0, 1))
9646 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9647 && !TREE_OVERFLOW (arg1))
9648 {
9649 tem = fold_div_compare (loc, code, type, arg0, arg1);
9650 if (tem != NULL_TREE)
9651 return tem;
9652 }
9653
9654 /* Fold ~X op ~Y as Y op X. */
9655 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9656 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9657 {
9658 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9659 return fold_build2_loc (loc, code, type,
9660 fold_convert_loc (loc, cmp_type,
9661 TREE_OPERAND (arg1, 0)),
9662 TREE_OPERAND (arg0, 0));
9663 }
9664
9665 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
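/* E.g. ~x < 5 --> x > -6, since ~5 == -6 in two's complement. */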
9666 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9667 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9668 {
9669 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9670 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9671 TREE_OPERAND (arg0, 0),
9672 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9673 fold_convert_loc (loc, cmp_type, arg1)));
9674 }
9675
9676 return NULL_TREE;
9677 }
9678
9679
9680 /* Subroutine of fold_binary. Optimize complex multiplications of the
9681 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9682 argument EXPR represents the expression "z" of type TYPE. */
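/* With z = a + b*i this is just
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
   which is the COMPLEX_EXPR built below. */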
9683
9684 static tree
9685 fold_mult_zconjz (location_t loc, tree type, tree expr)
9686 {
9687 tree itype = TREE_TYPE (type);
9688 tree rpart, ipart, tem;
9689
9690 if (TREE_CODE (expr) == COMPLEX_EXPR)
9691 {
9692 rpart = TREE_OPERAND (expr, 0);
9693 ipart = TREE_OPERAND (expr, 1);
9694 }
9695 else if (TREE_CODE (expr) == COMPLEX_CST)
9696 {
9697 rpart = TREE_REALPART (expr);
9698 ipart = TREE_IMAGPART (expr);
9699 }
9700 else
9701 {
9702 expr = save_expr (expr);
9703 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9704 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9705 }
9706
9707 rpart = save_expr (rpart);
9708 ipart = save_expr (ipart);
9709 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9710 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9711 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9712 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9713 build_zero_cst (itype));
9714 }
9715
9716
9717 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9718 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9719 guarantees that P and N have the same least significant log2(M) bits.
9720 N is not otherwise constrained. In particular, N is not normalized to
9721 0 <= N < M as is common. In general, the precise value of P is unknown.
9722 M is chosen as large as possible such that constant N can be determined.
9723
9724 Returns M and sets *RESIDUE to N.
9725
9726 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9727 account. This is not always possible due to PR 35705.
9728 */
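/* Illustrative example: for EXPR == &a[1] where a is an array of
   4-byte ints aligned to 16 bytes, the returned modulus is 16 and
   *RESIDUE is set to 4, i.e. the pointer is known to equal 4
   modulo 16. */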
9729
9730 static unsigned HOST_WIDE_INT
9731 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9732 bool allow_func_align)
9733 {
9734 enum tree_code code;
9735
9736 *residue = 0;
9737
9738 code = TREE_CODE (expr);
9739 if (code == ADDR_EXPR)
9740 {
9741 unsigned int bitalign;
9742 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9743 *residue /= BITS_PER_UNIT;
9744 return bitalign / BITS_PER_UNIT;
9745 }
9746 else if (code == POINTER_PLUS_EXPR)
9747 {
9748 tree op0, op1;
9749 unsigned HOST_WIDE_INT modulus;
9750 enum tree_code inner_code;
9751
9752 op0 = TREE_OPERAND (expr, 0);
9753 STRIP_NOPS (op0);
9754 modulus = get_pointer_modulus_and_residue (op0, residue,
9755 allow_func_align);
9756
9757 op1 = TREE_OPERAND (expr, 1);
9758 STRIP_NOPS (op1);
9759 inner_code = TREE_CODE (op1);
9760 if (inner_code == INTEGER_CST)
9761 {
9762 *residue += TREE_INT_CST_LOW (op1);
9763 return modulus;
9764 }
9765 else if (inner_code == MULT_EXPR)
9766 {
9767 op1 = TREE_OPERAND (op1, 1);
9768 if (TREE_CODE (op1) == INTEGER_CST)
9769 {
9770 unsigned HOST_WIDE_INT align;
9771
9772 /* Compute the greatest power-of-2 divisor of op1. */
9773 align = TREE_INT_CST_LOW (op1);
9774 align &= -align;
9775
9776 /* If align is non-zero and less than modulus, replace
9777 modulus with align. If align is 0, then either op1 is 0
9778 or the greatest power-of-2 divisor of op1 doesn't fit in an
9779 unsigned HOST_WIDE_INT. In either case, no additional
9780 constraint is imposed. */
9781 if (align)
9782 modulus = MIN (modulus, align);
9783
9784 return modulus;
9785 }
9786 }
9787 }
9788
9789 /* If we get here, we were unable to determine anything useful about the
9790 expression. */
9791 return 1;
9792 }
9793
9794 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9795 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9796
9797 static bool
9798 vec_cst_ctor_to_array (tree arg, tree *elts)
9799 {
9800 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9801
9802 if (TREE_CODE (arg) == VECTOR_CST)
9803 {
9804 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9805 elts[i] = VECTOR_CST_ELT (arg, i);
9806 }
9807 else if (TREE_CODE (arg) == CONSTRUCTOR)
9808 {
9809 constructor_elt *elt;
9810
9811 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9812 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9813 return false;
9814 else
9815 elts[i] = elt->value;
9816 }
9817 else
9818 return false;
9819 for (; i < nelts; i++)
9820 elts[i]
9821 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9822 return true;
9823 }
9824
9825 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9826 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9827 NULL_TREE otherwise. */
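/* Illustrative example: for four-element vectors
   ARG0 = {a0, a1, a2, a3}, ARG1 = {b0, b1, b2, b3} and
   SEL = {0, 4, 1, 5} the result is {a0, b0, a1, b1}; selector
   values below NELTS pick from ARG0, the others from ARG1. */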
9828
9829 static tree
9830 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9831 {
9832 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9833 tree *elts;
9834 bool need_ctor = false;
9835
9836 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9837 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9838 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9839 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9840 return NULL_TREE;
9841
9842 elts = XALLOCAVEC (tree, nelts * 3);
9843 if (!vec_cst_ctor_to_array (arg0, elts)
9844 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9845 return NULL_TREE;
9846
9847 for (i = 0; i < nelts; i++)
9848 {
9849 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9850 need_ctor = true;
9851 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9852 }
9853
9854 if (need_ctor)
9855 {
9856 vec<constructor_elt, va_gc> *v;
9857 vec_alloc (v, nelts);
9858 for (i = 0; i < nelts; i++)
9859 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9860 return build_constructor (type, v);
9861 }
9862 else
9863 return build_vector (type, &elts[2 * nelts]);
9864 }
9865
9866 /* Try to fold a pointer difference of type TYPE between two address expressions of
9867 array references AREF0 and AREF1 using location LOC. Return a
9868 simplified expression for the difference or NULL_TREE. */
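/* E.g. the address difference &a[i] - &a[j] folds to
   (i - j) * sizeof (a[0]) expressed in TYPE; nested array
   references recurse through their bases first. */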
9869
9870 static tree
9871 fold_addr_of_array_ref_difference (location_t loc, tree type,
9872 tree aref0, tree aref1)
9873 {
9874 tree base0 = TREE_OPERAND (aref0, 0);
9875 tree base1 = TREE_OPERAND (aref1, 0);
9876 tree base_offset = build_int_cst (type, 0);
9877
9878 /* If the bases are array references as well, recurse. If the bases
9879 are pointer indirections compute the difference of the pointers.
9880 If the bases are equal, we are set. */
9881 if ((TREE_CODE (base0) == ARRAY_REF
9882 && TREE_CODE (base1) == ARRAY_REF
9883 && (base_offset
9884 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9885 || (INDIRECT_REF_P (base0)
9886 && INDIRECT_REF_P (base1)
9887 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9888 TREE_OPERAND (base0, 0),
9889 TREE_OPERAND (base1, 0))))
9890 || operand_equal_p (base0, base1, 0))
9891 {
9892 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9893 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9894 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9895 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9896 return fold_build2_loc (loc, PLUS_EXPR, type,
9897 base_offset,
9898 fold_build2_loc (loc, MULT_EXPR, type,
9899 diff, esz));
9900 }
9901 return NULL_TREE;
9902 }
9903
9904 /* If the real or vector real constant CST of type TYPE has an exact
9905 inverse, return it, else return NULL_TREE. */
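/* E.g. 4.0 has the exact inverse 0.25, so x / 4.0 may become
   x * 0.25, whereas 3.0 has no exact binary inverse and NULL_TREE
   is returned. */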
9906
9907 static tree
9908 exact_inverse (tree type, tree cst)
9909 {
9910 REAL_VALUE_TYPE r;
9911 tree unit_type, *elts;
9912 enum machine_mode mode;
9913 unsigned vec_nelts, i;
9914
9915 switch (TREE_CODE (cst))
9916 {
9917 case REAL_CST:
9918 r = TREE_REAL_CST (cst);
9919
9920 if (exact_real_inverse (TYPE_MODE (type), &r))
9921 return build_real (type, r);
9922
9923 return NULL_TREE;
9924
9925 case VECTOR_CST:
9926 vec_nelts = VECTOR_CST_NELTS (cst);
9927 elts = XALLOCAVEC (tree, vec_nelts);
9928 unit_type = TREE_TYPE (type);
9929 mode = TYPE_MODE (unit_type);
9930
9931 for (i = 0; i < vec_nelts; i++)
9932 {
9933 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9934 if (!exact_real_inverse (mode, &r))
9935 return NULL_TREE;
9936 elts[i] = build_real (unit_type, r);
9937 }
9938
9939 return build_vector (type, elts);
9940
9941 default:
9942 return NULL_TREE;
9943 }
9944 }
9945
9946 /* Mask out the tz least significant bits of X of type TYPE where
9947 tz is the number of trailing zeroes in Y. */
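/* Illustrative example: for Y == 24 (binary 11000, three trailing
   zeros) the three least significant bits of X are cleared, so
   X == 13 (1101) yields 8 (1000). */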
9948 static double_int
9949 mask_with_tz (tree type, double_int x, double_int y)
9950 {
9951 int tz = y.trailing_zeros ();
9952
9953 if (tz > 0)
9954 {
9955 double_int mask;
9956
9957 mask = ~double_int::mask (tz);
9958 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9959 return mask & x;
9960 }
9961 return x;
9962 }
9963
9964 /* Fold a binary expression of code CODE and type TYPE with operands
9965 OP0 and OP1. LOC is the location of the resulting expression.
9966 Return the folded expression if folding is successful. Otherwise,
9967 return NULL_TREE. */
9968
9969 tree
9970 fold_binary_loc (location_t loc,
9971 enum tree_code code, tree type, tree op0, tree op1)
9972 {
9973 enum tree_code_class kind = TREE_CODE_CLASS (code);
9974 tree arg0, arg1, tem;
9975 tree t1 = NULL_TREE;
9976 bool strict_overflow_p;
9977 unsigned int prec;
9978
9979 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9980 && TREE_CODE_LENGTH (code) == 2
9981 && op0 != NULL_TREE
9982 && op1 != NULL_TREE);
9983
9984 arg0 = op0;
9985 arg1 = op1;
9986
9987 /* Strip any conversions that don't change the mode. This is
9988 safe for every expression, except for a comparison expression
9989 because its signedness is derived from its operands. So, in
9990 the latter case, only strip conversions that don't change the
9991 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9992 preserved.
9993
9994 Note that this is done as an internal manipulation within the
9995 constant folder, in order to find the simplest representation
9996 of the arguments so that their form can be studied. In any
9997 case, the appropriate type conversions should be put back in
9998 the tree that will get out of the constant folder. */
9999
10000 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10001 {
10002 STRIP_SIGN_NOPS (arg0);
10003 STRIP_SIGN_NOPS (arg1);
10004 }
10005 else
10006 {
10007 STRIP_NOPS (arg0);
10008 STRIP_NOPS (arg1);
10009 }
10010
10011 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10012 constant but we can't do arithmetic on them. */
10013 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10014 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10015 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10016 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10017 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10018 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10019 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10020 {
10021 if (kind == tcc_binary)
10022 {
10023 /* Make sure type and arg0 have the same saturating flag. */
10024 gcc_assert (TYPE_SATURATING (type)
10025 == TYPE_SATURATING (TREE_TYPE (arg0)));
10026 tem = const_binop (code, arg0, arg1);
10027 }
10028 else if (kind == tcc_comparison)
10029 tem = fold_relational_const (code, type, arg0, arg1);
10030 else
10031 tem = NULL_TREE;
10032
10033 if (tem != NULL_TREE)
10034 {
10035 if (TREE_TYPE (tem) != type)
10036 tem = fold_convert_loc (loc, type, tem);
10037 return tem;
10038 }
10039 }
10040
10041 /* If this is a commutative operation, and ARG0 is a constant, move it
10042 to ARG1 to reduce the number of tests below. */
10043 if (commutative_tree_code (code)
10044 && tree_swap_operands_p (arg0, arg1, true))
10045 return fold_build2_loc (loc, code, type, op1, op0);
10046
10047 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10048
10049 First check for cases where an arithmetic operation is applied to a
10050 compound, conditional, or comparison operation. Push the arithmetic
10051 operation inside the compound or conditional to see if any folding
10052 can then be done. Convert comparison to conditional for this purpose.
10053 This also optimizes non-constant cases that used to be done in
10054 expand_expr.
10055
10056 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10057 one of the operands is a comparison and the other is a comparison, a
10058 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10059 code below would make the expression more complex. Change it to a
10060 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10061 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10062
10063 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10064 || code == EQ_EXPR || code == NE_EXPR)
10065 && TREE_CODE (type) != VECTOR_TYPE
10066 && ((truth_value_p (TREE_CODE (arg0))
10067 && (truth_value_p (TREE_CODE (arg1))
10068 || (TREE_CODE (arg1) == BIT_AND_EXPR
10069 && integer_onep (TREE_OPERAND (arg1, 1)))))
10070 || (truth_value_p (TREE_CODE (arg1))
10071 && (truth_value_p (TREE_CODE (arg0))
10072 || (TREE_CODE (arg0) == BIT_AND_EXPR
10073 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10074 {
10075 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10076 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10077 : TRUTH_XOR_EXPR,
10078 boolean_type_node,
10079 fold_convert_loc (loc, boolean_type_node, arg0),
10080 fold_convert_loc (loc, boolean_type_node, arg1));
10081
10082 if (code == EQ_EXPR)
10083 tem = invert_truthvalue_loc (loc, tem);
10084
10085 return fold_convert_loc (loc, type, tem);
10086 }
10087
10088 if (TREE_CODE_CLASS (code) == tcc_binary
10089 || TREE_CODE_CLASS (code) == tcc_comparison)
10090 {
10091 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10092 {
10093 tem = fold_build2_loc (loc, code, type,
10094 fold_convert_loc (loc, TREE_TYPE (op0),
10095 TREE_OPERAND (arg0, 1)), op1);
10096 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10097 tem);
10098 }
10099 if (TREE_CODE (arg1) == COMPOUND_EXPR
10100 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10101 {
10102 tem = fold_build2_loc (loc, code, type, op0,
10103 fold_convert_loc (loc, TREE_TYPE (op1),
10104 TREE_OPERAND (arg1, 1)));
10105 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10106 tem);
10107 }
10108
10109 if (TREE_CODE (arg0) == COND_EXPR
10110 || TREE_CODE (arg0) == VEC_COND_EXPR
10111 || COMPARISON_CLASS_P (arg0))
10112 {
10113 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10114 arg0, arg1,
10115 /*cond_first_p=*/1);
10116 if (tem != NULL_TREE)
10117 return tem;
10118 }
10119
10120 if (TREE_CODE (arg1) == COND_EXPR
10121 || TREE_CODE (arg1) == VEC_COND_EXPR
10122 || COMPARISON_CLASS_P (arg1))
10123 {
10124 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10125 arg1, arg0,
10126 /*cond_first_p=*/0);
10127 if (tem != NULL_TREE)
10128 return tem;
10129 }
10130 }
10131
10132 switch (code)
10133 {
10134 case MEM_REF:
10135 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10136 if (TREE_CODE (arg0) == ADDR_EXPR
10137 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10138 {
10139 tree iref = TREE_OPERAND (arg0, 0);
10140 return fold_build2 (MEM_REF, type,
10141 TREE_OPERAND (iref, 0),
10142 int_const_binop (PLUS_EXPR, arg1,
10143 TREE_OPERAND (iref, 1)));
10144 }
10145
10146 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10147 if (TREE_CODE (arg0) == ADDR_EXPR
10148 && handled_component_p (TREE_OPERAND (arg0, 0)))
10149 {
10150 tree base;
10151 HOST_WIDE_INT coffset;
10152 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10153 &coffset);
10154 if (!base)
10155 return NULL_TREE;
10156 return fold_build2 (MEM_REF, type,
10157 build_fold_addr_expr (base),
10158 int_const_binop (PLUS_EXPR, arg1,
10159 size_int (coffset)));
10160 }
10161
10162 return NULL_TREE;
10163
10164 case POINTER_PLUS_EXPR:
10165 /* 0 +p index -> (type)index */
10166 if (integer_zerop (arg0))
10167 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10168
10169 /* PTR +p 0 -> PTR */
10170 if (integer_zerop (arg1))
10171 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10172
10173 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10174 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10175 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10176 return fold_convert_loc (loc, type,
10177 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10178 fold_convert_loc (loc, sizetype,
10179 arg1),
10180 fold_convert_loc (loc, sizetype,
10181 arg0)));
10182
10183 /* (PTR +p B) +p A -> PTR +p (B + A) */
10184 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10185 {
10186 tree inner;
10187 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10188 tree arg00 = TREE_OPERAND (arg0, 0);
10189 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10190 arg01, fold_convert_loc (loc, sizetype, arg1));
10191 return fold_convert_loc (loc, type,
10192 fold_build_pointer_plus_loc (loc,
10193 arg00, inner));
10194 }
10195
10196 /* PTR_CST +p CST -> CST1 */
10197 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10198 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10199 fold_convert_loc (loc, type, arg1));
10200
10201 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10202 of the array. The loop optimizer sometimes produces this type of
10203 expression. */
10204 if (TREE_CODE (arg0) == ADDR_EXPR)
10205 {
10206 tem = try_move_mult_to_index (loc, arg0,
10207 fold_convert_loc (loc,
10208 ssizetype, arg1));
10209 if (tem)
10210 return fold_convert_loc (loc, type, tem);
10211 }
10212
10213 return NULL_TREE;
10214
10215 case PLUS_EXPR:
10216 /* A + (-B) -> A - B */
10217 if (TREE_CODE (arg1) == NEGATE_EXPR)
10218 return fold_build2_loc (loc, MINUS_EXPR, type,
10219 fold_convert_loc (loc, type, arg0),
10220 fold_convert_loc (loc, type,
10221 TREE_OPERAND (arg1, 0)));
10222 /* (-A) + B -> B - A */
10223 if (TREE_CODE (arg0) == NEGATE_EXPR
10224 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10225 return fold_build2_loc (loc, MINUS_EXPR, type,
10226 fold_convert_loc (loc, type, arg1),
10227 fold_convert_loc (loc, type,
10228 TREE_OPERAND (arg0, 0)));
10229
10230 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10231 {
10232 /* Convert ~A + 1 to -A. */
10233 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10234 && integer_onep (arg1))
10235 return fold_build1_loc (loc, NEGATE_EXPR, type,
10236 fold_convert_loc (loc, type,
10237 TREE_OPERAND (arg0, 0)));
10238
10239 /* ~X + X is -1. */
10240 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10241 && !TYPE_OVERFLOW_TRAPS (type))
10242 {
10243 tree tem = TREE_OPERAND (arg0, 0);
10244
10245 STRIP_NOPS (tem);
10246 if (operand_equal_p (tem, arg1, 0))
10247 {
10248 t1 = build_all_ones_cst (type);
10249 return omit_one_operand_loc (loc, type, t1, arg1);
10250 }
10251 }
10252
10253 /* X + ~X is -1. */
10254 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10255 && !TYPE_OVERFLOW_TRAPS (type))
10256 {
10257 tree tem = TREE_OPERAND (arg1, 0);
10258
10259 STRIP_NOPS (tem);
10260 if (operand_equal_p (arg0, tem, 0))
10261 {
10262 t1 = build_all_ones_cst (type);
10263 return omit_one_operand_loc (loc, type, t1, arg0);
10264 }
10265 }
10266
10267 /* X + (X / CST) * -CST is X % CST. */
10268 if (TREE_CODE (arg1) == MULT_EXPR
10269 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10270 && operand_equal_p (arg0,
10271 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10272 {
10273 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10274 tree cst1 = TREE_OPERAND (arg1, 1);
10275 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10276 cst1, cst0);
10277 if (sum && integer_zerop (sum))
10278 return fold_convert_loc (loc, type,
10279 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10280 TREE_TYPE (arg0), arg0,
10281 cst0));
10282 }
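/* E.g., assuming integral x, x + (x / 16) * -16 folds to x % 16:
   the two constants (16 and -16) must sum to zero, which is the
   check performed on SUM above.  */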
10283 }
10284
10285 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10286 one. Make sure the type is not saturating and has the signedness of
10287 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10288 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10289 if ((TREE_CODE (arg0) == MULT_EXPR
10290 || TREE_CODE (arg1) == MULT_EXPR)
10291 && !TYPE_SATURATING (type)
10292 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10293 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10294 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10295 {
10296 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10297 if (tem)
10298 return tem;
10299 }
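/* E.g., a*4 + a*12 can become a*16, and x*c + y*c can become
   (x + y)*c, provided the saturation and signedness checks above
   allow fold_plusminus_mult_expr to re-associate.  */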
10300
10301 if (! FLOAT_TYPE_P (type))
10302 {
10303 if (integer_zerop (arg1))
10304 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10305
10306 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10307 with a constant, and the two constants have no bits in common,
10308 we should treat this as a BIT_IOR_EXPR since this may produce more
10309 simplifications. */
10310 if (TREE_CODE (arg0) == BIT_AND_EXPR
10311 && TREE_CODE (arg1) == BIT_AND_EXPR
10312 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10313 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10314 && integer_zerop (const_binop (BIT_AND_EXPR,
10315 TREE_OPERAND (arg0, 1),
10316 TREE_OPERAND (arg1, 1))))
10317 {
10318 code = BIT_IOR_EXPR;
10319 goto bit_ior;
10320 }
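/* E.g., (x & 0xF0) + (y & 0x0F): the two masks share no bits,
   so no carry can propagate between the operands and the
   addition behaves exactly like a bitwise OR.  */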
10321
10322 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10323 (plus (plus (mult) (mult)) (foo)) so that we can
10324 take advantage of the factoring cases below. */
10325 if (TYPE_OVERFLOW_WRAPS (type)
10326 && (((TREE_CODE (arg0) == PLUS_EXPR
10327 || TREE_CODE (arg0) == MINUS_EXPR)
10328 && TREE_CODE (arg1) == MULT_EXPR)
10329 || ((TREE_CODE (arg1) == PLUS_EXPR
10330 || TREE_CODE (arg1) == MINUS_EXPR)
10331 && TREE_CODE (arg0) == MULT_EXPR)))
10332 {
10333 tree parg0, parg1, parg, marg;
10334 enum tree_code pcode;
10335
10336 if (TREE_CODE (arg1) == MULT_EXPR)
10337 parg = arg0, marg = arg1;
10338 else
10339 parg = arg1, marg = arg0;
10340 pcode = TREE_CODE (parg);
10341 parg0 = TREE_OPERAND (parg, 0);
10342 parg1 = TREE_OPERAND (parg, 1);
10343 STRIP_NOPS (parg0);
10344 STRIP_NOPS (parg1);
10345
10346 if (TREE_CODE (parg0) == MULT_EXPR
10347 && TREE_CODE (parg1) != MULT_EXPR)
10348 return fold_build2_loc (loc, pcode, type,
10349 fold_build2_loc (loc, PLUS_EXPR, type,
10350 fold_convert_loc (loc, type,
10351 parg0),
10352 fold_convert_loc (loc, type,
10353 marg)),
10354 fold_convert_loc (loc, type, parg1));
10355 if (TREE_CODE (parg0) != MULT_EXPR
10356 && TREE_CODE (parg1) == MULT_EXPR)
10357 return
10358 fold_build2_loc (loc, PLUS_EXPR, type,
10359 fold_convert_loc (loc, type, parg0),
10360 fold_build2_loc (loc, pcode, type,
10361 fold_convert_loc (loc, type, marg),
10362 fold_convert_loc (loc, type,
10363 parg1)));
10364 }
10365 }
10366 else
10367 {
10368 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10369 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10370 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10371
10372 /* Likewise if the operands are reversed. */
10373 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10374 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10375
10376 /* Convert X + -C into X - C. */
10377 if (TREE_CODE (arg1) == REAL_CST
10378 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10379 {
10380 tem = fold_negate_const (arg1, type);
10381 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10382 return fold_build2_loc (loc, MINUS_EXPR, type,
10383 fold_convert_loc (loc, type, arg0),
10384 fold_convert_loc (loc, type, tem));
10385 }
10386
10387 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10388 to __complex__ ( x, y ). This is not the same for SNaNs or
10389 if signed zeros are involved. */
10390 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10391 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10392 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10393 {
10394 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10395 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10396 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10397 bool arg0rz = false, arg0iz = false;
10398 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10399 || (arg0i && (arg0iz = real_zerop (arg0i))))
10400 {
10401 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10402 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10403 if (arg0rz && arg1i && real_zerop (arg1i))
10404 {
10405 tree rp = arg1r ? arg1r
10406 : build1 (REALPART_EXPR, rtype, arg1);
10407 tree ip = arg0i ? arg0i
10408 : build1 (IMAGPART_EXPR, rtype, arg0);
10409 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10410 }
10411 else if (arg0iz && arg1r && real_zerop (arg1r))
10412 {
10413 tree rp = arg0r ? arg0r
10414 : build1 (REALPART_EXPR, rtype, arg0);
10415 tree ip = arg1i ? arg1i
10416 : build1 (IMAGPART_EXPR, rtype, arg1);
10417 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10418 }
10419 }
10420 }
10421
10422 if (flag_unsafe_math_optimizations
10423 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10424 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10425 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10426 return tem;
10427
10428 /* Convert x+x into x*2.0. */
10429 if (operand_equal_p (arg0, arg1, 0)
10430 && SCALAR_FLOAT_TYPE_P (type))
10431 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10432 build_real (type, dconst2));
10433
10434 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10435 We associate floats only if the user has specified
10436 -fassociative-math. */
10437 if (flag_associative_math
10438 && TREE_CODE (arg1) == PLUS_EXPR
10439 && TREE_CODE (arg0) != MULT_EXPR)
10440 {
10441 tree tree10 = TREE_OPERAND (arg1, 0);
10442 tree tree11 = TREE_OPERAND (arg1, 1);
10443 if (TREE_CODE (tree11) == MULT_EXPR
10444 && TREE_CODE (tree10) == MULT_EXPR)
10445 {
10446 tree tree0;
10447 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10448 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10449 }
10450 }
10451 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10452 We associate floats only if the user has specified
10453 -fassociative-math. */
10454 if (flag_associative_math
10455 && TREE_CODE (arg0) == PLUS_EXPR
10456 && TREE_CODE (arg1) != MULT_EXPR)
10457 {
10458 tree tree00 = TREE_OPERAND (arg0, 0);
10459 tree tree01 = TREE_OPERAND (arg0, 1);
10460 if (TREE_CODE (tree01) == MULT_EXPR
10461 && TREE_CODE (tree00) == MULT_EXPR)
10462 {
10463 tree tree0;
10464 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10465 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10466 }
10467 }
10468 }
10469
10470 bit_rotate:
10471 /* (A << C1) + (A >> C2): if A is unsigned and C1 + C2 is the size of A,
10472 this is a rotate of A by C1 bits. */
10473 /* (A << B) + (A >> (Z - B)): if A is unsigned and Z is the size of A,
10474 this is a rotate of A by B bits. */
10475 {
10476 enum tree_code code0, code1;
10477 tree rtype;
10478 code0 = TREE_CODE (arg0);
10479 code1 = TREE_CODE (arg1);
10480 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10481 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10482 && operand_equal_p (TREE_OPERAND (arg0, 0),
10483 TREE_OPERAND (arg1, 0), 0)
10484 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10485 TYPE_UNSIGNED (rtype))
10486 /* Only create rotates in complete modes. Other cases are not
10487 expanded properly. */
10488 && (element_precision (rtype)
10489 == element_precision (TYPE_MODE (rtype))))
10490 {
10491 tree tree01, tree11;
10492 enum tree_code code01, code11;
10493
10494 tree01 = TREE_OPERAND (arg0, 1);
10495 tree11 = TREE_OPERAND (arg1, 1);
10496 STRIP_NOPS (tree01);
10497 STRIP_NOPS (tree11);
10498 code01 = TREE_CODE (tree01);
10499 code11 = TREE_CODE (tree11);
10500 if (code01 == INTEGER_CST
10501 && code11 == INTEGER_CST
10502 && TREE_INT_CST_HIGH (tree01) == 0
10503 && TREE_INT_CST_HIGH (tree11) == 0
10504 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10505 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10506 {
10507 tem = build2_loc (loc, LROTATE_EXPR,
10508 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10509 TREE_OPERAND (arg0, 0),
10510 code0 == LSHIFT_EXPR ? tree01 : tree11);
10511 return fold_convert_loc (loc, type, tem);
10512 }
10513 else if (code11 == MINUS_EXPR)
10514 {
10515 tree tree110, tree111;
10516 tree110 = TREE_OPERAND (tree11, 0);
10517 tree111 = TREE_OPERAND (tree11, 1);
10518 STRIP_NOPS (tree110);
10519 STRIP_NOPS (tree111);
10520 if (TREE_CODE (tree110) == INTEGER_CST
10521 && 0 == compare_tree_int (tree110,
10522 element_precision
10523 (TREE_TYPE (TREE_OPERAND
10524 (arg0, 0))))
10525 && operand_equal_p (tree01, tree111, 0))
10526 return
10527 fold_convert_loc (loc, type,
10528 build2 ((code0 == LSHIFT_EXPR
10529 ? LROTATE_EXPR
10530 : RROTATE_EXPR),
10531 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10532 TREE_OPERAND (arg0, 0), tree01));
10533 }
10534 else if (code01 == MINUS_EXPR)
10535 {
10536 tree tree010, tree011;
10537 tree010 = TREE_OPERAND (tree01, 0);
10538 tree011 = TREE_OPERAND (tree01, 1);
10539 STRIP_NOPS (tree010);
10540 STRIP_NOPS (tree011);
10541 if (TREE_CODE (tree010) == INTEGER_CST
10542 && 0 == compare_tree_int (tree010,
10543 element_precision
10544 (TREE_TYPE (TREE_OPERAND
10545 (arg0, 0))))
10546 && operand_equal_p (tree11, tree011, 0))
10547 return fold_convert_loc
10548 (loc, type,
10549 build2 ((code0 != LSHIFT_EXPR
10550 ? LROTATE_EXPR
10551 : RROTATE_EXPR),
10552 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10553 TREE_OPERAND (arg0, 0), tree11));
10554 }
10555 }
10556 }
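/* E.g., for a 32-bit unsigned x, both (x << 3) + (x >> 29) and
   (x << b) + (x >> (32 - b)) are recognized here and rewritten
   as rotates of x.  */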
10557
10558 associate:
10559 /* In most languages, we cannot reassociate operations on floats across
10560 parentheses. Rather than remember where the parentheses were, we
10561 don't associate floats at all, unless the user has specified
10562 -fassociative-math.
10563 We also need to make sure the type is not saturating. */
10564
10565 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10566 && !TYPE_SATURATING (type))
10567 {
10568 tree var0, con0, lit0, minus_lit0;
10569 tree var1, con1, lit1, minus_lit1;
10570 tree atype = type;
10571 bool ok = true;
10572
10573 /* Split both trees into variables, constants, and literals. Then
10574 associate each group together, the constants with literals,
10575 then the result with variables. This increases the chances of
10576 literals being recombined later and of generating relocatable
10577 expressions for the sum of a constant and literal. */
10578 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10579 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10580 code == MINUS_EXPR);
10581
10582 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10583 if (code == MINUS_EXPR)
10584 code = PLUS_EXPR;
10585
10586 /* With undefined overflow prefer doing association in a type
10587 which wraps on overflow, if that is one of the operand types. */
10588 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10589 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10590 {
10591 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10592 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10593 atype = TREE_TYPE (arg0);
10594 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10595 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10596 atype = TREE_TYPE (arg1);
10597 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10598 }
10599
10600 /* With undefined overflow we can only associate constants with one
10601 variable, and constants whose association doesn't overflow. */
10602 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10603 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10604 {
10605 if (var0 && var1)
10606 {
10607 tree tmp0 = var0;
10608 tree tmp1 = var1;
10609
10610 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10611 tmp0 = TREE_OPERAND (tmp0, 0);
10612 if (CONVERT_EXPR_P (tmp0)
10613 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10614 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10615 <= TYPE_PRECISION (atype)))
10616 tmp0 = TREE_OPERAND (tmp0, 0);
10617 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10618 tmp1 = TREE_OPERAND (tmp1, 0);
10619 if (CONVERT_EXPR_P (tmp1)
10620 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10621 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10622 <= TYPE_PRECISION (atype)))
10623 tmp1 = TREE_OPERAND (tmp1, 0);
10624 /* The only case we can still associate with two variables
10625 is if they are the same, modulo negation and bit-pattern
10626 preserving conversions. */
10627 if (!operand_equal_p (tmp0, tmp1, 0))
10628 ok = false;
10629 }
10630 }
10631
10632 /* Only do something if we found more than two objects. Otherwise,
10633 nothing has changed and we risk infinite recursion. */
10634 if (ok
10635 && (2 < ((var0 != 0) + (var1 != 0)
10636 + (con0 != 0) + (con1 != 0)
10637 + (lit0 != 0) + (lit1 != 0)
10638 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10639 {
10640 bool any_overflows = false;
10641 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10642 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10643 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10644 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10645 var0 = associate_trees (loc, var0, var1, code, atype);
10646 con0 = associate_trees (loc, con0, con1, code, atype);
10647 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10648 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10649 code, atype);
10650
10651 /* Preserve the MINUS_EXPR if the negative part of the literal is
10652 greater than the positive part. Otherwise, the multiplicative
10653 folding code (i.e. extract_muldiv) may be fooled when unsigned
10654 constants are subtracted, as in the following example:
10655 ((X*2 + 4) - 8U)/2. */
10656 if (minus_lit0 && lit0)
10657 {
10658 if (TREE_CODE (lit0) == INTEGER_CST
10659 && TREE_CODE (minus_lit0) == INTEGER_CST
10660 && tree_int_cst_lt (lit0, minus_lit0))
10661 {
10662 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10663 MINUS_EXPR, atype);
10664 lit0 = 0;
10665 }
10666 else
10667 {
10668 lit0 = associate_trees (loc, lit0, minus_lit0,
10669 MINUS_EXPR, atype);
10670 minus_lit0 = 0;
10671 }
10672 }
10673
10674 /* Don't introduce overflows through reassociation. */
10675 if (!any_overflows
10676 && ((lit0 && TREE_OVERFLOW (lit0))
10677 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10678 return NULL_TREE;
10679
10680 if (minus_lit0)
10681 {
10682 if (con0 == 0)
10683 return
10684 fold_convert_loc (loc, type,
10685 associate_trees (loc, var0, minus_lit0,
10686 MINUS_EXPR, atype));
10687 else
10688 {
10689 con0 = associate_trees (loc, con0, minus_lit0,
10690 MINUS_EXPR, atype);
10691 return
10692 fold_convert_loc (loc, type,
10693 associate_trees (loc, var0, con0,
10694 PLUS_EXPR, atype));
10695 }
10696 }
10697
10698 con0 = associate_trees (loc, con0, lit0, code, atype);
10699 return
10700 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10701 code, atype));
10702 }
10703 }
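/* A sketch of the association above: for (x + 3) + (y + 5),
   split_tree yields variables x, y and literals 3, 5; regrouping
   produces (x + y) + 8, with both literals folded together.  */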
10704
10705 return NULL_TREE;
10706
10707 case MINUS_EXPR:
10708 /* Pointer simplifications for subtraction, simple reassociations. */
10709 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10710 {
10711 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10712 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10713 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10714 {
10715 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10716 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10717 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10718 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10719 return fold_build2_loc (loc, PLUS_EXPR, type,
10720 fold_build2_loc (loc, MINUS_EXPR, type,
10721 arg00, arg10),
10722 fold_build2_loc (loc, MINUS_EXPR, type,
10723 arg01, arg11));
10724 }
10725 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10726 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10727 {
10728 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10729 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10730 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10731 fold_convert_loc (loc, type, arg1));
10732 if (tmp)
10733 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10734 }
10735 }
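/* E.g., (p p+ i) - (p p+ j) becomes (p - p) + (i - j); the
   pointer difference then folds to zero, leaving just i - j.  */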
10736 /* A - (-B) -> A + B */
10737 if (TREE_CODE (arg1) == NEGATE_EXPR)
10738 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10739 fold_convert_loc (loc, type,
10740 TREE_OPERAND (arg1, 0)));
10741 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10742 if (TREE_CODE (arg0) == NEGATE_EXPR
10743 && negate_expr_p (arg1)
10744 && reorder_operands_p (arg0, arg1))
10745 return fold_build2_loc (loc, MINUS_EXPR, type,
10746 fold_convert_loc (loc, type,
10747 negate_expr (arg1)),
10748 fold_convert_loc (loc, type,
10749 TREE_OPERAND (arg0, 0)));
10750 /* Convert -A - 1 to ~A. */
10751 if (TREE_CODE (type) != COMPLEX_TYPE
10752 && TREE_CODE (arg0) == NEGATE_EXPR
10753 && integer_onep (arg1)
10754 && !TYPE_OVERFLOW_TRAPS (type))
10755 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10756 fold_convert_loc (loc, type,
10757 TREE_OPERAND (arg0, 0)));
10758
10759 /* Convert -1 - A to ~A. */
10760 if (TREE_CODE (type) != COMPLEX_TYPE
10761 && integer_all_onesp (arg0))
10762 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10763
10764
10765 /* X - (X / Y) * Y is X % Y. */
10766 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10767 && TREE_CODE (arg1) == MULT_EXPR
10768 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10769 && operand_equal_p (arg0,
10770 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10771 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10772 TREE_OPERAND (arg1, 1), 0))
10773 return
10774 fold_convert_loc (loc, type,
10775 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10776 arg0, TREE_OPERAND (arg1, 1)));
10777
10778 if (! FLOAT_TYPE_P (type))
10779 {
10780 if (integer_zerop (arg0))
10781 return negate_expr (fold_convert_loc (loc, type, arg1));
10782 if (integer_zerop (arg1))
10783 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10784
10785 /* Fold A - (A & B) into ~B & A. */
10786 if (!TREE_SIDE_EFFECTS (arg0)
10787 && TREE_CODE (arg1) == BIT_AND_EXPR)
10788 {
10789 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10790 {
10791 tree arg10 = fold_convert_loc (loc, type,
10792 TREE_OPERAND (arg1, 0));
10793 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10794 fold_build1_loc (loc, BIT_NOT_EXPR,
10795 type, arg10),
10796 fold_convert_loc (loc, type, arg0));
10797 }
10798 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10799 {
10800 tree arg11 = fold_convert_loc (loc,
10801 type, TREE_OPERAND (arg1, 1));
10802 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10803 fold_build1_loc (loc, BIT_NOT_EXPR,
10804 type, arg11),
10805 fold_convert_loc (loc, type, arg0));
10806 }
10807 }
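/* E.g., assuming integral a, a - (a & mask) becomes ~mask & a,
   clearing exactly the bits that the BIT_AND_EXPR kept.  */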
10808
10809 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10810 any power of 2 minus 1. */
10811 if (TREE_CODE (arg0) == BIT_AND_EXPR
10812 && TREE_CODE (arg1) == BIT_AND_EXPR
10813 && operand_equal_p (TREE_OPERAND (arg0, 0),
10814 TREE_OPERAND (arg1, 0), 0))
10815 {
10816 tree mask0 = TREE_OPERAND (arg0, 1);
10817 tree mask1 = TREE_OPERAND (arg1, 1);
10818 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10819
10820 if (operand_equal_p (tem, mask1, 0))
10821 {
10822 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10823 TREE_OPERAND (arg0, 0), mask1);
10824 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10825 }
10826 }
10827 }
10828
10829 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10830 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10831 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10832
10833 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10834 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10835 (-ARG1 + ARG0) reduces to -ARG1. */
10836 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10837 return negate_expr (fold_convert_loc (loc, type, arg1));
10838
10839 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10840 __complex__ ( x, -y ). This is not the same for SNaNs or if
10841 signed zeros are involved. */
10842 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10843 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10844 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10845 {
10846 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10847 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10848 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10849 bool arg0rz = false, arg0iz = false;
10850 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10851 || (arg0i && (arg0iz = real_zerop (arg0i))))
10852 {
10853 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10854 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10855 if (arg0rz && arg1i && real_zerop (arg1i))
10856 {
10857 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10858 arg1r ? arg1r
10859 : build1 (REALPART_EXPR, rtype, arg1));
10860 tree ip = arg0i ? arg0i
10861 : build1 (IMAGPART_EXPR, rtype, arg0);
10862 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10863 }
10864 else if (arg0iz && arg1r && real_zerop (arg1r))
10865 {
10866 tree rp = arg0r ? arg0r
10867 : build1 (REALPART_EXPR, rtype, arg0);
10868 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10869 arg1i ? arg1i
10870 : build1 (IMAGPART_EXPR, rtype, arg1));
10871 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10872 }
10873 }
10874 }
10875
10876 /* Fold &x - &x. This can happen from &x.foo - &x.
10877 This is unsafe for certain floats even in non-IEEE formats.
10878 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10879 Also note that operand_equal_p is always false if an operand
10880 is volatile. */
10881
10882 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10883 && operand_equal_p (arg0, arg1, 0))
10884 return build_zero_cst (type);
10885
10886 /* A - B -> A + (-B) if B is easily negatable. */
10887 if (negate_expr_p (arg1)
10888 && ((FLOAT_TYPE_P (type)
10889 /* Avoid this transformation if B is a positive REAL_CST. */
10890 && (TREE_CODE (arg1) != REAL_CST
10891 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10892 || INTEGRAL_TYPE_P (type)))
10893 return fold_build2_loc (loc, PLUS_EXPR, type,
10894 fold_convert_loc (loc, type, arg0),
10895 fold_convert_loc (loc, type,
10896 negate_expr (arg1)));
10897
10898 /* Try folding difference of addresses. */
10899 {
10900 HOST_WIDE_INT diff;
10901
10902 if ((TREE_CODE (arg0) == ADDR_EXPR
10903 || TREE_CODE (arg1) == ADDR_EXPR)
10904 && ptr_difference_const (arg0, arg1, &diff))
10905 return build_int_cst_type (type, diff);
10906 }
10907
10908 /* Fold &a[i] - &a[j] to i-j. */
10909 if (TREE_CODE (arg0) == ADDR_EXPR
10910 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10911 && TREE_CODE (arg1) == ADDR_EXPR
10912 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10913 {
10914 tree tem = fold_addr_of_array_ref_difference (loc, type,
10915 TREE_OPERAND (arg0, 0),
10916 TREE_OPERAND (arg1, 0));
10917 if (tem)
10918 return tem;
10919 }
10920
10921 if (FLOAT_TYPE_P (type)
10922 && flag_unsafe_math_optimizations
10923 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10924 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10925 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10926 return tem;
10927
10928 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10929 one. Make sure the type is not saturating and has the signedness of
10930 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10931 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10932 if ((TREE_CODE (arg0) == MULT_EXPR
10933 || TREE_CODE (arg1) == MULT_EXPR)
10934 && !TYPE_SATURATING (type)
10935 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10936 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10937 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10938 {
10939 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10940 if (tem)
10941 return tem;
10942 }
10943
10944 goto associate;
10945
10946 case MULT_EXPR:
10947 /* (-A) * (-B) -> A * B */
10948 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10949 return fold_build2_loc (loc, MULT_EXPR, type,
10950 fold_convert_loc (loc, type,
10951 TREE_OPERAND (arg0, 0)),
10952 fold_convert_loc (loc, type,
10953 negate_expr (arg1)));
10954 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10955 return fold_build2_loc (loc, MULT_EXPR, type,
10956 fold_convert_loc (loc, type,
10957 negate_expr (arg0)),
10958 fold_convert_loc (loc, type,
10959 TREE_OPERAND (arg1, 0)));
10960
10961 if (! FLOAT_TYPE_P (type))
10962 {
10963 if (integer_zerop (arg1))
10964 return omit_one_operand_loc (loc, type, arg1, arg0);
10965 if (integer_onep (arg1))
10966 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10967 /* Transform x * -1 into -x. Make sure to do the negation
10968 on the original operand with conversions not stripped
10969 because we can only strip non-sign-changing conversions. */
10970 if (integer_minus_onep (arg1))
10971 return fold_convert_loc (loc, type, negate_expr (op0));
10972 /* Transform x * -C into -x * C if x is easily negatable. */
10973 if (TREE_CODE (arg1) == INTEGER_CST
10974 && tree_int_cst_sgn (arg1) == -1
10975 && negate_expr_p (arg0)
10976 && (tem = negate_expr (arg1)) != arg1
10977 && !TREE_OVERFLOW (tem))
10978 return fold_build2_loc (loc, MULT_EXPR, type,
10979 fold_convert_loc (loc, type,
10980 negate_expr (arg0)),
10981 tem);
10982
10983 /* (a * (1 << b)) is (a << b) */
10984 if (TREE_CODE (arg1) == LSHIFT_EXPR
10985 && integer_onep (TREE_OPERAND (arg1, 0)))
10986 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10987 TREE_OPERAND (arg1, 1));
10988 if (TREE_CODE (arg0) == LSHIFT_EXPR
10989 && integer_onep (TREE_OPERAND (arg0, 0)))
10990 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10991 TREE_OPERAND (arg0, 1));
10992
10993 /* (A + A) * C -> A * 2 * C */
10994 if (TREE_CODE (arg0) == PLUS_EXPR
10995 && TREE_CODE (arg1) == INTEGER_CST
10996 && operand_equal_p (TREE_OPERAND (arg0, 0),
10997 TREE_OPERAND (arg0, 1), 0))
10998 return fold_build2_loc (loc, MULT_EXPR, type,
10999 omit_one_operand_loc (loc, type,
11000 TREE_OPERAND (arg0, 0),
11001 TREE_OPERAND (arg0, 1)),
11002 fold_build2_loc (loc, MULT_EXPR, type,
11003 build_int_cst (type, 2), arg1));
11004
11005 strict_overflow_p = false;
11006 if (TREE_CODE (arg1) == INTEGER_CST
11007 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11008 &strict_overflow_p)))
11009 {
11010 if (strict_overflow_p)
11011 fold_overflow_warning (("assuming signed overflow does not "
11012 "occur when simplifying "
11013 "multiplication"),
11014 WARN_STRICT_OVERFLOW_MISC);
11015 return fold_convert_loc (loc, type, tem);
11016 }
11017
11018 /* Optimize z * conj(z) for integer complex numbers. */
11019 if (TREE_CODE (arg0) == CONJ_EXPR
11020 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11021 return fold_mult_zconjz (loc, type, arg1);
11022 if (TREE_CODE (arg1) == CONJ_EXPR
11023 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11024 return fold_mult_zconjz (loc, type, arg0);
11025 }
11026 else
11027 {
11028 /* Maybe fold x * 0 to 0. The expressions aren't the same
11029 when x is NaN, since x * 0 is also NaN. Nor are they the
11030 same in modes with signed zeros, since multiplying a
11031 negative value by 0 gives -0, not +0. */
11032 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11033 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11034 && real_zerop (arg1))
11035 return omit_one_operand_loc (loc, type, arg1, arg0);
11036 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
11037 Likewise for complex arithmetic with signed zeros. */
11038 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11039 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11040 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11041 && real_onep (arg1))
11042 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11043
11044 /* Transform x * -1.0 into -x. */
11045 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11046 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11047 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11048 && real_minus_onep (arg1))
11049 return fold_convert_loc (loc, type, negate_expr (arg0));
11050
11051 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11052 the result for floating point types due to rounding, so it is applied
11053 only if -fassociative-math was specified. */
11054 if (flag_associative_math
11055 && TREE_CODE (arg0) == RDIV_EXPR
11056 && TREE_CODE (arg1) == REAL_CST
11057 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11058 {
11059 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11060 arg1);
11061 if (tem)
11062 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11063 TREE_OPERAND (arg0, 1));
11064 }
11065
11066 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11067 if (operand_equal_p (arg0, arg1, 0))
11068 {
11069 tree tem = fold_strip_sign_ops (arg0);
11070 if (tem != NULL_TREE)
11071 {
11072 tem = fold_convert_loc (loc, type, tem);
11073 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11074 }
11075 }
11076
11077 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11078 This is not the same for NaNs or if signed zeros are
11079 involved. */
11080 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11081 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11082 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11083 && TREE_CODE (arg1) == COMPLEX_CST
11084 && real_zerop (TREE_REALPART (arg1)))
11085 {
11086 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11087 if (real_onep (TREE_IMAGPART (arg1)))
11088 return
11089 fold_build2_loc (loc, COMPLEX_EXPR, type,
11090 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11091 rtype, arg0)),
11092 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11093 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11094 return
11095 fold_build2_loc (loc, COMPLEX_EXPR, type,
11096 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11097 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11098 rtype, arg0)));
11099 }
11100
11101 /* Optimize z * conj(z) for floating point complex numbers.
11102 Guarded by flag_unsafe_math_optimizations as non-finite
11103 imaginary components don't produce scalar results. */
11104 if (flag_unsafe_math_optimizations
11105 && TREE_CODE (arg0) == CONJ_EXPR
11106 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11107 return fold_mult_zconjz (loc, type, arg1);
11108 if (flag_unsafe_math_optimizations
11109 && TREE_CODE (arg1) == CONJ_EXPR
11110 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11111 return fold_mult_zconjz (loc, type, arg0);
11112
11113 if (flag_unsafe_math_optimizations)
11114 {
11115 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11116 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11117
11118 /* Optimizations of root(...)*root(...). */
11119 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11120 {
11121 tree rootfn, arg;
11122 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11123 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11124
11125 /* Optimize sqrt(x)*sqrt(x) as x. */
11126 if (BUILTIN_SQRT_P (fcode0)
11127 && operand_equal_p (arg00, arg10, 0)
11128 && ! HONOR_SNANS (TYPE_MODE (type)))
11129 return arg00;
11130
11131 /* Optimize root(x)*root(y) as root(x*y). */
11132 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11133 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11134 return build_call_expr_loc (loc, rootfn, 1, arg);
11135 }
11136
11137 /* Optimize expN(x)*expN(y) as expN(x+y). */
11138 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11139 {
11140 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11141 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11142 CALL_EXPR_ARG (arg0, 0),
11143 CALL_EXPR_ARG (arg1, 0));
11144 return build_call_expr_loc (loc, expfn, 1, arg);
11145 }
11146
11147 /* Optimizations of pow(...)*pow(...). */
11148 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11149 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11150 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11151 {
11152 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11153 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11154 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11155 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11156
11157 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11158 if (operand_equal_p (arg01, arg11, 0))
11159 {
11160 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11161 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11162 arg00, arg10);
11163 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11164 }
11165
11166 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11167 if (operand_equal_p (arg00, arg10, 0))
11168 {
11169 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11170 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11171 arg01, arg11);
11172 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11173 }
11174 }
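/* E.g., sqrt(x)*sqrt(y) becomes sqrt(x*y), exp(x)*exp(y)
   becomes exp(x+y), and pow(x,y)*pow(x,z) becomes pow(x,y+z);
   all of these rely on -funsafe-math-optimizations.  */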
11175
11176 /* Optimize tan(x)*cos(x) as sin(x). */
11177 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11178 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11179 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11180 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11181 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11182 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11183 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11184 CALL_EXPR_ARG (arg1, 0), 0))
11185 {
11186 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11187
11188 if (sinfn != NULL_TREE)
11189 return build_call_expr_loc (loc, sinfn, 1,
11190 CALL_EXPR_ARG (arg0, 0));
11191 }
11192
11193 /* Optimize x*pow(x,c) as pow(x,c+1). */
11194 if (fcode1 == BUILT_IN_POW
11195 || fcode1 == BUILT_IN_POWF
11196 || fcode1 == BUILT_IN_POWL)
11197 {
11198 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11199 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11200 if (TREE_CODE (arg11) == REAL_CST
11201 && !TREE_OVERFLOW (arg11)
11202 && operand_equal_p (arg0, arg10, 0))
11203 {
11204 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11205 REAL_VALUE_TYPE c;
11206 tree arg;
11207
11208 c = TREE_REAL_CST (arg11);
11209 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11210 arg = build_real (type, c);
11211 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11212 }
11213 }
11214
11215 /* Optimize pow(x,c)*x as pow(x,c+1). */
11216 if (fcode0 == BUILT_IN_POW
11217 || fcode0 == BUILT_IN_POWF
11218 || fcode0 == BUILT_IN_POWL)
11219 {
11220 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11221 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11222 if (TREE_CODE (arg01) == REAL_CST
11223 && !TREE_OVERFLOW (arg01)
11224 && operand_equal_p (arg1, arg00, 0))
11225 {
11226 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11227 REAL_VALUE_TYPE c;
11228 tree arg;
11229
11230 c = TREE_REAL_CST (arg01);
11231 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11232 arg = build_real (type, c);
11233 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11234 }
11235 }
11236
11237 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11238 if (!in_gimple_form
11239 && optimize
11240 && operand_equal_p (arg0, arg1, 0))
11241 {
11242 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11243
11244 if (powfn)
11245 {
11246 tree arg = build_real (type, dconst2);
11247 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11248 }
11249 }
11250 }
11251 }
11252 goto associate;
11253
11254 case BIT_IOR_EXPR:
11255 bit_ior:
11256 if (integer_all_onesp (arg1))
11257 return omit_one_operand_loc (loc, type, arg1, arg0);
11258 if (integer_zerop (arg1))
11259 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11260 if (operand_equal_p (arg0, arg1, 0))
11261 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11262
11263 /* ~X | X is -1. */
11264 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11265 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11266 {
11267 t1 = build_zero_cst (type);
11268 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11269 return omit_one_operand_loc (loc, type, t1, arg1);
11270 }
11271
11272 /* X | ~X is -1. */
11273 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11274 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11275 {
11276 t1 = build_zero_cst (type);
11277 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11278 return omit_one_operand_loc (loc, type, t1, arg0);
11279 }
11280
11281 /* Canonicalize (X & C1) | C2. */
11282 if (TREE_CODE (arg0) == BIT_AND_EXPR
11283 && TREE_CODE (arg1) == INTEGER_CST
11284 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11285 {
11286 double_int c1, c2, c3, msk;
11287 int width = TYPE_PRECISION (type), w;
11288 bool try_simplify = true;
11289
11290 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11291 c2 = tree_to_double_int (arg1);
11292
11293 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11294 if ((c1 & c2) == c1)
11295 return omit_one_operand_loc (loc, type, arg1,
11296 TREE_OPERAND (arg0, 0));
11297
11298 msk = double_int::mask (width);
11299
11300 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11301 if (msk.and_not (c1 | c2).is_zero ())
11302 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11303 TREE_OPERAND (arg0, 0), arg1);
11304
11305 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11306 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11307 mode which allows further optimizations. */
11308 c1 &= msk;
11309 c2 &= msk;
11310 c3 = c1.and_not (c2);
11311 for (w = BITS_PER_UNIT;
11312 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11313 w <<= 1)
11314 {
11315 unsigned HOST_WIDE_INT mask
11316 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11317 if (((c1.low | c2.low) & mask) == mask
11318 && (c1.low & ~mask) == 0 && c1.high == 0)
11319 {
11320 c3 = double_int::from_uhwi (mask);
11321 break;
11322 }
11323 }
11324
11325 /* If X is a tree of the form (Y * K1) & K2, this might conflict
11326 with the (X * CST1) & CST2 optimization in the BIT_AND_EXPR case
11327 and could end up in infinite recursion. */
11328 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11329 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11330 == INTEGER_CST)
11331 {
11332 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11333 double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
11334
11335 try_simplify = (masked != c1);
11336 }
11337
11338 if (try_simplify && c3 != c1)
11339 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11340 fold_build2_loc (loc, BIT_AND_EXPR, type,
11341 TREE_OPERAND (arg0, 0),
11342 double_int_to_tree (type,
11343 c3)),
11344 arg1);
11345 }
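/* A worked case: (x & 3) | 5 has C1 = 3 and C2 = 5, so
   C3 = C1 & ~C2 = 2 and the result is canonicalized to
   (x & 2) | 5.  */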
11346
11347 /* (X & Y) | Y is (X, Y). */
11348 if (TREE_CODE (arg0) == BIT_AND_EXPR
11349 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11350 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11351 /* (X & Y) | X is (Y, X). */
11352 if (TREE_CODE (arg0) == BIT_AND_EXPR
11353 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11354 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11355 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11356 /* X | (X & Y) is (Y, X). */
11357 if (TREE_CODE (arg1) == BIT_AND_EXPR
11358 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11359 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11360 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11361 /* X | (Y & X) is (Y, X). */
11362 if (TREE_CODE (arg1) == BIT_AND_EXPR
11363 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11364 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11365 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11366
11367 /* (X & ~Y) | (~X & Y) is X ^ Y */
11368 if (TREE_CODE (arg0) == BIT_AND_EXPR
11369 && TREE_CODE (arg1) == BIT_AND_EXPR)
11370 {
11371 tree a0, a1, l0, l1, n0, n1;
11372
11373 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11374 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11375
11376 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11377 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11378
11379 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11380 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11381
11382 if ((operand_equal_p (n0, a0, 0)
11383 && operand_equal_p (n1, a1, 0))
11384 || (operand_equal_p (n0, a1, 0)
11385 && operand_equal_p (n1, a0, 0)))
11386 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11387 }
11388
11389 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11390 if (t1 != NULL_TREE)
11391 return t1;
11392
11393 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11394
11395 This results in more efficient code for machines without a NAND
11396 instruction. Combine will canonicalize to the first form
11397 which will allow use of NAND instructions provided by the
11398 backend if they exist. */
11399 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11400 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11401 {
11402 return
11403 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11404 build2 (BIT_AND_EXPR, type,
11405 fold_convert_loc (loc, type,
11406 TREE_OPERAND (arg0, 0)),
11407 fold_convert_loc (loc, type,
11408 TREE_OPERAND (arg1, 0))));
11409 }
11410
11411 /* See if this can be simplified into a rotate first. If that
11412 is unsuccessful, continue in the association code. */
11413 goto bit_rotate;
11414
11415 case BIT_XOR_EXPR:
11416 if (integer_zerop (arg1))
11417 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11418 if (integer_all_onesp (arg1))
11419 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11420 if (operand_equal_p (arg0, arg1, 0))
11421 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11422
11423 /* ~X ^ X is -1. */
11424 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11425 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11426 {
11427 t1 = build_zero_cst (type);
11428 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11429 return omit_one_operand_loc (loc, type, t1, arg1);
11430 }
11431
11432 /* X ^ ~X is -1. */
11433 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11434 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11435 {
11436 t1 = build_zero_cst (type);
11437 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11438 return omit_one_operand_loc (loc, type, t1, arg0);
11439 }
11440
11441 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11442 with a constant, and the two constants have no bits in common,
11443 we should treat this as a BIT_IOR_EXPR since this may produce more
11444 simplifications. */
11445 if (TREE_CODE (arg0) == BIT_AND_EXPR
11446 && TREE_CODE (arg1) == BIT_AND_EXPR
11447 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11448 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11449 && integer_zerop (const_binop (BIT_AND_EXPR,
11450 TREE_OPERAND (arg0, 1),
11451 TREE_OPERAND (arg1, 1))))
11452 {
11453 code = BIT_IOR_EXPR;
11454 goto bit_ior;
11455 }
11456
11457 /* (X | Y) ^ X -> Y & ~X. */
11458 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11459 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11460 {
11461 tree t2 = TREE_OPERAND (arg0, 1);
11462 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11463 arg1);
11464 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11465 fold_convert_loc (loc, type, t2),
11466 fold_convert_loc (loc, type, t1));
11467 return t1;
11468 }
11469
11470 /* (Y | X) ^ X -> Y & ~X. */
11471 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11472 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11473 {
11474 tree t2 = TREE_OPERAND (arg0, 0);
11475 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11476 arg1);
11477 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11478 fold_convert_loc (loc, type, t2),
11479 fold_convert_loc (loc, type, t1));
11480 return t1;
11481 }
11482
11483 /* X ^ (X | Y) -> Y & ~X. */
11484 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11485 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11486 {
11487 tree t2 = TREE_OPERAND (arg1, 1);
11488 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11489 arg0);
11490 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11491 fold_convert_loc (loc, type, t2),
11492 fold_convert_loc (loc, type, t1));
11493 return t1;
11494 }
11495
11496 /* X ^ (Y | X) -> Y & ~X. */
11497 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11498 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11499 {
11500 tree t2 = TREE_OPERAND (arg1, 0);
11501 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11502 arg0);
11503 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11504 fold_convert_loc (loc, type, t2),
11505 fold_convert_loc (loc, type, t1));
11506 return t1;
11507 }
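/* E.g., (x | y) ^ x becomes y & ~x: the bits of x cancel in the
   XOR, and of what remains only the bits contributed by y
   survive.  */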
11508
11509 /* Convert ~X ^ ~Y to X ^ Y. */
11510 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11511 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11512 return fold_build2_loc (loc, code, type,
11513 fold_convert_loc (loc, type,
11514 TREE_OPERAND (arg0, 0)),
11515 fold_convert_loc (loc, type,
11516 TREE_OPERAND (arg1, 0)));
11517
11518 /* Convert ~X ^ C to X ^ ~C. */
11519 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11520 && TREE_CODE (arg1) == INTEGER_CST)
11521 return fold_build2_loc (loc, code, type,
11522 fold_convert_loc (loc, type,
11523 TREE_OPERAND (arg0, 0)),
11524 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11525
11526 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11527 if (TREE_CODE (arg0) == BIT_AND_EXPR
11528 && integer_onep (TREE_OPERAND (arg0, 1))
11529 && integer_onep (arg1))
11530 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11531 build_zero_cst (TREE_TYPE (arg0)));
11532
11533 /* Fold (X & Y) ^ Y as ~X & Y. */
11534 if (TREE_CODE (arg0) == BIT_AND_EXPR
11535 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11536 {
11537 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11538 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11539 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11540 fold_convert_loc (loc, type, arg1));
11541 }
11542 /* Fold (X & Y) ^ X as ~Y & X. */
11543 if (TREE_CODE (arg0) == BIT_AND_EXPR
11544 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11545 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11546 {
11547 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11548 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11549 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11550 fold_convert_loc (loc, type, arg1));
11551 }
11552 /* Fold X ^ (X & Y) as X & ~Y. */
11553 if (TREE_CODE (arg1) == BIT_AND_EXPR
11554 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11555 {
11556 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11557 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11558 fold_convert_loc (loc, type, arg0),
11559 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11560 }
11561 /* Fold X ^ (Y & X) as ~Y & X. */
11562 if (TREE_CODE (arg1) == BIT_AND_EXPR
11563 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11564 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11565 {
11566 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11567 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11568 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11569 fold_convert_loc (loc, type, arg0));
11570 }
11571
11572 /* See if this can be simplified into a rotate first. If that
11573 is unsuccessful, continue in the association code. */
11574 goto bit_rotate;
11575
11576 case BIT_AND_EXPR:
11577 if (integer_all_onesp (arg1))
11578 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11579 if (integer_zerop (arg1))
11580 return omit_one_operand_loc (loc, type, arg1, arg0);
11581 if (operand_equal_p (arg0, arg1, 0))
11582 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11583
11584 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11585 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11586 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11587 || (TREE_CODE (arg0) == EQ_EXPR
11588 && integer_zerop (TREE_OPERAND (arg0, 1))))
11589 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11590 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11591
11592 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11593 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11594 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11595 || (TREE_CODE (arg1) == EQ_EXPR
11596 && integer_zerop (TREE_OPERAND (arg1, 1))))
11597 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11598 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11599
11600 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11601 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11602 && TREE_CODE (arg1) == INTEGER_CST
11603 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11604 {
11605 tree tmp1 = fold_convert_loc (loc, type, arg1);
11606 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11607 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11608 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11609 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11610 return
11611 fold_convert_loc (loc, type,
11612 fold_build2_loc (loc, BIT_IOR_EXPR,
11613 type, tmp2, tmp3));
11614 }
11615
11616 /* (X | Y) & Y is (X, Y). */
11617 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11618 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11619 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11620 /* (X | Y) & X is (Y, X). */
11621 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11622 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11623 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11624 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11625 /* X & (X | Y) is (Y, X). */
11626 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11627 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11628 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11629 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11630 /* X & (Y | X) is (Y, X). */
11631 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11632 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11633 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11634 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11635
11636 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11637 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11638 && integer_onep (TREE_OPERAND (arg0, 1))
11639 && integer_onep (arg1))
11640 {
11641 tree tem2;
11642 tem = TREE_OPERAND (arg0, 0);
11643 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11644 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11645 tem, tem2);
11646 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11647 build_zero_cst (TREE_TYPE (tem)));
11648 }
11649 /* Fold ~X & 1 as (X & 1) == 0. */
11650 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11651 && integer_onep (arg1))
11652 {
11653 tree tem2;
11654 tem = TREE_OPERAND (arg0, 0);
11655 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11656 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11657 tem, tem2);
11658 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11659 build_zero_cst (TREE_TYPE (tem)));
11660 }
11661 /* Fold !X & 1 as X == 0. */
11662 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11663 && integer_onep (arg1))
11664 {
11665 tem = TREE_OPERAND (arg0, 0);
11666 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11667 build_zero_cst (TREE_TYPE (tem)));
11668 }
11669
11670 /* Fold (X ^ Y) & Y as ~X & Y. */
11671 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11672 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11673 {
11674 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11675 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11676 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11677 fold_convert_loc (loc, type, arg1));
11678 }
11679 /* Fold (X ^ Y) & X as ~Y & X. */
11680 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11681 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11682 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11683 {
11684 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11685 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11686 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11687 fold_convert_loc (loc, type, arg1));
11688 }
11689 /* Fold X & (X ^ Y) as X & ~Y. */
11690 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11691 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11692 {
11693 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11694 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11695 fold_convert_loc (loc, type, arg0),
11696 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11697 }
11698 /* Fold X & (Y ^ X) as ~Y & X. */
11699 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11700 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11701 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11702 {
11703 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11704 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11705 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11706 fold_convert_loc (loc, type, arg0));
11707 }
11708
11709 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11710 multiple of 1 << CST. */
11711 if (TREE_CODE (arg1) == INTEGER_CST)
11712 {
11713 double_int cst1 = tree_to_double_int (arg1);
11714 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11715 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11716 if ((cst1 & ncst1) == ncst1
11717 && multiple_of_p (type, arg0,
11718 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11719 return fold_convert_loc (loc, type, arg0);
11720 }
11721
11722 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11723 bits from CST2. */
11724 if (TREE_CODE (arg1) == INTEGER_CST
11725 && TREE_CODE (arg0) == MULT_EXPR
11726 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11727 {
11728 double_int masked
11729 = mask_with_tz (type, tree_to_double_int (arg1),
11730 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11731
11732 if (masked.is_zero ())
11733 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11734 arg0, arg1);
11735 else if (masked != tree_to_double_int (arg1))
11736 return fold_build2_loc (loc, code, type, op0,
11737 double_int_to_tree (type, masked));
11738 }
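/* E.g., (x * 4) & 3 folds to 0 because x * 4 has its low two
   bits clear, while (x * 4) & 7 drops the known-zero bits and
   becomes (x * 4) & 4.  */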
11739
11740 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11741 ((A & N) + B) & M -> (A + B) & M
11742 Similarly if (N & M) == 0,
11743 ((A | N) + B) & M -> (A + B) & M
11744 and for - instead of + (or unary - instead of +)
11745 and/or ^ instead of |.
11746 If B is constant and (B & M) == 0, fold into A & M. */
11747 if (host_integerp (arg1, 1))
11748 {
11749 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11750 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11751 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11752 && (TREE_CODE (arg0) == PLUS_EXPR
11753 || TREE_CODE (arg0) == MINUS_EXPR
11754 || TREE_CODE (arg0) == NEGATE_EXPR)
11755 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11756 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11757 {
11758 tree pmop[2];
11759 int which = 0;
11760 unsigned HOST_WIDE_INT cst0;
11761
11762 /* Now we know that arg0 is (C + D) or (C - D) or
11763 -C and arg1 (M) is == (1LL << cst) - 1.
11764 Store C into PMOP[0] and D into PMOP[1]. */
11765 pmop[0] = TREE_OPERAND (arg0, 0);
11766 pmop[1] = NULL;
11767 if (TREE_CODE (arg0) != NEGATE_EXPR)
11768 {
11769 pmop[1] = TREE_OPERAND (arg0, 1);
11770 which = 1;
11771 }
11772
11773 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11774 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11775 & cst1) != cst1)
11776 which = -1;
11777
11778 for (; which >= 0; which--)
11779 switch (TREE_CODE (pmop[which]))
11780 {
11781 case BIT_AND_EXPR:
11782 case BIT_IOR_EXPR:
11783 case BIT_XOR_EXPR:
11784 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11785 != INTEGER_CST)
11786 break;
11787 /* tree_low_cst not used, because we don't care about
11788 the upper bits. */
11789 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11790 cst0 &= cst1;
11791 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11792 {
11793 if (cst0 != cst1)
11794 break;
11795 }
11796 else if (cst0 != 0)
11797 break;
11798 /* If C or D is of the form (A & N) where
11799 (N & M) == M, or of the form (A | N) or
11800 (A ^ N) where (N & M) == 0, replace it with A. */
11801 pmop[which] = TREE_OPERAND (pmop[which], 0);
11802 break;
11803 case INTEGER_CST:
11804 /* If C or D is a constant N where (N & M) == 0, it can be
11805 omitted (assumed 0). */
11806 if ((TREE_CODE (arg0) == PLUS_EXPR
11807 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11808 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11809 pmop[which] = NULL;
11810 break;
11811 default:
11812 break;
11813 }
11814
11815 /* Only build anything new if we optimized one or both arguments
11816 above. */
11817 if (pmop[0] != TREE_OPERAND (arg0, 0)
11818 || (TREE_CODE (arg0) != NEGATE_EXPR
11819 && pmop[1] != TREE_OPERAND (arg0, 1)))
11820 {
11821 tree utype = TREE_TYPE (arg0);
11822 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11823 {
11824 /* Perform the operations in a type that has defined
11825 overflow behavior. */
11826 utype = unsigned_type_for (TREE_TYPE (arg0));
11827 if (pmop[0] != NULL)
11828 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11829 if (pmop[1] != NULL)
11830 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11831 }
11832
11833 if (TREE_CODE (arg0) == NEGATE_EXPR)
11834 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11835 else if (TREE_CODE (arg0) == PLUS_EXPR)
11836 {
11837 if (pmop[0] != NULL && pmop[1] != NULL)
11838 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11839 pmop[0], pmop[1]);
11840 else if (pmop[0] != NULL)
11841 tem = pmop[0];
11842 else if (pmop[1] != NULL)
11843 tem = pmop[1];
11844 else
11845 return build_int_cst (type, 0);
11846 }
11847 else if (pmop[0] == NULL)
11848 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11849 else
11850 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11851 pmop[0], pmop[1]);
11852 /* TEM is now the new binary +, - or unary - replacement. */
11853 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11854 fold_convert_loc (loc, utype, arg1));
11855 return fold_convert_loc (loc, type, tem);
11856 }
11857 }
11858 }
11859
11860 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11861 if (t1 != NULL_TREE)
11862 return t1;
11863 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11864 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11865 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11866 {
11867 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11868
11869 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11870 && (~TREE_INT_CST_LOW (arg1)
11871 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11872 return
11873 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11874 }
11875
11876 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11877
11878 This results in more efficient code for machines without a NOR
11879 instruction. Combine will canonicalize to the first form
11880 which will allow use of NOR instructions provided by the
11881 backend if they exist. */
11882 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11883 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11884 {
11885 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11886 build2 (BIT_IOR_EXPR, type,
11887 fold_convert_loc (loc, type,
11888 TREE_OPERAND (arg0, 0)),
11889 fold_convert_loc (loc, type,
11890 TREE_OPERAND (arg1, 0))));
11891 }
11892
11893 /* If arg0 is derived from the address of an object or function, we may
11894 be able to fold this expression using the object or function's
11895 alignment. */
11896 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11897 {
11898 unsigned HOST_WIDE_INT modulus, residue;
11899 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11900
11901 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11902 integer_onep (arg1));
11903
11904 /* This works because modulus is a power of 2. If this weren't the
11905 case, we'd have to replace it by its greatest power-of-2
11906 divisor: modulus & -modulus. */
11907 if (low < modulus)
11908 return build_int_cst (type, residue & low);
11909 }
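/* E.g. if ARG0 is the address of an object known to be 16-byte
   aligned, modulus is 16 and residue is 0, so ANDing with 15
   folds to 0.  */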
11910
11911 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11912 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11913 if the new mask might be further optimized. */
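/* E.g. for 32-bit unsigned X, in (X >> 24) & 0xff00 only the low
   eight bits of the shift result can be nonzero, so the mask covers
   no live bits and the whole expression is (X, 0).  */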
11914 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11915 || TREE_CODE (arg0) == RSHIFT_EXPR)
11916 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11917 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11918 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11919 < TYPE_PRECISION (TREE_TYPE (arg0))
11920 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11921 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11922 {
11923 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11924 unsigned HOST_WIDE_INT mask
11925 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11926 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11927 tree shift_type = TREE_TYPE (arg0);
11928
11929 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11930 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11931 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11932 && TYPE_PRECISION (TREE_TYPE (arg0))
11933 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11934 {
11935 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11936 tree arg00 = TREE_OPERAND (arg0, 0);
11937 /* See if more bits can be proven as zero because of
11938 zero extension. */
11939 if (TREE_CODE (arg00) == NOP_EXPR
11940 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11941 {
11942 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11943 if (TYPE_PRECISION (inner_type)
11944 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11945 && TYPE_PRECISION (inner_type) < prec)
11946 {
11947 prec = TYPE_PRECISION (inner_type);
11948 /* See if we can shorten the right shift. */
11949 if (shiftc < prec)
11950 shift_type = inner_type;
11951 }
11952 }
11953 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11954 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11955 zerobits <<= prec - shiftc;
11956 /* For an arithmetic shift, if the sign bit could be set, zerobits
11957 can actually contain sign bits, so no transformation is
11958 possible, unless MASK masks them all away. In that
11959 case the shift needs to be converted into a logical shift. */
11960 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11961 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11962 {
11963 if ((mask & zerobits) == 0)
11964 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11965 else
11966 zerobits = 0;
11967 }
11968 }
11969
11970 /* ((X << 16) & 0xff00) is (X, 0). */
11971 if ((mask & zerobits) == mask)
11972 return omit_one_operand_loc (loc, type,
11973 build_int_cst (type, 0), arg0);
11974
11975 newmask = mask | zerobits;
11976 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11977 {
11978 /* Only do the transformation if NEWMASK is some integer
11979 mode's mask. */
11980 for (prec = BITS_PER_UNIT;
11981 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11982 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11983 break;
11984 if (prec < HOST_BITS_PER_WIDE_INT
11985 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11986 {
11987 tree newmaskt;
11988
11989 if (shift_type != TREE_TYPE (arg0))
11990 {
11991 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11992 fold_convert_loc (loc, shift_type,
11993 TREE_OPERAND (arg0, 0)),
11994 TREE_OPERAND (arg0, 1));
11995 tem = fold_convert_loc (loc, type, tem);
11996 }
11997 else
11998 tem = op0;
11999 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12000 if (!tree_int_cst_equal (newmaskt, arg1))
12001 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12002 }
12003 }
12004 }
12005
12006 goto associate;
12007
12008 case RDIV_EXPR:
12009 /* Don't touch a floating-point divide by zero unless the mode
12010 of the constant can represent infinity. */
12011 if (TREE_CODE (arg1) == REAL_CST
12012 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12013 && real_zerop (arg1))
12014 return NULL_TREE;
12015
12016 /* Optimize A / A to 1.0 if we don't care about
12017 NaNs or Infinities. Skip the transformation
12018 for non-real operands. */
12019 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12020 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12021 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12022 && operand_equal_p (arg0, arg1, 0))
12023 {
12024 tree r = build_real (TREE_TYPE (arg0), dconst1);
12025
12026 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12027 }
12028
12029 /* The complex version of the above A / A optimization. */
12030 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12031 && operand_equal_p (arg0, arg1, 0))
12032 {
12033 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12034 if (! HONOR_NANS (TYPE_MODE (elem_type))
12035 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12036 {
12037 tree r = build_real (elem_type, dconst1);
12038 /* omit_two_operands will call fold_convert for us. */
12039 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12040 }
12041 }
12042
12043 /* (-A) / (-B) -> A / B */
12044 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12045 return fold_build2_loc (loc, RDIV_EXPR, type,
12046 TREE_OPERAND (arg0, 0),
12047 negate_expr (arg1));
12048 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12049 return fold_build2_loc (loc, RDIV_EXPR, type,
12050 negate_expr (arg0),
12051 TREE_OPERAND (arg1, 0));
12052
12053 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12054 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12055 && real_onep (arg1))
12056 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12057
12058 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12059 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12060 && real_minus_onep (arg1))
12061 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12062 negate_expr (arg0)));
12063
12064 /* If ARG1 is a constant, we can convert this to a multiply by the
12065 reciprocal. This does not have the same rounding properties,
12066 so only do this if -freciprocal-math. We can actually
12067 always safely do it if ARG1 is a power of two, but it's hard to
12068 tell if it is or not in a portable manner. */
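/* For instance, X / 2.0 can always be rewritten as X * 0.5, because
   0.5 is exactly representable; X / 3.0 -> X * (1.0/3.0) changes the
   rounding and is therefore only done under -freciprocal-math.  */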
12069 if (optimize
12070 && (TREE_CODE (arg1) == REAL_CST
12071 || (TREE_CODE (arg1) == COMPLEX_CST
12072 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12073 || (TREE_CODE (arg1) == VECTOR_CST
12074 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12075 {
12076 if (flag_reciprocal_math
12077 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12078 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12079 /* Find the reciprocal if optimizing and the result is exact.
12080 TODO: Complex reciprocal not implemented. */
12081 if (TREE_CODE (arg1) != COMPLEX_CST)
12082 {
12083 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12084
12085 if (inverse)
12086 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12087 }
12088 }
12089 /* Convert A/B/C to A/(B*C). */
12090 if (flag_reciprocal_math
12091 && TREE_CODE (arg0) == RDIV_EXPR)
12092 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12093 fold_build2_loc (loc, MULT_EXPR, type,
12094 TREE_OPERAND (arg0, 1), arg1));
12095
12096 /* Convert A/(B/C) to (A/B)*C. */
12097 if (flag_reciprocal_math
12098 && TREE_CODE (arg1) == RDIV_EXPR)
12099 return fold_build2_loc (loc, MULT_EXPR, type,
12100 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12101 TREE_OPERAND (arg1, 0)),
12102 TREE_OPERAND (arg1, 1));
12103
12104 /* Convert C1/(X*C2) into (C1/C2)/X. */
12105 if (flag_reciprocal_math
12106 && TREE_CODE (arg1) == MULT_EXPR
12107 && TREE_CODE (arg0) == REAL_CST
12108 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12109 {
12110 tree tem = const_binop (RDIV_EXPR, arg0,
12111 TREE_OPERAND (arg1, 1));
12112 if (tem)
12113 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12114 TREE_OPERAND (arg1, 0));
12115 }
12116
12117 if (flag_unsafe_math_optimizations)
12118 {
12119 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12120 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12121
12122 /* Optimize sin(x)/cos(x) as tan(x). */
12123 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12124 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12125 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12126 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12127 CALL_EXPR_ARG (arg1, 0), 0))
12128 {
12129 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12130
12131 if (tanfn != NULL_TREE)
12132 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12133 }
12134
12135 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12136 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12137 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12138 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12139 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12140 CALL_EXPR_ARG (arg1, 0), 0))
12141 {
12142 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12143
12144 if (tanfn != NULL_TREE)
12145 {
12146 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12147 CALL_EXPR_ARG (arg0, 0));
12148 return fold_build2_loc (loc, RDIV_EXPR, type,
12149 build_real (type, dconst1), tmp);
12150 }
12151 }
12152
12153 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12154 NaNs or Infinities. */
12155 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12156 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12157 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12158 {
12159 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12160 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12161
12162 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12163 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12164 && operand_equal_p (arg00, arg01, 0))
12165 {
12166 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12167
12168 if (cosfn != NULL_TREE)
12169 return build_call_expr_loc (loc, cosfn, 1, arg00);
12170 }
12171 }
12172
12173 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12174 NaNs or Infinities. */
12175 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12176 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12177 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12178 {
12179 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12180 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12181
12182 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12183 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12184 && operand_equal_p (arg00, arg01, 0))
12185 {
12186 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12187
12188 if (cosfn != NULL_TREE)
12189 {
12190 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12191 return fold_build2_loc (loc, RDIV_EXPR, type,
12192 build_real (type, dconst1),
12193 tmp);
12194 }
12195 }
12196 }
12197
12198 /* Optimize pow(x,c)/x as pow(x,c-1). */
12199 if (fcode0 == BUILT_IN_POW
12200 || fcode0 == BUILT_IN_POWF
12201 || fcode0 == BUILT_IN_POWL)
12202 {
12203 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12204 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12205 if (TREE_CODE (arg01) == REAL_CST
12206 && !TREE_OVERFLOW (arg01)
12207 && operand_equal_p (arg1, arg00, 0))
12208 {
12209 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12210 REAL_VALUE_TYPE c;
12211 tree arg;
12212
12213 c = TREE_REAL_CST (arg01);
12214 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12215 arg = build_real (type, c);
12216 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12217 }
12218 }
12219
12220 /* Optimize a/root(b/c) into a*root(c/b). */
12221 if (BUILTIN_ROOT_P (fcode1))
12222 {
12223 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12224
12225 if (TREE_CODE (rootarg) == RDIV_EXPR)
12226 {
12227 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12228 tree b = TREE_OPERAND (rootarg, 0);
12229 tree c = TREE_OPERAND (rootarg, 1);
12230
12231 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12232
12233 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12234 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12235 }
12236 }
12237
12238 /* Optimize x/expN(y) into x*expN(-y). */
12239 if (BUILTIN_EXPONENT_P (fcode1))
12240 {
12241 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12242 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12243 arg1 = build_call_expr_loc (loc,
12244 expfn, 1,
12245 fold_convert_loc (loc, type, arg));
12246 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12247 }
12248
12249 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12250 if (fcode1 == BUILT_IN_POW
12251 || fcode1 == BUILT_IN_POWF
12252 || fcode1 == BUILT_IN_POWL)
12253 {
12254 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12255 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12256 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12257 tree neg11 = fold_convert_loc (loc, type,
12258 negate_expr (arg11));
12259 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12260 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12261 }
12262 }
12263 return NULL_TREE;
12264
12265 case TRUNC_DIV_EXPR:
12266 /* Optimize (X & (-A)) / A where A is a power of 2,
12267 to X >> log2(A) */
12268 if (TREE_CODE (arg0) == BIT_AND_EXPR
12269 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12270 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12271 {
12272 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12273 arg1, TREE_OPERAND (arg0, 1));
12274 if (sum && integer_zerop (sum))
  {
12275 unsigned long pow2;
12276
12277 if (TREE_INT_CST_LOW (arg1))
12278 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12279 else
12280 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12281 + HOST_BITS_PER_WIDE_INT;
12282
12283 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12284 TREE_OPERAND (arg0, 0),
12285 build_int_cst (integer_type_node, pow2));
12286 }
12287 }
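/* E.g. (X & -8) / 8 becomes X >> 3: the AND clears the three low
   bits, so the division is exact and matches the arithmetic shift
   even for negative X.  */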
12288
12289 /* Fall through */
12290
12291 case FLOOR_DIV_EXPR:
12292 /* Simplify A / (B << N) where A and B are positive and B is
12293 a power of 2, to A >> (N + log2(B)). */
12294 strict_overflow_p = false;
12295 if (TREE_CODE (arg1) == LSHIFT_EXPR
12296 && (TYPE_UNSIGNED (type)
12297 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12298 {
12299 tree sval = TREE_OPERAND (arg1, 0);
12300 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12301 {
12302 tree sh_cnt = TREE_OPERAND (arg1, 1);
12303 unsigned long pow2;
12304
12305 if (TREE_INT_CST_LOW (sval))
12306 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12307 else
12308 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12309 + HOST_BITS_PER_WIDE_INT;
12310
12311 if (strict_overflow_p)
12312 fold_overflow_warning (("assuming signed overflow does not "
12313 "occur when simplifying A / (B << N)"),
12314 WARN_STRICT_OVERFLOW_MISC);
12315
12316 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12317 sh_cnt,
12318 build_int_cst (TREE_TYPE (sh_cnt),
12319 pow2));
12320 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12321 fold_convert_loc (loc, type, arg0), sh_cnt);
12322 }
12323 }
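/* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2).  */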
12324
12325 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12326 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12327 if (INTEGRAL_TYPE_P (type)
12328 && TYPE_UNSIGNED (type)
12329 && code == FLOOR_DIV_EXPR)
12330 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12331
12332 /* Fall through */
12333
12334 case ROUND_DIV_EXPR:
12335 case CEIL_DIV_EXPR:
12336 case EXACT_DIV_EXPR:
12337 if (integer_onep (arg1))
12338 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12339 if (integer_zerop (arg1))
12340 return NULL_TREE;
12341 /* X / -1 is -X. */
12342 if (!TYPE_UNSIGNED (type)
12343 && TREE_CODE (arg1) == INTEGER_CST
12344 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12345 && TREE_INT_CST_HIGH (arg1) == -1)
12346 return fold_convert_loc (loc, type, negate_expr (arg0));
12347
12348 /* Convert -A / -B to A / B when the type is signed and overflow is
12349 undefined. */
12350 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12351 && TREE_CODE (arg0) == NEGATE_EXPR
12352 && negate_expr_p (arg1))
12353 {
12354 if (INTEGRAL_TYPE_P (type))
12355 fold_overflow_warning (("assuming signed overflow does not occur "
12356 "when distributing negation across "
12357 "division"),
12358 WARN_STRICT_OVERFLOW_MISC);
12359 return fold_build2_loc (loc, code, type,
12360 fold_convert_loc (loc, type,
12361 TREE_OPERAND (arg0, 0)),
12362 fold_convert_loc (loc, type,
12363 negate_expr (arg1)));
12364 }
12365 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12366 && TREE_CODE (arg1) == NEGATE_EXPR
12367 && negate_expr_p (arg0))
12368 {
12369 if (INTEGRAL_TYPE_P (type))
12370 fold_overflow_warning (("assuming signed overflow does not occur "
12371 "when distributing negation across "
12372 "division"),
12373 WARN_STRICT_OVERFLOW_MISC);
12374 return fold_build2_loc (loc, code, type,
12375 fold_convert_loc (loc, type,
12376 negate_expr (arg0)),
12377 fold_convert_loc (loc, type,
12378 TREE_OPERAND (arg1, 0)));
12379 }
12380
12381 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12382 operation, EXACT_DIV_EXPR.
12383
12384 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12385 At one time others generated faster code, but it's not clear if they
12386 still do after the last round of changes to the DIV code in expmed.c. */
12387 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12388 && multiple_of_p (type, arg0, arg1))
12389 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12390
12391 strict_overflow_p = false;
12392 if (TREE_CODE (arg1) == INTEGER_CST
12393 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12394 &strict_overflow_p)))
12395 {
12396 if (strict_overflow_p)
12397 fold_overflow_warning (("assuming signed overflow does not occur "
12398 "when simplifying division"),
12399 WARN_STRICT_OVERFLOW_MISC);
12400 return fold_convert_loc (loc, type, tem);
12401 }
12402
12403 return NULL_TREE;
12404
12405 case CEIL_MOD_EXPR:
12406 case FLOOR_MOD_EXPR:
12407 case ROUND_MOD_EXPR:
12408 case TRUNC_MOD_EXPR:
12409 /* X % 1 is always zero, but be sure to preserve any side
12410 effects in X. */
12411 if (integer_onep (arg1))
12412 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12413
12414 /* For X % 0, return X % 0 unchanged so that we can emit the
12415 proper warnings and errors. */
12416 if (integer_zerop (arg1))
12417 return NULL_TREE;
12418
12419 /* 0 % X is always zero, but be sure to preserve any side
12420 effects in X. Place this after checking for X == 0. */
12421 if (integer_zerop (arg0))
12422 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12423
12424 /* X % -1 is zero. */
12425 if (!TYPE_UNSIGNED (type)
12426 && TREE_CODE (arg1) == INTEGER_CST
12427 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12428 && TREE_INT_CST_HIGH (arg1) == -1)
12429 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12430
12431 /* X % -C is the same as X % C. */
12432 if (code == TRUNC_MOD_EXPR
12433 && !TYPE_UNSIGNED (type)
12434 && TREE_CODE (arg1) == INTEGER_CST
12435 && !TREE_OVERFLOW (arg1)
12436 && TREE_INT_CST_HIGH (arg1) < 0
12437 && !TYPE_OVERFLOW_TRAPS (type)
12438 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12439 && !sign_bit_p (arg1, arg1))
12440 return fold_build2_loc (loc, code, type,
12441 fold_convert_loc (loc, type, arg0),
12442 fold_convert_loc (loc, type,
12443 negate_expr (arg1)));
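/* E.g. X % -5 -> X % 5: with truncating division the remainder
   takes the sign of X, so (-7) % -5 == (-7) % 5 == -2.  */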
12444
12445 /* X % -Y is the same as X % Y. */
12446 if (code == TRUNC_MOD_EXPR
12447 && !TYPE_UNSIGNED (type)
12448 && TREE_CODE (arg1) == NEGATE_EXPR
12449 && !TYPE_OVERFLOW_TRAPS (type))
12450 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12451 fold_convert_loc (loc, type,
12452 TREE_OPERAND (arg1, 0)));
12453
12454 strict_overflow_p = false;
12455 if (TREE_CODE (arg1) == INTEGER_CST
12456 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12457 &strict_overflow_p)))
12458 {
12459 if (strict_overflow_p)
12460 fold_overflow_warning (("assuming signed overflow does not occur "
12461 "when simplifying modulus"),
12462 WARN_STRICT_OVERFLOW_MISC);
12463 return fold_convert_loc (loc, type, tem);
12464 }
12465
12466 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12467 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12468 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12469 && (TYPE_UNSIGNED (type)
12470 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12471 {
12472 tree c = arg1;
12473 /* Also optimize A % (C << N) where C is a power of 2,
12474 to A & ((C << N) - 1). */
12475 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12476 c = TREE_OPERAND (arg1, 0);
12477
12478 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12479 {
12480 tree mask
12481 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12482 build_int_cst (TREE_TYPE (arg1), 1));
12483 if (strict_overflow_p)
12484 fold_overflow_warning (("assuming signed overflow does not "
12485 "occur when simplifying "
12486 "X % (power of two)"),
12487 WARN_STRICT_OVERFLOW_MISC);
12488 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12489 fold_convert_loc (loc, type, arg0),
12490 fold_convert_loc (loc, type, mask));
12491 }
12492 }
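/* E.g. unsigned X % 8 becomes X & 7, and X % (2 << N) becomes
   X & ((2 << N) - 1).  */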
12493
12494 return NULL_TREE;
12495
12496 case LROTATE_EXPR:
12497 case RROTATE_EXPR:
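/* Rotating an all-ones value by any amount still yields all ones.  */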
12498 if (integer_all_onesp (arg0))
12499 return omit_one_operand_loc (loc, type, arg0, arg1);
12500 goto shift;
12501
12502 case RSHIFT_EXPR:
12503 /* Optimize -1 >> x for arithmetic right shifts. */
12504 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12505 && tree_expr_nonnegative_p (arg1))
12506 return omit_one_operand_loc (loc, type, arg0, arg1);
12507 /* ... fall through ... */
12508
12509 case LSHIFT_EXPR:
12510 shift:
12511 if (integer_zerop (arg1))
12512 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12513 if (integer_zerop (arg0))
12514 return omit_one_operand_loc (loc, type, arg0, arg1);
12515
12516 /* Prefer vector1 << scalar to vector1 << vector2
12517 if vector2 is uniform. */
12518 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12519 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12520 return fold_build2_loc (loc, code, type, op0, tem);
12521
12522 /* Since a negative shift count is not well-defined,
12523 don't try to compute it in the compiler. */
12524 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12525 return NULL_TREE;
12526
12527 prec = element_precision (type);
12528
12529 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12530 if (TREE_CODE (op0) == code && host_integerp (arg1, true)
12531 && TREE_INT_CST_LOW (arg1) < prec
12532 && host_integerp (TREE_OPERAND (arg0, 1), true)
12533 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12534 {
12535 unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12536 + TREE_INT_CST_LOW (arg1));
12537
12538 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12539 being well defined. */
12540 if (low >= prec)
12541 {
12542 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12543 low = low % prec;
12544 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12545 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12546 TREE_OPERAND (arg0, 0));
12547 else
12548 low = prec - 1;
12549 }
12550
12551 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12552 build_int_cst (TREE_TYPE (arg1), low));
12553 }
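/* E.g. (X >> 3) >> 5 becomes X >> 8; for 32-bit X, (X << 20) << 20
   becomes 0, rotating by 20 twice becomes rotating by 8, and for
   signed X the arithmetic (X >> 20) >> 20 becomes X >> 31.  */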
12554
12555 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12556 into x & ((unsigned)-1 >> c) for unsigned types. */
12557 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12558 || (TYPE_UNSIGNED (type)
12559 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12560 && host_integerp (arg1, false)
12561 && TREE_INT_CST_LOW (arg1) < prec
12562 && host_integerp (TREE_OPERAND (arg0, 1), false)
12563 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12564 {
12565 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12566 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12567 tree lshift;
12568 tree arg00;
12569
12570 if (low0 == low1)
12571 {
12572 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12573
12574 lshift = build_minus_one_cst (type);
12575 lshift = const_binop (code, lshift, arg1);
12576
12577 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12578 }
12579 }
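/* E.g. for 32-bit unsigned X, (X << 8) >> 8 becomes X & 0x00ffffff
   and (X >> 8) << 8 becomes X & 0xffffff00.  */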
12580
12581 /* Rewrite an LROTATE_EXPR by a constant into an
12582 RROTATE_EXPR by a new constant. */
12583 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12584 {
12585 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12586 tem = const_binop (MINUS_EXPR, tem, arg1);
12587 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12588 }
12589
12590 /* If we have a rotate of a bit operation with the rotate count and
12591 the second operand of the bit operation both constant,
12592 permute the two operations. */
12593 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12594 && (TREE_CODE (arg0) == BIT_AND_EXPR
12595 || TREE_CODE (arg0) == BIT_IOR_EXPR
12596 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12597 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12598 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12599 fold_build2_loc (loc, code, type,
12600 TREE_OPERAND (arg0, 0), arg1),
12601 fold_build2_loc (loc, code, type,
12602 TREE_OPERAND (arg0, 1), arg1));
12603
12604 /* Two consecutive rotates adding up to the precision of the
12605 type can be ignored. */
12606 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12607 && TREE_CODE (arg0) == RROTATE_EXPR
12608 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12609 && TREE_INT_CST_HIGH (arg1) == 0
12610 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12611 && ((TREE_INT_CST_LOW (arg1)
12612 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12613 == prec))
12614 return TREE_OPERAND (arg0, 0);
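/* E.g. rotating a 32-bit value right by 8 and then right by 24
   brings every bit back to its original position.  */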
12615
12616 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12617 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12618 if the latter can be further optimized. */
12619 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12620 && TREE_CODE (arg0) == BIT_AND_EXPR
12621 && TREE_CODE (arg1) == INTEGER_CST
12622 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12623 {
12624 tree mask = fold_build2_loc (loc, code, type,
12625 fold_convert_loc (loc, type,
12626 TREE_OPERAND (arg0, 1)),
12627 arg1);
12628 tree shift = fold_build2_loc (loc, code, type,
12629 fold_convert_loc (loc, type,
12630 TREE_OPERAND (arg0, 0)),
12631 arg1);
12632 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12633 if (tem)
12634 return tem;
12635 }
12636
12637 return NULL_TREE;
12638
12639 case MIN_EXPR:
12640 if (operand_equal_p (arg0, arg1, 0))
12641 return omit_one_operand_loc (loc, type, arg0, arg1);
12642 if (INTEGRAL_TYPE_P (type)
12643 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12644 return omit_one_operand_loc (loc, type, arg1, arg0);
12645 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12646 if (tem)
12647 return tem;
12648 goto associate;
12649
12650 case MAX_EXPR:
12651 if (operand_equal_p (arg0, arg1, 0))
12652 return omit_one_operand_loc (loc, type, arg0, arg1);
12653 if (INTEGRAL_TYPE_P (type)
12654 && TYPE_MAX_VALUE (type)
12655 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12656 return omit_one_operand_loc (loc, type, arg1, arg0);
12657 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12658 if (tem)
12659 return tem;
12660 goto associate;
12661
12662 case TRUTH_ANDIF_EXPR:
12663 /* Note that the operands of this must be ints
12664 and their values must be 0 or 1.
12665 ("true" is a fixed value perhaps depending on the language.) */
12666 /* If first arg is constant zero, return it. */
12667 if (integer_zerop (arg0))
12668 return fold_convert_loc (loc, type, arg0);
12669 case TRUTH_AND_EXPR:
12670 /* If either arg is constant true, drop it. */
12671 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12672 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12673 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12674 /* Preserve sequence points. */
12675 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12676 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12677 /* If second arg is constant zero, result is zero, but first arg
12678 must be evaluated. */
12679 if (integer_zerop (arg1))
12680 return omit_one_operand_loc (loc, type, arg1, arg0);
12681 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12682 case will be handled here. */
12683 if (integer_zerop (arg0))
12684 return omit_one_operand_loc (loc, type, arg0, arg1);
12685
12686 /* !X && X is always false. */
12687 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12688 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12689 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12690 /* X && !X is always false. */
12691 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12692 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12693 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12694
12695 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12696 means A >= Y && A != MAX, but in this case we know that
12697 A < X <= MAX. */
12698
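/* E.g. for unsigned char A, A < X implies A <= 254, so A + 1 cannot
   wrap around and A + 1 > Y is exactly A >= Y.  */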
12699 if (!TREE_SIDE_EFFECTS (arg0)
12700 && !TREE_SIDE_EFFECTS (arg1))
12701 {
12702 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12703 if (tem && !operand_equal_p (tem, arg0, 0))
12704 return fold_build2_loc (loc, code, type, tem, arg1);
12705
12706 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12707 if (tem && !operand_equal_p (tem, arg1, 0))
12708 return fold_build2_loc (loc, code, type, arg0, tem);
12709 }
12710
12711 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12712 != NULL_TREE)
12713 return tem;
12714
12715 return NULL_TREE;
12716
12717 case TRUTH_ORIF_EXPR:
12718 /* Note that the operands of this must be ints
12719 and their values must be 0 or true.
12720 ("true" is a fixed value perhaps depending on the language.) */
12721 /* If first arg is constant true, return it. */
12722 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12723 return fold_convert_loc (loc, type, arg0);
12724 case TRUTH_OR_EXPR:
12725 /* If either arg is constant zero, drop it. */
12726 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12727 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12728 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12729 /* Preserve sequence points. */
12730 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12731 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12732 /* If second arg is constant true, result is true, but we must
12733 evaluate first arg. */
12734 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12735 return omit_one_operand_loc (loc, type, arg1, arg0);
12736 /* Likewise for first arg, but note this only occurs here for
12737 TRUTH_OR_EXPR. */
12738 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12739 return omit_one_operand_loc (loc, type, arg0, arg1);
12740
12741 /* !X || X is always true. */
12742 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12743 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12744 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12745 /* X || !X is always true. */
12746 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12747 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12748 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12749
12750 /* (X && !Y) || (!X && Y) is X ^ Y */
12751 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12752 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12753 {
12754 tree a0, a1, l0, l1, n0, n1;
12755
12756 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12757 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12758
12759 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12760 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12761
12762 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12763 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12764
12765 if ((operand_equal_p (n0, a0, 0)
12766 && operand_equal_p (n1, a1, 0))
12767 || (operand_equal_p (n0, a1, 0)
12768 && operand_equal_p (n1, a0, 0)))
12769 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12770 }
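/* Case check: if X == Y both conjuncts are false, and if X != Y
   exactly one of them is true, which matches X ^ Y.  */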
12771
12772 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12773 != NULL_TREE)
12774 return tem;
12775
12776 return NULL_TREE;
12777
12778 case TRUTH_XOR_EXPR:
12779 /* If the second arg is constant zero, drop it. */
12780 if (integer_zerop (arg1))
12781 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12782 /* If the second arg is constant true, this is a logical inversion. */
12783 if (integer_onep (arg1))
12784 {
12785 tem = invert_truthvalue_loc (loc, arg0);
12786 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12787 }
12788 /* Identical arguments cancel to zero. */
12789 if (operand_equal_p (arg0, arg1, 0))
12790 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12791
12792 /* !X ^ X is always true. */
12793 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12794 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12795 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12796
12797 /* X ^ !X is always true. */
12798 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12799 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12800 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12801
12802 return NULL_TREE;
12803
12804 case EQ_EXPR:
12805 case NE_EXPR:
12806 STRIP_NOPS (arg0);
12807 STRIP_NOPS (arg1);
12808
12809 tem = fold_comparison (loc, code, type, op0, op1);
12810 if (tem != NULL_TREE)
12811 return tem;
12812
12813 /* bool_var != 0 becomes bool_var. */
12814 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12815 && code == NE_EXPR)
12816 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12817
12818 /* bool_var == 1 becomes bool_var. */
12819 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12820 && code == EQ_EXPR)
12821 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12822
12823 /* bool_var != 1 becomes !bool_var. */
12824 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12825 && code == NE_EXPR)
12826 return fold_convert_loc (loc, type,
12827 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12828 TREE_TYPE (arg0), arg0));
12829
12830 /* bool_var == 0 becomes !bool_var. */
12831 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12832 && code == EQ_EXPR)
12833 return fold_convert_loc (loc, type,
12834 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12835 TREE_TYPE (arg0), arg0));
12836
12837 /* !exp != 0 becomes !exp */
12838 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12839 && code == NE_EXPR)
12840 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12841
12842 /* If this is an equality comparison of the address of two non-weak,
12843 unaliased symbols, neither of which is extern (since we do not
12844 have access to attributes for externs), then we know the result. */
12845 if (TREE_CODE (arg0) == ADDR_EXPR
12846 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12847 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12848 && ! lookup_attribute ("alias",
12849 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12850 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12851 && TREE_CODE (arg1) == ADDR_EXPR
12852 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12853 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12854 && ! lookup_attribute ("alias",
12855 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12856 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12857 {
12858 /* We know that we're looking at the address of two
12859 non-weak, unaliased, static _DECL nodes.
12860
12861 It is both wasteful and incorrect to call operand_equal_p
12862 to compare the two ADDR_EXPR nodes. It is wasteful in that
12863 all we need to do is test pointer equality for the arguments
12864 to the two ADDR_EXPR nodes. It is incorrect to use
12865 operand_equal_p as that function is NOT equivalent to a
12866 C equality test. It can in fact return false for two
12867 objects which would test as equal using the C equality
12868 operator. */
12869 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12870 return constant_boolean_node (equal
12871 ? code == EQ_EXPR : code != EQ_EXPR,
12872 type);
12873 }
12874
12875 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12876 a MINUS_EXPR of a constant, we can convert it into a comparison with
12877 a revised constant as long as no overflow occurs. */
12878 if (TREE_CODE (arg1) == INTEGER_CST
12879 && (TREE_CODE (arg0) == PLUS_EXPR
12880 || TREE_CODE (arg0) == MINUS_EXPR)
12881 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12882 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12883 ? MINUS_EXPR : PLUS_EXPR,
12884 fold_convert_loc (loc, TREE_TYPE (arg0),
12885 arg1),
12886 TREE_OPERAND (arg0, 1)))
12887 && !TREE_OVERFLOW (tem))
12888 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12889
12890 /* Similarly for a NEGATE_EXPR. */
12891 if (TREE_CODE (arg0) == NEGATE_EXPR
12892 && TREE_CODE (arg1) == INTEGER_CST
12893 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12894 arg1)))
12895 && TREE_CODE (tem) == INTEGER_CST
12896 && !TREE_OVERFLOW (tem))
12897 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12898
12899 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12900 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12901 && TREE_CODE (arg1) == INTEGER_CST
12902 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12903 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12904 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12905 fold_convert_loc (loc,
12906 TREE_TYPE (arg0),
12907 arg1),
12908 TREE_OPERAND (arg0, 1)));
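/* E.g. (X ^ 5) == 3 becomes X == 6, since 5 ^ 3 == 6.  */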
12909
12910 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12911 if ((TREE_CODE (arg0) == PLUS_EXPR
12912 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12913 || TREE_CODE (arg0) == MINUS_EXPR)
12914 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12915 0)),
12916 arg1, 0)
12917 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12918 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12919 {
12920 tree val = TREE_OPERAND (arg0, 1);
12921 return omit_two_operands_loc (loc, type,
12922 fold_build2_loc (loc, code, type,
12923 val,
12924 build_int_cst (TREE_TYPE (val),
12925 0)),
12926 TREE_OPERAND (arg0, 0), arg1);
12927 }
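/* E.g. (X + Y) == X becomes Y == 0; this holds even with
   wrap-around, since adding Y is invertible modulo 2^N.  */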
12928
12929 /* Transform comparisons of the form C - X CMP X to a constant if C % 2 == 1. */
12930 if (TREE_CODE (arg0) == MINUS_EXPR
12931 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12932 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12933 1)),
12934 arg1, 0)
12935 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12936 {
12937 return omit_two_operands_loc (loc, type,
12938 code == NE_EXPR
12939 ? boolean_true_node : boolean_false_node,
12940 TREE_OPERAND (arg0, 1), arg1);
12941 }
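/* E.g. (7 - X) != X folds to true: it would require 2*X == 7, and
   2*X is even modulo any power of two while 7 is odd.  */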
12942
12943 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12944 for !=. Don't do this for ordered comparisons due to overflow. */
12945 if (TREE_CODE (arg0) == MINUS_EXPR
12946 && integer_zerop (arg1))
12947 return fold_build2_loc (loc, code, type,
12948 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12949
12950 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12951 if (TREE_CODE (arg0) == ABS_EXPR
12952 && (integer_zerop (arg1) || real_zerop (arg1)))
12953 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12954
12955 /* If this is an EQ or NE comparison with zero and ARG0 is
12956 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12957 two operations, but the latter can be done in one less insn
12958 on machines that have only two-operand insns or on which a
12959 constant cannot be the first operand. */
12960 if (TREE_CODE (arg0) == BIT_AND_EXPR
12961 && integer_zerop (arg1))
12962 {
12963 tree arg00 = TREE_OPERAND (arg0, 0);
12964 tree arg01 = TREE_OPERAND (arg0, 1);
12965 if (TREE_CODE (arg00) == LSHIFT_EXPR
12966 && integer_onep (TREE_OPERAND (arg00, 0)))
12967 {
12968 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12969 arg01, TREE_OPERAND (arg00, 1));
12970 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12971 build_int_cst (TREE_TYPE (arg0), 1));
12972 return fold_build2_loc (loc, code, type,
12973 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12974 arg1);
12975 }
12976 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12977 && integer_onep (TREE_OPERAND (arg01, 0)))
12978 {
12979 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12980 arg00, TREE_OPERAND (arg01, 1));
12981 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12982 build_int_cst (TREE_TYPE (arg0), 1));
12983 return fold_build2_loc (loc, code, type,
12984 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12985 arg1);
12986 }
12987 }
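/* E.g. ((1 << N) & FLAGS) != 0 becomes ((FLAGS >> N) & 1) != 0,
   which avoids a constant in the first operand position.  */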
12988
12989 /* If this is an NE or EQ comparison of zero against the result of a
12990 signed MOD operation whose second operand is a power of 2, make
12991 the MOD operation unsigned since it is simpler and equivalent. */
12992 if (integer_zerop (arg1)
12993 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12994 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12995 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12996 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12997 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12998 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12999 {
13000 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13001 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13002 fold_convert_loc (loc, newtype,
13003 TREE_OPERAND (arg0, 0)),
13004 fold_convert_loc (loc, newtype,
13005 TREE_OPERAND (arg0, 1)));
13006
13007 return fold_build2_loc (loc, code, type, newmod,
13008 fold_convert_loc (loc, newtype, arg1));
13009 }
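/* Only zero-ness matters here: e.g. -5 % 4 == -1 while
   4294967291u % 4 == 3, but both are nonzero exactly when the two
   low bits of the operand are nonzero.  */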
13010
13011 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13012 C1 is a valid shift constant, and C2 is a power of two, i.e.
13013 a single bit. */
13014 if (TREE_CODE (arg0) == BIT_AND_EXPR
13015 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13016 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13017 == INTEGER_CST
13018 && integer_pow2p (TREE_OPERAND (arg0, 1))
13019 && integer_zerop (arg1))
13020 {
13021 tree itype = TREE_TYPE (arg0);
13022 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13023 prec = TYPE_PRECISION (itype);
13024
13025 /* Check for a valid shift count. */
13026 if (TREE_INT_CST_HIGH (arg001) == 0
13027 && TREE_INT_CST_LOW (arg001) < prec)
13028 {
13029 tree arg01 = TREE_OPERAND (arg0, 1);
13030 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13031 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13032 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13033 can be rewritten as (X & (C2 << C1)) != 0. */
13034 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13035 {
13036 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13037 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13038 return fold_build2_loc (loc, code, type, tem,
13039 fold_convert_loc (loc, itype, arg1));
13040 }
13041 /* Otherwise, for signed (arithmetic) shifts,
13042 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13043 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13044 else if (!TYPE_UNSIGNED (itype))
13045 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13046 arg000, build_int_cst (itype, 0));
13047 /* Otherwise, for unsigned (logical) shifts,
13048 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13049 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13050 else
13051 return omit_one_operand_loc (loc, type,
13052 code == EQ_EXPR ? integer_one_node
13053 : integer_zero_node,
13054 arg000);
13055 }
13056 }
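/* E.g. for 32-bit X, ((X >> 3) & 4) != 0 becomes (X & 32) != 0
   since 4 << 3 does not overflow, while for signed X
   ((X >> 30) & 4) != 0 tests the sign bit and becomes X < 0.  */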
13057
13058 /* If we have (A & C) == C where C is a power of 2, convert this into
13059 (A & C) != 0. Similarly for NE_EXPR. */
13060 if (TREE_CODE (arg0) == BIT_AND_EXPR
13061 && integer_pow2p (TREE_OPERAND (arg0, 1))
13062 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13063 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13064 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13065 integer_zero_node));
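/* E.g. (A & 8) == 8 becomes (A & 8) != 0: with a single-bit mask
   the only nonzero value A & 8 can take is 8 itself.  */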
13066
13067 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13068 bit, then fold the expression into A < 0 or A >= 0. */
13069 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13070 if (tem)
13071 return tem;
13072
13073 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13074 Similarly for NE_EXPR. */
13075 if (TREE_CODE (arg0) == BIT_AND_EXPR
13076 && TREE_CODE (arg1) == INTEGER_CST
13077 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13078 {
13079 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13080 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13081 TREE_OPERAND (arg0, 1));
13082 tree dandnotc
13083 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13084 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13085 notc);
13086 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13087 if (integer_nonzerop (dandnotc))
13088 return omit_one_operand_loc (loc, type, rslt, arg0);
13089 }
13090
13091 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13092 Similarly for NE_EXPR. */
13093 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13094 && TREE_CODE (arg1) == INTEGER_CST
13095 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13096 {
13097 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13098 tree candnotd
13099 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13100 TREE_OPERAND (arg0, 1),
13101 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13102 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13103 if (integer_nonzerop (candnotd))
13104 return omit_one_operand_loc (loc, type, rslt, arg0);
13105 }
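/* E.g. (A & 12) == 5 folds to false, since A & 12 can only be 0, 4,
   8 or 12; likewise (A | 8) == 3 folds to false, since bit 3 is
   always set in A | 8 but clear in 3.  */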
13106
13107 /* If this is a comparison of a field, we may be able to simplify it. */
13108 if ((TREE_CODE (arg0) == COMPONENT_REF
13109 || TREE_CODE (arg0) == BIT_FIELD_REF)
13110 /* Handle the constant case even without -O
13111 to make sure the warnings are given. */
13112 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13113 {
13114 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13115 if (t1)
13116 return t1;
13117 }
13118
13119 /* Optimize comparisons of strlen vs zero to a compare of the
13120 first character of the string vs zero. To wit,
13121 strlen(ptr) == 0 => *ptr == 0
13122 strlen(ptr) != 0 => *ptr != 0
13123 Other cases should reduce to one of these two (or a constant)
13124 due to the return value of strlen being unsigned. */
13125 if (TREE_CODE (arg0) == CALL_EXPR
13126 && integer_zerop (arg1))
13127 {
13128 tree fndecl = get_callee_fndecl (arg0);
13129
13130 if (fndecl
13131 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13132 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13133 && call_expr_nargs (arg0) == 1
13134 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13135 {
13136 tree iref = build_fold_indirect_ref_loc (loc,
13137 CALL_EXPR_ARG (arg0, 0));
13138 return fold_build2_loc (loc, code, type, iref,
13139 build_int_cst (TREE_TYPE (iref), 0));
13140 }
13141 }
13142
13143 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13144 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13145 if (TREE_CODE (arg0) == RSHIFT_EXPR
13146 && integer_zerop (arg1)
13147 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13148 {
13149 tree arg00 = TREE_OPERAND (arg0, 0);
13150 tree arg01 = TREE_OPERAND (arg0, 1);
13151 tree itype = TREE_TYPE (arg00);
13152 if (TREE_INT_CST_HIGH (arg01) == 0
13153 && TREE_INT_CST_LOW (arg01)
13154 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13155 {
13156 if (TYPE_UNSIGNED (itype))
13157 {
13158 itype = signed_type_for (itype);
13159 arg00 = fold_convert_loc (loc, itype, arg00);
13160 }
13161 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13162 type, arg00, build_zero_cst (itype));
13163 }
13164 }
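/* E.g. for 32-bit signed X, (X >> 31) != 0 becomes X < 0: the
   arithmetic shift yields -1 for negative X and 0 otherwise.  */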
13165
13166 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13167 if (integer_zerop (arg1)
13168 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13169 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13170 TREE_OPERAND (arg0, 1));
13171
13172 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13173 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13174 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13175 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13176 build_zero_cst (TREE_TYPE (arg0)));
13177 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13178 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13179 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13180 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13181 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13182 build_zero_cst (TREE_TYPE (arg0)));
13183
13184 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13185 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13186 && TREE_CODE (arg1) == INTEGER_CST
13187 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13188 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13189 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13190 TREE_OPERAND (arg0, 1), arg1));
13191
13192 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13193 (X & C) == 0 when C is a single bit. */
13194 if (TREE_CODE (arg0) == BIT_AND_EXPR
13195 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13196 && integer_zerop (arg1)
13197 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13198 {
13199 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13200 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13201 TREE_OPERAND (arg0, 1));
13202 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13203 type, tem,
13204 fold_convert_loc (loc, TREE_TYPE (arg0),
13205 arg1));
13206 }
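/* E.g. (~X & 4) == 0 becomes (X & 4) != 0: bit 2 of ~X is clear
   exactly when bit 2 of X is set.  */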
13207
13208 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13209 constant C is a power of two, i.e. a single bit. */
13210 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13211 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13212 && integer_zerop (arg1)
13213 && integer_pow2p (TREE_OPERAND (arg0, 1))
13214 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13215 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13216 {
13217 tree arg00 = TREE_OPERAND (arg0, 0);
13218 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13219 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13220 }
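/* E.g. ((X & 4) ^ 4) == 0 becomes (X & 4) != 0: X & 4 is either 0
   or 4, and XORing with 4 swaps those two values.  */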
13221
13222 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13223 when C is a power of two, i.e. a single bit. */
13224 if (TREE_CODE (arg0) == BIT_AND_EXPR
13225 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13226 && integer_zerop (arg1)
13227 && integer_pow2p (TREE_OPERAND (arg0, 1))
13228 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13229 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13230 {
13231 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13232 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13233 arg000, TREE_OPERAND (arg0, 1));
13234 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13235 tem, build_int_cst (TREE_TYPE (tem), 0));
13236 }
13237
13238 if (integer_zerop (arg1)
13239 && tree_expr_nonzero_p (arg0))
13240 {
13241 tree res = constant_boolean_node (code == NE_EXPR, type);
13242 return omit_one_operand_loc (loc, type, res, arg0);
13243 }
13244
13245 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13246 if (TREE_CODE (arg0) == NEGATE_EXPR
13247 && TREE_CODE (arg1) == NEGATE_EXPR)
13248 return fold_build2_loc (loc, code, type,
13249 TREE_OPERAND (arg0, 0),
13250 fold_convert_loc (loc, TREE_TYPE (arg0),
13251 TREE_OPERAND (arg1, 0)));
13252
13253 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13254 if (TREE_CODE (arg0) == BIT_AND_EXPR
13255 && TREE_CODE (arg1) == BIT_AND_EXPR)
13256 {
13257 tree arg00 = TREE_OPERAND (arg0, 0);
13258 tree arg01 = TREE_OPERAND (arg0, 1);
13259 tree arg10 = TREE_OPERAND (arg1, 0);
13260 tree arg11 = TREE_OPERAND (arg1, 1);
13261 tree itype = TREE_TYPE (arg0);
13262
13263 if (operand_equal_p (arg01, arg11, 0))
13264 return fold_build2_loc (loc, code, type,
13265 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13266 fold_build2_loc (loc,
13267 BIT_XOR_EXPR, itype,
13268 arg00, arg10),
13269 arg01),
13270 build_zero_cst (itype));
13271
13272 if (operand_equal_p (arg01, arg10, 0))
13273 return fold_build2_loc (loc, code, type,
13274 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13275 fold_build2_loc (loc,
13276 BIT_XOR_EXPR, itype,
13277 arg00, arg11),
13278 arg01),
13279 build_zero_cst (itype));
13280
13281 if (operand_equal_p (arg00, arg11, 0))
13282 return fold_build2_loc (loc, code, type,
13283 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13284 fold_build2_loc (loc,
13285 BIT_XOR_EXPR, itype,
13286 arg01, arg10),
13287 arg00),
13288 build_zero_cst (itype));
13289
13290 if (operand_equal_p (arg00, arg10, 0))
13291 return fold_build2_loc (loc, code, type,
13292 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13293 fold_build2_loc (loc,
13294 BIT_XOR_EXPR, itype,
13295 arg01, arg11),
13296 arg00),
13297 build_zero_cst (itype));
13298 }
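
/* Added illustration (hypothetical names): with a shared mask C,

     (x & 7) == (y & 7)      =>      ((x ^ y) & 7) == 0

   since the masked bits agree exactly when their XOR is zero
   under the same mask.  */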
13299
13300 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13301 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13302 {
13303 tree arg00 = TREE_OPERAND (arg0, 0);
13304 tree arg01 = TREE_OPERAND (arg0, 1);
13305 tree arg10 = TREE_OPERAND (arg1, 0);
13306 tree arg11 = TREE_OPERAND (arg1, 1);
13307 tree itype = TREE_TYPE (arg0);
13308
13309 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13310 operand_equal_p guarantees no side-effects so we don't need
13311 to use omit_one_operand on Z. */
13312 if (operand_equal_p (arg01, arg11, 0))
13313 return fold_build2_loc (loc, code, type, arg00,
13314 fold_convert_loc (loc, TREE_TYPE (arg00),
13315 arg10));
13316 if (operand_equal_p (arg01, arg10, 0))
13317 return fold_build2_loc (loc, code, type, arg00,
13318 fold_convert_loc (loc, TREE_TYPE (arg00),
13319 arg11));
13320 if (operand_equal_p (arg00, arg11, 0))
13321 return fold_build2_loc (loc, code, type, arg01,
13322 fold_convert_loc (loc, TREE_TYPE (arg01),
13323 arg10));
13324 if (operand_equal_p (arg00, arg10, 0))
13325 return fold_build2_loc (loc, code, type, arg01,
13326 fold_convert_loc (loc, TREE_TYPE (arg01),
13327 arg11));
13328
13329 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13330 if (TREE_CODE (arg01) == INTEGER_CST
13331 && TREE_CODE (arg11) == INTEGER_CST)
13332 {
13333 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13334 fold_convert_loc (loc, itype, arg11));
13335 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13336 return fold_build2_loc (loc, code, type, tem,
13337 fold_convert_loc (loc, itype, arg10));
13338 }
13339 }
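
/* Added illustration (hypothetical names): with C1 == 1 and C2 == 3,

     (x ^ 1) == (y ^ 3)      =>      (x ^ (1 ^ 3)) == y
                             i.e.    (x ^ 2) == y

   by XORing both sides with C2 and folding C1 ^ C2.  */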
13340
13341 /* Attempt to simplify equality/inequality comparisons of complex
13342 values. Only lower the comparison if the result is known or
13343 can be simplified to a single scalar comparison. */
13344 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13345 || TREE_CODE (arg0) == COMPLEX_CST)
13346 && (TREE_CODE (arg1) == COMPLEX_EXPR
13347 || TREE_CODE (arg1) == COMPLEX_CST))
13348 {
13349 tree real0, imag0, real1, imag1;
13350 tree rcond, icond;
13351
13352 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13353 {
13354 real0 = TREE_OPERAND (arg0, 0);
13355 imag0 = TREE_OPERAND (arg0, 1);
13356 }
13357 else
13358 {
13359 real0 = TREE_REALPART (arg0);
13360 imag0 = TREE_IMAGPART (arg0);
13361 }
13362
13363 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13364 {
13365 real1 = TREE_OPERAND (arg1, 0);
13366 imag1 = TREE_OPERAND (arg1, 1);
13367 }
13368 else
13369 {
13370 real1 = TREE_REALPART (arg1);
13371 imag1 = TREE_IMAGPART (arg1);
13372 }
13373
13374 rcond = fold_binary_loc (loc, code, type, real0, real1);
13375 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13376 {
13377 if (integer_zerop (rcond))
13378 {
13379 if (code == EQ_EXPR)
13380 return omit_two_operands_loc (loc, type, boolean_false_node,
13381 imag0, imag1);
13382 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13383 }
13384 else
13385 {
13386 if (code == NE_EXPR)
13387 return omit_two_operands_loc (loc, type, boolean_true_node,
13388 imag0, imag1);
13389 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13390 }
13391 }
13392
13393 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13394 if (icond && TREE_CODE (icond) == INTEGER_CST)
13395 {
13396 if (integer_zerop (icond))
13397 {
13398 if (code == EQ_EXPR)
13399 return omit_two_operands_loc (loc, type, boolean_false_node,
13400 real0, real1);
13401 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13402 }
13403 else
13404 {
13405 if (code == NE_EXPR)
13406 return omit_two_operands_loc (loc, type, boolean_true_node,
13407 real0, real1);
13408 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13409 }
13410 }
13411 }
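
/* Added illustration (a sketch with hypothetical operands): for
   COMPLEX_EXPR <x, 2.0> == COMPLEX_EXPR <y, 2.0>, the imaginary
   halves compare equal at compile time, so the whole test lowers
   to the single scalar comparison x == y.  Conversely, if one pair
   of halves is known unequal, == lowers to constant false and !=
   to constant true, keeping the other halves only for their side
   effects.  */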
13412
13413 return NULL_TREE;
13414
13415 case LT_EXPR:
13416 case GT_EXPR:
13417 case LE_EXPR:
13418 case GE_EXPR:
13419 tem = fold_comparison (loc, code, type, op0, op1);
13420 if (tem != NULL_TREE)
13421 return tem;
13422
13423 /* Transform comparisons of the form X +- C CMP X. */
13424 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13425 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13426 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13427 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13428 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13429 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13430 {
13431 tree arg01 = TREE_OPERAND (arg0, 1);
13432 enum tree_code code0 = TREE_CODE (arg0);
13433 int is_positive;
13434
13435 if (TREE_CODE (arg01) == REAL_CST)
13436 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13437 else
13438 is_positive = tree_int_cst_sgn (arg01);
13439
13440 /* (X - c) > X becomes false. */
13441 if (code == GT_EXPR
13442 && ((code0 == MINUS_EXPR && is_positive >= 0)
13443 || (code0 == PLUS_EXPR && is_positive <= 0)))
13444 {
13445 if (TREE_CODE (arg01) == INTEGER_CST
13446 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13447 fold_overflow_warning (("assuming signed overflow does not "
13448 "occur when assuming that (X - c) > X "
13449 "is always false"),
13450 WARN_STRICT_OVERFLOW_ALL);
13451 return constant_boolean_node (0, type);
13452 }
13453
13454 /* Likewise (X + c) < X becomes false. */
13455 if (code == LT_EXPR
13456 && ((code0 == PLUS_EXPR && is_positive >= 0)
13457 || (code0 == MINUS_EXPR && is_positive <= 0)))
13458 {
13459 if (TREE_CODE (arg01) == INTEGER_CST
13460 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13461 fold_overflow_warning (("assuming signed overflow does not "
13462 "occur when assuming that "
13463 "(X + c) < X is always false"),
13464 WARN_STRICT_OVERFLOW_ALL);
13465 return constant_boolean_node (0, type);
13466 }
13467
13468 /* Convert (X - c) <= X to true. */
13469 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13470 && code == LE_EXPR
13471 && ((code0 == MINUS_EXPR && is_positive >= 0)
13472 || (code0 == PLUS_EXPR && is_positive <= 0)))
13473 {
13474 if (TREE_CODE (arg01) == INTEGER_CST
13475 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13476 fold_overflow_warning (("assuming signed overflow does not "
13477 "occur when assuming that "
13478 "(X - c) <= X is always true"),
13479 WARN_STRICT_OVERFLOW_ALL);
13480 return constant_boolean_node (1, type);
13481 }
13482
13483 /* Convert (X + c) >= X to true. */
13484 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13485 && code == GE_EXPR
13486 && ((code0 == PLUS_EXPR && is_positive >= 0)
13487 || (code0 == MINUS_EXPR && is_positive <= 0)))
13488 {
13489 if (TREE_CODE (arg01) == INTEGER_CST
13490 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13491 fold_overflow_warning (("assuming signed overflow does not "
13492 "occur when assuming that "
13493 "(X + c) >= X is always true"),
13494 WARN_STRICT_OVERFLOW_ALL);
13495 return constant_boolean_node (1, type);
13496 }
13497
13498 if (TREE_CODE (arg01) == INTEGER_CST)
13499 {
13500 /* Convert X + c > X and X - c < X to true for integers. */
13501 if (code == GT_EXPR
13502 && ((code0 == PLUS_EXPR && is_positive > 0)
13503 || (code0 == MINUS_EXPR && is_positive < 0)))
13504 {
13505 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13506 fold_overflow_warning (("assuming signed overflow does "
13507 "not occur when assuming that "
13508 "(X + c) > X is always true"),
13509 WARN_STRICT_OVERFLOW_ALL);
13510 return constant_boolean_node (1, type);
13511 }
13512
13513 if (code == LT_EXPR
13514 && ((code0 == MINUS_EXPR && is_positive > 0)
13515 || (code0 == PLUS_EXPR && is_positive < 0)))
13516 {
13517 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13518 fold_overflow_warning (("assuming signed overflow does "
13519 "not occur when assuming that "
13520 "(X - c) < X is always true"),
13521 WARN_STRICT_OVERFLOW_ALL);
13522 return constant_boolean_node (1, type);
13523 }
13524
13525 /* Convert X + c <= X and X - c >= X to false for integers. */
13526 if (code == LE_EXPR
13527 && ((code0 == PLUS_EXPR && is_positive > 0)
13528 || (code0 == MINUS_EXPR && is_positive < 0)))
13529 {
13530 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13531 fold_overflow_warning (("assuming signed overflow does "
13532 "not occur when assuming that "
13533 "(X + c) <= X is always false"),
13534 WARN_STRICT_OVERFLOW_ALL);
13535 return constant_boolean_node (0, type);
13536 }
13537
13538 if (code == GE_EXPR
13539 && ((code0 == MINUS_EXPR && is_positive > 0)
13540 || (code0 == PLUS_EXPR && is_positive < 0)))
13541 {
13542 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13543 fold_overflow_warning (("assuming signed overflow does "
13544 "not occur when assuming that "
13545 "(X - c) >= X is always false"),
13546 WARN_STRICT_OVERFLOW_ALL);
13547 return constant_boolean_node (0, type);
13548 }
13549 }
13550 }
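
/* Added illustration (hypothetical x, signed with undefined
   overflow): the transformations above give, for a positive
   constant c,

     x + c > x    =>   true        x + c <= x   =>   false
     x - c < x    =>   true        x - c >= x   =>   false

   each guarded by a -Wstrict-overflow note, since they rely on
   signed overflow being undefined.  */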
13551
13552 /* Comparisons with the highest or lowest possible integer of
13553 the specified precision will have known values. */
13554 {
13555 tree arg1_type = TREE_TYPE (arg1);
13556 unsigned int width = TYPE_PRECISION (arg1_type);
13557
13558 if (TREE_CODE (arg1) == INTEGER_CST
13559 && width <= HOST_BITS_PER_DOUBLE_INT
13560 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13561 {
13562 HOST_WIDE_INT signed_max_hi;
13563 unsigned HOST_WIDE_INT signed_max_lo;
13564 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13565
13566 if (width <= HOST_BITS_PER_WIDE_INT)
13567 {
13568 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13569 - 1;
13570 signed_max_hi = 0;
13571 max_hi = 0;
13572
13573 if (TYPE_UNSIGNED (arg1_type))
13574 {
13575 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13576 min_lo = 0;
13577 min_hi = 0;
13578 }
13579 else
13580 {
13581 max_lo = signed_max_lo;
13582 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13583 min_hi = -1;
13584 }
13585 }
13586 else
13587 {
13588 width -= HOST_BITS_PER_WIDE_INT;
13589 signed_max_lo = -1;
13590 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13591 - 1;
13592 max_lo = -1;
13593 min_lo = 0;
13594
13595 if (TYPE_UNSIGNED (arg1_type))
13596 {
13597 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13598 min_hi = 0;
13599 }
13600 else
13601 {
13602 max_hi = signed_max_hi;
13603 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13604 }
13605 }
13606
13607 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13608 && TREE_INT_CST_LOW (arg1) == max_lo)
13609 switch (code)
13610 {
13611 case GT_EXPR:
13612 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13613
13614 case GE_EXPR:
13615 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13616
13617 case LE_EXPR:
13618 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13619
13620 case LT_EXPR:
13621 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13622
13623 /* The GE_EXPR and LT_EXPR cases above are not normally
13624 reached because of previous transformations. */
13625
13626 default:
13627 break;
13628 }
13629 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13630 == max_hi
13631 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13632 switch (code)
13633 {
13634 case GT_EXPR:
13635 arg1 = const_binop (PLUS_EXPR, arg1,
13636 build_int_cst (TREE_TYPE (arg1), 1));
13637 return fold_build2_loc (loc, EQ_EXPR, type,
13638 fold_convert_loc (loc,
13639 TREE_TYPE (arg1), arg0),
13640 arg1);
13641 case LE_EXPR:
13642 arg1 = const_binop (PLUS_EXPR, arg1,
13643 build_int_cst (TREE_TYPE (arg1), 1));
13644 return fold_build2_loc (loc, NE_EXPR, type,
13645 fold_convert_loc (loc, TREE_TYPE (arg1),
13646 arg0),
13647 arg1);
13648 default:
13649 break;
13650 }
13651 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13652 == min_hi
13653 && TREE_INT_CST_LOW (arg1) == min_lo)
13654 switch (code)
13655 {
13656 case LT_EXPR:
13657 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13658
13659 case LE_EXPR:
13660 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13661
13662 case GE_EXPR:
13663 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13664
13665 case GT_EXPR:
13666 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13667
13668 default:
13669 break;
13670 }
13671 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13672 == min_hi
13673 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13674 switch (code)
13675 {
13676 case GE_EXPR:
13677 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13678 return fold_build2_loc (loc, NE_EXPR, type,
13679 fold_convert_loc (loc,
13680 TREE_TYPE (arg1), arg0),
13681 arg1);
13682 case LT_EXPR:
13683 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13684 return fold_build2_loc (loc, EQ_EXPR, type,
13685 fold_convert_loc (loc, TREE_TYPE (arg1),
13686 arg0),
13687 arg1);
13688 default:
13689 break;
13690 }
13691
13692 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13693 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13694 && TYPE_UNSIGNED (arg1_type)
13695 /* We will flip the signedness of the comparison operator
13696 associated with the mode of arg1, so the sign bit is
13697 specified by this mode. Check that arg1 is the signed
13698 max associated with this sign bit. */
13699 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13700 /* signed_type does not work on pointer types. */
13701 && INTEGRAL_TYPE_P (arg1_type))
13702 {
13703 /* The following case also applies to X < signed_max+1
13704 and X >= signed_max+1 because of previous transformations. */
13705 if (code == LE_EXPR || code == GT_EXPR)
13706 {
13707 tree st;
13708 st = signed_type_for (TREE_TYPE (arg1));
13709 return fold_build2_loc (loc,
13710 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13711 type, fold_convert_loc (loc, st, arg0),
13712 build_int_cst (st, 0));
13713 }
13714 }
13715 }
13716 }
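
/* Added illustration (hypothetical x of type unsigned char,
   maximum value 255): the extreme-value cases above give

     x > 255    =>   false            x <= 255   =>   true
     x >= 255   =>   x == 255         x < 255    =>   x != 255
     x > 254    =>   x == 255         x <= 254   =>   x != 255

   and the symmetric forms against the minimum value 0.  */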
13717
13718 /* If we are comparing an ABS_EXPR with a constant, we can
13719 convert all the cases into explicit comparisons, but they may
13720 well not be faster than doing the ABS and one comparison.
13721 But ABS (X) <= C is a range comparison, which becomes a subtraction
13722 and a comparison, and is probably faster. */
13723 if (code == LE_EXPR
13724 && TREE_CODE (arg1) == INTEGER_CST
13725 && TREE_CODE (arg0) == ABS_EXPR
13726 && ! TREE_SIDE_EFFECTS (arg0)
13727 && (0 != (tem = negate_expr (arg1)))
13728 && TREE_CODE (tem) == INTEGER_CST
13729 && !TREE_OVERFLOW (tem))
13730 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13731 build2 (GE_EXPR, type,
13732 TREE_OPERAND (arg0, 0), tem),
13733 build2 (LE_EXPR, type,
13734 TREE_OPERAND (arg0, 0), arg1));
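
/* Added illustration (hypothetical x and constant): the range
   form above rewrites

     abs (x) <= 5      =>      x >= -5 && x <= 5

   trading the ABS for a second comparison, which is usually
   cheaper.  */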
13735
13736 /* Convert ABS_EXPR<x> >= 0 to true. */
13737 strict_overflow_p = false;
13738 if (code == GE_EXPR
13739 && (integer_zerop (arg1)
13740 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13741 && real_zerop (arg1)))
13742 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13743 {
13744 if (strict_overflow_p)
13745 fold_overflow_warning (("assuming signed overflow does not occur "
13746 "when simplifying comparison of "
13747 "absolute value and zero"),
13748 WARN_STRICT_OVERFLOW_CONDITIONAL);
13749 return omit_one_operand_loc (loc, type,
13750 constant_boolean_node (true, type),
13751 arg0);
13752 }
13753
13754 /* Convert ABS_EXPR<x> < 0 to false. */
13755 strict_overflow_p = false;
13756 if (code == LT_EXPR
13757 && (integer_zerop (arg1) || real_zerop (arg1))
13758 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13759 {
13760 if (strict_overflow_p)
13761 fold_overflow_warning (("assuming signed overflow does not occur "
13762 "when simplifying comparison of "
13763 "absolute value and zero"),
13764 WARN_STRICT_OVERFLOW_CONDITIONAL);
13765 return omit_one_operand_loc (loc, type,
13766 constant_boolean_node (false, type),
13767 arg0);
13768 }
13769
13770 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13771 and similarly for >= into !=. */
13772 if ((code == LT_EXPR || code == GE_EXPR)
13773 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13774 && TREE_CODE (arg1) == LSHIFT_EXPR
13775 && integer_onep (TREE_OPERAND (arg1, 0)))
13776 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13777 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13778 TREE_OPERAND (arg1, 1)),
13779 build_zero_cst (TREE_TYPE (arg0)));
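
/* Added illustration (hypothetical unsigned x): comparing
   against a power of two built by a variable shift,

     x < (1U << y)     =>      (x >> y) == 0
     x >= (1U << y)    =>      (x >> y) != 0

   which replaces the shifted constant with a shift of x.  */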
13780
13781 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13782 otherwise Y might be >= # of bits in X's type and thus e.g.
13783 (unsigned char) (1 << Y) for Y == 15 might be 0.
13784 If the cast is widening, then 1 << Y should have unsigned type,
13785 otherwise if Y is the number of bits in the signed shift type minus 1,
13786 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13787 Y == 31 might be 0xffffffff80000000. */
13788 if ((code == LT_EXPR || code == GE_EXPR)
13789 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13790 && CONVERT_EXPR_P (arg1)
13791 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13792 && (TYPE_PRECISION (TREE_TYPE (arg1))
13793 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13794 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13795 || (TYPE_PRECISION (TREE_TYPE (arg1))
13796 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13797 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13798 {
13799 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13800 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13801 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13802 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13803 build_zero_cst (TREE_TYPE (arg0)));
13804 }
13805
13806 return NULL_TREE;
13807
13808 case UNORDERED_EXPR:
13809 case ORDERED_EXPR:
13810 case UNLT_EXPR:
13811 case UNLE_EXPR:
13812 case UNGT_EXPR:
13813 case UNGE_EXPR:
13814 case UNEQ_EXPR:
13815 case LTGT_EXPR:
13816 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13817 {
13818 t1 = fold_relational_const (code, type, arg0, arg1);
13819 if (t1 != NULL_TREE)
13820 return t1;
13821 }
13822
13823 /* If the first operand is NaN, the result is constant. */
13824 if (TREE_CODE (arg0) == REAL_CST
13825 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13826 && (code != LTGT_EXPR || ! flag_trapping_math))
13827 {
13828 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13829 ? integer_zero_node
13830 : integer_one_node;
13831 return omit_one_operand_loc (loc, type, t1, arg1);
13832 }
13833
13834 /* If the second operand is NaN, the result is constant. */
13835 if (TREE_CODE (arg1) == REAL_CST
13836 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13837 && (code != LTGT_EXPR || ! flag_trapping_math))
13838 {
13839 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13840 ? integer_zero_node
13841 : integer_one_node;
13842 return omit_one_operand_loc (loc, type, t1, arg0);
13843 }
13844
13845 /* Simplify unordered comparison of something with itself. */
13846 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13847 && operand_equal_p (arg0, arg1, 0))
13848 return constant_boolean_node (1, type);
13849
13850 if (code == LTGT_EXPR
13851 && !flag_trapping_math
13852 && operand_equal_p (arg0, arg1, 0))
13853 return constant_boolean_node (0, type);
13854
13855 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13856 {
13857 tree targ0 = strip_float_extensions (arg0);
13858 tree targ1 = strip_float_extensions (arg1);
13859 tree newtype = TREE_TYPE (targ0);
13860
13861 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13862 newtype = TREE_TYPE (targ1);
13863
13864 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13865 return fold_build2_loc (loc, code, type,
13866 fold_convert_loc (loc, newtype, targ0),
13867 fold_convert_loc (loc, newtype, targ1));
13868 }
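
/* Added illustration (hypothetical floats a and b): when both
   operands are merely widened, an unordered comparison such as

     UNORDERED_EXPR <(double) a, (double) b>   =>   UNORDERED_EXPR <a, b>

   is done in the narrower common type, since the widening is
   exact and cannot change the outcome.  */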
13869
13870 return NULL_TREE;
13871
13872 case COMPOUND_EXPR:
13873 /* When pedantic, a compound expression can be neither an lvalue
13874 nor an integer constant expression. */
13875 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13876 return NULL_TREE;
13877 /* Don't let (0, 0) be a null pointer constant. */
13878 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13879 : fold_convert_loc (loc, type, arg1);
13880 return pedantic_non_lvalue_loc (loc, tem);
13881
13882 case COMPLEX_EXPR:
13883 if ((TREE_CODE (arg0) == REAL_CST
13884 && TREE_CODE (arg1) == REAL_CST)
13885 || (TREE_CODE (arg0) == INTEGER_CST
13886 && TREE_CODE (arg1) == INTEGER_CST))
13887 return build_complex (type, arg0, arg1);
13888 if (TREE_CODE (arg0) == REALPART_EXPR
13889 && TREE_CODE (arg1) == IMAGPART_EXPR
13890 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13891 && operand_equal_p (TREE_OPERAND (arg0, 0),
13892 TREE_OPERAND (arg1, 0), 0))
13893 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13894 TREE_OPERAND (arg1, 0));
13895 return NULL_TREE;
13896
13897 case ASSERT_EXPR:
13898 /* An ASSERT_EXPR should never be passed to fold_binary. */
13899 gcc_unreachable ();
13900
13901 case VEC_PACK_TRUNC_EXPR:
13902 case VEC_PACK_FIX_TRUNC_EXPR:
13903 {
13904 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13905 tree *elts;
13906
13907 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13908 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13909 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13910 return NULL_TREE;
13911
13912 elts = XALLOCAVEC (tree, nelts);
13913 if (!vec_cst_ctor_to_array (arg0, elts)
13914 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13915 return NULL_TREE;
13916
13917 for (i = 0; i < nelts; i++)
13918 {
13919 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13920 ? NOP_EXPR : FIX_TRUNC_EXPR,
13921 TREE_TYPE (type), elts[i]);
13922 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13923 return NULL_TREE;
13924 }
13925
13926 return build_vector (type, elts);
13927 }
13928
13929 case VEC_WIDEN_MULT_LO_EXPR:
13930 case VEC_WIDEN_MULT_HI_EXPR:
13931 case VEC_WIDEN_MULT_EVEN_EXPR:
13932 case VEC_WIDEN_MULT_ODD_EXPR:
13933 {
13934 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13935 unsigned int out, ofs, scale;
13936 tree *elts;
13937
13938 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13939 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13940 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13941 return NULL_TREE;
13942
13943 elts = XALLOCAVEC (tree, nelts * 4);
13944 if (!vec_cst_ctor_to_array (arg0, elts)
13945 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13946 return NULL_TREE;
13947
13948 if (code == VEC_WIDEN_MULT_LO_EXPR)
13949 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13950 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13951 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13952 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13953 scale = 1, ofs = 0;
13954 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13955 scale = 1, ofs = 1;
13956
13957 for (out = 0; out < nelts; out++)
13958 {
13959 unsigned int in1 = (out << scale) + ofs;
13960 unsigned int in2 = in1 + nelts * 2;
13961 tree t1, t2;
13962
13963 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13964 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13965
13966 if (t1 == NULL_TREE || t2 == NULL_TREE)
13967 return NULL_TREE;
13968 elts[out] = const_binop (MULT_EXPR, t1, t2);
13969 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13970 return NULL_TREE;
13971 }
13972
13973 return build_vector (type, elts);
13974 }
13975
13976 default:
13977 return NULL_TREE;
13978 } /* switch (code) */
13979 }
13980
13981 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13982 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13983 of GOTO_EXPR. */
13984
13985 static tree
13986 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13987 {
13988 switch (TREE_CODE (*tp))
13989 {
13990 case LABEL_EXPR:
13991 return *tp;
13992
13993 case GOTO_EXPR:
13994 *walk_subtrees = 0;
13995
13996 /* ... fall through ... */
13997
13998 default:
13999 return NULL_TREE;
14000 }
14001 }
14002
14003 /* Return whether the sub-tree ST contains a label which is accessible from
14004 outside the sub-tree. */
14005
14006 static bool
14007 contains_label_p (tree st)
14008 {
14009 return
14010 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
14011 }
14012
14013 /* Fold a ternary expression of code CODE and type TYPE with operands
14014 OP0, OP1, and OP2. Return the folded expression if folding is
14015 successful. Otherwise, return NULL_TREE. */
14016
14017 tree
14018 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14019 tree op0, tree op1, tree op2)
14020 {
14021 tree tem;
14022 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14023 enum tree_code_class kind = TREE_CODE_CLASS (code);
14024
14025 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14026 && TREE_CODE_LENGTH (code) == 3);
14027
14028 /* Strip any conversions that don't change the mode. This is safe
14029 for every expression, except for a comparison expression because
14030 its signedness is derived from its operands. So, in the latter
14031 case, only strip conversions that don't change the signedness.
14032
14033 Note that this is done as an internal manipulation within the
14034 constant folder, in order to find the simplest representation of
14035 the arguments so that their form can be studied. In any case,
14036 the appropriate type conversions should be put back in the tree
14037 that will get out of the constant folder. */
14038 if (op0)
14039 {
14040 arg0 = op0;
14041 STRIP_NOPS (arg0);
14042 }
14043
14044 if (op1)
14045 {
14046 arg1 = op1;
14047 STRIP_NOPS (arg1);
14048 }
14049
14050 if (op2)
14051 {
14052 arg2 = op2;
14053 STRIP_NOPS (arg2);
14054 }
14055
14056 switch (code)
14057 {
14058 case COMPONENT_REF:
14059 if (TREE_CODE (arg0) == CONSTRUCTOR
14060 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14061 {
14062 unsigned HOST_WIDE_INT idx;
14063 tree field, value;
14064 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14065 if (field == arg1)
14066 return value;
14067 }
14068 return NULL_TREE;
14069
14070 case COND_EXPR:
14071 case VEC_COND_EXPR:
14072 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14073 so all simple results must be passed through pedantic_non_lvalue. */
14074 if (TREE_CODE (arg0) == INTEGER_CST)
14075 {
14076 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14077 tem = integer_zerop (arg0) ? op2 : op1;
14078 /* Only optimize constant conditions when the selected branch
14079 has the same type as the COND_EXPR. This avoids optimizing
14080 away "c ? x : throw", where the throw has a void type.
14081 Avoid throwing away an operand that contains a label. */
14082 if ((!TREE_SIDE_EFFECTS (unused_op)
14083 || !contains_label_p (unused_op))
14084 && (! VOID_TYPE_P (TREE_TYPE (tem))
14085 || VOID_TYPE_P (type)))
14086 return pedantic_non_lvalue_loc (loc, tem);
14087 return NULL_TREE;
14088 }
14089 else if (TREE_CODE (arg0) == VECTOR_CST)
14090 {
14091 if (integer_all_onesp (arg0))
14092 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14093 if (integer_zerop (arg0))
14094 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14095
14096 if ((TREE_CODE (arg1) == VECTOR_CST
14097 || TREE_CODE (arg1) == CONSTRUCTOR)
14098 && (TREE_CODE (arg2) == VECTOR_CST
14099 || TREE_CODE (arg2) == CONSTRUCTOR))
14100 {
14101 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14102 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14103 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14104 for (i = 0; i < nelts; i++)
14105 {
14106 tree val = VECTOR_CST_ELT (arg0, i);
14107 if (integer_all_onesp (val))
14108 sel[i] = i;
14109 else if (integer_zerop (val))
14110 sel[i] = nelts + i;
14111 else /* Currently unreachable. */
14112 return NULL_TREE;
14113 }
14114 tree t = fold_vec_perm (type, arg1, arg2, sel);
14115 if (t != NULL_TREE)
14116 return t;
14117 }
14118 }
14119
14120 if (operand_equal_p (arg1, op2, 0))
14121 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14122
14123 /* If we have A op B ? A : C, we may be able to convert this to a
14124 simpler expression, depending on the operation and the values
14125 of B and C. Signed zeros prevent all of these transformations,
14126 for reasons given above each one.
14127
14128 Also try swapping the arguments and inverting the conditional. */
14129 if (COMPARISON_CLASS_P (arg0)
14130 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14131 arg1, TREE_OPERAND (arg0, 1))
14132 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14133 {
14134 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14135 if (tem)
14136 return tem;
14137 }
14138
14139 if (COMPARISON_CLASS_P (arg0)
14140 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14141 op2,
14142 TREE_OPERAND (arg0, 1))
14143 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14144 {
14145 location_t loc0 = expr_location_or (arg0, loc);
14146 tem = fold_invert_truthvalue (loc0, arg0);
14147 if (tem && COMPARISON_CLASS_P (tem))
14148 {
14149 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14150 if (tem)
14151 return tem;
14152 }
14153 }
14154
14155 /* If the second operand is simpler than the third, swap them
14156 since that produces better jump optimization results. */
14157 if (truth_value_p (TREE_CODE (arg0))
14158 && tree_swap_operands_p (op1, op2, false))
14159 {
14160 location_t loc0 = expr_location_or (arg0, loc);
14161 /* See if this can be inverted. If it can't, possibly because
14162 it was a floating-point inequality comparison, don't do
14163 anything. */
14164 tem = fold_invert_truthvalue (loc0, arg0);
14165 if (tem)
14166 return fold_build3_loc (loc, code, type, tem, op2, op1);
14167 }
14168
14169 /* Convert A ? 1 : 0 to simply A. */
14170 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14171 : (integer_onep (op1)
14172 && !VECTOR_TYPE_P (type)))
14173 && integer_zerop (op2)
14174 /* If we try to convert OP0 to our type, the
14175 call to fold will try to move the conversion inside
14176 a COND, which will recurse. In that case, the COND_EXPR
14177 is probably the best choice, so leave it alone. */
14178 && type == TREE_TYPE (arg0))
14179 return pedantic_non_lvalue_loc (loc, arg0);
14180
14181 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14182 over COND_EXPR in cases such as floating point comparisons. */
14183 if (integer_zerop (op1)
14184 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14185 : (integer_onep (op2)
14186 && !VECTOR_TYPE_P (type)))
14187 && truth_value_p (TREE_CODE (arg0)))
14188 return pedantic_non_lvalue_loc (loc,
14189 fold_convert_loc (loc, type,
14190 invert_truthvalue_loc (loc,
14191 arg0)));
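
/* Added illustration (hypothetical integer condition): the two
   rewrites above give

     (x < y) ? 1 : 0      =>      x < y
     (x < y) ? 0 : 1      =>      x >= y

   where the second relies on the condition being cheaply
   invertible (floating-point inequalities may not be).  */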
14192
14193 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14194 if (TREE_CODE (arg0) == LT_EXPR
14195 && integer_zerop (TREE_OPERAND (arg0, 1))
14196 && integer_zerop (op2)
14197 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14198 {
14199 /* sign_bit_p looks through both zero and sign extensions,
14200 but for this optimization only sign extensions are
14201 usable. */
14202 tree tem2 = TREE_OPERAND (arg0, 0);
14203 while (tem != tem2)
14204 {
14205 if (TREE_CODE (tem2) != NOP_EXPR
14206 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14207 {
14208 tem = NULL_TREE;
14209 break;
14210 }
14211 tem2 = TREE_OPERAND (tem2, 0);
14212 }
14213 /* sign_bit_p only checks ARG1 bits within A's precision.
14214 If <sign bit of A> has a wider type than A, bits outside
14215 of A's precision in <sign bit of A> need to be checked.
14216 If they are all 0, this optimization needs to be done
14217 in unsigned A's type; if they are all 1, in signed A's type;
14218 otherwise this can't be done. */
14219 if (tem
14220 && TYPE_PRECISION (TREE_TYPE (tem))
14221 < TYPE_PRECISION (TREE_TYPE (arg1))
14222 && TYPE_PRECISION (TREE_TYPE (tem))
14223 < TYPE_PRECISION (type))
14224 {
14225 unsigned HOST_WIDE_INT mask_lo;
14226 HOST_WIDE_INT mask_hi;
14227 int inner_width, outer_width;
14228 tree tem_type;
14229
14230 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14231 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14232 if (outer_width > TYPE_PRECISION (type))
14233 outer_width = TYPE_PRECISION (type);
14234
14235 if (outer_width > HOST_BITS_PER_WIDE_INT)
14236 {
14237 mask_hi = (HOST_WIDE_INT_M1U
14238 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14239 mask_lo = -1;
14240 }
14241 else
14242 {
14243 mask_hi = 0;
14244 mask_lo = (HOST_WIDE_INT_M1U
14245 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14246 }
14247 if (inner_width > HOST_BITS_PER_WIDE_INT)
14248 {
14249 mask_hi &= ~(HOST_WIDE_INT_M1U
14250 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14251 mask_lo = 0;
14252 }
14253 else
14254 mask_lo &= ~(HOST_WIDE_INT_M1U
14255 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14256
14257 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14258 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14259 {
14260 tem_type = signed_type_for (TREE_TYPE (tem));
14261 tem = fold_convert_loc (loc, tem_type, tem);
14262 }
14263 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14264 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14265 {
14266 tem_type = unsigned_type_for (TREE_TYPE (tem));
14267 tem = fold_convert_loc (loc, tem_type, tem);
14268 }
14269 else
14270 tem = NULL;
14271 }
14272
14273 if (tem)
14274 return
14275 fold_convert_loc (loc, type,
14276 fold_build2_loc (loc, BIT_AND_EXPR,
14277 TREE_TYPE (tem), tem,
14278 fold_convert_loc (loc,
14279 TREE_TYPE (tem),
14280 arg1)));
14281 }
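
/* Added illustration (hypothetical signed 32-bit x): selecting
   the sign bit by its own test,

     x < 0 ? INT_MIN : 0      =>      x & INT_MIN

   since x & INT_MIN is INT_MIN when x is negative and 0
   otherwise.  */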
14282
14283 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14284 already handled above. */
14285 if (TREE_CODE (arg0) == BIT_AND_EXPR
14286 && integer_onep (TREE_OPERAND (arg0, 1))
14287 && integer_zerop (op2)
14288 && integer_pow2p (arg1))
14289 {
14290 tree tem = TREE_OPERAND (arg0, 0);
14291 STRIP_NOPS (tem);
14292 if (TREE_CODE (tem) == RSHIFT_EXPR
14293 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14294 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14295 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14296 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14297 TREE_OPERAND (tem, 0), arg1);
14298 }
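
/* Added illustration (hypothetical x): with matching shift count
   and mask bit,

     ((x >> 3) & 1) ? 8 : 0      =>      x & 8

   since the selected constant is exactly the tested bit.  */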
14299
14300 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14301 is probably obsolete because the first operand should be a
14302 truth value (that's why we have the two cases above), but let's
14303 leave it in until we can confirm this for all front-ends. */
14304 if (integer_zerop (op2)
14305 && TREE_CODE (arg0) == NE_EXPR
14306 && integer_zerop (TREE_OPERAND (arg0, 1))
14307 && integer_pow2p (arg1)
14308 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14309 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14310 arg1, OEP_ONLY_CONST))
14311 return pedantic_non_lvalue_loc (loc,
14312 fold_convert_loc (loc, type,
14313 TREE_OPERAND (arg0, 0)));
14314
14315 /* Disable the transformations below for vectors, since
14316 fold_binary_op_with_conditional_arg may undo them immediately,
14317 yielding an infinite loop. */
14318 if (code == VEC_COND_EXPR)
14319 return NULL_TREE;
14320
14321 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14322 if (integer_zerop (op2)
14323 && truth_value_p (TREE_CODE (arg0))
14324 && truth_value_p (TREE_CODE (arg1))
14325 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14326 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14327 : TRUTH_ANDIF_EXPR,
14328 type, fold_convert_loc (loc, type, arg0), arg1);
14329
14330 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14331 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14332 && truth_value_p (TREE_CODE (arg0))
14333 && truth_value_p (TREE_CODE (arg1))
14334 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14335 {
14336 location_t loc0 = expr_location_or (arg0, loc);
14337 /* Only perform transformation if ARG0 is easily inverted. */
14338 tem = fold_invert_truthvalue (loc0, arg0);
14339 if (tem)
14340 return fold_build2_loc (loc, code == VEC_COND_EXPR
14341 ? BIT_IOR_EXPR
14342 : TRUTH_ORIF_EXPR,
14343 type, fold_convert_loc (loc, type, tem),
14344 arg1);
14345 }
14346
14347 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14348 if (integer_zerop (arg1)
14349 && truth_value_p (TREE_CODE (arg0))
14350 && truth_value_p (TREE_CODE (op2))
14351 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14352 {
14353 location_t loc0 = expr_location_or (arg0, loc);
14354 /* Only perform transformation if ARG0 is easily inverted. */
14355 tem = fold_invert_truthvalue (loc0, arg0);
14356 if (tem)
14357 return fold_build2_loc (loc, code == VEC_COND_EXPR
14358 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14359 type, fold_convert_loc (loc, type, tem),
14360 op2);
14361 }
14362
14363 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14364 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14365 && truth_value_p (TREE_CODE (arg0))
14366 && truth_value_p (TREE_CODE (op2))
14367 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14368 return fold_build2_loc (loc, code == VEC_COND_EXPR
14369 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14370 type, fold_convert_loc (loc, type, arg0), op2);
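
/* Added illustration (hypothetical truth values a and b): the
   four rewrites above are

     a ? b : 0   =>   a && b          a ? b : 1   =>   !a || b
     a ? 0 : b   =>   !a && b         a ? 1 : b   =>   a || b

   (with the bitwise forms used for VEC_COND_EXPR).  */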
14371
14372 return NULL_TREE;
14373
14374 case CALL_EXPR:
14375 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14376 of fold_ternary on them. */
14377 gcc_unreachable ();
14378
14379 case BIT_FIELD_REF:
14380 if ((TREE_CODE (arg0) == VECTOR_CST
14381 || (TREE_CODE (arg0) == CONSTRUCTOR
14382 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14383 && (type == TREE_TYPE (TREE_TYPE (arg0))
14384 || (TREE_CODE (type) == VECTOR_TYPE
14385 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14386 {
14387 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14388 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14389 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14390 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14391
14392 if (n != 0
14393 && (idx % width) == 0
14394 && (n % width) == 0
14395 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14396 {
14397 idx = idx / width;
14398 n = n / width;
14399
14400 if (TREE_CODE (arg0) == VECTOR_CST)
14401 {
14402 if (n == 1)
14403 return VECTOR_CST_ELT (arg0, idx);
14404
14405 tree *vals = XALLOCAVEC (tree, n);
14406 for (unsigned i = 0; i < n; ++i)
14407 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14408 return build_vector (type, vals);
14409 }
14410
14411 /* Constructor elements can be subvectors. */
14412 unsigned HOST_WIDE_INT k = 1;
14413 if (CONSTRUCTOR_NELTS (arg0) != 0)
14414 {
14415 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14416 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14417 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14418 }
14419
14420 /* We keep an exact subset of the constructor elements. */
14421 if ((idx % k) == 0 && (n % k) == 0)
14422 {
14423 if (CONSTRUCTOR_NELTS (arg0) == 0)
14424 return build_constructor (type, NULL);
14425 idx /= k;
14426 n /= k;
14427 if (n == 1)
14428 {
14429 if (idx < CONSTRUCTOR_NELTS (arg0))
14430 return CONSTRUCTOR_ELT (arg0, idx)->value;
14431 return build_zero_cst (type);
14432 }
14433
14434 vec<constructor_elt, va_gc> *vals;
14435 vec_alloc (vals, n);
14436 for (unsigned i = 0;
14437 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14438 ++i)
14439 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14440 CONSTRUCTOR_ELT
14441 (arg0, idx + i)->value);
14442 return build_constructor (type, vals);
14443 }
14444 /* The bitfield references a single constructor element. */
14445 else if (idx + n <= (idx / k + 1) * k)
14446 {
14447 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14448 return build_zero_cst (type);
14449 else if (n == k)
14450 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14451 else
14452 return fold_build3_loc (loc, code, type,
14453 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14454 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14455 }
14456 }
14457 }
14458
14459 /* A bit-field-ref that referenced the full argument can be stripped. */
14460 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14461 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14462 && integer_zerop (op2))
14463 return fold_convert_loc (loc, type, arg0);
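
/* Added illustration (hypothetical): BIT_FIELD_REF <x, 32, 0> on a
   32-bit integer x covers the whole object and simply becomes a
   conversion of x to the result type.  */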
14464
14465 /* On constants we can use native encode/interpret to constant
14466 fold (nearly) all BIT_FIELD_REFs. */
14467 if (CONSTANT_CLASS_P (arg0)
14468 && can_native_interpret_type_p (type)
14469 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14470 /* This limitation should not be necessary, we just need to
14471 round this up to mode size. */
14472 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14473 /* Need bit-shifting of the buffer to relax the following. */
14474 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14475 {
14476 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14477 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14478 unsigned HOST_WIDE_INT clen;
14479 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14480 /* ??? We cannot tell native_encode_expr to start at
14481 an arbitrary byte only. So limit ourselves to a reasonable
14482 amount of work. */
14483 if (clen <= 4096)
14484 {
14485 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14486 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14487 if (len > 0
14488 && len * BITS_PER_UNIT >= bitpos + bitsize)
14489 {
14490 tree v = native_interpret_expr (type,
14491 b + bitpos / BITS_PER_UNIT,
14492 bitsize / BITS_PER_UNIT);
14493 if (v)
14494 return v;
14495 }
14496 }
14497 }
14498
14499 return NULL_TREE;
14500
14501 case FMA_EXPR:
14502 /* For integers we can decompose the FMA if possible. */
14503 if (TREE_CODE (arg0) == INTEGER_CST
14504 && TREE_CODE (arg1) == INTEGER_CST)
14505 return fold_build2_loc (loc, PLUS_EXPR, type,
14506 const_binop (MULT_EXPR, arg0, arg1), arg2);
14507 if (integer_zerop (arg2))
14508 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14509
14510 return fold_fma (loc, type, arg0, arg1, arg2);
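
/* Added illustration (hypothetical operands): the FMA cases above
   give

     FMA <3, 4, z>   =>   12 + z        FMA <x, y, 0>   =>   x * y

   decomposing the fused form when part of it is constant.  */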
14511
14512 case VEC_PERM_EXPR:
14513 if (TREE_CODE (arg2) == VECTOR_CST)
14514 {
14515 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14516 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14517 tree t;
14518 bool need_mask_canon = false;
14519 bool all_in_vec0 = true;
14520 bool all_in_vec1 = true;
14521 bool maybe_identity = true;
14522 bool single_arg = (op0 == op1);
14523 bool changed = false;
14524
14525 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14526 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14527 for (i = 0; i < nelts; i++)
14528 {
14529 tree val = VECTOR_CST_ELT (arg2, i);
14530 if (TREE_CODE (val) != INTEGER_CST)
14531 return NULL_TREE;
14532
14533 sel[i] = TREE_INT_CST_LOW (val) & mask;
14534 if (TREE_INT_CST_HIGH (val)
14535 || ((unsigned HOST_WIDE_INT)
14536 TREE_INT_CST_LOW (val) != sel[i]))
14537 need_mask_canon = true;
14538
14539 if (sel[i] < nelts)
14540 all_in_vec1 = false;
14541 else
14542 all_in_vec0 = false;
14543
14544 if ((sel[i] & (nelts-1)) != i)
14545 maybe_identity = false;
14546 }
14547
14548 if (maybe_identity)
14549 {
14550 if (all_in_vec0)
14551 return op0;
14552 if (all_in_vec1)
14553 return op1;
14554 }
14555
14556 if (all_in_vec0)
14557 op1 = op0;
14558 else if (all_in_vec1)
14559 {
14560 op0 = op1;
14561 for (i = 0; i < nelts; i++)
14562 sel[i] -= nelts;
14563 need_mask_canon = true;
14564 }
14565
14566 if ((TREE_CODE (op0) == VECTOR_CST
14567 || TREE_CODE (op0) == CONSTRUCTOR)
14568 && (TREE_CODE (op1) == VECTOR_CST
14569 || TREE_CODE (op1) == CONSTRUCTOR))
14570 {
14571 t = fold_vec_perm (type, op0, op1, sel);
14572 if (t != NULL_TREE)
14573 return t;
14574 }
14575
14576 if (op0 == op1 && !single_arg)
14577 changed = true;
14578
14579 if (need_mask_canon && arg2 == op2)
14580 {
14581 tree *tsel = XALLOCAVEC (tree, nelts);
14582 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14583 for (i = 0; i < nelts; i++)
14584 tsel[i] = build_int_cst (eltype, sel[i]);
14585 op2 = build_vector (TREE_TYPE (arg2), tsel);
14586 changed = true;
14587 }
14588
14589 if (changed)
14590 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14591 }
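
/* Added illustration (hypothetical 4-element vectors): a
   VEC_PERM_EXPR selecting {0, 1, 4, 5} from constant operands
   {a0,a1,a2,a3} and {b0,b1,b2,b3} folds to the constant vector
   {a0, a1, b0, b1}; an identity mask drawn entirely from one
   operand folds to that operand itself.  */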
14592 return NULL_TREE;
14593
14594 default:
14595 return NULL_TREE;
14596 } /* switch (code) */
14597 }
14598
14599 /* Perform constant folding and related simplification of EXPR.
14600 The related simplifications include x*1 => x, x*0 => 0, etc.,
14601 and application of the associative law.
14602 NOP_EXPR conversions may be removed freely (as long as we
14603 are careful not to change the type of the overall expression).
14604 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14605 but we can constant-fold them if they have constant operands. */
14606
14607 #ifdef ENABLE_FOLD_CHECKING
14608 # define fold(x) fold_1 (x)
14609 static tree fold_1 (tree);
14610 static
14611 #endif
14612 tree
14613 fold (tree expr)
14614 {
14615 const tree t = expr;
14616 enum tree_code code = TREE_CODE (t);
14617 enum tree_code_class kind = TREE_CODE_CLASS (code);
14618 tree tem;
14619 location_t loc = EXPR_LOCATION (expr);
14620
14621 /* Return right away if a constant. */
14622 if (kind == tcc_constant)
14623 return t;
14624
14625 /* CALL_EXPR-like objects with variable numbers of operands are
14626 treated specially. */
14627 if (kind == tcc_vl_exp)
14628 {
14629 if (code == CALL_EXPR)
14630 {
14631 tem = fold_call_expr (loc, expr, false);
14632 return tem ? tem : expr;
14633 }
14634 return expr;
14635 }
14636
14637 if (IS_EXPR_CODE_CLASS (kind))
14638 {
14639 tree type = TREE_TYPE (t);
14640 tree op0, op1, op2;
14641
14642 switch (TREE_CODE_LENGTH (code))
14643 {
14644 case 1:
14645 op0 = TREE_OPERAND (t, 0);
14646 tem = fold_unary_loc (loc, code, type, op0);
14647 return tem ? tem : expr;
14648 case 2:
14649 op0 = TREE_OPERAND (t, 0);
14650 op1 = TREE_OPERAND (t, 1);
14651 tem = fold_binary_loc (loc, code, type, op0, op1);
14652 return tem ? tem : expr;
14653 case 3:
14654 op0 = TREE_OPERAND (t, 0);
14655 op1 = TREE_OPERAND (t, 1);
14656 op2 = TREE_OPERAND (t, 2);
14657 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14658 return tem ? tem : expr;
14659 default:
14660 break;
14661 }
14662 }
14663
14664 switch (code)
14665 {
14666 case ARRAY_REF:
14667 {
14668 tree op0 = TREE_OPERAND (t, 0);
14669 tree op1 = TREE_OPERAND (t, 1);
14670
14671 if (TREE_CODE (op1) == INTEGER_CST
14672 && TREE_CODE (op0) == CONSTRUCTOR
14673 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14674 {
14675 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14676 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14677 unsigned HOST_WIDE_INT begin = 0;
14678
14679 /* Find a matching index by means of a binary search. */
14680 while (begin != end)
14681 {
14682 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14683 tree index = (*elts)[middle].index;
14684
14685 if (TREE_CODE (index) == INTEGER_CST
14686 && tree_int_cst_lt (index, op1))
14687 begin = middle + 1;
14688 else if (TREE_CODE (index) == INTEGER_CST
14689 && tree_int_cst_lt (op1, index))
14690 end = middle;
14691 else if (TREE_CODE (index) == RANGE_EXPR
14692 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14693 begin = middle + 1;
14694 else if (TREE_CODE (index) == RANGE_EXPR
14695 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14696 end = middle;
14697 else
14698 return (*elts)[middle].value;
14699 }
14700 }
14701
14702 return t;
14703 }
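
/* Added illustration (hypothetical): for a constant-indexed
   CONSTRUCTOR such as the initializer {[0] = 10, [5] = 60},
   an ARRAY_REF with constant index 5 binary-searches the element
   list and folds directly to 60.  */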
14704
14705 /* Return a VECTOR_CST if possible. */
14706 case CONSTRUCTOR:
14707 {
14708 tree type = TREE_TYPE (t);
14709 if (TREE_CODE (type) != VECTOR_TYPE)
14710 return t;
14711
14712 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14713 unsigned HOST_WIDE_INT idx, pos = 0;
14714 tree value;
14715
14716 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14717 {
14718 if (!CONSTANT_CLASS_P (value))
14719 return t;
14720 if (TREE_CODE (value) == VECTOR_CST)
14721 {
14722 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14723 vec[pos++] = VECTOR_CST_ELT (value, i);
14724 }
14725 else
14726 vec[pos++] = value;
14727 }
14728 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14729 vec[pos] = build_zero_cst (TREE_TYPE (type));
14730
14731 return build_vector (type, vec);
14732 }
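
/* Added illustration (hypothetical GNU vector type): a constant
   CONSTRUCTOR such as (v4si){1, 2} is flattened and zero-padded
   into the VECTOR_CST {1, 2, 0, 0}.  */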
14733
14734 case CONST_DECL:
14735 return fold (DECL_INITIAL (t));
14736
14737 default:
14738 return t;
14739 } /* switch (code) */
14740 }
14741
14742 #ifdef ENABLE_FOLD_CHECKING
14743 #undef fold
14744
14745 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14746 hash_table <pointer_hash <tree_node> >);
14747 static void fold_check_failed (const_tree, const_tree);
14748 void print_fold_checksum (const_tree);
14749
14750 /* When --enable-checking=fold is in effect, compute a digest of expr
14751 before and after the actual fold call to verify that fold did not
14752 accidentally change the original expr. */
14753
14754 tree
14755 fold (tree expr)
14756 {
14757 tree ret;
14758 struct md5_ctx ctx;
14759 unsigned char checksum_before[16], checksum_after[16];
14760 hash_table <pointer_hash <tree_node> > ht;
14761
14762 ht.create (32);
14763 md5_init_ctx (&ctx);
14764 fold_checksum_tree (expr, &ctx, ht);
14765 md5_finish_ctx (&ctx, checksum_before);
14766 ht.empty ();
14767
14768 ret = fold_1 (expr);
14769
14770 md5_init_ctx (&ctx);
14771 fold_checksum_tree (expr, &ctx, ht);
14772 md5_finish_ctx (&ctx, checksum_after);
14773 ht.dispose ();
14774
14775 if (memcmp (checksum_before, checksum_after, 16))
14776 fold_check_failed (expr, ret);
14777
14778 return ret;
14779 }
14780
14781 void
14782 print_fold_checksum (const_tree expr)
14783 {
14784 struct md5_ctx ctx;
14785 unsigned char checksum[16], cnt;
14786 hash_table <pointer_hash <tree_node> > ht;
14787
14788 ht.create (32);
14789 md5_init_ctx (&ctx);
14790 fold_checksum_tree (expr, &ctx, ht);
14791 md5_finish_ctx (&ctx, checksum);
14792 ht.dispose ();
14793 for (cnt = 0; cnt < 16; ++cnt)
14794 fprintf (stderr, "%02x", checksum[cnt]);
14795 putc ('\n', stderr);
14796 }
14797
14798 static void
14799 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14800 {
14801 internal_error ("fold check: original tree changed by fold");
14802 }
14803
14804 static void
14805 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14806 hash_table <pointer_hash <tree_node> > ht)
14807 {
14808 tree_node **slot;
14809 enum tree_code code;
14810 union tree_node buf;
14811 int i, len;
14812
14813 recursive_label:
14814 if (expr == NULL)
14815 return;
14816 slot = ht.find_slot (expr, INSERT);
14817 if (*slot != NULL)
14818 return;
14819 *slot = CONST_CAST_TREE (expr);
14820 code = TREE_CODE (expr);
14821 if (TREE_CODE_CLASS (code) == tcc_declaration
14822 && DECL_ASSEMBLER_NAME_SET_P (expr))
14823 {
14824 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14825 memcpy ((char *) &buf, expr, tree_size (expr));
14826 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14827 expr = (tree) &buf;
14828 }
14829 else if (TREE_CODE_CLASS (code) == tcc_type
14830 && (TYPE_POINTER_TO (expr)
14831 || TYPE_REFERENCE_TO (expr)
14832 || TYPE_CACHED_VALUES_P (expr)
14833 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14834 || TYPE_NEXT_VARIANT (expr)))
14835 {
14836 /* Allow these fields to be modified. */
14837 tree tmp;
14838 memcpy ((char *) &buf, expr, tree_size (expr));
14839 expr = tmp = (tree) &buf;
14840 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14841 TYPE_POINTER_TO (tmp) = NULL;
14842 TYPE_REFERENCE_TO (tmp) = NULL;
14843 TYPE_NEXT_VARIANT (tmp) = NULL;
14844 if (TYPE_CACHED_VALUES_P (tmp))
14845 {
14846 TYPE_CACHED_VALUES_P (tmp) = 0;
14847 TYPE_CACHED_VALUES (tmp) = NULL;
14848 }
14849 }
14850 md5_process_bytes (expr, tree_size (expr), ctx);
14851 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14852 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14853 if (TREE_CODE_CLASS (code) != tcc_type
14854 && TREE_CODE_CLASS (code) != tcc_declaration
14855 && code != TREE_LIST
14856 && code != SSA_NAME
14857 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14858 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14859 switch (TREE_CODE_CLASS (code))
14860 {
14861 case tcc_constant:
14862 switch (code)
14863 {
14864 case STRING_CST:
14865 md5_process_bytes (TREE_STRING_POINTER (expr),
14866 TREE_STRING_LENGTH (expr), ctx);
14867 break;
14868 case COMPLEX_CST:
14869 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14870 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14871 break;
14872 case VECTOR_CST:
14873 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14874 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14875 break;
14876 default:
14877 break;
14878 }
14879 break;
14880 case tcc_exceptional:
14881 switch (code)
14882 {
14883 case TREE_LIST:
14884 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14885 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14886 expr = TREE_CHAIN (expr);
14887 goto recursive_label;
14888 break;
14889 case TREE_VEC:
14890 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14891 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14892 break;
14893 default:
14894 break;
14895 }
14896 break;
14897 case tcc_expression:
14898 case tcc_reference:
14899 case tcc_comparison:
14900 case tcc_unary:
14901 case tcc_binary:
14902 case tcc_statement:
14903 case tcc_vl_exp:
14904 len = TREE_OPERAND_LENGTH (expr);
14905 for (i = 0; i < len; ++i)
14906 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14907 break;
14908 case tcc_declaration:
14909 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14910 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14911 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14912 {
14913 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14914 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14915 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14916 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14917 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14918 }
14919 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14920 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14921
14922 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14923 {
14924 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14925 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14926 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14927 }
14928 break;
14929 case tcc_type:
14930 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14931 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14932 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14933 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14934 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14935 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14936 if (INTEGRAL_TYPE_P (expr)
14937 || SCALAR_FLOAT_TYPE_P (expr))
14938 {
14939 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14940 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14941 }
14942 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14943 if (TREE_CODE (expr) == RECORD_TYPE
14944 || TREE_CODE (expr) == UNION_TYPE
14945 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14946 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14947 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14948 break;
14949 default:
14950 break;
14951 }
14952 }
14953
14954 /* Helper function for outputting the checksum of a tree T. When
14955 debugging with gdb, you can "define mynext" to be "next" followed
14956 by "call debug_fold_checksum (op0)", then just trace down till the
14957 outputs differ. */
14958
14959 DEBUG_FUNCTION void
14960 debug_fold_checksum (const_tree t)
14961 {
14962 int i;
14963 unsigned char checksum[16];
14964 struct md5_ctx ctx;
14965 hash_table <pointer_hash <tree_node> > ht;
14966 ht.create (32);
14967
14968 md5_init_ctx (&ctx);
14969 fold_checksum_tree (t, &ctx, ht);
14970 md5_finish_ctx (&ctx, checksum);
14971 ht.empty ();
14972
14973 for (i = 0; i < 16; i++)
14974 fprintf (stderr, "%d ", checksum[i]);
14975
14976 fprintf (stderr, "\n");
14977 }
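/* Illustrative gdb session for the recipe above (an assumed transcript,
   not part of the original sources):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext
     151 45 211 ... 9

   Re-issuing "mynext" and watching for the first step at which the sixteen
   printed bytes change pinpoints the statement that mutated OP0.  */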
14978
14979 #endif
14980
14981 /* Fold a unary tree expression with code CODE of type TYPE with an
14982 operand OP0. LOC is the location of the resulting expression.
14983 Return a folded expression if successful. Otherwise, return a tree
14984 expression with code CODE of type TYPE with an operand OP0. */
14985
14986 tree
14987 fold_build1_stat_loc (location_t loc,
14988 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14989 {
14990 tree tem;
14991 #ifdef ENABLE_FOLD_CHECKING
14992 unsigned char checksum_before[16], checksum_after[16];
14993 struct md5_ctx ctx;
14994 hash_table <pointer_hash <tree_node> > ht;
14995
14996 ht.create (32);
14997 md5_init_ctx (&ctx);
14998 fold_checksum_tree (op0, &ctx, ht);
14999 md5_finish_ctx (&ctx, checksum_before);
15000 ht.empty ();
15001 #endif
15002
15003 tem = fold_unary_loc (loc, code, type, op0);
15004 if (!tem)
15005 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15006
15007 #ifdef ENABLE_FOLD_CHECKING
15008 md5_init_ctx (&ctx);
15009 fold_checksum_tree (op0, &ctx, ht);
15010 md5_finish_ctx (&ctx, checksum_after);
15011 ht.dispose ();
15012
15013 if (memcmp (checksum_before, checksum_after, 16))
15014 fold_check_failed (op0, tem);
15015 #endif
15016 return tem;
15017 }
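/* A minimal sketch (not from the original sources) of the kind of bug the
   ENABLE_FOLD_CHECKING instrumentation above catches: fold routines must
   treat their operands as read-only trees.  A transformation that mutated
   its input in place, e.g.

     TREE_SET_CODE (op0, PLUS_EXPR);   <-- wrong: clobbers a shared tree

   somewhere inside fold_unary_loc would change the MD5 checksum of OP0
   between the "before" and "after" snapshots, and fold_check_failed would
   abort with both trees available for inspection.  */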
15018
15019 /* Fold a binary tree expression with code CODE of type TYPE with
15020 operands OP0 and OP1. LOC is the location of the resulting
15021 expression. Return a folded expression if successful. Otherwise,
15022 return a tree expression with code CODE of type TYPE with operands
15023 OP0 and OP1. */
15024
15025 tree
15026 fold_build2_stat_loc (location_t loc,
15027 enum tree_code code, tree type, tree op0, tree op1
15028 MEM_STAT_DECL)
15029 {
15030 tree tem;
15031 #ifdef ENABLE_FOLD_CHECKING
15032 unsigned char checksum_before_op0[16],
15033 checksum_before_op1[16],
15034 checksum_after_op0[16],
15035 checksum_after_op1[16];
15036 struct md5_ctx ctx;
15037 hash_table <pointer_hash <tree_node> > ht;
15038
15039 ht.create (32);
15040 md5_init_ctx (&ctx);
15041 fold_checksum_tree (op0, &ctx, ht);
15042 md5_finish_ctx (&ctx, checksum_before_op0);
15043 ht.empty ();
15044
15045 md5_init_ctx (&ctx);
15046 fold_checksum_tree (op1, &ctx, ht);
15047 md5_finish_ctx (&ctx, checksum_before_op1);
15048 ht.empty ();
15049 #endif
15050
15051 tem = fold_binary_loc (loc, code, type, op0, op1);
15052 if (!tem)
15053 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15054
15055 #ifdef ENABLE_FOLD_CHECKING
15056 md5_init_ctx (&ctx);
15057 fold_checksum_tree (op0, &ctx, ht);
15058 md5_finish_ctx (&ctx, checksum_after_op0);
15059 ht.empty ();
15060
15061 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15062 fold_check_failed (op0, tem);
15063
15064 md5_init_ctx (&ctx);
15065 fold_checksum_tree (op1, &ctx, ht);
15066 md5_finish_ctx (&ctx, checksum_after_op1);
15067 ht.dispose ();
15068
15069 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15070 fold_check_failed (op1, tem);
15071 #endif
15072 return tem;
15073 }
15074
15075 /* Fold a ternary tree expression with code CODE of type TYPE with
15076 operands OP0, OP1, and OP2. Return a folded expression if
15077 successful. Otherwise, return a tree expression with code CODE of
15078 type TYPE with operands OP0, OP1, and OP2. */
15079
15080 tree
15081 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15082 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15083 {
15084 tree tem;
15085 #ifdef ENABLE_FOLD_CHECKING
15086 unsigned char checksum_before_op0[16],
15087 checksum_before_op1[16],
15088 checksum_before_op2[16],
15089 checksum_after_op0[16],
15090 checksum_after_op1[16],
15091 checksum_after_op2[16];
15092 struct md5_ctx ctx;
15093 hash_table <pointer_hash <tree_node> > ht;
15094
15095 ht.create (32);
15096 md5_init_ctx (&ctx);
15097 fold_checksum_tree (op0, &ctx, ht);
15098 md5_finish_ctx (&ctx, checksum_before_op0);
15099 ht.empty ();
15100
15101 md5_init_ctx (&ctx);
15102 fold_checksum_tree (op1, &ctx, ht);
15103 md5_finish_ctx (&ctx, checksum_before_op1);
15104 ht.empty ();
15105
15106 md5_init_ctx (&ctx);
15107 fold_checksum_tree (op2, &ctx, ht);
15108 md5_finish_ctx (&ctx, checksum_before_op2);
15109 ht.empty ();
15110 #endif
15111
15112 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15113 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15114 if (!tem)
15115 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15116
15117 #ifdef ENABLE_FOLD_CHECKING
15118 md5_init_ctx (&ctx);
15119 fold_checksum_tree (op0, &ctx, ht);
15120 md5_finish_ctx (&ctx, checksum_after_op0);
15121 ht.empty ();
15122
15123 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15124 fold_check_failed (op0, tem);
15125
15126 md5_init_ctx (&ctx);
15127 fold_checksum_tree (op1, &ctx, ht);
15128 md5_finish_ctx (&ctx, checksum_after_op1);
15129 ht.empty ();
15130
15131 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15132 fold_check_failed (op1, tem);
15133
15134 md5_init_ctx (&ctx);
15135 fold_checksum_tree (op2, &ctx, ht);
15136 md5_finish_ctx (&ctx, checksum_after_op2);
15137 ht.dispose ();
15138
15139 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15140 fold_check_failed (op2, tem);
15141 #endif
15142 return tem;
15143 }
15144
15145 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
15146 arguments in ARGARRAY, and a null static chain.
15147 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15148 of type TYPE from the given operands as constructed by build_call_array. */
15149
15150 tree
15151 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15152 int nargs, tree *argarray)
15153 {
15154 tree tem;
15155 #ifdef ENABLE_FOLD_CHECKING
15156 unsigned char checksum_before_fn[16],
15157 checksum_before_arglist[16],
15158 checksum_after_fn[16],
15159 checksum_after_arglist[16];
15160 struct md5_ctx ctx;
15161 hash_table <pointer_hash <tree_node> > ht;
15162 int i;
15163
15164 ht.create (32);
15165 md5_init_ctx (&ctx);
15166 fold_checksum_tree (fn, &ctx, ht);
15167 md5_finish_ctx (&ctx, checksum_before_fn);
15168 ht.empty ();
15169
15170 md5_init_ctx (&ctx);
15171 for (i = 0; i < nargs; i++)
15172 fold_checksum_tree (argarray[i], &ctx, ht);
15173 md5_finish_ctx (&ctx, checksum_before_arglist);
15174 ht.empty ();
15175 #endif
15176
15177 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15178
15179 #ifdef ENABLE_FOLD_CHECKING
15180 md5_init_ctx (&ctx);
15181 fold_checksum_tree (fn, &ctx, ht);
15182 md5_finish_ctx (&ctx, checksum_after_fn);
15183 ht.empty ();
15184
15185 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15186 fold_check_failed (fn, tem);
15187
15188 md5_init_ctx (&ctx);
15189 for (i = 0; i < nargs; i++)
15190 fold_checksum_tree (argarray[i], &ctx, ht);
15191 md5_finish_ctx (&ctx, checksum_after_arglist);
15192 ht.dispose ();
15193
15194 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15195 fold_check_failed (NULL_TREE, tem);
15196 #endif
15197 return tem;
15198 }
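/* Illustrative use (hypothetical caller, not from the original sources):
   given SQRT_DECL, a FUNCTION_DECL for the sqrt builtin obtained elsewhere,

     tree arg = build_real (double_type_node, dconst2);
     tree fn = build_fold_addr_expr (sqrt_decl);
     tree res = fold_build_call_array_loc (loc, double_type_node,
                                           fn, 1, &arg);

   may fold directly to a REAL_CST for sqrt (2.0) via
   fold_builtin_call_array rather than building a CALL_EXPR.  */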
15199
15200 /* Perform constant folding and related simplification of an initializer
15201 expression.  These routines behave identically to "fold_buildN" but ignore
15202 potential run-time traps and exceptions that fold must otherwise preserve. */
15203
15204 #define START_FOLD_INIT \
15205 int saved_signaling_nans = flag_signaling_nans;\
15206 int saved_trapping_math = flag_trapping_math;\
15207 int saved_rounding_math = flag_rounding_math;\
15208 int saved_trapv = flag_trapv;\
15209 int saved_folding_initializer = folding_initializer;\
15210 flag_signaling_nans = 0;\
15211 flag_trapping_math = 0;\
15212 flag_rounding_math = 0;\
15213 flag_trapv = 0;\
15214 folding_initializer = 1;
15215
15216 #define END_FOLD_INIT \
15217 flag_signaling_nans = saved_signaling_nans;\
15218 flag_trapping_math = saved_trapping_math;\
15219 flag_rounding_math = saved_rounding_math;\
15220 flag_trapv = saved_trapv;\
15221 folding_initializer = saved_folding_initializer;
15222
15223 tree
15224 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15225 tree type, tree op)
15226 {
15227 tree result;
15228 START_FOLD_INIT;
15229
15230 result = fold_build1_loc (loc, code, type, op);
15231
15232 END_FOLD_INIT;
15233 return result;
15234 }
15235
15236 tree
15237 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15238 tree type, tree op0, tree op1)
15239 {
15240 tree result;
15241 START_FOLD_INIT;
15242
15243 result = fold_build2_loc (loc, code, type, op0, op1);
15244
15245 END_FOLD_INIT;
15246 return result;
15247 }
15248
15249 tree
15250 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15251 tree type, tree op0, tree op1, tree op2)
15252 {
15253 tree result;
15254 START_FOLD_INIT;
15255
15256 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15257
15258 END_FOLD_INIT;
15259 return result;
15260 }
15261
15262 tree
15263 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15264 int nargs, tree *argarray)
15265 {
15266 tree result;
15267 START_FOLD_INIT;
15268
15269 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15270
15271 END_FOLD_INIT;
15272 return result;
15273 }
15274
15275 #undef START_FOLD_INIT
15276 #undef END_FOLD_INIT
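/* Illustrative difference (not from the original sources): with
   -ftrapping-math in effect, const_binop declines to fold 0.0 / 0.0 because
   producing the NaN would raise an FP exception at run time.  A static
   initializer must still reduce to a constant, so

     static const double d = 0.0 / 0.0;

   is folded through fold_build2_initializer_loc, which temporarily clears
   flag_trapping_math and friends as above.  */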
15277
15278 /* Determine whether the first argument is a multiple of the second.  Return
15279 0 if it is not, or if we cannot easily determine it to be.
15280
15281 An example of the sort of thing we care about (at this point; this routine
15282 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15283 fold cases do now) is discovering that
15284
15285 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15286
15287 is a multiple of
15288
15289 SAVE_EXPR (J * 8)
15290
15291 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15292
15293 This code also handles discovering that
15294
15295 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15296
15297 is a multiple of 8 so we don't have to worry about dealing with a
15298 possible remainder.
15299
15300 Note that we *look* inside a SAVE_EXPR only to determine how it was
15301 calculated; it is not safe for fold to do much of anything else with the
15302 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15303 at run time. For example, the latter example above *cannot* be implemented
15304 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15305 evaluation time of the original SAVE_EXPR is not necessarily the same at
15306 the time the new expression is evaluated. The only optimization of this
15307 sort that would be valid is changing
15308
15309 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15310
15311 divided by 8 to
15312
15313 SAVE_EXPR (I) * SAVE_EXPR (J)
15314
15315 (where the same SAVE_EXPR (J) is used in the original and the
15316 transformed version). */
15317
15318 int
15319 multiple_of_p (tree type, const_tree top, const_tree bottom)
15320 {
15321 if (operand_equal_p (top, bottom, 0))
15322 return 1;
15323
15324 if (TREE_CODE (type) != INTEGER_TYPE)
15325 return 0;
15326
15327 switch (TREE_CODE (top))
15328 {
15329 case BIT_AND_EXPR:
15330 /* A bitwise AND yields a power-of-two multiple: if either operand is
15331 a multiple of BOTTOM (which must be a power of two), then so is TOP. */
15332 if (!integer_pow2p (bottom))
15333 return 0;
15334 /* FALLTHRU */
15335
15336 case MULT_EXPR:
15337 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15338 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15339
15340 case PLUS_EXPR:
15341 case MINUS_EXPR:
15342 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15343 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15344
15345 case LSHIFT_EXPR:
15346 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15347 {
15348 tree op1, t1;
15349
15350 op1 = TREE_OPERAND (top, 1);
15351 /* const_binop may not detect overflow correctly,
15352 so check for it explicitly here. */
15353 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15354 > TREE_INT_CST_LOW (op1)
15355 && TREE_INT_CST_HIGH (op1) == 0
15356 && 0 != (t1 = fold_convert (type,
15357 const_binop (LSHIFT_EXPR,
15358 size_one_node,
15359 op1)))
15360 && !TREE_OVERFLOW (t1))
15361 return multiple_of_p (type, t1, bottom);
15362 }
15363 return 0;
15364
15365 case NOP_EXPR:
15366 /* Can't handle conversions from non-integral or wider integral type. */
15367 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15368 || (TYPE_PRECISION (type)
15369 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15370 return 0;
15371
15372 /* ... fall through ... */
15373
15374 case SAVE_EXPR:
15375 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15376
15377 case COND_EXPR:
15378 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15379 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15380
15381 case INTEGER_CST:
15382 if (TREE_CODE (bottom) != INTEGER_CST
15383 || integer_zerop (bottom)
15384 || (TYPE_UNSIGNED (type)
15385 && (tree_int_cst_sgn (top) < 0
15386 || tree_int_cst_sgn (bottom) < 0)))
15387 return 0;
15388 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15389 top, bottom));
15390
15391 default:
15392 return 0;
15393 }
15394 }
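/* Worked example (illustrative, not from the original sources): for TYPE
   "int", multiple_of_p (type, (x * 8) + 16, 8) returns 1 through the
   PLUS_EXPR case: "x * 8" is a MULT_EXPR whose second operand is a multiple
   of 8, and "16" is an INTEGER_CST with 16 % 8 == 0.  By contrast,
   multiple_of_p (type, x + 4, 8) returns 0, since nothing is known about
   X.  */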
15395
15396 /* Return true if CODE or TYPE is known to be non-negative. */
15397
15398 static bool
15399 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15400 {
15401 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15402 && truth_value_p (code))
15403 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15404 have a signed:1 type (where the values are -1 and 0). */
15405 return true;
15406 return false;
15407 }
15408
15409 /* Return true if (CODE OP0) is known to be non-negative. If the return
15410 value is based on the assumption that signed overflow is undefined,
15411 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15412 *STRICT_OVERFLOW_P. */
15413
15414 bool
15415 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15416 bool *strict_overflow_p)
15417 {
15418 if (TYPE_UNSIGNED (type))
15419 return true;
15420
15421 switch (code)
15422 {
15423 case ABS_EXPR:
15424 /* We can't return 1 if flag_wrapv is set because
15425 ABS_EXPR<INT_MIN> = INT_MIN. */
15426 if (!INTEGRAL_TYPE_P (type))
15427 return true;
15428 if (TYPE_OVERFLOW_UNDEFINED (type))
15429 {
15430 *strict_overflow_p = true;
15431 return true;
15432 }
15433 break;
15434
15435 case NON_LVALUE_EXPR:
15436 case FLOAT_EXPR:
15437 case FIX_TRUNC_EXPR:
15438 return tree_expr_nonnegative_warnv_p (op0,
15439 strict_overflow_p);
15440
15441 case NOP_EXPR:
15442 {
15443 tree inner_type = TREE_TYPE (op0);
15444 tree outer_type = type;
15445
15446 if (TREE_CODE (outer_type) == REAL_TYPE)
15447 {
15448 if (TREE_CODE (inner_type) == REAL_TYPE)
15449 return tree_expr_nonnegative_warnv_p (op0,
15450 strict_overflow_p);
15451 if (INTEGRAL_TYPE_P (inner_type))
15452 {
15453 if (TYPE_UNSIGNED (inner_type))
15454 return true;
15455 return tree_expr_nonnegative_warnv_p (op0,
15456 strict_overflow_p);
15457 }
15458 }
15459 else if (INTEGRAL_TYPE_P (outer_type))
15460 {
15461 if (TREE_CODE (inner_type) == REAL_TYPE)
15462 return tree_expr_nonnegative_warnv_p (op0,
15463 strict_overflow_p);
15464 if (INTEGRAL_TYPE_P (inner_type))
15465 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15466 && TYPE_UNSIGNED (inner_type);
15467 }
15468 }
15469 break;
15470
15471 default:
15472 return tree_simple_nonnegative_warnv_p (code, type);
15473 }
15474
15475 /* We don't know the sign of the operand, so be conservative and return false. */
15476 return false;
15477 }
15478
15479 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15480 value is based on the assumption that signed overflow is undefined,
15481 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15482 *STRICT_OVERFLOW_P. */
15483
15484 bool
15485 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15486 tree op1, bool *strict_overflow_p)
15487 {
15488 if (TYPE_UNSIGNED (type))
15489 return true;
15490
15491 switch (code)
15492 {
15493 case POINTER_PLUS_EXPR:
15494 case PLUS_EXPR:
15495 if (FLOAT_TYPE_P (type))
15496 return (tree_expr_nonnegative_warnv_p (op0,
15497 strict_overflow_p)
15498 && tree_expr_nonnegative_warnv_p (op1,
15499 strict_overflow_p));
15500
15501 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15502 both unsigned and at least 2 bits shorter than the result. */
15503 if (TREE_CODE (type) == INTEGER_TYPE
15504 && TREE_CODE (op0) == NOP_EXPR
15505 && TREE_CODE (op1) == NOP_EXPR)
15506 {
15507 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15508 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15509 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15510 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15511 {
15512 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15513 TYPE_PRECISION (inner2)) + 1;
15514 return prec < TYPE_PRECISION (type);
15515 }
15516 }
15517 break;
15518
15519 case MULT_EXPR:
15520 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15521 {
15522 /* x * x is always non-negative for floating point x
15523 or in the absence of overflow. */
15524 if (operand_equal_p (op0, op1, 0)
15525 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15526 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15527 {
15528 if (TYPE_OVERFLOW_UNDEFINED (type))
15529 *strict_overflow_p = true;
15530 return true;
15531 }
15532 }
15533
15534 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15535 both unsigned and the sum of their precisions is less than that of the result. */
15536 if (TREE_CODE (type) == INTEGER_TYPE
15537 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15538 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15539 {
15540 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15541 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15542 : TREE_TYPE (op0);
15543 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15544 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15545 : TREE_TYPE (op1);
15546
15547 bool unsigned0 = TYPE_UNSIGNED (inner0);
15548 bool unsigned1 = TYPE_UNSIGNED (inner1);
15549
15550 if (TREE_CODE (op0) == INTEGER_CST)
15551 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15552
15553 if (TREE_CODE (op1) == INTEGER_CST)
15554 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15555
15556 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15557 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15558 {
15559 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15560 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15561 : TYPE_PRECISION (inner0);
15562
15563 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15564 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15565 : TYPE_PRECISION (inner1);
15566
15567 return precision0 + precision1 < TYPE_PRECISION (type);
15568 }
15569 }
15570 return false;
15571
15572 case BIT_AND_EXPR:
15573 case MAX_EXPR:
15574 return (tree_expr_nonnegative_warnv_p (op0,
15575 strict_overflow_p)
15576 || tree_expr_nonnegative_warnv_p (op1,
15577 strict_overflow_p));
15578
15579 case BIT_IOR_EXPR:
15580 case BIT_XOR_EXPR:
15581 case MIN_EXPR:
15582 case RDIV_EXPR:
15583 case TRUNC_DIV_EXPR:
15584 case CEIL_DIV_EXPR:
15585 case FLOOR_DIV_EXPR:
15586 case ROUND_DIV_EXPR:
15587 return (tree_expr_nonnegative_warnv_p (op0,
15588 strict_overflow_p)
15589 && tree_expr_nonnegative_warnv_p (op1,
15590 strict_overflow_p));
15591
15592 case TRUNC_MOD_EXPR:
15593 case CEIL_MOD_EXPR:
15594 case FLOOR_MOD_EXPR:
15595 case ROUND_MOD_EXPR:
15596 return tree_expr_nonnegative_warnv_p (op0,
15597 strict_overflow_p);
15598 default:
15599 return tree_simple_nonnegative_warnv_p (code, type);
15600 }
15601
15602 /* We don't know the sign of the expression, so be conservative and return false. */
15603 return false;
15604 }
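/* Worked example for the PLUS_EXPR precision test above (illustrative, not
   from the original sources): for
   (int) (unsigned char) a + (int) (unsigned char) b, both inner types have
   precision 8, so prec = MAX (8, 8) + 1 = 9 < 32 and the sum (at most
   255 + 255 = 510) is known nonnegative.  For unsigned int operands prec
   would be 33, so the test correctly fails: the sum can wrap into the sign
   bit.  */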
15605
15606 /* Return true if T is known to be non-negative. If the return
15607 value is based on the assumption that signed overflow is undefined,
15608 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15609 *STRICT_OVERFLOW_P. */
15610
15611 bool
15612 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15613 {
15614 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15615 return true;
15616
15617 switch (TREE_CODE (t))
15618 {
15619 case INTEGER_CST:
15620 return tree_int_cst_sgn (t) >= 0;
15621
15622 case REAL_CST:
15623 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15624
15625 case FIXED_CST:
15626 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15627
15628 case COND_EXPR:
15629 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15630 strict_overflow_p)
15631 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15632 strict_overflow_p));
15633 default:
15634 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15635 TREE_TYPE (t));
15636 }
15637 /* We don't know the sign of `t', so be conservative and return false. */
15638 return false;
15639 }
15640
15641 /* Return true if T is known to be non-negative. If the return
15642 value is based on the assumption that signed overflow is undefined,
15643 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15644 *STRICT_OVERFLOW_P. */
15645
15646 bool
15647 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15648 tree arg0, tree arg1, bool *strict_overflow_p)
15649 {
15650 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15651 switch (DECL_FUNCTION_CODE (fndecl))
15652 {
15653 CASE_FLT_FN (BUILT_IN_ACOS):
15654 CASE_FLT_FN (BUILT_IN_ACOSH):
15655 CASE_FLT_FN (BUILT_IN_CABS):
15656 CASE_FLT_FN (BUILT_IN_COSH):
15657 CASE_FLT_FN (BUILT_IN_ERFC):
15658 CASE_FLT_FN (BUILT_IN_EXP):
15659 CASE_FLT_FN (BUILT_IN_EXP10):
15660 CASE_FLT_FN (BUILT_IN_EXP2):
15661 CASE_FLT_FN (BUILT_IN_FABS):
15662 CASE_FLT_FN (BUILT_IN_FDIM):
15663 CASE_FLT_FN (BUILT_IN_HYPOT):
15664 CASE_FLT_FN (BUILT_IN_POW10):
15665 CASE_INT_FN (BUILT_IN_FFS):
15666 CASE_INT_FN (BUILT_IN_PARITY):
15667 CASE_INT_FN (BUILT_IN_POPCOUNT):
15668 CASE_INT_FN (BUILT_IN_CLZ):
15669 CASE_INT_FN (BUILT_IN_CLRSB):
15670 case BUILT_IN_BSWAP32:
15671 case BUILT_IN_BSWAP64:
15672 /* Always true. */
15673 return true;
15674
15675 CASE_FLT_FN (BUILT_IN_SQRT):
15676 /* sqrt(-0.0) is -0.0. */
15677 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15678 return true;
15679 return tree_expr_nonnegative_warnv_p (arg0,
15680 strict_overflow_p);
15681
15682 CASE_FLT_FN (BUILT_IN_ASINH):
15683 CASE_FLT_FN (BUILT_IN_ATAN):
15684 CASE_FLT_FN (BUILT_IN_ATANH):
15685 CASE_FLT_FN (BUILT_IN_CBRT):
15686 CASE_FLT_FN (BUILT_IN_CEIL):
15687 CASE_FLT_FN (BUILT_IN_ERF):
15688 CASE_FLT_FN (BUILT_IN_EXPM1):
15689 CASE_FLT_FN (BUILT_IN_FLOOR):
15690 CASE_FLT_FN (BUILT_IN_FMOD):
15691 CASE_FLT_FN (BUILT_IN_FREXP):
15692 CASE_FLT_FN (BUILT_IN_ICEIL):
15693 CASE_FLT_FN (BUILT_IN_IFLOOR):
15694 CASE_FLT_FN (BUILT_IN_IRINT):
15695 CASE_FLT_FN (BUILT_IN_IROUND):
15696 CASE_FLT_FN (BUILT_IN_LCEIL):
15697 CASE_FLT_FN (BUILT_IN_LDEXP):
15698 CASE_FLT_FN (BUILT_IN_LFLOOR):
15699 CASE_FLT_FN (BUILT_IN_LLCEIL):
15700 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15701 CASE_FLT_FN (BUILT_IN_LLRINT):
15702 CASE_FLT_FN (BUILT_IN_LLROUND):
15703 CASE_FLT_FN (BUILT_IN_LRINT):
15704 CASE_FLT_FN (BUILT_IN_LROUND):
15705 CASE_FLT_FN (BUILT_IN_MODF):
15706 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15707 CASE_FLT_FN (BUILT_IN_RINT):
15708 CASE_FLT_FN (BUILT_IN_ROUND):
15709 CASE_FLT_FN (BUILT_IN_SCALB):
15710 CASE_FLT_FN (BUILT_IN_SCALBLN):
15711 CASE_FLT_FN (BUILT_IN_SCALBN):
15712 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15713 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15714 CASE_FLT_FN (BUILT_IN_SINH):
15715 CASE_FLT_FN (BUILT_IN_TANH):
15716 CASE_FLT_FN (BUILT_IN_TRUNC):
15717 /* True if the 1st argument is nonnegative. */
15718 return tree_expr_nonnegative_warnv_p (arg0,
15719 strict_overflow_p);
15720
15721 CASE_FLT_FN (BUILT_IN_FMAX):
15722 /* True if the 1st OR 2nd arguments are nonnegative. */
15723 return (tree_expr_nonnegative_warnv_p (arg0,
15724 strict_overflow_p)
15725 || (tree_expr_nonnegative_warnv_p (arg1,
15726 strict_overflow_p)));
15727
15728 CASE_FLT_FN (BUILT_IN_FMIN):
15729 /* True if the 1st AND 2nd arguments are nonnegative. */
15730 return (tree_expr_nonnegative_warnv_p (arg0,
15731 strict_overflow_p)
15732 && (tree_expr_nonnegative_warnv_p (arg1,
15733 strict_overflow_p)));
15734
15735 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15736 /* True if the 2nd argument is nonnegative. */
15737 return tree_expr_nonnegative_warnv_p (arg1,
15738 strict_overflow_p);
15739
15740 CASE_FLT_FN (BUILT_IN_POWI):
15741 /* True if the 1st argument is nonnegative or the second
15742 argument is an even integer. */
15743 if (TREE_CODE (arg1) == INTEGER_CST
15744 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15745 return true;
15746 return tree_expr_nonnegative_warnv_p (arg0,
15747 strict_overflow_p);
15748
15749 CASE_FLT_FN (BUILT_IN_POW):
15750 /* True if the 1st argument is nonnegative or the second
15751 argument is an even integer-valued real. */
15752 if (TREE_CODE (arg1) == REAL_CST)
15753 {
15754 REAL_VALUE_TYPE c;
15755 HOST_WIDE_INT n;
15756
15757 c = TREE_REAL_CST (arg1);
15758 n = real_to_integer (&c);
15759 if ((n & 1) == 0)
15760 {
15761 REAL_VALUE_TYPE cint;
15762 real_from_integer (&cint, VOIDmode, n,
15763 n < 0 ? -1 : 0, 0);
15764 if (real_identical (&c, &cint))
15765 return true;
15766 }
15767 }
15768 return tree_expr_nonnegative_warnv_p (arg0,
15769 strict_overflow_p);
15770
15771 default:
15772 break;
15773 }
15774 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15775 type);
15776 }
15777
15778 /* Return true if T is known to be non-negative. If the return
15779 value is based on the assumption that signed overflow is undefined,
15780 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15781 *STRICT_OVERFLOW_P. */
15782
15783 bool
15784 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15785 {
15786 enum tree_code code = TREE_CODE (t);
15787 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15788 return true;
15789
15790 switch (code)
15791 {
15792 case TARGET_EXPR:
15793 {
15794 tree temp = TARGET_EXPR_SLOT (t);
15795 t = TARGET_EXPR_INITIAL (t);
15796
15797 /* If the initializer is non-void, then it's a normal expression
15798 that will be assigned to the slot. */
15799 if (!VOID_TYPE_P (t))
15800 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15801
15802 /* Otherwise, the initializer sets the slot in some way. One common
15803 way is an assignment statement at the end of the initializer. */
15804 while (1)
15805 {
15806 if (TREE_CODE (t) == BIND_EXPR)
15807 t = expr_last (BIND_EXPR_BODY (t));
15808 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15809 || TREE_CODE (t) == TRY_CATCH_EXPR)
15810 t = expr_last (TREE_OPERAND (t, 0));
15811 else if (TREE_CODE (t) == STATEMENT_LIST)
15812 t = expr_last (t);
15813 else
15814 break;
15815 }
15816 if (TREE_CODE (t) == MODIFY_EXPR
15817 && TREE_OPERAND (t, 0) == temp)
15818 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15819 strict_overflow_p);
15820
15821 return false;
15822 }
15823
15824 case CALL_EXPR:
15825 {
15826 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15827 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15828
15829 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15830 get_callee_fndecl (t),
15831 arg0,
15832 arg1,
15833 strict_overflow_p);
15834 }
15835 case COMPOUND_EXPR:
15836 case MODIFY_EXPR:
15837 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15838 strict_overflow_p);
15839 case BIND_EXPR:
15840 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15841 strict_overflow_p);
15842 case SAVE_EXPR:
15843 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15844 strict_overflow_p);
15845
15846 default:
15847 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15848 TREE_TYPE (t));
15849 }
15850
15851 /* We don't know the sign of `t', so be conservative and return false. */
15852 return false;
15853 }
15854
15855 /* Return true if T is known to be non-negative. If the return
15856 value is based on the assumption that signed overflow is undefined,
15857 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15858 *STRICT_OVERFLOW_P. */
15859
15860 bool
15861 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15862 {
15863 enum tree_code code;
15864 if (t == error_mark_node)
15865 return false;
15866
15867 code = TREE_CODE (t);
15868 switch (TREE_CODE_CLASS (code))
15869 {
15870 case tcc_binary:
15871 case tcc_comparison:
15872 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15873 TREE_TYPE (t),
15874 TREE_OPERAND (t, 0),
15875 TREE_OPERAND (t, 1),
15876 strict_overflow_p);
15877
15878 case tcc_unary:
15879 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15880 TREE_TYPE (t),
15881 TREE_OPERAND (t, 0),
15882 strict_overflow_p);
15883
15884 case tcc_constant:
15885 case tcc_declaration:
15886 case tcc_reference:
15887 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15888
15889 default:
15890 break;
15891 }
15892
15893 switch (code)
15894 {
15895 case TRUTH_AND_EXPR:
15896 case TRUTH_OR_EXPR:
15897 case TRUTH_XOR_EXPR:
15898 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15899 TREE_TYPE (t),
15900 TREE_OPERAND (t, 0),
15901 TREE_OPERAND (t, 1),
15902 strict_overflow_p);
15903 case TRUTH_NOT_EXPR:
15904 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15905 TREE_TYPE (t),
15906 TREE_OPERAND (t, 0),
15907 strict_overflow_p);
15908
15909 case COND_EXPR:
15910 case CONSTRUCTOR:
15911 case OBJ_TYPE_REF:
15912 case ASSERT_EXPR:
15913 case ADDR_EXPR:
15914 case WITH_SIZE_EXPR:
15915 case SSA_NAME:
15916 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15917
15918 default:
15919 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15920 }
15921 }
15922
15923 /* Return true if `t' is known to be non-negative. Handle warnings
15924 about undefined signed overflow. */
15925
15926 bool
15927 tree_expr_nonnegative_p (tree t)
15928 {
15929 bool ret, strict_overflow_p;
15930
15931 strict_overflow_p = false;
15932 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15933 if (strict_overflow_p)
15934 fold_overflow_warning (("assuming signed overflow does not occur when "
15935 "determining that expression is always "
15936 "non-negative"),
15937 WARN_STRICT_OVERFLOW_MISC);
15938 return ret;
15939 }
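/* Illustrative use (hypothetical, not from the original sources): a
   transformation that wants to simplify ABS_EXPR <x> to "x" can guard
   itself with

     if (tree_expr_nonnegative_p (op0))
       return op0;

   and when the answer relied on undefined signed overflow (e.g. for
   "x * x" with signed X), the wrapper above emits the -Wstrict-overflow
   warning on the caller's behalf.  */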
15940
15941
15942 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15943 For floating point we further ensure that T is not denormal.
15944 Similar logic is present in nonzero_address_p in rtlanal.c.
15945
15946 If the return value is based on the assumption that signed overflow
15947 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15948 change *STRICT_OVERFLOW_P. */
15949
15950 bool
15951 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15952 bool *strict_overflow_p)
15953 {
15954 switch (code)
15955 {
15956 case ABS_EXPR:
15957 return tree_expr_nonzero_warnv_p (op0,
15958 strict_overflow_p);
15959
15960 case NOP_EXPR:
15961 {
15962 tree inner_type = TREE_TYPE (op0);
15963 tree outer_type = type;
15964
15965 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15966 && tree_expr_nonzero_warnv_p (op0,
15967 strict_overflow_p));
15968 }
15969 break;
15970
15971 case NON_LVALUE_EXPR:
15972 return tree_expr_nonzero_warnv_p (op0,
15973 strict_overflow_p);
15974
15975 default:
15976 break;
15977 }
15978
15979 return false;
15980 }
15981
15982 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15983 For floating point we further ensure that T is not denormal.
15984 Similar logic is present in nonzero_address_p in rtlanal.c.
15985
15986 If the return value is based on the assumption that signed overflow
15987 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15988 change *STRICT_OVERFLOW_P. */
15989
15990 bool
15991 tree_binary_nonzero_warnv_p (enum tree_code code,
15992 tree type,
15993 tree op0,
15994 tree op1, bool *strict_overflow_p)
15995 {
15996 bool sub_strict_overflow_p;
15997 switch (code)
15998 {
15999 case POINTER_PLUS_EXPR:
16000 case PLUS_EXPR:
16001 if (TYPE_OVERFLOW_UNDEFINED (type))
16002 {
16003 /* In the presence of negative values it is hard
16004 to say anything definite. */
16005 sub_strict_overflow_p = false;
16006 if (!tree_expr_nonnegative_warnv_p (op0,
16007 &sub_strict_overflow_p)
16008 || !tree_expr_nonnegative_warnv_p (op1,
16009 &sub_strict_overflow_p))
16010 return false;
16011 /* One of the operands must be positive and the other non-negative. */
16012 /* We don't set *STRICT_OVERFLOW_P here: even if this value
16013 overflows, on a two's-complement machine the sum of two
16014 nonnegative numbers can never be zero. */
16015 return (tree_expr_nonzero_warnv_p (op0,
16016 strict_overflow_p)
16017 || tree_expr_nonzero_warnv_p (op1,
16018 strict_overflow_p));
16019 }
16020 break;
16021
16022 case MULT_EXPR:
16023 if (TYPE_OVERFLOW_UNDEFINED (type))
16024 {
16025 if (tree_expr_nonzero_warnv_p (op0,
16026 strict_overflow_p)
16027 && tree_expr_nonzero_warnv_p (op1,
16028 strict_overflow_p))
16029 {
16030 *strict_overflow_p = true;
16031 return true;
16032 }
16033 }
16034 break;
16035
16036 case MIN_EXPR:
16037 sub_strict_overflow_p = false;
16038 if (tree_expr_nonzero_warnv_p (op0,
16039 &sub_strict_overflow_p)
16040 && tree_expr_nonzero_warnv_p (op1,
16041 &sub_strict_overflow_p))
16042 {
16043 if (sub_strict_overflow_p)
16044 *strict_overflow_p = true;
16045 }
16046 break;
16047
16048 case MAX_EXPR:
16049 sub_strict_overflow_p = false;
16050 if (tree_expr_nonzero_warnv_p (op0,
16051 &sub_strict_overflow_p))
16052 {
16053 if (sub_strict_overflow_p)
16054 *strict_overflow_p = true;
16055
16056 /* When both operands are nonzero, then MAX must be too. */
16057 if (tree_expr_nonzero_warnv_p (op1,
16058 strict_overflow_p))
16059 return true;
16060
16061 /* MAX where operand 0 is positive is positive. */
16062 return tree_expr_nonnegative_warnv_p (op0,
16063 strict_overflow_p);
16064 }
16065 /* MAX where operand 1 is positive is positive. */
16066 else if (tree_expr_nonzero_warnv_p (op1,
16067 &sub_strict_overflow_p)
16068 && tree_expr_nonnegative_warnv_p (op1,
16069 &sub_strict_overflow_p))
16070 {
16071 if (sub_strict_overflow_p)
16072 *strict_overflow_p = true;
16073 return true;
16074 }
16075 break;
16076
16077 case BIT_IOR_EXPR:
16078 return (tree_expr_nonzero_warnv_p (op1,
16079 strict_overflow_p)
16080 || tree_expr_nonzero_warnv_p (op0,
16081 strict_overflow_p));
16082
16083 default:
16084 break;
16085 }
16086
16087 return false;
16088 }
16089
16090 /* Return true when T is an address and is known to be nonzero.
16091 For floating point we further ensure that T is not denormal.
16092 Similar logic is present in nonzero_address_p in rtlanal.c.
16093
16094 If the return value is based on the assumption that signed overflow
16095 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16096 change *STRICT_OVERFLOW_P. */
16097
16098 bool
16099 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16100 {
16101 bool sub_strict_overflow_p;
16102 switch (TREE_CODE (t))
16103 {
16104 case INTEGER_CST:
16105 return !integer_zerop (t);
16106
16107 case ADDR_EXPR:
16108 {
16109 tree base = TREE_OPERAND (t, 0);
16110 if (!DECL_P (base))
16111 base = get_base_address (base);
16112
16113 if (!base)
16114 return false;
16115
16116 /* Weak declarations may link to NULL. Other things may also be NULL,
16117 so protect with -fdelete-null-pointer-checks; variables allocated on
16118 the stack, however, are known nonzero without the flag. */
16119 if (DECL_P (base)
16120 && (flag_delete_null_pointer_checks
16121 || (DECL_CONTEXT (base)
16122 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16123 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16124 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16125
16126 /* Constants are never weak. */
16127 if (CONSTANT_CLASS_P (base))
16128 return true;
16129
16130 return false;
16131 }
16132
16133 case COND_EXPR:
16134 sub_strict_overflow_p = false;
16135 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16136 &sub_strict_overflow_p)
16137 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16138 &sub_strict_overflow_p))
16139 {
16140 if (sub_strict_overflow_p)
16141 *strict_overflow_p = true;
16142 return true;
16143 }
16144 break;
16145
16146 default:
16147 break;
16148 }
16149 return false;
16150 }
16151
16152 /* Return true when T is an address and is known to be nonzero.
16153 For floating point we further ensure that T is not denormal.
16154 Similar logic is present in nonzero_address_p in rtlanal.c.
16155
16156 If the return value is based on the assumption that signed overflow
16157 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16158 change *STRICT_OVERFLOW_P. */
16159
16160 bool
16161 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16162 {
16163 tree type = TREE_TYPE (t);
16164 enum tree_code code;
16165
16166 /* Doing something useful for floating point would need more work. */
16167 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
16168 return false;
16169
16170 code = TREE_CODE (t);
16171 switch (TREE_CODE_CLASS (code))
16172 {
16173 case tcc_unary:
16174 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16175 strict_overflow_p);
16176 case tcc_binary:
16177 case tcc_comparison:
16178 return tree_binary_nonzero_warnv_p (code, type,
16179 TREE_OPERAND (t, 0),
16180 TREE_OPERAND (t, 1),
16181 strict_overflow_p);
16182 case tcc_constant:
16183 case tcc_declaration:
16184 case tcc_reference:
16185 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16186
16187 default:
16188 break;
16189 }
16190
16191 switch (code)
16192 {
16193 case TRUTH_NOT_EXPR:
16194 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16195 strict_overflow_p);
16196
16197 case TRUTH_AND_EXPR:
16198 case TRUTH_OR_EXPR:
16199 case TRUTH_XOR_EXPR:
16200 return tree_binary_nonzero_warnv_p (code, type,
16201 TREE_OPERAND (t, 0),
16202 TREE_OPERAND (t, 1),
16203 strict_overflow_p);
16204
16205 case COND_EXPR:
16206 case CONSTRUCTOR:
16207 case OBJ_TYPE_REF:
16208 case ASSERT_EXPR:
16209 case ADDR_EXPR:
16210 case WITH_SIZE_EXPR:
16211 case SSA_NAME:
16212 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16213
16214 case COMPOUND_EXPR:
16215 case MODIFY_EXPR:
16216 case BIND_EXPR:
16217 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16218 strict_overflow_p);
16219
16220 case SAVE_EXPR:
16221 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16222 strict_overflow_p);
16223
16224 case CALL_EXPR:
16225 {
16226 tree fndecl = get_callee_fndecl (t);
16227 if (!fndecl) return false;
16228 if (flag_delete_null_pointer_checks && !flag_check_new
16229 && DECL_IS_OPERATOR_NEW (fndecl)
16230 && !TREE_NOTHROW (fndecl))
16231 return true;
16232 return alloca_call_p (t);
16233 }
16234
16235 default:
16236 break;
16237 }
16238 return false;
16239 }
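/* Illustrative effect of the CALL_EXPR case above (cf. PR c++/19476): for

     int *p = new int;
     if (p == 0)
       ...

   the throwing form of "operator new" cannot return a null pointer, so
   with -fdelete-null-pointer-checks and without -fcheck-new the test
   "p == 0" folds to false and the dead branch is eliminated.  */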
16240
16241 /* Return true when T is an address and is known to be nonzero.
16242 Handle warnings about undefined signed overflow. */
16243
16244 bool
16245 tree_expr_nonzero_p (tree t)
16246 {
16247 bool ret, strict_overflow_p;
16248
16249 strict_overflow_p = false;
16250 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16251 if (strict_overflow_p)
16252 fold_overflow_warning (("assuming signed overflow does not occur when "
16253 "determining that expression is always "
16254 "non-zero"),
16255 WARN_STRICT_OVERFLOW_MISC);
16256 return ret;
16257 }
16258
16259 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16260 attempt to fold the expression to a constant without modifying TYPE,
16261 OP0 or OP1.
16262
16263 If the expression could be simplified to a constant, then return
16264 the constant. If the expression would not be simplified to a
16265 constant, then return NULL_TREE. */
16266
16267 tree
16268 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16269 {
16270 tree tem = fold_binary (code, type, op0, op1);
16271 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16272 }
16273
16274 /* Given the components of a unary expression CODE, TYPE and OP0,
16275 attempt to fold the expression to a constant without modifying
16276 TYPE or OP0.
16277
16278 If the expression could be simplified to a constant, then return
16279 the constant. If the expression would not be simplified to a
16280 constant, then return NULL_TREE. */
16281
16282 tree
16283 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16284 {
16285 tree tem = fold_unary (code, type, op0);
16286 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16287 }
16288
16289 /* If EXP represents referencing an element in a constant string
16290 (either via pointer arithmetic or array indexing), return the
16291 tree representing the value accessed, otherwise return NULL. */
16292
16293 tree
16294 fold_read_from_constant_string (tree exp)
16295 {
16296 if ((TREE_CODE (exp) == INDIRECT_REF
16297 || TREE_CODE (exp) == ARRAY_REF)
16298 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16299 {
16300 tree exp1 = TREE_OPERAND (exp, 0);
16301 tree index;
16302 tree string;
16303 location_t loc = EXPR_LOCATION (exp);
16304
16305 if (TREE_CODE (exp) == INDIRECT_REF)
16306 string = string_constant (exp1, &index);
16307 else
16308 {
16309 tree low_bound = array_ref_low_bound (exp);
16310 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16311
16312 /* Optimize the special-case of a zero lower bound.
16313
16314 We convert the low_bound to sizetype to avoid some problems
16315 with constant folding. (E.g. suppose the lower bound is 1,
16316 and its mode is QI. Without the conversion, (ARRAY
16317 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16318 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16319 if (! integer_zerop (low_bound))
16320 index = size_diffop_loc (loc, index,
16321 fold_convert_loc (loc, sizetype, low_bound));
16322
16323 string = exp1;
16324 }
16325
16326 if (string
16327 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16328 && TREE_CODE (string) == STRING_CST
16329 && TREE_CODE (index) == INTEGER_CST
16330 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16331 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16332 == MODE_INT)
16333 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16334 return build_int_cst_type (TREE_TYPE (exp),
16335 (TREE_STRING_POINTER (string)
16336 [TREE_INT_CST_LOW (index)]));
16337 }
16338 return NULL;
16339 }
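/* Illustrative example (not from the original sources): for "abc"[1],
   EXP is an ARRAY_REF of a character type, STRING is the STRING_CST "abc",
   INDEX is the INTEGER_CST 1 (below TREE_STRING_LENGTH), and the element
   mode is a one-byte MODE_INT, so the reference folds to the INTEGER_CST
   98 ('b').  *("abc" + 1) reaches the same result through the INDIRECT_REF
   path and string_constant.  */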
16340
16341 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16342 an integer constant, real, or fixed-point constant.
16343
16344 TYPE is the type of the result. */
16345
16346 static tree
16347 fold_negate_const (tree arg0, tree type)
16348 {
16349 tree t = NULL_TREE;
16350
16351 switch (TREE_CODE (arg0))
16352 {
16353 case INTEGER_CST:
16354 {
16355 double_int val = tree_to_double_int (arg0);
16356 bool overflow;
16357 val = val.neg_with_overflow (&overflow);
16358 t = force_fit_type_double (type, val, 1,
16359 (overflow | TREE_OVERFLOW (arg0))
16360 && !TYPE_UNSIGNED (type));
16361 break;
16362 }
16363
16364 case REAL_CST:
16365 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16366 break;
16367
16368 case FIXED_CST:
16369 {
16370 FIXED_VALUE_TYPE f;
16371 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16372 &(TREE_FIXED_CST (arg0)), NULL,
16373 TYPE_SATURATING (type));
16374 t = build_fixed (type, f);
16375 /* Propagate overflow flags. */
16376 if (overflow_p | TREE_OVERFLOW (arg0))
16377 TREE_OVERFLOW (t) = 1;
16378 break;
16379 }
16380
16381 default:
16382 gcc_unreachable ();
16383 }
16384
16385 return t;
16386 }
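/* Illustrative overflow case (not from the original sources): negating the
   INTEGER_CST INT_MIN in a signed 32-bit type wraps back to INT_MIN, so
   neg_with_overflow reports overflow and force_fit_type_double returns a
   constant with TREE_OVERFLOW set.  For an unsigned type the wraparound is
   well defined and no overflow is recorded.  */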
16387
16388 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16389 an integer constant or real constant.
16390
16391 TYPE is the type of the result. */
16392
16393 tree
16394 fold_abs_const (tree arg0, tree type)
16395 {
16396 tree t = NULL_TREE;
16397
16398 switch (TREE_CODE (arg0))
16399 {
16400 case INTEGER_CST:
16401 {
16402 double_int val = tree_to_double_int (arg0);
16403
16404 /* If the value is unsigned or non-negative, then the absolute value
16405 is the same as the ordinary value. */
16406 if (TYPE_UNSIGNED (type)
16407 || !val.is_negative ())
16408 t = arg0;
16409
16410 /* If the value is negative, then the absolute value is
16411 its negation. */
16412 else
16413 {
16414 bool overflow;
16415 val = val.neg_with_overflow (&overflow);
16416 t = force_fit_type_double (type, val, -1,
16417 overflow | TREE_OVERFLOW (arg0));
16418 }
16419 }
16420 break;
16421
16422 case REAL_CST:
16423 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16424 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16425 else
16426 t = arg0;
16427 break;
16428
16429 default:
16430 gcc_unreachable ();
16431 }
16432
16433 return t;
16434 }
16435
16436 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16437 constant. TYPE is the type of the result. */
16438
16439 static tree
16440 fold_not_const (const_tree arg0, tree type)
16441 {
16442 double_int val;
16443
16444 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16445
16446 val = ~tree_to_double_int (arg0);
16447 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16448 }
16449
16450 /* Given CODE, a relational operator, the target type, TYPE and two
16451 constant operands OP0 and OP1, return the result of the
16452 relational operation. If the result is not a compile time
16453 constant, then return NULL_TREE. */
16454
16455 static tree
16456 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16457 {
16458 int result, invert;
16459
16460 /* From here on, the only cases we handle are when the result is
16461 known to be a constant. */
16462
16463 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16464 {
16465 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16466 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16467
16468 /* Handle the cases where either operand is a NaN. */
16469 if (real_isnan (c0) || real_isnan (c1))
16470 {
16471 switch (code)
16472 {
16473 case EQ_EXPR:
16474 case ORDERED_EXPR:
16475 result = 0;
16476 break;
16477
16478 case NE_EXPR:
16479 case UNORDERED_EXPR:
16480 case UNLT_EXPR:
16481 case UNLE_EXPR:
16482 case UNGT_EXPR:
16483 case UNGE_EXPR:
16484 case UNEQ_EXPR:
16485 result = 1;
16486 break;
16487
16488 case LT_EXPR:
16489 case LE_EXPR:
16490 case GT_EXPR:
16491 case GE_EXPR:
16492 case LTGT_EXPR:
16493 if (flag_trapping_math)
16494 return NULL_TREE;
16495 result = 0;
16496 break;
16497
16498 default:
16499 gcc_unreachable ();
16500 }
16501
16502 return constant_boolean_node (result, type);
16503 }
16504
16505 return constant_boolean_node (real_compare (code, c0, c1), type);
16506 }
16507
16508 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16509 {
16510 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16511 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16512 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16513 }
16514
16515 /* Handle equality/inequality of complex constants. */
16516 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16517 {
16518 tree rcond = fold_relational_const (code, type,
16519 TREE_REALPART (op0),
16520 TREE_REALPART (op1));
16521 tree icond = fold_relational_const (code, type,
16522 TREE_IMAGPART (op0),
16523 TREE_IMAGPART (op1));
16524 if (code == EQ_EXPR)
16525 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16526 else if (code == NE_EXPR)
16527 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16528 else
16529 return NULL_TREE;
16530 }
16531
16532 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16533 {
16534 unsigned count = VECTOR_CST_NELTS (op0);
16535 tree *elts = XALLOCAVEC (tree, count);
16536 gcc_assert (VECTOR_CST_NELTS (op1) == count
16537 && TYPE_VECTOR_SUBPARTS (type) == count);
16538
16539 for (unsigned i = 0; i < count; i++)
16540 {
16541 tree elem_type = TREE_TYPE (type);
16542 tree elem0 = VECTOR_CST_ELT (op0, i);
16543 tree elem1 = VECTOR_CST_ELT (op1, i);
16544
16545 tree tem = fold_relational_const (code, elem_type,
16546 elem0, elem1);
16547
16548 if (tem == NULL_TREE)
16549 return NULL_TREE;
16550
16551 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16552 }
16553
16554 return build_vector (type, elts);
16555 }
16556
16557 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16558
16559 To compute GT, swap the arguments and do LT.
16560 To compute GE, do LT and invert the result.
16561 To compute LE, swap the arguments, do LT and invert the result.
16562 To compute NE, do EQ and invert the result.
16563
16564 Therefore, the code below must handle only EQ and LT. */
16565
16566 if (code == LE_EXPR || code == GT_EXPR)
16567 {
16568 tree tem = op0;
16569 op0 = op1;
16570 op1 = tem;
16571 code = swap_tree_comparison (code);
16572 }
16573
16574 /* Note that it is safe to invert for real values here because we
16575 have already handled the one case where it matters. */
16576
16577 invert = 0;
16578 if (code == NE_EXPR || code == GE_EXPR)
16579 {
16580 invert = 1;
16581 code = invert_tree_comparison (code, false);
16582 }
16583
16584 /* Compute a result for LT or EQ if the arguments permit;
16585 otherwise return NULL_TREE. */
16586 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16587 {
16588 if (code == EQ_EXPR)
16589 result = tree_int_cst_equal (op0, op1);
16590 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16591 result = INT_CST_LT_UNSIGNED (op0, op1);
16592 else
16593 result = INT_CST_LT (op0, op1);
16594 }
16595 else
16596 return NULL_TREE;
16597
16598 if (invert)
16599 result ^= 1;
16600 return constant_boolean_node (result, type);
16601 }
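/* Illustrative NaN behavior of the code above (not from the original
   sources): with C0 a NaN, EQ_EXPR and ORDERED_EXPR fold to false;
   NE_EXPR, UNORDERED_EXPR and the UN* comparisons fold to true; while
   LT_EXPR and friends fold to false only under -fno-trapping-math,
   otherwise NULL_TREE is returned because the signaling comparison could
   raise FE_INVALID at run time.  */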
16602
16603 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16604 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16605 itself. */
16606
16607 tree
16608 fold_build_cleanup_point_expr (tree type, tree expr)
16609 {
16610 /* If the expression does not have side effects then we don't have to wrap
16611 it with a cleanup point expression. */
16612 if (!TREE_SIDE_EFFECTS (expr))
16613 return expr;
16614
16615 /* If the expression is a return, check whether the expression inside the
16616 return, or the right-hand side of the modify expression inside the return,
16617 has side effects. If either has no side effects, we don't need to wrap
16618 the expression in a cleanup point expression. Note we don't check the
16619 left-hand side of the modify because it should always be the return decl. */
16620 if (TREE_CODE (expr) == RETURN_EXPR)
16621 {
16622 tree op = TREE_OPERAND (expr, 0);
16623 if (!op || !TREE_SIDE_EFFECTS (op))
16624 return expr;
16625 op = TREE_OPERAND (op, 1);
16626 if (!TREE_SIDE_EFFECTS (op))
16627 return expr;
16628 }
16629
16630 return build1 (CLEANUP_POINT_EXPR, type, expr);
16631 }
16632
16633 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16634 of an indirection through OP0, or NULL_TREE if no simplification is
16635 possible. */
16636
16637 tree
16638 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16639 {
16640 tree sub = op0;
16641 tree subtype;
16642
16643 STRIP_NOPS (sub);
16644 subtype = TREE_TYPE (sub);
16645 if (!POINTER_TYPE_P (subtype))
16646 return NULL_TREE;
16647
16648 if (TREE_CODE (sub) == ADDR_EXPR)
16649 {
16650 tree op = TREE_OPERAND (sub, 0);
16651 tree optype = TREE_TYPE (op);
16652 /* *&CONST_DECL -> to the value of the const decl. */
16653 if (TREE_CODE (op) == CONST_DECL)
16654 return DECL_INITIAL (op);
16655 /* *&p => p; make sure to handle *&"str"[cst] here. */
16656 if (type == optype)
16657 {
16658 tree fop = fold_read_from_constant_string (op);
16659 if (fop)
16660 return fop;
16661 else
16662 return op;
16663 }
16664 /* *(foo *)&fooarray => fooarray[0] */
16665 else if (TREE_CODE (optype) == ARRAY_TYPE
16666 && type == TREE_TYPE (optype)
16667 && (!in_gimple_form
16668 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16669 {
16670 tree type_domain = TYPE_DOMAIN (optype);
16671 tree min_val = size_zero_node;
16672 if (type_domain && TYPE_MIN_VALUE (type_domain))
16673 min_val = TYPE_MIN_VALUE (type_domain);
16674 if (in_gimple_form
16675 && TREE_CODE (min_val) != INTEGER_CST)
16676 return NULL_TREE;
16677 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16678 NULL_TREE, NULL_TREE);
16679 }
16680 /* *(foo *)&complexfoo => __real__ complexfoo */
16681 else if (TREE_CODE (optype) == COMPLEX_TYPE
16682 && type == TREE_TYPE (optype))
16683 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16684 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16685 else if (TREE_CODE (optype) == VECTOR_TYPE
16686 && type == TREE_TYPE (optype))
16687 {
16688 tree part_width = TYPE_SIZE (type);
16689 tree index = bitsize_int (0);
16690 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16691 }
16692 }
16693
16694 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16695 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16696 {
16697 tree op00 = TREE_OPERAND (sub, 0);
16698 tree op01 = TREE_OPERAND (sub, 1);
16699
16700 STRIP_NOPS (op00);
16701 if (TREE_CODE (op00) == ADDR_EXPR)
16702 {
16703 tree op00type;
16704 op00 = TREE_OPERAND (op00, 0);
16705 op00type = TREE_TYPE (op00);
16706
16707 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16708 if (TREE_CODE (op00type) == VECTOR_TYPE
16709 && type == TREE_TYPE (op00type))
16710 {
16711 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16712 tree part_width = TYPE_SIZE (type);
16713 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16714 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16715 tree index = bitsize_int (indexi);
16716
16717 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16718 return fold_build3_loc (loc,
16719 BIT_FIELD_REF, type, op00,
16720 part_width, index);
16721
16722 }
16723 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16724 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16725 && type == TREE_TYPE (op00type))
16726 {
16727 tree size = TYPE_SIZE_UNIT (type);
16728 if (tree_int_cst_equal (size, op01))
16729 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16730 }
16731 /* ((foo *)&fooarray)[1] => fooarray[1] */
16732 else if (TREE_CODE (op00type) == ARRAY_TYPE
16733 && type == TREE_TYPE (op00type))
16734 {
16735 tree type_domain = TYPE_DOMAIN (op00type);
16736 tree min_val = size_zero_node;
16737 if (type_domain && TYPE_MIN_VALUE (type_domain))
16738 min_val = TYPE_MIN_VALUE (type_domain);
16739 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16740 TYPE_SIZE_UNIT (type));
16741 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16742 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16743 NULL_TREE, NULL_TREE);
16744 }
16745 }
16746 }
16747
16748 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16749 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16750 && type == TREE_TYPE (TREE_TYPE (subtype))
16751 && (!in_gimple_form
16752 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16753 {
16754 tree type_domain;
16755 tree min_val = size_zero_node;
16756 sub = build_fold_indirect_ref_loc (loc, sub);
16757 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16758 if (type_domain && TYPE_MIN_VALUE (type_domain))
16759 min_val = TYPE_MIN_VALUE (type_domain);
16760 if (in_gimple_form
16761 && TREE_CODE (min_val) != INTEGER_CST)
16762 return NULL_TREE;
16763 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16764 NULL_TREE);
16765 }
16766
16767 return NULL_TREE;
16768 }
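/* Illustrative simplifications performed above (not from the original
   sources), with "d" a double, "cd" a _Complex double, "v4sf" a vector of
   four floats and "darr" an array of double:

     *&d                       => d
     *(double *) &cd           => __real__ cd
     *((float *) &v4sf + 1)    => BIT_FIELD_REF <v4sf, 32, 32>
     *(double *) &darr         => darr[0]

   Anything that matches none of these shapes yields NULL_TREE and the
   INDIRECT_REF is kept.  */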
16769
16770 /* Builds an expression for an indirection through T, simplifying some
16771 cases. */
16772
16773 tree
16774 build_fold_indirect_ref_loc (location_t loc, tree t)
16775 {
16776 tree type = TREE_TYPE (TREE_TYPE (t));
16777 tree sub = fold_indirect_ref_1 (loc, type, t);
16778
16779 if (sub)
16780 return sub;
16781
16782 return build1_loc (loc, INDIRECT_REF, type, t);
16783 }
16784
16785 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16786
16787 tree
16788 fold_indirect_ref_loc (location_t loc, tree t)
16789 {
16790 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16791
16792 if (sub)
16793 return sub;
16794 else
16795 return t;
16796 }
16797
16798 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16799 whose result is ignored. The type of the returned tree need not be
16800 the same as the original expression. */
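/* For example, if the value of "f () + 3" is ignored, it reduces to
just "f ()"; an expression with no side effects at all reduces to
integer_zero_node.  */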
16801
16802 tree
16803 fold_ignored_result (tree t)
16804 {
16805 if (!TREE_SIDE_EFFECTS (t))
16806 return integer_zero_node;
16807
16808 for (;;)
16809 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16810 {
16811 case tcc_unary:
16812 t = TREE_OPERAND (t, 0);
16813 break;
16814
16815 case tcc_binary:
16816 case tcc_comparison:
16817 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16818 t = TREE_OPERAND (t, 0);
16819 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16820 t = TREE_OPERAND (t, 1);
16821 else
16822 return t;
16823 break;
16824
16825 case tcc_expression:
16826 switch (TREE_CODE (t))
16827 {
16828 case COMPOUND_EXPR:
16829 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16830 return t;
16831 t = TREE_OPERAND (t, 0);
16832 break;
16833
16834 case COND_EXPR:
16835 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16836 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16837 return t;
16838 t = TREE_OPERAND (t, 0);
16839 break;
16840
16841 default:
16842 return t;
16843 }
16844 break;
16845
16846 default:
16847 return t;
16848 }
16849 }
16850
16851 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16852 This can only be applied to objects of a sizetype. */
16853
16854 tree
16855 round_up_loc (location_t loc, tree value, int divisor)
16856 {
16857 tree div = NULL_TREE;
16858
16859 gcc_assert (divisor > 0);
16860 if (divisor == 1)
16861 return value;
16862
16863 /* See if VALUE is already a multiple of DIVISOR. If so, there is
16864 nothing to do. Only run this check when VALUE is not a constant:
16865 for a constant, computing the rounded result directly is cheaper
16866 than the check. */
16867 if (TREE_CODE (value) != INTEGER_CST)
16868 {
16869 div = build_int_cst (TREE_TYPE (value), divisor);
16870
16871 if (multiple_of_p (TREE_TYPE (value), value, div))
16872 return value;
16873 }
16874
16875 /* If divisor is a power of two, simplify this to bit manipulation. */
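/* "divisor & -divisor" isolates the lowest set bit, so the equality
holds exactly for powers of two: 8 & -8 == 8, but 6 & -6 == 2.
For the non-constant case the rounding below computes
(VALUE + DIVISOR - 1) & -DIVISOR; e.g. rounding 10 up to a multiple
of 8 gives (10 + 7) & -8 == 16.  */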
16876 if (divisor == (divisor & -divisor))
16877 {
16878 if (TREE_CODE (value) == INTEGER_CST)
16879 {
16880 double_int val = tree_to_double_int (value);
16881 bool overflow_p;
16882
16883 if ((val.low & (divisor - 1)) == 0)
16884 return value;
16885
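/* Round up by clearing the low bits and adding DIVISOR; if the low
word wraps around to zero, carry into the high word, and flag
overflow if the high word wraps as well.  E.g. for val.low == 10
and divisor == 8: (10 & ~7) + 8 == 16.  */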
16886 overflow_p = TREE_OVERFLOW (value);
16887 val.low &= ~(divisor - 1);
16888 val.low += divisor;
16889 if (val.low == 0)
16890 {
16891 val.high++;
16892 if (val.high == 0)
16893 overflow_p = true;
16894 }
16895
16896 return force_fit_type_double (TREE_TYPE (value), val,
16897 -1, overflow_p);
16898 }
16899 else
16900 {
16901 tree t;
16902
16903 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16904 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16905 t = build_int_cst (TREE_TYPE (value), -divisor);
16906 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16907 }
16908 }
16909 else
16910 {
16911 if (!div)
16912 div = build_int_cst (TREE_TYPE (value), divisor);
16913 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16914 value = size_binop_loc (loc, MULT_EXPR, value, div);
16915 }
16916
16917 return value;
16918 }
16919
16920 /* Likewise, but round down. */
16921
16922 tree
16923 round_down_loc (location_t loc, tree value, int divisor)
16924 {
16925 tree div = NULL_TREE;
16926
16927 gcc_assert (divisor > 0);
16928 if (divisor == 1)
16929 return value;
16930
16931 /* See if VALUE is already a multiple of DIVISOR. If so, there is
16932 nothing to do. Only run this check when VALUE is not a constant:
16933 for a constant, computing the rounded result directly is cheaper
16934 than the check. */
16935 if (TREE_CODE (value) != INTEGER_CST)
16936 {
16937 div = build_int_cst (TREE_TYPE (value), divisor);
16938
16939 if (multiple_of_p (TREE_TYPE (value), value, div))
16940 return value;
16941 }
16942
16943 /* If divisor is a power of two, simplify this to bit manipulation. */
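/* For a power of two, rounding down is a single mask with -DIVISOR;
e.g. rounding 13 down to a multiple of 8 gives 13 & -8 == 8.  */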
16944 if (divisor == (divisor & -divisor))
16945 {
16946 tree t;
16947
16948 t = build_int_cst (TREE_TYPE (value), -divisor);
16949 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16950 }
16951 else
16952 {
16953 if (!div)
16954 div = build_int_cst (TREE_TYPE (value), divisor);
16955 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16956 value = size_binop_loc (loc, MULT_EXPR, value, div);
16957 }
16958
16959 return value;
16960 }
16961
16962 /* Returns a pointer to the base of the object addressed by EXP, and
16963 extracts the offset of the access: the constant part is stored in
16964 *PBITPOS (in bits) and the variable part in *POFFSET. */
16965
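/* Illustrative example (hypothetical declarations): for "&s.d",
where field "d" sits at byte 4 of "struct S s", the result is "&s"
with *PBITPOS == 32 and *POFFSET == NULL_TREE.  */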
16966 static tree
16967 split_address_to_core_and_offset (tree exp,
16968 HOST_WIDE_INT *pbitpos, tree *poffset)
16969 {
16970 tree core;
16971 enum machine_mode mode;
16972 int unsignedp, volatilep;
16973 HOST_WIDE_INT bitsize;
16974 location_t loc = EXPR_LOCATION (exp);
16975
16976 if (TREE_CODE (exp) == ADDR_EXPR)
16977 {
16978 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16979 poffset, &mode, &unsignedp, &volatilep,
16980 false);
16981 core = build_fold_addr_expr_loc (loc, core);
16982 }
16983 else
16984 {
16985 core = exp;
16986 *pbitpos = 0;
16987 *poffset = NULL_TREE;
16988 }
16989
16990 return core;
16991 }
16992
16993 /* Returns true if addresses of E1 and E2 differ by a constant, false
16994 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16995
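/* Illustrative example (hypothetical declarations): with "int a[10]"
and 4-byte int, "&a[3]" and "&a[1]" share the core "&a" and differ
by the constant 2 * sizeof (int), so *DIFF is set to 8.  */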
16996 bool
16997 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16998 {
16999 tree core1, core2;
17000 HOST_WIDE_INT bitpos1, bitpos2;
17001 tree toffset1, toffset2, tdiff, type;
17002
17003 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
17004 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
17005
17006 if (bitpos1 % BITS_PER_UNIT != 0
17007 || bitpos2 % BITS_PER_UNIT != 0
17008 || !operand_equal_p (core1, core2, 0))
17009 return false;
17010
17011 if (toffset1 && toffset2)
17012 {
17013 type = TREE_TYPE (toffset1);
17014 if (type != TREE_TYPE (toffset2))
17015 toffset2 = fold_convert (type, toffset2);
17016
17017 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
17018 if (!cst_and_fits_in_hwi (tdiff))
17019 return false;
17020
17021 *diff = int_cst_value (tdiff);
17022 }
17023 else if (toffset1 || toffset2)
17024 {
17025 /* If only one of the addresses has a variable offset part, the
17026 parts cannot cancel, so the difference is not a constant. */
17027 return false;
17028 }
17029 else
17030 *diff = 0;
17031
17032 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
17033 return true;
17034 }
17035
17036 /* Simplify the floating point expression EXP when the sign of the
17037 result is not significant. Return NULL_TREE if no simplification
17038 is possible. */
17039
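/* For example, when only the magnitude of the result matters,
"x * -y" simplifies to "x * y" and "copysign (x, y)" to plain "x"
(keeping any side effects of "y").  */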
17040 tree
17041 fold_strip_sign_ops (tree exp)
17042 {
17043 tree arg0, arg1;
17044 location_t loc = EXPR_LOCATION (exp);
17045
17046 switch (TREE_CODE (exp))
17047 {
17048 case ABS_EXPR:
17049 case NEGATE_EXPR:
17050 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17051 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
17052
17053 case MULT_EXPR:
17054 case RDIV_EXPR:
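/* Under sign-dependent rounding (e.g. a directed rounding mode with
-frounding-math), negating an operand can change the magnitude of
the result, not just its sign, so do not strip negations here.  */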
17055 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
17056 return NULL_TREE;
17057 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17058 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17059 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
17060 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
17061 arg0 ? arg0 : TREE_OPERAND (exp, 0),
17062 arg1 ? arg1 : TREE_OPERAND (exp, 1));
17063 break;
17064
17065 case COMPOUND_EXPR:
17066 arg0 = TREE_OPERAND (exp, 0);
17067 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17068 if (arg1)
17069 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
17070 break;
17071
17072 case COND_EXPR:
17073 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17074 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
17075 if (arg0 || arg1)
17076 return fold_build3_loc (loc,
17077 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
17078 arg0 ? arg0 : TREE_OPERAND (exp, 1),
17079 arg1 ? arg1 : TREE_OPERAND (exp, 2));
17080 break;
17081
17082 case CALL_EXPR:
17083 {
17084 const enum built_in_function fcode = builtin_mathfn_code (exp);
17085 switch (fcode)
17086 {
17087 CASE_FLT_FN (BUILT_IN_COPYSIGN):
17088 /* Strip the copysign call, returning its first argument and preserving any side effects of the second. */
17089 arg0 = CALL_EXPR_ARG (exp, 0);
17090 arg1 = CALL_EXPR_ARG (exp, 1);
17091 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
17092
17093 default:
17094 /* Strip sign ops from the argument of "odd" math functions. */
17095 if (negate_mathfn_p (fcode))
17096 {
17097 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
17098 if (arg0)
17099 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
17100 }
17101 break;
17102 }
17103 }
17104 break;
17105
17106 default:
17107 break;
17108 }
17109 return NULL_TREE;
17110 }