re PR tree-optimization/57656 (Wrong constant folding)
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "realmpfr.h"
50 #include "rtl.h"
51 #include "expr.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "diagnostic-core.h"
55 #include "intl.h"
56 #include "ggc.h"
57 #include "hash-table.h"
58 #include "langhooks.h"
59 #include "md5.h"
60 #include "gimple.h"
61 #include "tree-flow.h"
62
63 /* Nonzero if we are folding constants inside an initializer; zero
64 otherwise. */
65 int folding_initializer = 0;
66
67 /* The following constants represent a bit based encoding of GCC's
68 comparison operators. This encoding simplifies transformations
69 on relational comparison operators, such as AND and OR. */
70 enum comparison_code {
71 COMPCODE_FALSE = 0,
72 COMPCODE_LT = 1,
73 COMPCODE_EQ = 2,
74 COMPCODE_LE = 3,
75 COMPCODE_GT = 4,
76 COMPCODE_LTGT = 5,
77 COMPCODE_GE = 6,
78 COMPCODE_ORD = 7,
79 COMPCODE_UNORD = 8,
80 COMPCODE_UNLT = 9,
81 COMPCODE_UNEQ = 10,
82 COMPCODE_UNLE = 11,
83 COMPCODE_UNGT = 12,
84 COMPCODE_NE = 13,
85 COMPCODE_UNGE = 14,
86 COMPCODE_TRUE = 15
87 };
88
89 static bool negate_mathfn_p (enum built_in_function);
90 static bool negate_expr_p (tree);
91 static tree negate_expr (tree);
92 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
93 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
94 static tree const_binop (enum tree_code, tree, tree);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
101 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (location_t, tree, tree,
103 HOST_WIDE_INT, HOST_WIDE_INT, int);
104 static tree optimize_bit_field_compare (location_t, enum tree_code,
105 tree, tree, tree);
106 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
107 HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
109 tree *, tree *);
110 static int all_ones_mask_p (const_tree, int);
111 static tree sign_bit_p (tree, const_tree);
112 static int simple_operand_p (const_tree);
113 static bool simple_operand_p_2 (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree range_predecessor (tree);
116 static tree range_successor (tree);
117 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
118 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
119 static tree unextend (tree, int, int, tree);
120 static tree optimize_minmax_comparison (location_t, enum tree_code,
121 tree, tree, tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
124 static tree fold_binary_op_with_conditional_arg (location_t,
125 enum tree_code, tree,
126 tree, tree,
127 tree, tree, int);
128 static tree fold_mathfn_compare (location_t,
129 enum built_in_function, enum tree_code,
130 tree, tree, tree);
131 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
132 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
133 static bool reorder_operands_p (const_tree, const_tree);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (const_tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static tree fold_convert_const (enum tree_code, tree, tree);
138
139 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
140 Otherwise, return LOC. */
141
142 static location_t
143 expr_location_or (tree t, location_t loc)
144 {
145 location_t tloc = EXPR_LOCATION (t);
146 return tloc == UNKNOWN_LOCATION ? loc : tloc;
147 }
148
149 /* Similar to protected_set_expr_location, but never modify x in place,
150 if location can and needs to be set, unshare it. */
151
152 static inline tree
153 protected_set_expr_location_unshare (tree x, location_t loc)
154 {
155 if (CAN_HAVE_LOCATION_P (x)
156 && EXPR_LOCATION (x) != loc
157 && !(TREE_CODE (x) == SAVE_EXPR
158 || TREE_CODE (x) == TARGET_EXPR
159 || TREE_CODE (x) == BIND_EXPR))
160 {
161 x = copy_node (x);
162 SET_EXPR_LOCATION (x, loc);
163 }
164 return x;
165 }
166 \f
167 /* If ARG2 divides ARG1 with zero remainder, carries out the division
168 of type CODE and returns the quotient.
169 Otherwise returns NULL_TREE. */
170
171 tree
172 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
173 {
174 double_int quo, rem;
175 int uns;
176
177 /* The sign of the division is according to operand two, that
178 does the correct thing for POINTER_PLUS_EXPR where we want
179 a signed division. */
180 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
181
182 quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
183 uns, code, &rem);
184
185 if (rem.is_zero ())
186 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
187
188 return NULL_TREE;
189 }
190 \f
191 /* This is nonzero if we should defer warnings about undefined
192 overflow. This facility exists because these warnings are a
193 special case. The code to estimate loop iterations does not want
194 to issue any warnings, since it works with expressions which do not
195 occur in user code. Various bits of cleanup code call fold(), but
196 only use the result if it has certain characteristics (e.g., is a
197 constant); that code only wants to issue a warning if the result is
198 used. */
199
200 static int fold_deferring_overflow_warnings;
201
202 /* If a warning about undefined overflow is deferred, this is the
203 warning. Note that this may cause us to turn two warnings into
204 one, but that is fine since it is sufficient to only give one
205 warning per expression. */
206
207 static const char* fold_deferred_overflow_warning;
208
209 /* If a warning about undefined overflow is deferred, this is the
210 level at which the warning should be emitted. */
211
212 static enum warn_strict_overflow_code fold_deferred_overflow_code;
213
214 /* Start deferring overflow warnings. We could use a stack here to
215 permit nested calls, but at present it is not necessary. */
216
217 void
218 fold_defer_overflow_warnings (void)
219 {
220 ++fold_deferring_overflow_warnings;
221 }
222
223 /* Stop deferring overflow warnings. If there is a pending warning,
224 and ISSUE is true, then issue the warning if appropriate. STMT is
225 the statement with which the warning should be associated (used for
226 location information); STMT may be NULL. CODE is the level of the
227 warning--a warn_strict_overflow_code value. This function will use
228 the smaller of CODE and the deferred code when deciding whether to
229 issue the warning. CODE may be zero to mean to always use the
230 deferred code. */
231
232 void
233 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
234 {
235 const char *warnmsg;
236 location_t locus;
237
238 gcc_assert (fold_deferring_overflow_warnings > 0);
239 --fold_deferring_overflow_warnings;
240 if (fold_deferring_overflow_warnings > 0)
241 {
242 if (fold_deferred_overflow_warning != NULL
243 && code != 0
244 && code < (int) fold_deferred_overflow_code)
245 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
246 return;
247 }
248
249 warnmsg = fold_deferred_overflow_warning;
250 fold_deferred_overflow_warning = NULL;
251
252 if (!issue || warnmsg == NULL)
253 return;
254
255 if (gimple_no_warning_p (stmt))
256 return;
257
258 /* Use the smallest code level when deciding to issue the
259 warning. */
260 if (code == 0 || code > (int) fold_deferred_overflow_code)
261 code = fold_deferred_overflow_code;
262
263 if (!issue_strict_overflow_warning (code))
264 return;
265
266 if (stmt == NULL)
267 locus = input_location;
268 else
269 locus = gimple_location (stmt);
270 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
271 }
272
273 /* Stop deferring overflow warnings, ignoring any deferred
274 warnings. */
275
276 void
277 fold_undefer_and_ignore_overflow_warnings (void)
278 {
279 fold_undefer_overflow_warnings (false, NULL, 0);
280 }
281
282 /* Whether we are deferring overflow warnings. */
283
284 bool
285 fold_deferring_overflow_warnings_p (void)
286 {
287 return fold_deferring_overflow_warnings > 0;
288 }
289
290 /* This is called when we fold something based on the fact that signed
291 overflow is undefined. */
292
293 static void
294 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
295 {
296 if (fold_deferring_overflow_warnings > 0)
297 {
298 if (fold_deferred_overflow_warning == NULL
299 || wc < fold_deferred_overflow_code)
300 {
301 fold_deferred_overflow_warning = gmsgid;
302 fold_deferred_overflow_code = wc;
303 }
304 }
305 else if (issue_strict_overflow_warning (wc))
306 warning (OPT_Wstrict_overflow, gmsgid);
307 }
308 \f
309 /* Return true if the built-in mathematical function specified by CODE
310 is odd, i.e. -f(x) == f(-x). */
311
312 static bool
313 negate_mathfn_p (enum built_in_function code)
314 {
315 switch (code)
316 {
317 CASE_FLT_FN (BUILT_IN_ASIN):
318 CASE_FLT_FN (BUILT_IN_ASINH):
319 CASE_FLT_FN (BUILT_IN_ATAN):
320 CASE_FLT_FN (BUILT_IN_ATANH):
321 CASE_FLT_FN (BUILT_IN_CASIN):
322 CASE_FLT_FN (BUILT_IN_CASINH):
323 CASE_FLT_FN (BUILT_IN_CATAN):
324 CASE_FLT_FN (BUILT_IN_CATANH):
325 CASE_FLT_FN (BUILT_IN_CBRT):
326 CASE_FLT_FN (BUILT_IN_CPROJ):
327 CASE_FLT_FN (BUILT_IN_CSIN):
328 CASE_FLT_FN (BUILT_IN_CSINH):
329 CASE_FLT_FN (BUILT_IN_CTAN):
330 CASE_FLT_FN (BUILT_IN_CTANH):
331 CASE_FLT_FN (BUILT_IN_ERF):
332 CASE_FLT_FN (BUILT_IN_LLROUND):
333 CASE_FLT_FN (BUILT_IN_LROUND):
334 CASE_FLT_FN (BUILT_IN_ROUND):
335 CASE_FLT_FN (BUILT_IN_SIN):
336 CASE_FLT_FN (BUILT_IN_SINH):
337 CASE_FLT_FN (BUILT_IN_TAN):
338 CASE_FLT_FN (BUILT_IN_TANH):
339 CASE_FLT_FN (BUILT_IN_TRUNC):
340 return true;
341
342 CASE_FLT_FN (BUILT_IN_LLRINT):
343 CASE_FLT_FN (BUILT_IN_LRINT):
344 CASE_FLT_FN (BUILT_IN_NEARBYINT):
345 CASE_FLT_FN (BUILT_IN_RINT):
346 return !flag_rounding_math;
347
348 default:
349 break;
350 }
351 return false;
352 }
353
354 /* Check whether we may negate an integer constant T without causing
355 overflow. */
356
357 bool
358 may_negate_without_overflow_p (const_tree t)
359 {
360 unsigned HOST_WIDE_INT val;
361 unsigned int prec;
362 tree type;
363
364 gcc_assert (TREE_CODE (t) == INTEGER_CST);
365
366 type = TREE_TYPE (t);
367 if (TYPE_UNSIGNED (type))
368 return false;
369
370 prec = TYPE_PRECISION (type);
371 if (prec > HOST_BITS_PER_WIDE_INT)
372 {
373 if (TREE_INT_CST_LOW (t) != 0)
374 return true;
375 prec -= HOST_BITS_PER_WIDE_INT;
376 val = TREE_INT_CST_HIGH (t);
377 }
378 else
379 val = TREE_INT_CST_LOW (t);
380 if (prec < HOST_BITS_PER_WIDE_INT)
381 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
382 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
383 }
384
385 /* Determine whether an expression T can be cheaply negated using
386 the function negate_expr without introducing undefined overflow. */
387
388 static bool
389 negate_expr_p (tree t)
390 {
391 tree type;
392
393 if (t == 0)
394 return false;
395
396 type = TREE_TYPE (t);
397
398 STRIP_SIGN_NOPS (t);
399 switch (TREE_CODE (t))
400 {
401 case INTEGER_CST:
402 if (TYPE_OVERFLOW_WRAPS (type))
403 return true;
404
405 /* Check that -CST will not overflow type. */
406 return may_negate_without_overflow_p (t);
407 case BIT_NOT_EXPR:
408 return (INTEGRAL_TYPE_P (type)
409 && TYPE_OVERFLOW_WRAPS (type));
410
411 case FIXED_CST:
412 case NEGATE_EXPR:
413 return true;
414
415 case REAL_CST:
416 /* We want to canonicalize to positive real constants. Pretend
417 that only negative ones can be easily negated. */
418 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
419
420 case COMPLEX_CST:
421 return negate_expr_p (TREE_REALPART (t))
422 && negate_expr_p (TREE_IMAGPART (t));
423
424 case VECTOR_CST:
425 {
426 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
427 return true;
428
429 int count = TYPE_VECTOR_SUBPARTS (type), i;
430
431 for (i = 0; i < count; i++)
432 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
433 return false;
434
435 return true;
436 }
437
438 case COMPLEX_EXPR:
439 return negate_expr_p (TREE_OPERAND (t, 0))
440 && negate_expr_p (TREE_OPERAND (t, 1));
441
442 case CONJ_EXPR:
443 return negate_expr_p (TREE_OPERAND (t, 0));
444
445 case PLUS_EXPR:
446 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
447 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
448 return false;
449 /* -(A + B) -> (-B) - A. */
450 if (negate_expr_p (TREE_OPERAND (t, 1))
451 && reorder_operands_p (TREE_OPERAND (t, 0),
452 TREE_OPERAND (t, 1)))
453 return true;
454 /* -(A + B) -> (-A) - B. */
455 return negate_expr_p (TREE_OPERAND (t, 0));
456
457 case MINUS_EXPR:
458 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
459 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
461 && reorder_operands_p (TREE_OPERAND (t, 0),
462 TREE_OPERAND (t, 1));
463
464 case MULT_EXPR:
465 if (TYPE_UNSIGNED (TREE_TYPE (t)))
466 break;
467
468 /* Fall through. */
469
470 case RDIV_EXPR:
471 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
472 return negate_expr_p (TREE_OPERAND (t, 1))
473 || negate_expr_p (TREE_OPERAND (t, 0));
474 break;
475
476 case TRUNC_DIV_EXPR:
477 case ROUND_DIV_EXPR:
478 case FLOOR_DIV_EXPR:
479 case CEIL_DIV_EXPR:
480 case EXACT_DIV_EXPR:
481 /* In general we can't negate A / B, because if A is INT_MIN and
482 B is 1, we may turn this into INT_MIN / -1 which is undefined
483 and actually traps on some architectures. But if overflow is
484 undefined, we can negate, because - (INT_MIN / 1) is an
485 overflow. */
486 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
487 {
488 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
489 break;
490 /* If overflow is undefined then we have to be careful because
491 we ask whether it's ok to associate the negate with the
492 division which is not ok for example for
493 -((a - b) / c) where (-(a - b)) / c may invoke undefined
494 overflow because of negating INT_MIN. So do not use
495 negate_expr_p here but open-code the two important cases. */
496 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
497 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
498 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
499 return true;
500 }
501 else if (negate_expr_p (TREE_OPERAND (t, 0)))
502 return true;
503 return negate_expr_p (TREE_OPERAND (t, 1));
504
505 case NOP_EXPR:
506 /* Negate -((double)float) as (double)(-float). */
507 if (TREE_CODE (type) == REAL_TYPE)
508 {
509 tree tem = strip_float_extensions (t);
510 if (tem != t)
511 return negate_expr_p (tem);
512 }
513 break;
514
515 case CALL_EXPR:
516 /* Negate -f(x) as f(-x). */
517 if (negate_mathfn_p (builtin_mathfn_code (t)))
518 return negate_expr_p (CALL_EXPR_ARG (t, 0));
519 break;
520
521 case RSHIFT_EXPR:
522 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
523 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
524 {
525 tree op1 = TREE_OPERAND (t, 1);
526 if (TREE_INT_CST_HIGH (op1) == 0
527 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
528 == TREE_INT_CST_LOW (op1))
529 return true;
530 }
531 break;
532
533 default:
534 break;
535 }
536 return false;
537 }
538
539 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
540 simplification is possible.
541 If negate_expr_p would return true for T, NULL_TREE will never be
542 returned. */
543
544 static tree
545 fold_negate_expr (location_t loc, tree t)
546 {
547 tree type = TREE_TYPE (t);
548 tree tem;
549
550 switch (TREE_CODE (t))
551 {
552 /* Convert - (~A) to A + 1. */
553 case BIT_NOT_EXPR:
554 if (INTEGRAL_TYPE_P (type))
555 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
556 build_one_cst (type));
557 break;
558
559 case INTEGER_CST:
560 tem = fold_negate_const (t, type);
561 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
562 || !TYPE_OVERFLOW_TRAPS (type))
563 return tem;
564 break;
565
566 case REAL_CST:
567 tem = fold_negate_const (t, type);
568 /* Two's complement FP formats, such as c4x, may overflow. */
569 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
570 return tem;
571 break;
572
573 case FIXED_CST:
574 tem = fold_negate_const (t, type);
575 return tem;
576
577 case COMPLEX_CST:
578 {
579 tree rpart = negate_expr (TREE_REALPART (t));
580 tree ipart = negate_expr (TREE_IMAGPART (t));
581
582 if ((TREE_CODE (rpart) == REAL_CST
583 && TREE_CODE (ipart) == REAL_CST)
584 || (TREE_CODE (rpart) == INTEGER_CST
585 && TREE_CODE (ipart) == INTEGER_CST))
586 return build_complex (type, rpart, ipart);
587 }
588 break;
589
590 case VECTOR_CST:
591 {
592 int count = TYPE_VECTOR_SUBPARTS (type), i;
593 tree *elts = XALLOCAVEC (tree, count);
594
595 for (i = 0; i < count; i++)
596 {
597 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
598 if (elts[i] == NULL_TREE)
599 return NULL_TREE;
600 }
601
602 return build_vector (type, elts);
603 }
604
605 case COMPLEX_EXPR:
606 if (negate_expr_p (t))
607 return fold_build2_loc (loc, COMPLEX_EXPR, type,
608 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
609 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
610 break;
611
612 case CONJ_EXPR:
613 if (negate_expr_p (t))
614 return fold_build1_loc (loc, CONJ_EXPR, type,
615 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
616 break;
617
618 case NEGATE_EXPR:
619 return TREE_OPERAND (t, 0);
620
621 case PLUS_EXPR:
622 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
623 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
624 {
625 /* -(A + B) -> (-B) - A. */
626 if (negate_expr_p (TREE_OPERAND (t, 1))
627 && reorder_operands_p (TREE_OPERAND (t, 0),
628 TREE_OPERAND (t, 1)))
629 {
630 tem = negate_expr (TREE_OPERAND (t, 1));
631 return fold_build2_loc (loc, MINUS_EXPR, type,
632 tem, TREE_OPERAND (t, 0));
633 }
634
635 /* -(A + B) -> (-A) - B. */
636 if (negate_expr_p (TREE_OPERAND (t, 0)))
637 {
638 tem = negate_expr (TREE_OPERAND (t, 0));
639 return fold_build2_loc (loc, MINUS_EXPR, type,
640 tem, TREE_OPERAND (t, 1));
641 }
642 }
643 break;
644
645 case MINUS_EXPR:
646 /* - (A - B) -> B - A */
647 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
648 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
649 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
650 return fold_build2_loc (loc, MINUS_EXPR, type,
651 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
652 break;
653
654 case MULT_EXPR:
655 if (TYPE_UNSIGNED (type))
656 break;
657
658 /* Fall through. */
659
660 case RDIV_EXPR:
661 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
662 {
663 tem = TREE_OPERAND (t, 1);
664 if (negate_expr_p (tem))
665 return fold_build2_loc (loc, TREE_CODE (t), type,
666 TREE_OPERAND (t, 0), negate_expr (tem));
667 tem = TREE_OPERAND (t, 0);
668 if (negate_expr_p (tem))
669 return fold_build2_loc (loc, TREE_CODE (t), type,
670 negate_expr (tem), TREE_OPERAND (t, 1));
671 }
672 break;
673
674 case TRUNC_DIV_EXPR:
675 case ROUND_DIV_EXPR:
676 case FLOOR_DIV_EXPR:
677 case CEIL_DIV_EXPR:
678 case EXACT_DIV_EXPR:
679 /* In general we can't negate A / B, because if A is INT_MIN and
680 B is 1, we may turn this into INT_MIN / -1 which is undefined
681 and actually traps on some architectures. But if overflow is
682 undefined, we can negate, because - (INT_MIN / 1) is an
683 overflow. */
684 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
685 {
686 const char * const warnmsg = G_("assuming signed overflow does not "
687 "occur when negating a division");
688 tem = TREE_OPERAND (t, 1);
689 if (negate_expr_p (tem))
690 {
691 if (INTEGRAL_TYPE_P (type)
692 && (TREE_CODE (tem) != INTEGER_CST
693 || integer_onep (tem)))
694 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
695 return fold_build2_loc (loc, TREE_CODE (t), type,
696 TREE_OPERAND (t, 0), negate_expr (tem));
697 }
698 /* If overflow is undefined then we have to be careful because
699 we ask whether it's ok to associate the negate with the
700 division which is not ok for example for
701 -((a - b) / c) where (-(a - b)) / c may invoke undefined
702 overflow because of negating INT_MIN. So do not use
703 negate_expr_p here but open-code the two important cases. */
704 tem = TREE_OPERAND (t, 0);
705 if ((INTEGRAL_TYPE_P (type)
706 && (TREE_CODE (tem) == NEGATE_EXPR
707 || (TREE_CODE (tem) == INTEGER_CST
708 && may_negate_without_overflow_p (tem))))
709 || !INTEGRAL_TYPE_P (type))
710 return fold_build2_loc (loc, TREE_CODE (t), type,
711 negate_expr (tem), TREE_OPERAND (t, 1));
712 }
713 break;
714
715 case NOP_EXPR:
716 /* Convert -((double)float) into (double)(-float). */
717 if (TREE_CODE (type) == REAL_TYPE)
718 {
719 tem = strip_float_extensions (t);
720 if (tem != t && negate_expr_p (tem))
721 return fold_convert_loc (loc, type, negate_expr (tem));
722 }
723 break;
724
725 case CALL_EXPR:
726 /* Negate -f(x) as f(-x). */
727 if (negate_mathfn_p (builtin_mathfn_code (t))
728 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
729 {
730 tree fndecl, arg;
731
732 fndecl = get_callee_fndecl (t);
733 arg = negate_expr (CALL_EXPR_ARG (t, 0));
734 return build_call_expr_loc (loc, fndecl, 1, arg);
735 }
736 break;
737
738 case RSHIFT_EXPR:
739 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
740 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
741 {
742 tree op1 = TREE_OPERAND (t, 1);
743 if (TREE_INT_CST_HIGH (op1) == 0
744 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
745 == TREE_INT_CST_LOW (op1))
746 {
747 tree ntype = TYPE_UNSIGNED (type)
748 ? signed_type_for (type)
749 : unsigned_type_for (type);
750 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
751 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
752 return fold_convert_loc (loc, type, temp);
753 }
754 }
755 break;
756
757 default:
758 break;
759 }
760
761 return NULL_TREE;
762 }
763
764 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
765 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
766 return NULL_TREE. */
767
768 static tree
769 negate_expr (tree t)
770 {
771 tree type, tem;
772 location_t loc;
773
774 if (t == NULL_TREE)
775 return NULL_TREE;
776
777 loc = EXPR_LOCATION (t);
778 type = TREE_TYPE (t);
779 STRIP_SIGN_NOPS (t);
780
781 tem = fold_negate_expr (loc, t);
782 if (!tem)
783 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
784 return fold_convert_loc (loc, type, tem);
785 }
786 \f
787 /* Split a tree IN into a constant, literal and variable parts that could be
788 combined with CODE to make IN. "constant" means an expression with
789 TREE_CONSTANT but that isn't an actual constant. CODE must be a
790 commutative arithmetic operation. Store the constant part into *CONP,
791 the literal in *LITP and return the variable part. If a part isn't
792 present, set it to null. If the tree does not decompose in this way,
793 return the entire tree as the variable part and the other parts as null.
794
795 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
796 case, we negate an operand that was subtracted. Except if it is a
797 literal for which we use *MINUS_LITP instead.
798
799 If NEGATE_P is true, we are negating all of IN, again except a literal
800 for which we use *MINUS_LITP instead.
801
802 If IN is itself a literal or constant, return it as appropriate.
803
804 Note that we do not guarantee that any of the three values will be the
805 same type as IN, but they will have the same signedness and mode. */
806
807 static tree
808 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
809 tree *minus_litp, int negate_p)
810 {
811 tree var = 0;
812
813 *conp = 0;
814 *litp = 0;
815 *minus_litp = 0;
816
817 /* Strip any conversions that don't change the machine mode or signedness. */
818 STRIP_SIGN_NOPS (in);
819
820 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
821 || TREE_CODE (in) == FIXED_CST)
822 *litp = in;
823 else if (TREE_CODE (in) == code
824 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
825 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
826 /* We can associate addition and subtraction together (even
827 though the C standard doesn't say so) for integers because
828 the value is not affected. For reals, the value might be
829 affected, so we can't. */
830 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
831 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
832 {
833 tree op0 = TREE_OPERAND (in, 0);
834 tree op1 = TREE_OPERAND (in, 1);
835 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
836 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
837
838 /* First see if either of the operands is a literal, then a constant. */
839 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
840 || TREE_CODE (op0) == FIXED_CST)
841 *litp = op0, op0 = 0;
842 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
843 || TREE_CODE (op1) == FIXED_CST)
844 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
845
846 if (op0 != 0 && TREE_CONSTANT (op0))
847 *conp = op0, op0 = 0;
848 else if (op1 != 0 && TREE_CONSTANT (op1))
849 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
850
851 /* If we haven't dealt with either operand, this is not a case we can
852 decompose. Otherwise, VAR is either of the ones remaining, if any. */
853 if (op0 != 0 && op1 != 0)
854 var = in;
855 else if (op0 != 0)
856 var = op0;
857 else
858 var = op1, neg_var_p = neg1_p;
859
860 /* Now do any needed negations. */
861 if (neg_litp_p)
862 *minus_litp = *litp, *litp = 0;
863 if (neg_conp_p)
864 *conp = negate_expr (*conp);
865 if (neg_var_p)
866 var = negate_expr (var);
867 }
868 else if (TREE_CODE (in) == BIT_NOT_EXPR
869 && code == PLUS_EXPR)
870 {
871 /* -X - 1 is folded to ~X, undo that here. */
872 *minus_litp = build_one_cst (TREE_TYPE (in));
873 var = negate_expr (TREE_OPERAND (in, 0));
874 }
875 else if (TREE_CONSTANT (in))
876 *conp = in;
877 else
878 var = in;
879
880 if (negate_p)
881 {
882 if (*litp)
883 *minus_litp = *litp, *litp = 0;
884 else if (*minus_litp)
885 *litp = *minus_litp, *minus_litp = 0;
886 *conp = negate_expr (*conp);
887 var = negate_expr (var);
888 }
889
890 return var;
891 }
892
893 /* Re-associate trees split by the above function. T1 and T2 are
894 either expressions to associate or null. Return the new
895 expression, if any. LOC is the location of the new expression. If
896 we build an operation, do it in TYPE and with CODE. */
897
898 static tree
899 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
900 {
901 if (t1 == 0)
902 return t2;
903 else if (t2 == 0)
904 return t1;
905
906 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
907 try to fold this since we will have infinite recursion. But do
908 deal with any NEGATE_EXPRs. */
909 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
910 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
911 {
912 if (code == PLUS_EXPR)
913 {
914 if (TREE_CODE (t1) == NEGATE_EXPR)
915 return build2_loc (loc, MINUS_EXPR, type,
916 fold_convert_loc (loc, type, t2),
917 fold_convert_loc (loc, type,
918 TREE_OPERAND (t1, 0)));
919 else if (TREE_CODE (t2) == NEGATE_EXPR)
920 return build2_loc (loc, MINUS_EXPR, type,
921 fold_convert_loc (loc, type, t1),
922 fold_convert_loc (loc, type,
923 TREE_OPERAND (t2, 0)));
924 else if (integer_zerop (t2))
925 return fold_convert_loc (loc, type, t1);
926 }
927 else if (code == MINUS_EXPR)
928 {
929 if (integer_zerop (t2))
930 return fold_convert_loc (loc, type, t1);
931 }
932
933 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
934 fold_convert_loc (loc, type, t2));
935 }
936
937 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
938 fold_convert_loc (loc, type, t2));
939 }
940 \f
941 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
942 for use in int_const_binop, size_binop and size_diffop. */
943
944 static bool
945 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
946 {
947 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
948 return false;
949 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
950 return false;
951
952 switch (code)
953 {
954 case LSHIFT_EXPR:
955 case RSHIFT_EXPR:
956 case LROTATE_EXPR:
957 case RROTATE_EXPR:
958 return true;
959
960 default:
961 break;
962 }
963
964 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
965 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
966 && TYPE_MODE (type1) == TYPE_MODE (type2);
967 }
968
969
970 /* Combine two integer constants ARG1 and ARG2 under operation CODE
971 to produce a new constant. Return NULL_TREE if we don't know how
972 to evaluate CODE at compile-time. */
973
974 static tree
975 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
976 int overflowable)
977 {
978 double_int op1, op2, res, tmp;
979 tree t;
980 tree type = TREE_TYPE (arg1);
981 bool uns = TYPE_UNSIGNED (type);
982 bool overflow = false;
983
984 op1 = tree_to_double_int (arg1);
985 op2 = tree_to_double_int (arg2);
986
987 switch (code)
988 {
989 case BIT_IOR_EXPR:
990 res = op1 | op2;
991 break;
992
993 case BIT_XOR_EXPR:
994 res = op1 ^ op2;
995 break;
996
997 case BIT_AND_EXPR:
998 res = op1 & op2;
999 break;
1000
1001 case RSHIFT_EXPR:
1002 res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1003 break;
1004
1005 case LSHIFT_EXPR:
1006 /* It's unclear from the C standard whether shifts can overflow.
1007 The following code ignores overflow; perhaps a C standard
1008 interpretation ruling is needed. */
1009 res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1010 break;
1011
1012 case RROTATE_EXPR:
1013 res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
1014 break;
1015
1016 case LROTATE_EXPR:
1017 res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
1018 break;
1019
1020 case PLUS_EXPR:
1021 res = op1.add_with_sign (op2, false, &overflow);
1022 break;
1023
1024 case MINUS_EXPR:
1025 res = op1.sub_with_overflow (op2, &overflow);
1026 break;
1027
1028 case MULT_EXPR:
1029 res = op1.mul_with_sign (op2, false, &overflow);
1030 break;
1031
1032 case MULT_HIGHPART_EXPR:
1033 if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
1034 {
1035 bool dummy_overflow;
1036 if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
1037 return NULL_TREE;
1038 op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
1039 }
1040 else
1041 {
1042 bool dummy_overflow;
1043 /* MULT_HIGHPART_EXPR can't ever oveflow, as the multiplication
1044 is performed in twice the precision of arguments. */
1045 tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
1046 res = tmp.rshift (TYPE_PRECISION (type),
1047 2 * TYPE_PRECISION (type), !uns);
1048 }
1049 break;
1050
1051 case TRUNC_DIV_EXPR:
1052 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1053 case EXACT_DIV_EXPR:
1054 /* This is a shortcut for a common special case. */
1055 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1056 && !TREE_OVERFLOW (arg1)
1057 && !TREE_OVERFLOW (arg2)
1058 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1059 {
1060 if (code == CEIL_DIV_EXPR)
1061 op1.low += op2.low - 1;
1062
1063 res.low = op1.low / op2.low, res.high = 0;
1064 break;
1065 }
1066
1067 /* ... fall through ... */
1068
1069 case ROUND_DIV_EXPR:
1070 if (op2.is_zero ())
1071 return NULL_TREE;
1072 if (op2.is_one ())
1073 {
1074 res = op1;
1075 break;
1076 }
1077 if (op1 == op2 && !op1.is_zero ())
1078 {
1079 res = double_int_one;
1080 break;
1081 }
1082 res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
1083 break;
1084
1085 case TRUNC_MOD_EXPR:
1086 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1087 /* This is a shortcut for a common special case. */
1088 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1089 && !TREE_OVERFLOW (arg1)
1090 && !TREE_OVERFLOW (arg2)
1091 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1092 {
1093 if (code == CEIL_MOD_EXPR)
1094 op1.low += op2.low - 1;
1095 res.low = op1.low % op2.low, res.high = 0;
1096 break;
1097 }
1098
1099 /* ... fall through ... */
1100
1101 case ROUND_MOD_EXPR:
1102 if (op2.is_zero ())
1103 return NULL_TREE;
1104 tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
1105 break;
1106
1107 case MIN_EXPR:
1108 res = op1.min (op2, uns);
1109 break;
1110
1111 case MAX_EXPR:
1112 res = op1.max (op2, uns);
1113 break;
1114
1115 default:
1116 return NULL_TREE;
1117 }
1118
1119 t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1120 (!uns && overflow)
1121 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1122
1123 return t;
1124 }
1125
1126 tree
1127 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1128 {
1129 return int_const_binop_1 (code, arg1, arg2, 1);
1130 }
1131
1132 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1133 constant. We assume ARG1 and ARG2 have the same data type, or at least
1134 are the same kind of constant and the same machine mode. Return zero if
1135 combining the constants is not allowed in the current operating mode. */
1136
1137 static tree
1138 const_binop (enum tree_code code, tree arg1, tree arg2)
1139 {
1140 /* Sanity check for the recursive cases. */
1141 if (!arg1 || !arg2)
1142 return NULL_TREE;
1143
1144 STRIP_NOPS (arg1);
1145 STRIP_NOPS (arg2);
1146
1147 if (TREE_CODE (arg1) == INTEGER_CST)
1148 return int_const_binop (code, arg1, arg2);
1149
1150 if (TREE_CODE (arg1) == REAL_CST)
1151 {
1152 enum machine_mode mode;
1153 REAL_VALUE_TYPE d1;
1154 REAL_VALUE_TYPE d2;
1155 REAL_VALUE_TYPE value;
1156 REAL_VALUE_TYPE result;
1157 bool inexact;
1158 tree t, type;
1159
1160 /* The following codes are handled by real_arithmetic. */
1161 switch (code)
1162 {
1163 case PLUS_EXPR:
1164 case MINUS_EXPR:
1165 case MULT_EXPR:
1166 case RDIV_EXPR:
1167 case MIN_EXPR:
1168 case MAX_EXPR:
1169 break;
1170
1171 default:
1172 return NULL_TREE;
1173 }
1174
1175 d1 = TREE_REAL_CST (arg1);
1176 d2 = TREE_REAL_CST (arg2);
1177
1178 type = TREE_TYPE (arg1);
1179 mode = TYPE_MODE (type);
1180
1181 /* Don't perform operation if we honor signaling NaNs and
1182 either operand is a NaN. */
1183 if (HONOR_SNANS (mode)
1184 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1185 return NULL_TREE;
1186
1187 /* Don't perform operation if it would raise a division
1188 by zero exception. */
1189 if (code == RDIV_EXPR
1190 && REAL_VALUES_EQUAL (d2, dconst0)
1191 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1192 return NULL_TREE;
1193
1194 /* If either operand is a NaN, just return it. Otherwise, set up
1195 for floating-point trap; we return an overflow. */
1196 if (REAL_VALUE_ISNAN (d1))
1197 return arg1;
1198 else if (REAL_VALUE_ISNAN (d2))
1199 return arg2;
1200
1201 inexact = real_arithmetic (&value, code, &d1, &d2);
1202 real_convert (&result, mode, &value);
1203
1204 /* Don't constant fold this floating point operation if
1205 the result has overflowed and flag_trapping_math. */
1206 if (flag_trapping_math
1207 && MODE_HAS_INFINITIES (mode)
1208 && REAL_VALUE_ISINF (result)
1209 && !REAL_VALUE_ISINF (d1)
1210 && !REAL_VALUE_ISINF (d2))
1211 return NULL_TREE;
1212
1213 /* Don't constant fold this floating point operation if the
1214 result may dependent upon the run-time rounding mode and
1215 flag_rounding_math is set, or if GCC's software emulation
1216 is unable to accurately represent the result. */
1217 if ((flag_rounding_math
1218 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1219 && (inexact || !real_identical (&result, &value)))
1220 return NULL_TREE;
1221
1222 t = build_real (type, result);
1223
1224 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1225 return t;
1226 }
1227
1228 if (TREE_CODE (arg1) == FIXED_CST)
1229 {
1230 FIXED_VALUE_TYPE f1;
1231 FIXED_VALUE_TYPE f2;
1232 FIXED_VALUE_TYPE result;
1233 tree t, type;
1234 int sat_p;
1235 bool overflow_p;
1236
1237 /* The following codes are handled by fixed_arithmetic. */
1238 switch (code)
1239 {
1240 case PLUS_EXPR:
1241 case MINUS_EXPR:
1242 case MULT_EXPR:
1243 case TRUNC_DIV_EXPR:
1244 f2 = TREE_FIXED_CST (arg2);
1245 break;
1246
1247 case LSHIFT_EXPR:
1248 case RSHIFT_EXPR:
1249 f2.data.high = TREE_INT_CST_HIGH (arg2);
1250 f2.data.low = TREE_INT_CST_LOW (arg2);
1251 f2.mode = SImode;
1252 break;
1253
1254 default:
1255 return NULL_TREE;
1256 }
1257
1258 f1 = TREE_FIXED_CST (arg1);
1259 type = TREE_TYPE (arg1);
1260 sat_p = TYPE_SATURATING (type);
1261 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1262 t = build_fixed (type, result);
1263 /* Propagate overflow flags. */
1264 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1265 TREE_OVERFLOW (t) = 1;
1266 return t;
1267 }
1268
1269 if (TREE_CODE (arg1) == COMPLEX_CST)
1270 {
1271 tree type = TREE_TYPE (arg1);
1272 tree r1 = TREE_REALPART (arg1);
1273 tree i1 = TREE_IMAGPART (arg1);
1274 tree r2 = TREE_REALPART (arg2);
1275 tree i2 = TREE_IMAGPART (arg2);
1276 tree real, imag;
1277
1278 switch (code)
1279 {
1280 case PLUS_EXPR:
1281 case MINUS_EXPR:
1282 real = const_binop (code, r1, r2);
1283 imag = const_binop (code, i1, i2);
1284 break;
1285
1286 case MULT_EXPR:
1287 if (COMPLEX_FLOAT_TYPE_P (type))
1288 return do_mpc_arg2 (arg1, arg2, type,
1289 /* do_nonfinite= */ folding_initializer,
1290 mpc_mul);
1291
1292 real = const_binop (MINUS_EXPR,
1293 const_binop (MULT_EXPR, r1, r2),
1294 const_binop (MULT_EXPR, i1, i2));
1295 imag = const_binop (PLUS_EXPR,
1296 const_binop (MULT_EXPR, r1, i2),
1297 const_binop (MULT_EXPR, i1, r2));
1298 break;
1299
1300 case RDIV_EXPR:
1301 if (COMPLEX_FLOAT_TYPE_P (type))
1302 return do_mpc_arg2 (arg1, arg2, type,
1303 /* do_nonfinite= */ folding_initializer,
1304 mpc_div);
1305 /* Fallthru ... */
1306 case TRUNC_DIV_EXPR:
1307 case CEIL_DIV_EXPR:
1308 case FLOOR_DIV_EXPR:
1309 case ROUND_DIV_EXPR:
1310 if (flag_complex_method == 0)
1311 {
1312 /* Keep this algorithm in sync with
1313 tree-complex.c:expand_complex_div_straight().
1314
1315 Expand complex division to scalars, straightforward algorithm.
1316 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1317 t = br*br + bi*bi
1318 */
1319 tree magsquared
1320 = const_binop (PLUS_EXPR,
1321 const_binop (MULT_EXPR, r2, r2),
1322 const_binop (MULT_EXPR, i2, i2));
1323 tree t1
1324 = const_binop (PLUS_EXPR,
1325 const_binop (MULT_EXPR, r1, r2),
1326 const_binop (MULT_EXPR, i1, i2));
1327 tree t2
1328 = const_binop (MINUS_EXPR,
1329 const_binop (MULT_EXPR, i1, r2),
1330 const_binop (MULT_EXPR, r1, i2));
1331
1332 real = const_binop (code, t1, magsquared);
1333 imag = const_binop (code, t2, magsquared);
1334 }
1335 else
1336 {
1337 /* Keep this algorithm in sync with
1338 tree-complex.c:expand_complex_div_wide().
1339
1340 Expand complex division to scalars, modified algorithm to minimize
1341 overflow with wide input ranges. */
1342 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1343 fold_abs_const (r2, TREE_TYPE (type)),
1344 fold_abs_const (i2, TREE_TYPE (type)));
1345
1346 if (integer_nonzerop (compare))
1347 {
1348 /* In the TRUE branch, we compute
1349 ratio = br/bi;
1350 div = (br * ratio) + bi;
1351 tr = (ar * ratio) + ai;
1352 ti = (ai * ratio) - ar;
1353 tr = tr / div;
1354 ti = ti / div; */
1355 tree ratio = const_binop (code, r2, i2);
1356 tree div = const_binop (PLUS_EXPR, i2,
1357 const_binop (MULT_EXPR, r2, ratio));
1358 real = const_binop (MULT_EXPR, r1, ratio);
1359 real = const_binop (PLUS_EXPR, real, i1);
1360 real = const_binop (code, real, div);
1361
1362 imag = const_binop (MULT_EXPR, i1, ratio);
1363 imag = const_binop (MINUS_EXPR, imag, r1);
1364 imag = const_binop (code, imag, div);
1365 }
1366 else
1367 {
1368 /* In the FALSE branch, we compute
1369 ratio = d/c;
1370 divisor = (d * ratio) + c;
1371 tr = (b * ratio) + a;
1372 ti = b - (a * ratio);
1373 tr = tr / div;
1374 ti = ti / div; */
1375 tree ratio = const_binop (code, i2, r2);
1376 tree div = const_binop (PLUS_EXPR, r2,
1377 const_binop (MULT_EXPR, i2, ratio));
1378
1379 real = const_binop (MULT_EXPR, i1, ratio);
1380 real = const_binop (PLUS_EXPR, real, r1);
1381 real = const_binop (code, real, div);
1382
1383 imag = const_binop (MULT_EXPR, r1, ratio);
1384 imag = const_binop (MINUS_EXPR, i1, imag);
1385 imag = const_binop (code, imag, div);
1386 }
1387 }
1388 break;
1389
1390 default:
1391 return NULL_TREE;
1392 }
1393
1394 if (real && imag)
1395 return build_complex (type, real, imag);
1396 }
1397
1398 if (TREE_CODE (arg1) == VECTOR_CST
1399 && TREE_CODE (arg2) == VECTOR_CST)
1400 {
1401 tree type = TREE_TYPE (arg1);
1402 int count = TYPE_VECTOR_SUBPARTS (type), i;
1403 tree *elts = XALLOCAVEC (tree, count);
1404
1405 for (i = 0; i < count; i++)
1406 {
1407 tree elem1 = VECTOR_CST_ELT (arg1, i);
1408 tree elem2 = VECTOR_CST_ELT (arg2, i);
1409
1410 elts[i] = const_binop (code, elem1, elem2);
1411
1412 /* It is possible that const_binop cannot handle the given
1413 code and return NULL_TREE */
1414 if (elts[i] == NULL_TREE)
1415 return NULL_TREE;
1416 }
1417
1418 return build_vector (type, elts);
1419 }
1420
1421 /* Shifts allow a scalar offset for a vector. */
1422 if (TREE_CODE (arg1) == VECTOR_CST
1423 && TREE_CODE (arg2) == INTEGER_CST)
1424 {
1425 tree type = TREE_TYPE (arg1);
1426 int count = TYPE_VECTOR_SUBPARTS (type), i;
1427 tree *elts = XALLOCAVEC (tree, count);
1428
1429 if (code == VEC_LSHIFT_EXPR
1430 || code == VEC_RSHIFT_EXPR)
1431 {
1432 if (!host_integerp (arg2, 1))
1433 return NULL_TREE;
1434
1435 unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
1436 unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
1437 unsigned HOST_WIDE_INT innerc
1438 = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
1439 if (shiftc >= outerc || (shiftc % innerc) != 0)
1440 return NULL_TREE;
1441 int offset = shiftc / innerc;
1442 /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
1443 For reductions, compiler emits VEC_RSHIFT_EXPR always,
1444 for !BYTES_BIG_ENDIAN picks first vector element, but
1445 for BYTES_BIG_ENDIAN last element from the vector. */
1446 if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
1447 offset = -offset;
1448 tree zero = build_zero_cst (TREE_TYPE (type));
1449 for (i = 0; i < count; i++)
1450 {
1451 if (i + offset < 0 || i + offset >= count)
1452 elts[i] = zero;
1453 else
1454 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1455 }
1456 }
1457 else
1458 for (i = 0; i < count; i++)
1459 {
1460 tree elem1 = VECTOR_CST_ELT (arg1, i);
1461
1462 elts[i] = const_binop (code, elem1, arg2);
1463
1464 /* It is possible that const_binop cannot handle the given
1465 code and return NULL_TREE */
1466 if (elts[i] == NULL_TREE)
1467 return NULL_TREE;
1468 }
1469
1470 return build_vector (type, elts);
1471 }
1472 return NULL_TREE;
1473 }
1474
1475 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1476 indicates which particular sizetype to create. */
1477
1478 tree
1479 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1480 {
1481 return build_int_cst (sizetype_tab[(int) kind], number);
1482 }
1483 \f
1484 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1485 is a tree code. The type of the result is taken from the operands.
1486 Both must be equivalent integer types, ala int_binop_types_match_p.
1487 If the operands are constant, so is the result. */
1488
1489 tree
1490 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1491 {
1492 tree type = TREE_TYPE (arg0);
1493
1494 if (arg0 == error_mark_node || arg1 == error_mark_node)
1495 return error_mark_node;
1496
1497 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1498 TREE_TYPE (arg1)));
1499
1500 /* Handle the special case of two integer constants faster. */
1501 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1502 {
1503 /* And some specific cases even faster than that. */
1504 if (code == PLUS_EXPR)
1505 {
1506 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1507 return arg1;
1508 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1509 return arg0;
1510 }
1511 else if (code == MINUS_EXPR)
1512 {
1513 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1514 return arg0;
1515 }
1516 else if (code == MULT_EXPR)
1517 {
1518 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1519 return arg1;
1520 }
1521
1522 /* Handle general case of two integer constants. For sizetype
1523 constant calculations we always want to know about overflow,
1524 even in the unsigned case. */
1525 return int_const_binop_1 (code, arg0, arg1, -1);
1526 }
1527
1528 return fold_build2_loc (loc, code, type, arg0, arg1);
1529 }
1530
1531 /* Given two values, either both of sizetype or both of bitsizetype,
1532 compute the difference between the two values. Return the value
1533 in signed type corresponding to the type of the operands. */
1534
1535 tree
1536 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1537 {
1538 tree type = TREE_TYPE (arg0);
1539 tree ctype;
1540
1541 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1542 TREE_TYPE (arg1)));
1543
1544 /* If the type is already signed, just do the simple thing. */
1545 if (!TYPE_UNSIGNED (type))
1546 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1547
1548 if (type == sizetype)
1549 ctype = ssizetype;
1550 else if (type == bitsizetype)
1551 ctype = sbitsizetype;
1552 else
1553 ctype = signed_type_for (type);
1554
1555 /* If either operand is not a constant, do the conversions to the signed
1556 type and subtract. The hardware will do the right thing with any
1557 overflow in the subtraction. */
1558 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1559 return size_binop_loc (loc, MINUS_EXPR,
1560 fold_convert_loc (loc, ctype, arg0),
1561 fold_convert_loc (loc, ctype, arg1));
1562
1563 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1564 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1565 overflow) and negate (which can't either). Special-case a result
1566 of zero while we're here. */
1567 if (tree_int_cst_equal (arg0, arg1))
1568 return build_int_cst (ctype, 0);
1569 else if (tree_int_cst_lt (arg1, arg0))
1570 return fold_convert_loc (loc, ctype,
1571 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1572 else
1573 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1574 fold_convert_loc (loc, ctype,
1575 size_binop_loc (loc,
1576 MINUS_EXPR,
1577 arg1, arg0)));
1578 }
1579 \f
1580 /* A subroutine of fold_convert_const handling conversions of an
1581 INTEGER_CST to another integer type. */
1582
1583 static tree
1584 fold_convert_const_int_from_int (tree type, const_tree arg1)
1585 {
1586 tree t;
1587
1588 /* Given an integer constant, make new constant with new type,
1589 appropriately sign-extended or truncated. */
1590 t = force_fit_type_double (type, tree_to_double_int (arg1),
1591 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1592 (TREE_INT_CST_HIGH (arg1) < 0
1593 && (TYPE_UNSIGNED (type)
1594 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1595 | TREE_OVERFLOW (arg1));
1596
1597 return t;
1598 }
1599
1600 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1601 to an integer type. */
1602
1603 static tree
1604 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1605 {
1606 int overflow = 0;
1607 tree t;
1608
1609 /* The following code implements the floating point to integer
1610 conversion rules required by the Java Language Specification,
1611 that IEEE NaNs are mapped to zero and values that overflow
1612 the target precision saturate, i.e. values greater than
1613 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1614 are mapped to INT_MIN. These semantics are allowed by the
1615 C and C++ standards that simply state that the behavior of
1616 FP-to-integer conversion is unspecified upon overflow. */
1617
1618 double_int val;
1619 REAL_VALUE_TYPE r;
1620 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1621
1622 switch (code)
1623 {
1624 case FIX_TRUNC_EXPR:
1625 real_trunc (&r, VOIDmode, &x);
1626 break;
1627
1628 default:
1629 gcc_unreachable ();
1630 }
1631
1632 /* If R is NaN, return zero and show we have an overflow. */
1633 if (REAL_VALUE_ISNAN (r))
1634 {
1635 overflow = 1;
1636 val = double_int_zero;
1637 }
1638
1639 /* See if R is less than the lower bound or greater than the
1640 upper bound. */
1641
1642 if (! overflow)
1643 {
1644 tree lt = TYPE_MIN_VALUE (type);
1645 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1646 if (REAL_VALUES_LESS (r, l))
1647 {
1648 overflow = 1;
1649 val = tree_to_double_int (lt);
1650 }
1651 }
1652
1653 if (! overflow)
1654 {
1655 tree ut = TYPE_MAX_VALUE (type);
1656 if (ut)
1657 {
1658 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1659 if (REAL_VALUES_LESS (u, r))
1660 {
1661 overflow = 1;
1662 val = tree_to_double_int (ut);
1663 }
1664 }
1665 }
1666
1667 if (! overflow)
1668 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1669
1670 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1671 return t;
1672 }
1673
1674 /* A subroutine of fold_convert_const handling conversions of a
1675 FIXED_CST to an integer type. */
1676
1677 static tree
1678 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1679 {
1680 tree t;
1681 double_int temp, temp_trunc;
1682 unsigned int mode;
1683
1684 /* Right shift FIXED_CST to temp by fbit. */
1685 temp = TREE_FIXED_CST (arg1).data;
1686 mode = TREE_FIXED_CST (arg1).mode;
1687 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1688 {
1689 temp = temp.rshift (GET_MODE_FBIT (mode),
1690 HOST_BITS_PER_DOUBLE_INT,
1691 SIGNED_FIXED_POINT_MODE_P (mode));
1692
1693 /* Left shift temp to temp_trunc by fbit. */
1694 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1695 HOST_BITS_PER_DOUBLE_INT,
1696 SIGNED_FIXED_POINT_MODE_P (mode));
1697 }
1698 else
1699 {
1700 temp = double_int_zero;
1701 temp_trunc = double_int_zero;
1702 }
1703
1704 /* If FIXED_CST is negative, we need to round the value toward 0.
1705 By checking if the fractional bits are not zero to add 1 to temp. */
1706 if (SIGNED_FIXED_POINT_MODE_P (mode)
1707 && temp_trunc.is_negative ()
1708 && TREE_FIXED_CST (arg1).data != temp_trunc)
1709 temp += double_int_one;
1710
1711 /* Given a fixed-point constant, make new constant with new type,
1712 appropriately sign-extended or truncated. */
1713 t = force_fit_type_double (type, temp, -1,
1714 (temp.is_negative ()
1715 && (TYPE_UNSIGNED (type)
1716 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1717 | TREE_OVERFLOW (arg1));
1718
1719 return t;
1720 }
1721
1722 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1723 to another floating point type. */
1724
1725 static tree
1726 fold_convert_const_real_from_real (tree type, const_tree arg1)
1727 {
1728 REAL_VALUE_TYPE value;
1729 tree t;
1730
1731 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1732 t = build_real (type, value);
1733
1734 /* If converting an infinity or NAN to a representation that doesn't
1735 have one, set the overflow bit so that we can produce some kind of
1736 error message at the appropriate point if necessary. It's not the
1737 most user-friendly message, but it's better than nothing. */
1738 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1739 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1740 TREE_OVERFLOW (t) = 1;
1741 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1742 && !MODE_HAS_NANS (TYPE_MODE (type)))
1743 TREE_OVERFLOW (t) = 1;
1744 /* Regular overflow, conversion produced an infinity in a mode that
1745 can't represent them. */
1746 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1747 && REAL_VALUE_ISINF (value)
1748 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1749 TREE_OVERFLOW (t) = 1;
1750 else
1751 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1752 return t;
1753 }
1754
1755 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1756 to a floating point type. */
1757
1758 static tree
1759 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1760 {
1761 REAL_VALUE_TYPE value;
1762 tree t;
1763
1764 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1765 t = build_real (type, value);
1766
1767 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1768 return t;
1769 }
1770
1771 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1772 to another fixed-point type. */
1773
1774 static tree
1775 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1776 {
1777 FIXED_VALUE_TYPE value;
1778 tree t;
1779 bool overflow_p;
1780
1781 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1782 TYPE_SATURATING (type));
1783 t = build_fixed (type, value);
1784
1785 /* Propagate overflow flags. */
1786 if (overflow_p | TREE_OVERFLOW (arg1))
1787 TREE_OVERFLOW (t) = 1;
1788 return t;
1789 }
1790
1791 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1792 to a fixed-point type. */
1793
1794 static tree
1795 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1796 {
1797 FIXED_VALUE_TYPE value;
1798 tree t;
1799 bool overflow_p;
1800
1801 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1802 TREE_INT_CST (arg1),
1803 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1804 TYPE_SATURATING (type));
1805 t = build_fixed (type, value);
1806
1807 /* Propagate overflow flags. */
1808 if (overflow_p | TREE_OVERFLOW (arg1))
1809 TREE_OVERFLOW (t) = 1;
1810 return t;
1811 }
1812
1813 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1814 to a fixed-point type. */
1815
1816 static tree
1817 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1818 {
1819 FIXED_VALUE_TYPE value;
1820 tree t;
1821 bool overflow_p;
1822
1823 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1824 &TREE_REAL_CST (arg1),
1825 TYPE_SATURATING (type));
1826 t = build_fixed (type, value);
1827
1828 /* Propagate overflow flags. */
1829 if (overflow_p | TREE_OVERFLOW (arg1))
1830 TREE_OVERFLOW (t) = 1;
1831 return t;
1832 }
1833
1834 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1835 type TYPE. If no simplification can be done return NULL_TREE. */
1836
1837 static tree
1838 fold_convert_const (enum tree_code code, tree type, tree arg1)
1839 {
1840 if (TREE_TYPE (arg1) == type)
1841 return arg1;
1842
1843 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1844 || TREE_CODE (type) == OFFSET_TYPE)
1845 {
1846 if (TREE_CODE (arg1) == INTEGER_CST)
1847 return fold_convert_const_int_from_int (type, arg1);
1848 else if (TREE_CODE (arg1) == REAL_CST)
1849 return fold_convert_const_int_from_real (code, type, arg1);
1850 else if (TREE_CODE (arg1) == FIXED_CST)
1851 return fold_convert_const_int_from_fixed (type, arg1);
1852 }
1853 else if (TREE_CODE (type) == REAL_TYPE)
1854 {
1855 if (TREE_CODE (arg1) == INTEGER_CST)
1856 return build_real_from_int_cst (type, arg1);
1857 else if (TREE_CODE (arg1) == REAL_CST)
1858 return fold_convert_const_real_from_real (type, arg1);
1859 else if (TREE_CODE (arg1) == FIXED_CST)
1860 return fold_convert_const_real_from_fixed (type, arg1);
1861 }
1862 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1863 {
1864 if (TREE_CODE (arg1) == FIXED_CST)
1865 return fold_convert_const_fixed_from_fixed (type, arg1);
1866 else if (TREE_CODE (arg1) == INTEGER_CST)
1867 return fold_convert_const_fixed_from_int (type, arg1);
1868 else if (TREE_CODE (arg1) == REAL_CST)
1869 return fold_convert_const_fixed_from_real (type, arg1);
1870 }
1871 return NULL_TREE;
1872 }
1873
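/* Example (a sketch): folding the C expression (double) 3 reaches this
   dispatch with CODE == FLOAT_EXPR, TYPE the double type and ARG1 an
   INTEGER_CST of 3; the REAL_TYPE arm returns
   build_real_from_int_cst (type, arg1), i.e. a REAL_CST of 3.0.  A
   conversion without a constant operand, say (double) x for a variable
   x, matches no arm and yields NULL_TREE, leaving the caller to build
   the conversion expression instead.  */
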
1874 /* Construct a vector of zero elements of vector type TYPE. */
1875
1876 static tree
1877 build_zero_vector (tree type)
1878 {
1879 tree t;
1880
1881 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1882 return build_vector_from_val (type, t);
1883 }
1884
1885 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1886
1887 bool
1888 fold_convertible_p (const_tree type, const_tree arg)
1889 {
1890 tree orig = TREE_TYPE (arg);
1891
1892 if (type == orig)
1893 return true;
1894
1895 if (TREE_CODE (arg) == ERROR_MARK
1896 || TREE_CODE (type) == ERROR_MARK
1897 || TREE_CODE (orig) == ERROR_MARK)
1898 return false;
1899
1900 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1901 return true;
1902
1903 switch (TREE_CODE (type))
1904 {
1905 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1906 case POINTER_TYPE: case REFERENCE_TYPE:
1907 case OFFSET_TYPE:
1908 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1909 || TREE_CODE (orig) == OFFSET_TYPE)
1910 return true;
1911 return (TREE_CODE (orig) == VECTOR_TYPE
1912 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1913
1914 case REAL_TYPE:
1915 case FIXED_POINT_TYPE:
1916 case COMPLEX_TYPE:
1917 case VECTOR_TYPE:
1918 case VOID_TYPE:
1919 return TREE_CODE (type) == TREE_CODE (orig);
1920
1921 default:
1922 return false;
1923 }
1924 }
1925
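/* Example (a sketch): fold_convertible_p (integer_type_node, e) is true
   when e has an enumeral or pointer type, both covered by the first
   case above, but false when e has a float type: a float-to-integer
   conversion is a FIX_TRUNC_EXPR, not a NOP_EXPR.  */
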
1926 /* Convert expression ARG to type TYPE. Used by the middle-end for
1927 simple conversions in preference to calling the front-end's convert. */
1928
1929 tree
1930 fold_convert_loc (location_t loc, tree type, tree arg)
1931 {
1932 tree orig = TREE_TYPE (arg);
1933 tree tem;
1934
1935 if (type == orig)
1936 return arg;
1937
1938 if (TREE_CODE (arg) == ERROR_MARK
1939 || TREE_CODE (type) == ERROR_MARK
1940 || TREE_CODE (orig) == ERROR_MARK)
1941 return error_mark_node;
1942
1943 switch (TREE_CODE (type))
1944 {
1945 case POINTER_TYPE:
1946 case REFERENCE_TYPE:
1947 /* Handle conversions between pointers to different address spaces. */
1948 if (POINTER_TYPE_P (orig)
1949 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1950 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1951 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1952 /* fall through */
1953
1954 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1955 case OFFSET_TYPE:
1956 if (TREE_CODE (arg) == INTEGER_CST)
1957 {
1958 tem = fold_convert_const (NOP_EXPR, type, arg);
1959 if (tem != NULL_TREE)
1960 return tem;
1961 }
1962 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1963 || TREE_CODE (orig) == OFFSET_TYPE)
1964 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1965 if (TREE_CODE (orig) == COMPLEX_TYPE)
1966 return fold_convert_loc (loc, type,
1967 fold_build1_loc (loc, REALPART_EXPR,
1968 TREE_TYPE (orig), arg));
1969 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1970 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1971 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1972
1973 case REAL_TYPE:
1974 if (TREE_CODE (arg) == INTEGER_CST)
1975 {
1976 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1977 if (tem != NULL_TREE)
1978 return tem;
1979 }
1980 else if (TREE_CODE (arg) == REAL_CST)
1981 {
1982 tem = fold_convert_const (NOP_EXPR, type, arg);
1983 if (tem != NULL_TREE)
1984 return tem;
1985 }
1986 else if (TREE_CODE (arg) == FIXED_CST)
1987 {
1988 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1989 if (tem != NULL_TREE)
1990 return tem;
1991 }
1992
1993 switch (TREE_CODE (orig))
1994 {
1995 case INTEGER_TYPE:
1996 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1997 case POINTER_TYPE: case REFERENCE_TYPE:
1998 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1999
2000 case REAL_TYPE:
2001 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2002
2003 case FIXED_POINT_TYPE:
2004 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2005
2006 case COMPLEX_TYPE:
2007 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2008 return fold_convert_loc (loc, type, tem);
2009
2010 default:
2011 gcc_unreachable ();
2012 }
2013
2014 case FIXED_POINT_TYPE:
2015 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2016 || TREE_CODE (arg) == REAL_CST)
2017 {
2018 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2019 if (tem != NULL_TREE)
2020 goto fold_convert_exit;
2021 }
2022
2023 switch (TREE_CODE (orig))
2024 {
2025 case FIXED_POINT_TYPE:
2026 case INTEGER_TYPE:
2027 case ENUMERAL_TYPE:
2028 case BOOLEAN_TYPE:
2029 case REAL_TYPE:
2030 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2031
2032 case COMPLEX_TYPE:
2033 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2034 return fold_convert_loc (loc, type, tem);
2035
2036 default:
2037 gcc_unreachable ();
2038 }
2039
2040 case COMPLEX_TYPE:
2041 switch (TREE_CODE (orig))
2042 {
2043 case INTEGER_TYPE:
2044 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2045 case POINTER_TYPE: case REFERENCE_TYPE:
2046 case REAL_TYPE:
2047 case FIXED_POINT_TYPE:
2048 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2049 fold_convert_loc (loc, TREE_TYPE (type), arg),
2050 fold_convert_loc (loc, TREE_TYPE (type),
2051 integer_zero_node));
2052 case COMPLEX_TYPE:
2053 {
2054 tree rpart, ipart;
2055
2056 if (TREE_CODE (arg) == COMPLEX_EXPR)
2057 {
2058 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2059 TREE_OPERAND (arg, 0));
2060 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2061 TREE_OPERAND (arg, 1));
2062 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2063 }
2064
2065 arg = save_expr (arg);
2066 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2067 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2068 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2069 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2070 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2071 }
2072
2073 default:
2074 gcc_unreachable ();
2075 }
2076
2077 case VECTOR_TYPE:
2078 if (integer_zerop (arg))
2079 return build_zero_vector (type);
2080 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2081 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2082 || TREE_CODE (orig) == VECTOR_TYPE);
2083 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2084
2085 case VOID_TYPE:
2086 tem = fold_ignored_result (arg);
2087 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2088
2089 default:
2090 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2091 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2092 gcc_unreachable ();
2093 }
2094 fold_convert_exit:
2095 protected_set_expr_location_unshare (tem, loc);
2096 return tem;
2097 }
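
/* Example (a sketch): converting a complex value to a scalar keeps only
   the real part, so fold_convert_loc from _Complex double to double
   builds REALPART_EXPR <arg>; the reverse direction builds
   COMPLEX_EXPR <(double) arg, 0.0>, the value with a zero imaginary
   part, per the COMPLEX_TYPE case above.  */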
2098 \f
2099 /* Return false if X can be assumed not to be an lvalue, true
2100 otherwise. */
2101
2102 static bool
2103 maybe_lvalue_p (const_tree x)
2104 {
2105 /* We only need to wrap lvalue tree codes. */
2106 switch (TREE_CODE (x))
2107 {
2108 case VAR_DECL:
2109 case PARM_DECL:
2110 case RESULT_DECL:
2111 case LABEL_DECL:
2112 case FUNCTION_DECL:
2113 case SSA_NAME:
2114
2115 case COMPONENT_REF:
2116 case MEM_REF:
2117 case INDIRECT_REF:
2118 case ARRAY_REF:
2119 case ARRAY_RANGE_REF:
2120 case BIT_FIELD_REF:
2121 case OBJ_TYPE_REF:
2122
2123 case REALPART_EXPR:
2124 case IMAGPART_EXPR:
2125 case PREINCREMENT_EXPR:
2126 case PREDECREMENT_EXPR:
2127 case SAVE_EXPR:
2128 case TRY_CATCH_EXPR:
2129 case WITH_CLEANUP_EXPR:
2130 case COMPOUND_EXPR:
2131 case MODIFY_EXPR:
2132 case TARGET_EXPR:
2133 case COND_EXPR:
2134 case BIND_EXPR:
2135 break;
2136
2137 default:
2138 /* Assume the worst for front-end tree codes. */
2139 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2140 break;
2141 return false;
2142 }
2143
2144 return true;
2145 }
2146
2147 /* Return an expr equal to X but certainly not valid as an lvalue. */
2148
2149 tree
2150 non_lvalue_loc (location_t loc, tree x)
2151 {
2152 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2153 us. */
2154 if (in_gimple_form)
2155 return x;
2156
2157 if (! maybe_lvalue_p (x))
2158 return x;
2159 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2160 }
2161
2162 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2163 Zero means allow extended lvalues. */
2164
2165 int pedantic_lvalues;
2166
2167 /* When pedantic, return an expr equal to X but certainly not valid as a
2168 pedantic lvalue. Otherwise, return X. */
2169
2170 static tree
2171 pedantic_non_lvalue_loc (location_t loc, tree x)
2172 {
2173 if (pedantic_lvalues)
2174 return non_lvalue_loc (loc, x);
2175
2176 return protected_set_expr_location_unshare (x, loc);
2177 }
2178 \f
2179 /* Given a tree comparison code, return the code that is the logical inverse.
2180 It is generally not safe to do this for floating-point comparisons, except
2181 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2182 ERROR_MARK in this case. */
2183
2184 enum tree_code
2185 invert_tree_comparison (enum tree_code code, bool honor_nans)
2186 {
2187 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2188 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2189 return ERROR_MARK;
2190
2191 switch (code)
2192 {
2193 case EQ_EXPR:
2194 return NE_EXPR;
2195 case NE_EXPR:
2196 return EQ_EXPR;
2197 case GT_EXPR:
2198 return honor_nans ? UNLE_EXPR : LE_EXPR;
2199 case GE_EXPR:
2200 return honor_nans ? UNLT_EXPR : LT_EXPR;
2201 case LT_EXPR:
2202 return honor_nans ? UNGE_EXPR : GE_EXPR;
2203 case LE_EXPR:
2204 return honor_nans ? UNGT_EXPR : GT_EXPR;
2205 case LTGT_EXPR:
2206 return UNEQ_EXPR;
2207 case UNEQ_EXPR:
2208 return LTGT_EXPR;
2209 case UNGT_EXPR:
2210 return LE_EXPR;
2211 case UNGE_EXPR:
2212 return LT_EXPR;
2213 case UNLT_EXPR:
2214 return GE_EXPR;
2215 case UNLE_EXPR:
2216 return GT_EXPR;
2217 case ORDERED_EXPR:
2218 return UNORDERED_EXPR;
2219 case UNORDERED_EXPR:
2220 return ORDERED_EXPR;
2221 default:
2222 gcc_unreachable ();
2223 }
2224 }
2225
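/* Example (a sketch): for integers, invert_tree_comparison (LT_EXPR,
   false) is GE_EXPR.  With NaNs honored, !(a < b) is not a >= b (both
   are false on unordered operands), so the inverse is UNGE_EXPR; and
   under -ftrapping-math even that is refused via ERROR_MARK, because
   a < b traps on a NaN while the unordered variants must not.  */
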
2226 /* Similar, but return the comparison that results if the operands are
2227 swapped. This is safe for floating-point. */
2228
2229 enum tree_code
2230 swap_tree_comparison (enum tree_code code)
2231 {
2232 switch (code)
2233 {
2234 case EQ_EXPR:
2235 case NE_EXPR:
2236 case ORDERED_EXPR:
2237 case UNORDERED_EXPR:
2238 case LTGT_EXPR:
2239 case UNEQ_EXPR:
2240 return code;
2241 case GT_EXPR:
2242 return LT_EXPR;
2243 case GE_EXPR:
2244 return LE_EXPR;
2245 case LT_EXPR:
2246 return GT_EXPR;
2247 case LE_EXPR:
2248 return GE_EXPR;
2249 case UNGT_EXPR:
2250 return UNLT_EXPR;
2251 case UNGE_EXPR:
2252 return UNLE_EXPR;
2253 case UNLT_EXPR:
2254 return UNGT_EXPR;
2255 case UNLE_EXPR:
2256 return UNGE_EXPR;
2257 default:
2258 gcc_unreachable ();
2259 }
2260 }
2261
2262
2263 /* Convert a comparison tree code from an enum tree_code representation
2264 into a compcode bit-based encoding. This function is the inverse of
2265 compcode_to_comparison. */
2266
2267 static enum comparison_code
2268 comparison_to_compcode (enum tree_code code)
2269 {
2270 switch (code)
2271 {
2272 case LT_EXPR:
2273 return COMPCODE_LT;
2274 case EQ_EXPR:
2275 return COMPCODE_EQ;
2276 case LE_EXPR:
2277 return COMPCODE_LE;
2278 case GT_EXPR:
2279 return COMPCODE_GT;
2280 case NE_EXPR:
2281 return COMPCODE_NE;
2282 case GE_EXPR:
2283 return COMPCODE_GE;
2284 case ORDERED_EXPR:
2285 return COMPCODE_ORD;
2286 case UNORDERED_EXPR:
2287 return COMPCODE_UNORD;
2288 case UNLT_EXPR:
2289 return COMPCODE_UNLT;
2290 case UNEQ_EXPR:
2291 return COMPCODE_UNEQ;
2292 case UNLE_EXPR:
2293 return COMPCODE_UNLE;
2294 case UNGT_EXPR:
2295 return COMPCODE_UNGT;
2296 case LTGT_EXPR:
2297 return COMPCODE_LTGT;
2298 case UNGE_EXPR:
2299 return COMPCODE_UNGE;
2300 default:
2301 gcc_unreachable ();
2302 }
2303 }
2304
2305 /* Convert a compcode bit-based encoding of a comparison operator back
2306 to GCC's enum tree_code representation. This function is the
2307 inverse of comparison_to_compcode. */
2308
2309 static enum tree_code
2310 compcode_to_comparison (enum comparison_code code)
2311 {
2312 switch (code)
2313 {
2314 case COMPCODE_LT:
2315 return LT_EXPR;
2316 case COMPCODE_EQ:
2317 return EQ_EXPR;
2318 case COMPCODE_LE:
2319 return LE_EXPR;
2320 case COMPCODE_GT:
2321 return GT_EXPR;
2322 case COMPCODE_NE:
2323 return NE_EXPR;
2324 case COMPCODE_GE:
2325 return GE_EXPR;
2326 case COMPCODE_ORD:
2327 return ORDERED_EXPR;
2328 case COMPCODE_UNORD:
2329 return UNORDERED_EXPR;
2330 case COMPCODE_UNLT:
2331 return UNLT_EXPR;
2332 case COMPCODE_UNEQ:
2333 return UNEQ_EXPR;
2334 case COMPCODE_UNLE:
2335 return UNLE_EXPR;
2336 case COMPCODE_UNGT:
2337 return UNGT_EXPR;
2338 case COMPCODE_LTGT:
2339 return LTGT_EXPR;
2340 case COMPCODE_UNGE:
2341 return UNGE_EXPR;
2342 default:
2343 gcc_unreachable ();
2344 }
2345 }
2346
2347 /* Return a tree for the comparison which is the combination of
2348 doing the AND or OR (depending on CODE) of the two operations LCODE
2349 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2350 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2351 if this makes the transformation invalid. */
2352
2353 tree
2354 combine_comparisons (location_t loc,
2355 enum tree_code code, enum tree_code lcode,
2356 enum tree_code rcode, tree truth_type,
2357 tree ll_arg, tree lr_arg)
2358 {
2359 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2360 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2361 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2362 int compcode;
2363
2364 switch (code)
2365 {
2366 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2367 compcode = lcompcode & rcompcode;
2368 break;
2369
2370 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2371 compcode = lcompcode | rcompcode;
2372 break;
2373
2374 default:
2375 return NULL_TREE;
2376 }
2377
2378 if (!honor_nans)
2379 {
2380 /* Eliminate unordered comparisons, as well as LTGT and ORD
2381 which are not used unless the mode has NaNs. */
2382 compcode &= ~COMPCODE_UNORD;
2383 if (compcode == COMPCODE_LTGT)
2384 compcode = COMPCODE_NE;
2385 else if (compcode == COMPCODE_ORD)
2386 compcode = COMPCODE_TRUE;
2387 }
2388 else if (flag_trapping_math)
2389 {
2390 /* Check that the original operation and the optimized ones will trap
2391 under the same condition. */
2392 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2393 && (lcompcode != COMPCODE_EQ)
2394 && (lcompcode != COMPCODE_ORD);
2395 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2396 && (rcompcode != COMPCODE_EQ)
2397 && (rcompcode != COMPCODE_ORD);
2398 bool trap = (compcode & COMPCODE_UNORD) == 0
2399 && (compcode != COMPCODE_EQ)
2400 && (compcode != COMPCODE_ORD);
2401
2402 /* In a short-circuited boolean expression the LHS might be
2403 such that the RHS, if evaluated, will never trap. For
2404 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2405 if neither x nor y is NaN. (This is a mixed blessing: for
2406 example, the expression above will never trap, hence
2407 optimizing it to x < y would be invalid). */
2408 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2409 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2410 rtrap = false;
2411
2412 /* If the comparison was short-circuited, and only the RHS
2413 trapped, we may now generate a spurious trap. */
2414 if (rtrap && !ltrap
2415 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2416 return NULL_TREE;
2417
2418 /* If we changed the conditions that cause a trap, we lose. */
2419 if ((ltrap || rtrap) != trap)
2420 return NULL_TREE;
2421 }
2422
2423 if (compcode == COMPCODE_TRUE)
2424 return constant_boolean_node (true, truth_type);
2425 else if (compcode == COMPCODE_FALSE)
2426 return constant_boolean_node (false, truth_type);
2427 else
2428 {
2429 enum tree_code tcode;
2430
2431 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2432 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2433 }
2434 }
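
/* Example (a sketch of the compcode encoding): for integer operands a
   and b, folding (a < b) || (a == b) combines
   COMPCODE_LT (1) | COMPCODE_EQ (2) == COMPCODE_LE (3), so the whole
   expression becomes a <= b; likewise (a < b) && (a == b) yields
   COMPCODE_FALSE and folds to constant false.  */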
2435 \f
2436 /* Return nonzero if two operands (typically of the same tree node)
2437 are necessarily equal. If either argument has side-effects this
2438 function returns zero. FLAGS modifies behavior as follows:
2439
2440 If OEP_ONLY_CONST is set, only return nonzero for constants.
2441 This function tests whether the operands are indistinguishable;
2442 it does not test whether they are equal using C's == operation.
2443 The distinction is important for IEEE floating point, because
2444 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2445 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2446
2447 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2448 even though it may hold multiple values during a function.
2449 This is because a GCC tree node guarantees that nothing else is
2450 executed between the evaluation of its "operands" (which may often
2451 be evaluated in arbitrary order). Hence if the operands themselves
2452 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2453 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2454 unset means assuming isochronic (or instantaneous) tree equivalence.
2455 Unless comparing arbitrary expression trees, such as from different
2456 statements, this flag can usually be left unset.
2457
2458 If OEP_PURE_SAME is set, then pure functions with identical arguments
2459 are considered the same. It is used when the caller has other ways
2460 to ensure that global memory is unchanged in between. */
2461
2462 int
2463 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2464 {
2465 /* If either is ERROR_MARK, they aren't equal. */
2466 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2467 || TREE_TYPE (arg0) == error_mark_node
2468 || TREE_TYPE (arg1) == error_mark_node)
2469 return 0;
2470
2471 /* Similar, if either does not have a type (like a released SSA name),
2472 they aren't equal. */
2473 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2474 return 0;
2475
2476 /* Check equality of integer constants before bailing out due to
2477 precision differences. */
2478 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2479 return tree_int_cst_equal (arg0, arg1);
2480
2481 /* If the two types don't have the same signedness, then we can't consider
2482 them equal. We must check this before the STRIP_NOPS calls
2483 because they may change the signedness of the arguments. As pointers
2484 strictly don't have a signedness, require either two pointers or
2485 two non-pointers as well. */
2486 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2487 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2488 return 0;
2489
2490 /* We cannot consider pointers to different address space equal. */
2491 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2492 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2493 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2494 return 0;
2495
2496 /* If the two types don't have the same precision, then it is not safe
2497 to strip NOPs. */
2498 if (element_precision (TREE_TYPE (arg0))
2499 != element_precision (TREE_TYPE (arg1)))
2500 return 0;
2501
2502 STRIP_NOPS (arg0);
2503 STRIP_NOPS (arg1);
2504
2505 /* In case both args are comparisons but with different comparison
2506 code, try to swap the comparison operands of one arg to produce
2507 a match and compare that variant. */
2508 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2509 && COMPARISON_CLASS_P (arg0)
2510 && COMPARISON_CLASS_P (arg1))
2511 {
2512 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2513
2514 if (TREE_CODE (arg0) == swap_code)
2515 return operand_equal_p (TREE_OPERAND (arg0, 0),
2516 TREE_OPERAND (arg1, 1), flags)
2517 && operand_equal_p (TREE_OPERAND (arg0, 1),
2518 TREE_OPERAND (arg1, 0), flags);
2519 }
2520
2521 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2522 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2523 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2524 return 0;
2525
2526 /* This is needed for conversions and for COMPONENT_REF.
2527 Might as well play it safe and always test this. */
2528 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2529 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2530 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2531 return 0;
2532
2533 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2534 We don't care about side effects in that case because the SAVE_EXPR
2535 takes care of that for us. In all other cases, two expressions are
2536 equal if they have no side effects. If we have two identical
2537 expressions with side effects that should be treated the same due
2538 to the only side effects being identical SAVE_EXPR's, that will
2539 be detected in the recursive calls below.
2540 If we are taking an invariant address of two identical objects
2541 they are necessarily equal as well. */
2542 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2543 && (TREE_CODE (arg0) == SAVE_EXPR
2544 || (flags & OEP_CONSTANT_ADDRESS_OF)
2545 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2546 return 1;
2547
2548 /* Next handle constant cases, those for which we can return 1 even
2549 if ONLY_CONST is set. */
2550 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2551 switch (TREE_CODE (arg0))
2552 {
2553 case INTEGER_CST:
2554 return tree_int_cst_equal (arg0, arg1);
2555
2556 case FIXED_CST:
2557 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2558 TREE_FIXED_CST (arg1));
2559
2560 case REAL_CST:
2561 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2562 TREE_REAL_CST (arg1)))
2563 return 1;
2564
2565
2566 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2567 {
2568 /* If we do not distinguish between signed and unsigned zero,
2569 consider them equal. */
2570 if (real_zerop (arg0) && real_zerop (arg1))
2571 return 1;
2572 }
2573 return 0;
2574
2575 case VECTOR_CST:
2576 {
2577 unsigned i;
2578
2579 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2580 return 0;
2581
2582 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2583 {
2584 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2585 VECTOR_CST_ELT (arg1, i), flags))
2586 return 0;
2587 }
2588 return 1;
2589 }
2590
2591 case COMPLEX_CST:
2592 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2593 flags)
2594 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2595 flags));
2596
2597 case STRING_CST:
2598 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2599 && ! memcmp (TREE_STRING_POINTER (arg0),
2600 TREE_STRING_POINTER (arg1),
2601 TREE_STRING_LENGTH (arg0)));
2602
2603 case ADDR_EXPR:
2604 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2605 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2606 ? OEP_CONSTANT_ADDRESS_OF : 0);
2607 default:
2608 break;
2609 }
2610
2611 if (flags & OEP_ONLY_CONST)
2612 return 0;
2613
2614 /* Define macros to test an operand from arg0 and arg1 for equality and a
2615 variant that allows null and views null as being different from any
2616 non-null value. In the latter case, if either is null, then both
2617 must be; otherwise, do the normal comparison. */
2618 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2619 TREE_OPERAND (arg1, N), flags)
2620
2621 #define OP_SAME_WITH_NULL(N) \
2622 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2623 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2624
2625 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2626 {
2627 case tcc_unary:
2628 /* Two conversions are equal only if signedness and modes match. */
2629 switch (TREE_CODE (arg0))
2630 {
2631 CASE_CONVERT:
2632 case FIX_TRUNC_EXPR:
2633 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2634 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2635 return 0;
2636 break;
2637 default:
2638 break;
2639 }
2640
2641 return OP_SAME (0);
2642
2643
2644 case tcc_comparison:
2645 case tcc_binary:
2646 if (OP_SAME (0) && OP_SAME (1))
2647 return 1;
2648
2649 /* For commutative ops, allow the other order. */
2650 return (commutative_tree_code (TREE_CODE (arg0))
2651 && operand_equal_p (TREE_OPERAND (arg0, 0),
2652 TREE_OPERAND (arg1, 1), flags)
2653 && operand_equal_p (TREE_OPERAND (arg0, 1),
2654 TREE_OPERAND (arg1, 0), flags));
2655
2656 case tcc_reference:
2657 /* If either of the pointer (or reference) expressions we are
2658 dereferencing contain a side effect, these cannot be equal,
2659 but their addresses can be. */
2660 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2661 && (TREE_SIDE_EFFECTS (arg0)
2662 || TREE_SIDE_EFFECTS (arg1)))
2663 return 0;
2664
2665 switch (TREE_CODE (arg0))
2666 {
2667 case INDIRECT_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 return OP_SAME (0);
2670
2671 case REALPART_EXPR:
2672 case IMAGPART_EXPR:
2673 return OP_SAME (0);
2674
2675 case TARGET_MEM_REF:
2676 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2677 /* Require equal extra operands and then fall through to MEM_REF
2678 handling of the two common operands. */
2679 if (!OP_SAME_WITH_NULL (2)
2680 || !OP_SAME_WITH_NULL (3)
2681 || !OP_SAME_WITH_NULL (4))
2682 return 0;
2683 /* Fallthru. */
2684 case MEM_REF:
2685 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2686 /* Require equal access sizes, and similar pointer types.
2687 We can have incomplete types for array references of
2688 variable-sized arrays from the Fortran frontend
2689 though. Also verify the types are compatible. */
2690 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2691 || (TYPE_SIZE (TREE_TYPE (arg0))
2692 && TYPE_SIZE (TREE_TYPE (arg1))
2693 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2694 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2695 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2696 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2697 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2698 && OP_SAME (0) && OP_SAME (1));
2699
2700 case ARRAY_REF:
2701 case ARRAY_RANGE_REF:
2702 /* Operands 2 and 3 may be null.
2703 Compare the array index by value first if it is constant, as we
2704 may have different types but the same value here. */
2705 if (!OP_SAME (0))
2706 return 0;
2707 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2708 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2709 TREE_OPERAND (arg1, 1))
2710 || OP_SAME (1))
2711 && OP_SAME_WITH_NULL (2)
2712 && OP_SAME_WITH_NULL (3));
2713
2714 case COMPONENT_REF:
2715 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2716 may be NULL when we're called to compare MEM_EXPRs. */
2717 if (!OP_SAME_WITH_NULL (0))
2718 return 0;
2719 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2720 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2721
2722 case BIT_FIELD_REF:
2723 if (!OP_SAME (0))
2724 return 0;
2725 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2726 return OP_SAME (1) && OP_SAME (2);
2727
2728 default:
2729 return 0;
2730 }
2731
2732 case tcc_expression:
2733 switch (TREE_CODE (arg0))
2734 {
2735 case ADDR_EXPR:
2736 case TRUTH_NOT_EXPR:
2737 return OP_SAME (0);
2738
2739 case TRUTH_ANDIF_EXPR:
2740 case TRUTH_ORIF_EXPR:
2741 return OP_SAME (0) && OP_SAME (1);
2742
2743 case FMA_EXPR:
2744 case WIDEN_MULT_PLUS_EXPR:
2745 case WIDEN_MULT_MINUS_EXPR:
2746 if (!OP_SAME (2))
2747 return 0;
2748 /* The multiplication operands are commutative. */
2749 /* FALLTHRU */
2750
2751 case TRUTH_AND_EXPR:
2752 case TRUTH_OR_EXPR:
2753 case TRUTH_XOR_EXPR:
2754 if (OP_SAME (0) && OP_SAME (1))
2755 return 1;
2756
2757 /* Otherwise take into account this is a commutative operation. */
2758 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2759 TREE_OPERAND (arg1, 1), flags)
2760 && operand_equal_p (TREE_OPERAND (arg0, 1),
2761 TREE_OPERAND (arg1, 0), flags));
2762
2763 case COND_EXPR:
2764 case VEC_COND_EXPR:
2765 case DOT_PROD_EXPR:
2766 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2767
2768 default:
2769 return 0;
2770 }
2771
2772 case tcc_vl_exp:
2773 switch (TREE_CODE (arg0))
2774 {
2775 case CALL_EXPR:
2776 /* If the CALL_EXPRs call different functions, then they
2777 clearly cannot be equal. */
2778 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2779 flags))
2780 return 0;
2781
2782 {
2783 unsigned int cef = call_expr_flags (arg0);
2784 if (flags & OEP_PURE_SAME)
2785 cef &= ECF_CONST | ECF_PURE;
2786 else
2787 cef &= ECF_CONST;
2788 if (!cef)
2789 return 0;
2790 }
2791
2792 /* Now see if all the arguments are the same. */
2793 {
2794 const_call_expr_arg_iterator iter0, iter1;
2795 const_tree a0, a1;
2796 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2797 a1 = first_const_call_expr_arg (arg1, &iter1);
2798 a0 && a1;
2799 a0 = next_const_call_expr_arg (&iter0),
2800 a1 = next_const_call_expr_arg (&iter1))
2801 if (! operand_equal_p (a0, a1, flags))
2802 return 0;
2803
2804 /* If we get here and both argument lists are exhausted
2805 then the CALL_EXPRs are equal. */
2806 return ! (a0 || a1);
2807 }
2808 default:
2809 return 0;
2810 }
2811
2812 case tcc_declaration:
2813 /* Consider __builtin_sqrt equal to sqrt. */
2814 return (TREE_CODE (arg0) == FUNCTION_DECL
2815 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2816 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2817 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2818
2819 default:
2820 return 0;
2821 }
2822
2823 #undef OP_SAME
2824 #undef OP_SAME_WITH_NULL
2825 }
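
/* Example (a sketch): operand_equal_p (a + b, b + a, 0) returns 1
   through the tcc_binary commutativity check above, whereas two
   syntactically identical calls of a non-const function compare
   unequal: expressions with side effects are only considered equal
   when they are the very same SAVE_EXPR node.  */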
2826 \f
2827 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2828 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2829
2830 When in doubt, return 0. */
2831
2832 static int
2833 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2834 {
2835 int unsignedp1, unsignedpo;
2836 tree primarg0, primarg1, primother;
2837 unsigned int correct_width;
2838
2839 if (operand_equal_p (arg0, arg1, 0))
2840 return 1;
2841
2842 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2843 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2844 return 0;
2845
2846 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2847 and see if the inner values are the same. This removes any
2848 signedness comparison, which doesn't matter here. */
2849 primarg0 = arg0, primarg1 = arg1;
2850 STRIP_NOPS (primarg0);
2851 STRIP_NOPS (primarg1);
2852 if (operand_equal_p (primarg0, primarg1, 0))
2853 return 1;
2854
2855 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2856 actual comparison operand, ARG0.
2857
2858 First throw away any conversions to wider types
2859 already present in the operands. */
2860
2861 primarg1 = get_narrower (arg1, &unsignedp1);
2862 primother = get_narrower (other, &unsignedpo);
2863
2864 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2865 if (unsignedp1 == unsignedpo
2866 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2867 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2868 {
2869 tree type = TREE_TYPE (arg0);
2870
2871 /* Make sure shorter operand is extended the right way
2872 to match the longer operand. */
2873 primarg1 = fold_convert (signed_or_unsigned_type_for
2874 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2875
2876 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2877 return 1;
2878 }
2879
2880 return 0;
2881 }
2882 \f
2883 /* See if ARG is an expression that is either a comparison or is performing
2884 arithmetic on comparisons. The comparisons must only be comparing
2885 two different values, which will be stored in *CVAL1 and *CVAL2; if
2886 they are nonzero it means that some operands have already been found.
2887 No variables may be used anywhere else in the expression except in the
2888 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2889 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2890
2891 If this is true, return 1. Otherwise, return zero. */
2892
2893 static int
2894 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2895 {
2896 enum tree_code code = TREE_CODE (arg);
2897 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2898
2899 /* We can handle some of the tcc_expression cases here. */
2900 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2901 tclass = tcc_unary;
2902 else if (tclass == tcc_expression
2903 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2904 || code == COMPOUND_EXPR))
2905 tclass = tcc_binary;
2906
2907 else if (tclass == tcc_expression && code == SAVE_EXPR
2908 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2909 {
2910 /* If we've already found a CVAL1 or CVAL2, this expression is
2911 two complex to handle. */
2912 if (*cval1 || *cval2)
2913 return 0;
2914
2915 tclass = tcc_unary;
2916 *save_p = 1;
2917 }
2918
2919 switch (tclass)
2920 {
2921 case tcc_unary:
2922 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2923
2924 case tcc_binary:
2925 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2926 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2927 cval1, cval2, save_p));
2928
2929 case tcc_constant:
2930 return 1;
2931
2932 case tcc_expression:
2933 if (code == COND_EXPR)
2934 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2935 cval1, cval2, save_p)
2936 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2937 cval1, cval2, save_p)
2938 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2939 cval1, cval2, save_p));
2940 return 0;
2941
2942 case tcc_comparison:
2943 /* First see if we can handle the first operand, then the second. For
2944 the second operand, we know *CVAL1 can't be zero. It must be that
2945 one side of the comparison is each of the values; test for the
2946 case where this isn't true by failing if the two operands
2947 are the same. */
2948
2949 if (operand_equal_p (TREE_OPERAND (arg, 0),
2950 TREE_OPERAND (arg, 1), 0))
2951 return 0;
2952
2953 if (*cval1 == 0)
2954 *cval1 = TREE_OPERAND (arg, 0);
2955 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2956 ;
2957 else if (*cval2 == 0)
2958 *cval2 = TREE_OPERAND (arg, 0);
2959 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2960 ;
2961 else
2962 return 0;
2963
2964 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2965 ;
2966 else if (*cval2 == 0)
2967 *cval2 = TREE_OPERAND (arg, 1);
2968 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2969 ;
2970 else
2971 return 0;
2972
2973 return 1;
2974
2975 default:
2976 return 0;
2977 }
2978 }
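
/* Example (a sketch): for ARG == (x < y) && (y == x), the walk above
   records *CVAL1 = x and *CVAL2 = y at the first comparison and then
   accepts the second, which mentions only those two values; it
   returns 0 for (x < y) && (y < z), which needs a third value.  */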
2979 \f
2980 /* ARG is a tree that is known to contain just arithmetic operations and
2981 comparisons. Evaluate the operations in the tree substituting NEW0 for
2982 any occurrence of OLD0 as an operand of a comparison and likewise for
2983 NEW1 and OLD1. */
2984
2985 static tree
2986 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2987 tree old1, tree new1)
2988 {
2989 tree type = TREE_TYPE (arg);
2990 enum tree_code code = TREE_CODE (arg);
2991 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2992
2993 /* We can handle some of the tcc_expression cases here. */
2994 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2995 tclass = tcc_unary;
2996 else if (tclass == tcc_expression
2997 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2998 tclass = tcc_binary;
2999
3000 switch (tclass)
3001 {
3002 case tcc_unary:
3003 return fold_build1_loc (loc, code, type,
3004 eval_subst (loc, TREE_OPERAND (arg, 0),
3005 old0, new0, old1, new1));
3006
3007 case tcc_binary:
3008 return fold_build2_loc (loc, code, type,
3009 eval_subst (loc, TREE_OPERAND (arg, 0),
3010 old0, new0, old1, new1),
3011 eval_subst (loc, TREE_OPERAND (arg, 1),
3012 old0, new0, old1, new1));
3013
3014 case tcc_expression:
3015 switch (code)
3016 {
3017 case SAVE_EXPR:
3018 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3019 old1, new1);
3020
3021 case COMPOUND_EXPR:
3022 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3023 old1, new1);
3024
3025 case COND_EXPR:
3026 return fold_build3_loc (loc, code, type,
3027 eval_subst (loc, TREE_OPERAND (arg, 0),
3028 old0, new0, old1, new1),
3029 eval_subst (loc, TREE_OPERAND (arg, 1),
3030 old0, new0, old1, new1),
3031 eval_subst (loc, TREE_OPERAND (arg, 2),
3032 old0, new0, old1, new1));
3033 default:
3034 break;
3035 }
3036 /* Fall through - ??? */
3037
3038 case tcc_comparison:
3039 {
3040 tree arg0 = TREE_OPERAND (arg, 0);
3041 tree arg1 = TREE_OPERAND (arg, 1);
3042
3043 /* We need to check both for exact equality and tree equality. The
3044 former will be true if the operand has a side-effect. In that
3045 case, we know the operand occurred exactly once. */
3046
3047 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3048 arg0 = new0;
3049 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3050 arg0 = new1;
3051
3052 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3053 arg1 = new0;
3054 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3055 arg1 = new1;
3056
3057 return fold_build2_loc (loc, code, type, arg0, arg1);
3058 }
3059
3060 default:
3061 return arg;
3062 }
3063 }
3064 \f
3065 /* Return a tree for the case when the result of an expression is RESULT
3066 converted to TYPE and OMITTED was previously an operand of the expression
3067 but is now not needed (e.g., we folded OMITTED * 0).
3068
3069 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3070 the conversion of RESULT to TYPE. */
3071
3072 tree
3073 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3074 {
3075 tree t = fold_convert_loc (loc, type, result);
3076
3077 /* If the resulting operand is an empty statement, just return the omitted
3078 statement cast to void. */
3079 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3080 return build1_loc (loc, NOP_EXPR, void_type_node,
3081 fold_ignored_result (omitted));
3082
3083 if (TREE_SIDE_EFFECTS (omitted))
3084 return build2_loc (loc, COMPOUND_EXPR, type,
3085 fold_ignored_result (omitted), t);
3086
3087 return non_lvalue_loc (loc, t);
3088 }
3089
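/* Example (a sketch): folding f () * 0 may not simply drop the call, so
   a caller uses omit_one_operand_loc (loc, type, integer_zero_node,
   call) and gets back COMPOUND_EXPR <f (), 0>, which still evaluates
   the side effects; with a side-effect-free OMITTED the result is just
   the converted RESULT.  */
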
3090 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3091
3092 static tree
3093 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3094 tree omitted)
3095 {
3096 tree t = fold_convert_loc (loc, type, result);
3097
3098 /* If the resulting operand is an empty statement, just return the omitted
3099 statement cast to void. */
3100 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3101 return build1_loc (loc, NOP_EXPR, void_type_node,
3102 fold_ignored_result (omitted));
3103
3104 if (TREE_SIDE_EFFECTS (omitted))
3105 return build2_loc (loc, COMPOUND_EXPR, type,
3106 fold_ignored_result (omitted), t);
3107
3108 return pedantic_non_lvalue_loc (loc, t);
3109 }
3110
3111 /* Return a tree for the case when the result of an expression is RESULT
3112 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3113 of the expression but are now not needed.
3114
3115 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3116 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3117 evaluated before OMITTED2. Otherwise, if neither has side effects,
3118 just do the conversion of RESULT to TYPE. */
3119
3120 tree
3121 omit_two_operands_loc (location_t loc, tree type, tree result,
3122 tree omitted1, tree omitted2)
3123 {
3124 tree t = fold_convert_loc (loc, type, result);
3125
3126 if (TREE_SIDE_EFFECTS (omitted2))
3127 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3128 if (TREE_SIDE_EFFECTS (omitted1))
3129 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3130
3131 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3132 }
3133
3134 \f
3135 /* Return a simplified tree node for the truth-negation of ARG. This
3136 never alters ARG itself. We assume that ARG is an operation that
3137 returns a truth value (0 or 1).
3138
3139 FIXME: one would think we would fold the result, but it causes
3140 problems with the dominator optimizer. */
3141
3142 static tree
3143 fold_truth_not_expr (location_t loc, tree arg)
3144 {
3145 tree type = TREE_TYPE (arg);
3146 enum tree_code code = TREE_CODE (arg);
3147 location_t loc1, loc2;
3148
3149 /* If this is a comparison, we can simply invert it, except for
3150 floating-point non-equality comparisons, in which case we just
3151 enclose a TRUTH_NOT_EXPR around what we have. */
3152
3153 if (TREE_CODE_CLASS (code) == tcc_comparison)
3154 {
3155 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3156 if (FLOAT_TYPE_P (op_type)
3157 && flag_trapping_math
3158 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3159 && code != NE_EXPR && code != EQ_EXPR)
3160 return NULL_TREE;
3161
3162 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3163 if (code == ERROR_MARK)
3164 return NULL_TREE;
3165
3166 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3167 TREE_OPERAND (arg, 1));
3168 }
3169
3170 switch (code)
3171 {
3172 case INTEGER_CST:
3173 return constant_boolean_node (integer_zerop (arg), type);
3174
3175 case TRUTH_AND_EXPR:
3176 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3177 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3178 return build2_loc (loc, TRUTH_OR_EXPR, type,
3179 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3180 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3181
3182 case TRUTH_OR_EXPR:
3183 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3184 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3185 return build2_loc (loc, TRUTH_AND_EXPR, type,
3186 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3187 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3188
3189 case TRUTH_XOR_EXPR:
3190 /* Here we can invert either operand. We invert the first operand
3191 unless the second operand is a TRUTH_NOT_EXPR in which case our
3192 result is the XOR of the first operand with the inside of the
3193 negation of the second operand. */
3194
3195 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3196 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3197 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3198 else
3199 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3200 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3201 TREE_OPERAND (arg, 1));
3202
3203 case TRUTH_ANDIF_EXPR:
3204 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3205 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3206 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3207 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3208 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3209
3210 case TRUTH_ORIF_EXPR:
3211 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3212 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3213 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3214 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3215 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3216
3217 case TRUTH_NOT_EXPR:
3218 return TREE_OPERAND (arg, 0);
3219
3220 case COND_EXPR:
3221 {
3222 tree arg1 = TREE_OPERAND (arg, 1);
3223 tree arg2 = TREE_OPERAND (arg, 2);
3224
3225 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3226 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3227
3228 /* A COND_EXPR may have a throw as one operand, which
3229 then has void type. Just leave void operands
3230 as they are. */
3231 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3232 VOID_TYPE_P (TREE_TYPE (arg1))
3233 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3234 VOID_TYPE_P (TREE_TYPE (arg2))
3235 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3236 }
3237
3238 case COMPOUND_EXPR:
3239 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3240 return build2_loc (loc, COMPOUND_EXPR, type,
3241 TREE_OPERAND (arg, 0),
3242 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3243
3244 case NON_LVALUE_EXPR:
3245 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3246 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3247
3248 CASE_CONVERT:
3249 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3250 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3251
3252 /* ... fall through ... */
3253
3254 case FLOAT_EXPR:
3255 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3256 return build1_loc (loc, TREE_CODE (arg), type,
3257 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3258
3259 case BIT_AND_EXPR:
3260 if (!integer_onep (TREE_OPERAND (arg, 1)))
3261 return NULL_TREE;
3262 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3263
3264 case SAVE_EXPR:
3265 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3266
3267 case CLEANUP_POINT_EXPR:
3268 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3269 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3270 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3271
3272 default:
3273 return NULL_TREE;
3274 }
3275 }
3276
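/* Example (a sketch): fold_truth_not_expr applies De Morgan's laws, so
   !(a && b) becomes !a || !b through the TRUTH_AND_EXPR case, and
   !(a < b) on integers becomes a >= b; for a floating-point inequality
   under -ftrapping-math it returns NULL_TREE instead, and the caller
   keeps an explicit TRUTH_NOT_EXPR around the operand.  */
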
3277 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3278 assume that ARG is an operation that returns a truth value (0 or 1
3279 for scalars, 0 or -1 for vectors). Return the folded expression if
3280 folding is successful. Otherwise, return NULL_TREE. */
3281
3282 static tree
3283 fold_invert_truthvalue (location_t loc, tree arg)
3284 {
3285 tree type = TREE_TYPE (arg);
3286 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3287 ? BIT_NOT_EXPR
3288 : TRUTH_NOT_EXPR,
3289 type, arg);
3290 }
3291
3292 /* Return a simplified tree node for the truth-negation of ARG. This
3293 never alters ARG itself. We assume that ARG is an operation that
3294 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3295
3296 tree
3297 invert_truthvalue_loc (location_t loc, tree arg)
3298 {
3299 if (TREE_CODE (arg) == ERROR_MARK)
3300 return arg;
3301
3302 tree type = TREE_TYPE (arg);
3303 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3304 ? BIT_NOT_EXPR
3305 : TRUTH_NOT_EXPR,
3306 type, arg);
3307 }
3308
3309 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3310 operands are another bit-wise operation with a common input. If so,
3311 distribute the bit operations to save an operation and possibly two if
3312 constants are involved. For example, convert
3313 (A | B) & (A | C) into A | (B & C)
3314 Further simplification will occur if B and C are constants.
3315
3316 If this optimization cannot be done, 0 will be returned. */
3317
3318 static tree
3319 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3320 tree arg0, tree arg1)
3321 {
3322 tree common;
3323 tree left, right;
3324
3325 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3326 || TREE_CODE (arg0) == code
3327 || (TREE_CODE (arg0) != BIT_AND_EXPR
3328 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3329 return 0;
3330
3331 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3332 {
3333 common = TREE_OPERAND (arg0, 0);
3334 left = TREE_OPERAND (arg0, 1);
3335 right = TREE_OPERAND (arg1, 1);
3336 }
3337 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3338 {
3339 common = TREE_OPERAND (arg0, 0);
3340 left = TREE_OPERAND (arg0, 1);
3341 right = TREE_OPERAND (arg1, 0);
3342 }
3343 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3344 {
3345 common = TREE_OPERAND (arg0, 1);
3346 left = TREE_OPERAND (arg0, 0);
3347 right = TREE_OPERAND (arg1, 1);
3348 }
3349 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3350 {
3351 common = TREE_OPERAND (arg0, 1);
3352 left = TREE_OPERAND (arg0, 0);
3353 right = TREE_OPERAND (arg1, 0);
3354 }
3355 else
3356 return 0;
3357
3358 common = fold_convert_loc (loc, type, common);
3359 left = fold_convert_loc (loc, type, left);
3360 right = fold_convert_loc (loc, type, right);
3361 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3362 fold_build2_loc (loc, code, type, left, right));
3363 }
3364
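/* Example (a sketch): with constants the transformation above pays off
   twice: (x | 4) & (x | 8) distributes to x | (4 & 8), the inner fold
   reduces 4 & 8 to 0, and x | 0 then folds to plain x.  */
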
3365 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs or MULT_EXPRs, simplify a
3366 binary operation with code CODE. This optimization is unsafe. */
3367 static tree
3368 distribute_real_division (location_t loc, enum tree_code code, tree type,
3369 tree arg0, tree arg1)
3370 {
3371 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3372 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3373
3374 /* (A / C) +- (B / C) -> (A +- B) / C. */
3375 if (mul0 == mul1
3376 && operand_equal_p (TREE_OPERAND (arg0, 1),
3377 TREE_OPERAND (arg1, 1), 0))
3378 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3379 fold_build2_loc (loc, code, type,
3380 TREE_OPERAND (arg0, 0),
3381 TREE_OPERAND (arg1, 0)),
3382 TREE_OPERAND (arg0, 1));
3383
3384 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3385 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3386 TREE_OPERAND (arg1, 0), 0)
3387 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3388 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3389 {
3390 REAL_VALUE_TYPE r0, r1;
3391 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3392 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3393 if (!mul0)
3394 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3395 if (!mul1)
3396 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3397 real_arithmetic (&r0, code, &r0, &r1);
3398 return fold_build2_loc (loc, MULT_EXPR, type,
3399 TREE_OPERAND (arg0, 0),
3400 build_real (type, r0));
3401 }
3402
3403 return NULL_TREE;
3404 }
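
/* Example (a sketch, assuming the unsafe-math guards in the callers):
   a / c + b / c becomes (a + b) / c by the first pattern above, and
   a / 4.0 + a / 4.0 becomes a * 0.5 by the second, the two reciprocals
   having been combined at compile time.  */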
3405 \f
3406 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3407 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3408
3409 static tree
3410 make_bit_field_ref (location_t loc, tree inner, tree type,
3411 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3412 {
3413 tree result, bftype;
3414
3415 if (bitpos == 0)
3416 {
3417 tree size = TYPE_SIZE (TREE_TYPE (inner));
3418 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3419 || POINTER_TYPE_P (TREE_TYPE (inner)))
3420 && host_integerp (size, 0)
3421 && tree_low_cst (size, 0) == bitsize)
3422 return fold_convert_loc (loc, type, inner);
3423 }
3424
3425 bftype = type;
3426 if (TYPE_PRECISION (bftype) != bitsize
3427 || TYPE_UNSIGNED (bftype) == !unsignedp)
3428 bftype = build_nonstandard_integer_type (bitsize, 0);
3429
3430 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3431 size_int (bitsize), bitsize_int (bitpos));
3432
3433 if (bftype != type)
3434 result = fold_convert_loc (loc, type, result);
3435
3436 return result;
3437 }
3438
3439 /* Optimize a bit-field compare.
3440
3441 There are two cases: First is a compare against a constant and the
3442 second is a comparison of two items where the fields are at the same
3443 bit position relative to the start of a chunk (byte, halfword, word)
3444 large enough to contain it. In these cases we can avoid the shift
3445 implicit in bitfield extractions.
3446
3447 For constants, we emit a compare of the shifted constant with the
3448 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3449 compared. For two fields at the same position, we do the ANDs with the
3450 similar mask and compare the result of the ANDs.
3451
3452 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3453 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3454 are the left and right operands of the comparison, respectively.
3455
3456 If the optimization described above can be done, we return the resulting
3457 tree. Otherwise we return zero. */
3458
3459 static tree
3460 optimize_bit_field_compare (location_t loc, enum tree_code code,
3461 tree compare_type, tree lhs, tree rhs)
3462 {
3463 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3464 tree type = TREE_TYPE (lhs);
3465 tree signed_type, unsigned_type;
3466 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3467 enum machine_mode lmode, rmode, nmode;
3468 int lunsignedp, runsignedp;
3469 int lvolatilep = 0, rvolatilep = 0;
3470 tree linner, rinner = NULL_TREE;
3471 tree mask;
3472 tree offset;
3473
3474 /* In the strict volatile bitfields case, doing code changes here may prevent
3475 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3476 if (flag_strict_volatile_bitfields > 0)
3477 return 0;
3478
3479 /* Get all the information about the extractions being done. If the bit size
3480 is the same as the size of the underlying object, we aren't doing an
3481 extraction at all and so can do nothing. We also don't want to
3482 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3483 then will no longer be able to replace it. */
3484 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3485 &lunsignedp, &lvolatilep, false);
3486 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3487 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3488 return 0;
3489
3490 if (!const_p)
3491 {
3492 /* If this is not a constant, we can only do something if bit positions,
3493 sizes, and signedness are the same. */
3494 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3495 &runsignedp, &rvolatilep, false);
3496
3497 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3498 || lunsignedp != runsignedp || offset != 0
3499 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3500 return 0;
3501 }
3502
3503 /* See if we can find a mode to refer to this field. We should be able to,
3504 but fail if we can't. */
3505 if (lvolatilep
3506 && GET_MODE_BITSIZE (lmode) > 0
3507 && flag_strict_volatile_bitfields > 0)
3508 nmode = lmode;
3509 else
3510 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3511 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3512 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3513 TYPE_ALIGN (TREE_TYPE (rinner))),
3514 word_mode, lvolatilep || rvolatilep);
3515 if (nmode == VOIDmode)
3516 return 0;
3517
3518 /* Set signed and unsigned types of the precision of this mode for the
3519 shifts below. */
3520 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3521 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3522
3523 /* Compute the bit position and size for the new reference and our offset
3524 within it. If the new reference is the same size as the original, we
3525 won't optimize anything, so return zero. */
3526 nbitsize = GET_MODE_BITSIZE (nmode);
3527 nbitpos = lbitpos & ~ (nbitsize - 1);
3528 lbitpos -= nbitpos;
3529 if (nbitsize == lbitsize)
3530 return 0;
3531
3532 if (BYTES_BIG_ENDIAN)
3533 lbitpos = nbitsize - lbitsize - lbitpos;
3534
3535 /* Make the mask to be used against the extracted field. */
3536 mask = build_int_cst_type (unsigned_type, -1);
3537 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3538 mask = const_binop (RSHIFT_EXPR, mask,
3539 size_int (nbitsize - lbitsize - lbitpos));
3540
3541 if (! const_p)
3542 /* If not comparing with a constant, just rework the comparison
3543 and return. */
3544 return fold_build2_loc (loc, code, compare_type,
3545 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3546 make_bit_field_ref (loc, linner,
3547 unsigned_type,
3548 nbitsize, nbitpos,
3549 1),
3550 mask),
3551 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3552 make_bit_field_ref (loc, rinner,
3553 unsigned_type,
3554 nbitsize, nbitpos,
3555 1),
3556 mask));
3557
3558 /* Otherwise, we are handling the constant case. See if the constant is too
3559 big for the field. Warn and return a tree for 0 (false) if so. We do
3560 this not only for its own sake, but to avoid having to test for this
3561 error case below. If we didn't, we might generate wrong code.
3562
3563 For unsigned fields, the constant shifted right by the field length should
3564 be all zero. For signed fields, the high-order bits should agree with
3565 the sign bit. */
3566
3567 if (lunsignedp)
3568 {
3569 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3570 fold_convert_loc (loc,
3571 unsigned_type, rhs),
3572 size_int (lbitsize))))
3573 {
3574 warning (0, "comparison is always %d due to width of bit-field",
3575 code == NE_EXPR);
3576 return constant_boolean_node (code == NE_EXPR, compare_type);
3577 }
3578 }
3579 else
3580 {
3581 tree tem = const_binop (RSHIFT_EXPR,
3582 fold_convert_loc (loc, signed_type, rhs),
3583 size_int (lbitsize - 1));
3584 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3585 {
3586 warning (0, "comparison is always %d due to width of bit-field",
3587 code == NE_EXPR);
3588 return constant_boolean_node (code == NE_EXPR, compare_type);
3589 }
3590 }
3591
3592 /* Single-bit compares should always be against zero. */
3593 if (lbitsize == 1 && ! integer_zerop (rhs))
3594 {
3595 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3596 rhs = build_int_cst (type, 0);
3597 }
3598
3599   /* Make a new bitfield reference, shift the constant over the
3600      appropriate number of bits and mask it with the computed mask
3601      (in case this was a signed field); keep any volatility on the new one.  */
3602 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3603 if (lvolatilep)
3604 {
3605 TREE_SIDE_EFFECTS (lhs) = 1;
3606 TREE_THIS_VOLATILE (lhs) = 1;
3607 }
3608
3609 rhs = const_binop (BIT_AND_EXPR,
3610 const_binop (LSHIFT_EXPR,
3611 fold_convert_loc (loc, unsigned_type, rhs),
3612 size_int (lbitpos)),
3613 mask);
3614
3615 lhs = build2_loc (loc, code, compare_type,
3616 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3617 return lhs;
3618 }
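/* A sketch of the rewrite this function performs, assuming a target
   that packs both fields of the hypothetical struct below into one
   byte (the exact layout and shifts are target-dependent):  */
#if 0
struct s { unsigned int lo : 4; unsigned int hi : 4; };

static int
cmp_hi (struct s x)
{
  /* "x.hi == 3" is folded into a single load of the containing byte,
     masked and compared against the shifted, masked constant, so that
     neighboring bit-field tests can later be merged.  */
  return ((*(unsigned char *) &x) & 0xf0) == (3 << 4);
}
#endif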
3619 \f
3620 /* Subroutine for fold_truth_andor_1: decode a field reference.
3621
3622 If EXP is a comparison reference, we return the innermost reference.
3623
3624 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3625 set to the starting bit number.
3626
3627 If the innermost field can be completely contained in a mode-sized
3628 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3629
3630    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3631 otherwise it is not changed.
3632
3633 *PUNSIGNEDP is set to the signedness of the field.
3634
3635 *PMASK is set to the mask used. This is either contained in a
3636 BIT_AND_EXPR or derived from the width of the field.
3637
3638 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3639
3640 Return 0 if this is not a component reference or is one that we can't
3641 do anything with. */
3642
3643 static tree
3644 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3645 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3646 int *punsignedp, int *pvolatilep,
3647 tree *pmask, tree *pand_mask)
3648 {
3649 tree outer_type = 0;
3650 tree and_mask = 0;
3651 tree mask, inner, offset;
3652 tree unsigned_type;
3653 unsigned int precision;
3654
3655 /* All the optimizations using this function assume integer fields.
3656 There are problems with FP fields since the type_for_size call
3657 below can fail for, e.g., XFmode. */
3658 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3659 return 0;
3660
3661 /* We are interested in the bare arrangement of bits, so strip everything
3662 that doesn't affect the machine mode. However, record the type of the
3663 outermost expression if it may matter below. */
3664 if (CONVERT_EXPR_P (exp)
3665 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3666 outer_type = TREE_TYPE (exp);
3667 STRIP_NOPS (exp);
3668
3669 if (TREE_CODE (exp) == BIT_AND_EXPR)
3670 {
3671 and_mask = TREE_OPERAND (exp, 1);
3672 exp = TREE_OPERAND (exp, 0);
3673 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3674 if (TREE_CODE (and_mask) != INTEGER_CST)
3675 return 0;
3676 }
3677
3678 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3679 punsignedp, pvolatilep, false);
3680 if ((inner == exp && and_mask == 0)
3681 || *pbitsize < 0 || offset != 0
3682 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3683 return 0;
3684
3685 /* If the number of bits in the reference is the same as the bitsize of
3686 the outer type, then the outer type gives the signedness. Otherwise
3687 (in case of a small bitfield) the signedness is unchanged. */
3688 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3689 *punsignedp = TYPE_UNSIGNED (outer_type);
3690
3691 /* Compute the mask to access the bitfield. */
3692 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3693 precision = TYPE_PRECISION (unsigned_type);
3694
3695 mask = build_int_cst_type (unsigned_type, -1);
3696
3697 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3698 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3699
3700 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3701 if (and_mask != 0)
3702 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3703 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3704
3705 *pmask = mask;
3706 *pand_mask = and_mask;
3707 return inner;
3708 }
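/* A worked instance (hypothetical 32-bit unsigned X): decoding
   EXP = "x & 0x38" returns X with *PBITSIZE == 32, *PBITPOS == 0,
   *PAND_MASK == 0x38, and *PMASK == 0x38 -- the width-derived all-ones
   mask ANDed with the BIT_AND_EXPR mask.  */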
3709
3710 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3711 bit positions. */
3712
3713 static int
3714 all_ones_mask_p (const_tree mask, int size)
3715 {
3716 tree type = TREE_TYPE (mask);
3717 unsigned int precision = TYPE_PRECISION (type);
3718 tree tmask;
3719
3720 tmask = build_int_cst_type (signed_type_for (type), -1);
3721
3722 return
3723 tree_int_cst_equal (mask,
3724 const_binop (RSHIFT_EXPR,
3725 const_binop (LSHIFT_EXPR, tmask,
3726 size_int (precision - size)),
3727 size_int (precision - size)));
3728 }
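/* For example, 0x3f is such a mask for SIZE == 6, while 0x5f is not:
   shifting an all-ones value left and then back right by
   PRECISION - SIZE bits reconstructs exactly the low SIZE ones.  */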
3729
3730 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3731 represents the sign bit of EXP's type. If EXP represents a sign
3732 or zero extension, also test VAL against the unextended type.
3733 The return value is the (sub)expression whose sign bit is VAL,
3734 or NULL_TREE otherwise. */
3735
3736 static tree
3737 sign_bit_p (tree exp, const_tree val)
3738 {
3739 unsigned HOST_WIDE_INT mask_lo, lo;
3740 HOST_WIDE_INT mask_hi, hi;
3741 int width;
3742 tree t;
3743
3744 /* Tree EXP must have an integral type. */
3745 t = TREE_TYPE (exp);
3746 if (! INTEGRAL_TYPE_P (t))
3747 return NULL_TREE;
3748
3749 /* Tree VAL must be an integer constant. */
3750 if (TREE_CODE (val) != INTEGER_CST
3751 || TREE_OVERFLOW (val))
3752 return NULL_TREE;
3753
3754 width = TYPE_PRECISION (t);
3755 if (width > HOST_BITS_PER_WIDE_INT)
3756 {
3757 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3758 lo = 0;
3759
3760 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3761 mask_lo = -1;
3762 }
3763 else
3764 {
3765 hi = 0;
3766 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3767
3768 mask_hi = 0;
3769 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3770 }
3771
3772 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3773 treat VAL as if it were unsigned. */
3774 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3775 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3776 return exp;
3777
3778 /* Handle extension from a narrower type. */
3779 if (TREE_CODE (exp) == NOP_EXPR
3780 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3781 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3782
3783 return NULL_TREE;
3784 }
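/* E.g. for a 32-bit signed EXP, VAL == -2147483648 (bit pattern
   0x80000000) is its sign bit; the MASK_HI/MASK_LO masking lets the
   sign-extended and zero-extended encodings of that pattern compare
   equal.  */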
3785
3786 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3787 to be evaluated unconditionally. */
3788
3789 static int
3790 simple_operand_p (const_tree exp)
3791 {
3792 /* Strip any conversions that don't change the machine mode. */
3793 STRIP_NOPS (exp);
3794
3795 return (CONSTANT_CLASS_P (exp)
3796 || TREE_CODE (exp) == SSA_NAME
3797 || (DECL_P (exp)
3798 && ! TREE_ADDRESSABLE (exp)
3799 && ! TREE_THIS_VOLATILE (exp)
3800 && ! DECL_NONLOCAL (exp)
3801 /* Don't regard global variables as simple. They may be
3802 allocated in ways unknown to the compiler (shared memory,
3803 #pragma weak, etc). */
3804 && ! TREE_PUBLIC (exp)
3805 && ! DECL_EXTERNAL (exp)
3806 /* Weakrefs are not safe to be read, since they can be NULL.
3807 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3808 have DECL_WEAK flag set. */
3809 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3810 /* Loading a static variable is unduly expensive, but global
3811 registers aren't expensive. */
3812 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3813 }
3814
3815 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3816 to be evaluated unconditionally.
3817    In addition to simple_operand_p, we assume that comparisons, conversions,
3818    and logic-not operations are simple if their operands are simple, too. */
3819
3820 static bool
3821 simple_operand_p_2 (tree exp)
3822 {
3823 enum tree_code code;
3824
3825 if (TREE_SIDE_EFFECTS (exp)
3826 || tree_could_trap_p (exp))
3827 return false;
3828
3829 while (CONVERT_EXPR_P (exp))
3830 exp = TREE_OPERAND (exp, 0);
3831
3832 code = TREE_CODE (exp);
3833
3834 if (TREE_CODE_CLASS (code) == tcc_comparison)
3835 return (simple_operand_p (TREE_OPERAND (exp, 0))
3836 && simple_operand_p (TREE_OPERAND (exp, 1)));
3837
3838 if (code == TRUTH_NOT_EXPR)
3839 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3840
3841 return simple_operand_p (exp);
3842 }
3843
3844 \f
3845 /* The following functions are subroutines to fold_range_test and allow it to
3846 try to change a logical combination of comparisons into a range test.
3847
3848 For example, both
3849 X == 2 || X == 3 || X == 4 || X == 5
3850 and
3851 X >= 2 && X <= 5
3852 are converted to
3853 (unsigned) (X - 2) <= 3
3854
3855 We describe each set of comparisons as being either inside or outside
3856 a range, using a variable named like IN_P, and then describe the
3857 range with a lower and upper bound. If one of the bounds is omitted,
3858 it represents either the highest or lowest value of the type.
3859
3860 In the comments below, we represent a range by two numbers in brackets
3861 preceded by a "+" to designate being inside that range, or a "-" to
3862 designate being outside that range, so the condition can be inverted by
3863 flipping the prefix. An omitted bound is represented by a "-". For
3864 example, "- [-, 10]" means being outside the range starting at the lowest
3865 possible value and ending at 10, in other words, being greater than 10.
3866 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3867 always false.
3868
3869 We set up things so that the missing bounds are handled in a consistent
3870 manner so neither a missing bound nor "true" and "false" need to be
3871 handled using a special case. */
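/* A quick sanity check of the canonical example above (a sketch using
   a hypothetical helper): the unsigned subtraction wraps values below
   2 around to very large numbers, so a single compare covers the whole
   test.  */
#if 0
static int
in_2_to_5 (unsigned int x)
{
  return x - 2u <= 3u;	/* same truth table as x >= 2 && x <= 5 */
}
#endif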
3872
3873 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3874 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3875 and UPPER1_P are nonzero if the respective argument is an upper bound
3876 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3877 must be specified for a comparison. ARG1 will be converted to ARG0's
3878 type if both are specified. */
3879
3880 static tree
3881 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3882 tree arg1, int upper1_p)
3883 {
3884 tree tem;
3885 int result;
3886 int sgn0, sgn1;
3887
3888 /* If neither arg represents infinity, do the normal operation.
3889 Else, if not a comparison, return infinity. Else handle the special
3890 comparison rules. Note that most of the cases below won't occur, but
3891 are handled for consistency. */
3892
3893 if (arg0 != 0 && arg1 != 0)
3894 {
3895 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3896 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3897 STRIP_NOPS (tem);
3898 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3899 }
3900
3901 if (TREE_CODE_CLASS (code) != tcc_comparison)
3902 return 0;
3903
3904 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3905      for neither.  In real maths, we cannot assume open-ended ranges are
3906      the same.  But, this is computer arithmetic, where numbers are finite.
3907      We can therefore represent any unbounded range by a value Z that is
3908      greater than any representable number.  This permits
3909 us to treat unbounded ranges as equal. */
3910 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3911 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3912 switch (code)
3913 {
3914 case EQ_EXPR:
3915 result = sgn0 == sgn1;
3916 break;
3917 case NE_EXPR:
3918 result = sgn0 != sgn1;
3919 break;
3920 case LT_EXPR:
3921 result = sgn0 < sgn1;
3922 break;
3923 case LE_EXPR:
3924 result = sgn0 <= sgn1;
3925 break;
3926 case GT_EXPR:
3927 result = sgn0 > sgn1;
3928 break;
3929 case GE_EXPR:
3930 result = sgn0 >= sgn1;
3931 break;
3932 default:
3933 gcc_unreachable ();
3934 }
3935
3936 return constant_boolean_node (result, type);
3937 }
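/* For instance, an omitted lower bound (ARG0 == 0, UPPER0_P == 0, so
   SGN0 == -1) compared with LT_EXPR against any finite ARG1 (SGN1 == 0)
   yields true: a missing low bound behaves like minus infinity.  */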
3938 \f
3939 /* Helper routine for make_range. Perform one step for it, return
3940 new expression if the loop should continue or NULL_TREE if it should
3941 stop. */
3942
3943 tree
3944 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3945 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3946 bool *strict_overflow_p)
3947 {
3948 tree arg0_type = TREE_TYPE (arg0);
3949 tree n_low, n_high, low = *p_low, high = *p_high;
3950 int in_p = *p_in_p, n_in_p;
3951
3952 switch (code)
3953 {
3954 case TRUTH_NOT_EXPR:
3955 /* We can only do something if the range is testing for zero. */
3956 if (low == NULL_TREE || high == NULL_TREE
3957 || ! integer_zerop (low) || ! integer_zerop (high))
3958 return NULL_TREE;
3959 *p_in_p = ! in_p;
3960 return arg0;
3961
3962 case EQ_EXPR: case NE_EXPR:
3963 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3964 /* We can only do something if the range is testing for zero
3965 and if the second operand is an integer constant. Note that
3966 saying something is "in" the range we make is done by
3967 complementing IN_P since it will set in the initial case of
3968 being not equal to zero; "out" is leaving it alone. */
3969 if (low == NULL_TREE || high == NULL_TREE
3970 || ! integer_zerop (low) || ! integer_zerop (high)
3971 || TREE_CODE (arg1) != INTEGER_CST)
3972 return NULL_TREE;
3973
3974 switch (code)
3975 {
3976 case NE_EXPR: /* - [c, c] */
3977 low = high = arg1;
3978 break;
3979 case EQ_EXPR: /* + [c, c] */
3980 in_p = ! in_p, low = high = arg1;
3981 break;
3982 case GT_EXPR: /* - [-, c] */
3983 low = 0, high = arg1;
3984 break;
3985 case GE_EXPR: /* + [c, -] */
3986 in_p = ! in_p, low = arg1, high = 0;
3987 break;
3988 case LT_EXPR: /* - [c, -] */
3989 low = arg1, high = 0;
3990 break;
3991 case LE_EXPR: /* + [-, c] */
3992 in_p = ! in_p, low = 0, high = arg1;
3993 break;
3994 default:
3995 gcc_unreachable ();
3996 }
3997
3998 /* If this is an unsigned comparison, we also know that EXP is
3999 greater than or equal to zero. We base the range tests we make
4000 on that fact, so we record it here so we can parse existing
4001 range tests. We test arg0_type since often the return type
4002 of, e.g. EQ_EXPR, is boolean. */
4003 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4004 {
4005 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4006 in_p, low, high, 1,
4007 build_int_cst (arg0_type, 0),
4008 NULL_TREE))
4009 return NULL_TREE;
4010
4011 in_p = n_in_p, low = n_low, high = n_high;
4012
4013 /* If the high bound is missing, but we have a nonzero low
4014 bound, reverse the range so it goes from zero to the low bound
4015 minus 1. */
4016 if (high == 0 && low && ! integer_zerop (low))
4017 {
4018 in_p = ! in_p;
4019 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4020 integer_one_node, 0);
4021 low = build_int_cst (arg0_type, 0);
4022 }
4023 }
4024
4025 *p_low = low;
4026 *p_high = high;
4027 *p_in_p = in_p;
4028 return arg0;
4029
4030 case NEGATE_EXPR:
4031 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4032 low and high are non-NULL, then normalize will DTRT. */
4033 if (!TYPE_UNSIGNED (arg0_type)
4034 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4035 {
4036 if (low == NULL_TREE)
4037 low = TYPE_MIN_VALUE (arg0_type);
4038 if (high == NULL_TREE)
4039 high = TYPE_MAX_VALUE (arg0_type);
4040 }
4041
4042 /* (-x) IN [a,b] -> x in [-b, -a] */
4043 n_low = range_binop (MINUS_EXPR, exp_type,
4044 build_int_cst (exp_type, 0),
4045 0, high, 1);
4046 n_high = range_binop (MINUS_EXPR, exp_type,
4047 build_int_cst (exp_type, 0),
4048 0, low, 0);
4049 if (n_high != 0 && TREE_OVERFLOW (n_high))
4050 return NULL_TREE;
4051 goto normalize;
4052
4053 case BIT_NOT_EXPR:
4054 /* ~ X -> -X - 1 */
4055 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4056 build_int_cst (exp_type, 1));
4057
4058 case PLUS_EXPR:
4059 case MINUS_EXPR:
4060 if (TREE_CODE (arg1) != INTEGER_CST)
4061 return NULL_TREE;
4062
4063 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4064 move a constant to the other side. */
4065 if (!TYPE_UNSIGNED (arg0_type)
4066 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4067 return NULL_TREE;
4068
4069 /* If EXP is signed, any overflow in the computation is undefined,
4070 so we don't worry about it so long as our computations on
4071 the bounds don't overflow. For unsigned, overflow is defined
4072 and this is exactly the right thing. */
4073 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4074 arg0_type, low, 0, arg1, 0);
4075 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4076 arg0_type, high, 1, arg1, 0);
4077 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4078 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4079 return NULL_TREE;
4080
4081 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4082 *strict_overflow_p = true;
4083
4084 normalize:
4085 /* Check for an unsigned range which has wrapped around the maximum
4086 value thus making n_high < n_low, and normalize it. */
4087 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4088 {
4089 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4090 integer_one_node, 0);
4091 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4092 integer_one_node, 0);
4093
4094 /* If the range is of the form +/- [ x+1, x ], we won't
4095 be able to normalize it. But then, it represents the
4096 whole range or the empty set, so make it
4097 +/- [ -, - ]. */
4098 if (tree_int_cst_equal (n_low, low)
4099 && tree_int_cst_equal (n_high, high))
4100 low = high = 0;
4101 else
4102 in_p = ! in_p;
4103 }
4104 else
4105 low = n_low, high = n_high;
4106
4107 *p_low = low;
4108 *p_high = high;
4109 *p_in_p = in_p;
4110 return arg0;
4111
4112 CASE_CONVERT:
4113 case NON_LVALUE_EXPR:
4114 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4115 return NULL_TREE;
4116
4117 if (! INTEGRAL_TYPE_P (arg0_type)
4118 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4119 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4120 return NULL_TREE;
4121
4122 n_low = low, n_high = high;
4123
4124 if (n_low != 0)
4125 n_low = fold_convert_loc (loc, arg0_type, n_low);
4126
4127 if (n_high != 0)
4128 n_high = fold_convert_loc (loc, arg0_type, n_high);
4129
4130       /* If we're converting arg0 from an unsigned type to exp's
4131 	 signed type, we will be doing the comparison as unsigned.
4132 The tests above have already verified that LOW and HIGH
4133 are both positive.
4134
4135 So we have to ensure that we will handle large unsigned
4136 values the same way that the current signed bounds treat
4137 negative values. */
4138
4139 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4140 {
4141 tree high_positive;
4142 tree equiv_type;
4143 /* For fixed-point modes, we need to pass the saturating flag
4144 as the 2nd parameter. */
4145 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4146 equiv_type
4147 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4148 TYPE_SATURATING (arg0_type));
4149 else
4150 equiv_type
4151 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4152
4153 /* A range without an upper bound is, naturally, unbounded.
4154 Since convert would have cropped a very large value, use
4155 the max value for the destination type. */
4156 high_positive
4157 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4158 : TYPE_MAX_VALUE (arg0_type);
4159
4160 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4161 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4162 fold_convert_loc (loc, arg0_type,
4163 high_positive),
4164 build_int_cst (arg0_type, 1));
4165
4166 /* If the low bound is specified, "and" the range with the
4167 range for which the original unsigned value will be
4168 positive. */
4169 if (low != 0)
4170 {
4171 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4172 1, fold_convert_loc (loc, arg0_type,
4173 integer_zero_node),
4174 high_positive))
4175 return NULL_TREE;
4176
4177 in_p = (n_in_p == in_p);
4178 }
4179 else
4180 {
4181 /* Otherwise, "or" the range with the range of the input
4182 that will be interpreted as negative. */
4183 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4184 1, fold_convert_loc (loc, arg0_type,
4185 integer_zero_node),
4186 high_positive))
4187 return NULL_TREE;
4188
4189 in_p = (in_p != n_in_p);
4190 }
4191 }
4192
4193 *p_low = n_low;
4194 *p_high = n_high;
4195 *p_in_p = in_p;
4196 return arg0;
4197
4198 default:
4199 return NULL_TREE;
4200 }
4201 }
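/* A worked instance of the normalization above, for unsigned char X:
   starting from + [0, 35] for X + 20, subtracting 20 gives the wrapped
   bounds [236, 15]; since n_high < n_low this becomes - [16, 235],
   i.e. X in [0, 15] or [236, 255], matching the original test.  */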
4202
4203 /* Given EXP, a logical expression, set the range it is testing into
4204 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4205 actually being tested. *PLOW and *PHIGH will be made of the same
4206 type as the returned expression. If EXP is not a comparison, we
4207 will most likely not be returning a useful value and range. Set
4208 *STRICT_OVERFLOW_P to true if the return value is only valid
4209 because signed overflow is undefined; otherwise, do not change
4210 *STRICT_OVERFLOW_P. */
4211
4212 tree
4213 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4214 bool *strict_overflow_p)
4215 {
4216 enum tree_code code;
4217 tree arg0, arg1 = NULL_TREE;
4218 tree exp_type, nexp;
4219 int in_p;
4220 tree low, high;
4221 location_t loc = EXPR_LOCATION (exp);
4222
4223 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4224 and see if we can refine the range. Some of the cases below may not
4225 happen, but it doesn't seem worth worrying about this. We "continue"
4226 the outer loop when we've changed something; otherwise we "break"
4227 the switch, which will "break" the while. */
4228
4229 in_p = 0;
4230 low = high = build_int_cst (TREE_TYPE (exp), 0);
4231
4232 while (1)
4233 {
4234 code = TREE_CODE (exp);
4235 exp_type = TREE_TYPE (exp);
4236 arg0 = NULL_TREE;
4237
4238 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4239 {
4240 if (TREE_OPERAND_LENGTH (exp) > 0)
4241 arg0 = TREE_OPERAND (exp, 0);
4242 if (TREE_CODE_CLASS (code) == tcc_binary
4243 || TREE_CODE_CLASS (code) == tcc_comparison
4244 || (TREE_CODE_CLASS (code) == tcc_expression
4245 && TREE_OPERAND_LENGTH (exp) > 1))
4246 arg1 = TREE_OPERAND (exp, 1);
4247 }
4248 if (arg0 == NULL_TREE)
4249 break;
4250
4251 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4252 &high, &in_p, strict_overflow_p);
4253 if (nexp == NULL_TREE)
4254 break;
4255 exp = nexp;
4256 }
4257
4258 /* If EXP is a constant, we can evaluate whether this is true or false. */
4259 if (TREE_CODE (exp) == INTEGER_CST)
4260 {
4261 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4262 exp, 0, low, 0))
4263 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4264 exp, 1, high, 1)));
4265 low = high = 0;
4266 exp = 0;
4267 }
4268
4269 *pin_p = in_p, *plow = low, *phigh = high;
4270 return exp;
4271 }
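/* E.g. for EXP = "x < 10" this returns X with *PIN_P == 0, *PLOW == 10
   and *PHIGH omitted -- in the notation above, X tests as - [10, -].  */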
4272 \f
4273 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4274 type, TYPE, return an expression to test if EXP is in (or out of, depending
4275 on IN_P) the range. Return 0 if the test couldn't be created. */
4276
4277 tree
4278 build_range_check (location_t loc, tree type, tree exp, int in_p,
4279 tree low, tree high)
4280 {
4281 tree etype = TREE_TYPE (exp), value;
4282
4283 #ifdef HAVE_canonicalize_funcptr_for_compare
4284 /* Disable this optimization for function pointer expressions
4285 on targets that require function pointer canonicalization. */
4286 if (HAVE_canonicalize_funcptr_for_compare
4287 && TREE_CODE (etype) == POINTER_TYPE
4288 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4289 return NULL_TREE;
4290 #endif
4291
4292 if (! in_p)
4293 {
4294 value = build_range_check (loc, type, exp, 1, low, high);
4295 if (value != 0)
4296 return invert_truthvalue_loc (loc, value);
4297
4298 return 0;
4299 }
4300
4301 if (low == 0 && high == 0)
4302 return build_int_cst (type, 1);
4303
4304 if (low == 0)
4305 return fold_build2_loc (loc, LE_EXPR, type, exp,
4306 fold_convert_loc (loc, etype, high));
4307
4308 if (high == 0)
4309 return fold_build2_loc (loc, GE_EXPR, type, exp,
4310 fold_convert_loc (loc, etype, low));
4311
4312 if (operand_equal_p (low, high, 0))
4313 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4314 fold_convert_loc (loc, etype, low));
4315
4316 if (integer_zerop (low))
4317 {
4318 if (! TYPE_UNSIGNED (etype))
4319 {
4320 etype = unsigned_type_for (etype);
4321 high = fold_convert_loc (loc, etype, high);
4322 exp = fold_convert_loc (loc, etype, exp);
4323 }
4324 return build_range_check (loc, type, exp, 1, 0, high);
4325 }
4326
4327 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4328 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4329 {
4330 unsigned HOST_WIDE_INT lo;
4331 HOST_WIDE_INT hi;
4332 int prec;
4333
4334 prec = TYPE_PRECISION (etype);
4335 if (prec <= HOST_BITS_PER_WIDE_INT)
4336 {
4337 hi = 0;
4338 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4339 }
4340 else
4341 {
4342 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4343 lo = HOST_WIDE_INT_M1U;
4344 }
4345
4346 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4347 {
4348 if (TYPE_UNSIGNED (etype))
4349 {
4350 tree signed_etype = signed_type_for (etype);
4351 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4352 etype
4353 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4354 else
4355 etype = signed_etype;
4356 exp = fold_convert_loc (loc, etype, exp);
4357 }
4358 return fold_build2_loc (loc, GT_EXPR, type, exp,
4359 build_int_cst (etype, 0));
4360 }
4361 }
4362
4363 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4364      This requires wrap-around arithmetic for the type of the expression.
4365      First make sure that arithmetic in this type is valid, then make sure
4366 that it wraps around. */
4367 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4368 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4369 TYPE_UNSIGNED (etype));
4370
4371 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4372 {
4373 tree utype, minv, maxv;
4374
4375 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4376 for the type in question, as we rely on this here. */
4377 utype = unsigned_type_for (etype);
4378 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4379 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4380 integer_one_node, 1);
4381 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4382
4383 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4384 minv, 1, maxv, 1)))
4385 etype = utype;
4386 else
4387 return 0;
4388 }
4389
4390 high = fold_convert_loc (loc, etype, high);
4391 low = fold_convert_loc (loc, etype, low);
4392 exp = fold_convert_loc (loc, etype, exp);
4393
4394 value = const_binop (MINUS_EXPR, high, low);
4395
4396
4397 if (POINTER_TYPE_P (etype))
4398 {
4399 if (value != 0 && !TREE_OVERFLOW (value))
4400 {
4401 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4402 return build_range_check (loc, type,
4403 fold_build_pointer_plus_loc (loc, exp, low),
4404 1, build_int_cst (etype, 0), value);
4405 }
4406 return 0;
4407 }
4408
4409 if (value != 0 && !TREE_OVERFLOW (value))
4410 return build_range_check (loc, type,
4411 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4412 1, build_int_cst (etype, 0), value);
4413
4414 return 0;
4415 }
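/* A sketch of the common outcome (hypothetical signed CH): checking CH
   against [48, 57] falls through to the final transformation; the
   bounds and CH are converted to the unsigned type, and the recursive
   call with the zero-based range [0, 9] emits roughly

     (unsigned int) ch - 48 <= 9

   via the integer_zerop (low) case.  */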
4416 \f
4417 /* Return the predecessor of VAL in its type, handling the infinite case. */
4418
4419 static tree
4420 range_predecessor (tree val)
4421 {
4422 tree type = TREE_TYPE (val);
4423
4424 if (INTEGRAL_TYPE_P (type)
4425 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4426 return 0;
4427 else
4428 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4429 }
4430
4431 /* Return the successor of VAL in its type, handling the infinite case. */
4432
4433 static tree
4434 range_successor (tree val)
4435 {
4436 tree type = TREE_TYPE (val);
4437
4438 if (INTEGRAL_TYPE_P (type)
4439 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4440 return 0;
4441 else
4442 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4443 }
4444
4445 /* Given two ranges, see if we can merge them into one. Return 1 if we
4446 can, 0 if we can't. Set the output range into the specified parameters. */
4447
4448 bool
4449 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4450 tree high0, int in1_p, tree low1, tree high1)
4451 {
4452 int no_overlap;
4453 int subset;
4454 int temp;
4455 tree tem;
4456 int in_p;
4457 tree low, high;
4458 int lowequal = ((low0 == 0 && low1 == 0)
4459 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4460 low0, 0, low1, 0)));
4461 int highequal = ((high0 == 0 && high1 == 0)
4462 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4463 high0, 1, high1, 1)));
4464
4465 /* Make range 0 be the range that starts first, or ends last if they
4466 start at the same value. Swap them if it isn't. */
4467 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4468 low0, 0, low1, 0))
4469 || (lowequal
4470 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4471 high1, 1, high0, 1))))
4472 {
4473 temp = in0_p, in0_p = in1_p, in1_p = temp;
4474 tem = low0, low0 = low1, low1 = tem;
4475 tem = high0, high0 = high1, high1 = tem;
4476 }
4477
4478 /* Now flag two cases, whether the ranges are disjoint or whether the
4479 second range is totally subsumed in the first. Note that the tests
4480 below are simplified by the ones above. */
4481 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4482 high0, 1, low1, 0));
4483 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4484 high1, 1, high0, 1));
4485
4486 /* We now have four cases, depending on whether we are including or
4487 excluding the two ranges. */
4488 if (in0_p && in1_p)
4489 {
4490 /* If they don't overlap, the result is false. If the second range
4491 is a subset it is the result. Otherwise, the range is from the start
4492 of the second to the end of the first. */
4493 if (no_overlap)
4494 in_p = 0, low = high = 0;
4495 else if (subset)
4496 in_p = 1, low = low1, high = high1;
4497 else
4498 in_p = 1, low = low1, high = high0;
4499 }
4500
4501 else if (in0_p && ! in1_p)
4502 {
4503 /* If they don't overlap, the result is the first range. If they are
4504 equal, the result is false. If the second range is a subset of the
4505 first, and the ranges begin at the same place, we go from just after
4506 the end of the second range to the end of the first. If the second
4507 range is not a subset of the first, or if it is a subset and both
4508 ranges end at the same place, the range starts at the start of the
4509 first range and ends just before the second range.
4510 Otherwise, we can't describe this as a single range. */
4511 if (no_overlap)
4512 in_p = 1, low = low0, high = high0;
4513 else if (lowequal && highequal)
4514 in_p = 0, low = high = 0;
4515 else if (subset && lowequal)
4516 {
4517 low = range_successor (high1);
4518 high = high0;
4519 in_p = 1;
4520 if (low == 0)
4521 {
4522 /* We are in the weird situation where high0 > high1 but
4523 high1 has no successor. Punt. */
4524 return 0;
4525 }
4526 }
4527 else if (! subset || highequal)
4528 {
4529 low = low0;
4530 high = range_predecessor (low1);
4531 in_p = 1;
4532 if (high == 0)
4533 {
4534 /* low0 < low1 but low1 has no predecessor. Punt. */
4535 return 0;
4536 }
4537 }
4538 else
4539 return 0;
4540 }
4541
4542 else if (! in0_p && in1_p)
4543 {
4544 /* If they don't overlap, the result is the second range. If the second
4545 is a subset of the first, the result is false. Otherwise,
4546 the range starts just after the first range and ends at the
4547 end of the second. */
4548 if (no_overlap)
4549 in_p = 1, low = low1, high = high1;
4550 else if (subset || highequal)
4551 in_p = 0, low = high = 0;
4552 else
4553 {
4554 low = range_successor (high0);
4555 high = high1;
4556 in_p = 1;
4557 if (low == 0)
4558 {
4559 /* high1 > high0 but high0 has no successor. Punt. */
4560 return 0;
4561 }
4562 }
4563 }
4564
4565 else
4566 {
4567 /* The case where we are excluding both ranges. Here the complex case
4568 is if they don't overlap. In that case, the only time we have a
4569 range is if they are adjacent. If the second is a subset of the
4570 first, the result is the first. Otherwise, the range to exclude
4571 starts at the beginning of the first range and ends at the end of the
4572 second. */
4573 if (no_overlap)
4574 {
4575 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4576 range_successor (high0),
4577 1, low1, 0)))
4578 in_p = 0, low = low0, high = high1;
4579 else
4580 {
4581 /* Canonicalize - [min, x] into - [-, x]. */
4582 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4583 switch (TREE_CODE (TREE_TYPE (low0)))
4584 {
4585 case ENUMERAL_TYPE:
4586 if (TYPE_PRECISION (TREE_TYPE (low0))
4587 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4588 break;
4589 /* FALLTHROUGH */
4590 case INTEGER_TYPE:
4591 if (tree_int_cst_equal (low0,
4592 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4593 low0 = 0;
4594 break;
4595 case POINTER_TYPE:
4596 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4597 && integer_zerop (low0))
4598 low0 = 0;
4599 break;
4600 default:
4601 break;
4602 }
4603
4604 /* Canonicalize - [x, max] into - [x, -]. */
4605 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4606 switch (TREE_CODE (TREE_TYPE (high1)))
4607 {
4608 case ENUMERAL_TYPE:
4609 if (TYPE_PRECISION (TREE_TYPE (high1))
4610 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4611 break;
4612 /* FALLTHROUGH */
4613 case INTEGER_TYPE:
4614 if (tree_int_cst_equal (high1,
4615 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4616 high1 = 0;
4617 break;
4618 case POINTER_TYPE:
4619 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4620 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4621 high1, 1,
4622 integer_one_node, 1)))
4623 high1 = 0;
4624 break;
4625 default:
4626 break;
4627 }
4628
4629 /* The ranges might be also adjacent between the maximum and
4630 minimum values of the given type. For
4631 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4632 return + [x + 1, y - 1]. */
4633 if (low0 == 0 && high1 == 0)
4634 {
4635 low = range_successor (high0);
4636 high = range_predecessor (low1);
4637 if (low == 0 || high == 0)
4638 return 0;
4639
4640 in_p = 1;
4641 }
4642 else
4643 return 0;
4644 }
4645 }
4646 else if (subset)
4647 in_p = 0, low = low0, high = high0;
4648 else
4649 in_p = 0, low = low0, high = high1;
4650 }
4651
4652 *pin_p = in_p, *plow = low, *phigh = high;
4653 return 1;
4654 }
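/* E.g. merging + [2, 5] with + [4, 9] (both IN0_P and IN1_P set, so
   the two tests are ANDed): the ranges overlap and neither subsumes
   the other, so the result is their intersection, + [4, 5].  */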
4655 \f
4656
4657 /* Subroutine of fold, looking inside expressions of the form
4658 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4659 of the COND_EXPR. This function is being used also to optimize
4660 A op B ? C : A, by reversing the comparison first.
4661
4662 Return a folded expression whose code is not a COND_EXPR
4663 anymore, or NULL_TREE if no folding opportunity is found. */
4664
4665 static tree
4666 fold_cond_expr_with_comparison (location_t loc, tree type,
4667 tree arg0, tree arg1, tree arg2)
4668 {
4669 enum tree_code comp_code = TREE_CODE (arg0);
4670 tree arg00 = TREE_OPERAND (arg0, 0);
4671 tree arg01 = TREE_OPERAND (arg0, 1);
4672 tree arg1_type = TREE_TYPE (arg1);
4673 tree tem;
4674
4675 STRIP_NOPS (arg1);
4676 STRIP_NOPS (arg2);
4677
4678 /* If we have A op 0 ? A : -A, consider applying the following
4679 transformations:
4680
4681 A == 0? A : -A same as -A
4682 A != 0? A : -A same as A
4683 A >= 0? A : -A same as abs (A)
4684 A > 0? A : -A same as abs (A)
4685 A <= 0? A : -A same as -abs (A)
4686 A < 0? A : -A same as -abs (A)
4687
4688 None of these transformations work for modes with signed
4689 zeros. If A is +/-0, the first two transformations will
4690 change the sign of the result (from +0 to -0, or vice
4691 versa). The last four will fix the sign of the result,
4692 even though the original expressions could be positive or
4693 negative, depending on the sign of A.
4694
4695 Note that all these transformations are correct if A is
4696 NaN, since the two alternatives (A and -A) are also NaNs. */
4697 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4698 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4699 ? real_zerop (arg01)
4700 : integer_zerop (arg01))
4701 && ((TREE_CODE (arg2) == NEGATE_EXPR
4702 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4703 /* In the case that A is of the form X-Y, '-A' (arg2) may
4704 have already been folded to Y-X, check for that. */
4705 || (TREE_CODE (arg1) == MINUS_EXPR
4706 && TREE_CODE (arg2) == MINUS_EXPR
4707 && operand_equal_p (TREE_OPERAND (arg1, 0),
4708 TREE_OPERAND (arg2, 1), 0)
4709 && operand_equal_p (TREE_OPERAND (arg1, 1),
4710 TREE_OPERAND (arg2, 0), 0))))
4711 switch (comp_code)
4712 {
4713 case EQ_EXPR:
4714 case UNEQ_EXPR:
4715 tem = fold_convert_loc (loc, arg1_type, arg1);
4716 return pedantic_non_lvalue_loc (loc,
4717 fold_convert_loc (loc, type,
4718 negate_expr (tem)));
4719 case NE_EXPR:
4720 case LTGT_EXPR:
4721 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4722 case UNGE_EXPR:
4723 case UNGT_EXPR:
4724 if (flag_trapping_math)
4725 break;
4726 /* Fall through. */
4727 case GE_EXPR:
4728 case GT_EXPR:
4729 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4730 arg1 = fold_convert_loc (loc, signed_type_for
4731 (TREE_TYPE (arg1)), arg1);
4732 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4733 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4734 case UNLE_EXPR:
4735 case UNLT_EXPR:
4736 if (flag_trapping_math)
4737 break;
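	/* Fall through.  */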
4738 case LE_EXPR:
4739 case LT_EXPR:
4740 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4741 arg1 = fold_convert_loc (loc, signed_type_for
4742 (TREE_TYPE (arg1)), arg1);
4743 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4744 return negate_expr (fold_convert_loc (loc, type, tem));
4745 default:
4746 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4747 break;
4748 }
4749
4750 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4751 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4752 both transformations are correct when A is NaN: A != 0
4753 is then true, and A == 0 is false. */
4754
4755 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4756 && integer_zerop (arg01) && integer_zerop (arg2))
4757 {
4758 if (comp_code == NE_EXPR)
4759 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4760 else if (comp_code == EQ_EXPR)
4761 return build_zero_cst (type);
4762 }
4763
4764 /* Try some transformations of A op B ? A : B.
4765
4766 A == B? A : B same as B
4767 A != B? A : B same as A
4768 A >= B? A : B same as max (A, B)
4769 A > B? A : B same as max (B, A)
4770 A <= B? A : B same as min (A, B)
4771 A < B? A : B same as min (B, A)
4772
4773 As above, these transformations don't work in the presence
4774 of signed zeros. For example, if A and B are zeros of
4775 opposite sign, the first two transformations will change
4776 the sign of the result. In the last four, the original
4777 expressions give different results for (A=+0, B=-0) and
4778 (A=-0, B=+0), but the transformed expressions do not.
4779
4780 The first two transformations are correct if either A or B
4781 is a NaN. In the first transformation, the condition will
4782 be false, and B will indeed be chosen. In the case of the
4783 second transformation, the condition A != B will be true,
4784 and A will be chosen.
4785
4786 The conversions to max() and min() are not correct if B is
4787 a number and A is not. The conditions in the original
4788 expressions will be false, so all four give B. The min()
4789 and max() versions would give a NaN instead. */
4790 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4791 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4792 /* Avoid these transformations if the COND_EXPR may be used
4793 as an lvalue in the C++ front-end. PR c++/19199. */
4794 && (in_gimple_form
4795 || VECTOR_TYPE_P (type)
4796 || (strcmp (lang_hooks.name, "GNU C++") != 0
4797 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4798 || ! maybe_lvalue_p (arg1)
4799 || ! maybe_lvalue_p (arg2)))
4800 {
4801 tree comp_op0 = arg00;
4802 tree comp_op1 = arg01;
4803 tree comp_type = TREE_TYPE (comp_op0);
4804
4805 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4806 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4807 {
4808 comp_type = type;
4809 comp_op0 = arg1;
4810 comp_op1 = arg2;
4811 }
4812
4813 switch (comp_code)
4814 {
4815 case EQ_EXPR:
4816 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4817 case NE_EXPR:
4818 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4819 case LE_EXPR:
4820 case LT_EXPR:
4821 case UNLE_EXPR:
4822 case UNLT_EXPR:
4823 /* In C++ a ?: expression can be an lvalue, so put the
4824 operand which will be used if they are equal first
4825 so that we can convert this back to the
4826 corresponding COND_EXPR. */
4827 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4828 {
4829 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4830 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4831 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4832 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4833 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4834 comp_op1, comp_op0);
4835 return pedantic_non_lvalue_loc (loc,
4836 fold_convert_loc (loc, type, tem));
4837 }
4838 break;
4839 case GE_EXPR:
4840 case GT_EXPR:
4841 case UNGE_EXPR:
4842 case UNGT_EXPR:
4843 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4844 {
4845 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4846 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4847 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4848 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4849 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4850 comp_op1, comp_op0);
4851 return pedantic_non_lvalue_loc (loc,
4852 fold_convert_loc (loc, type, tem));
4853 }
4854 break;
4855 case UNEQ_EXPR:
4856 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4857 return pedantic_non_lvalue_loc (loc,
4858 fold_convert_loc (loc, type, arg2));
4859 break;
4860 case LTGT_EXPR:
4861 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4862 return pedantic_non_lvalue_loc (loc,
4863 fold_convert_loc (loc, type, arg1));
4864 break;
4865 default:
4866 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4867 break;
4868 }
4869 }
4870
4871 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4872 we might still be able to simplify this. For example,
4873 if C1 is one less or one more than C2, this might have started
4874 out as a MIN or MAX and been transformed by this function.
4875 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4876
4877 if (INTEGRAL_TYPE_P (type)
4878 && TREE_CODE (arg01) == INTEGER_CST
4879 && TREE_CODE (arg2) == INTEGER_CST)
4880 switch (comp_code)
4881 {
4882 case EQ_EXPR:
4883 if (TREE_CODE (arg1) == INTEGER_CST)
4884 break;
4885 /* We can replace A with C1 in this case. */
4886 arg1 = fold_convert_loc (loc, type, arg01);
4887 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4888
4889 case LT_EXPR:
4890 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4891 MIN_EXPR, to preserve the signedness of the comparison. */
4892 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4893 OEP_ONLY_CONST)
4894 && operand_equal_p (arg01,
4895 const_binop (PLUS_EXPR, arg2,
4896 build_int_cst (type, 1)),
4897 OEP_ONLY_CONST))
4898 {
4899 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4900 fold_convert_loc (loc, TREE_TYPE (arg00),
4901 arg2));
4902 return pedantic_non_lvalue_loc (loc,
4903 fold_convert_loc (loc, type, tem));
4904 }
4905 break;
4906
4907 case LE_EXPR:
4908 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4909 as above. */
4910 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4911 OEP_ONLY_CONST)
4912 && operand_equal_p (arg01,
4913 const_binop (MINUS_EXPR, arg2,
4914 build_int_cst (type, 1)),
4915 OEP_ONLY_CONST))
4916 {
4917 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4918 fold_convert_loc (loc, TREE_TYPE (arg00),
4919 arg2));
4920 return pedantic_non_lvalue_loc (loc,
4921 fold_convert_loc (loc, type, tem));
4922 }
4923 break;
4924
4925 case GT_EXPR:
4926 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4927 MAX_EXPR, to preserve the signedness of the comparison. */
4928 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4929 OEP_ONLY_CONST)
4930 && operand_equal_p (arg01,
4931 const_binop (MINUS_EXPR, arg2,
4932 build_int_cst (type, 1)),
4933 OEP_ONLY_CONST))
4934 {
4935 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4936 fold_convert_loc (loc, TREE_TYPE (arg00),
4937 arg2));
4938 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4939 }
4940 break;
4941
4942 case GE_EXPR:
4943 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4944 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4945 OEP_ONLY_CONST)
4946 && operand_equal_p (arg01,
4947 const_binop (PLUS_EXPR, arg2,
4948 build_int_cst (type, 1)),
4949 OEP_ONLY_CONST))
4950 {
4951 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4952 fold_convert_loc (loc, TREE_TYPE (arg00),
4953 arg2));
4954 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4955 }
4956 break;
4957 case NE_EXPR:
4958 break;
4959 default:
4960 gcc_unreachable ();
4961 }
4962
4963 return NULL_TREE;
4964 }
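/* Concretely: "a > 0 ? a : -a" becomes ABS_EXPR <a> via the GE/GT case
   above, and "a < b ? a : b" becomes MIN_EXPR <b, a> via the LE/LT
   case, provided signed zeros and NaNs need not be honored for the
   operand type.  */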
4965
4966
4967 \f
4968 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4969 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4970 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4971 false) >= 2)
4972 #endif
4973
4974 /* EXP is some logical combination of boolean tests. See if we can
4975 merge it into some range test. Return the new tree if so. */
4976
4977 static tree
4978 fold_range_test (location_t loc, enum tree_code code, tree type,
4979 tree op0, tree op1)
4980 {
4981 int or_op = (code == TRUTH_ORIF_EXPR
4982 || code == TRUTH_OR_EXPR);
4983 int in0_p, in1_p, in_p;
4984 tree low0, low1, low, high0, high1, high;
4985 bool strict_overflow_p = false;
4986 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4987 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4988 tree tem;
4989 const char * const warnmsg = G_("assuming signed overflow does not occur "
4990 "when simplifying range test");
4991
4992 /* If this is an OR operation, invert both sides; we will invert
4993 again at the end. */
4994 if (or_op)
4995 in0_p = ! in0_p, in1_p = ! in1_p;
4996
4997 /* If both expressions are the same, if we can merge the ranges, and we
4998 can build the range test, return it or it inverted. If one of the
4999 ranges is always true or always false, consider it to be the same
5000 expression as the other. */
5001 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5002 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5003 in1_p, low1, high1)
5004 && 0 != (tem = (build_range_check (loc, type,
5005 lhs != 0 ? lhs
5006 : rhs != 0 ? rhs : integer_zero_node,
5007 in_p, low, high))))
5008 {
5009 if (strict_overflow_p)
5010 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5011 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5012 }
5013
5014   /* On machines where branches are expensive, if this is a
5015 short-circuited branch and the underlying object on both sides
5016 is the same, make a non-short-circuit operation. */
5017 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5018 && lhs != 0 && rhs != 0
5019 && (code == TRUTH_ANDIF_EXPR
5020 || code == TRUTH_ORIF_EXPR)
5021 && operand_equal_p (lhs, rhs, 0))
5022 {
5023 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5024 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5025 which cases we can't do this. */
5026 if (simple_operand_p (lhs))
5027 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5028 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5029 type, op0, op1);
5030
5031 else if (!lang_hooks.decls.global_bindings_p ()
5032 && !CONTAINS_PLACEHOLDER_P (lhs))
5033 {
5034 tree common = save_expr (lhs);
5035
5036 if (0 != (lhs = build_range_check (loc, type, common,
5037 or_op ? ! in0_p : in0_p,
5038 low0, high0))
5039 && (0 != (rhs = build_range_check (loc, type, common,
5040 or_op ? ! in1_p : in1_p,
5041 low1, high1))))
5042 {
5043 if (strict_overflow_p)
5044 fold_overflow_warning (warnmsg,
5045 WARN_STRICT_OVERFLOW_COMPARISON);
5046 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5047 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5048 type, lhs, rhs);
5049 }
5050 }
5051 }
5052
5053 return 0;
5054 }
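/* For example, "c >= -10 && c <= 10" decodes into the ranges
   + [-10, -] and + [-, 10]; merge_ranges intersects them into
   + [-10, 10], and build_range_check then emits a single unsigned
   compare, roughly (unsigned) c + 10 <= 20 (a sketch; the exact form
   depends on the operand type).  */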
5055 \f
5056 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5057 bit value. Arrange things so the extra bits will be set to zero if and
5058    only if C is sign-extended to its full width.  If MASK is nonzero,
5059 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5060
5061 static tree
5062 unextend (tree c, int p, int unsignedp, tree mask)
5063 {
5064 tree type = TREE_TYPE (c);
5065 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5066 tree temp;
5067
5068 if (p == modesize || unsignedp)
5069 return c;
5070
5071 /* We work by getting just the sign bit into the low-order bit, then
5072 into the high-order bit, then sign-extend. We then XOR that value
5073 with C. */
5074 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5075 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5076
5077 /* We must use a signed type in order to get an arithmetic right shift.
5078 However, we must also avoid introducing accidental overflows, so that
5079 a subsequent call to integer_zerop will work. Hence we must
5080 do the type conversion here. At this point, the constant is either
5081 zero or one, and the conversion to a signed type can never overflow.
5082 We could get an overflow if this conversion is done anywhere else. */
5083 if (TYPE_UNSIGNED (type))
5084 temp = fold_convert (signed_type_for (type), temp);
5085
5086 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5087 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5088 if (mask != 0)
5089 temp = const_binop (BIT_AND_EXPR, temp,
5090 fold_convert (TREE_TYPE (c), mask));
5091 /* If necessary, convert the type back to match the type of C. */
5092 if (TYPE_UNSIGNED (type))
5093 temp = fold_convert (type, temp);
5094
5095 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5096 }
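/* A worked instance (P == 4, an 8-bit type, UNSIGNEDP == 0, no MASK):
   for C == 0b1010, the sign bit is isolated (1), moved to bit 7 and
   arithmetically shifted back down, giving 0b11110000; XORing with C
   yields 0b11111010, i.e. C sign-extended from 4 bits to the full mode
   width.  */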
5097 \f
5098 /* For an expression that has the form
5099 (A && B) || ~B
5100 or
5101 (A || B) && ~B,
5102 we can drop one of the inner expressions and simplify to
5103 A || ~B
5104 or
5105 A && ~B
5106 LOC is the location of the resulting expression. OP is the inner
5107    logical operation, the left-hand side in the examples above, while CMPOP
5108 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5109 removing a condition that guards another, as in
5110 (A != NULL && A->...) || A == NULL
5111 which we must not transform. If RHS_ONLY is true, only eliminate the
5112 right-most operand of the inner logical operation. */
5113
5114 static tree
5115 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5116 bool rhs_only)
5117 {
5118 tree type = TREE_TYPE (cmpop);
5119 enum tree_code code = TREE_CODE (cmpop);
5120 enum tree_code truthop_code = TREE_CODE (op);
5121 tree lhs = TREE_OPERAND (op, 0);
5122 tree rhs = TREE_OPERAND (op, 1);
5123 tree orig_lhs = lhs, orig_rhs = rhs;
5124 enum tree_code rhs_code = TREE_CODE (rhs);
5125 enum tree_code lhs_code = TREE_CODE (lhs);
5126 enum tree_code inv_code;
5127
5128 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5129 return NULL_TREE;
5130
5131 if (TREE_CODE_CLASS (code) != tcc_comparison)
5132 return NULL_TREE;
5133
5134 if (rhs_code == truthop_code)
5135 {
5136 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5137 if (newrhs != NULL_TREE)
5138 {
5139 rhs = newrhs;
5140 rhs_code = TREE_CODE (rhs);
5141 }
5142 }
5143 if (lhs_code == truthop_code && !rhs_only)
5144 {
5145 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5146 if (newlhs != NULL_TREE)
5147 {
5148 lhs = newlhs;
5149 lhs_code = TREE_CODE (lhs);
5150 }
5151 }
5152
5153 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5154 if (inv_code == rhs_code
5155 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5156 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5157 return lhs;
5158 if (!rhs_only && inv_code == lhs_code
5159 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5160 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5161 return rhs;
5162 if (rhs != orig_rhs || lhs != orig_lhs)
5163 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5164 lhs, rhs);
5165 return NULL_TREE;
5166 }
5167
5168 /* Find ways of folding logical expressions of LHS and RHS:
5169 Try to merge two comparisons to the same innermost item.
5170 Look for range tests like "ch >= '0' && ch <= '9'".
5171 Look for combinations of simple terms on machines with expensive branches
5172 and evaluate the RHS unconditionally.
5173
5174 For example, if we have p->a == 2 && p->b == 4 and we can make an
5175 object large enough to span both A and B, we can do this with a comparison
5176    against the object ANDed with a mask.
5177
5178 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5179 operations to do this with one comparison.
5180
5181    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5182 function and the one above.
5183
5184 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5185 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5186
5187 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5188 two operands.
5189
5190 We return the simplified tree or 0 if no optimization is possible. */
5191
5192 static tree
5193 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5194 tree lhs, tree rhs)
5195 {
5196 /* If this is the "or" of two comparisons, we can do something if
5197 the comparisons are NE_EXPR. If this is the "and", we can do something
5198 if the comparisons are EQ_EXPR. I.e.,
5199 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5200
5201 WANTED_CODE is this operation code. For single bit fields, we can
5202 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5203 comparison for one-bit fields. */
5204
5205 enum tree_code wanted_code;
5206 enum tree_code lcode, rcode;
5207 tree ll_arg, lr_arg, rl_arg, rr_arg;
5208 tree ll_inner, lr_inner, rl_inner, rr_inner;
5209 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5210 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5211 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5212 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5213 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5214 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5215 enum machine_mode lnmode, rnmode;
5216 tree ll_mask, lr_mask, rl_mask, rr_mask;
5217 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5218 tree l_const, r_const;
5219 tree lntype, rntype, result;
5220 HOST_WIDE_INT first_bit, end_bit;
5221 int volatilep;
5222
5223 /* Start by getting the comparison codes. Fail if anything is volatile.
5224 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5225 it were surrounded with a NE_EXPR. */
5226
5227 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5228 return 0;
5229
5230 lcode = TREE_CODE (lhs);
5231 rcode = TREE_CODE (rhs);
5232
5233 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5234 {
5235 lhs = build2 (NE_EXPR, truth_type, lhs,
5236 build_int_cst (TREE_TYPE (lhs), 0));
5237 lcode = NE_EXPR;
5238 }
5239
5240 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5241 {
5242 rhs = build2 (NE_EXPR, truth_type, rhs,
5243 build_int_cst (TREE_TYPE (rhs), 0));
5244 rcode = NE_EXPR;
5245 }
5246
5247 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5248 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5249 return 0;
5250
5251 ll_arg = TREE_OPERAND (lhs, 0);
5252 lr_arg = TREE_OPERAND (lhs, 1);
5253 rl_arg = TREE_OPERAND (rhs, 0);
5254 rr_arg = TREE_OPERAND (rhs, 1);
5255
5256 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5257 if (simple_operand_p (ll_arg)
5258 && simple_operand_p (lr_arg))
5259 {
5260 if (operand_equal_p (ll_arg, rl_arg, 0)
5261 && operand_equal_p (lr_arg, rr_arg, 0))
5262 {
5263 result = combine_comparisons (loc, code, lcode, rcode,
5264 truth_type, ll_arg, lr_arg);
5265 if (result)
5266 return result;
5267 }
5268 else if (operand_equal_p (ll_arg, rr_arg, 0)
5269 && operand_equal_p (lr_arg, rl_arg, 0))
5270 {
5271 result = combine_comparisons (loc, code, lcode,
5272 swap_tree_comparison (rcode),
5273 truth_type, ll_arg, lr_arg);
5274 if (result)
5275 return result;
5276 }
5277 }
5278
5279 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5280 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5281
5282 /* If the RHS can be evaluated unconditionally and its operands are
5283 simple, it wins to evaluate the RHS unconditionally on machines
5284 with expensive branches. In this case, this isn't a comparison
5285 that can be merged. */
5286
5287 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5288 false) >= 2
5289 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5290 && simple_operand_p (rl_arg)
5291 && simple_operand_p (rr_arg))
5292 {
5293 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5294 if (code == TRUTH_OR_EXPR
5295 && lcode == NE_EXPR && integer_zerop (lr_arg)
5296 && rcode == NE_EXPR && integer_zerop (rr_arg)
5297 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5298 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5299 return build2_loc (loc, NE_EXPR, truth_type,
5300 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5301 ll_arg, rl_arg),
5302 build_int_cst (TREE_TYPE (ll_arg), 0));
5303
5304 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5305 if (code == TRUTH_AND_EXPR
5306 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5307 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5308 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5309 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5310 return build2_loc (loc, EQ_EXPR, truth_type,
5311 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5312 ll_arg, rl_arg),
5313 build_int_cst (TREE_TYPE (ll_arg), 0));
5314 }
5315
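/* For example, with side-effect-free integer variables a and b, the test
   a != 0 || b != 0 becomes (a | b) != 0, which evaluates B without a
   second branch.  Had either operand been a call such as g () != 0, the
   side-effect and simple_operand_p tests above would have rejected the
   transformation, since the call might otherwise be skipped.  */
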
5316 /* See if the comparisons can be merged. Then get all the parameters for
5317 each side. */
5318
5319 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5320 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5321 return 0;
5322
5323 volatilep = 0;
5324 ll_inner = decode_field_reference (loc, ll_arg,
5325 &ll_bitsize, &ll_bitpos, &ll_mode,
5326 &ll_unsignedp, &volatilep, &ll_mask,
5327 &ll_and_mask);
5328 lr_inner = decode_field_reference (loc, lr_arg,
5329 &lr_bitsize, &lr_bitpos, &lr_mode,
5330 &lr_unsignedp, &volatilep, &lr_mask,
5331 &lr_and_mask);
5332 rl_inner = decode_field_reference (loc, rl_arg,
5333 &rl_bitsize, &rl_bitpos, &rl_mode,
5334 &rl_unsignedp, &volatilep, &rl_mask,
5335 &rl_and_mask);
5336 rr_inner = decode_field_reference (loc, rr_arg,
5337 &rr_bitsize, &rr_bitpos, &rr_mode,
5338 &rr_unsignedp, &volatilep, &rr_mask,
5339 &rr_and_mask);
5340
5341 /* The inner operation on the lhs of each comparison must be the
5342 same if we are to be able to do anything. Then see if we have
5343 constants. If not, the same must be true for
5344 the rhs's. */
5345 if (volatilep || ll_inner == 0 || rl_inner == 0
5346 || ! operand_equal_p (ll_inner, rl_inner, 0))
5347 return 0;
5348
5349 if (TREE_CODE (lr_arg) == INTEGER_CST
5350 && TREE_CODE (rr_arg) == INTEGER_CST)
5351 l_const = lr_arg, r_const = rr_arg;
5352 else if (lr_inner == 0 || rr_inner == 0
5353 || ! operand_equal_p (lr_inner, rr_inner, 0))
5354 return 0;
5355 else
5356 l_const = r_const = 0;
5357
5358 /* If either comparison code is not correct for our logical operation,
5359 fail. However, we can convert a one-bit comparison against zero into
5360 the opposite comparison against that bit being set in the field. */
5361
5362 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5363 if (lcode != wanted_code)
5364 {
5365 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5366 {
5367 /* Make the left operand unsigned, since we are only interested
5368 in the value of one bit. Otherwise we are doing the wrong
5369 thing below. */
5370 ll_unsignedp = 1;
5371 l_const = ll_mask;
5372 }
5373 else
5374 return 0;
5375 }
5376
5377 /* This is analogous to the code for l_const above. */
5378 if (rcode != wanted_code)
5379 {
5380 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5381 {
5382 rl_unsignedp = 1;
5383 r_const = rl_mask;
5384 }
5385 else
5386 return 0;
5387 }
5388
5389 /* See if we can find a mode that contains both fields being compared on
5390 the left. If we can't, fail. Otherwise, update all constants and masks
5391 to be relative to a field of that size. */
5392 first_bit = MIN (ll_bitpos, rl_bitpos);
5393 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5394 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5395 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5396 volatilep);
5397 if (lnmode == VOIDmode)
5398 return 0;
5399
5400 lnbitsize = GET_MODE_BITSIZE (lnmode);
5401 lnbitpos = first_bit & ~ (lnbitsize - 1);
5402 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5403 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5404
5405 if (BYTES_BIG_ENDIAN)
5406 {
5407 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5408 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5409 }
5410
5411 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5412 size_int (xll_bitpos));
5413 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5414 size_int (xrl_bitpos));
5415
5416 if (l_const)
5417 {
5418 l_const = fold_convert_loc (loc, lntype, l_const);
5419 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5420 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5421 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5422 fold_build1_loc (loc, BIT_NOT_EXPR,
5423 lntype, ll_mask))))
5424 {
5425 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5426
5427 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5428 }
5429 }
5430 if (r_const)
5431 {
5432 r_const = fold_convert_loc (loc, lntype, r_const);
5433 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5434 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5435 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5436 fold_build1_loc (loc, BIT_NOT_EXPR,
5437 lntype, rl_mask))))
5438 {
5439 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5440
5441 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5442 }
5443 }
5444
5445 /* If the right sides are not constant, do the same for them. Also,
5446 disallow this optimization if a size or signedness mismatch occurs
5447 between the left and right sides. */
5448 if (l_const == 0)
5449 {
5450 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5451 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5452 /* Make sure the two fields on the right
5453 correspond to the left without being swapped. */
5454 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5455 return 0;
5456
5457 first_bit = MIN (lr_bitpos, rr_bitpos);
5458 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5459 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5460 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5461 volatilep);
5462 if (rnmode == VOIDmode)
5463 return 0;
5464
5465 rnbitsize = GET_MODE_BITSIZE (rnmode);
5466 rnbitpos = first_bit & ~ (rnbitsize - 1);
5467 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5468 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5469
5470 if (BYTES_BIG_ENDIAN)
5471 {
5472 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5473 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5474 }
5475
5476 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5477 rntype, lr_mask),
5478 size_int (xlr_bitpos));
5479 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5480 rntype, rr_mask),
5481 size_int (xrr_bitpos));
5482
5483 /* Make a mask that corresponds to both fields being compared.
5484 Do this for both items being compared. If the operands are the
5485 same size and the bits being compared are in the same position
5486 then we can do this by masking both and comparing the masked
5487 results. */
5488 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5489 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5490 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5491 {
5492 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5493 ll_unsignedp || rl_unsignedp);
5494 if (! all_ones_mask_p (ll_mask, lnbitsize))
5495 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5496
5497 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5498 lr_unsignedp || rr_unsignedp);
5499 if (! all_ones_mask_p (lr_mask, rnbitsize))
5500 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5501
5502 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5503 }
5504
5505 /* There is still another way we can do something: If both pairs of
5506 fields being compared are adjacent, we may be able to make a wider
5507 field containing them both.
5508
5509 Note that we still must mask the lhs/rhs expressions. Furthermore,
5510 the mask must be shifted to account for the shift done by
5511 make_bit_field_ref. */
5512 if ((ll_bitsize + ll_bitpos == rl_bitpos
5513 && lr_bitsize + lr_bitpos == rr_bitpos)
5514 || (ll_bitpos == rl_bitpos + rl_bitsize
5515 && lr_bitpos == rr_bitpos + rr_bitsize))
5516 {
5517 tree type;
5518
5519 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5520 ll_bitsize + rl_bitsize,
5521 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5522 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5523 lr_bitsize + rr_bitsize,
5524 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5525
5526 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5527 size_int (MIN (xll_bitpos, xrl_bitpos)));
5528 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5529 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5530
5531 /* Convert to the smaller type before masking out unwanted bits. */
5532 type = lntype;
5533 if (lntype != rntype)
5534 {
5535 if (lnbitsize > rnbitsize)
5536 {
5537 lhs = fold_convert_loc (loc, rntype, lhs);
5538 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5539 type = rntype;
5540 }
5541 else if (lnbitsize < rnbitsize)
5542 {
5543 rhs = fold_convert_loc (loc, lntype, rhs);
5544 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5545 type = lntype;
5546 }
5547 }
5548
5549 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5550 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5551
5552 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5553 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5554
5555 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5556 }
5557
5558 return 0;
5559 }
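
/* For illustration: with struct S { unsigned char a, b; } and suitably
   aligned operands, p->a == q->a && p->b == q->b finds both fields of
   each side within one 16-bit word, so the masked-compare case above
   turns two byte comparisons into a single 16-bit load and compare of
   each side (the combined mask is all ones and is dropped).  */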
5560
5561 /* Handle the case of comparisons with constants. If there is something in
5562 common between the masks, those bits of the constants must be the same.
5563 If not, the condition is always false. Test for this to avoid generating
5564 incorrect code below. */
5565 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5566 if (! integer_zerop (result)
5567 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5568 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5569 {
5570 if (wanted_code == NE_EXPR)
5571 {
5572 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5573 return constant_boolean_node (true, truth_type);
5574 }
5575 else
5576 {
5577 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5578 return constant_boolean_node (false, truth_type);
5579 }
5580 }
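
/* For example, if A is a 4-bit field, then in p->a == 2 && p->a == 3 the
   two masks fully overlap while the constants differ in the common bits,
   so the second warning above fires and the conjunction folds to 0.  */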
5581
5582 /* Construct the expression we will return. First get the component
5583 reference we will make. Unless the mask is all ones the width of
5584 that field, perform the mask operation. Then compare with the
5585 merged constant. */
5586 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5587 ll_unsignedp || rl_unsignedp);
5588
5589 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5590 if (! all_ones_mask_p (ll_mask, lnbitsize))
5591 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5592
5593 return build2_loc (loc, wanted_code, truth_type, result,
5594 const_binop (BIT_IOR_EXPR, l_const, r_const));
5595 }
5596 \f
5597 /* Optimize the comparison OP0 CODE OP1, where OP0 is a MIN_EXPR or MAX_EXPR
5598 and OP1 is a constant. */
5599
5600 static tree
5601 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5602 tree op0, tree op1)
5603 {
5604 tree arg0 = op0;
5605 enum tree_code op_code;
5606 tree comp_const;
5607 tree minmax_const;
5608 int consts_equal, consts_lt;
5609 tree inner;
5610
5611 STRIP_SIGN_NOPS (arg0);
5612
5613 op_code = TREE_CODE (arg0);
5614 minmax_const = TREE_OPERAND (arg0, 1);
5615 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5616 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5617 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5618 inner = TREE_OPERAND (arg0, 0);
5619
5620 /* If something does not permit us to optimize, return NULL_TREE. */
5621 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5622 || TREE_CODE (comp_const) != INTEGER_CST
5623 || TREE_OVERFLOW (comp_const)
5624 || TREE_CODE (minmax_const) != INTEGER_CST
5625 || TREE_OVERFLOW (minmax_const))
5626 return NULL_TREE;
5627
5628 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5629 and GT_EXPR, doing the rest with recursive calls using logical
5630 simplifications. */
5631 switch (code)
5632 {
5633 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5634 {
5635 tree tem
5636 = optimize_minmax_comparison (loc,
5637 invert_tree_comparison (code, false),
5638 type, op0, op1);
5639 if (tem)
5640 return invert_truthvalue_loc (loc, tem);
5641 return NULL_TREE;
5642 }
5643
5644 case GE_EXPR:
5645 return
5646 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5647 optimize_minmax_comparison
5648 (loc, EQ_EXPR, type, arg0, comp_const),
5649 optimize_minmax_comparison
5650 (loc, GT_EXPR, type, arg0, comp_const));
5651
5652 case EQ_EXPR:
5653 if (op_code == MAX_EXPR && consts_equal)
5654 /* MAX (X, 0) == 0 -> X <= 0 */
5655 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5656
5657 else if (op_code == MAX_EXPR && consts_lt)
5658 /* MAX (X, 0) == 5 -> X == 5 */
5659 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5660
5661 else if (op_code == MAX_EXPR)
5662 /* MAX (X, 0) == -1 -> false */
5663 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5664
5665 else if (consts_equal)
5666 /* MIN (X, 0) == 0 -> X >= 0 */
5667 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5668
5669 else if (consts_lt)
5670 /* MIN (X, 0) == 5 -> false */
5671 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5672
5673 else
5674 /* MIN (X, 0) == -1 -> X == -1 */
5675 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5676
5677 case GT_EXPR:
5678 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5679 /* MAX (X, 0) > 0 -> X > 0
5680 MAX (X, 0) > 5 -> X > 5 */
5681 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5682
5683 else if (op_code == MAX_EXPR)
5684 /* MAX (X, 0) > -1 -> true */
5685 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5686
5687 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5688 /* MIN (X, 0) > 0 -> false
5689 MIN (X, 0) > 5 -> false */
5690 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5691
5692 else
5693 /* MIN (X, 0) > -1 -> X > -1 */
5694 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5695
5696 default:
5697 return NULL_TREE;
5698 }
5699 }
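
/* For example, MAX (X, 10) != 10 is handled through the inversion above:
   it is treated as ! (MAX (X, 10) == 10), the EQ_EXPR case folds the
   inner comparison to X <= 10, and inverting the result yields
   X > 10.  */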
5700 \f
5701 /* T is an integer expression that is being multiplied or divided by, or
5702 taken modulo, a constant C (CODE says which operation and what kind of
5703 divide or modulus). See if we can eliminate that operation by folding it with
5704 other operations already in T. WIDE_TYPE, if non-null, is a type that
5705 should be used for the computation if wider than our type.
5706
5707 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5708 (X * 2) + (Y * 4). We must, however, be assured that either the original
5709 expression would not overflow or that overflow is undefined for the type
5710 in the language in question.
5711
5712 If we return a non-null expression, it is an equivalent form of the
5713 original computation, but need not be in the original type.
5714
5715 We set *STRICT_OVERFLOW_P to true if the return value depends on
5716 signed overflow being undefined. Otherwise we do not change
5717 *STRICT_OVERFLOW_P. */
5718
5719 static tree
5720 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5721 bool *strict_overflow_p)
5722 {
5723 /* To avoid exponential search depth, refuse to allow recursion past
5724 three levels. Beyond that (1) it's highly unlikely that we'll find
5725 something interesting and (2) we've probably processed it before
5726 when we built the inner expression. */
5727
5728 static int depth;
5729 tree ret;
5730
5731 if (depth > 3)
5732 return NULL;
5733
5734 depth++;
5735 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5736 depth--;
5737
5738 return ret;
5739 }
5740
5741 static tree
5742 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5743 bool *strict_overflow_p)
5744 {
5745 tree type = TREE_TYPE (t);
5746 enum tree_code tcode = TREE_CODE (t);
5747 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5748 > GET_MODE_SIZE (TYPE_MODE (type)))
5749 ? wide_type : type);
5750 tree t1, t2;
5751 int same_p = tcode == code;
5752 tree op0 = NULL_TREE, op1 = NULL_TREE;
5753 bool sub_strict_overflow_p;
5754
5755 /* Don't deal with constants of zero here; they confuse the code below. */
5756 if (integer_zerop (c))
5757 return NULL_TREE;
5758
5759 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5760 op0 = TREE_OPERAND (t, 0);
5761
5762 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5763 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5764
5765 /* Note that we need not handle conditional operations here since fold
5766 already handles those cases. So just do arithmetic here. */
5767 switch (tcode)
5768 {
5769 case INTEGER_CST:
5770 /* For a constant, we can always simplify if we are a multiply
5771 or (for divide and modulus) if it is a multiple of our constant. */
5772 if (code == MULT_EXPR
5773 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5774 return const_binop (code, fold_convert (ctype, t),
5775 fold_convert (ctype, c));
5776 break;
5777
5778 CASE_CONVERT: case NON_LVALUE_EXPR:
5779 /* If op0 is an expression ... */
5780 if ((COMPARISON_CLASS_P (op0)
5781 || UNARY_CLASS_P (op0)
5782 || BINARY_CLASS_P (op0)
5783 || VL_EXP_CLASS_P (op0)
5784 || EXPRESSION_CLASS_P (op0))
5785 /* ... and has wrapping overflow, and its type is smaller
5786 than ctype, then we cannot pass through as widening. */
5787 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5788 && (TYPE_PRECISION (ctype)
5789 > TYPE_PRECISION (TREE_TYPE (op0))))
5790 /* ... or this is a truncation (t is narrower than op0),
5791 then we cannot pass through this narrowing. */
5792 || (TYPE_PRECISION (type)
5793 < TYPE_PRECISION (TREE_TYPE (op0)))
5794 /* ... or signedness changes for division or modulus,
5795 then we cannot pass through this conversion. */
5796 || (code != MULT_EXPR
5797 && (TYPE_UNSIGNED (ctype)
5798 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5799 /* ... or has undefined overflow while the type converted to
5800 does not, in which case we cannot do the operation in the inner type
5801 as that would introduce undefined overflow. */
5802 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5803 && !TYPE_OVERFLOW_UNDEFINED (type))))
5804 break;
5805
5806 /* Pass the constant down and see if we can make a simplification. If
5807 we can, replace this expression with the inner simplification for
5808 possible later conversion to our or some other type. */
5809 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5810 && TREE_CODE (t2) == INTEGER_CST
5811 && !TREE_OVERFLOW (t2)
5812 && (0 != (t1 = extract_muldiv (op0, t2, code,
5813 code == MULT_EXPR
5814 ? ctype : NULL_TREE,
5815 strict_overflow_p))))
5816 return t1;
5817 break;
5818
5819 case ABS_EXPR:
5820 /* If widening the type changes it from signed to unsigned, then we
5821 must avoid building ABS_EXPR itself as unsigned. */
5822 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5823 {
5824 tree cstype = (*signed_type_for) (ctype);
5825 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5826 != 0)
5827 {
5828 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5829 return fold_convert (ctype, t1);
5830 }
5831 break;
5832 }
5833 /* If the constant is negative, we cannot simplify this. */
5834 if (tree_int_cst_sgn (c) == -1)
5835 break;
5836 /* FALLTHROUGH */
5837 case NEGATE_EXPR:
5838 /* For division and modulus, type can't be unsigned, as e.g.
5839 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5840 For signed types, even with wrapping overflow, this is fine. */
5841 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5842 break;
5843 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5844 != 0)
5845 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5846 break;
5847
5848 case MIN_EXPR: case MAX_EXPR:
5849 /* If widening the type changes the signedness, then we can't perform
5850 this optimization as that changes the result. */
5851 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5852 break;
5853
5854 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5855 sub_strict_overflow_p = false;
5856 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5857 &sub_strict_overflow_p)) != 0
5858 && (t2 = extract_muldiv (op1, c, code, wide_type,
5859 &sub_strict_overflow_p)) != 0)
5860 {
5861 if (tree_int_cst_sgn (c) < 0)
5862 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5863 if (sub_strict_overflow_p)
5864 *strict_overflow_p = true;
5865 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5866 fold_convert (ctype, t2));
5867 }
5868 break;
5869
5870 case LSHIFT_EXPR: case RSHIFT_EXPR:
5871 /* If the second operand is constant, this is a multiplication
5872 or floor division, by a power of two, so we can treat it that
5873 way unless the multiplier or divisor overflows. Signed
5874 left-shift overflow is implementation-defined rather than
5875 undefined in C90, so do not convert signed left shift into
5876 multiplication. */
5877 if (TREE_CODE (op1) == INTEGER_CST
5878 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5879 /* const_binop may not detect overflow correctly,
5880 so check for it explicitly here. */
5881 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5882 && TREE_INT_CST_HIGH (op1) == 0
5883 && 0 != (t1 = fold_convert (ctype,
5884 const_binop (LSHIFT_EXPR,
5885 size_one_node,
5886 op1)))
5887 && !TREE_OVERFLOW (t1))
5888 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5889 ? MULT_EXPR : FLOOR_DIV_EXPR,
5890 ctype,
5891 fold_convert (ctype, op0),
5892 t1),
5893 c, code, wide_type, strict_overflow_p);
5894 break;
5895
5896 case PLUS_EXPR: case MINUS_EXPR:
5897 /* See if we can eliminate the operation on both sides. If we can, we
5898 can return a new PLUS or MINUS. If we can't, the only remaining
5899 cases where we can do anything are if the second operand is a
5900 constant. */
5901 sub_strict_overflow_p = false;
5902 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5903 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5904 if (t1 != 0 && t2 != 0
5905 && (code == MULT_EXPR
5906 /* If not multiplication, we can only do this if both operands
5907 are divisible by c. */
5908 || (multiple_of_p (ctype, op0, c)
5909 && multiple_of_p (ctype, op1, c))))
5910 {
5911 if (sub_strict_overflow_p)
5912 *strict_overflow_p = true;
5913 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5914 fold_convert (ctype, t2));
5915 }
5916
5917 /* If this was a subtraction, negate OP1 and turn the operation into
5918 an addition. This simplifies the logic below. */
5919 if (tcode == MINUS_EXPR)
5920 {
5921 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5922 /* If OP1 was not easily negatable, the constant may be OP0. */
5923 if (TREE_CODE (op0) == INTEGER_CST)
5924 {
5925 tree tem = op0;
5926 op0 = op1;
5927 op1 = tem;
5928 tem = t1;
5929 t1 = t2;
5930 t2 = tem;
5931 }
5932 }
5933
5934 if (TREE_CODE (op1) != INTEGER_CST)
5935 break;
5936
5937 /* If either OP1 or C are negative, this optimization is not safe for
5938 some of the division and remainder types while for others we need
5939 to change the code. */
5940 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5941 {
5942 if (code == CEIL_DIV_EXPR)
5943 code = FLOOR_DIV_EXPR;
5944 else if (code == FLOOR_DIV_EXPR)
5945 code = CEIL_DIV_EXPR;
5946 else if (code != MULT_EXPR
5947 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5948 break;
5949 }
5950
5951 /* If it's a multiply or a division/modulus operation of a multiple
5952 of our constant, do the operation and verify it doesn't overflow. */
5953 if (code == MULT_EXPR
5954 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5955 {
5956 op1 = const_binop (code, fold_convert (ctype, op1),
5957 fold_convert (ctype, c));
5958 /* We allow the constant to overflow with wrapping semantics. */
5959 if (op1 == 0
5960 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5961 break;
5962 }
5963 else
5964 break;
5965
5966 /* If we have an unsigned type, we cannot widen the operation since it
5967 will change the result if the original computation overflowed. */
5968 if (TYPE_UNSIGNED (ctype) && ctype != type)
5969 break;
5970
5971 /* If we were able to eliminate our operation from the first side,
5972 apply our operation to the second side and reform the PLUS. */
5973 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5974 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5975
5976 /* The last case is if we are a multiply. In that case, we can
5977 apply the distributive law to commute the multiply and addition
5978 if the multiplication of the constants doesn't overflow
5979 and overflow is defined. With undefined overflow
5980 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5981 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5982 return fold_build2 (tcode, ctype,
5983 fold_build2 (code, ctype,
5984 fold_convert (ctype, op0),
5985 fold_convert (ctype, c)),
5986 op1);
5987
5988 break;
5989
5990 case MULT_EXPR:
5991 /* We have a special case here if we are doing something like
5992 (C * 8) % 4 since we know that's zero. */
5993 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5994 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5995 /* If the multiplication can overflow we cannot optimize this. */
5996 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5997 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5998 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5999 {
6000 *strict_overflow_p = true;
6001 return omit_one_operand (type, integer_zero_node, op0);
6002 }
6003
6004 /* ... fall through ... */
6005
6006 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6007 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6008 /* If we can extract our operation from the LHS, do so and return a
6009 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6010 do something only if the second operand is a constant. */
6011 if (same_p
6012 && (t1 = extract_muldiv (op0, c, code, wide_type,
6013 strict_overflow_p)) != 0)
6014 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6015 fold_convert (ctype, op1));
6016 else if (tcode == MULT_EXPR && code == MULT_EXPR
6017 && (t1 = extract_muldiv (op1, c, code, wide_type,
6018 strict_overflow_p)) != 0)
6019 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6020 fold_convert (ctype, t1));
6021 else if (TREE_CODE (op1) != INTEGER_CST)
6022 return 0;
6023
6024 /* If these are the same operation types, we can associate them
6025 assuming no overflow. */
6026 if (tcode == code)
6027 {
6028 double_int mul;
6029 bool overflow_p;
6030 unsigned prec = TYPE_PRECISION (ctype);
6031 bool uns = TYPE_UNSIGNED (ctype);
6032 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6033 double_int dic = tree_to_double_int (c).ext (prec, uns);
6034 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6035 overflow_p = ((!uns && overflow_p)
6036 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6037 if (!double_int_fits_to_tree_p (ctype, mul)
6038 && ((uns && tcode != MULT_EXPR) || !uns))
6039 overflow_p = 1;
6040 if (!overflow_p)
6041 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6042 double_int_to_tree (ctype, mul));
6043 }
6044
6045 /* If these operations "cancel" each other, we have the main
6046 optimizations of this pass, which occur when either constant is a
6047 multiple of the other, in which case we replace this with either an
6048 operation of CODE or TCODE.
6049
6050 If we have an unsigned type, we cannot do this since it will change
6051 the result if the original computation overflowed. */
6052 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6053 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6054 || (tcode == MULT_EXPR
6055 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6056 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6057 && code != MULT_EXPR)))
6058 {
6059 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6060 {
6061 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6062 *strict_overflow_p = true;
6063 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6064 fold_convert (ctype,
6065 const_binop (TRUNC_DIV_EXPR,
6066 op1, c)));
6067 }
6068 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6069 {
6070 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6071 *strict_overflow_p = true;
6072 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6073 fold_convert (ctype,
6074 const_binop (TRUNC_DIV_EXPR,
6075 c, op1)));
6076 }
6077 }
6078 break;
6079
6080 default:
6081 break;
6082 }
6083
6084 return 0;
6085 }
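
/* A worked example of extract_muldiv: for (X * 8 + Y * 16) / 4 with
   signed X and Y, the PLUS_EXPR case recurses into both addends, where
   the MULT_EXPR cancels against the division because 8 and 16 are
   multiples of 4, giving X * 2 and Y * 4.  Both addends are multiples
   of 4, so the sum is rebuilt as (X * 2) + (Y * 4), and
   *STRICT_OVERFLOW_P is set because the result relies on signed
   overflow being undefined.  */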
6086 \f
6087 /* Return a node which has the indicated constant VALUE (either 0 or
6088 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6089 and is of the indicated TYPE. */
6090
6091 tree
6092 constant_boolean_node (bool value, tree type)
6093 {
6094 if (type == integer_type_node)
6095 return value ? integer_one_node : integer_zero_node;
6096 else if (type == boolean_type_node)
6097 return value ? boolean_true_node : boolean_false_node;
6098 else if (TREE_CODE (type) == VECTOR_TYPE)
6099 return build_vector_from_val (type,
6100 build_int_cst (TREE_TYPE (type),
6101 value ? -1 : 0));
6102 else
6103 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6104 }
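
/* For example, constant_boolean_node (true, integer_type_node) yields
   the node for 1, while for a four-element integer vector type it yields
   { -1, -1, -1, -1 }, the all-ones form that vector comparisons
   produce.  */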
6105
6106
6107 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6108 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6109 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6110 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6111 COND is the first argument to CODE; otherwise (as in the example
6112 given here), it is the second argument. TYPE is the type of the
6113 original expression. Return NULL_TREE if no simplification is
6114 possible. */
6115
6116 static tree
6117 fold_binary_op_with_conditional_arg (location_t loc,
6118 enum tree_code code,
6119 tree type, tree op0, tree op1,
6120 tree cond, tree arg, int cond_first_p)
6121 {
6122 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6123 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6124 tree test, true_value, false_value;
6125 tree lhs = NULL_TREE;
6126 tree rhs = NULL_TREE;
6127 enum tree_code cond_code = COND_EXPR;
6128
6129 if (TREE_CODE (cond) == COND_EXPR
6130 || TREE_CODE (cond) == VEC_COND_EXPR)
6131 {
6132 test = TREE_OPERAND (cond, 0);
6133 true_value = TREE_OPERAND (cond, 1);
6134 false_value = TREE_OPERAND (cond, 2);
6135 /* If this operand throws an exception, then it does not make
6136 sense to try to perform a logical or arithmetic operation
6137 involving it. */
6138 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6139 lhs = true_value;
6140 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6141 rhs = false_value;
6142 }
6143 else
6144 {
6145 tree testtype = TREE_TYPE (cond);
6146 test = cond;
6147 true_value = constant_boolean_node (true, testtype);
6148 false_value = constant_boolean_node (false, testtype);
6149 }
6150
6151 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6152 cond_code = VEC_COND_EXPR;
6153
6154 /* This transformation is only worthwhile if we don't have to wrap ARG
6155 in a SAVE_EXPR and the operation can be simplified without recursing
6156 on at least one of the branches once it is pushed inside the COND_EXPR. */
6157 if (!TREE_CONSTANT (arg)
6158 && (TREE_SIDE_EFFECTS (arg)
6159 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6160 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6161 return NULL_TREE;
6162
6163 arg = fold_convert_loc (loc, arg_type, arg);
6164 if (lhs == 0)
6165 {
6166 true_value = fold_convert_loc (loc, cond_type, true_value);
6167 if (cond_first_p)
6168 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6169 else
6170 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6171 }
6172 if (rhs == 0)
6173 {
6174 false_value = fold_convert_loc (loc, cond_type, false_value);
6175 if (cond_first_p)
6176 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6177 else
6178 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6179 }
6180
6181 /* Check that we have simplified at least one of the branches. */
6182 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6183 return NULL_TREE;
6184
6185 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6186 }
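
/* For example, 2 * (b ? 3 : x) is rewritten as b ? 6 : 2 * x: ARG is the
   constant 2, so it may safely be duplicated into both arms, and the
   true arm folds to a constant, satisfying the final check above.  */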
6187
6188 \f
6189 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6190
6191 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6192 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6193 ADDEND is the same as X.
6194
6195 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6196 and finite. The problematic cases are when X is zero, and its mode
6197 has signed zeros. In the case of rounding towards -infinity,
6198 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6199 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6200
6201 bool
6202 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6203 {
6204 if (!real_zerop (addend))
6205 return false;
6206
6207 /* Don't allow the fold with -fsignaling-nans. */
6208 if (HONOR_SNANS (TYPE_MODE (type)))
6209 return false;
6210
6211 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6212 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6213 return true;
6214
6215 /* In a vector or complex, we would need to check the sign of all zeros. */
6216 if (TREE_CODE (addend) != REAL_CST)
6217 return false;
6218
6219 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6220 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6221 negate = !negate;
6222
6223 /* The mode has signed zeros, and we have to honor their sign.
6224 In this situation, there is only one case we can return true for.
6225 X - 0 is the same as X unless rounding towards -infinity is
6226 supported. */
6227 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6228 }
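
/* Concretely: in round-to-nearest, (-0.0) + 0.0 is +0.0, so X + 0.0
   cannot be folded to X when X might be -0.0; and when rounding towards
   -infinity, 0.0 - 0.0 is -0.0, so X - 0.0 cannot be folded to X either.
   Hence only the negated case without sign-dependent rounding
   succeeds.  */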
6229
6230 /* Subroutine of fold() that checks comparisons of built-in math
6231 functions against real constants.
6232
6233 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6234 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6235 is the type of the result and ARG0 and ARG1 are the operands of the
6236 comparison. ARG1 must be a TREE_REAL_CST.
6237
6238 The function returns the constant folded tree if a simplification
6239 can be made, and NULL_TREE otherwise. */
6240
6241 static tree
6242 fold_mathfn_compare (location_t loc,
6243 enum built_in_function fcode, enum tree_code code,
6244 tree type, tree arg0, tree arg1)
6245 {
6246 REAL_VALUE_TYPE c;
6247
6248 if (BUILTIN_SQRT_P (fcode))
6249 {
6250 tree arg = CALL_EXPR_ARG (arg0, 0);
6251 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6252
6253 c = TREE_REAL_CST (arg1);
6254 if (REAL_VALUE_NEGATIVE (c))
6255 {
6256 /* sqrt(x) < y is always false, if y is negative. */
6257 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6258 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6259
6260 /* sqrt(x) > y is always true, if y is negative and we
6261 don't care about NaNs, i.e. negative values of x. */
6262 if (code == NE_EXPR || !HONOR_NANS (mode))
6263 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6264
6265 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6266 return fold_build2_loc (loc, GE_EXPR, type, arg,
6267 build_real (TREE_TYPE (arg), dconst0));
6268 }
6269 else if (code == GT_EXPR || code == GE_EXPR)
6270 {
6271 REAL_VALUE_TYPE c2;
6272
6273 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6274 real_convert (&c2, mode, &c2);
6275
6276 if (REAL_VALUE_ISINF (c2))
6277 {
6278 /* sqrt(x) > y is x == +Inf, when y is very large. */
6279 if (HONOR_INFINITIES (mode))
6280 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6281 build_real (TREE_TYPE (arg), c2));
6282
6283 /* sqrt(x) > y is always false, when y is very large
6284 and we don't care about infinities. */
6285 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6286 }
6287
6288 /* sqrt(x) > c is the same as x > c*c. */
6289 return fold_build2_loc (loc, code, type, arg,
6290 build_real (TREE_TYPE (arg), c2));
6291 }
6292 else if (code == LT_EXPR || code == LE_EXPR)
6293 {
6294 REAL_VALUE_TYPE c2;
6295
6296 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6297 real_convert (&c2, mode, &c2);
6298
6299 if (REAL_VALUE_ISINF (c2))
6300 {
6301 /* sqrt(x) < y is always true, when y is a very large
6302 value and we don't care about NaNs or Infinities. */
6303 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6304 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6305
6306 /* sqrt(x) < y is x != +Inf when y is very large and we
6307 don't care about NaNs. */
6308 if (! HONOR_NANS (mode))
6309 return fold_build2_loc (loc, NE_EXPR, type, arg,
6310 build_real (TREE_TYPE (arg), c2));
6311
6312 /* sqrt(x) < y is x >= 0 when y is very large and we
6313 don't care about Infinities. */
6314 if (! HONOR_INFINITIES (mode))
6315 return fold_build2_loc (loc, GE_EXPR, type, arg,
6316 build_real (TREE_TYPE (arg), dconst0));
6317
6318 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6319 arg = save_expr (arg);
6320 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6321 fold_build2_loc (loc, GE_EXPR, type, arg,
6322 build_real (TREE_TYPE (arg),
6323 dconst0)),
6324 fold_build2_loc (loc, NE_EXPR, type, arg,
6325 build_real (TREE_TYPE (arg),
6326 c2)));
6327 }
6328
6329 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6330 if (! HONOR_NANS (mode))
6331 return fold_build2_loc (loc, code, type, arg,
6332 build_real (TREE_TYPE (arg), c2));
6333
6334 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6335 arg = save_expr (arg);
6336 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6337 fold_build2_loc (loc, GE_EXPR, type, arg,
6338 build_real (TREE_TYPE (arg),
6339 dconst0)),
6340 fold_build2_loc (loc, code, type, arg,
6341 build_real (TREE_TYPE (arg),
6342 c2)));
6343 }
6344 }
6345
6346 return NULL_TREE;
6347 }
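
/* For example, with c = 2.0 (so c*c = 4.0 is finite), sqrt (x) > 2.0
   folds directly to x > 4.0, while sqrt (x) < 2.0 needs the domain test
   and, when NaNs are honored, becomes x >= 0.0 && x < 4.0.  */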
6348
6349 /* Subroutine of fold() that optimizes comparisons against Infinities,
6350 either +Inf or -Inf.
6351
6352 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6353 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6354 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6355
6356 The function returns the constant folded tree if a simplification
6357 can be made, and NULL_TREE otherwise. */
6358
6359 static tree
6360 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6361 tree arg0, tree arg1)
6362 {
6363 enum machine_mode mode;
6364 REAL_VALUE_TYPE max;
6365 tree temp;
6366 bool neg;
6367
6368 mode = TYPE_MODE (TREE_TYPE (arg0));
6369
6370 /* For negative infinity swap the sense of the comparison. */
6371 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6372 if (neg)
6373 code = swap_tree_comparison (code);
6374
6375 switch (code)
6376 {
6377 case GT_EXPR:
6378 /* x > +Inf is always false, if we ignore sNaNs. */
6379 if (HONOR_SNANS (mode))
6380 return NULL_TREE;
6381 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6382
6383 case LE_EXPR:
6384 /* x <= +Inf is always true, if we don't care about NaNs. */
6385 if (! HONOR_NANS (mode))
6386 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6387
6388 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6389 arg0 = save_expr (arg0);
6390 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6391
6392 case EQ_EXPR:
6393 case GE_EXPR:
6394 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6395 real_maxval (&max, neg, mode);
6396 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6397 arg0, build_real (TREE_TYPE (arg0), max));
6398
6399 case LT_EXPR:
6400 /* x < +Inf is always equal to x <= DBL_MAX. */
6401 real_maxval (&max, neg, mode);
6402 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6403 arg0, build_real (TREE_TYPE (arg0), max));
6404
6405 case NE_EXPR:
6406 /* x != +Inf is always equal to !(x > DBL_MAX). */
6407 real_maxval (&max, neg, mode);
6408 if (! HONOR_NANS (mode))
6409 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6410 arg0, build_real (TREE_TYPE (arg0), max));
6411
6412 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6413 arg0, build_real (TREE_TYPE (arg0), max));
6414 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6415
6416 default:
6417 break;
6418 }
6419
6420 return NULL_TREE;
6421 }
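
/* For example, for doubles, x < +Inf becomes x <= DBL_MAX and
   x >= +Inf becomes x > DBL_MAX; for negative infinity the sense is
   swapped first, so x > -Inf becomes x >= -DBL_MAX.  */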
6422
6423 /* Subroutine of fold() that optimizes comparisons of a division by
6424 a nonzero integer constant against an integer constant, i.e.
6425 X/C1 op C2.
6426
6427 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6428 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6429 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6430
6431 The function returns the constant folded tree if a simplification
6432 can be made, and NULL_TREE otherwise. */
6433
6434 static tree
6435 fold_div_compare (location_t loc,
6436 enum tree_code code, tree type, tree arg0, tree arg1)
6437 {
6438 tree prod, tmp, hi, lo;
6439 tree arg00 = TREE_OPERAND (arg0, 0);
6440 tree arg01 = TREE_OPERAND (arg0, 1);
6441 double_int val;
6442 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6443 bool neg_overflow;
6444 bool overflow;
6445
6446 /* We have to do this the hard way to detect unsigned overflow.
6447 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6448 val = TREE_INT_CST (arg01)
6449 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6450 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6451 neg_overflow = false;
6452
6453 if (unsigned_p)
6454 {
6455 tmp = int_const_binop (MINUS_EXPR, arg01,
6456 build_int_cst (TREE_TYPE (arg01), 1));
6457 lo = prod;
6458
6459 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6460 val = TREE_INT_CST (prod)
6461 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6462 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6463 -1, overflow | TREE_OVERFLOW (prod));
6464 }
6465 else if (tree_int_cst_sgn (arg01) >= 0)
6466 {
6467 tmp = int_const_binop (MINUS_EXPR, arg01,
6468 build_int_cst (TREE_TYPE (arg01), 1));
6469 switch (tree_int_cst_sgn (arg1))
6470 {
6471 case -1:
6472 neg_overflow = true;
6473 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6474 hi = prod;
6475 break;
6476
6477 case 0:
6478 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6479 hi = tmp;
6480 break;
6481
6482 case 1:
6483 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6484 lo = prod;
6485 break;
6486
6487 default:
6488 gcc_unreachable ();
6489 }
6490 }
6491 else
6492 {
6493 /* A negative divisor reverses the relational operators. */
6494 code = swap_tree_comparison (code);
6495
6496 tmp = int_const_binop (PLUS_EXPR, arg01,
6497 build_int_cst (TREE_TYPE (arg01), 1));
6498 switch (tree_int_cst_sgn (arg1))
6499 {
6500 case -1:
6501 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6502 lo = prod;
6503 break;
6504
6505 case 0:
6506 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6507 lo = tmp;
6508 break;
6509
6510 case 1:
6511 neg_overflow = true;
6512 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6513 hi = prod;
6514 break;
6515
6516 default:
6517 gcc_unreachable ();
6518 }
6519 }
6520
6521 switch (code)
6522 {
6523 case EQ_EXPR:
6524 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6525 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6526 if (TREE_OVERFLOW (hi))
6527 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6528 if (TREE_OVERFLOW (lo))
6529 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6530 return build_range_check (loc, type, arg00, 1, lo, hi);
6531
6532 case NE_EXPR:
6533 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6534 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6535 if (TREE_OVERFLOW (hi))
6536 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6537 if (TREE_OVERFLOW (lo))
6538 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6539 return build_range_check (loc, type, arg00, 0, lo, hi);
6540
6541 case LT_EXPR:
6542 if (TREE_OVERFLOW (lo))
6543 {
6544 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6545 return omit_one_operand_loc (loc, type, tmp, arg00);
6546 }
6547 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6548
6549 case LE_EXPR:
6550 if (TREE_OVERFLOW (hi))
6551 {
6552 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6553 return omit_one_operand_loc (loc, type, tmp, arg00);
6554 }
6555 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6556
6557 case GT_EXPR:
6558 if (TREE_OVERFLOW (hi))
6559 {
6560 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6561 return omit_one_operand_loc (loc, type, tmp, arg00);
6562 }
6563 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6564
6565 case GE_EXPR:
6566 if (TREE_OVERFLOW (lo))
6567 {
6568 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6569 return omit_one_operand_loc (loc, type, tmp, arg00);
6570 }
6571 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6572
6573 default:
6574 break;
6575 }
6576
6577 return NULL_TREE;
6578 }
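
/* A worked example with unsigned X: for X / 3 == 5 we get prod = 15 and
   tmp = 2, so the quotient is 5 exactly when X is in [15, 17], and the
   comparison becomes a range check that build_range_check typically
   emits as X - 15 <= 2.  */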
6579
6580
6581 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6582 equality/inequality test, then return a simplified form of the test
6583 using a sign testing. Otherwise return NULL. TYPE is the desired
6584 result type. */
6585
6586 static tree
6587 fold_single_bit_test_into_sign_test (location_t loc,
6588 enum tree_code code, tree arg0, tree arg1,
6589 tree result_type)
6590 {
6591 /* If this is testing a single bit, we can optimize the test. */
6592 if ((code == NE_EXPR || code == EQ_EXPR)
6593 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6594 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6595 {
6596 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6597 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6598 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6599
6600 if (arg00 != NULL_TREE
6601 /* This is only a win if casting to a signed type is cheap,
6602 i.e. when arg00's type is not a partial mode. */
6603 && TYPE_PRECISION (TREE_TYPE (arg00))
6604 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6605 {
6606 tree stype = signed_type_for (TREE_TYPE (arg00));
6607 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6608 result_type,
6609 fold_convert_loc (loc, stype, arg00),
6610 build_int_cst (stype, 0));
6611 }
6612 }
6613
6614 return NULL_TREE;
6615 }
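
/* For example, with 32-bit unsigned X, (X & 0x80000000) != 0 tests
   exactly the sign bit and is rewritten as (int) X < 0; likewise
   (X & 0x80000000) == 0 becomes (int) X >= 0.  */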
6616
6617 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6618 equality/inequality test, then return a simplified form of
6619 the test using shifts and logical operations. Otherwise return
6620 NULL. TYPE is the desired result type. */
6621
6622 tree
6623 fold_single_bit_test (location_t loc, enum tree_code code,
6624 tree arg0, tree arg1, tree result_type)
6625 {
6626 /* If this is testing a single bit, we can optimize the test. */
6627 if ((code == NE_EXPR || code == EQ_EXPR)
6628 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6629 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6630 {
6631 tree inner = TREE_OPERAND (arg0, 0);
6632 tree type = TREE_TYPE (arg0);
6633 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6634 enum machine_mode operand_mode = TYPE_MODE (type);
6635 int ops_unsigned;
6636 tree signed_type, unsigned_type, intermediate_type;
6637 tree tem, one;
6638
6639 /* First, see if we can fold the single bit test into a sign-bit
6640 test. */
6641 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6642 result_type);
6643 if (tem)
6644 return tem;
6645
6646 /* Otherwise we have (A & C) != 0 where C is a single bit,
6647 convert that into ((A >> C2) & 1), where C2 = log2(C).
6648 Similarly for (A & C) == 0. */
6649
6650 /* If INNER is a right shift of a constant and it plus BITNUM does
6651 not overflow, adjust BITNUM and INNER. */
6652 if (TREE_CODE (inner) == RSHIFT_EXPR
6653 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6654 && host_integerp (TREE_OPERAND (inner, 1), 1)
6655 && bitnum < TYPE_PRECISION (type)
6656 && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
6657 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6658 {
6659 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6660 inner = TREE_OPERAND (inner, 0);
6661 }
6662
6663 /* If we are going to be able to omit the AND below, we must do our
6664 operations as unsigned. If we must use the AND, we have a choice.
6665 Normally unsigned is faster, but for some machines signed is. */
6666 #ifdef LOAD_EXTEND_OP
6667 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6668 && !flag_syntax_only) ? 0 : 1;
6669 #else
6670 ops_unsigned = 1;
6671 #endif
6672
6673 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6674 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6675 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6676 inner = fold_convert_loc (loc, intermediate_type, inner);
6677
6678 if (bitnum != 0)
6679 inner = build2 (RSHIFT_EXPR, intermediate_type,
6680 inner, size_int (bitnum));
6681
6682 one = build_int_cst (intermediate_type, 1);
6683
6684 if (code == EQ_EXPR)
6685 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6686
6687 /* Put the AND last so it can combine with more things. */
6688 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6689
6690 /* Make sure to return the proper type. */
6691 inner = fold_convert_loc (loc, result_type, inner);
6692
6693 return inner;
6694 }
6695 return NULL_TREE;
6696 }
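
/* For example, (A & 8) != 0 becomes ((A >> 3) & 1), and (A & 8) == 0
   becomes (((A >> 3) ^ 1) & 1): the XOR with 1 inverts the extracted
   bit for the equality form, and the AND is emitted last so it can
   combine with surrounding code.  */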
6697
6698 /* Check whether we are allowed to reorder operands arg0 and arg1,
6699 such that the evaluation of arg1 occurs before arg0. */
6700
6701 static bool
6702 reorder_operands_p (const_tree arg0, const_tree arg1)
6703 {
6704 if (! flag_evaluation_order)
6705 return true;
6706 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6707 return true;
6708 return ! TREE_SIDE_EFFECTS (arg0)
6709 && ! TREE_SIDE_EFFECTS (arg1);
6710 }
6711
6712 /* Test whether it is preferable to swap two operands, ARG0 and
6713 ARG1, for example because ARG0 is an integer constant and ARG1
6714 isn't. If REORDER is true, only recommend swapping if we can
6715 evaluate the operands in reverse order. */
6716
6717 bool
6718 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6719 {
6720 STRIP_SIGN_NOPS (arg0);
6721 STRIP_SIGN_NOPS (arg1);
6722
6723 if (TREE_CODE (arg1) == INTEGER_CST)
6724 return 0;
6725 if (TREE_CODE (arg0) == INTEGER_CST)
6726 return 1;
6727
6728 if (TREE_CODE (arg1) == REAL_CST)
6729 return 0;
6730 if (TREE_CODE (arg0) == REAL_CST)
6731 return 1;
6732
6733 if (TREE_CODE (arg1) == FIXED_CST)
6734 return 0;
6735 if (TREE_CODE (arg0) == FIXED_CST)
6736 return 1;
6737
6738 if (TREE_CODE (arg1) == COMPLEX_CST)
6739 return 0;
6740 if (TREE_CODE (arg0) == COMPLEX_CST)
6741 return 1;
6742
6743 if (TREE_CONSTANT (arg1))
6744 return 0;
6745 if (TREE_CONSTANT (arg0))
6746 return 1;
6747
6748 if (optimize_function_for_size_p (cfun))
6749 return 0;
6750
6751 if (reorder && flag_evaluation_order
6752 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6753 return 0;
6754
6755 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6756 for commutative and comparison operators. Ensuring a canonical
6757 form allows the optimizers to find additional redundancies without
6758 having to explicitly check for both orderings. */
6759 if (TREE_CODE (arg0) == SSA_NAME
6760 && TREE_CODE (arg1) == SSA_NAME
6761 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6762 return 1;
6763
6764 /* Put SSA_NAMEs last. */
6765 if (TREE_CODE (arg1) == SSA_NAME)
6766 return 0;
6767 if (TREE_CODE (arg0) == SSA_NAME)
6768 return 1;
6769
6770 /* Put variables last. */
6771 if (DECL_P (arg1))
6772 return 0;
6773 if (DECL_P (arg0))
6774 return 1;
6775
6776 return 0;
6777 }
6778
6779 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6780 ARG0 is extended to a wider type. */
6781
6782 static tree
6783 fold_widened_comparison (location_t loc, enum tree_code code,
6784 tree type, tree arg0, tree arg1)
6785 {
6786 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6787 tree arg1_unw;
6788 tree shorter_type, outer_type;
6789 tree min, max;
6790 bool above, below;
6791
6792 if (arg0_unw == arg0)
6793 return NULL_TREE;
6794 shorter_type = TREE_TYPE (arg0_unw);
6795
6796 #ifdef HAVE_canonicalize_funcptr_for_compare
6797 /* Disable this optimization if we're casting a function pointer
6798 type on targets that require function pointer canonicalization. */
6799 if (HAVE_canonicalize_funcptr_for_compare
6800 && TREE_CODE (shorter_type) == POINTER_TYPE
6801 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6802 return NULL_TREE;
6803 #endif
6804
6805 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6806 return NULL_TREE;
6807
6808 arg1_unw = get_unwidened (arg1, NULL_TREE);
6809
6810 /* If possible, express the comparison in the shorter mode. */
6811 if ((code == EQ_EXPR || code == NE_EXPR
6812 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6813 && (TREE_TYPE (arg1_unw) == shorter_type
6814 || ((TYPE_PRECISION (shorter_type)
6815 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6816 && (TYPE_UNSIGNED (shorter_type)
6817 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6818 || (TREE_CODE (arg1_unw) == INTEGER_CST
6819 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6820 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6821 && int_fits_type_p (arg1_unw, shorter_type))))
6822 return fold_build2_loc (loc, code, type, arg0_unw,
6823 fold_convert_loc (loc, shorter_type, arg1_unw));
6824
6825 if (TREE_CODE (arg1_unw) != INTEGER_CST
6826 || TREE_CODE (shorter_type) != INTEGER_TYPE
6827 || !int_fits_type_p (arg1_unw, shorter_type))
6828 return NULL_TREE;
6829
6830 /* If we are comparing with an integer that does not fit into the range
6831 of the shorter type, the result is known. */
6832 outer_type = TREE_TYPE (arg1_unw);
6833 min = lower_bound_in_type (outer_type, shorter_type);
6834 max = upper_bound_in_type (outer_type, shorter_type);
6835
6836 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6837 max, arg1_unw));
6838 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6839 arg1_unw, min));
6840
6841 switch (code)
6842 {
6843 case EQ_EXPR:
6844 if (above || below)
6845 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6846 break;
6847
6848 case NE_EXPR:
6849 if (above || below)
6850 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6851 break;
6852
6853 case LT_EXPR:
6854 case LE_EXPR:
6855 if (above)
6856 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6857 else if (below)
6858 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6859
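      /* FALLTHROUGH */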
6860 case GT_EXPR:
6861 case GE_EXPR:
6862 if (above)
6863 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6864 else if (below)
6865 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
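      /* FALLTHROUGH */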
6866
6867 default:
6868 break;
6869 }
6870
6871 return NULL_TREE;
6872 }
6873
6874 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6875 ARG0 just the signedness is changed. */
6876
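/* Illustrative sketch (hypothetical source, not from this file): with
   32-bit int and unsigned int,

     unsigned int u;
     ...
     if ((int) u == 3)

   changes only the signedness of u, not its precision, so the
   comparison can be rewritten in the inner type as u == 3U.  */
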
6877 static tree
6878 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6879 tree arg0, tree arg1)
6880 {
6881 tree arg0_inner;
6882 tree inner_type, outer_type;
6883
6884 if (!CONVERT_EXPR_P (arg0))
6885 return NULL_TREE;
6886
6887 outer_type = TREE_TYPE (arg0);
6888 arg0_inner = TREE_OPERAND (arg0, 0);
6889 inner_type = TREE_TYPE (arg0_inner);
6890
6891 #ifdef HAVE_canonicalize_funcptr_for_compare
6892 /* Disable this optimization if we're casting a function pointer
6893 type on targets that require function pointer canonicalization. */
6894 if (HAVE_canonicalize_funcptr_for_compare
6895 && TREE_CODE (inner_type) == POINTER_TYPE
6896 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6897 return NULL_TREE;
6898 #endif
6899
6900 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6901 return NULL_TREE;
6902
6903 if (TREE_CODE (arg1) != INTEGER_CST
6904 && !(CONVERT_EXPR_P (arg1)
6905 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6906 return NULL_TREE;
6907
6908 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6909 && code != NE_EXPR
6910 && code != EQ_EXPR)
6911 return NULL_TREE;
6912
6913 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6914 return NULL_TREE;
6915
6916 if (TREE_CODE (arg1) == INTEGER_CST)
6917 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6918 0, TREE_OVERFLOW (arg1));
6919 else
6920 arg1 = fold_convert_loc (loc, inner_type, arg1);
6921
6922 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6923 }
6924
6925 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6926 step of the array. Reconstructs s and delta in the case of s *
6927 delta being an integer constant (and thus already folded). ADDR is
6928 the address. MULT is the multiplicative expression. If the
6929 function succeeds, the new address expression is returned.
6930 Otherwise NULL_TREE is returned. LOC is the location of the
6931 resulting expression. */
6932
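/* Illustrative sketch (hypothetical source, not from this file): for

     int a[16];
     int *p = &a[2] + i;      // lowered to &a[2] p+ i * 4

   the multiplication by the element size 4 is folded back into the
   index, producing &a[2 + i].  */
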
6933 static tree
6934 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6935 {
6936 tree s, delta, step;
6937 tree ref = TREE_OPERAND (addr, 0), pref;
6938 tree ret, pos;
6939 tree itype;
6940 bool mdim = false;
6941
6942 /* Strip the nops that might be added when converting op1 to sizetype. */
6943 STRIP_NOPS (op1);
6944
6945 /* Canonicalize op1 into a possibly non-constant delta
6946 and an INTEGER_CST s. */
6947 if (TREE_CODE (op1) == MULT_EXPR)
6948 {
6949 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6950
6951 STRIP_NOPS (arg0);
6952 STRIP_NOPS (arg1);
6953
6954 if (TREE_CODE (arg0) == INTEGER_CST)
6955 {
6956 s = arg0;
6957 delta = arg1;
6958 }
6959 else if (TREE_CODE (arg1) == INTEGER_CST)
6960 {
6961 s = arg1;
6962 delta = arg0;
6963 }
6964 else
6965 return NULL_TREE;
6966 }
6967 else if (TREE_CODE (op1) == INTEGER_CST)
6968 {
6969 delta = op1;
6970 s = NULL_TREE;
6971 }
6972 else
6973 {
6974       /* Treat op1 as delta * 1. */
6975 delta = op1;
6976 s = integer_one_node;
6977 }
6978
6979 /* Handle &x.array the same as we would handle &x.array[0]. */
6980 if (TREE_CODE (ref) == COMPONENT_REF
6981 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6982 {
6983 tree domain;
6984
6985 /* Remember if this was a multi-dimensional array. */
6986 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6987 mdim = true;
6988
6989 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6990 if (! domain)
6991 goto cont;
6992 itype = TREE_TYPE (domain);
6993
6994 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6995 if (TREE_CODE (step) != INTEGER_CST)
6996 goto cont;
6997
6998 if (s)
6999 {
7000 if (! tree_int_cst_equal (step, s))
7001 goto cont;
7002 }
7003 else
7004 {
7005 	  /* Check whether delta is a multiple of step. */
7006 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7007 if (! tmp)
7008 goto cont;
7009 delta = tmp;
7010 }
7011
7012 /* Only fold here if we can verify we do not overflow one
7013 dimension of a multi-dimensional array. */
7014 if (mdim)
7015 {
7016 tree tmp;
7017
7018 if (!TYPE_MIN_VALUE (domain)
7019 || !TYPE_MAX_VALUE (domain)
7020 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7021 goto cont;
7022
7023 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7024 fold_convert_loc (loc, itype,
7025 TYPE_MIN_VALUE (domain)),
7026 fold_convert_loc (loc, itype, delta));
7027 if (TREE_CODE (tmp) != INTEGER_CST
7028 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7029 goto cont;
7030 }
7031
7032 /* We found a suitable component reference. */
7033
7034 pref = TREE_OPERAND (addr, 0);
7035 ret = copy_node (pref);
7036 SET_EXPR_LOCATION (ret, loc);
7037
7038 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7039 fold_build2_loc
7040 (loc, PLUS_EXPR, itype,
7041 fold_convert_loc (loc, itype,
7042 TYPE_MIN_VALUE
7043 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7044 fold_convert_loc (loc, itype, delta)),
7045 NULL_TREE, NULL_TREE);
7046 return build_fold_addr_expr_loc (loc, ret);
7047 }
7048
7049 cont:
7050
7051 for (;; ref = TREE_OPERAND (ref, 0))
7052 {
7053 if (TREE_CODE (ref) == ARRAY_REF)
7054 {
7055 tree domain;
7056
7057 /* Remember if this was a multi-dimensional array. */
7058 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7059 mdim = true;
7060
7061 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7062 if (! domain)
7063 continue;
7064 itype = TREE_TYPE (domain);
7065
7066 step = array_ref_element_size (ref);
7067 if (TREE_CODE (step) != INTEGER_CST)
7068 continue;
7069
7070 if (s)
7071 {
7072 if (! tree_int_cst_equal (step, s))
7073 continue;
7074 }
7075 else
7076 {
7077 	      /* Check whether delta is a multiple of step. */
7078 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7079 if (! tmp)
7080 continue;
7081 delta = tmp;
7082 }
7083
7084 /* Only fold here if we can verify we do not overflow one
7085 dimension of a multi-dimensional array. */
7086 if (mdim)
7087 {
7088 tree tmp;
7089
7090 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7091 || !TYPE_MAX_VALUE (domain)
7092 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7093 continue;
7094
7095 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7096 fold_convert_loc (loc, itype,
7097 TREE_OPERAND (ref, 1)),
7098 fold_convert_loc (loc, itype, delta));
7099 if (!tmp
7100 || TREE_CODE (tmp) != INTEGER_CST
7101 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7102 continue;
7103 }
7104
7105 break;
7106 }
7107 else
7108 mdim = false;
7109
7110 if (!handled_component_p (ref))
7111 return NULL_TREE;
7112 }
7113
7114 /* We found a suitable array reference. So copy everything up to it,
7115 and replace the index. */
7116
7117 pref = TREE_OPERAND (addr, 0);
7118 ret = copy_node (pref);
7119 SET_EXPR_LOCATION (ret, loc);
7120 pos = ret;
7121
7122 while (pref != ref)
7123 {
7124 pref = TREE_OPERAND (pref, 0);
7125 TREE_OPERAND (pos, 0) = copy_node (pref);
7126 pos = TREE_OPERAND (pos, 0);
7127 }
7128
7129 TREE_OPERAND (pos, 1)
7130 = fold_build2_loc (loc, PLUS_EXPR, itype,
7131 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7132 fold_convert_loc (loc, itype, delta));
7133 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7134 }
7135
7136
7137 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7138 means A >= Y && A != MAX, but in this case we know that
7139 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7140
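/* Worked example (hypothetical, for illustration): for unsigned char
   operands, BOUND a < x guarantees a < x <= 255, so a + 1 cannot wrap;
   INEQ a + 1 > y therefore means a >= y, and

     a < x && a + 1 > y

   becomes

     a < x && a >= y.  */
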
7141 static tree
7142 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7143 {
7144 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7145
7146 if (TREE_CODE (bound) == LT_EXPR)
7147 a = TREE_OPERAND (bound, 0);
7148 else if (TREE_CODE (bound) == GT_EXPR)
7149 a = TREE_OPERAND (bound, 1);
7150 else
7151 return NULL_TREE;
7152
7153 typea = TREE_TYPE (a);
7154 if (!INTEGRAL_TYPE_P (typea)
7155 && !POINTER_TYPE_P (typea))
7156 return NULL_TREE;
7157
7158 if (TREE_CODE (ineq) == LT_EXPR)
7159 {
7160 a1 = TREE_OPERAND (ineq, 1);
7161 y = TREE_OPERAND (ineq, 0);
7162 }
7163 else if (TREE_CODE (ineq) == GT_EXPR)
7164 {
7165 a1 = TREE_OPERAND (ineq, 0);
7166 y = TREE_OPERAND (ineq, 1);
7167 }
7168 else
7169 return NULL_TREE;
7170
7171 if (TREE_TYPE (a1) != typea)
7172 return NULL_TREE;
7173
7174 if (POINTER_TYPE_P (typea))
7175 {
7176 	/* Convert the pointer types to integers before taking the difference. */
7177 tree ta = fold_convert_loc (loc, ssizetype, a);
7178 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7179 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7180 }
7181 else
7182 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7183
7184 if (!diff || !integer_onep (diff))
7185 return NULL_TREE;
7186
7187 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7188 }
7189
7190 /* Fold a sum or difference of at least one multiplication.
7191 Returns the folded tree or NULL if no simplification could be made. */
7192
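/* Illustrative folds (hypothetical source, not from this file):

     x * 9 + x * 7    ->  x * 16            // (A * C) + (B * C) -> (A+B) * C
     x * 8 + x        ->  x * 9             // (A * C) + A -> A * (C+1)
     i * 12 + j * 4   ->  (i * 3 + j) * 4   // common power-of-two factor  */
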
7193 static tree
7194 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7195 tree arg0, tree arg1)
7196 {
7197 tree arg00, arg01, arg10, arg11;
7198 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7199
7200 /* (A * C) +- (B * C) -> (A+-B) * C.
7201 (A * C) +- A -> A * (C+-1).
7202 We are most concerned about the case where C is a constant,
7203 but other combinations show up during loop reduction. Since
7204 it is not difficult, try all four possibilities. */
7205
7206 if (TREE_CODE (arg0) == MULT_EXPR)
7207 {
7208 arg00 = TREE_OPERAND (arg0, 0);
7209 arg01 = TREE_OPERAND (arg0, 1);
7210 }
7211 else if (TREE_CODE (arg0) == INTEGER_CST)
7212 {
7213 arg00 = build_one_cst (type);
7214 arg01 = arg0;
7215 }
7216 else
7217 {
7218 /* We cannot generate constant 1 for fract. */
7219 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7220 return NULL_TREE;
7221 arg00 = arg0;
7222 arg01 = build_one_cst (type);
7223 }
7224 if (TREE_CODE (arg1) == MULT_EXPR)
7225 {
7226 arg10 = TREE_OPERAND (arg1, 0);
7227 arg11 = TREE_OPERAND (arg1, 1);
7228 }
7229 else if (TREE_CODE (arg1) == INTEGER_CST)
7230 {
7231 arg10 = build_one_cst (type);
7232       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7233 	 the purpose of this canonicalization. */
7234 if (TREE_INT_CST_HIGH (arg1) == -1
7235 && negate_expr_p (arg1)
7236 && code == PLUS_EXPR)
7237 {
7238 arg11 = negate_expr (arg1);
7239 code = MINUS_EXPR;
7240 }
7241 else
7242 arg11 = arg1;
7243 }
7244 else
7245 {
7246 /* We cannot generate constant 1 for fract. */
7247 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7248 return NULL_TREE;
7249 arg10 = arg1;
7250 arg11 = build_one_cst (type);
7251 }
7252 same = NULL_TREE;
7253
7254 if (operand_equal_p (arg01, arg11, 0))
7255 same = arg01, alt0 = arg00, alt1 = arg10;
7256 else if (operand_equal_p (arg00, arg10, 0))
7257 same = arg00, alt0 = arg01, alt1 = arg11;
7258 else if (operand_equal_p (arg00, arg11, 0))
7259 same = arg00, alt0 = arg01, alt1 = arg10;
7260 else if (operand_equal_p (arg01, arg10, 0))
7261 same = arg01, alt0 = arg00, alt1 = arg11;
7262
7263 /* No identical multiplicands; see if we can find a common
7264 power-of-two factor in non-power-of-two multiplies. This
7265 can help in multi-dimensional array access. */
7266 else if (host_integerp (arg01, 0)
7267 && host_integerp (arg11, 0))
7268 {
7269 HOST_WIDE_INT int01, int11, tmp;
7270 bool swap = false;
7271 tree maybe_same;
7272 int01 = TREE_INT_CST_LOW (arg01);
7273 int11 = TREE_INT_CST_LOW (arg11);
7274
7275 /* Move min of absolute values to int11. */
7276 if (absu_hwi (int01) < absu_hwi (int11))
7277 {
7278 tmp = int01, int01 = int11, int11 = tmp;
7279 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7280 maybe_same = arg01;
7281 swap = true;
7282 }
7283 else
7284 maybe_same = arg11;
7285
7286 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7287 	  /* The remainder should not be a constant, otherwise we
7288 	     would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7289 	     increases the number of multiplications necessary. */
7290 && TREE_CODE (arg10) != INTEGER_CST)
7291 {
7292 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7293 build_int_cst (TREE_TYPE (arg00),
7294 int01 / int11));
7295 alt1 = arg10;
7296 same = maybe_same;
7297 if (swap)
7298 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7299 }
7300 }
7301
7302 if (same)
7303 return fold_build2_loc (loc, MULT_EXPR, type,
7304 fold_build2_loc (loc, code, type,
7305 fold_convert_loc (loc, type, alt0),
7306 fold_convert_loc (loc, type, alt1)),
7307 fold_convert_loc (loc, type, same));
7308
7309 return NULL_TREE;
7310 }
7311
7312 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7313 specified by EXPR into the buffer PTR of length LEN bytes.
7314 Return the number of bytes placed in the buffer, or zero
7315 upon failure. */
7316
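/* Minimal host-side sketch of the byte extraction below (illustrative
   only; assumes a little-endian layout and 8-bit units):

     unsigned char buf[4];
     unsigned HOST_WIDE_INT low = 0x11223344;
     int byte;
     for (byte = 0; byte < 4; byte++)
       buf[byte] = (unsigned char) (low >> (byte * 8));
     // buf now holds { 0x44, 0x33, 0x22, 0x11 }  */
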
7317 static int
7318 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7319 {
7320 tree type = TREE_TYPE (expr);
7321 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7322 int byte, offset, word, words;
7323 unsigned char value;
7324
7325 if (total_bytes > len)
7326 return 0;
7327 words = total_bytes / UNITS_PER_WORD;
7328
7329 for (byte = 0; byte < total_bytes; byte++)
7330 {
7331 int bitpos = byte * BITS_PER_UNIT;
7332 if (bitpos < HOST_BITS_PER_WIDE_INT)
7333 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7334 else
7335 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7336 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7337
7338 if (total_bytes > UNITS_PER_WORD)
7339 {
7340 word = byte / UNITS_PER_WORD;
7341 if (WORDS_BIG_ENDIAN)
7342 word = (words - 1) - word;
7343 offset = word * UNITS_PER_WORD;
7344 if (BYTES_BIG_ENDIAN)
7345 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7346 else
7347 offset += byte % UNITS_PER_WORD;
7348 }
7349 else
7350 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7351 ptr[offset] = value;
7352 }
7353 return total_bytes;
7354 }
7355
7356
7357 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7358 specified by EXPR into the buffer PTR of length LEN bytes.
7359 Return the number of bytes placed in the buffer, or zero
7360 upon failure. */
7361
7362 static int
7363 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7364 {
7365 tree type = TREE_TYPE (expr);
7366 enum machine_mode mode = TYPE_MODE (type);
7367 int total_bytes = GET_MODE_SIZE (mode);
7368 FIXED_VALUE_TYPE value;
7369 tree i_value, i_type;
7370
7371 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7372 return 0;
7373
7374 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7375
7376 if (NULL_TREE == i_type
7377       || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7378 return 0;
7379
7380 value = TREE_FIXED_CST (expr);
7381 i_value = double_int_to_tree (i_type, value.data);
7382
7383 return native_encode_int (i_value, ptr, len);
7384 }
7385
7386
7387 /* Subroutine of native_encode_expr. Encode the REAL_CST
7388 specified by EXPR into the buffer PTR of length LEN bytes.
7389 Return the number of bytes placed in the buffer, or zero
7390 upon failure. */
7391
7392 static int
7393 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7394 {
7395 tree type = TREE_TYPE (expr);
7396 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7397 int byte, offset, word, words, bitpos;
7398 unsigned char value;
7399
7400 /* There are always 32 bits in each long, no matter the size of
7401      the host's long. We handle floating point representations with
7402 up to 192 bits. */
7403 long tmp[6];
7404
7405 if (total_bytes > len)
7406 return 0;
7407 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7408
7409 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7410
7411 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7412 bitpos += BITS_PER_UNIT)
7413 {
7414 byte = (bitpos / BITS_PER_UNIT) & 3;
7415 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7416
7417 if (UNITS_PER_WORD < 4)
7418 {
7419 word = byte / UNITS_PER_WORD;
7420 if (WORDS_BIG_ENDIAN)
7421 word = (words - 1) - word;
7422 offset = word * UNITS_PER_WORD;
7423 if (BYTES_BIG_ENDIAN)
7424 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7425 else
7426 offset += byte % UNITS_PER_WORD;
7427 }
7428 else
7429 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7430 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7431 }
7432 return total_bytes;
7433 }
7434
7435 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7436 specified by EXPR into the buffer PTR of length LEN bytes.
7437 Return the number of bytes placed in the buffer, or zero
7438 upon failure. */
7439
7440 static int
7441 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7442 {
7443 int rsize, isize;
7444 tree part;
7445
7446 part = TREE_REALPART (expr);
7447 rsize = native_encode_expr (part, ptr, len);
7448 if (rsize == 0)
7449 return 0;
7450 part = TREE_IMAGPART (expr);
7451 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7452 if (isize != rsize)
7453 return 0;
7454 return rsize + isize;
7455 }
7456
7457
7458 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7459 specified by EXPR into the buffer PTR of length LEN bytes.
7460 Return the number of bytes placed in the buffer, or zero
7461 upon failure. */
7462
7463 static int
7464 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7465 {
7466 unsigned i, count;
7467 int size, offset;
7468 tree itype, elem;
7469
7470 offset = 0;
7471 count = VECTOR_CST_NELTS (expr);
7472 itype = TREE_TYPE (TREE_TYPE (expr));
7473 size = GET_MODE_SIZE (TYPE_MODE (itype));
7474 for (i = 0; i < count; i++)
7475 {
7476 elem = VECTOR_CST_ELT (expr, i);
7477 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7478 return 0;
7479 offset += size;
7480 }
7481 return offset;
7482 }
7483
7484
7485 /* Subroutine of native_encode_expr. Encode the STRING_CST
7486 specified by EXPR into the buffer PTR of length LEN bytes.
7487 Return the number of bytes placed in the buffer, or zero
7488 upon failure. */
7489
7490 static int
7491 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7492 {
7493 tree type = TREE_TYPE (expr);
7494 HOST_WIDE_INT total_bytes;
7495
7496 if (TREE_CODE (type) != ARRAY_TYPE
7497 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7498 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7499 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7500 return 0;
7501 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7502 if (total_bytes > len)
7503 return 0;
7504 if (TREE_STRING_LENGTH (expr) < total_bytes)
7505 {
7506 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7507 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7508 total_bytes - TREE_STRING_LENGTH (expr));
7509 }
7510 else
7511 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7512 return total_bytes;
7513 }
7514
7515
7516 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7517 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7518 buffer PTR of length LEN bytes. Return the number of bytes
7519 placed in the buffer, or zero upon failure. */
7520
7521 int
7522 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7523 {
7524 switch (TREE_CODE (expr))
7525 {
7526 case INTEGER_CST:
7527 return native_encode_int (expr, ptr, len);
7528
7529 case REAL_CST:
7530 return native_encode_real (expr, ptr, len);
7531
7532 case FIXED_CST:
7533 return native_encode_fixed (expr, ptr, len);
7534
7535 case COMPLEX_CST:
7536 return native_encode_complex (expr, ptr, len);
7537
7538 case VECTOR_CST:
7539 return native_encode_vector (expr, ptr, len);
7540
7541 case STRING_CST:
7542 return native_encode_string (expr, ptr, len);
7543
7544 default:
7545 return 0;
7546 }
7547 }
7548
7549
7550 /* Subroutine of native_interpret_expr. Interpret the contents of
7551 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7552 If the buffer cannot be interpreted, return NULL_TREE. */
7553
7554 static tree
7555 native_interpret_int (tree type, const unsigned char *ptr, int len)
7556 {
7557 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7558 double_int result;
7559
7560 if (total_bytes > len
7561 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7562 return NULL_TREE;
7563
7564 result = double_int::from_buffer (ptr, total_bytes);
7565
7566 return double_int_to_tree (type, result);
7567 }
7568
7569
7570 /* Subroutine of native_interpret_expr. Interpret the contents of
7571 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7572 If the buffer cannot be interpreted, return NULL_TREE. */
7573
7574 static tree
7575 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7576 {
7577 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7578 double_int result;
7579 FIXED_VALUE_TYPE fixed_value;
7580
7581 if (total_bytes > len
7582 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7583 return NULL_TREE;
7584
7585 result = double_int::from_buffer (ptr, total_bytes);
7586 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7587
7588 return build_fixed (type, fixed_value);
7589 }
7590
7591
7592 /* Subroutine of native_interpret_expr. Interpret the contents of
7593 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7594 If the buffer cannot be interpreted, return NULL_TREE. */
7595
7596 static tree
7597 native_interpret_real (tree type, const unsigned char *ptr, int len)
7598 {
7599 enum machine_mode mode = TYPE_MODE (type);
7600 int total_bytes = GET_MODE_SIZE (mode);
7601 int byte, offset, word, words, bitpos;
7602 unsigned char value;
7603 /* There are always 32 bits in each long, no matter the size of
7604      the host's long. We handle floating point representations with
7605 up to 192 bits. */
7606 REAL_VALUE_TYPE r;
7607 long tmp[6];
7608
7609 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7610 if (total_bytes > len || total_bytes > 24)
7611 return NULL_TREE;
7612 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7613
7614 memset (tmp, 0, sizeof (tmp));
7615 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7616 bitpos += BITS_PER_UNIT)
7617 {
7618 byte = (bitpos / BITS_PER_UNIT) & 3;
7619 if (UNITS_PER_WORD < 4)
7620 {
7621 word = byte / UNITS_PER_WORD;
7622 if (WORDS_BIG_ENDIAN)
7623 word = (words - 1) - word;
7624 offset = word * UNITS_PER_WORD;
7625 if (BYTES_BIG_ENDIAN)
7626 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7627 else
7628 offset += byte % UNITS_PER_WORD;
7629 }
7630 else
7631 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7632 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7633
7634 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7635 }
7636
7637 real_from_target (&r, tmp, mode);
7638 return build_real (type, r);
7639 }
7640
7641
7642 /* Subroutine of native_interpret_expr. Interpret the contents of
7643 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7644 If the buffer cannot be interpreted, return NULL_TREE. */
7645
7646 static tree
7647 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7648 {
7649 tree etype, rpart, ipart;
7650 int size;
7651
7652 etype = TREE_TYPE (type);
7653 size = GET_MODE_SIZE (TYPE_MODE (etype));
7654 if (size * 2 > len)
7655 return NULL_TREE;
7656 rpart = native_interpret_expr (etype, ptr, size);
7657 if (!rpart)
7658 return NULL_TREE;
7659 ipart = native_interpret_expr (etype, ptr+size, size);
7660 if (!ipart)
7661 return NULL_TREE;
7662 return build_complex (type, rpart, ipart);
7663 }
7664
7665
7666 /* Subroutine of native_interpret_expr. Interpret the contents of
7667 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7668 If the buffer cannot be interpreted, return NULL_TREE. */
7669
7670 static tree
7671 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7672 {
7673 tree etype, elem;
7674 int i, size, count;
7675 tree *elements;
7676
7677 etype = TREE_TYPE (type);
7678 size = GET_MODE_SIZE (TYPE_MODE (etype));
7679 count = TYPE_VECTOR_SUBPARTS (type);
7680 if (size * count > len)
7681 return NULL_TREE;
7682
7683 elements = XALLOCAVEC (tree, count);
7684 for (i = count - 1; i >= 0; i--)
7685 {
7686 elem = native_interpret_expr (etype, ptr+(i*size), size);
7687 if (!elem)
7688 return NULL_TREE;
7689 elements[i] = elem;
7690 }
7691 return build_vector (type, elements);
7692 }
7693
7694
7695 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7696 the buffer PTR of length LEN as a constant of type TYPE. For
7697 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7698 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7699 return NULL_TREE. */
7700
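/* Illustrative round trip (hypothetical usage, with CST a supported
   constant): bytes produced by native_encode_expr interpret back to an
   equal constant of the same type:

     unsigned char buf[64];
     int len = native_encode_expr (cst, buf, sizeof (buf));
     tree back = len ? native_interpret_expr (TREE_TYPE (cst), buf, len)
		     : NULL_TREE;
     // for supported types, operand_equal_p (cst, back, 0) holds  */
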
7701 tree
7702 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7703 {
7704 switch (TREE_CODE (type))
7705 {
7706 case INTEGER_TYPE:
7707 case ENUMERAL_TYPE:
7708 case BOOLEAN_TYPE:
7709 case POINTER_TYPE:
7710 case REFERENCE_TYPE:
7711 return native_interpret_int (type, ptr, len);
7712
7713 case REAL_TYPE:
7714 return native_interpret_real (type, ptr, len);
7715
7716 case FIXED_POINT_TYPE:
7717 return native_interpret_fixed (type, ptr, len);
7718
7719 case COMPLEX_TYPE:
7720 return native_interpret_complex (type, ptr, len);
7721
7722 case VECTOR_TYPE:
7723 return native_interpret_vector (type, ptr, len);
7724
7725 default:
7726 return NULL_TREE;
7727 }
7728 }
7729
7730 /* Returns true if we can interpret the contents of a native encoding
7731 as TYPE. */
7732
7733 static bool
7734 can_native_interpret_type_p (tree type)
7735 {
7736 switch (TREE_CODE (type))
7737 {
7738 case INTEGER_TYPE:
7739 case ENUMERAL_TYPE:
7740 case BOOLEAN_TYPE:
7741 case POINTER_TYPE:
7742 case REFERENCE_TYPE:
7743 case FIXED_POINT_TYPE:
7744 case REAL_TYPE:
7745 case COMPLEX_TYPE:
7746 case VECTOR_TYPE:
7747 return true;
7748 default:
7749 return false;
7750 }
7751 }
7752
7753 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7754 TYPE at compile-time. If we're unable to perform the conversion
7755 return NULL_TREE. */
7756
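/* Illustrative fold (hypothetical, target-dependent): a
   VIEW_CONVERT_EXPR of the REAL_CST 1.0f to a 32-bit unsigned type is
   computed by encoding the float into bytes and reinterpreting them,
   giving 0x3f800000 on an IEEE single-precision target.  */
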
7757 static tree
7758 fold_view_convert_expr (tree type, tree expr)
7759 {
7760 /* We support up to 512-bit values (for V8DFmode). */
7761 unsigned char buffer[64];
7762 int len;
7763
7764 /* Check that the host and target are sane. */
7765 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7766 return NULL_TREE;
7767
7768 len = native_encode_expr (expr, buffer, sizeof (buffer));
7769 if (len == 0)
7770 return NULL_TREE;
7771
7772 return native_interpret_expr (type, buffer, len);
7773 }
7774
7775 /* Build an expression for the address of T. Folds away INDIRECT_REF
7776 to avoid confusing the gimplify process. */
7777
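/* E.g. (hypothetical): &*p folds back to p (with a cast to PTRTYPE if
   needed), and &MEM_REF [p, 0] folds to p, so gimplification never sees
   the address of an indirection.  */
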
7778 tree
7779 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7780 {
7781 /* The size of the object is not relevant when talking about its address. */
7782 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7783 t = TREE_OPERAND (t, 0);
7784
7785 if (TREE_CODE (t) == INDIRECT_REF)
7786 {
7787 t = TREE_OPERAND (t, 0);
7788
7789 if (TREE_TYPE (t) != ptrtype)
7790 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7791 }
7792 else if (TREE_CODE (t) == MEM_REF
7793 && integer_zerop (TREE_OPERAND (t, 1)))
7794 return TREE_OPERAND (t, 0);
7795 else if (TREE_CODE (t) == MEM_REF
7796 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7797 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7798 TREE_OPERAND (t, 0),
7799 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7800 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7801 {
7802 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7803
7804 if (TREE_TYPE (t) != ptrtype)
7805 t = fold_convert_loc (loc, ptrtype, t);
7806 }
7807 else
7808 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7809
7810 return t;
7811 }
7812
7813 /* Build an expression for the address of T. */
7814
7815 tree
7816 build_fold_addr_expr_loc (location_t loc, tree t)
7817 {
7818 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7819
7820 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7821 }
7822
7823 static bool vec_cst_ctor_to_array (tree, tree *);
7824
7825 /* Fold a unary expression of code CODE and type TYPE with operand
7826 OP0. Return the folded expression if folding is successful.
7827 Otherwise, return NULL_TREE. */
7828
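/* Usage sketch (hypothetical; some_int_expr is a placeholder operand):

     tree t = fold_unary_loc (UNKNOWN_LOCATION, NOP_EXPR,
			      integer_type_node, some_int_expr);

   callers fall back to building the expression themselves when
   NULL_TREE is returned.  */
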
7829 tree
7830 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7831 {
7832 tree tem;
7833 tree arg0;
7834 enum tree_code_class kind = TREE_CODE_CLASS (code);
7835
7836 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7837 && TREE_CODE_LENGTH (code) == 1);
7838
7839 arg0 = op0;
7840 if (arg0)
7841 {
7842 if (CONVERT_EXPR_CODE_P (code)
7843 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7844 {
7845 /* Don't use STRIP_NOPS, because signedness of argument type
7846 matters. */
7847 STRIP_SIGN_NOPS (arg0);
7848 }
7849 else
7850 {
7851 /* Strip any conversions that don't change the mode. This
7852 is safe for every expression, except for a comparison
7853 expression because its signedness is derived from its
7854 operands.
7855
7856 Note that this is done as an internal manipulation within
7857 the constant folder, in order to find the simplest
7858 representation of the arguments so that their form can be
7859      studied. In any case, the appropriate type conversions
7860 should be put back in the tree that will get out of the
7861 constant folder. */
7862 STRIP_NOPS (arg0);
7863 }
7864 }
7865
7866 if (TREE_CODE_CLASS (code) == tcc_unary)
7867 {
7868 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7869 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7870 fold_build1_loc (loc, code, type,
7871 fold_convert_loc (loc, TREE_TYPE (op0),
7872 TREE_OPERAND (arg0, 1))));
7873 else if (TREE_CODE (arg0) == COND_EXPR)
7874 {
7875 tree arg01 = TREE_OPERAND (arg0, 1);
7876 tree arg02 = TREE_OPERAND (arg0, 2);
7877 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7878 arg01 = fold_build1_loc (loc, code, type,
7879 fold_convert_loc (loc,
7880 TREE_TYPE (op0), arg01));
7881 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7882 arg02 = fold_build1_loc (loc, code, type,
7883 fold_convert_loc (loc,
7884 TREE_TYPE (op0), arg02));
7885 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7886 arg01, arg02);
7887
7888 	  /* If this was a conversion, and all we did was to move it
7889 	     inside the COND_EXPR, bring it back out. But leave it if
7890 it is a conversion from integer to integer and the
7891 result precision is no wider than a word since such a
7892 conversion is cheap and may be optimized away by combine,
7893 while it couldn't if it were outside the COND_EXPR. Then return
7894 so we don't get into an infinite recursion loop taking the
7895 conversion out and then back in. */
7896
7897 if ((CONVERT_EXPR_CODE_P (code)
7898 || code == NON_LVALUE_EXPR)
7899 && TREE_CODE (tem) == COND_EXPR
7900 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7901 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7902 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7903 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7904 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7905 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7906 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7907 && (INTEGRAL_TYPE_P
7908 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7909 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7910 || flag_syntax_only))
7911 tem = build1_loc (loc, code, type,
7912 build3 (COND_EXPR,
7913 TREE_TYPE (TREE_OPERAND
7914 (TREE_OPERAND (tem, 1), 0)),
7915 TREE_OPERAND (tem, 0),
7916 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7917 TREE_OPERAND (TREE_OPERAND (tem, 2),
7918 0)));
7919 return tem;
7920 }
7921 }
7922
7923 switch (code)
7924 {
7925 case PAREN_EXPR:
7926 /* Re-association barriers around constants and other re-association
7927 barriers can be removed. */
7928 if (CONSTANT_CLASS_P (op0)
7929 || TREE_CODE (op0) == PAREN_EXPR)
7930 return fold_convert_loc (loc, type, op0);
7931 return NULL_TREE;
7932
7933 CASE_CONVERT:
7934 case FLOAT_EXPR:
7935 case FIX_TRUNC_EXPR:
7936 if (TREE_TYPE (op0) == type)
7937 return op0;
7938
7939 if (COMPARISON_CLASS_P (op0))
7940 {
7941 	  /* If we have (type) (a CMP b) and type is an integral type, return
7942 	     a new expression involving the new type. Canonicalize
7943 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7944 	     a non-integral type.
7945 	     Do not fold the result, as that would not simplify further;
7946 	     folding it again would only recurse. */
7947 if (TREE_CODE (type) == BOOLEAN_TYPE)
7948 return build2_loc (loc, TREE_CODE (op0), type,
7949 TREE_OPERAND (op0, 0),
7950 TREE_OPERAND (op0, 1));
7951 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7952 && TREE_CODE (type) != VECTOR_TYPE)
7953 return build3_loc (loc, COND_EXPR, type, op0,
7954 constant_boolean_node (true, type),
7955 constant_boolean_node (false, type));
7956 }
7957
7958 /* Handle cases of two conversions in a row. */
7959 if (CONVERT_EXPR_P (op0))
7960 {
7961 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7962 tree inter_type = TREE_TYPE (op0);
7963 int inside_int = INTEGRAL_TYPE_P (inside_type);
7964 int inside_ptr = POINTER_TYPE_P (inside_type);
7965 int inside_float = FLOAT_TYPE_P (inside_type);
7966 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7967 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7968 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7969 int inter_int = INTEGRAL_TYPE_P (inter_type);
7970 int inter_ptr = POINTER_TYPE_P (inter_type);
7971 int inter_float = FLOAT_TYPE_P (inter_type);
7972 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7973 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7974 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7975 int final_int = INTEGRAL_TYPE_P (type);
7976 int final_ptr = POINTER_TYPE_P (type);
7977 int final_float = FLOAT_TYPE_P (type);
7978 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7979 unsigned int final_prec = TYPE_PRECISION (type);
7980 int final_unsignedp = TYPE_UNSIGNED (type);
7981
7982 /* In addition to the cases of two conversions in a row
7983 handled below, if we are converting something to its own
7984 type via an object of identical or wider precision, neither
7985 conversion is needed. */
7986 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7987 && (((inter_int || inter_ptr) && final_int)
7988 || (inter_float && final_float))
7989 && inter_prec >= final_prec)
7990 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7991
7992 /* Likewise, if the intermediate and initial types are either both
7993 float or both integer, we don't need the middle conversion if the
7994 former is wider than the latter and doesn't change the signedness
7995 (for integers). Avoid this if the final type is a pointer since
7996 then we sometimes need the middle conversion. Likewise if the
7997 final type has a precision not equal to the size of its mode. */
7998 if (((inter_int && inside_int)
7999 || (inter_float && inside_float)
8000 || (inter_vec && inside_vec))
8001 && inter_prec >= inside_prec
8002 && (inter_float || inter_vec
8003 || inter_unsignedp == inside_unsignedp)
8004 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8005 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8006 && ! final_ptr
8007 && (! final_vec || inter_prec == inside_prec))
8008 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8009
8010 /* If we have a sign-extension of a zero-extended value, we can
8011 replace that by a single zero-extension. Likewise if the
8012 final conversion does not change precision we can drop the
8013 intermediate conversion. */
8014 if (inside_int && inter_int && final_int
8015 && ((inside_prec < inter_prec && inter_prec < final_prec
8016 && inside_unsignedp && !inter_unsignedp)
8017 || final_prec == inter_prec))
8018 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8019
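	  /* E.g. (hypothetical): (long long) (int) (unsigned char) c
	     sign-extends a zero-extended 8-bit value via 32 bits to 64
	     bits; the result equals the single zero-extension
	     (long long) (unsigned char) c.  */
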
8020 /* Two conversions in a row are not needed unless:
8021 - some conversion is floating-point (overstrict for now), or
8022 - some conversion is a vector (overstrict for now), or
8023 - the intermediate type is narrower than both initial and
8024 final, or
8025 - the intermediate type and innermost type differ in signedness,
8026 and the outermost type is wider than the intermediate, or
8027 - the initial type is a pointer type and the precisions of the
8028 intermediate and final types differ, or
8029 - the final type is a pointer type and the precisions of the
8030 initial and intermediate types differ. */
8031 if (! inside_float && ! inter_float && ! final_float
8032 && ! inside_vec && ! inter_vec && ! final_vec
8033 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8034 && ! (inside_int && inter_int
8035 && inter_unsignedp != inside_unsignedp
8036 && inter_prec < final_prec)
8037 && ((inter_unsignedp && inter_prec > inside_prec)
8038 == (final_unsignedp && final_prec > inter_prec))
8039 && ! (inside_ptr && inter_prec != final_prec)
8040 && ! (final_ptr && inside_prec != inter_prec)
8041 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8042 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8043 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8044 }
8045
8046 /* Handle (T *)&A.B.C for A being of type T and B and C
8047 living at offset zero. This occurs frequently in
8048 C++ upcasting and then accessing the base. */
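      /* E.g. (hypothetical): for struct D { struct B b; } d; the
	 expression (struct D *) &d.b references offset zero of d and
	 folds to &d.  */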
8049 if (TREE_CODE (op0) == ADDR_EXPR
8050 && POINTER_TYPE_P (type)
8051 && handled_component_p (TREE_OPERAND (op0, 0)))
8052 {
8053 HOST_WIDE_INT bitsize, bitpos;
8054 tree offset;
8055 enum machine_mode mode;
8056 int unsignedp, volatilep;
8057 tree base = TREE_OPERAND (op0, 0);
8058 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8059 &mode, &unsignedp, &volatilep, false);
8060 /* If the reference was to a (constant) zero offset, we can use
8061 the address of the base if it has the same base type
8062 as the result type and the pointer type is unqualified. */
8063 if (! offset && bitpos == 0
8064 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8065 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8066 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8067 return fold_convert_loc (loc, type,
8068 build_fold_addr_expr_loc (loc, base));
8069 }
8070
8071 if (TREE_CODE (op0) == MODIFY_EXPR
8072 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8073 /* Detect assigning a bitfield. */
8074 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8075 && DECL_BIT_FIELD
8076 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8077 {
8078 /* Don't leave an assignment inside a conversion
8079 unless assigning a bitfield. */
8080 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8081 /* First do the assignment, then return converted constant. */
8082 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8083 TREE_NO_WARNING (tem) = 1;
8084 TREE_USED (tem) = 1;
8085 return tem;
8086 }
8087
8088 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8089 constants (if x has signed type, the sign bit cannot be set
8090 in c). This folds extension into the BIT_AND_EXPR.
8091 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8092 very likely don't have maximal range for their precision and this
8093 transformation effectively doesn't preserve non-maximal ranges. */
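      /* E.g. (hypothetical): for int x, (unsigned int) (x & 0x7f)
	 folds to (unsigned int) x & 0x7fU, since the mask clears the
	 sign bit.  */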
8094 if (TREE_CODE (type) == INTEGER_TYPE
8095 && TREE_CODE (op0) == BIT_AND_EXPR
8096 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8097 {
8098 tree and_expr = op0;
8099 tree and0 = TREE_OPERAND (and_expr, 0);
8100 tree and1 = TREE_OPERAND (and_expr, 1);
8101 int change = 0;
8102
8103 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8104 || (TYPE_PRECISION (type)
8105 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8106 change = 1;
8107 else if (TYPE_PRECISION (TREE_TYPE (and1))
8108 <= HOST_BITS_PER_WIDE_INT
8109 && host_integerp (and1, 1))
8110 {
8111 unsigned HOST_WIDE_INT cst;
8112
8113 cst = tree_low_cst (and1, 1);
8114 cst &= HOST_WIDE_INT_M1U
8115 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8116 change = (cst == 0);
8117 #ifdef LOAD_EXTEND_OP
8118 if (change
8119 && !flag_syntax_only
8120 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8121 == ZERO_EXTEND))
8122 {
8123 tree uns = unsigned_type_for (TREE_TYPE (and0));
8124 and0 = fold_convert_loc (loc, uns, and0);
8125 and1 = fold_convert_loc (loc, uns, and1);
8126 }
8127 #endif
8128 }
8129 if (change)
8130 {
8131 tem = force_fit_type_double (type, tree_to_double_int (and1),
8132 0, TREE_OVERFLOW (and1));
8133 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8134 fold_convert_loc (loc, type, and0), tem);
8135 }
8136 }
8137
8138 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8139 when one of the new casts will fold away. Conservatively we assume
8140 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8141 if (POINTER_TYPE_P (type)
8142 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8143 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8144 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8145 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8146 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8147 {
8148 tree arg00 = TREE_OPERAND (arg0, 0);
8149 tree arg01 = TREE_OPERAND (arg0, 1);
8150
8151 return fold_build_pointer_plus_loc
8152 (loc, fold_convert_loc (loc, type, arg00), arg01);
8153 }
8154
8155 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8156 of the same precision, and X is an integer type not narrower than
8157 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8158 if (INTEGRAL_TYPE_P (type)
8159 && TREE_CODE (op0) == BIT_NOT_EXPR
8160 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8161 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8162 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8163 {
8164 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8165 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8166 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8167 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8168 fold_convert_loc (loc, type, tem));
8169 }
8170
8171 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8172 type of X and Y (integer types only). */
8173 if (INTEGRAL_TYPE_P (type)
8174 && TREE_CODE (op0) == MULT_EXPR
8175 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8176 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8177 {
8178 /* Be careful not to introduce new overflows. */
8179 tree mult_type;
8180 if (TYPE_OVERFLOW_WRAPS (type))
8181 mult_type = type;
8182 else
8183 mult_type = unsigned_type_for (type);
8184
8185 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8186 {
8187 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8188 fold_convert_loc (loc, mult_type,
8189 TREE_OPERAND (op0, 0)),
8190 fold_convert_loc (loc, mult_type,
8191 TREE_OPERAND (op0, 1)));
8192 return fold_convert_loc (loc, type, tem);
8193 }
8194 }
8195
8196 tem = fold_convert_const (code, type, op0);
8197 return tem ? tem : NULL_TREE;
8198
8199 case ADDR_SPACE_CONVERT_EXPR:
8200 if (integer_zerop (arg0))
8201 return fold_convert_const (code, type, arg0);
8202 return NULL_TREE;
8203
8204 case FIXED_CONVERT_EXPR:
8205 tem = fold_convert_const (code, type, arg0);
8206 return tem ? tem : NULL_TREE;
8207
8208 case VIEW_CONVERT_EXPR:
8209 if (TREE_TYPE (op0) == type)
8210 return op0;
8211 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8212 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8213 type, TREE_OPERAND (op0, 0));
8214 if (TREE_CODE (op0) == MEM_REF)
8215 return fold_build2_loc (loc, MEM_REF, type,
8216 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8217
8218 /* For integral conversions with the same precision or pointer
8219 conversions use a NOP_EXPR instead. */
8220 if ((INTEGRAL_TYPE_P (type)
8221 || POINTER_TYPE_P (type))
8222 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8223 || POINTER_TYPE_P (TREE_TYPE (op0)))
8224 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8225 return fold_convert_loc (loc, type, op0);
8226
8227 /* Strip inner integral conversions that do not change the precision. */
8228 if (CONVERT_EXPR_P (op0)
8229 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8230 || POINTER_TYPE_P (TREE_TYPE (op0)))
8231 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8232 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8233 && (TYPE_PRECISION (TREE_TYPE (op0))
8234 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8235 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8236 type, TREE_OPERAND (op0, 0));
8237
8238 return fold_view_convert_expr (type, op0);
8239
8240 case NEGATE_EXPR:
8241 tem = fold_negate_expr (loc, arg0);
8242 if (tem)
8243 return fold_convert_loc (loc, type, tem);
8244 return NULL_TREE;
8245
8246 case ABS_EXPR:
8247 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8248 return fold_abs_const (arg0, type);
8249 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8250 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8251 /* Convert fabs((double)float) into (double)fabsf(float). */
8252 else if (TREE_CODE (arg0) == NOP_EXPR
8253 && TREE_CODE (type) == REAL_TYPE)
8254 {
8255 tree targ0 = strip_float_extensions (arg0);
8256 if (targ0 != arg0)
8257 return fold_convert_loc (loc, type,
8258 fold_build1_loc (loc, ABS_EXPR,
8259 TREE_TYPE (targ0),
8260 targ0));
8261 }
8262 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8263 else if (TREE_CODE (arg0) == ABS_EXPR)
8264 return arg0;
8265 else if (tree_expr_nonnegative_p (arg0))
8266 return arg0;
8267
8268 /* Strip sign ops from argument. */
8269 if (TREE_CODE (type) == REAL_TYPE)
8270 {
8271 tem = fold_strip_sign_ops (arg0);
8272 if (tem)
8273 return fold_build1_loc (loc, ABS_EXPR, type,
8274 fold_convert_loc (loc, type, tem));
8275 }
8276 return NULL_TREE;
8277
8278 case CONJ_EXPR:
8279 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8280 return fold_convert_loc (loc, type, arg0);
8281 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8282 {
8283 tree itype = TREE_TYPE (type);
8284 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8285 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8286 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8287 negate_expr (ipart));
8288 }
8289 if (TREE_CODE (arg0) == COMPLEX_CST)
8290 {
8291 tree itype = TREE_TYPE (type);
8292 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8293 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8294 return build_complex (type, rpart, negate_expr (ipart));
8295 }
8296 if (TREE_CODE (arg0) == CONJ_EXPR)
8297 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8298 return NULL_TREE;
8299
8300 case BIT_NOT_EXPR:
8301 if (TREE_CODE (arg0) == INTEGER_CST)
8302 return fold_not_const (arg0, type);
8303 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8304 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8305 /* Convert ~ (-A) to A - 1. */
8306 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8307 return fold_build2_loc (loc, MINUS_EXPR, type,
8308 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8309 build_int_cst (type, 1));
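      /* Both the previous fold and the next one follow from the two's
	 complement identity ~y == -y - 1: e.g. (hypothetical)
	 ~(-x) -> x - 1 and ~(x - 1) -> -x.  */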
8310 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8311 else if (INTEGRAL_TYPE_P (type)
8312 && ((TREE_CODE (arg0) == MINUS_EXPR
8313 && integer_onep (TREE_OPERAND (arg0, 1)))
8314 || (TREE_CODE (arg0) == PLUS_EXPR
8315 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8316 return fold_build1_loc (loc, NEGATE_EXPR, type,
8317 fold_convert_loc (loc, type,
8318 TREE_OPERAND (arg0, 0)));
8319 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8320 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8321 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8322 fold_convert_loc (loc, type,
8323 TREE_OPERAND (arg0, 0)))))
8324 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8325 fold_convert_loc (loc, type,
8326 TREE_OPERAND (arg0, 1)));
8327 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8328 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8329 fold_convert_loc (loc, type,
8330 TREE_OPERAND (arg0, 1)))))
8331 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8332 fold_convert_loc (loc, type,
8333 TREE_OPERAND (arg0, 0)), tem);
8334 /* Perform BIT_NOT_EXPR on each element individually. */
8335 else if (TREE_CODE (arg0) == VECTOR_CST)
8336 {
8337 tree *elements;
8338 tree elem;
8339 unsigned count = VECTOR_CST_NELTS (arg0), i;
8340
8341 elements = XALLOCAVEC (tree, count);
8342 for (i = 0; i < count; i++)
8343 {
8344 elem = VECTOR_CST_ELT (arg0, i);
8345 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8346 if (elem == NULL_TREE)
8347 break;
8348 elements[i] = elem;
8349 }
8350 if (i == count)
8351 return build_vector (type, elements);
8352 }
8353 else if (COMPARISON_CLASS_P (arg0)
8354 && (VECTOR_TYPE_P (type)
8355 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8356 {
8357 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8358 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8359 HONOR_NANS (TYPE_MODE (op_type)));
8360 if (subcode != ERROR_MARK)
8361 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8362 TREE_OPERAND (arg0, 1));
8363 }
8364
8365
8366 return NULL_TREE;
8367
8368 case TRUTH_NOT_EXPR:
8369 /* Note that the operand of this must be an int
8370 and its values must be 0 or 1.
8371 ("true" is a fixed value perhaps depending on the language,
8372 but we don't handle values other than 1 correctly yet.) */
8373 tem = fold_truth_not_expr (loc, arg0);
8374 if (!tem)
8375 return NULL_TREE;
8376 return fold_convert_loc (loc, type, tem);
8377
8378 case REALPART_EXPR:
8379 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8380 return fold_convert_loc (loc, type, arg0);
8381 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8382 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8383 TREE_OPERAND (arg0, 1));
8384 if (TREE_CODE (arg0) == COMPLEX_CST)
8385 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8386 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8387 {
8388 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8389 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8390 fold_build1_loc (loc, REALPART_EXPR, itype,
8391 TREE_OPERAND (arg0, 0)),
8392 fold_build1_loc (loc, REALPART_EXPR, itype,
8393 TREE_OPERAND (arg0, 1)));
8394 return fold_convert_loc (loc, type, tem);
8395 }
8396 if (TREE_CODE (arg0) == CONJ_EXPR)
8397 {
8398 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8399 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8400 TREE_OPERAND (arg0, 0));
8401 return fold_convert_loc (loc, type, tem);
8402 }
8403 if (TREE_CODE (arg0) == CALL_EXPR)
8404 {
8405 tree fn = get_callee_fndecl (arg0);
8406 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8407 switch (DECL_FUNCTION_CODE (fn))
8408 {
8409 CASE_FLT_FN (BUILT_IN_CEXPI):
8410 fn = mathfn_built_in (type, BUILT_IN_COS);
8411 if (fn)
8412 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8413 break;
8414
8415 default:
8416 break;
8417 }
8418 }
8419 return NULL_TREE;
8420
8421 case IMAGPART_EXPR:
8422 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8423 return build_zero_cst (type);
8424 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8425 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8426 TREE_OPERAND (arg0, 0));
8427 if (TREE_CODE (arg0) == COMPLEX_CST)
8428 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8429 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8430 {
8431 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8432 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8433 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8434 TREE_OPERAND (arg0, 0)),
8435 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8436 TREE_OPERAND (arg0, 1)));
8437 return fold_convert_loc (loc, type, tem);
8438 }
8439 if (TREE_CODE (arg0) == CONJ_EXPR)
8440 {
8441 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8442 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8443 return fold_convert_loc (loc, type, negate_expr (tem));
8444 }
8445 if (TREE_CODE (arg0) == CALL_EXPR)
8446 {
8447 tree fn = get_callee_fndecl (arg0);
8448 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8449 switch (DECL_FUNCTION_CODE (fn))
8450 {
8451 CASE_FLT_FN (BUILT_IN_CEXPI):
8452 fn = mathfn_built_in (type, BUILT_IN_SIN);
8453 if (fn)
8454 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8455 break;
8456
8457 default:
8458 break;
8459 }
8460 }
8461 return NULL_TREE;
8462
8463 case INDIRECT_REF:
8464 /* Fold *&X to X if X is an lvalue. */
8465 if (TREE_CODE (op0) == ADDR_EXPR)
8466 {
8467 tree op00 = TREE_OPERAND (op0, 0);
8468 if ((TREE_CODE (op00) == VAR_DECL
8469 || TREE_CODE (op00) == PARM_DECL
8470 || TREE_CODE (op00) == RESULT_DECL)
8471 && !TREE_READONLY (op00))
8472 return op00;
8473 }
8474 return NULL_TREE;
8475
8476 case VEC_UNPACK_LO_EXPR:
8477 case VEC_UNPACK_HI_EXPR:
8478 case VEC_UNPACK_FLOAT_LO_EXPR:
8479 case VEC_UNPACK_FLOAT_HI_EXPR:
8480 {
8481 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8482 tree *elts;
8483 enum tree_code subcode;
8484
8485 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8486 if (TREE_CODE (arg0) != VECTOR_CST)
8487 return NULL_TREE;
8488
8489 elts = XALLOCAVEC (tree, nelts * 2);
8490 if (!vec_cst_ctor_to_array (arg0, elts))
8491 return NULL_TREE;
8492
8493 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8494 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8495 elts += nelts;
8496
8497 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8498 subcode = NOP_EXPR;
8499 else
8500 subcode = FLOAT_EXPR;
8501
8502 for (i = 0; i < nelts; i++)
8503 {
8504 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8505 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8506 return NULL_TREE;
8507 }
8508
8509 return build_vector (type, elts);
8510 }
8511
8512 case REDUC_MIN_EXPR:
8513 case REDUC_MAX_EXPR:
8514 case REDUC_PLUS_EXPR:
8515 {
8516 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8517 tree *elts;
8518 enum tree_code subcode;
8519
8520 if (TREE_CODE (op0) != VECTOR_CST)
8521 return NULL_TREE;
8522
8523 elts = XALLOCAVEC (tree, nelts);
8524 if (!vec_cst_ctor_to_array (op0, elts))
8525 return NULL_TREE;
8526
8527 switch (code)
8528 {
8529 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8530 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8531 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8532 default: gcc_unreachable ();
8533 }
8534
8535 for (i = 1; i < nelts; i++)
8536 {
8537 elts[0] = const_binop (subcode, elts[0], elts[i]);
8538 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8539 return NULL_TREE;
8540 elts[i] = build_zero_cst (TREE_TYPE (type));
8541 }
8542
8543 return build_vector (type, elts);
8544 }
8545
8546 default:
8547 return NULL_TREE;
8548 } /* switch (code) */
8549 }
8550
8551
8552 /* If the operation was a conversion do _not_ mark a resulting constant
8553 with TREE_OVERFLOW if the original constant was not. These conversions
8554 have implementation defined behavior and retaining the TREE_OVERFLOW
8555 flag here would confuse later passes such as VRP. */
8556 tree
8557 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8558 tree type, tree op0)
8559 {
8560 tree res = fold_unary_loc (loc, code, type, op0);
8561 if (res
8562 && TREE_CODE (res) == INTEGER_CST
8563 && TREE_CODE (op0) == INTEGER_CST
8564 && CONVERT_EXPR_CODE_P (code))
8565 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8566
8567 return res;
8568 }
8569
8570 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8571 operands OP0 and OP1. LOC is the location of the resulting expression.
8572 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8573 Return the folded expression if folding is successful. Otherwise,
8574 return NULL_TREE. */
8575 static tree
8576 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8577 tree arg0, tree arg1, tree op0, tree op1)
8578 {
8579 tree tem;
8580
8581 /* We only do these simplifications if we are optimizing. */
8582 if (!optimize)
8583 return NULL_TREE;
8584
8585 /* Check for things like (A || B) && (A || C). We can convert this
8586 to A || (B && C). Note that either operator can be any of the four
8587 truth and/or operations and the transformation will still be
8588 valid. Also note that we only care about order for the
8589 ANDIF and ORIF operators. If B contains side effects, this
8590 might change the truth-value of A. */
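  /* E.g. (hypothetical): (p || q) && (p || r) becomes p || (q && r),
     evaluating the common operand p only once.  */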
8591 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8592 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8593 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8594 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8595 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8596 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8597 {
8598 tree a00 = TREE_OPERAND (arg0, 0);
8599 tree a01 = TREE_OPERAND (arg0, 1);
8600 tree a10 = TREE_OPERAND (arg1, 0);
8601 tree a11 = TREE_OPERAND (arg1, 1);
8602 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8603 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8604 && (code == TRUTH_AND_EXPR
8605 || code == TRUTH_OR_EXPR));
8606
8607 if (operand_equal_p (a00, a10, 0))
8608 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8609 fold_build2_loc (loc, code, type, a01, a11));
8610 else if (commutative && operand_equal_p (a00, a11, 0))
8611 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8612 fold_build2_loc (loc, code, type, a01, a10));
8613 else if (commutative && operand_equal_p (a01, a10, 0))
8614 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8615 fold_build2_loc (loc, code, type, a00, a11));
8616
8617       /* This case is tricky because we must either have commutative
8618 operators or else A10 must not have side-effects. */
8619
8620 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8621 && operand_equal_p (a01, a11, 0))
8622 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8623 fold_build2_loc (loc, code, type, a00, a10),
8624 a01);
8625 }
8626
8627 /* See if we can build a range comparison. */
8628 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8629 return tem;
8630
8631 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8632 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8633 {
8634 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8635 if (tem)
8636 return fold_build2_loc (loc, code, type, tem, arg1);
8637 }
8638
8639 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8640 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8641 {
8642 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8643 if (tem)
8644 return fold_build2_loc (loc, code, type, arg0, tem);
8645 }
8646
8647 /* Check for the possibility of merging component references. If our
8648 lhs is another similar operation, try to merge its rhs with our
8649 rhs. Then try to merge our lhs and rhs. */
8650 if (TREE_CODE (arg0) == code
8651 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8652 TREE_OPERAND (arg0, 1), arg1)))
8653 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8654
8655 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8656 return tem;
8657
8658 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8659 && (code == TRUTH_AND_EXPR
8660 || code == TRUTH_ANDIF_EXPR
8661 || code == TRUTH_OR_EXPR
8662 || code == TRUTH_ORIF_EXPR))
8663 {
8664 enum tree_code ncode, icode;
8665
8666 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8667 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8668 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8669
8670 	  /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8671 	     or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8672 	     We don't want to pack more than two leaves into a non-IF AND/OR
8673 	     expression.
8674 	     If the tree code of the left-hand operand isn't an AND/OR-IF code
8675 	     and isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8676 	     If the inner right-hand side of the left-hand operand has
8677 	     side effects, or isn't simple, then we can't add to it, as
8678 	     otherwise we might destroy the if-sequence.  */
8679 if (TREE_CODE (arg0) == icode
8680 && simple_operand_p_2 (arg1)
8681 	      /* Needed for sequence points to handle trapping, and
8682 	         side effects.  */
8683 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8684 {
8685 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8686 arg1);
8687 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8688 tem);
8689 }
8690 	  /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8691 	     or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8692 else if (TREE_CODE (arg1) == icode
8693 && simple_operand_p_2 (arg0)
8694 	       /* Needed for sequence points to handle trapping, and
8695 	          side effects.  */
8696 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8697 {
8698 tem = fold_build2_loc (loc, ncode, type,
8699 arg0, TREE_OPERAND (arg1, 0));
8700 return fold_build2_loc (loc, icode, type, tem,
8701 TREE_OPERAND (arg1, 1));
8702 }
8703 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8704 into (A OR B).
8705 	     For sequence point consistency, we need to check for trapping
8706 	     and side effects.  */
8707 else if (code == icode && simple_operand_p_2 (arg0)
8708 && simple_operand_p_2 (arg1))
8709 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8710 }
8711
8712 return NULL_TREE;
8713 }
8714
8715 /* Fold a binary expression of code CODE and type TYPE with operands
8716 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8717 Return the folded expression if folding is successful. Otherwise,
8718 return NULL_TREE. */
8719
8720 static tree
8721 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8722 {
8723 enum tree_code compl_code;
8724
8725 if (code == MIN_EXPR)
8726 compl_code = MAX_EXPR;
8727 else if (code == MAX_EXPR)
8728 compl_code = MIN_EXPR;
8729 else
8730 gcc_unreachable ();
8731
8732 /* MIN (MAX (a, b), b) == b. */
8733 if (TREE_CODE (op0) == compl_code
8734 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8735 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8736
8737 /* MIN (MAX (b, a), b) == b. */
8738 if (TREE_CODE (op0) == compl_code
8739 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8740 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8741 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8742
8743 /* MIN (a, MAX (a, b)) == a. */
8744 if (TREE_CODE (op1) == compl_code
8745 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8746 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8747 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8748
8749 /* MIN (a, MAX (b, a)) == a. */
8750 if (TREE_CODE (op1) == compl_code
8751 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8752 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8753 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8754
8755 return NULL_TREE;
8756 }
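/* Illustrative example (follows from the identities above): for int x, y,
     MIN_EXPR (MAX_EXPR (x, y), y)  ->  y
     MAX_EXPR (x, MIN_EXPR (x, y))  ->  x
   with the dropped operand routed through omit_one_operand_loc so that
   any side effects it carries are preserved.  */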
8757
8758 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8759 by changing CODE to reduce the magnitude of constants involved in
8760 ARG0 of the comparison.
8761 Returns a canonicalized comparison tree if a simplification was
8762 possible, otherwise returns NULL_TREE.
8763 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8764 valid if signed overflow is undefined. */
8765
8766 static tree
8767 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8768 tree arg0, tree arg1,
8769 bool *strict_overflow_p)
8770 {
8771 enum tree_code code0 = TREE_CODE (arg0);
8772 tree t, cst0 = NULL_TREE;
8773 int sgn0;
8774 bool swap = false;
8775
8776 /* Match A +- CST code arg1 and CST code arg1. We can change the
8777 first form only if overflow is undefined. */
8778 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8779 /* In principle pointers also have undefined overflow behavior,
8780 but that causes problems elsewhere. */
8781 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8782 && (code0 == MINUS_EXPR
8783 || code0 == PLUS_EXPR)
8784 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8785 || code0 == INTEGER_CST))
8786 return NULL_TREE;
8787
8788 /* Identify the constant in arg0 and its sign. */
8789 if (code0 == INTEGER_CST)
8790 cst0 = arg0;
8791 else
8792 cst0 = TREE_OPERAND (arg0, 1);
8793 sgn0 = tree_int_cst_sgn (cst0);
8794
8795 /* Overflowed constants and zero will cause problems. */
8796 if (integer_zerop (cst0)
8797 || TREE_OVERFLOW (cst0))
8798 return NULL_TREE;
8799
8800 /* See if we can reduce the magnitude of the constant in
8801 arg0 by changing the comparison code. */
8802 if (code0 == INTEGER_CST)
8803 {
8804 /* CST <= arg1 -> CST-1 < arg1. */
8805 if (code == LE_EXPR && sgn0 == 1)
8806 code = LT_EXPR;
8807 /* -CST < arg1 -> -CST-1 <= arg1. */
8808 else if (code == LT_EXPR && sgn0 == -1)
8809 code = LE_EXPR;
8810 /* CST > arg1 -> CST-1 >= arg1. */
8811 else if (code == GT_EXPR && sgn0 == 1)
8812 code = GE_EXPR;
8813 /* -CST >= arg1 -> -CST-1 > arg1. */
8814 else if (code == GE_EXPR && sgn0 == -1)
8815 code = GT_EXPR;
8816 else
8817 return NULL_TREE;
8818 /* arg1 code' CST' might be more canonical. */
8819 swap = true;
8820 }
8821 else
8822 {
8823 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8824 if (code == LT_EXPR
8825 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8826 code = LE_EXPR;
8827 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8828 else if (code == GT_EXPR
8829 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8830 code = GE_EXPR;
8831 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8832 else if (code == LE_EXPR
8833 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8834 code = LT_EXPR;
8835 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8836 else if (code == GE_EXPR
8837 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8838 code = GT_EXPR;
8839 else
8840 return NULL_TREE;
8841 *strict_overflow_p = true;
8842 }
8843
8844   /* Now build the constant reduced in magnitude.  But not if that
8845      would produce one outside of its type's range.  */
8846 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8847 && ((sgn0 == 1
8848 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8849 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8850 || (sgn0 == -1
8851 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8852 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8853 /* We cannot swap the comparison here as that would cause us to
8854 endlessly recurse. */
8855 return NULL_TREE;
8856
8857 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8858 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8859 if (code0 != INTEGER_CST)
8860 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8861 t = fold_convert (TREE_TYPE (arg1), t);
8862
8863   /* If swapping might yield a more canonical form, do so.  */
8864 if (swap)
8865 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8866 else
8867 return fold_build2_loc (loc, code, type, t, arg1);
8868 }
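/* Illustrative example: for signed int x, y with undefined signed
   overflow,
     x + 10 <= y   ->  x + 9 < y   (magnitude of the constant reduced)
     5 <= y        ->  y > 4       (sole constant reduced and swapped)
   Only the first form sets *STRICT_OVERFLOW_P, since it relies on
   x + 10 not wrapping.  */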
8869
8870 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8871 overflow further. Try to decrease the magnitude of constants involved
8872 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8873 and put sole constants at the second argument position.
8874 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8875
8876 static tree
8877 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8878 tree arg0, tree arg1)
8879 {
8880 tree t;
8881 bool strict_overflow_p;
8882 const char * const warnmsg = G_("assuming signed overflow does not occur "
8883 "when reducing constant in comparison");
8884
8885 /* Try canonicalization by simplifying arg0. */
8886 strict_overflow_p = false;
8887 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8888 &strict_overflow_p);
8889 if (t)
8890 {
8891 if (strict_overflow_p)
8892 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8893 return t;
8894 }
8895
8896 /* Try canonicalization by simplifying arg1 using the swapped
8897 comparison. */
8898 code = swap_tree_comparison (code);
8899 strict_overflow_p = false;
8900 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8901 &strict_overflow_p);
8902 if (t && strict_overflow_p)
8903 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8904 return t;
8905 }
8906
8907 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8908 space. This is used to avoid issuing overflow warnings for
8909    expressions like &p->x which cannot wrap.  */
8910
8911 static bool
8912 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8913 {
8914 double_int di_offset, total;
8915
8916 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8917 return true;
8918
8919 if (bitpos < 0)
8920 return true;
8921
8922 if (offset == NULL_TREE)
8923 di_offset = double_int_zero;
8924 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8925 return true;
8926 else
8927 di_offset = TREE_INT_CST (offset);
8928
8929 bool overflow;
8930 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8931 total = di_offset.add_with_sign (units, true, &overflow);
8932 if (overflow)
8933 return true;
8934
8935 if (total.high != 0)
8936 return true;
8937
8938 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8939 if (size <= 0)
8940 return true;
8941
8942 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8943 array. */
8944 if (TREE_CODE (base) == ADDR_EXPR)
8945 {
8946 HOST_WIDE_INT base_size;
8947
8948 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8949 if (base_size > 0 && size < base_size)
8950 size = base_size;
8951 }
8952
8953 return total.low > (unsigned HOST_WIDE_INT) size;
8954 }
8955
8956 /* Subroutine of fold_binary. This routine performs all of the
8957 transformations that are common to the equality/inequality
8958 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8959 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8960    fold_binary should call fold_binary instead.  Fold a comparison with
8961 tree code CODE and type TYPE with operands OP0 and OP1. Return
8962 the folded comparison or NULL_TREE. */
8963
8964 static tree
8965 fold_comparison (location_t loc, enum tree_code code, tree type,
8966 tree op0, tree op1)
8967 {
8968 tree arg0, arg1, tem;
8969
8970 arg0 = op0;
8971 arg1 = op1;
8972
8973 STRIP_SIGN_NOPS (arg0);
8974 STRIP_SIGN_NOPS (arg1);
8975
8976 tem = fold_relational_const (code, type, arg0, arg1);
8977 if (tem != NULL_TREE)
8978 return tem;
8979
8980 /* If one arg is a real or integer constant, put it last. */
8981 if (tree_swap_operands_p (arg0, arg1, true))
8982 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8983
8984 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8985 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8986 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8987 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8988 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8989 && (TREE_CODE (arg1) == INTEGER_CST
8990 && !TREE_OVERFLOW (arg1)))
8991 {
8992 tree const1 = TREE_OPERAND (arg0, 1);
8993 tree const2 = arg1;
8994 tree variable = TREE_OPERAND (arg0, 0);
8995 tree lhs;
8996 int lhs_add;
8997 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8998
8999 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9000 TREE_TYPE (arg1), const2, const1);
9001
9002       /* If the constant operation overflowed, this can be
9003 simplified as a comparison against INT_MAX/INT_MIN. */
9004 if (TREE_CODE (lhs) == INTEGER_CST
9005 && TREE_OVERFLOW (lhs))
9006 {
9007 int const1_sgn = tree_int_cst_sgn (const1);
9008 enum tree_code code2 = code;
9009
9010 /* Get the sign of the constant on the lhs if the
9011 operation were VARIABLE + CONST1. */
9012 if (TREE_CODE (arg0) == MINUS_EXPR)
9013 const1_sgn = -const1_sgn;
9014
9015 /* The sign of the constant determines if we overflowed
9016 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9017 Canonicalize to the INT_MIN overflow by swapping the comparison
9018 if necessary. */
9019 if (const1_sgn == -1)
9020 code2 = swap_tree_comparison (code);
9021
9022 /* We now can look at the canonicalized case
9023 VARIABLE + 1 CODE2 INT_MIN
9024 and decide on the result. */
9025 if (code2 == LT_EXPR
9026 || code2 == LE_EXPR
9027 || code2 == EQ_EXPR)
9028 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9029 else if (code2 == NE_EXPR
9030 || code2 == GE_EXPR
9031 || code2 == GT_EXPR)
9032 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9033 }
9034
9035 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9036 && (TREE_CODE (lhs) != INTEGER_CST
9037 || !TREE_OVERFLOW (lhs)))
9038 {
9039 if (code != EQ_EXPR && code != NE_EXPR)
9040 fold_overflow_warning ("assuming signed overflow does not occur "
9041 "when changing X +- C1 cmp C2 to "
9042 "X cmp C1 +- C2",
9043 WARN_STRICT_OVERFLOW_COMPARISON);
9044 return fold_build2_loc (loc, code, type, variable, lhs);
9045 }
9046 }
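  /* Illustrative example: for signed int x with undefined overflow,
       x + 5 < 10         ->  x < 5
     and when C2 -+ C1 overflows the result is known outright, e.g.
       x - 10 < INT_MAX   ->  true
     since x - 10 could only reach INT_MAX if the subtraction itself
     overflowed.  */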
9047
9048   /* For comparisons of pointers we can decompose them into a compile-time
9049      comparison of the base objects and the offsets into the object.
9050 This requires at least one operand being an ADDR_EXPR or a
9051 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9052 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9053 && (TREE_CODE (arg0) == ADDR_EXPR
9054 || TREE_CODE (arg1) == ADDR_EXPR
9055 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9056 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9057 {
9058 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9059 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9060 enum machine_mode mode;
9061 int volatilep, unsignedp;
9062 bool indirect_base0 = false, indirect_base1 = false;
9063
9064 /* Get base and offset for the access. Strip ADDR_EXPR for
9065 get_inner_reference, but put it back by stripping INDIRECT_REF
9066 off the base object if possible. indirect_baseN will be true
9067 if baseN is not an address but refers to the object itself. */
9068 base0 = arg0;
9069 if (TREE_CODE (arg0) == ADDR_EXPR)
9070 {
9071 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9072 &bitsize, &bitpos0, &offset0, &mode,
9073 &unsignedp, &volatilep, false);
9074 if (TREE_CODE (base0) == INDIRECT_REF)
9075 base0 = TREE_OPERAND (base0, 0);
9076 else
9077 indirect_base0 = true;
9078 }
9079 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9080 {
9081 base0 = TREE_OPERAND (arg0, 0);
9082 STRIP_SIGN_NOPS (base0);
9083 if (TREE_CODE (base0) == ADDR_EXPR)
9084 {
9085 base0 = TREE_OPERAND (base0, 0);
9086 indirect_base0 = true;
9087 }
9088 offset0 = TREE_OPERAND (arg0, 1);
9089 if (host_integerp (offset0, 0))
9090 {
9091 HOST_WIDE_INT off = size_low_cst (offset0);
9092 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9093 * BITS_PER_UNIT)
9094 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9095 {
9096 bitpos0 = off * BITS_PER_UNIT;
9097 offset0 = NULL_TREE;
9098 }
9099 }
9100 }
9101
9102 base1 = arg1;
9103 if (TREE_CODE (arg1) == ADDR_EXPR)
9104 {
9105 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9106 &bitsize, &bitpos1, &offset1, &mode,
9107 &unsignedp, &volatilep, false);
9108 if (TREE_CODE (base1) == INDIRECT_REF)
9109 base1 = TREE_OPERAND (base1, 0);
9110 else
9111 indirect_base1 = true;
9112 }
9113 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9114 {
9115 base1 = TREE_OPERAND (arg1, 0);
9116 STRIP_SIGN_NOPS (base1);
9117 if (TREE_CODE (base1) == ADDR_EXPR)
9118 {
9119 base1 = TREE_OPERAND (base1, 0);
9120 indirect_base1 = true;
9121 }
9122 offset1 = TREE_OPERAND (arg1, 1);
9123 if (host_integerp (offset1, 0))
9124 {
9125 HOST_WIDE_INT off = size_low_cst (offset1);
9126 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9127 * BITS_PER_UNIT)
9128 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9129 {
9130 bitpos1 = off * BITS_PER_UNIT;
9131 offset1 = NULL_TREE;
9132 }
9133 }
9134 }
9135
9136 /* A local variable can never be pointed to by
9137 the default SSA name of an incoming parameter. */
9138 if ((TREE_CODE (arg0) == ADDR_EXPR
9139 && indirect_base0
9140 && TREE_CODE (base0) == VAR_DECL
9141 && auto_var_in_fn_p (base0, current_function_decl)
9142 && !indirect_base1
9143 && TREE_CODE (base1) == SSA_NAME
9144 && SSA_NAME_IS_DEFAULT_DEF (base1)
9145 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9146 || (TREE_CODE (arg1) == ADDR_EXPR
9147 && indirect_base1
9148 && TREE_CODE (base1) == VAR_DECL
9149 && auto_var_in_fn_p (base1, current_function_decl)
9150 && !indirect_base0
9151 && TREE_CODE (base0) == SSA_NAME
9152 && SSA_NAME_IS_DEFAULT_DEF (base0)
9153 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9154 {
9155 if (code == NE_EXPR)
9156 return constant_boolean_node (1, type);
9157 else if (code == EQ_EXPR)
9158 return constant_boolean_node (0, type);
9159 }
9160 /* If we have equivalent bases we might be able to simplify. */
9161 else if (indirect_base0 == indirect_base1
9162 && operand_equal_p (base0, base1, 0))
9163 {
9164 /* We can fold this expression to a constant if the non-constant
9165 offset parts are equal. */
9166 if ((offset0 == offset1
9167 || (offset0 && offset1
9168 && operand_equal_p (offset0, offset1, 0)))
9169 && (code == EQ_EXPR
9170 || code == NE_EXPR
9171 || (indirect_base0 && DECL_P (base0))
9172 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9173
9174 {
9175 if (code != EQ_EXPR
9176 && code != NE_EXPR
9177 && bitpos0 != bitpos1
9178 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9179 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9180 fold_overflow_warning (("assuming pointer wraparound does not "
9181 "occur when comparing P +- C1 with "
9182 "P +- C2"),
9183 WARN_STRICT_OVERFLOW_CONDITIONAL);
9184
9185 switch (code)
9186 {
9187 case EQ_EXPR:
9188 return constant_boolean_node (bitpos0 == bitpos1, type);
9189 case NE_EXPR:
9190 return constant_boolean_node (bitpos0 != bitpos1, type);
9191 case LT_EXPR:
9192 return constant_boolean_node (bitpos0 < bitpos1, type);
9193 case LE_EXPR:
9194 return constant_boolean_node (bitpos0 <= bitpos1, type);
9195 case GE_EXPR:
9196 return constant_boolean_node (bitpos0 >= bitpos1, type);
9197 case GT_EXPR:
9198 return constant_boolean_node (bitpos0 > bitpos1, type);
9199 default:;
9200 }
9201 }
9202 /* We can simplify the comparison to a comparison of the variable
9203 offset parts if the constant offset parts are equal.
9204 Be careful to use signed sizetype here because otherwise we
9205 mess with array offsets in the wrong way. This is possible
9206 	 because pointer arithmetic is restricted to remain within an
9207 object and overflow on pointer differences is undefined as of
9208 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9209 else if (bitpos0 == bitpos1
9210 && ((code == EQ_EXPR || code == NE_EXPR)
9211 || (indirect_base0 && DECL_P (base0))
9212 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9213 {
9214 /* By converting to signed sizetype we cover middle-end pointer
9215 arithmetic which operates on unsigned pointer types of size
9216 type size and ARRAY_REF offsets which are properly sign or
9217 zero extended from their type in case it is narrower than
9218 sizetype. */
9219 if (offset0 == NULL_TREE)
9220 offset0 = build_int_cst (ssizetype, 0);
9221 else
9222 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9223 if (offset1 == NULL_TREE)
9224 offset1 = build_int_cst (ssizetype, 0);
9225 else
9226 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9227
9228 if (code != EQ_EXPR
9229 && code != NE_EXPR
9230 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9231 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9232 fold_overflow_warning (("assuming pointer wraparound does not "
9233 "occur when comparing P +- C1 with "
9234 "P +- C2"),
9235 WARN_STRICT_OVERFLOW_COMPARISON);
9236
9237 return fold_build2_loc (loc, code, type, offset0, offset1);
9238 }
9239 }
9240 /* For non-equal bases we can simplify if they are addresses
9241 of local binding decls or constants. */
9242 else if (indirect_base0 && indirect_base1
9243 /* We know that !operand_equal_p (base0, base1, 0)
9244 because the if condition was false. But make
9245 sure two decls are not the same. */
9246 && base0 != base1
9247 && TREE_CODE (arg0) == ADDR_EXPR
9248 && TREE_CODE (arg1) == ADDR_EXPR
9249 && (((TREE_CODE (base0) == VAR_DECL
9250 || TREE_CODE (base0) == PARM_DECL)
9251 && (targetm.binds_local_p (base0)
9252 || CONSTANT_CLASS_P (base1)))
9253 || CONSTANT_CLASS_P (base0))
9254 && (((TREE_CODE (base1) == VAR_DECL
9255 || TREE_CODE (base1) == PARM_DECL)
9256 && (targetm.binds_local_p (base1)
9257 || CONSTANT_CLASS_P (base0)))
9258 || CONSTANT_CLASS_P (base1)))
9259 {
9260 if (code == EQ_EXPR)
9261 return omit_two_operands_loc (loc, type, boolean_false_node,
9262 arg0, arg1);
9263 else if (code == NE_EXPR)
9264 return omit_two_operands_loc (loc, type, boolean_true_node,
9265 arg0, arg1);
9266 }
9267 /* For equal offsets we can simplify to a comparison of the
9268 base addresses. */
9269 else if (bitpos0 == bitpos1
9270 && (indirect_base0
9271 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9272 && (indirect_base1
9273 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9274 && ((offset0 == offset1)
9275 || (offset0 && offset1
9276 && operand_equal_p (offset0, offset1, 0))))
9277 {
9278 if (indirect_base0)
9279 base0 = build_fold_addr_expr_loc (loc, base0);
9280 if (indirect_base1)
9281 base1 = build_fold_addr_expr_loc (loc, base1);
9282 return fold_build2_loc (loc, code, type, base0, base1);
9283 }
9284 }
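  /* Illustrative examples for the pointer decomposition above: with
       int a[10];  struct S { int f, g; } s;
     we fold
       &a[2] == &a[4]  ->  false   (equal bases, different offsets)
       &s.f < &s.g     ->  true    (equal bases, constant offsets)
       &a[i] == &a[j]  ->  a comparison of the offsets in ssizetype,
                           which then folds further toward i == j.  */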
9285
9286 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9287 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9288 the resulting offset is smaller in absolute value than the
9289 original one. */
9290 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9291 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9292 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9293 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9294 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9295 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9296 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9297 {
9298 tree const1 = TREE_OPERAND (arg0, 1);
9299 tree const2 = TREE_OPERAND (arg1, 1);
9300 tree variable1 = TREE_OPERAND (arg0, 0);
9301 tree variable2 = TREE_OPERAND (arg1, 0);
9302 tree cst;
9303 const char * const warnmsg = G_("assuming signed overflow does not "
9304 "occur when combining constants around "
9305 "a comparison");
9306
9307 /* Put the constant on the side where it doesn't overflow and is
9308 of lower absolute value than before. */
9309 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9310 ? MINUS_EXPR : PLUS_EXPR,
9311 const2, const1);
9312 if (!TREE_OVERFLOW (cst)
9313 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9314 {
9315 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9316 return fold_build2_loc (loc, code, type,
9317 variable1,
9318 fold_build2_loc (loc,
9319 TREE_CODE (arg1), TREE_TYPE (arg1),
9320 variable2, cst));
9321 }
9322
9323 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9324 ? MINUS_EXPR : PLUS_EXPR,
9325 const1, const2);
9326 if (!TREE_OVERFLOW (cst)
9327 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9328 {
9329 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9330 return fold_build2_loc (loc, code, type,
9331 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9332 variable1, cst),
9333 variable2);
9334 }
9335 }
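  /* Illustrative example: for signed int x, y with undefined overflow,
       x + 10 < y + 3   ->  x < y + (-7)
     combining the two constants into a single one of smaller absolute
     value than the larger original, and warning under
     -Wstrict-overflow.  */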
9336
9337 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9338 signed arithmetic case. That form is created by the compiler
9339 often enough for folding it to be of value. One example is in
9340 computing loop trip counts after Operator Strength Reduction. */
9341 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9342 && TREE_CODE (arg0) == MULT_EXPR
9343 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9344 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9345 && integer_zerop (arg1))
9346 {
9347 tree const1 = TREE_OPERAND (arg0, 1);
9348 tree const2 = arg1; /* zero */
9349 tree variable1 = TREE_OPERAND (arg0, 0);
9350 enum tree_code cmp_code = code;
9351
9352 /* Handle unfolded multiplication by zero. */
9353 if (integer_zerop (const1))
9354 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9355
9356 fold_overflow_warning (("assuming signed overflow does not occur when "
9357 "eliminating multiplication in comparison "
9358 "with zero"),
9359 WARN_STRICT_OVERFLOW_COMPARISON);
9360
9361 /* If const1 is negative we swap the sense of the comparison. */
9362 if (tree_int_cst_sgn (const1) < 0)
9363 cmp_code = swap_tree_comparison (cmp_code);
9364
9365 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9366 }
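  /* Illustrative example: for signed int x with undefined overflow,
       x * 4 > 0    ->  x > 0
       x * -4 > 0   ->  x < 0   (sense swapped for a negative C1)
     since x * C1 cannot change sign without overflowing.  */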
9367
9368 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9369 if (tem)
9370 return tem;
9371
9372 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9373 {
9374 tree targ0 = strip_float_extensions (arg0);
9375 tree targ1 = strip_float_extensions (arg1);
9376 tree newtype = TREE_TYPE (targ0);
9377
9378 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9379 newtype = TREE_TYPE (targ1);
9380
9381 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9382 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9383 return fold_build2_loc (loc, code, type,
9384 fold_convert_loc (loc, newtype, targ0),
9385 fold_convert_loc (loc, newtype, targ1));
9386
9387 /* (-a) CMP (-b) -> b CMP a */
9388 if (TREE_CODE (arg0) == NEGATE_EXPR
9389 && TREE_CODE (arg1) == NEGATE_EXPR)
9390 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9391 TREE_OPERAND (arg0, 0));
9392
9393 if (TREE_CODE (arg1) == REAL_CST)
9394 {
9395 REAL_VALUE_TYPE cst;
9396 cst = TREE_REAL_CST (arg1);
9397
9398 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9399 if (TREE_CODE (arg0) == NEGATE_EXPR)
9400 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9401 TREE_OPERAND (arg0, 0),
9402 build_real (TREE_TYPE (arg1),
9403 real_value_negate (&cst)));
9404
9405 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9406 /* a CMP (-0) -> a CMP 0 */
9407 if (REAL_VALUE_MINUS_ZERO (cst))
9408 return fold_build2_loc (loc, code, type, arg0,
9409 build_real (TREE_TYPE (arg1), dconst0));
9410
9411 /* x != NaN is always true, other ops are always false. */
9412 if (REAL_VALUE_ISNAN (cst)
9413 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9414 {
9415 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9416 return omit_one_operand_loc (loc, type, tem, arg0);
9417 }
9418
9419 /* Fold comparisons against infinity. */
9420 if (REAL_VALUE_ISINF (cst)
9421 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9422 {
9423 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9424 if (tem != NULL_TREE)
9425 return tem;
9426 }
9427 }
9428
9429 /* If this is a comparison of a real constant with a PLUS_EXPR
9430 or a MINUS_EXPR of a real constant, we can convert it into a
9431 comparison with a revised real constant as long as no overflow
9432 occurs when unsafe_math_optimizations are enabled. */
9433 if (flag_unsafe_math_optimizations
9434 && TREE_CODE (arg1) == REAL_CST
9435 && (TREE_CODE (arg0) == PLUS_EXPR
9436 || TREE_CODE (arg0) == MINUS_EXPR)
9437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9438 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9439 ? MINUS_EXPR : PLUS_EXPR,
9440 arg1, TREE_OPERAND (arg0, 1)))
9441 && !TREE_OVERFLOW (tem))
9442 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9443
9444 /* Likewise, we can simplify a comparison of a real constant with
9445 a MINUS_EXPR whose first operand is also a real constant, i.e.
9446 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9447 floating-point types only if -fassociative-math is set. */
9448 if (flag_associative_math
9449 && TREE_CODE (arg1) == REAL_CST
9450 && TREE_CODE (arg0) == MINUS_EXPR
9451 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9452 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9453 arg1))
9454 && !TREE_OVERFLOW (tem))
9455 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9456 TREE_OPERAND (arg0, 1), tem);
9457
9458 /* Fold comparisons against built-in math functions. */
9459 if (TREE_CODE (arg1) == REAL_CST
9460 && flag_unsafe_math_optimizations
9461 && ! flag_errno_math)
9462 {
9463 enum built_in_function fcode = builtin_mathfn_code (arg0);
9464
9465 if (fcode != END_BUILTINS)
9466 {
9467 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9468 if (tem != NULL_TREE)
9469 return tem;
9470 }
9471 }
9472 }
9473
9474 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9475 && CONVERT_EXPR_P (arg0))
9476 {
9477 /* If we are widening one operand of an integer comparison,
9478 see if the other operand is similarly being widened. Perhaps we
9479 can do the comparison in the narrower type. */
9480 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9481 if (tem)
9482 return tem;
9483
9484 /* Or if we are changing signedness. */
9485 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9486 if (tem)
9487 return tem;
9488 }
9489
9490 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9491 constant, we can simplify it. */
9492 if (TREE_CODE (arg1) == INTEGER_CST
9493 && (TREE_CODE (arg0) == MIN_EXPR
9494 || TREE_CODE (arg0) == MAX_EXPR)
9495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9496 {
9497 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9498 if (tem)
9499 return tem;
9500 }
9501
9502 /* Simplify comparison of something with itself. (For IEEE
9503 floating-point, we can only do some of these simplifications.) */
9504 if (operand_equal_p (arg0, arg1, 0))
9505 {
9506 switch (code)
9507 {
9508 case EQ_EXPR:
9509 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9510 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9511 return constant_boolean_node (1, type);
9512 break;
9513
9514 case GE_EXPR:
9515 case LE_EXPR:
9516 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9517 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9518 return constant_boolean_node (1, type);
9519 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9520
9521 case NE_EXPR:
9522 	  /* For NE, we can only do this simplification if the type is
9523 	     integral or we don't honor IEEE floating-point NaNs.  */
9524 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9525 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9526 break;
9527 /* ... fall through ... */
9528 case GT_EXPR:
9529 case LT_EXPR:
9530 return constant_boolean_node (0, type);
9531 default:
9532 gcc_unreachable ();
9533 }
9534 }
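  /* Illustrative example: for double d with IEEE NaNs honored,
       d >= d  ->  d == d   (false exactly when d is a NaN)
       d > d   ->  false
     while d == d and d != d are left alone; for integral operands all
     six comparisons fold to constants.  */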
9535
9536 /* If we are comparing an expression that just has comparisons
9537 of two integer values, arithmetic expressions of those comparisons,
9538 and constants, we can simplify it. There are only three cases
9539 to check: the two values can either be equal, the first can be
9540 greater, or the second can be greater. Fold the expression for
9541 those three values. Since each value must be 0 or 1, we have
9542 eight possibilities, each of which corresponds to the constant 0
9543 or 1 or one of the six possible comparisons.
9544
9545 This handles common cases like (a > b) == 0 but also handles
9546 expressions like ((x > y) - (y > x)) > 0, which supposedly
9547 occur in macroized code. */
9548
9549 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9550 {
9551 tree cval1 = 0, cval2 = 0;
9552 int save_p = 0;
9553
9554 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9555 /* Don't handle degenerate cases here; they should already
9556 have been handled anyway. */
9557 && cval1 != 0 && cval2 != 0
9558 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9559 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9560 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9561 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9562 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9563 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9564 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9565 {
9566 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9567 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9568
9569 /* We can't just pass T to eval_subst in case cval1 or cval2
9570 was the same as ARG1. */
9571
9572 tree high_result
9573 = fold_build2_loc (loc, code, type,
9574 eval_subst (loc, arg0, cval1, maxval,
9575 cval2, minval),
9576 arg1);
9577 tree equal_result
9578 = fold_build2_loc (loc, code, type,
9579 eval_subst (loc, arg0, cval1, maxval,
9580 cval2, maxval),
9581 arg1);
9582 tree low_result
9583 = fold_build2_loc (loc, code, type,
9584 eval_subst (loc, arg0, cval1, minval,
9585 cval2, maxval),
9586 arg1);
9587
9588 /* All three of these results should be 0 or 1. Confirm they are.
9589 Then use those values to select the proper code to use. */
9590
9591 if (TREE_CODE (high_result) == INTEGER_CST
9592 && TREE_CODE (equal_result) == INTEGER_CST
9593 && TREE_CODE (low_result) == INTEGER_CST)
9594 {
9595 /* Make a 3-bit mask with the high-order bit being the
9596 	     value for `>', the next for `=', and the low for `<'.  */
9597 switch ((integer_onep (high_result) * 4)
9598 + (integer_onep (equal_result) * 2)
9599 + integer_onep (low_result))
9600 {
9601 case 0:
9602 /* Always false. */
9603 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9604 case 1:
9605 code = LT_EXPR;
9606 break;
9607 case 2:
9608 code = EQ_EXPR;
9609 break;
9610 case 3:
9611 code = LE_EXPR;
9612 break;
9613 case 4:
9614 code = GT_EXPR;
9615 break;
9616 case 5:
9617 code = NE_EXPR;
9618 break;
9619 case 6:
9620 code = GE_EXPR;
9621 break;
9622 case 7:
9623 /* Always true. */
9624 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9625 }
9626
9627 if (save_p)
9628 {
9629 tem = save_expr (build2 (code, type, cval1, cval2));
9630 SET_EXPR_LOCATION (tem, loc);
9631 return tem;
9632 }
9633 return fold_build2_loc (loc, code, type, cval1, cval2);
9634 }
9635 }
9636 }
9637
9638 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9639 into a single range test. */
9640 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9641 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9642 && TREE_CODE (arg1) == INTEGER_CST
9643 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9644 && !integer_zerop (TREE_OPERAND (arg0, 1))
9645 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9646 && !TREE_OVERFLOW (arg1))
9647 {
9648 tem = fold_div_compare (loc, code, type, arg0, arg1);
9649 if (tem != NULL_TREE)
9650 return tem;
9651 }
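  /* Illustrative example: for unsigned int x,
       x / 4 == 3
     folds to a single range test equivalent to 12 <= x && x <= 15,
     since exactly those values truncate to 3.  */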
9652
9653 /* Fold ~X op ~Y as Y op X. */
9654 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9655 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9656 {
9657 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9658 return fold_build2_loc (loc, code, type,
9659 fold_convert_loc (loc, cmp_type,
9660 TREE_OPERAND (arg1, 0)),
9661 TREE_OPERAND (arg0, 0));
9662 }
9663
9664 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9665 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9666 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9667 {
9668 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9669 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9670 TREE_OPERAND (arg0, 0),
9671 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9672 fold_convert_loc (loc, cmp_type, arg1)));
9673 }
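  /* Illustrative examples: for int x, y,
       ~x < ~y   ->  y < x
       ~x == 5   ->  x == ~5   (i.e. x == -6)
     because ~ is an order-reversing bijection on the integers.  */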
9674
9675 return NULL_TREE;
9676 }
9677
9678
9679 /* Subroutine of fold_binary. Optimize complex multiplications of the
9680 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9681 argument EXPR represents the expression "z" of type TYPE. */
9682
9683 static tree
9684 fold_mult_zconjz (location_t loc, tree type, tree expr)
9685 {
9686 tree itype = TREE_TYPE (type);
9687 tree rpart, ipart, tem;
9688
9689 if (TREE_CODE (expr) == COMPLEX_EXPR)
9690 {
9691 rpart = TREE_OPERAND (expr, 0);
9692 ipart = TREE_OPERAND (expr, 1);
9693 }
9694 else if (TREE_CODE (expr) == COMPLEX_CST)
9695 {
9696 rpart = TREE_REALPART (expr);
9697 ipart = TREE_IMAGPART (expr);
9698 }
9699 else
9700 {
9701 expr = save_expr (expr);
9702 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9703 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9704 }
9705
9706 rpart = save_expr (rpart);
9707 ipart = save_expr (ipart);
9708 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9709 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9710 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9711 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9712 build_zero_cst (itype));
9713 }
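/* Illustrative example: for z = a + b*i,
     z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
   which is exactly the COMPLEX_EXPR built above.  */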
9714
9715
9716 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9717 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9718 guarantees that P and N have the same least significant log2(M) bits.
9719 N is not otherwise constrained. In particular, N is not normalized to
9720 0 <= N < M as is common. In general, the precise value of P is unknown.
9721 M is chosen as large as possible such that constant N can be determined.
9722
9723 Returns M and sets *RESIDUE to N.
9724
9725 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9726 account. This is not always possible due to PR 35705.
9727 */
9728
9729 static unsigned HOST_WIDE_INT
9730 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9731 bool allow_func_align)
9732 {
9733 enum tree_code code;
9734
9735 *residue = 0;
9736
9737 code = TREE_CODE (expr);
9738 if (code == ADDR_EXPR)
9739 {
9740 unsigned int bitalign;
9741 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9742 *residue /= BITS_PER_UNIT;
9743 return bitalign / BITS_PER_UNIT;
9744 }
9745 else if (code == POINTER_PLUS_EXPR)
9746 {
9747 tree op0, op1;
9748 unsigned HOST_WIDE_INT modulus;
9749 enum tree_code inner_code;
9750
9751 op0 = TREE_OPERAND (expr, 0);
9752 STRIP_NOPS (op0);
9753 modulus = get_pointer_modulus_and_residue (op0, residue,
9754 allow_func_align);
9755
9756 op1 = TREE_OPERAND (expr, 1);
9757 STRIP_NOPS (op1);
9758 inner_code = TREE_CODE (op1);
9759 if (inner_code == INTEGER_CST)
9760 {
9761 *residue += TREE_INT_CST_LOW (op1);
9762 return modulus;
9763 }
9764 else if (inner_code == MULT_EXPR)
9765 {
9766 op1 = TREE_OPERAND (op1, 1);
9767 if (TREE_CODE (op1) == INTEGER_CST)
9768 {
9769 unsigned HOST_WIDE_INT align;
9770
9771 /* Compute the greatest power-of-2 divisor of op1. */
9772 align = TREE_INT_CST_LOW (op1);
9773 align &= -align;
9774
9775 	      /* If align is non-zero and less than modulus, replace
9776 	         modulus with align.  If align is 0, then either op1 is 0
9777 or the greatest power-of-2 divisor of op1 doesn't fit in an
9778 unsigned HOST_WIDE_INT. In either case, no additional
9779 constraint is imposed. */
9780 if (align)
9781 modulus = MIN (modulus, align);
9782
9783 return modulus;
9784 }
9785 }
9786 }
9787
9788 /* If we get here, we were unable to determine anything useful about the
9789 expression. */
9790 return 1;
9791 }
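/* Illustrative example: for EXPR = &a p+ 36 where the object a is known
   to be 16-byte aligned, the ADDR_EXPR case yields modulus 16 with
   residue 0 and the INTEGER_CST case then adds 36, so the function
   returns 16 with *RESIDUE = 36 (N is deliberately not normalized to
   0 <= N < M).  A return value of 1 means nothing useful is known.  */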
9792
9793 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9794 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9795
9796 static bool
9797 vec_cst_ctor_to_array (tree arg, tree *elts)
9798 {
9799 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9800
9801 if (TREE_CODE (arg) == VECTOR_CST)
9802 {
9803 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9804 elts[i] = VECTOR_CST_ELT (arg, i);
9805 }
9806 else if (TREE_CODE (arg) == CONSTRUCTOR)
9807 {
9808 constructor_elt *elt;
9809
9810 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9811 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9812 return false;
9813 else
9814 elts[i] = elt->value;
9815 }
9816 else
9817 return false;
9818 for (; i < nelts; i++)
9819 elts[i]
9820 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9821 return true;
9822 }
9823
9824 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9825 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9826 NULL_TREE otherwise. */
9827
9828 static tree
9829 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9830 {
9831 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9832 tree *elts;
9833 bool need_ctor = false;
9834
9835 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9836 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9837 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9838 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9839 return NULL_TREE;
9840
9841 elts = XALLOCAVEC (tree, nelts * 3);
9842 if (!vec_cst_ctor_to_array (arg0, elts)
9843 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9844 return NULL_TREE;
9845
9846 for (i = 0; i < nelts; i++)
9847 {
9848 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9849 need_ctor = true;
9850 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9851 }
9852
9853 if (need_ctor)
9854 {
9855 vec<constructor_elt, va_gc> *v;
9856 vec_alloc (v, nelts);
9857 for (i = 0; i < nelts; i++)
9858 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9859 return build_constructor (type, v);
9860 }
9861 else
9862 return build_vector (type, &elts[2 * nelts]);
9863 }
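/* Illustrative example: with nelts == 4, ARG0 = { 0, 1, 2, 3 },
   ARG1 = { 4, 5, 6, 7 } and SEL = { 0, 4, 1, 5 }, each selector entry
   indexes the concatenation of ARG0 and ARG1, so the result is the
   VECTOR_CST { 0, 4, 1, 5 } (or a CONSTRUCTOR if any selected element
   is not constant).  */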
9864
9865 /* Try to fold a pointer difference of type TYPE between two address
9866    expressions of array references AREF0 and AREF1 using location LOC.
9867    Return a simplified expression for the difference or NULL_TREE.  */
9868
9869 static tree
9870 fold_addr_of_array_ref_difference (location_t loc, tree type,
9871 tree aref0, tree aref1)
9872 {
9873 tree base0 = TREE_OPERAND (aref0, 0);
9874 tree base1 = TREE_OPERAND (aref1, 0);
9875 tree base_offset = build_int_cst (type, 0);
9876
9877 /* If the bases are array references as well, recurse. If the bases
9878      are pointer indirections, compute the difference of the pointers.
9879 If the bases are equal, we are set. */
9880 if ((TREE_CODE (base0) == ARRAY_REF
9881 && TREE_CODE (base1) == ARRAY_REF
9882 && (base_offset
9883 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9884 || (INDIRECT_REF_P (base0)
9885 && INDIRECT_REF_P (base1)
9886 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9887 TREE_OPERAND (base0, 0),
9888 TREE_OPERAND (base1, 0))))
9889 || operand_equal_p (base0, base1, 0))
9890 {
9891 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9892 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9893 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9894 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9895 return fold_build2_loc (loc, PLUS_EXPR, type,
9896 base_offset,
9897 fold_build2_loc (loc, MULT_EXPR, type,
9898 diff, esz));
9899 }
9900 return NULL_TREE;
9901 }
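/* Illustrative example: for int a[10], the address difference
   &a[i] - &a[j] has equal bases, so it folds to
   0 + (i - j) * 4 on a target where sizeof (int) == 4; nested
   ARRAY_REFs and pointer indirections contribute BASE_OFFSET through
   the recursive cases.  */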
9902
9903 /* If the real or vector real constant CST of type TYPE has an exact
9904    inverse, return it, else return NULL_TREE.  */
9905
9906 static tree
9907 exact_inverse (tree type, tree cst)
9908 {
9909 REAL_VALUE_TYPE r;
9910 tree unit_type, *elts;
9911 enum machine_mode mode;
9912 unsigned vec_nelts, i;
9913
9914 switch (TREE_CODE (cst))
9915 {
9916 case REAL_CST:
9917 r = TREE_REAL_CST (cst);
9918
9919 if (exact_real_inverse (TYPE_MODE (type), &r))
9920 return build_real (type, r);
9921
9922 return NULL_TREE;
9923
9924 case VECTOR_CST:
9925 vec_nelts = VECTOR_CST_NELTS (cst);
9926 elts = XALLOCAVEC (tree, vec_nelts);
9927 unit_type = TREE_TYPE (type);
9928 mode = TYPE_MODE (unit_type);
9929
9930 for (i = 0; i < vec_nelts; i++)
9931 {
9932 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9933 if (!exact_real_inverse (mode, &r))
9934 return NULL_TREE;
9935 elts[i] = build_real (unit_type, r);
9936 }
9937
9938 return build_vector (type, elts);
9939
9940 default:
9941 return NULL_TREE;
9942 }
9943 }
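/* Illustrative example: for a binary floating-point TYPE, 4.0 has the
   exact inverse 0.25 and { 2.0, 0.5 } yields { 0.5, 2.0 }, whereas 3.0
   returns NULL_TREE because 1/3 is not exactly representable.  */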
9944
9945 /* Fold a binary expression of code CODE and type TYPE with operands
9946 OP0 and OP1. LOC is the location of the resulting expression.
9947 Return the folded expression if folding is successful. Otherwise,
9948 return NULL_TREE. */
9949
9950 tree
9951 fold_binary_loc (location_t loc,
9952 enum tree_code code, tree type, tree op0, tree op1)
9953 {
9954 enum tree_code_class kind = TREE_CODE_CLASS (code);
9955 tree arg0, arg1, tem;
9956 tree t1 = NULL_TREE;
9957 bool strict_overflow_p;
9958 unsigned int prec;
9959
9960 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9961 && TREE_CODE_LENGTH (code) == 2
9962 && op0 != NULL_TREE
9963 && op1 != NULL_TREE);
9964
9965 arg0 = op0;
9966 arg1 = op1;
9967
9968 /* Strip any conversions that don't change the mode. This is
9969 safe for every expression, except for a comparison expression
9970 because its signedness is derived from its operands. So, in
9971 the latter case, only strip conversions that don't change the
9972 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9973 preserved.
9974
9975 Note that this is done as an internal manipulation within the
9976 constant folder, in order to find the simplest representation
9977 of the arguments so that their form can be studied. In any
9978      case, the appropriate type conversions should be put back in
9979 the tree that will get out of the constant folder. */
9980
9981 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9982 {
9983 STRIP_SIGN_NOPS (arg0);
9984 STRIP_SIGN_NOPS (arg1);
9985 }
9986 else
9987 {
9988 STRIP_NOPS (arg0);
9989 STRIP_NOPS (arg1);
9990 }
9991
9992 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9993 constant but we can't do arithmetic on them. */
9994 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9995 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9996 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9997 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9998 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9999 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10000 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10001 {
10002 if (kind == tcc_binary)
10003 {
10004 /* Make sure type and arg0 have the same saturating flag. */
10005 gcc_assert (TYPE_SATURATING (type)
10006 == TYPE_SATURATING (TREE_TYPE (arg0)));
10007 tem = const_binop (code, arg0, arg1);
10008 }
10009 else if (kind == tcc_comparison)
10010 tem = fold_relational_const (code, type, arg0, arg1);
10011 else
10012 tem = NULL_TREE;
10013
10014 if (tem != NULL_TREE)
10015 {
10016 if (TREE_TYPE (tem) != type)
10017 tem = fold_convert_loc (loc, type, tem);
10018 return tem;
10019 }
10020 }
10021
10022 /* If this is a commutative operation, and ARG0 is a constant, move it
10023 to ARG1 to reduce the number of tests below. */
10024 if (commutative_tree_code (code)
10025 && tree_swap_operands_p (arg0, arg1, true))
10026 return fold_build2_loc (loc, code, type, op1, op0);
10027
10028 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10029
10030 First check for cases where an arithmetic operation is applied to a
10031 compound, conditional, or comparison operation. Push the arithmetic
10032 operation inside the compound or conditional to see if any folding
10033 can then be done. Convert comparison to conditional for this purpose.
10034      This also optimizes non-constant cases that used to be done in
10035 expand_expr.
10036
10037 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10038      where one of the operands is a truth value and the other is a truth
10039      value or a BIT_AND_EXPR with the constant 1.  In that case, the
10040 code below would make the expression more complex. Change it to a
10041 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10042 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10043
10044 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10045 || code == EQ_EXPR || code == NE_EXPR)
10046 && TREE_CODE (type) != VECTOR_TYPE
10047 && ((truth_value_p (TREE_CODE (arg0))
10048 && (truth_value_p (TREE_CODE (arg1))
10049 || (TREE_CODE (arg1) == BIT_AND_EXPR
10050 && integer_onep (TREE_OPERAND (arg1, 1)))))
10051 || (truth_value_p (TREE_CODE (arg1))
10052 && (truth_value_p (TREE_CODE (arg0))
10053 || (TREE_CODE (arg0) == BIT_AND_EXPR
10054 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10055 {
10056 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10057 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10058 : TRUTH_XOR_EXPR,
10059 boolean_type_node,
10060 fold_convert_loc (loc, boolean_type_node, arg0),
10061 fold_convert_loc (loc, boolean_type_node, arg1));
10062
10063 if (code == EQ_EXPR)
10064 tem = invert_truthvalue_loc (loc, tem);
10065
10066 return fold_convert_loc (loc, type, tem);
10067 }
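  /* Illustrative example: for int comparisons,
       (a < b) & (c < d)   ->  TRUTH_AND_EXPR (a < b, c < d)
       (a < b) == (c < d)  ->  inverted TRUTH_XOR_EXPR (a < b, c < d)
     so the expression is handled by the truth-value folders rather
     than the more complex bitwise code below.  */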
10068
10069 if (TREE_CODE_CLASS (code) == tcc_binary
10070 || TREE_CODE_CLASS (code) == tcc_comparison)
10071 {
10072 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10073 {
10074 tem = fold_build2_loc (loc, code, type,
10075 fold_convert_loc (loc, TREE_TYPE (op0),
10076 TREE_OPERAND (arg0, 1)), op1);
10077 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10078 tem);
10079 }
10080 if (TREE_CODE (arg1) == COMPOUND_EXPR
10081 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10082 {
10083 tem = fold_build2_loc (loc, code, type, op0,
10084 fold_convert_loc (loc, TREE_TYPE (op1),
10085 TREE_OPERAND (arg1, 1)));
10086 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10087 tem);
10088 }
10089
10090 if (TREE_CODE (arg0) == COND_EXPR
10091 || TREE_CODE (arg0) == VEC_COND_EXPR
10092 || COMPARISON_CLASS_P (arg0))
10093 {
10094 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10095 arg0, arg1,
10096 /*cond_first_p=*/1);
10097 if (tem != NULL_TREE)
10098 return tem;
10099 }
10100
10101 if (TREE_CODE (arg1) == COND_EXPR
10102 || TREE_CODE (arg1) == VEC_COND_EXPR
10103 || COMPARISON_CLASS_P (arg1))
10104 {
10105 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10106 arg1, arg0,
10107 /*cond_first_p=*/0);
10108 if (tem != NULL_TREE)
10109 return tem;
10110 }
10111 }
10112
10113 switch (code)
10114 {
10115 case MEM_REF:
10116 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10117 if (TREE_CODE (arg0) == ADDR_EXPR
10118 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10119 {
10120 tree iref = TREE_OPERAND (arg0, 0);
10121 return fold_build2 (MEM_REF, type,
10122 TREE_OPERAND (iref, 0),
10123 int_const_binop (PLUS_EXPR, arg1,
10124 TREE_OPERAND (iref, 1)));
10125 }
10126
10127 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10128 if (TREE_CODE (arg0) == ADDR_EXPR
10129 && handled_component_p (TREE_OPERAND (arg0, 0)))
10130 {
10131 tree base;
10132 HOST_WIDE_INT coffset;
10133 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10134 &coffset);
10135 if (!base)
10136 return NULL_TREE;
10137 return fold_build2 (MEM_REF, type,
10138 build_fold_addr_expr (base),
10139 int_const_binop (PLUS_EXPR, arg1,
10140 size_int (coffset)));
10141 }
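	/* Illustrative example: MEM[&MEM[p, 4], 8] folds to MEM[p, 12] by
	   the first transform; with struct S { int f, g; } s; and g at
	   unit offset 4, MEM[&s.g, 8] folds to MEM[&s, 12] by the
	   second.  */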
10142
10143 return NULL_TREE;
10144
10145 case POINTER_PLUS_EXPR:
10146 /* 0 +p index -> (type)index */
10147 if (integer_zerop (arg0))
10148 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10149
10150 /* PTR +p 0 -> PTR */
10151 if (integer_zerop (arg1))
10152 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10153
10154 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10155 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10156 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10157 return fold_convert_loc (loc, type,
10158 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10159 fold_convert_loc (loc, sizetype,
10160 arg1),
10161 fold_convert_loc (loc, sizetype,
10162 arg0)));
10163
10164 /* (PTR +p B) +p A -> PTR +p (B + A) */
10165 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10166 {
10167 tree inner;
10168 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10169 tree arg00 = TREE_OPERAND (arg0, 0);
10170 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10171 arg01, fold_convert_loc (loc, sizetype, arg1));
10172 return fold_convert_loc (loc, type,
10173 fold_build_pointer_plus_loc (loc,
10174 arg00, inner));
10175 }
10176
10177 /* PTR_CST +p CST -> CST1 */
10178 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10179 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10180 fold_convert_loc (loc, type, arg1));
10181
10182       /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10183          of the array.  The loop optimizer sometimes produces this type of
10184          expression.  */
10185 if (TREE_CODE (arg0) == ADDR_EXPR)
10186 {
10187 tem = try_move_mult_to_index (loc, arg0,
10188 fold_convert_loc (loc,
10189 ssizetype, arg1));
10190 if (tem)
10191 return fold_convert_loc (loc, type, tem);
10192 }
10193
10194 return NULL_TREE;
10195
10196 case PLUS_EXPR:
10197 /* A + (-B) -> A - B */
10198 if (TREE_CODE (arg1) == NEGATE_EXPR)
10199 return fold_build2_loc (loc, MINUS_EXPR, type,
10200 fold_convert_loc (loc, type, arg0),
10201 fold_convert_loc (loc, type,
10202 TREE_OPERAND (arg1, 0)));
10203 /* (-A) + B -> B - A */
10204 if (TREE_CODE (arg0) == NEGATE_EXPR
10205 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10206 return fold_build2_loc (loc, MINUS_EXPR, type,
10207 fold_convert_loc (loc, type, arg1),
10208 fold_convert_loc (loc, type,
10209 TREE_OPERAND (arg0, 0)));
10210
10211 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10212 {
10213 /* Convert ~A + 1 to -A. */
10214 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10215 && integer_onep (arg1))
10216 return fold_build1_loc (loc, NEGATE_EXPR, type,
10217 fold_convert_loc (loc, type,
10218 TREE_OPERAND (arg0, 0)));
10219
10220 /* ~X + X is -1. */
10221 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10222 && !TYPE_OVERFLOW_TRAPS (type))
10223 {
10224 tree tem = TREE_OPERAND (arg0, 0);
10225
10226 STRIP_NOPS (tem);
10227 if (operand_equal_p (tem, arg1, 0))
10228 {
10229 t1 = build_all_ones_cst (type);
10230 return omit_one_operand_loc (loc, type, t1, arg1);
10231 }
10232 }
10233
10234 /* X + ~X is -1. */
10235 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10236 && !TYPE_OVERFLOW_TRAPS (type))
10237 {
10238 tree tem = TREE_OPERAND (arg1, 0);
10239
10240 STRIP_NOPS (tem);
10241 if (operand_equal_p (arg0, tem, 0))
10242 {
10243 t1 = build_all_ones_cst (type);
10244 return omit_one_operand_loc (loc, type, t1, arg0);
10245 }
10246 }
10247
10248 /* X + (X / CST) * -CST is X % CST. */
10249 if (TREE_CODE (arg1) == MULT_EXPR
10250 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10251 && operand_equal_p (arg0,
10252 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10253 {
10254 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10255 tree cst1 = TREE_OPERAND (arg1, 1);
10256 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10257 cst1, cst0);
10258 if (sum && integer_zerop (sum))
10259 return fold_convert_loc (loc, type,
10260 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10261 TREE_TYPE (arg0), arg0,
10262 cst0));
10263 }
10264 }
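/* Illustrative two's-complement instances of the folds above
   (hypothetical code, non-trapping 32-bit int a):

     ~a + 1               becomes  -a
     ~a + a               becomes  -1
     a + (a / 16) * -16   becomes  a % 16

   The first two rely on ~a == -a - 1; the last is the usual
   truncating-division identity.  */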
10265
10266 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10267 one. Make sure the type is not saturating and has the signedness of
10268 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10269 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10270 if ((TREE_CODE (arg0) == MULT_EXPR
10271 || TREE_CODE (arg1) == MULT_EXPR)
10272 && !TYPE_SATURATING (type)
10273 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10274 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10275 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10276 {
10277 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10278 if (tem)
10279 return tem;
10280 }
10281
10282 if (! FLOAT_TYPE_P (type))
10283 {
10284 if (integer_zerop (arg1))
10285 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10286
10287 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10288 with a constant, and the two constants have no bits in common,
10289 we should treat this as a BIT_IOR_EXPR since this may produce more
10290 simplifications. */
10291 if (TREE_CODE (arg0) == BIT_AND_EXPR
10292 && TREE_CODE (arg1) == BIT_AND_EXPR
10293 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10294 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10295 && integer_zerop (const_binop (BIT_AND_EXPR,
10296 TREE_OPERAND (arg0, 1),
10297 TREE_OPERAND (arg1, 1))))
10298 {
10299 code = BIT_IOR_EXPR;
10300 goto bit_ior;
10301 }
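/* Illustrative: (x & 0x0f) + (y & 0xf0) cannot produce a carry
   between the two masked halves, so it is handled as
   (x & 0x0f) | (y & 0xf0) (hypothetical unsigned operands).  */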
10302
10303 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10304 (plus (plus (mult) (mult)) (foo)) so that we can
10305 take advantage of the factoring cases below. */
10306 if (TYPE_OVERFLOW_WRAPS (type)
10307 && (((TREE_CODE (arg0) == PLUS_EXPR
10308 || TREE_CODE (arg0) == MINUS_EXPR)
10309 && TREE_CODE (arg1) == MULT_EXPR)
10310 || ((TREE_CODE (arg1) == PLUS_EXPR
10311 || TREE_CODE (arg1) == MINUS_EXPR)
10312 && TREE_CODE (arg0) == MULT_EXPR)))
10313 {
10314 tree parg0, parg1, parg, marg;
10315 enum tree_code pcode;
10316
10317 if (TREE_CODE (arg1) == MULT_EXPR)
10318 parg = arg0, marg = arg1;
10319 else
10320 parg = arg1, marg = arg0;
10321 pcode = TREE_CODE (parg);
10322 parg0 = TREE_OPERAND (parg, 0);
10323 parg1 = TREE_OPERAND (parg, 1);
10324 STRIP_NOPS (parg0);
10325 STRIP_NOPS (parg1);
10326
10327 if (TREE_CODE (parg0) == MULT_EXPR
10328 && TREE_CODE (parg1) != MULT_EXPR)
10329 return fold_build2_loc (loc, pcode, type,
10330 fold_build2_loc (loc, PLUS_EXPR, type,
10331 fold_convert_loc (loc, type,
10332 parg0),
10333 fold_convert_loc (loc, type,
10334 marg)),
10335 fold_convert_loc (loc, type, parg1));
10336 if (TREE_CODE (parg0) != MULT_EXPR
10337 && TREE_CODE (parg1) == MULT_EXPR)
10338 return
10339 fold_build2_loc (loc, PLUS_EXPR, type,
10340 fold_convert_loc (loc, type, parg0),
10341 fold_build2_loc (loc, pcode, type,
10342 fold_convert_loc (loc, type, marg),
10343 fold_convert_loc (loc, type,
10344 parg1)));
10345 }
10346 }
10347 else
10348 {
10349 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10350 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10351 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10352
10353 /* Likewise if the operands are reversed. */
10354 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10355 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10356
10357 /* Convert X + -C into X - C. */
10358 if (TREE_CODE (arg1) == REAL_CST
10359 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10360 {
10361 tem = fold_negate_const (arg1, type);
10362 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10363 return fold_build2_loc (loc, MINUS_EXPR, type,
10364 fold_convert_loc (loc, type, arg0),
10365 fold_convert_loc (loc, type, tem));
10366 }
10367
10368 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10369 to __complex__ ( x, y ). This is not the same for SNaNs or
10370 if signed zeros are involved. */
10371 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10372 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10373 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10374 {
10375 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10376 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10377 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10378 bool arg0rz = false, arg0iz = false;
10379 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10380 || (arg0i && (arg0iz = real_zerop (arg0i))))
10381 {
10382 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10383 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10384 if (arg0rz && arg1i && real_zerop (arg1i))
10385 {
10386 tree rp = arg1r ? arg1r
10387 : build1 (REALPART_EXPR, rtype, arg1);
10388 tree ip = arg0i ? arg0i
10389 : build1 (IMAGPART_EXPR, rtype, arg0);
10390 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10391 }
10392 else if (arg0iz && arg1r && real_zerop (arg1r))
10393 {
10394 tree rp = arg0r ? arg0r
10395 : build1 (REALPART_EXPR, rtype, arg0);
10396 tree ip = arg1i ? arg1i
10397 : build1 (IMAGPART_EXPR, rtype, arg1);
10398 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10399 }
10400 }
10401 }
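/* Illustrative: __complex__ (x, 0.0) + __complex__ (0.0, y) becomes
   __complex__ (x, y).  With signed zeros honored this would be wrong,
   since x + 0.0 is +0.0 rather than -0.0 when x is -0.0.  */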
10402
10403 if (flag_unsafe_math_optimizations
10404 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10405 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10406 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10407 return tem;
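/* Illustrative: with -funsafe-math-optimizations, x/d + y/d is
   distributed into (x + y)/d, saving one division at the cost of a
   possibly different rounding.  */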
10408
10409 /* Convert x+x into x*2.0. */
10410 if (operand_equal_p (arg0, arg1, 0)
10411 && SCALAR_FLOAT_TYPE_P (type))
10412 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10413 build_real (type, dconst2));
10414
10415 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10416 We associate floats only if the user has specified
10417 -fassociative-math. */
10418 if (flag_associative_math
10419 && TREE_CODE (arg1) == PLUS_EXPR
10420 && TREE_CODE (arg0) != MULT_EXPR)
10421 {
10422 tree tree10 = TREE_OPERAND (arg1, 0);
10423 tree tree11 = TREE_OPERAND (arg1, 1);
10424 if (TREE_CODE (tree11) == MULT_EXPR
10425 && TREE_CODE (tree10) == MULT_EXPR)
10426 {
10427 tree tree0;
10428 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10429 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10430 }
10431 }
10432 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10433 We associate floats only if the user has specified
10434 -fassociative-math. */
10435 if (flag_associative_math
10436 && TREE_CODE (arg0) == PLUS_EXPR
10437 && TREE_CODE (arg1) != MULT_EXPR)
10438 {
10439 tree tree00 = TREE_OPERAND (arg0, 0);
10440 tree tree01 = TREE_OPERAND (arg0, 1);
10441 if (TREE_CODE (tree01) == MULT_EXPR
10442 && TREE_CODE (tree00) == MULT_EXPR)
10443 {
10444 tree tree0;
10445 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10446 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10447 }
10448 }
10449 }
10450
10451 bit_rotate:
10452 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10453 is a rotate of A by C1 bits. */
10454 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10455 is a rotate of A by B bits. */
10456 {
10457 enum tree_code code0, code1;
10458 tree rtype;
10459 code0 = TREE_CODE (arg0);
10460 code1 = TREE_CODE (arg1);
10461 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10462 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10463 && operand_equal_p (TREE_OPERAND (arg0, 0),
10464 TREE_OPERAND (arg1, 0), 0)
10465 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10466 TYPE_UNSIGNED (rtype))
10467 /* Only create rotates in complete modes. Other cases are not
10468 expanded properly. */
10469 && (element_precision (rtype)
10470 == element_precision (TYPE_MODE (rtype))))
10471 {
10472 tree tree01, tree11;
10473 enum tree_code code01, code11;
10474
10475 tree01 = TREE_OPERAND (arg0, 1);
10476 tree11 = TREE_OPERAND (arg1, 1);
10477 STRIP_NOPS (tree01);
10478 STRIP_NOPS (tree11);
10479 code01 = TREE_CODE (tree01);
10480 code11 = TREE_CODE (tree11);
10481 if (code01 == INTEGER_CST
10482 && code11 == INTEGER_CST
10483 && TREE_INT_CST_HIGH (tree01) == 0
10484 && TREE_INT_CST_HIGH (tree11) == 0
10485 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10486 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10487 {
10488 tem = build2_loc (loc, LROTATE_EXPR,
10489 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10490 TREE_OPERAND (arg0, 0),
10491 code0 == LSHIFT_EXPR ? tree01 : tree11);
10492 return fold_convert_loc (loc, type, tem);
10493 }
10494 else if (code11 == MINUS_EXPR)
10495 {
10496 tree tree110, tree111;
10497 tree110 = TREE_OPERAND (tree11, 0);
10498 tree111 = TREE_OPERAND (tree11, 1);
10499 STRIP_NOPS (tree110);
10500 STRIP_NOPS (tree111);
10501 if (TREE_CODE (tree110) == INTEGER_CST
10502 && 0 == compare_tree_int (tree110,
10503 element_precision
10504 (TREE_TYPE (TREE_OPERAND
10505 (arg0, 0))))
10506 && operand_equal_p (tree01, tree111, 0))
10507 return
10508 fold_convert_loc (loc, type,
10509 build2 ((code0 == LSHIFT_EXPR
10510 ? LROTATE_EXPR
10511 : RROTATE_EXPR),
10512 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10513 TREE_OPERAND (arg0, 0), tree01));
10514 }
10515 else if (code01 == MINUS_EXPR)
10516 {
10517 tree tree010, tree011;
10518 tree010 = TREE_OPERAND (tree01, 0);
10519 tree011 = TREE_OPERAND (tree01, 1);
10520 STRIP_NOPS (tree010);
10521 STRIP_NOPS (tree011);
10522 if (TREE_CODE (tree010) == INTEGER_CST
10523 && 0 == compare_tree_int (tree010,
10524 element_precision
10525 (TREE_TYPE (TREE_OPERAND
10526 (arg0, 0))))
10527 && operand_equal_p (tree11, tree011, 0))
10528 return fold_convert_loc
10529 (loc, type,
10530 build2 ((code0 != LSHIFT_EXPR
10531 ? LROTATE_EXPR
10532 : RROTATE_EXPR),
10533 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10534 TREE_OPERAND (arg0, 0), tree11));
10535 }
10536 }
10537 }
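/* Illustrative rotate idioms matched above (hypothetical code,
   assuming a 32-bit unsigned int x whose precision equals its
   mode's precision):

     (x << 5) + (x >> 27)         becomes  x lrotate 5
     (x << n) + (x >> (32 - n))   becomes  x lrotate n

   The BIT_IOR_EXPR and BIT_XOR_EXPR cases branch to this label too,
   so the same idioms spelled with | or ^ are recognized as well.  */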
10538
10539 associate:
10540 /* In most languages, we can't associate operations on floats through
10541 parentheses. Rather than remember where the parentheses were, we
10542 don't associate floats at all, unless the user has specified
10543 -fassociative-math.
10544 And, we need to make sure type is not saturating. */
10545
10546 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10547 && !TYPE_SATURATING (type))
10548 {
10549 tree var0, con0, lit0, minus_lit0;
10550 tree var1, con1, lit1, minus_lit1;
10551 tree atype = type;
10552 bool ok = true;
10553
10554 /* Split both trees into variables, constants, and literals. Then
10555 associate each group together, the constants with literals,
10556 then the result with variables. This increases the chances of
10557 literals being recombined later and of generating relocatable
10558 expressions for the sum of a constant and literal. */
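/* Illustrative: in (x + 1) + (y + 2) with wrapping (e.g. unsigned)
   arithmetic, the variables x and y, and the literals 1 and 2, are
   split apart and recombined as (x + y) + 3, letting the constants
   fold together.  */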
10559 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10560 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10561 code == MINUS_EXPR);
10562
10563 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10564 if (code == MINUS_EXPR)
10565 code = PLUS_EXPR;
10566
10567 /* With undefined overflow prefer doing association in a type
10568 which wraps on overflow, if that is one of the operand types. */
10569 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10570 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10571 {
10572 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10573 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10574 atype = TREE_TYPE (arg0);
10575 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10576 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10577 atype = TREE_TYPE (arg1);
10578 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10579 }
10580
10581 /* With undefined overflow we can only associate constants with one
10582 variable, and constants whose association doesn't overflow. */
10583 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10584 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10585 {
10586 if (var0 && var1)
10587 {
10588 tree tmp0 = var0;
10589 tree tmp1 = var1;
10590
10591 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10592 tmp0 = TREE_OPERAND (tmp0, 0);
10593 if (CONVERT_EXPR_P (tmp0)
10594 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10595 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10596 <= TYPE_PRECISION (atype)))
10597 tmp0 = TREE_OPERAND (tmp0, 0);
10598 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10599 tmp1 = TREE_OPERAND (tmp1, 0);
10600 if (CONVERT_EXPR_P (tmp1)
10601 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10602 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10603 <= TYPE_PRECISION (atype)))
10604 tmp1 = TREE_OPERAND (tmp1, 0);
10605 /* The only case we can still associate with two variables
10606 is if they are the same, modulo negation and bit-pattern
10607 preserving conversions. */
10608 if (!operand_equal_p (tmp0, tmp1, 0))
10609 ok = false;
10610 }
10611 }
10612
10613 /* Only do something if we found more than two objects. Otherwise,
10614 nothing has changed and we risk infinite recursion. */
10615 if (ok
10616 && (2 < ((var0 != 0) + (var1 != 0)
10617 + (con0 != 0) + (con1 != 0)
10618 + (lit0 != 0) + (lit1 != 0)
10619 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10620 {
10621 bool any_overflows = false;
10622 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10623 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10624 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10625 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10626 var0 = associate_trees (loc, var0, var1, code, atype);
10627 con0 = associate_trees (loc, con0, con1, code, atype);
10628 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10629 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10630 code, atype);
10631
10632 /* Preserve the MINUS_EXPR if the negative part of the literal is
10633 greater than the positive part. Otherwise, the multiplicative
10634 folding code (i.e. extract_muldiv) may be fooled when
10635 unsigned constants are subtracted, as in the following
10636 example: ((X*2 + 4) - 8U)/2. */
10637 if (minus_lit0 && lit0)
10638 {
10639 if (TREE_CODE (lit0) == INTEGER_CST
10640 && TREE_CODE (minus_lit0) == INTEGER_CST
10641 && tree_int_cst_lt (lit0, minus_lit0))
10642 {
10643 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10644 MINUS_EXPR, atype);
10645 lit0 = 0;
10646 }
10647 else
10648 {
10649 lit0 = associate_trees (loc, lit0, minus_lit0,
10650 MINUS_EXPR, atype);
10651 minus_lit0 = 0;
10652 }
10653 }
10654
10655 /* Don't introduce overflows through reassociation. */
10656 if (!any_overflows
10657 && ((lit0 && TREE_OVERFLOW (lit0))
10658 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10659 return NULL_TREE;
10660
10661 if (minus_lit0)
10662 {
10663 if (con0 == 0)
10664 return
10665 fold_convert_loc (loc, type,
10666 associate_trees (loc, var0, minus_lit0,
10667 MINUS_EXPR, atype));
10668 else
10669 {
10670 con0 = associate_trees (loc, con0, minus_lit0,
10671 MINUS_EXPR, atype);
10672 return
10673 fold_convert_loc (loc, type,
10674 associate_trees (loc, var0, con0,
10675 PLUS_EXPR, atype));
10676 }
10677 }
10678
10679 con0 = associate_trees (loc, con0, lit0, code, atype);
10680 return
10681 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10682 code, atype));
10683 }
10684 }
10685
10686 return NULL_TREE;
10687
10688 case MINUS_EXPR:
10689 /* Pointer simplifications for subtraction, simple reassociations. */
10690 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10691 {
10692 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10693 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10694 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10695 {
10696 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10697 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10698 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10699 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10700 return fold_build2_loc (loc, PLUS_EXPR, type,
10701 fold_build2_loc (loc, MINUS_EXPR, type,
10702 arg00, arg10),
10703 fold_build2_loc (loc, MINUS_EXPR, type,
10704 arg01, arg11));
10705 }
10706 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10707 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10708 {
10709 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10710 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10711 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10712 fold_convert_loc (loc, type, arg1));
10713 if (tmp)
10714 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10715 }
10716 }
10717 /* A - (-B) -> A + B */
10718 if (TREE_CODE (arg1) == NEGATE_EXPR)
10719 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10720 fold_convert_loc (loc, type,
10721 TREE_OPERAND (arg1, 0)));
10722 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10723 if (TREE_CODE (arg0) == NEGATE_EXPR
10724 && negate_expr_p (arg1)
10725 && reorder_operands_p (arg0, arg1))
10726 return fold_build2_loc (loc, MINUS_EXPR, type,
10727 fold_convert_loc (loc, type,
10728 negate_expr (arg1)),
10729 fold_convert_loc (loc, type,
10730 TREE_OPERAND (arg0, 0)));
10731 /* Convert -A - 1 to ~A. */
10732 if (TREE_CODE (type) != COMPLEX_TYPE
10733 && TREE_CODE (arg0) == NEGATE_EXPR
10734 && integer_onep (arg1)
10735 && !TYPE_OVERFLOW_TRAPS (type))
10736 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10737 fold_convert_loc (loc, type,
10738 TREE_OPERAND (arg0, 0)));
10739
10740 /* Convert -1 - A to ~A. */
10741 if (TREE_CODE (type) != COMPLEX_TYPE
10742 && integer_all_onesp (arg0))
10743 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
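/* Illustrative (two's complement): both -a - 1 and -1 - a fold to ~a
   for a non-trapping integer a.  */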
10744
10745
10746 /* X - (X / Y) * Y is X % Y. */
10747 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10748 && TREE_CODE (arg1) == MULT_EXPR
10749 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10750 && operand_equal_p (arg0,
10751 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10752 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10753 TREE_OPERAND (arg1, 1), 0))
10754 return
10755 fold_convert_loc (loc, type,
10756 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10757 arg0, TREE_OPERAND (arg1, 1)));
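/* Illustrative: for int x, y, the expression x - (x / y) * y is
   exactly the truncating remainder, so it folds to x % y.  */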
10758
10759 if (! FLOAT_TYPE_P (type))
10760 {
10761 if (integer_zerop (arg0))
10762 return negate_expr (fold_convert_loc (loc, type, arg1));
10763 if (integer_zerop (arg1))
10764 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10765
10766 /* Fold A - (A & B) into ~B & A. */
10767 if (!TREE_SIDE_EFFECTS (arg0)
10768 && TREE_CODE (arg1) == BIT_AND_EXPR)
10769 {
10770 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10771 {
10772 tree arg10 = fold_convert_loc (loc, type,
10773 TREE_OPERAND (arg1, 0));
10774 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10775 fold_build1_loc (loc, BIT_NOT_EXPR,
10776 type, arg10),
10777 fold_convert_loc (loc, type, arg0));
10778 }
10779 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10780 {
10781 tree arg11 = fold_convert_loc (loc,
10782 type, TREE_OPERAND (arg1, 1));
10783 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10784 fold_build1_loc (loc, BIT_NOT_EXPR,
10785 type, arg11),
10786 fold_convert_loc (loc, type, arg0));
10787 }
10788 }
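/* Illustrative: for unsigned x and m, x & m never exceeds x, and
   x - (x & m) keeps exactly the bits of x outside m, i.e. ~m & x.  */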
10789
10790 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10791 any power of 2 minus 1. */
10792 if (TREE_CODE (arg0) == BIT_AND_EXPR
10793 && TREE_CODE (arg1) == BIT_AND_EXPR
10794 && operand_equal_p (TREE_OPERAND (arg0, 0),
10795 TREE_OPERAND (arg1, 0), 0))
10796 {
10797 tree mask0 = TREE_OPERAND (arg0, 1);
10798 tree mask1 = TREE_OPERAND (arg1, 1);
10799 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10800
10801 if (operand_equal_p (tem, mask1, 0))
10802 {
10803 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10804 TREE_OPERAND (arg0, 0), mask1);
10805 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10806 }
10807 }
10808 }
10809
10810 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10811 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10812 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10813
10814 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10815 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10816 (-ARG1 + ARG0) reduces to -ARG1. */
10817 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10818 return negate_expr (fold_convert_loc (loc, type, arg1));
10819
10820 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10821 __complex__ ( x, -y ). This is not the same for SNaNs or if
10822 signed zeros are involved. */
10823 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10824 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10825 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10826 {
10827 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10828 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10829 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10830 bool arg0rz = false, arg0iz = false;
10831 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10832 || (arg0i && (arg0iz = real_zerop (arg0i))))
10833 {
10834 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10835 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10836 if (arg0rz && arg1i && real_zerop (arg1i))
10837 {
10838 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10839 arg1r ? arg1r
10840 : build1 (REALPART_EXPR, rtype, arg1));
10841 tree ip = arg0i ? arg0i
10842 : build1 (IMAGPART_EXPR, rtype, arg0);
10843 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10844 }
10845 else if (arg0iz && arg1r && real_zerop (arg1r))
10846 {
10847 tree rp = arg0r ? arg0r
10848 : build1 (REALPART_EXPR, rtype, arg0);
10849 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10850 arg1i ? arg1i
10851 : build1 (IMAGPART_EXPR, rtype, arg1));
10852 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10853 }
10854 }
10855 }
10856
10857 /* Fold &x - &x. This can happen from &x.foo - &x.
10858 This is unsafe for certain floats even in non-IEEE formats.
10859 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10860 Also note that operand_equal_p is always false if an operand
10861 is volatile. */
10862
10863 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10864 && operand_equal_p (arg0, arg1, 0))
10865 return build_zero_cst (type);
10866
10867 /* A - B -> A + (-B) if B is easily negatable. */
10868 if (negate_expr_p (arg1)
10869 && ((FLOAT_TYPE_P (type)
10870 /* Avoid this transformation if B is a positive REAL_CST. */
10871 && (TREE_CODE (arg1) != REAL_CST
10872 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10873 || INTEGRAL_TYPE_P (type)))
10874 return fold_build2_loc (loc, PLUS_EXPR, type,
10875 fold_convert_loc (loc, type, arg0),
10876 fold_convert_loc (loc, type,
10877 negate_expr (arg1)));
10878
10879 /* Try folding difference of addresses. */
10880 {
10881 HOST_WIDE_INT diff;
10882
10883 if ((TREE_CODE (arg0) == ADDR_EXPR
10884 || TREE_CODE (arg1) == ADDR_EXPR)
10885 && ptr_difference_const (arg0, arg1, &diff))
10886 return build_int_cst_type (type, diff);
10887 }
10888
10889 /* Fold &a[i] - &a[j] to i-j. */
10890 if (TREE_CODE (arg0) == ADDR_EXPR
10891 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10892 && TREE_CODE (arg1) == ADDR_EXPR
10893 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10894 {
10895 tree tem = fold_addr_of_array_ref_difference (loc, type,
10896 TREE_OPERAND (arg0, 0),
10897 TREE_OPERAND (arg1, 0));
10898 if (tem)
10899 return tem;
10900 }
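/* Illustrative: with int a[N], &a[i] - &a[j] is folded to the byte
   difference (i - j) * sizeof (a[0]) directly, without materializing
   the two addresses.  */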
10901
10902 if (FLOAT_TYPE_P (type)
10903 && flag_unsafe_math_optimizations
10904 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10905 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10906 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10907 return tem;
10908
10909 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10910 one. Make sure the type is not saturating and has the signedness of
10911 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10912 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10913 if ((TREE_CODE (arg0) == MULT_EXPR
10914 || TREE_CODE (arg1) == MULT_EXPR)
10915 && !TYPE_SATURATING (type)
10916 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10917 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10918 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10919 {
10920 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10921 if (tem)
10922 return tem;
10923 }
10924
10925 goto associate;
10926
10927 case MULT_EXPR:
10928 /* (-A) * (-B) -> A * B */
10929 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10930 return fold_build2_loc (loc, MULT_EXPR, type,
10931 fold_convert_loc (loc, type,
10932 TREE_OPERAND (arg0, 0)),
10933 fold_convert_loc (loc, type,
10934 negate_expr (arg1)));
10935 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10936 return fold_build2_loc (loc, MULT_EXPR, type,
10937 fold_convert_loc (loc, type,
10938 negate_expr (arg0)),
10939 fold_convert_loc (loc, type,
10940 TREE_OPERAND (arg1, 0)));
10941
10942 if (! FLOAT_TYPE_P (type))
10943 {
10944 if (integer_zerop (arg1))
10945 return omit_one_operand_loc (loc, type, arg1, arg0);
10946 if (integer_onep (arg1))
10947 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10948 /* Transform x * -1 into -x. Make sure to do the negation
10949 on the original operand with conversions not stripped
10950 because we can only strip non-sign-changing conversions. */
10951 if (integer_minus_onep (arg1))
10952 return fold_convert_loc (loc, type, negate_expr (op0));
10953 /* Transform x * -C into -x * C if x is easily negatable. */
10954 if (TREE_CODE (arg1) == INTEGER_CST
10955 && tree_int_cst_sgn (arg1) == -1
10956 && negate_expr_p (arg0)
10957 && (tem = negate_expr (arg1)) != arg1
10958 && !TREE_OVERFLOW (tem))
10959 return fold_build2_loc (loc, MULT_EXPR, type,
10960 fold_convert_loc (loc, type,
10961 negate_expr (arg0)),
10962 tem);
10963
10964 /* (a * (1 << b)) is (a << b) */
10965 if (TREE_CODE (arg1) == LSHIFT_EXPR
10966 && integer_onep (TREE_OPERAND (arg1, 0)))
10967 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10968 TREE_OPERAND (arg1, 1));
10969 if (TREE_CODE (arg0) == LSHIFT_EXPR
10970 && integer_onep (TREE_OPERAND (arg0, 0)))
10971 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10972 TREE_OPERAND (arg0, 1));
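/* Illustrative: a * (1u << b) and (1u << b) * a both become a << b,
   replacing the multiply with a shift (hypothetical unsigned
   operands).  */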
10973
10974 /* (A + A) * C -> A * 2 * C */
10975 if (TREE_CODE (arg0) == PLUS_EXPR
10976 && TREE_CODE (arg1) == INTEGER_CST
10977 && operand_equal_p (TREE_OPERAND (arg0, 0),
10978 TREE_OPERAND (arg0, 1), 0))
10979 return fold_build2_loc (loc, MULT_EXPR, type,
10980 omit_one_operand_loc (loc, type,
10981 TREE_OPERAND (arg0, 0),
10982 TREE_OPERAND (arg0, 1)),
10983 fold_build2_loc (loc, MULT_EXPR, type,
10984 build_int_cst (type, 2), arg1));
10985
10986 strict_overflow_p = false;
10987 if (TREE_CODE (arg1) == INTEGER_CST
10988 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10989 &strict_overflow_p)))
10990 {
10991 if (strict_overflow_p)
10992 fold_overflow_warning (("assuming signed overflow does not "
10993 "occur when simplifying "
10994 "multiplication"),
10995 WARN_STRICT_OVERFLOW_MISC);
10996 return fold_convert_loc (loc, type, tem);
10997 }
10998
10999 /* Optimize z * conj(z) for integer complex numbers. */
11000 if (TREE_CODE (arg0) == CONJ_EXPR
11001 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11002 return fold_mult_zconjz (loc, type, arg1);
11003 if (TREE_CODE (arg1) == CONJ_EXPR
11004 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11005 return fold_mult_zconjz (loc, type, arg0);
11006 }
11007 else
11008 {
11009 /* Maybe fold x * 0 to 0. The expressions aren't the same
11010 when x is NaN, since x * 0 is also NaN. Nor are they the
11011 same in modes with signed zeros, since multiplying a
11012 negative value by 0 gives -0, not +0. */
11013 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11014 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11015 && real_zerop (arg1))
11016 return omit_one_operand_loc (loc, type, arg1, arg0);
11017 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11018 Likewise for complex arithmetic with signed zeros. */
11019 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11020 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11021 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11022 && real_onep (arg1))
11023 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11024
11025 /* Transform x * -1.0 into -x. */
11026 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11027 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11028 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11029 && real_minus_onep (arg1))
11030 return fold_convert_loc (loc, type, negate_expr (arg0));
11031
11032 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11033 the result for floating point types due to rounding, so it is applied
11034 only if -fassociative-math was specified. */
11035 if (flag_associative_math
11036 && TREE_CODE (arg0) == RDIV_EXPR
11037 && TREE_CODE (arg1) == REAL_CST
11038 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11039 {
11040 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11041 arg1);
11042 if (tem)
11043 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11044 TREE_OPERAND (arg0, 1));
11045 }
11046
11047 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11048 if (operand_equal_p (arg0, arg1, 0))
11049 {
11050 tree tem = fold_strip_sign_ops (arg0);
11051 if (tem != NULL_TREE)
11052 {
11053 tem = fold_convert_loc (loc, type, tem);
11054 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11055 }
11056 }
11057
11058 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11059 This is not the same for NaNs or if signed zeros are
11060 involved. */
11061 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11062 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11063 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11064 && TREE_CODE (arg1) == COMPLEX_CST
11065 && real_zerop (TREE_REALPART (arg1)))
11066 {
11067 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11068 if (real_onep (TREE_IMAGPART (arg1)))
11069 return
11070 fold_build2_loc (loc, COMPLEX_EXPR, type,
11071 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11072 rtype, arg0)),
11073 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11074 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11075 return
11076 fold_build2_loc (loc, COMPLEX_EXPR, type,
11077 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11078 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11079 rtype, arg0)));
11080 }
11081
11082 /* Optimize z * conj(z) for floating point complex numbers.
11083 Guarded by flag_unsafe_math_optimizations as non-finite
11084 imaginary components don't produce scalar results. */
11085 if (flag_unsafe_math_optimizations
11086 && TREE_CODE (arg0) == CONJ_EXPR
11087 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11088 return fold_mult_zconjz (loc, type, arg1);
11089 if (flag_unsafe_math_optimizations
11090 && TREE_CODE (arg1) == CONJ_EXPR
11091 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11092 return fold_mult_zconjz (loc, type, arg0);
11093
11094 if (flag_unsafe_math_optimizations)
11095 {
11096 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11097 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11098
11099 /* Optimizations of root(...)*root(...). */
11100 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11101 {
11102 tree rootfn, arg;
11103 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11104 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11105
11106 /* Optimize sqrt(x)*sqrt(x) as x. */
11107 if (BUILTIN_SQRT_P (fcode0)
11108 && operand_equal_p (arg00, arg10, 0)
11109 && ! HONOR_SNANS (TYPE_MODE (type)))
11110 return arg00;
11111
11112 /* Optimize root(x)*root(y) as root(x*y). */
11113 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11114 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11115 return build_call_expr_loc (loc, rootfn, 1, arg);
11116 }
11117
11118 /* Optimize expN(x)*expN(y) as expN(x+y). */
11119 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11120 {
11121 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11122 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11123 CALL_EXPR_ARG (arg0, 0),
11124 CALL_EXPR_ARG (arg1, 0));
11125 return build_call_expr_loc (loc, expfn, 1, arg);
11126 }
11127
11128 /* Optimizations of pow(...)*pow(...). */
11129 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11130 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11131 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11132 {
11133 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11134 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11135 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11136 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11137
11138 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11139 if (operand_equal_p (arg01, arg11, 0))
11140 {
11141 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11142 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11143 arg00, arg10);
11144 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11145 }
11146
11147 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11148 if (operand_equal_p (arg00, arg10, 0))
11149 {
11150 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11151 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11152 arg01, arg11);
11153 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11154 }
11155 }
11156
11157 /* Optimize tan(x)*cos(x) as sin(x). */
11158 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11159 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11160 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11161 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11162 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11163 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11164 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11165 CALL_EXPR_ARG (arg1, 0), 0))
11166 {
11167 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11168
11169 if (sinfn != NULL_TREE)
11170 return build_call_expr_loc (loc, sinfn, 1,
11171 CALL_EXPR_ARG (arg0, 0));
11172 }
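/* Illustrative -funsafe-math-optimizations folds from above
   (double arguments):

     sqrt (x) * sqrt (x)       becomes  x
     sqrt (x) * sqrt (y)       becomes  sqrt (x * y)
     exp (x) * exp (y)         becomes  exp (x + y)
     pow (x, y) * pow (z, y)   becomes  pow (x * z, y)
     pow (x, y) * pow (x, z)   becomes  pow (x, y + z)
     tan (x) * cos (x)         becomes  sin (x)

   None of these is exact for all inputs (NaNs, infinities, rounding),
   which is why they sit behind the flag.  */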
11173
11174 /* Optimize x*pow(x,c) as pow(x,c+1). */
11175 if (fcode1 == BUILT_IN_POW
11176 || fcode1 == BUILT_IN_POWF
11177 || fcode1 == BUILT_IN_POWL)
11178 {
11179 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11180 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11181 if (TREE_CODE (arg11) == REAL_CST
11182 && !TREE_OVERFLOW (arg11)
11183 && operand_equal_p (arg0, arg10, 0))
11184 {
11185 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11186 REAL_VALUE_TYPE c;
11187 tree arg;
11188
11189 c = TREE_REAL_CST (arg11);
11190 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11191 arg = build_real (type, c);
11192 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11193 }
11194 }
11195
11196 /* Optimize pow(x,c)*x as pow(x,c+1). */
11197 if (fcode0 == BUILT_IN_POW
11198 || fcode0 == BUILT_IN_POWF
11199 || fcode0 == BUILT_IN_POWL)
11200 {
11201 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11202 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11203 if (TREE_CODE (arg01) == REAL_CST
11204 && !TREE_OVERFLOW (arg01)
11205 && operand_equal_p (arg1, arg00, 0))
11206 {
11207 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11208 REAL_VALUE_TYPE c;
11209 tree arg;
11210
11211 c = TREE_REAL_CST (arg01);
11212 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11213 arg = build_real (type, c);
11214 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11215 }
11216 }
11217
11218 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11219 if (!in_gimple_form
11220 && optimize
11221 && operand_equal_p (arg0, arg1, 0))
11222 {
11223 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11224
11225 if (powfn)
11226 {
11227 tree arg = build_real (type, dconst2);
11228 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11229 }
11230 }
11231 }
11232 }
11233 goto associate;
11234
11235 case BIT_IOR_EXPR:
11236 bit_ior:
11237 if (integer_all_onesp (arg1))
11238 return omit_one_operand_loc (loc, type, arg1, arg0);
11239 if (integer_zerop (arg1))
11240 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11241 if (operand_equal_p (arg0, arg1, 0))
11242 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11243
11244 /* ~X | X is -1. */
11245 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11246 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11247 {
11248 t1 = build_zero_cst (type);
11249 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11250 return omit_one_operand_loc (loc, type, t1, arg1);
11251 }
11252
11253 /* X | ~X is -1. */
11254 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11255 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11256 {
11257 t1 = build_zero_cst (type);
11258 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11259 return omit_one_operand_loc (loc, type, t1, arg0);
11260 }
11261
11262 /* Canonicalize (X & C1) | C2. */
11263 if (TREE_CODE (arg0) == BIT_AND_EXPR
11264 && TREE_CODE (arg1) == INTEGER_CST
11265 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11266 {
11267 double_int c1, c2, c3, msk;
11268 int width = TYPE_PRECISION (type), w;
11269 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11270 c2 = tree_to_double_int (arg1);
11271
11272 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11273 if ((c1 & c2) == c1)
11274 return omit_one_operand_loc (loc, type, arg1,
11275 TREE_OPERAND (arg0, 0));
11276
11277 msk = double_int::mask (width);
11278
11279 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11280 if (msk.and_not (c1 | c2).is_zero ())
11281 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11282 TREE_OPERAND (arg0, 0), arg1);
11283
11284 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11285 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11286 mode which allows further optimizations. */
11287 c1 &= msk;
11288 c2 &= msk;
11289 c3 = c1.and_not (c2);
11290 for (w = BITS_PER_UNIT;
11291 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11292 w <<= 1)
11293 {
11294 unsigned HOST_WIDE_INT mask
11295 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11296 if (((c1.low | c2.low) & mask) == mask
11297 && (c1.low & ~mask) == 0 && c1.high == 0)
11298 {
11299 c3 = double_int::from_uhwi (mask);
11300 break;
11301 }
11302 }
11303 if (c3 != c1)
11304 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11305 fold_build2_loc (loc, BIT_AND_EXPR, type,
11306 TREE_OPERAND (arg0, 0),
11307 double_int_to_tree (type,
11308 c3)),
11309 arg1);
11310 }
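/* Illustrative canonicalizations (hypothetical 16-bit unsigned x):

     (x & 0x00ff) | 0x00ff   becomes  0x00ff                  (C1 & C2) == C1
     (x & 0x0ff0) | 0x00ff   becomes  (x & 0x0f00) | 0x00ff   C1 &= ~C2

   The loop above instead widens C1 to a full byte or halfword mask
   when that yields a cheaper constant.  */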
11311
11312 /* (X & Y) | Y is (X, Y). */
11313 if (TREE_CODE (arg0) == BIT_AND_EXPR
11314 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11315 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11316 /* (X & Y) | X is (Y, X). */
11317 if (TREE_CODE (arg0) == BIT_AND_EXPR
11318 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11319 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11320 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11321 /* X | (X & Y) is (Y, X). */
11322 if (TREE_CODE (arg1) == BIT_AND_EXPR
11323 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11324 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11325 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11326 /* X | (Y & X) is (Y, X). */
11327 if (TREE_CODE (arg1) == BIT_AND_EXPR
11328 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11329 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11330 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11331
11332 /* (X & ~Y) | (~X & Y) is X ^ Y */
11333 if (TREE_CODE (arg0) == BIT_AND_EXPR
11334 && TREE_CODE (arg1) == BIT_AND_EXPR)
11335 {
11336 tree a0, a1, l0, l1, n0, n1;
11337
11338 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11339 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11340
11341 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11342 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11343
11344 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11345 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11346
11347 if ((operand_equal_p (n0, a0, 0)
11348 && operand_equal_p (n1, a1, 0))
11349 || (operand_equal_p (n0, a1, 0)
11350 && operand_equal_p (n1, a0, 0)))
11351 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11352 }
11353
11354 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11355 if (t1 != NULL_TREE)
11356 return t1;
11357
11358 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11359
11360 This results in more efficient code for machines without a NAND
11361 instruction. Combine will canonicalize to the first form
11362 which will allow use of NAND instructions provided by the
11363 backend if they exist. */
11364 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11365 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11366 {
11367 return
11368 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11369 build2 (BIT_AND_EXPR, type,
11370 fold_convert_loc (loc, type,
11371 TREE_OPERAND (arg0, 0)),
11372 fold_convert_loc (loc, type,
11373 TREE_OPERAND (arg1, 0))));
11374 }
11375
11376 /* See if this can be simplified into a rotate first. If that
11377 is unsuccessful continue in the association code. */
11378 goto bit_rotate;
11379
11380 case BIT_XOR_EXPR:
11381 if (integer_zerop (arg1))
11382 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11383 if (integer_all_onesp (arg1))
11384 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11385 if (operand_equal_p (arg0, arg1, 0))
11386 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11387
11388 /* ~X ^ X is -1. */
11389 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11390 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11391 {
11392 t1 = build_zero_cst (type);
11393 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11394 return omit_one_operand_loc (loc, type, t1, arg1);
11395 }
11396
11397 /* X ^ ~X is -1. */
11398 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11399 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11400 {
11401 t1 = build_zero_cst (type);
11402 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11403 return omit_one_operand_loc (loc, type, t1, arg0);
11404 }
11405
11406 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11407 with a constant, and the two constants have no bits in common,
11408 we should treat this as a BIT_IOR_EXPR since this may produce more
11409 simplifications. */
11410 if (TREE_CODE (arg0) == BIT_AND_EXPR
11411 && TREE_CODE (arg1) == BIT_AND_EXPR
11412 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11413 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11414 && integer_zerop (const_binop (BIT_AND_EXPR,
11415 TREE_OPERAND (arg0, 1),
11416 TREE_OPERAND (arg1, 1))))
11417 {
11418 code = BIT_IOR_EXPR;
11419 goto bit_ior;
11420 }
11421
11422 /* (X | Y) ^ X -> Y & ~X. */
11423 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11424 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11425 {
11426 tree t2 = TREE_OPERAND (arg0, 1);
11427 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11428 arg1);
11429 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11430 fold_convert_loc (loc, type, t2),
11431 fold_convert_loc (loc, type, t1));
11432 return t1;
11433 }
11434
11435 /* (Y | X) ^ X -> Y & ~X. */
11436 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11437 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11438 {
11439 tree t2 = TREE_OPERAND (arg0, 0);
11440 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11441 arg1);
11442 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11443 fold_convert_loc (loc, type, t2),
11444 fold_convert_loc (loc, type, t1));
11445 return t1;
11446 }
11447
11448 /* X ^ (X | Y) -> Y & ~X. */
11449 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11450 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11451 {
11452 tree t2 = TREE_OPERAND (arg1, 1);
11453 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11454 arg0);
11455 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11456 fold_convert_loc (loc, type, t2),
11457 fold_convert_loc (loc, type, t1));
11458 return t1;
11459 }
11460
11461 /* X ^ (Y | X) -> Y & ~X. */
11462 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11463 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11464 {
11465 tree t2 = TREE_OPERAND (arg1, 0);
11466 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11467 arg0);
11468 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11469 fold_convert_loc (loc, type, t2),
11470 fold_convert_loc (loc, type, t1));
11471 return t1;
11472 }
11473
11474 /* Convert ~X ^ ~Y to X ^ Y. */
11475 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11476 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11477 return fold_build2_loc (loc, code, type,
11478 fold_convert_loc (loc, type,
11479 TREE_OPERAND (arg0, 0)),
11480 fold_convert_loc (loc, type,
11481 TREE_OPERAND (arg1, 0)));
11482
11483 /* Convert ~X ^ C to X ^ ~C. */
11484 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11485 && TREE_CODE (arg1) == INTEGER_CST)
11486 return fold_build2_loc (loc, code, type,
11487 fold_convert_loc (loc, type,
11488 TREE_OPERAND (arg0, 0)),
11489 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11490
11491 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11492 if (TREE_CODE (arg0) == BIT_AND_EXPR
11493 && integer_onep (TREE_OPERAND (arg0, 1))
11494 && integer_onep (arg1))
11495 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11496 build_zero_cst (TREE_TYPE (arg0)));
11497
11498 /* Fold (X & Y) ^ Y as ~X & Y. */
11499 if (TREE_CODE (arg0) == BIT_AND_EXPR
11500 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11501 {
11502 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11503 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11504 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11505 fold_convert_loc (loc, type, arg1));
11506 }
11507 /* Fold (X & Y) ^ X as ~Y & X. */
11508 if (TREE_CODE (arg0) == BIT_AND_EXPR
11509 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11510 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11511 {
11512 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11513 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11514 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11515 fold_convert_loc (loc, type, arg1));
11516 }
11517 /* Fold X ^ (X & Y) as X & ~Y. */
11518 if (TREE_CODE (arg1) == BIT_AND_EXPR
11519 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11520 {
11521 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11522 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11523 fold_convert_loc (loc, type, arg0),
11524 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11525 }
11526 /* Fold X ^ (Y & X) as ~Y & X. */
11527 if (TREE_CODE (arg1) == BIT_AND_EXPR
11528 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11529 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11530 {
11531 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11532 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11533 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11534 fold_convert_loc (loc, type, arg0));
11535 }
11536
11537 /* See if this can be simplified into a rotate first. If that
11538 is unsuccessful continue in the association code. */
11539 goto bit_rotate;
11540
11541 case BIT_AND_EXPR:
11542 if (integer_all_onesp (arg1))
11543 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11544 if (integer_zerop (arg1))
11545 return omit_one_operand_loc (loc, type, arg1, arg0);
11546 if (operand_equal_p (arg0, arg1, 0))
11547 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11548
11549 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11550 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11551 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11552 || (TREE_CODE (arg0) == EQ_EXPR
11553 && integer_zerop (TREE_OPERAND (arg0, 1))))
11554 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11555 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11556
11557 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11558 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11559 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11560 || (TREE_CODE (arg1) == EQ_EXPR
11561 && integer_zerop (TREE_OPERAND (arg1, 1))))
11562 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11563 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11564
11565 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11566 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11567 && TREE_CODE (arg1) == INTEGER_CST
11568 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11569 {
11570 tree tmp1 = fold_convert_loc (loc, type, arg1);
11571 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11572 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11573 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11574 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11575 return
11576 fold_convert_loc (loc, type,
11577 fold_build2_loc (loc, BIT_IOR_EXPR,
11578 type, tmp2, tmp3));
11579 }
11580
11581 /* (X | Y) & Y is (X, Y). */
11582 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11583 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11584 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11585 /* (X | Y) & X is (Y, X). */
11586 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11587 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11588 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11589 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11590 /* X & (X | Y) is (Y, X). */
11591 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11592 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11593 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11594 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11595 /* X & (Y | X) is (Y, X). */
11596 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11597 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11598 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11599 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11600
11601 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11602 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11603 && integer_onep (TREE_OPERAND (arg0, 1))
11604 && integer_onep (arg1))
11605 {
11606 tree tem2;
11607 tem = TREE_OPERAND (arg0, 0);
11608 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11609 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11610 tem, tem2);
11611 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11612 build_zero_cst (TREE_TYPE (tem)));
11613 }
11614 /* Fold ~X & 1 as (X & 1) == 0. */
11615 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11616 && integer_onep (arg1))
11617 {
11618 tree tem2;
11619 tem = TREE_OPERAND (arg0, 0);
11620 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11621 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11622 tem, tem2);
11623 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11624 build_zero_cst (TREE_TYPE (tem)));
11625 }
11626 /* Fold !X & 1 as X == 0. */
11627 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11628 && integer_onep (arg1))
11629 {
11630 tem = TREE_OPERAND (arg0, 0);
11631 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11632 build_zero_cst (TREE_TYPE (tem)));
11633 }
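/* Illustrative: each of the three folds above turns a low-bit test
   into an equality:

     (x ^ 1) & 1   becomes  (x & 1) == 0
     ~x & 1        becomes  (x & 1) == 0
     !x & 1        becomes  x == 0

   Inverting bit 0 and then masking it is the same as testing it
   against zero.  */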
11634
11635 /* Fold (X ^ Y) & Y as ~X & Y. */
11636 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11637 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11638 {
11639 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11640 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11641 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11642 fold_convert_loc (loc, type, arg1));
11643 }
11644 /* Fold (X ^ Y) & X as ~Y & X. */
11645 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11646 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11647 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11648 {
11649 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11650 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11651 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11652 fold_convert_loc (loc, type, arg1));
11653 }
11654 /* Fold X & (X ^ Y) as X & ~Y. */
11655 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11656 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11657 {
11658 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11659 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11660 fold_convert_loc (loc, type, arg0),
11661 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11662 }
11663 /* Fold X & (Y ^ X) as ~Y & X. */
11664 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11665 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11666 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11667 {
11668 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11669 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11670 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11671 fold_convert_loc (loc, type, arg0));
11672 }
11673
11674 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11675 multiple of 1 << CST. */
11676 if (TREE_CODE (arg1) == INTEGER_CST)
11677 {
11678 double_int cst1 = tree_to_double_int (arg1);
11679 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11680 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11681 if ((cst1 & ncst1) == ncst1
11682 && multiple_of_p (type, arg0,
11683 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11684 return fold_convert_loc (loc, type, arg0);
11685 }
11686
11687 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11688 bits from CST2. */
11689 if (TREE_CODE (arg1) == INTEGER_CST
11690 && TREE_CODE (arg0) == MULT_EXPR
11691 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11692 {
11693 int arg1tz
11694 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11695 if (arg1tz > 0)
11696 {
11697 double_int arg1mask, masked;
11698 arg1mask = ~double_int::mask (arg1tz);
11699 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11700 TYPE_UNSIGNED (type));
11701 masked = arg1mask & tree_to_double_int (arg1);
11702 if (masked.is_zero ())
11703 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11704 arg0, arg1);
11705 else if (masked != tree_to_double_int (arg1))
11706 return fold_build2_loc (loc, code, type, op0,
11707 double_int_to_tree (type, masked));
11708 }
11709 }
11710
11711 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11712 ((A & N) + B) & M -> (A + B) & M
11713 Similarly if (N & M) == 0,
11714 ((A | N) + B) & M -> (A + B) & M
11715 and for - instead of + (or unary - instead of +)
11716 and/or ^ instead of |.
11717 If B is constant and (B & M) == 0, fold into A & M. */
11718 if (host_integerp (arg1, 1))
11719 {
11720 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11721 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11722 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11723 && (TREE_CODE (arg0) == PLUS_EXPR
11724 || TREE_CODE (arg0) == MINUS_EXPR
11725 || TREE_CODE (arg0) == NEGATE_EXPR)
11726 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11727 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11728 {
11729 tree pmop[2];
11730 int which = 0;
11731 unsigned HOST_WIDE_INT cst0;
11732
11733 /* Now we know that arg0 is (C + D) or (C - D) or
11734 -C and arg1 (M) is == (1LL << cst) - 1.
11735 Store C into PMOP[0] and D into PMOP[1]. */
11736 pmop[0] = TREE_OPERAND (arg0, 0);
11737 pmop[1] = NULL;
11738 if (TREE_CODE (arg0) != NEGATE_EXPR)
11739 {
11740 pmop[1] = TREE_OPERAND (arg0, 1);
11741 which = 1;
11742 }
11743
11744 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11745 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11746 & cst1) != cst1)
11747 which = -1;
11748
11749 for (; which >= 0; which--)
11750 switch (TREE_CODE (pmop[which]))
11751 {
11752 case BIT_AND_EXPR:
11753 case BIT_IOR_EXPR:
11754 case BIT_XOR_EXPR:
11755 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11756 != INTEGER_CST)
11757 break;
11758 /* tree_low_cst is not used because we don't care about
11759 the upper bits. */
11760 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11761 cst0 &= cst1;
11762 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11763 {
11764 if (cst0 != cst1)
11765 break;
11766 }
11767 else if (cst0 != 0)
11768 break;
11769 /* If C or D is of the form (A & N) where
11770 (N & M) == M, or of the form (A | N) or
11771 (A ^ N) where (N & M) == 0, replace it with A. */
11772 pmop[which] = TREE_OPERAND (pmop[which], 0);
11773 break;
11774 case INTEGER_CST:
11775 /* If C or D is a constant N where (N & M) == 0, it can
11776 be omitted (assumed to be 0). */
11777 if ((TREE_CODE (arg0) == PLUS_EXPR
11778 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11779 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11780 pmop[which] = NULL;
11781 break;
11782 default:
11783 break;
11784 }
11785
11786 /* Only build anything new if we optimized one or both arguments
11787 above. */
11788 if (pmop[0] != TREE_OPERAND (arg0, 0)
11789 || (TREE_CODE (arg0) != NEGATE_EXPR
11790 && pmop[1] != TREE_OPERAND (arg0, 1)))
11791 {
11792 tree utype = TREE_TYPE (arg0);
11793 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11794 {
11795 /* Perform the operations in a type that has defined
11796 overflow behavior. */
11797 utype = unsigned_type_for (TREE_TYPE (arg0));
11798 if (pmop[0] != NULL)
11799 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11800 if (pmop[1] != NULL)
11801 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11802 }
11803
11804 if (TREE_CODE (arg0) == NEGATE_EXPR)
11805 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11806 else if (TREE_CODE (arg0) == PLUS_EXPR)
11807 {
11808 if (pmop[0] != NULL && pmop[1] != NULL)
11809 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11810 pmop[0], pmop[1]);
11811 else if (pmop[0] != NULL)
11812 tem = pmop[0];
11813 else if (pmop[1] != NULL)
11814 tem = pmop[1];
11815 else
11816 return build_int_cst (type, 0);
11817 }
11818 else if (pmop[0] == NULL)
11819 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11820 else
11821 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11822 pmop[0], pmop[1]);
11823 /* TEM is now the new binary +, - or unary - replacement. */
11824 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11825 fold_convert_loc (loc, utype, arg1));
11826 return fold_convert_loc (loc, type, tem);
11827 }
11828 }
11829 }
11830
11831 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11832 if (t1 != NULL_TREE)
11833 return t1;
11834 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11835 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11836 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11837 {
11838 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11839
11840 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11841 && (~TREE_INT_CST_LOW (arg1)
11842 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11843 return
11844 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11845 }
11846
11847 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11848
11849 This results in more efficient code for machines without a NOR
11850 instruction. Combine will canonicalize to the first form
11851 which will allow use of NOR instructions provided by the
11852 backend if they exist. */
11853 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11854 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11855 {
11856 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11857 build2 (BIT_IOR_EXPR, type,
11858 fold_convert_loc (loc, type,
11859 TREE_OPERAND (arg0, 0)),
11860 fold_convert_loc (loc, type,
11861 TREE_OPERAND (arg1, 0))));
11862 }
11863
11864 /* If arg0 is derived from the address of an object or function, we may
11865 be able to fold this expression using the object or function's
11866 alignment. */
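/* For instance (illustrative), if ARG0 is the address of a variable
   known to be 8-byte aligned, the modulus is 8 and the residue is 0,
   so ARG0 & 7 folds to the constant 0.  */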
11867 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11868 {
11869 unsigned HOST_WIDE_INT modulus, residue;
11870 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11871
11872 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11873 integer_onep (arg1));
11874
11875 /* This works because modulus is a power of 2. If this weren't the
11876 case, we'd have to replace it by its greatest power-of-2
11877 divisor: modulus & -modulus. */
11878 if (low < modulus)
11879 return build_int_cst (type, residue & low);
11880 }
11881
11882 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11883 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11884 if the new mask might be further optimized. */
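/* Example (illustrative): for a 32-bit unsigned X, in (X >> 24) & 0xff
   the shift already guarantees that all bits above bit 7 are zero, so
   the mask widens to all-ones and the expression eventually folds to
   X >> 24 (the all-ones mask is dropped by a separate fold).  */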
11885 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11886 || TREE_CODE (arg0) == RSHIFT_EXPR)
11887 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11888 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11889 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11890 < TYPE_PRECISION (TREE_TYPE (arg0))
11891 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11892 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11893 {
11894 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11895 unsigned HOST_WIDE_INT mask
11896 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11897 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11898 tree shift_type = TREE_TYPE (arg0);
11899
11900 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11901 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11902 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11903 && TYPE_PRECISION (TREE_TYPE (arg0))
11904 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11905 {
11906 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11907 tree arg00 = TREE_OPERAND (arg0, 0);
11908 /* See if more bits can be proven as zero because of
11909 zero extension. */
11910 if (TREE_CODE (arg00) == NOP_EXPR
11911 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11912 {
11913 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11914 if (TYPE_PRECISION (inner_type)
11915 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11916 && TYPE_PRECISION (inner_type) < prec)
11917 {
11918 prec = TYPE_PRECISION (inner_type);
11919 /* See if we can shorten the right shift. */
11920 if (shiftc < prec)
11921 shift_type = inner_type;
11922 }
11923 }
11924 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11925 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11926 zerobits <<= prec - shiftc;
11927 /* For an arithmetic shift, if the sign bit could be set, zerobits
11928 can actually contain sign bits, so no transformation is
11929 possible unless MASK masks them all away. In that
11930 case the shift needs to be converted into a logical shift. */
11931 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11932 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11933 {
11934 if ((mask & zerobits) == 0)
11935 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11936 else
11937 zerobits = 0;
11938 }
11939 }
11940
11941 /* ((X << 16) & 0xff00) is (X, 0). */
11942 if ((mask & zerobits) == mask)
11943 return omit_one_operand_loc (loc, type,
11944 build_int_cst (type, 0), arg0);
11945
11946 newmask = mask | zerobits;
11947 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11948 {
11949 /* Only do the transformation if NEWMASK is some integer
11950 mode's mask. */
11951 for (prec = BITS_PER_UNIT;
11952 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11953 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11954 break;
11955 if (prec < HOST_BITS_PER_WIDE_INT
11956 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11957 {
11958 tree newmaskt;
11959
11960 if (shift_type != TREE_TYPE (arg0))
11961 {
11962 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11963 fold_convert_loc (loc, shift_type,
11964 TREE_OPERAND (arg0, 0)),
11965 TREE_OPERAND (arg0, 1));
11966 tem = fold_convert_loc (loc, type, tem);
11967 }
11968 else
11969 tem = op0;
11970 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11971 if (!tree_int_cst_equal (newmaskt, arg1))
11972 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11973 }
11974 }
11975 }
11976
11977 goto associate;
11978
11979 case RDIV_EXPR:
11980 /* Don't touch a floating-point divide by zero unless the mode
11981 of the constant can represent infinity. */
11982 if (TREE_CODE (arg1) == REAL_CST
11983 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11984 && real_zerop (arg1))
11985 return NULL_TREE;
11986
11987 /* Optimize A / A to 1.0 if we don't care about
11988 NaNs or Infinities. Skip the transformation
11989 for non-real operands. */
11990 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11991 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11992 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11993 && operand_equal_p (arg0, arg1, 0))
11994 {
11995 tree r = build_real (TREE_TYPE (arg0), dconst1);
11996
11997 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11998 }
11999
12000 /* The complex version of the above A / A optimization. */
12001 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12002 && operand_equal_p (arg0, arg1, 0))
12003 {
12004 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12005 if (! HONOR_NANS (TYPE_MODE (elem_type))
12006 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12007 {
12008 tree r = build_real (elem_type, dconst1);
12009 /* omit_two_operands will call fold_convert for us. */
12010 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12011 }
12012 }
12013
12014 /* (-A) / (-B) -> A / B */
12015 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12016 return fold_build2_loc (loc, RDIV_EXPR, type,
12017 TREE_OPERAND (arg0, 0),
12018 negate_expr (arg1));
12019 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12020 return fold_build2_loc (loc, RDIV_EXPR, type,
12021 negate_expr (arg0),
12022 TREE_OPERAND (arg1, 0));
12023
12024 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12025 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12026 && real_onep (arg1))
12027 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12028
12029 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12030 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12031 && real_minus_onep (arg1))
12032 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12033 negate_expr (arg0)));
12034
12035 /* If ARG1 is a constant, we can convert this to a multiply by the
12036 reciprocal. This does not have the same rounding properties,
12037 so only do this if -freciprocal-math. We can actually
12038 always safely do it if ARG1 is a power of two, but it's hard to
12039 tell if it is or not in a portable manner. */
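/* E.g. (illustrative) X / 2.0 becomes X * 0.5 exactly, since the
   reciprocal of a power of two is representable; X / 3.0 is rewritten
   as X * (1.0/3.0) only under -freciprocal-math, because 1.0/3.0
   rounds.  */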
12040 if (optimize
12041 && (TREE_CODE (arg1) == REAL_CST
12042 || (TREE_CODE (arg1) == COMPLEX_CST
12043 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12044 || (TREE_CODE (arg1) == VECTOR_CST
12045 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12046 {
12047 if (flag_reciprocal_math
12048 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12049 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12050 /* Find the reciprocal if optimizing and the result is exact.
12051 TODO: Complex reciprocal not implemented. */
12052 if (TREE_CODE (arg1) != COMPLEX_CST)
12053 {
12054 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12055
12056 if (inverse)
12057 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12058 }
12059 }
12060 /* Convert A/B/C to A/(B*C). */
12061 if (flag_reciprocal_math
12062 && TREE_CODE (arg0) == RDIV_EXPR)
12063 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12064 fold_build2_loc (loc, MULT_EXPR, type,
12065 TREE_OPERAND (arg0, 1), arg1));
12066
12067 /* Convert A/(B/C) to (A/B)*C. */
12068 if (flag_reciprocal_math
12069 && TREE_CODE (arg1) == RDIV_EXPR)
12070 return fold_build2_loc (loc, MULT_EXPR, type,
12071 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12072 TREE_OPERAND (arg1, 0)),
12073 TREE_OPERAND (arg1, 1));
12074
12075 /* Convert C1/(X*C2) into (C1/C2)/X. */
12076 if (flag_reciprocal_math
12077 && TREE_CODE (arg1) == MULT_EXPR
12078 && TREE_CODE (arg0) == REAL_CST
12079 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12080 {
12081 tree tem = const_binop (RDIV_EXPR, arg0,
12082 TREE_OPERAND (arg1, 1));
12083 if (tem)
12084 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12085 TREE_OPERAND (arg1, 0));
12086 }
12087
12088 if (flag_unsafe_math_optimizations)
12089 {
12090 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12091 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12092
12093 /* Optimize sin(x)/cos(x) as tan(x). */
12094 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12095 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12096 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12097 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12098 CALL_EXPR_ARG (arg1, 0), 0))
12099 {
12100 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12101
12102 if (tanfn != NULL_TREE)
12103 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12104 }
12105
12106 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12107 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12108 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12109 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12110 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12111 CALL_EXPR_ARG (arg1, 0), 0))
12112 {
12113 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12114
12115 if (tanfn != NULL_TREE)
12116 {
12117 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12118 CALL_EXPR_ARG (arg0, 0));
12119 return fold_build2_loc (loc, RDIV_EXPR, type,
12120 build_real (type, dconst1), tmp);
12121 }
12122 }
12123
12124 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12125 NaNs or Infinities. */
12126 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12127 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12128 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12129 {
12130 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12131 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12132
12133 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12134 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12135 && operand_equal_p (arg00, arg01, 0))
12136 {
12137 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12138
12139 if (cosfn != NULL_TREE)
12140 return build_call_expr_loc (loc, cosfn, 1, arg00);
12141 }
12142 }
12143
12144 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12145 NaNs or Infinities. */
12146 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12147 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12148 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12149 {
12150 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12151 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12152
12153 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12154 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12155 && operand_equal_p (arg00, arg01, 0))
12156 {
12157 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12158
12159 if (cosfn != NULL_TREE)
12160 {
12161 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12162 return fold_build2_loc (loc, RDIV_EXPR, type,
12163 build_real (type, dconst1),
12164 tmp);
12165 }
12166 }
12167 }
12168
12169 /* Optimize pow(x,c)/x as pow(x,c-1). */
12170 if (fcode0 == BUILT_IN_POW
12171 || fcode0 == BUILT_IN_POWF
12172 || fcode0 == BUILT_IN_POWL)
12173 {
12174 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12175 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12176 if (TREE_CODE (arg01) == REAL_CST
12177 && !TREE_OVERFLOW (arg01)
12178 && operand_equal_p (arg1, arg00, 0))
12179 {
12180 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12181 REAL_VALUE_TYPE c;
12182 tree arg;
12183
12184 c = TREE_REAL_CST (arg01);
12185 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12186 arg = build_real (type, c);
12187 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12188 }
12189 }
12190
12191 /* Optimize a/root(b/c) into a*root(c/b). */
12192 if (BUILTIN_ROOT_P (fcode1))
12193 {
12194 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12195
12196 if (TREE_CODE (rootarg) == RDIV_EXPR)
12197 {
12198 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12199 tree b = TREE_OPERAND (rootarg, 0);
12200 tree c = TREE_OPERAND (rootarg, 1);
12201
12202 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12203
12204 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12205 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12206 }
12207 }
12208
12209 /* Optimize x/expN(y) into x*expN(-y). */
12210 if (BUILTIN_EXPONENT_P (fcode1))
12211 {
12212 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12213 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12214 arg1 = build_call_expr_loc (loc,
12215 expfn, 1,
12216 fold_convert_loc (loc, type, arg));
12217 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12218 }
12219
12220 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12221 if (fcode1 == BUILT_IN_POW
12222 || fcode1 == BUILT_IN_POWF
12223 || fcode1 == BUILT_IN_POWL)
12224 {
12225 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12226 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12227 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12228 tree neg11 = fold_convert_loc (loc, type,
12229 negate_expr (arg11));
12230 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12231 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12232 }
12233 }
12234 return NULL_TREE;
12235
12236 case TRUNC_DIV_EXPR:
12237 /* Optimize (X & (-A)) / A where A is a power of 2,
12238 to X >> log2(A) */
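/* E.g. (illustrative) for signed X, (X & -8) / 8 becomes X >> 3:
   X & -8 is an exact multiple of 8, so the truncating division is
   equivalent to an arithmetic right shift.  */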
12239 if (TREE_CODE (arg0) == BIT_AND_EXPR
12240 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12241 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12242 {
12243 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12244 arg1, TREE_OPERAND (arg0, 1));
12245 if (sum && integer_zerop (sum)) {
12246 unsigned long pow2;
12247
12248 if (TREE_INT_CST_LOW (arg1))
12249 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12250 else
12251 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12252 + HOST_BITS_PER_WIDE_INT;
12253
12254 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12255 TREE_OPERAND (arg0, 0),
12256 build_int_cst (integer_type_node, pow2));
12257 }
12258 }
12259
12260 /* Fall through */
12261
12262 case FLOOR_DIV_EXPR:
12263 /* Simplify A / (B << N) where A and B are positive and B is
12264 a power of 2, to A >> (N + log2(B)). */
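/* E.g. (illustrative) A / (4 << N) becomes A >> (N + 2) when A is
   known to be non-negative or the type is unsigned.  */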
12265 strict_overflow_p = false;
12266 if (TREE_CODE (arg1) == LSHIFT_EXPR
12267 && (TYPE_UNSIGNED (type)
12268 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12269 {
12270 tree sval = TREE_OPERAND (arg1, 0);
12271 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12272 {
12273 tree sh_cnt = TREE_OPERAND (arg1, 1);
12274 unsigned long pow2;
12275
12276 if (TREE_INT_CST_LOW (sval))
12277 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12278 else
12279 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12280 + HOST_BITS_PER_WIDE_INT;
12281
12282 if (strict_overflow_p)
12283 fold_overflow_warning (("assuming signed overflow does not "
12284 "occur when simplifying A / (B << N)"),
12285 WARN_STRICT_OVERFLOW_MISC);
12286
12287 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12288 sh_cnt,
12289 build_int_cst (TREE_TYPE (sh_cnt),
12290 pow2));
12291 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12292 fold_convert_loc (loc, type, arg0), sh_cnt);
12293 }
12294 }
12295
12296 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12297 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12298 if (INTEGRAL_TYPE_P (type)
12299 && TYPE_UNSIGNED (type)
12300 && code == FLOOR_DIV_EXPR)
12301 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12302
12303 /* Fall through */
12304
12305 case ROUND_DIV_EXPR:
12306 case CEIL_DIV_EXPR:
12307 case EXACT_DIV_EXPR:
12308 if (integer_onep (arg1))
12309 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12310 if (integer_zerop (arg1))
12311 return NULL_TREE;
12312 /* X / -1 is -X. */
12313 if (!TYPE_UNSIGNED (type)
12314 && TREE_CODE (arg1) == INTEGER_CST
12315 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12316 && TREE_INT_CST_HIGH (arg1) == -1)
12317 return fold_convert_loc (loc, type, negate_expr (arg0));
12318
12319 /* Convert -A / -B to A / B when the type is signed and overflow is
12320 undefined. */
12321 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12322 && TREE_CODE (arg0) == NEGATE_EXPR
12323 && negate_expr_p (arg1))
12324 {
12325 if (INTEGRAL_TYPE_P (type))
12326 fold_overflow_warning (("assuming signed overflow does not occur "
12327 "when distributing negation across "
12328 "division"),
12329 WARN_STRICT_OVERFLOW_MISC);
12330 return fold_build2_loc (loc, code, type,
12331 fold_convert_loc (loc, type,
12332 TREE_OPERAND (arg0, 0)),
12333 fold_convert_loc (loc, type,
12334 negate_expr (arg1)));
12335 }
12336 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12337 && TREE_CODE (arg1) == NEGATE_EXPR
12338 && negate_expr_p (arg0))
12339 {
12340 if (INTEGRAL_TYPE_P (type))
12341 fold_overflow_warning (("assuming signed overflow does not occur "
12342 "when distributing negation across "
12343 "division"),
12344 WARN_STRICT_OVERFLOW_MISC);
12345 return fold_build2_loc (loc, code, type,
12346 fold_convert_loc (loc, type,
12347 negate_expr (arg0)),
12348 fold_convert_loc (loc, type,
12349 TREE_OPERAND (arg1, 0)));
12350 }
12351
12352 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12353 operation, EXACT_DIV_EXPR.
12354
12355 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12356 At one time others generated faster code, but it's not clear whether
12357 they still do after the last round of changes to the DIV code in expmed.c. */
12358 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12359 && multiple_of_p (type, arg0, arg1))
12360 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12361
12362 strict_overflow_p = false;
12363 if (TREE_CODE (arg1) == INTEGER_CST
12364 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12365 &strict_overflow_p)))
12366 {
12367 if (strict_overflow_p)
12368 fold_overflow_warning (("assuming signed overflow does not occur "
12369 "when simplifying division"),
12370 WARN_STRICT_OVERFLOW_MISC);
12371 return fold_convert_loc (loc, type, tem);
12372 }
12373
12374 return NULL_TREE;
12375
12376 case CEIL_MOD_EXPR:
12377 case FLOOR_MOD_EXPR:
12378 case ROUND_MOD_EXPR:
12379 case TRUNC_MOD_EXPR:
12380 /* X % 1 is always zero, but be sure to preserve any side
12381 effects in X. */
12382 if (integer_onep (arg1))
12383 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12384
12385 /* For X % 0, return X % 0 unchanged so that we can get the
12386 proper warnings and errors. */
12387 if (integer_zerop (arg1))
12388 return NULL_TREE;
12389
12390 /* 0 % X is always zero, but be sure to preserve any side
12391 effects in X. Place this after checking for X == 0. */
12392 if (integer_zerop (arg0))
12393 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12394
12395 /* X % -1 is zero. */
12396 if (!TYPE_UNSIGNED (type)
12397 && TREE_CODE (arg1) == INTEGER_CST
12398 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12399 && TREE_INT_CST_HIGH (arg1) == -1)
12400 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12401
12402 /* X % -C is the same as X % C. */
12403 if (code == TRUNC_MOD_EXPR
12404 && !TYPE_UNSIGNED (type)
12405 && TREE_CODE (arg1) == INTEGER_CST
12406 && !TREE_OVERFLOW (arg1)
12407 && TREE_INT_CST_HIGH (arg1) < 0
12408 && !TYPE_OVERFLOW_TRAPS (type)
12409 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12410 && !sign_bit_p (arg1, arg1))
12411 return fold_build2_loc (loc, code, type,
12412 fold_convert_loc (loc, type, arg0),
12413 fold_convert_loc (loc, type,
12414 negate_expr (arg1)));
12415
12416 /* X % -Y is the same as X % Y. */
12417 if (code == TRUNC_MOD_EXPR
12418 && !TYPE_UNSIGNED (type)
12419 && TREE_CODE (arg1) == NEGATE_EXPR
12420 && !TYPE_OVERFLOW_TRAPS (type))
12421 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12422 fold_convert_loc (loc, type,
12423 TREE_OPERAND (arg1, 0)));
12424
12425 strict_overflow_p = false;
12426 if (TREE_CODE (arg1) == INTEGER_CST
12427 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12428 &strict_overflow_p)))
12429 {
12430 if (strict_overflow_p)
12431 fold_overflow_warning (("assuming signed overflow does not occur "
12432 "when simplifying modulus"),
12433 WARN_STRICT_OVERFLOW_MISC);
12434 return fold_convert_loc (loc, type, tem);
12435 }
12436
12437 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12438 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12439 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12440 && (TYPE_UNSIGNED (type)
12441 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12442 {
12443 tree c = arg1;
12444 /* Also optimize A % (C << N) where C is a power of 2,
12445 to A & ((C << N) - 1). */
12446 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12447 c = TREE_OPERAND (arg1, 0);
12448
12449 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12450 {
12451 tree mask
12452 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12453 build_int_cst (TREE_TYPE (arg1), 1));
12454 if (strict_overflow_p)
12455 fold_overflow_warning (("assuming signed overflow does not "
12456 "occur when simplifying "
12457 "X % (power of two)"),
12458 WARN_STRICT_OVERFLOW_MISC);
12459 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12460 fold_convert_loc (loc, type, arg0),
12461 fold_convert_loc (loc, type, mask));
12462 }
12463 }
12464
12465 return NULL_TREE;
12466
12467 case LROTATE_EXPR:
12468 case RROTATE_EXPR:
12469 if (integer_all_onesp (arg0))
12470 return omit_one_operand_loc (loc, type, arg0, arg1);
12471 goto shift;
12472
12473 case RSHIFT_EXPR:
12474 /* Optimize -1 >> x for arithmetic right shifts. */
12475 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12476 && tree_expr_nonnegative_p (arg1))
12477 return omit_one_operand_loc (loc, type, arg0, arg1);
12478 /* ... fall through ... */
12479
12480 case LSHIFT_EXPR:
12481 shift:
12482 if (integer_zerop (arg1))
12483 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12484 if (integer_zerop (arg0))
12485 return omit_one_operand_loc (loc, type, arg0, arg1);
12486
12487 /* Prefer vector1 << scalar to vector1 << vector2
12488 if vector2 is uniform. */
12489 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12490 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12491 return fold_build2_loc (loc, code, type, op0, tem);
12492
12493 /* Since a negative shift count is not well-defined,
12494 don't try to compute it in the compiler. */
12495 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12496 return NULL_TREE;
12497
12498 prec = element_precision (type);
12499
12500 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
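/* E.g. (illustrative) (X << 3) << 5 becomes X << 8.  If the combined
   count reaches the precision, the code below substitutes a defined
   result: zero for left and logical right shifts, a shift by
   precision - 1 for arithmetic right shifts, and a count reduced
   modulo the precision for rotates.  */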
12501 if (TREE_CODE (op0) == code && host_integerp (arg1, true)
12502 && TREE_INT_CST_LOW (arg1) < prec
12503 && host_integerp (TREE_OPERAND (arg0, 1), true)
12504 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12505 {
12506 unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12507 + TREE_INT_CST_LOW (arg1));
12508
12509 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12510 being well defined. */
12511 if (low >= prec)
12512 {
12513 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12514 low = low % prec;
12515 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12516 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12517 TREE_OPERAND (arg0, 0));
12518 else
12519 low = prec - 1;
12520 }
12521
12522 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12523 build_int_cst (TREE_TYPE (arg1), low));
12524 }
12525
12526 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12527 into x & ((unsigned)-1 >> c) for unsigned types. */
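/* E.g. (illustrative) for 32-bit X, (X >> 4) << 4 becomes
   X & 0xfffffff0, and for unsigned X, (X << 4) >> 4 becomes
   X & 0x0fffffff.  */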
12528 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12529 || (TYPE_UNSIGNED (type)
12530 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12531 && host_integerp (arg1, false)
12532 && TREE_INT_CST_LOW (arg1) < prec
12533 && host_integerp (TREE_OPERAND (arg0, 1), false)
12534 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12535 {
12536 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12537 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12538 tree lshift;
12539 tree arg00;
12540
12541 if (low0 == low1)
12542 {
12543 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12544
12545 lshift = build_minus_one_cst (type);
12546 lshift = const_binop (code, lshift, arg1);
12547
12548 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12549 }
12550 }
12551
12552 /* Rewrite an LROTATE_EXPR by a constant into an
12553 RROTATE_EXPR by a new constant. */
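/* E.g. (illustrative) a 32-bit rotate-left by 8 becomes a
   rotate-right by 24, so later folds only have to handle one
   canonical rotate direction.  */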
12554 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12555 {
12556 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12557 tem = const_binop (MINUS_EXPR, tem, arg1);
12558 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12559 }
12560
12561 /* If we have a rotate of a bit operation with the rotate count and
12562 the second operand of the bit operation both constant,
12563 permute the two operations. */
12564 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12565 && (TREE_CODE (arg0) == BIT_AND_EXPR
12566 || TREE_CODE (arg0) == BIT_IOR_EXPR
12567 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12568 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12569 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12570 fold_build2_loc (loc, code, type,
12571 TREE_OPERAND (arg0, 0), arg1),
12572 fold_build2_loc (loc, code, type,
12573 TREE_OPERAND (arg0, 1), arg1));
12574
12575 /* Two consecutive rotates adding up to the precision of the
12576 type can be ignored. */
12577 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12578 && TREE_CODE (arg0) == RROTATE_EXPR
12579 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12580 && TREE_INT_CST_HIGH (arg1) == 0
12581 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12582 && ((TREE_INT_CST_LOW (arg1)
12583 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12584 == prec))
12585 return TREE_OPERAND (arg0, 0);
12586
12587 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12588 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12589 if the latter can be further optimized. */
12590 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12591 && TREE_CODE (arg0) == BIT_AND_EXPR
12592 && TREE_CODE (arg1) == INTEGER_CST
12593 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12594 {
12595 tree mask = fold_build2_loc (loc, code, type,
12596 fold_convert_loc (loc, type,
12597 TREE_OPERAND (arg0, 1)),
12598 arg1);
12599 tree shift = fold_build2_loc (loc, code, type,
12600 fold_convert_loc (loc, type,
12601 TREE_OPERAND (arg0, 0)),
12602 arg1);
12603 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12604 if (tem)
12605 return tem;
12606 }
12607
12608 return NULL_TREE;
12609
12610 case MIN_EXPR:
12611 if (operand_equal_p (arg0, arg1, 0))
12612 return omit_one_operand_loc (loc, type, arg0, arg1);
12613 if (INTEGRAL_TYPE_P (type)
12614 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12615 return omit_one_operand_loc (loc, type, arg1, arg0);
12616 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12617 if (tem)
12618 return tem;
12619 goto associate;
12620
12621 case MAX_EXPR:
12622 if (operand_equal_p (arg0, arg1, 0))
12623 return omit_one_operand_loc (loc, type, arg0, arg1);
12624 if (INTEGRAL_TYPE_P (type)
12625 && TYPE_MAX_VALUE (type)
12626 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12627 return omit_one_operand_loc (loc, type, arg1, arg0);
12628 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12629 if (tem)
12630 return tem;
12631 goto associate;
12632
12633 case TRUTH_ANDIF_EXPR:
12634 /* Note that the operands of this must be ints
12635 and their values must be 0 or 1.
12636 ("true" is a fixed value perhaps depending on the language.) */
12637 /* If first arg is constant zero, return it. */
12638 if (integer_zerop (arg0))
12639 return fold_convert_loc (loc, type, arg0);
12640 case TRUTH_AND_EXPR:
12641 /* If either arg is constant true, drop it. */
12642 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12643 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12644 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12645 /* Preserve sequence points. */
12646 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12647 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12648 /* If second arg is constant zero, result is zero, but first arg
12649 must be evaluated. */
12650 if (integer_zerop (arg1))
12651 return omit_one_operand_loc (loc, type, arg1, arg0);
12652 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12653 case will be handled here. */
12654 if (integer_zerop (arg0))
12655 return omit_one_operand_loc (loc, type, arg0, arg1);
12656
12657 /* !X && X is always false. */
12658 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12659 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12660 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12661 /* X && !X is always false. */
12662 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12663 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12664 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12665
12666 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12667 means A >= Y && A != MAX, but in this case we know that
12668 A < X <= MAX. */
12669
12670 if (!TREE_SIDE_EFFECTS (arg0)
12671 && !TREE_SIDE_EFFECTS (arg1))
12672 {
12673 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12674 if (tem && !operand_equal_p (tem, arg0, 0))
12675 return fold_build2_loc (loc, code, type, tem, arg1);
12676
12677 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12678 if (tem && !operand_equal_p (tem, arg1, 0))
12679 return fold_build2_loc (loc, code, type, arg0, tem);
12680 }
12681
12682 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12683 != NULL_TREE)
12684 return tem;
12685
12686 return NULL_TREE;
12687
12688 case TRUTH_ORIF_EXPR:
12689 /* Note that the operands of this must be ints
12690 and their values must be 0 or true.
12691 ("true" is a fixed value perhaps depending on the language.) */
12692 /* If first arg is constant true, return it. */
12693 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12694 return fold_convert_loc (loc, type, arg0);
12695 case TRUTH_OR_EXPR:
12696 /* If either arg is constant zero, drop it. */
12697 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12698 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12699 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12700 /* Preserve sequence points. */
12701 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12702 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12703 /* If second arg is constant true, result is true, but we must
12704 evaluate first arg. */
12705 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12706 return omit_one_operand_loc (loc, type, arg1, arg0);
12707 /* Likewise for first arg, but note this only occurs here for
12708 TRUTH_OR_EXPR. */
12709 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12710 return omit_one_operand_loc (loc, type, arg0, arg1);
12711
12712 /* !X || X is always true. */
12713 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12714 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12715 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12716 /* X || !X is always true. */
12717 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12718 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12719 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12720
12721 /* (X && !Y) || (!X && Y) is X ^ Y */
12722 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12723 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12724 {
12725 tree a0, a1, l0, l1, n0, n1;
12726
12727 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12728 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12729
12730 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12731 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12732
12733 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12734 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12735
12736 if ((operand_equal_p (n0, a0, 0)
12737 && operand_equal_p (n1, a1, 0))
12738 || (operand_equal_p (n0, a1, 0)
12739 && operand_equal_p (n1, a0, 0)))
12740 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12741 }
12742
12743 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12744 != NULL_TREE)
12745 return tem;
12746
12747 return NULL_TREE;
12748
12749 case TRUTH_XOR_EXPR:
12750 /* If the second arg is constant zero, drop it. */
12751 if (integer_zerop (arg1))
12752 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12753 /* If the second arg is constant true, this is a logical inversion. */
12754 if (integer_onep (arg1))
12755 {
12756 tem = invert_truthvalue_loc (loc, arg0);
12757 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12758 }
12759 /* Identical arguments cancel to zero. */
12760 if (operand_equal_p (arg0, arg1, 0))
12761 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12762
12763 /* !X ^ X is always true. */
12764 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12765 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12766 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12767
12768 /* X ^ !X is always true. */
12769 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12770 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12771 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12772
12773 return NULL_TREE;
12774
12775 case EQ_EXPR:
12776 case NE_EXPR:
12777 STRIP_NOPS (arg0);
12778 STRIP_NOPS (arg1);
12779
12780 tem = fold_comparison (loc, code, type, op0, op1);
12781 if (tem != NULL_TREE)
12782 return tem;
12783
12784 /* bool_var != 0 becomes bool_var. */
12785 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12786 && code == NE_EXPR)
12787 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12788
12789 /* bool_var == 1 becomes bool_var. */
12790 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12791 && code == EQ_EXPR)
12792 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12793
12794 /* bool_var != 1 becomes !bool_var. */
12795 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12796 && code == NE_EXPR)
12797 return fold_convert_loc (loc, type,
12798 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12799 TREE_TYPE (arg0), arg0));
12800
12801 /* bool_var == 0 becomes !bool_var. */
12802 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12803 && code == EQ_EXPR)
12804 return fold_convert_loc (loc, type,
12805 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12806 TREE_TYPE (arg0), arg0));
12807
12808 /* !exp != 0 becomes !exp */
12809 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12810 && code == NE_EXPR)
12811 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12812
12813 /* If this is an equality comparison of the address of two non-weak,
12814 unaliased symbols neither of which are extern (since we do not
12815 have access to attributes for externs), then we know the result. */
12816 if (TREE_CODE (arg0) == ADDR_EXPR
12817 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12818 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12819 && ! lookup_attribute ("alias",
12820 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12821 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12822 && TREE_CODE (arg1) == ADDR_EXPR
12823 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12824 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12825 && ! lookup_attribute ("alias",
12826 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12827 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12828 {
12829 /* We know that we're looking at the address of two
12830 non-weak, unaliased, static _DECL nodes.
12831
12832 It is both wasteful and incorrect to call operand_equal_p
12833 to compare the two ADDR_EXPR nodes. It is wasteful in that
12834 all we need to do is test pointer equality for the arguments
12835 to the two ADDR_EXPR nodes. It is incorrect to use
12836 operand_equal_p as that function is NOT equivalent to a
12837 C equality test. It can in fact return false for two
12838 objects which would test as equal using the C equality
12839 operator. */
12840 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12841 return constant_boolean_node (equal
12842 ? code == EQ_EXPR : code != EQ_EXPR,
12843 type);
12844 }
12845
12846 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12847 a MINUS_EXPR of a constant, we can convert it into a comparison with
12848 a revised constant as long as no overflow occurs. */
12849 if (TREE_CODE (arg1) == INTEGER_CST
12850 && (TREE_CODE (arg0) == PLUS_EXPR
12851 || TREE_CODE (arg0) == MINUS_EXPR)
12852 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12853 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12854 ? MINUS_EXPR : PLUS_EXPR,
12855 fold_convert_loc (loc, TREE_TYPE (arg0),
12856 arg1),
12857 TREE_OPERAND (arg0, 1)))
12858 && !TREE_OVERFLOW (tem))
12859 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12860
12861 /* Similarly for a NEGATE_EXPR. */
12862 if (TREE_CODE (arg0) == NEGATE_EXPR
12863 && TREE_CODE (arg1) == INTEGER_CST
12864 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12865 arg1)))
12866 && TREE_CODE (tem) == INTEGER_CST
12867 && !TREE_OVERFLOW (tem))
12868 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12869
12870 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12871 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12872 && TREE_CODE (arg1) == INTEGER_CST
12873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12874 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12875 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12876 fold_convert_loc (loc,
12877 TREE_TYPE (arg0),
12878 arg1),
12879 TREE_OPERAND (arg0, 1)));
12880
12881 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12882 if ((TREE_CODE (arg0) == PLUS_EXPR
12883 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12884 || TREE_CODE (arg0) == MINUS_EXPR)
12885 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12886 0)),
12887 arg1, 0)
12888 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12889 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12890 {
12891 tree val = TREE_OPERAND (arg0, 1);
12892 return omit_two_operands_loc (loc, type,
12893 fold_build2_loc (loc, code, type,
12894 val,
12895 build_int_cst (TREE_TYPE (val),
12896 0)),
12897 TREE_OPERAND (arg0, 0), arg1);
12898 }
12899
12900 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
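/* With C odd, C - X == X would require 2*X == C, which is impossible
   in an integer type because 2*X is always even modulo the precision,
   so the comparison folds to a constant (false for ==, true for !=).  */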
12901 if (TREE_CODE (arg0) == MINUS_EXPR
12902 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12903 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12904 1)),
12905 arg1, 0)
12906 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12907 {
12908 return omit_two_operands_loc (loc, type,
12909 code == NE_EXPR
12910 ? boolean_true_node : boolean_false_node,
12911 TREE_OPERAND (arg0, 1), arg1);
12912 }
12913
12914 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12915 for !=. Don't do this for ordered comparisons due to overflow. */
12916 if (TREE_CODE (arg0) == MINUS_EXPR
12917 && integer_zerop (arg1))
12918 return fold_build2_loc (loc, code, type,
12919 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12920
12921 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12922 if (TREE_CODE (arg0) == ABS_EXPR
12923 && (integer_zerop (arg1) || real_zerop (arg1)))
12924 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12925
12926 /* If this is an EQ or NE comparison with zero and ARG0 is
12927 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12928 two operations, but the latter can be done in one less insn
12929 on machines that have only two-operand insns or on which a
12930 constant cannot be the first operand. */
12931 if (TREE_CODE (arg0) == BIT_AND_EXPR
12932 && integer_zerop (arg1))
12933 {
12934 tree arg00 = TREE_OPERAND (arg0, 0);
12935 tree arg01 = TREE_OPERAND (arg0, 1);
12936 if (TREE_CODE (arg00) == LSHIFT_EXPR
12937 && integer_onep (TREE_OPERAND (arg00, 0)))
12938 {
12939 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12940 arg01, TREE_OPERAND (arg00, 1));
12941 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12942 build_int_cst (TREE_TYPE (arg0), 1));
12943 return fold_build2_loc (loc, code, type,
12944 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12945 arg1);
12946 }
12947 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12948 && integer_onep (TREE_OPERAND (arg01, 0)))
12949 {
12950 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12951 arg00, TREE_OPERAND (arg01, 1));
12952 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12953 build_int_cst (TREE_TYPE (arg0), 1));
12954 return fold_build2_loc (loc, code, type,
12955 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12956 arg1);
12957 }
12958 }
12959
12960 /* If this is an NE or EQ comparison of zero against the result of a
12961 signed MOD operation whose second operand is a power of 2, make
12962 the MOD operation unsigned since it is simpler and equivalent. */
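/* The sign of a remainder never affects whether it is zero: e.g.
   (illustrative) X % 4 == 0 holds exactly when (unsigned) X % 4 == 0,
   and the unsigned form can then become a simple bit test.  */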
12963 if (integer_zerop (arg1)
12964 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12965 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12966 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12967 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12968 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12969 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12970 {
12971 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12972 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12973 fold_convert_loc (loc, newtype,
12974 TREE_OPERAND (arg0, 0)),
12975 fold_convert_loc (loc, newtype,
12976 TREE_OPERAND (arg0, 1)));
12977
12978 return fold_build2_loc (loc, code, type, newmod,
12979 fold_convert_loc (loc, newtype, arg1));
12980 }
12981
12982 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12983 C1 is a valid shift constant, and C2 is a power of two, i.e.
12984 a single bit. */
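/* E.g. (illustrative) ((X >> 3) & 4) != 0 becomes (X & 32) != 0,
   since 4 << 3 == 32 does not overflow; the signed and unsigned
   fallback cases are handled below.  */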
12985 if (TREE_CODE (arg0) == BIT_AND_EXPR
12986 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12987 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12988 == INTEGER_CST
12989 && integer_pow2p (TREE_OPERAND (arg0, 1))
12990 && integer_zerop (arg1))
12991 {
12992 tree itype = TREE_TYPE (arg0);
12993 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12994 prec = TYPE_PRECISION (itype);
12995
12996 /* Check for a valid shift count. */
12997 if (TREE_INT_CST_HIGH (arg001) == 0
12998 && TREE_INT_CST_LOW (arg001) < prec)
12999 {
13000 tree arg01 = TREE_OPERAND (arg0, 1);
13001 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13002 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13003 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13004 can be rewritten as (X & (C2 << C1)) != 0. */
13005 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13006 {
13007 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13008 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13009 return fold_build2_loc (loc, code, type, tem,
13010 fold_convert_loc (loc, itype, arg1));
13011 }
13012 /* Otherwise, for signed (arithmetic) shifts,
13013 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13014 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13015 else if (!TYPE_UNSIGNED (itype))
13016 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13017 arg000, build_int_cst (itype, 0));
13018 /* Otherwise, for unsigned (logical) shifts,
13019 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13020 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13021 else
13022 return omit_one_operand_loc (loc, type,
13023 code == EQ_EXPR ? integer_one_node
13024 : integer_zero_node,
13025 arg000);
13026 }
13027 }
13028
13029 /* If we have (A & C) == C where C is a power of 2, convert this into
13030 (A & C) != 0. Similarly for NE_EXPR. */
13031 if (TREE_CODE (arg0) == BIT_AND_EXPR
13032 && integer_pow2p (TREE_OPERAND (arg0, 1))
13033 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13034 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13035 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13036 integer_zero_node));
13037
13038 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13039 bit, then fold the expression into A < 0 or A >= 0. */
13040 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13041 if (tem)
13042 return tem;
13043
13044 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13045 Similarly for NE_EXPR. */
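/* A & C can only set bits that are also set in C, so if D has a bit
   set outside C (D & ~C != 0) the equality can never hold: e.g.
   (illustrative) (A & 0x0f) == 0x10 folds to 0.  */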
13046 if (TREE_CODE (arg0) == BIT_AND_EXPR
13047 && TREE_CODE (arg1) == INTEGER_CST
13048 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13049 {
13050 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13051 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13052 TREE_OPERAND (arg0, 1));
13053 tree dandnotc
13054 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13055 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13056 notc);
13057 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13058 if (integer_nonzerop (dandnotc))
13059 return omit_one_operand_loc (loc, type, rslt, arg0);
13060 }
13061
13062 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13063 Similarly for NE_EXPR. */
13064 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13065 && TREE_CODE (arg1) == INTEGER_CST
13066 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13067 {
13068 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13069 tree candnotd
13070 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13071 TREE_OPERAND (arg0, 1),
13072 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13073 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13074 if (integer_nonzerop (candnotd))
13075 return omit_one_operand_loc (loc, type, rslt, arg0);
13076 }
13077
13078 /* If this is a comparison of a field, we may be able to simplify it. */
13079 if ((TREE_CODE (arg0) == COMPONENT_REF
13080 || TREE_CODE (arg0) == BIT_FIELD_REF)
13081 /* Handle the constant case even without -O
13082 to make sure the warnings are given. */
13083 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13084 {
13085 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13086 if (t1)
13087 return t1;
13088 }
13089
13090 /* Optimize comparisons of strlen vs zero to a compare of the
13091 first character of the string vs zero. To wit,
13092 strlen(ptr) == 0 => *ptr == 0
13093 strlen(ptr) != 0 => *ptr != 0
13094 Other cases should reduce to one of these two (or a constant)
13095 due to the return value of strlen being unsigned. */
13096 if (TREE_CODE (arg0) == CALL_EXPR
13097 && integer_zerop (arg1))
13098 {
13099 tree fndecl = get_callee_fndecl (arg0);
13100
13101 if (fndecl
13102 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13103 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13104 && call_expr_nargs (arg0) == 1
13105 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13106 {
13107 tree iref = build_fold_indirect_ref_loc (loc,
13108 CALL_EXPR_ARG (arg0, 0));
13109 return fold_build2_loc (loc, code, type, iref,
13110 build_int_cst (TREE_TYPE (iref), 0));
13111 }
13112 }
13113
13114 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13115 of X. Similarly fold (X >> C) == 0 into X >= 0. */
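/* After shifting right by precision - 1 only the sign bit can
   survive: e.g. (illustrative) for 32-bit int X, (X >> 31) != 0
   exactly when X < 0; unsigned operands are first converted to the
   signed type below.  */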
13116 if (TREE_CODE (arg0) == RSHIFT_EXPR
13117 && integer_zerop (arg1)
13118 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13119 {
13120 tree arg00 = TREE_OPERAND (arg0, 0);
13121 tree arg01 = TREE_OPERAND (arg0, 1);
13122 tree itype = TREE_TYPE (arg00);
13123 if (TREE_INT_CST_HIGH (arg01) == 0
13124 && TREE_INT_CST_LOW (arg01)
13125 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13126 {
13127 if (TYPE_UNSIGNED (itype))
13128 {
13129 itype = signed_type_for (itype);
13130 arg00 = fold_convert_loc (loc, itype, arg00);
13131 }
13132 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13133 type, arg00, build_zero_cst (itype));
13134 }
13135 }
13136
13137 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13138 if (integer_zerop (arg1)
13139 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13140 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13141 TREE_OPERAND (arg0, 1));
13142
13143 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13144 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13145 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13146 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13147 build_zero_cst (TREE_TYPE (arg0)));
13148 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13149 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13150 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13151 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13152 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13153 build_zero_cst (TREE_TYPE (arg0)));
13154
13155 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13156 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13157 && TREE_CODE (arg1) == INTEGER_CST
13158 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13159 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13160 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13161 TREE_OPERAND (arg0, 1), arg1));
13162
13163 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13164 (X & C) == 0 when C is a single bit. */
13165 if (TREE_CODE (arg0) == BIT_AND_EXPR
13166 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13167 && integer_zerop (arg1)
13168 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13169 {
13170 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13171 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13172 TREE_OPERAND (arg0, 1));
13173 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13174 type, tem,
13175 fold_convert_loc (loc, TREE_TYPE (arg0),
13176 arg1));
13177 }
13178
13179 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13180 constant C is a power of two, i.e. a single bit. */
13181 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13182 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13183 && integer_zerop (arg1)
13184 && integer_pow2p (TREE_OPERAND (arg0, 1))
13185 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13186 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13187 {
13188 tree arg00 = TREE_OPERAND (arg0, 0);
13189 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13190 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13191 }
13192
13193 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13194 when C is a power of two, i.e. a single bit. */
13195 if (TREE_CODE (arg0) == BIT_AND_EXPR
13196 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13197 && integer_zerop (arg1)
13198 && integer_pow2p (TREE_OPERAND (arg0, 1))
13199 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13200 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13201 {
13202 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13203 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13204 arg000, TREE_OPERAND (arg0, 1));
13205 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13206 tem, build_int_cst (TREE_TYPE (tem), 0));
13207 }
13208
13209 if (integer_zerop (arg1)
13210 && tree_expr_nonzero_p (arg0))
13211 {
13212 tree res = constant_boolean_node (code==NE_EXPR, type);
13213 return omit_one_operand_loc (loc, type, res, arg0);
13214 }
13215
13216 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13217 if (TREE_CODE (arg0) == NEGATE_EXPR
13218 && TREE_CODE (arg1) == NEGATE_EXPR)
13219 return fold_build2_loc (loc, code, type,
13220 TREE_OPERAND (arg0, 0),
13221 fold_convert_loc (loc, TREE_TYPE (arg0),
13222 TREE_OPERAND (arg1, 0)));
13223
13224 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13225 if (TREE_CODE (arg0) == BIT_AND_EXPR
13226 && TREE_CODE (arg1) == BIT_AND_EXPR)
13227 {
13228 tree arg00 = TREE_OPERAND (arg0, 0);
13229 tree arg01 = TREE_OPERAND (arg0, 1);
13230 tree arg10 = TREE_OPERAND (arg1, 0);
13231 tree arg11 = TREE_OPERAND (arg1, 1);
13232 tree itype = TREE_TYPE (arg0);
13233
13234 if (operand_equal_p (arg01, arg11, 0))
13235 return fold_build2_loc (loc, code, type,
13236 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13237 fold_build2_loc (loc,
13238 BIT_XOR_EXPR, itype,
13239 arg00, arg10),
13240 arg01),
13241 build_zero_cst (itype));
13242
13243 if (operand_equal_p (arg01, arg10, 0))
13244 return fold_build2_loc (loc, code, type,
13245 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13246 fold_build2_loc (loc,
13247 BIT_XOR_EXPR, itype,
13248 arg00, arg11),
13249 arg01),
13250 build_zero_cst (itype));
13251
13252 if (operand_equal_p (arg00, arg11, 0))
13253 return fold_build2_loc (loc, code, type,
13254 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13255 fold_build2_loc (loc,
13256 BIT_XOR_EXPR, itype,
13257 arg01, arg10),
13258 arg00),
13259 build_zero_cst (itype));
13260
13261 if (operand_equal_p (arg00, arg10, 0))
13262 return fold_build2_loc (loc, code, type,
13263 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13264 fold_build2_loc (loc,
13265 BIT_XOR_EXPR, itype,
13266 arg01, arg11),
13267 arg00),
13268 build_zero_cst (itype));
13269 }
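/* Illustration: (x & m) == (y & m) becomes ((x ^ y) & m) == 0 --
   the bits selected by the common mask m agree exactly when their
   XOR under the mask is zero.  */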
13270
13271 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13272 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13273 {
13274 tree arg00 = TREE_OPERAND (arg0, 0);
13275 tree arg01 = TREE_OPERAND (arg0, 1);
13276 tree arg10 = TREE_OPERAND (arg1, 0);
13277 tree arg11 = TREE_OPERAND (arg1, 1);
13278 tree itype = TREE_TYPE (arg0);
13279
13280 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13281 operand_equal_p guarantees no side-effects so we don't need
13282 to use omit_one_operand on Z. */
13283 if (operand_equal_p (arg01, arg11, 0))
13284 return fold_build2_loc (loc, code, type, arg00,
13285 fold_convert_loc (loc, TREE_TYPE (arg00),
13286 arg10));
13287 if (operand_equal_p (arg01, arg10, 0))
13288 return fold_build2_loc (loc, code, type, arg00,
13289 fold_convert_loc (loc, TREE_TYPE (arg00),
13290 arg11));
13291 if (operand_equal_p (arg00, arg11, 0))
13292 return fold_build2_loc (loc, code, type, arg01,
13293 fold_convert_loc (loc, TREE_TYPE (arg01),
13294 arg10));
13295 if (operand_equal_p (arg00, arg10, 0))
13296 return fold_build2_loc (loc, code, type, arg01,
13297 fold_convert_loc (loc, TREE_TYPE (arg01),
13298 arg11));
13299
13300 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13301 if (TREE_CODE (arg01) == INTEGER_CST
13302 && TREE_CODE (arg11) == INTEGER_CST)
13303 {
13304 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13305 fold_convert_loc (loc, itype, arg11));
13306 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13307 return fold_build2_loc (loc, code, type, tem,
13308 fold_convert_loc (loc, itype, arg10));
13309 }
13310 }
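/* Illustrations: (x ^ z) == (y ^ z) folds to x == y, and
   (x ^ 3) == (y ^ 5) folds to (x ^ (3 ^ 5)) == y, i.e.
   (x ^ 6) == y.  */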
13311
13312 /* Attempt to simplify equality/inequality comparisons of complex
13313 values. Only lower the comparison if the result is known or
13314 can be simplified to a single scalar comparison. */
13315 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13316 || TREE_CODE (arg0) == COMPLEX_CST)
13317 && (TREE_CODE (arg1) == COMPLEX_EXPR
13318 || TREE_CODE (arg1) == COMPLEX_CST))
13319 {
13320 tree real0, imag0, real1, imag1;
13321 tree rcond, icond;
13322
13323 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13324 {
13325 real0 = TREE_OPERAND (arg0, 0);
13326 imag0 = TREE_OPERAND (arg0, 1);
13327 }
13328 else
13329 {
13330 real0 = TREE_REALPART (arg0);
13331 imag0 = TREE_IMAGPART (arg0);
13332 }
13333
13334 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13335 {
13336 real1 = TREE_OPERAND (arg1, 0);
13337 imag1 = TREE_OPERAND (arg1, 1);
13338 }
13339 else
13340 {
13341 real1 = TREE_REALPART (arg1);
13342 imag1 = TREE_IMAGPART (arg1);
13343 }
13344
13345 rcond = fold_binary_loc (loc, code, type, real0, real1);
13346 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13347 {
13348 if (integer_zerop (rcond))
13349 {
13350 if (code == EQ_EXPR)
13351 return omit_two_operands_loc (loc, type, boolean_false_node,
13352 imag0, imag1);
13353 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13354 }
13355 else
13356 {
13357 if (code == NE_EXPR)
13358 return omit_two_operands_loc (loc, type, boolean_true_node,
13359 imag0, imag1);
13360 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13361 }
13362 }
13363
13364 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13365 if (icond && TREE_CODE (icond) == INTEGER_CST)
13366 {
13367 if (integer_zerop (icond))
13368 {
13369 if (code == EQ_EXPR)
13370 return omit_two_operands_loc (loc, type, boolean_false_node,
13371 real0, real1);
13372 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13373 }
13374 else
13375 {
13376 if (code == NE_EXPR)
13377 return omit_two_operands_loc (loc, type, boolean_true_node,
13378 real0, real1);
13379 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13380 }
13381 }
13382 }
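/* Illustration: if the real parts fold to unequal constants,
   e.g. (2 + y*i) == (3 + z*i), the whole EQ_EXPR folds to false
   and the imaginary operands are dropped via omit_two_operands.  */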
13383
13384 return NULL_TREE;
13385
13386 case LT_EXPR:
13387 case GT_EXPR:
13388 case LE_EXPR:
13389 case GE_EXPR:
13390 tem = fold_comparison (loc, code, type, op0, op1);
13391 if (tem != NULL_TREE)
13392 return tem;
13393
13394 /* Transform comparisons of the form X +- C CMP X. */
13395 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13396 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13397 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13398 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13399 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13400 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13401 {
13402 tree arg01 = TREE_OPERAND (arg0, 1);
13403 enum tree_code code0 = TREE_CODE (arg0);
13404 int is_positive;
13405
13406 if (TREE_CODE (arg01) == REAL_CST)
13407 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13408 else
13409 is_positive = tree_int_cst_sgn (arg01);
13410
13411 /* (X - c) > X becomes false. */
13412 if (code == GT_EXPR
13413 && ((code0 == MINUS_EXPR && is_positive >= 0)
13414 || (code0 == PLUS_EXPR && is_positive <= 0)))
13415 {
13416 if (TREE_CODE (arg01) == INTEGER_CST
13417 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13418 fold_overflow_warning (("assuming signed overflow does not "
13419 "occur when assuming that (X - c) > X "
13420 "is always false"),
13421 WARN_STRICT_OVERFLOW_ALL);
13422 return constant_boolean_node (0, type);
13423 }
13424
13425 /* Likewise (X + c) < X becomes false. */
13426 if (code == LT_EXPR
13427 && ((code0 == PLUS_EXPR && is_positive >= 0)
13428 || (code0 == MINUS_EXPR && is_positive <= 0)))
13429 {
13430 if (TREE_CODE (arg01) == INTEGER_CST
13431 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13432 fold_overflow_warning (("assuming signed overflow does not "
13433 "occur when assuming that "
13434 "(X + c) < X is always false"),
13435 WARN_STRICT_OVERFLOW_ALL);
13436 return constant_boolean_node (0, type);
13437 }
13438
13439 /* Convert (X - c) <= X to true. */
13440 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13441 && code == LE_EXPR
13442 && ((code0 == MINUS_EXPR && is_positive >= 0)
13443 || (code0 == PLUS_EXPR && is_positive <= 0)))
13444 {
13445 if (TREE_CODE (arg01) == INTEGER_CST
13446 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13447 fold_overflow_warning (("assuming signed overflow does not "
13448 "occur when assuming that "
13449 "(X - c) <= X is always true"),
13450 WARN_STRICT_OVERFLOW_ALL);
13451 return constant_boolean_node (1, type);
13452 }
13453
13454 /* Convert (X + c) >= X to true. */
13455 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13456 && code == GE_EXPR
13457 && ((code0 == PLUS_EXPR && is_positive >= 0)
13458 || (code0 == MINUS_EXPR && is_positive <= 0)))
13459 {
13460 if (TREE_CODE (arg01) == INTEGER_CST
13461 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13462 fold_overflow_warning (("assuming signed overflow does not "
13463 "occur when assuming that "
13464 "(X + c) >= X is always true"),
13465 WARN_STRICT_OVERFLOW_ALL);
13466 return constant_boolean_node (1, type);
13467 }
13468
13469 if (TREE_CODE (arg01) == INTEGER_CST)
13470 {
13471 /* Convert X + c > X and X - c < X to true for integers. */
13472 if (code == GT_EXPR
13473 && ((code0 == PLUS_EXPR && is_positive > 0)
13474 || (code0 == MINUS_EXPR && is_positive < 0)))
13475 {
13476 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13477 fold_overflow_warning (("assuming signed overflow does "
13478 "not occur when assuming that "
13479 "(X + c) > X is always true"),
13480 WARN_STRICT_OVERFLOW_ALL);
13481 return constant_boolean_node (1, type);
13482 }
13483
13484 if (code == LT_EXPR
13485 && ((code0 == MINUS_EXPR && is_positive > 0)
13486 || (code0 == PLUS_EXPR && is_positive < 0)))
13487 {
13488 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13489 fold_overflow_warning (("assuming signed overflow does "
13490 "not occur when assuming that "
13491 "(X - c) < X is always true"),
13492 WARN_STRICT_OVERFLOW_ALL);
13493 return constant_boolean_node (1, type);
13494 }
13495
13496 /* Convert X + c <= X and X - c >= X to false for integers. */
13497 if (code == LE_EXPR
13498 && ((code0 == PLUS_EXPR && is_positive > 0)
13499 || (code0 == MINUS_EXPR && is_positive < 0)))
13500 {
13501 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13502 fold_overflow_warning (("assuming signed overflow does "
13503 "not occur when assuming that "
13504 "(X + c) <= X is always false"),
13505 WARN_STRICT_OVERFLOW_ALL);
13506 return constant_boolean_node (0, type);
13507 }
13508
13509 if (code == GE_EXPR
13510 && ((code0 == MINUS_EXPR && is_positive > 0)
13511 || (code0 == PLUS_EXPR && is_positive < 0)))
13512 {
13513 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13514 fold_overflow_warning (("assuming signed overflow does "
13515 "not occur when assuming that "
13516 "(X - c) >= X is always false"),
13517 WARN_STRICT_OVERFLOW_ALL);
13518 return constant_boolean_node (0, type);
13519 }
13520 }
13521 }
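/* Illustration: for signed int x, where signed overflow is
   undefined, x + 1 > x folds to true and x - 1 >= x folds to false;
   fold_overflow_warning records the assumption so -Wstrict-overflow
   can report it.  */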
13522
13523 /* Comparisons with the highest or lowest possible integer of
13524 the specified precision will have known values. */
13525 {
13526 tree arg1_type = TREE_TYPE (arg1);
13527 unsigned int width = TYPE_PRECISION (arg1_type);
13528
13529 if (TREE_CODE (arg1) == INTEGER_CST
13530 && width <= HOST_BITS_PER_DOUBLE_INT
13531 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13532 {
13533 HOST_WIDE_INT signed_max_hi;
13534 unsigned HOST_WIDE_INT signed_max_lo;
13535 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13536
13537 if (width <= HOST_BITS_PER_WIDE_INT)
13538 {
13539 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13540 - 1;
13541 signed_max_hi = 0;
13542 max_hi = 0;
13543
13544 if (TYPE_UNSIGNED (arg1_type))
13545 {
13546 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13547 min_lo = 0;
13548 min_hi = 0;
13549 }
13550 else
13551 {
13552 max_lo = signed_max_lo;
13553 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13554 min_hi = -1;
13555 }
13556 }
13557 else
13558 {
13559 width -= HOST_BITS_PER_WIDE_INT;
13560 signed_max_lo = -1;
13561 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13562 - 1;
13563 max_lo = -1;
13564 min_lo = 0;
13565
13566 if (TYPE_UNSIGNED (arg1_type))
13567 {
13568 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13569 min_hi = 0;
13570 }
13571 else
13572 {
13573 max_hi = signed_max_hi;
13574 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13575 }
13576 }
13577
13578 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13579 && TREE_INT_CST_LOW (arg1) == max_lo)
13580 switch (code)
13581 {
13582 case GT_EXPR:
13583 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13584
13585 case GE_EXPR:
13586 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13587
13588 case LE_EXPR:
13589 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13590
13591 case LT_EXPR:
13592 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13593
13594 /* The GE_EXPR and LT_EXPR cases above are not normally
13595 reached because of previous transformations. */
13596
13597 default:
13598 break;
13599 }
13600 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13601 == max_hi
13602 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13603 switch (code)
13604 {
13605 case GT_EXPR:
13606 arg1 = const_binop (PLUS_EXPR, arg1,
13607 build_int_cst (TREE_TYPE (arg1), 1));
13608 return fold_build2_loc (loc, EQ_EXPR, type,
13609 fold_convert_loc (loc,
13610 TREE_TYPE (arg1), arg0),
13611 arg1);
13612 case LE_EXPR:
13613 arg1 = const_binop (PLUS_EXPR, arg1,
13614 build_int_cst (TREE_TYPE (arg1), 1));
13615 return fold_build2_loc (loc, NE_EXPR, type,
13616 fold_convert_loc (loc, TREE_TYPE (arg1),
13617 arg0),
13618 arg1);
13619 default:
13620 break;
13621 }
13622 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13623 == min_hi
13624 && TREE_INT_CST_LOW (arg1) == min_lo)
13625 switch (code)
13626 {
13627 case LT_EXPR:
13628 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13629
13630 case LE_EXPR:
13631 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13632
13633 case GE_EXPR:
13634 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13635
13636 case GT_EXPR:
13637 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13638
13639 default:
13640 break;
13641 }
13642 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13643 == min_hi
13644 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13645 switch (code)
13646 {
13647 case GE_EXPR:
13648 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13649 return fold_build2_loc (loc, NE_EXPR, type,
13650 fold_convert_loc (loc,
13651 TREE_TYPE (arg1), arg0),
13652 arg1);
13653 case LT_EXPR:
13654 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13655 return fold_build2_loc (loc, EQ_EXPR, type,
13656 fold_convert_loc (loc, TREE_TYPE (arg1),
13657 arg0),
13658 arg1);
13659 default:
13660 break;
13661 }
13662
13663 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13664 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13665 && TYPE_UNSIGNED (arg1_type)
13666 /* We will flip the signedness of the comparison operator
13667 associated with the mode of arg1, so the sign bit is
13668 specified by this mode. Check that arg1 is the signed
13669 max associated with this sign bit. */
13670 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13671 /* signed_type does not work on pointer types. */
13672 && INTEGRAL_TYPE_P (arg1_type))
13673 {
13674 /* The following case also applies to X < signed_max+1
13675 and X >= signed_max+1 because of previous transformations. */
13676 if (code == LE_EXPR || code == GT_EXPR)
13677 {
13678 tree st;
13679 st = signed_type_for (TREE_TYPE (arg1));
13680 return fold_build2_loc (loc,
13681 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13682 type, fold_convert_loc (loc, st, arg0),
13683 build_int_cst (st, 0));
13684 }
13685 }
13686 }
13687 }
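/* Illustrations: for unsigned int x, x > UINT_MAX folds to false
   and x >= UINT_MAX to x == UINT_MAX; the signedness flip at the
   end turns x > INT_MAX into (int) x < 0.  */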
13688
13689 /* If we are comparing an ABS_EXPR with a constant, we can
13690 convert all the cases into explicit comparisons, but they may
13691 well not be faster than doing the ABS and one comparison.
13692 But ABS (X) <= C is a range comparison, which becomes a subtraction
13693 and a comparison, and is probably faster. */
13694 if (code == LE_EXPR
13695 && TREE_CODE (arg1) == INTEGER_CST
13696 && TREE_CODE (arg0) == ABS_EXPR
13697 && ! TREE_SIDE_EFFECTS (arg0)
13698 && (0 != (tem = negate_expr (arg1)))
13699 && TREE_CODE (tem) == INTEGER_CST
13700 && !TREE_OVERFLOW (tem))
13701 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13702 build2 (GE_EXPR, type,
13703 TREE_OPERAND (arg0, 0), tem),
13704 build2 (LE_EXPR, type,
13705 TREE_OPERAND (arg0, 0), arg1));
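/* Illustration: abs (x) <= 5 becomes x >= -5 && x <= 5, built as a
   TRUTH_ANDIF_EXPR of the two comparisons above.  */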
13706
13707 /* Convert ABS_EXPR<x> >= 0 to true. */
13708 strict_overflow_p = false;
13709 if (code == GE_EXPR
13710 && (integer_zerop (arg1)
13711 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13712 && real_zerop (arg1)))
13713 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13714 {
13715 if (strict_overflow_p)
13716 fold_overflow_warning (("assuming signed overflow does not occur "
13717 "when simplifying comparison of "
13718 "absolute value and zero"),
13719 WARN_STRICT_OVERFLOW_CONDITIONAL);
13720 return omit_one_operand_loc (loc, type,
13721 constant_boolean_node (true, type),
13722 arg0);
13723 }
13724
13725 /* Convert ABS_EXPR<x> < 0 to false. */
13726 strict_overflow_p = false;
13727 if (code == LT_EXPR
13728 && (integer_zerop (arg1) || real_zerop (arg1))
13729 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13730 {
13731 if (strict_overflow_p)
13732 fold_overflow_warning (("assuming signed overflow does not occur "
13733 "when simplifying comparison of "
13734 "absolute value and zero"),
13735 WARN_STRICT_OVERFLOW_CONDITIONAL);
13736 return omit_one_operand_loc (loc, type,
13737 constant_boolean_node (false, type),
13738 arg0);
13739 }
13740
13741 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13742 and similarly for >= into !=. */
13743 if ((code == LT_EXPR || code == GE_EXPR)
13744 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13745 && TREE_CODE (arg1) == LSHIFT_EXPR
13746 && integer_onep (TREE_OPERAND (arg1, 0)))
13747 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13748 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13749 TREE_OPERAND (arg1, 1)),
13750 build_zero_cst (TREE_TYPE (arg0)));
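/* Illustration: for unsigned x, x < (1 << y) folds to
   (x >> y) == 0 and x >= (1 << y) to (x >> y) != 0, trading the
   shift-left for a shift-right and a comparison with zero.  */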
13751
13752 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13753 otherwise Y might be >= # of bits in X's type and thus e.g.
13754 (unsigned char) (1 << Y) for Y == 15 might be 0.
13755 If the cast is widening, then 1 << Y should have unsigned type,
13756 otherwise if Y is the number of bits in the signed shift type minus 1,
13757 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13758 Y == 31 might be 0xffffffff80000000. */
13759 if ((code == LT_EXPR || code == GE_EXPR)
13760 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13761 && CONVERT_EXPR_P (arg1)
13762 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13763 && (TYPE_PRECISION (TREE_TYPE (arg1))
13764 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13765 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13766 || (TYPE_PRECISION (TREE_TYPE (arg1))
13767 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13768 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13769 {
13770 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13771 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13772 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13773 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13774 build_zero_cst (TREE_TYPE (arg0)));
13775 }
13776
13777 return NULL_TREE;
13778
13779 case UNORDERED_EXPR:
13780 case ORDERED_EXPR:
13781 case UNLT_EXPR:
13782 case UNLE_EXPR:
13783 case UNGT_EXPR:
13784 case UNGE_EXPR:
13785 case UNEQ_EXPR:
13786 case LTGT_EXPR:
13787 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13788 {
13789 t1 = fold_relational_const (code, type, arg0, arg1);
13790 if (t1 != NULL_TREE)
13791 return t1;
13792 }
13793
13794 /* If the first operand is NaN, the result is constant. */
13795 if (TREE_CODE (arg0) == REAL_CST
13796 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13797 && (code != LTGT_EXPR || ! flag_trapping_math))
13798 {
13799 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13800 ? integer_zero_node
13801 : integer_one_node;
13802 return omit_one_operand_loc (loc, type, t1, arg1);
13803 }
13804
13805 /* If the second operand is NaN, the result is constant. */
13806 if (TREE_CODE (arg1) == REAL_CST
13807 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13808 && (code != LTGT_EXPR || ! flag_trapping_math))
13809 {
13810 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13811 ? integer_zero_node
13812 : integer_one_node;
13813 return omit_one_operand_loc (loc, type, t1, arg0);
13814 }
13815
13816 /* Simplify unordered comparison of something with itself. */
13817 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13818 && operand_equal_p (arg0, arg1, 0))
13819 return constant_boolean_node (1, type);
13820
13821 if (code == LTGT_EXPR
13822 && !flag_trapping_math
13823 && operand_equal_p (arg0, arg1, 0))
13824 return constant_boolean_node (0, type);
13825
13826 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13827 {
13828 tree targ0 = strip_float_extensions (arg0);
13829 tree targ1 = strip_float_extensions (arg1);
13830 tree newtype = TREE_TYPE (targ0);
13831
13832 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13833 newtype = TREE_TYPE (targ1);
13834
13835 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13836 return fold_build2_loc (loc, code, type,
13837 fold_convert_loc (loc, newtype, targ0),
13838 fold_convert_loc (loc, newtype, targ1));
13839 }
13840
13841 return NULL_TREE;
13842
13843 case COMPOUND_EXPR:
13844 /* When pedantic, a compound expression can be neither an lvalue
13845 nor an integer constant expression. */
13846 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13847 return NULL_TREE;
13848 /* Don't let (0, 0) be a null pointer constant. */
13849 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13850 : fold_convert_loc (loc, type, arg1);
13851 return pedantic_non_lvalue_loc (loc, tem);
13852
13853 case COMPLEX_EXPR:
13854 if ((TREE_CODE (arg0) == REAL_CST
13855 && TREE_CODE (arg1) == REAL_CST)
13856 || (TREE_CODE (arg0) == INTEGER_CST
13857 && TREE_CODE (arg1) == INTEGER_CST))
13858 return build_complex (type, arg0, arg1);
13859 if (TREE_CODE (arg0) == REALPART_EXPR
13860 && TREE_CODE (arg1) == IMAGPART_EXPR
13861 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13862 && operand_equal_p (TREE_OPERAND (arg0, 0),
13863 TREE_OPERAND (arg1, 0), 0))
13864 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13865 TREE_OPERAND (arg1, 0));
13866 return NULL_TREE;
13867
13868 case ASSERT_EXPR:
13869 /* An ASSERT_EXPR should never be passed to fold_binary. */
13870 gcc_unreachable ();
13871
13872 case VEC_PACK_TRUNC_EXPR:
13873 case VEC_PACK_FIX_TRUNC_EXPR:
13874 {
13875 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13876 tree *elts;
13877
13878 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13879 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13880 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13881 return NULL_TREE;
13882
13883 elts = XALLOCAVEC (tree, nelts);
13884 if (!vec_cst_ctor_to_array (arg0, elts)
13885 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13886 return NULL_TREE;
13887
13888 for (i = 0; i < nelts; i++)
13889 {
13890 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13891 ? NOP_EXPR : FIX_TRUNC_EXPR,
13892 TREE_TYPE (type), elts[i]);
13893 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13894 return NULL_TREE;
13895 }
13896
13897 return build_vector (type, elts);
13898 }
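/* Illustration: VEC_PACK_TRUNC_EXPR of the V2SI constants { 1, 2 }
   and { 3, 4 } into V4HI yields { 1, 2, 3, 4 }, each element
   converted to the narrower element type.  */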
13899
13900 case VEC_WIDEN_MULT_LO_EXPR:
13901 case VEC_WIDEN_MULT_HI_EXPR:
13902 case VEC_WIDEN_MULT_EVEN_EXPR:
13903 case VEC_WIDEN_MULT_ODD_EXPR:
13904 {
13905 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13906 unsigned int out, ofs, scale;
13907 tree *elts;
13908
13909 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13910 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13911 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13912 return NULL_TREE;
13913
13914 elts = XALLOCAVEC (tree, nelts * 4);
13915 if (!vec_cst_ctor_to_array (arg0, elts)
13916 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13917 return NULL_TREE;
13918
13919 if (code == VEC_WIDEN_MULT_LO_EXPR)
13920 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13921 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13922 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13923 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13924 scale = 1, ofs = 0;
13925 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13926 scale = 1, ofs = 1;
13927
13928 for (out = 0; out < nelts; out++)
13929 {
13930 unsigned int in1 = (out << scale) + ofs;
13931 unsigned int in2 = in1 + nelts * 2;
13932 tree t1, t2;
13933
13934 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13935 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13936
13937 if (t1 == NULL_TREE || t2 == NULL_TREE)
13938 return NULL_TREE;
13939 elts[out] = const_binop (MULT_EXPR, t1, t2);
13940 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13941 return NULL_TREE;
13942 }
13943
13944 return build_vector (type, elts);
13945 }
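/* Illustration: VEC_WIDEN_MULT_EVEN_EXPR of the V4HI constants
   { 1, 2, 3, 4 } and { 5, 6, 7, 8 } yields the V2SI { 1*5, 3*7 },
   i.e. { 5, 21 }; the ODD variant yields { 12, 32 }, and the LO/HI
   variants pick a half depending on BYTES_BIG_ENDIAN.  */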
13946
13947 default:
13948 return NULL_TREE;
13949 } /* switch (code) */
13950 }
13951
13952 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13953 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13954 of GOTO_EXPR. */
13955
13956 static tree
13957 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13958 {
13959 switch (TREE_CODE (*tp))
13960 {
13961 case LABEL_EXPR:
13962 return *tp;
13963
13964 case GOTO_EXPR:
13965 *walk_subtrees = 0;
13966
13967 /* ... fall through ... */
13968
13969 default:
13970 return NULL_TREE;
13971 }
13972 }
13973
13974 /* Return whether the sub-tree ST contains a label which is accessible from
13975 outside the sub-tree. */
13976
13977 static bool
13978 contains_label_p (tree st)
13979 {
13980 return
13981 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13982 }
13983
13984 /* Fold a ternary expression of code CODE and type TYPE with operands
13985 OP0, OP1, and OP2. Return the folded expression if folding is
13986 successful. Otherwise, return NULL_TREE. */
13987
13988 tree
13989 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13990 tree op0, tree op1, tree op2)
13991 {
13992 tree tem;
13993 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13994 enum tree_code_class kind = TREE_CODE_CLASS (code);
13995
13996 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13997 && TREE_CODE_LENGTH (code) == 3);
13998
13999 /* Strip any conversions that don't change the mode. This is safe
14000 for every expression, except for a comparison expression because
14001 its signedness is derived from its operands. So, in the latter
14002 case, only strip conversions that don't change the signedness.
14003
14004 Note that this is done as an internal manipulation within the
14005 constant folder, in order to find the simplest representation of
14006 the arguments so that their form can be studied. In any case,
14007 the appropriate type conversions should be put back in the tree
14008 that will get out of the constant folder. */
14009 if (op0)
14010 {
14011 arg0 = op0;
14012 STRIP_NOPS (arg0);
14013 }
14014
14015 if (op1)
14016 {
14017 arg1 = op1;
14018 STRIP_NOPS (arg1);
14019 }
14020
14021 if (op2)
14022 {
14023 arg2 = op2;
14024 STRIP_NOPS (arg2);
14025 }
14026
14027 switch (code)
14028 {
14029 case COMPONENT_REF:
14030 if (TREE_CODE (arg0) == CONSTRUCTOR
14031 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14032 {
14033 unsigned HOST_WIDE_INT idx;
14034 tree field, value;
14035 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14036 if (field == arg1)
14037 return value;
14038 }
14039 return NULL_TREE;
14040
14041 case COND_EXPR:
14042 case VEC_COND_EXPR:
14043 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14044 so all simple results must be passed through pedantic_non_lvalue. */
14045 if (TREE_CODE (arg0) == INTEGER_CST)
14046 {
14047 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14048 tem = integer_zerop (arg0) ? op2 : op1;
14049 /* Only optimize constant conditions when the selected branch
14050 has the same type as the COND_EXPR. This avoids optimizing
14051 away "c ? x : throw", where the throw has a void type.
14052 Also avoid throwing away an operand that contains a label. */
14053 if ((!TREE_SIDE_EFFECTS (unused_op)
14054 || !contains_label_p (unused_op))
14055 && (! VOID_TYPE_P (TREE_TYPE (tem))
14056 || VOID_TYPE_P (type)))
14057 return pedantic_non_lvalue_loc (loc, tem);
14058 return NULL_TREE;
14059 }
14060 else if (TREE_CODE (arg0) == VECTOR_CST)
14061 {
14062 if (integer_all_onesp (arg0))
14063 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14064 if (integer_zerop (arg0))
14065 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14066
14067 if ((TREE_CODE (arg1) == VECTOR_CST
14068 || TREE_CODE (arg1) == CONSTRUCTOR)
14069 && (TREE_CODE (arg2) == VECTOR_CST
14070 || TREE_CODE (arg2) == CONSTRUCTOR))
14071 {
14072 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14073 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14074 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14075 for (i = 0; i < nelts; i++)
14076 {
14077 tree val = VECTOR_CST_ELT (arg0, i);
14078 if (integer_all_onesp (val))
14079 sel[i] = i;
14080 else if (integer_zerop (val))
14081 sel[i] = nelts + i;
14082 else /* Currently unreachable. */
14083 return NULL_TREE;
14084 }
14085 tree t = fold_vec_perm (type, arg1, arg2, sel);
14086 if (t != NULL_TREE)
14087 return t;
14088 }
14089 }
14090
14091 if (operand_equal_p (arg1, op2, 0))
14092 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14093
14094 /* If we have A op B ? A : C, we may be able to convert this to a
14095 simpler expression, depending on the operation and the values
14096 of B and C. Signed zeros prevent all of these transformations,
14097 for reasons given above each one.
14098
14099 Also try swapping the arguments and inverting the conditional. */
14100 if (COMPARISON_CLASS_P (arg0)
14101 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14102 arg1, TREE_OPERAND (arg0, 1))
14103 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14104 {
14105 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14106 if (tem)
14107 return tem;
14108 }
14109
14110 if (COMPARISON_CLASS_P (arg0)
14111 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14112 op2,
14113 TREE_OPERAND (arg0, 1))
14114 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14115 {
14116 location_t loc0 = expr_location_or (arg0, loc);
14117 tem = fold_invert_truthvalue (loc0, arg0);
14118 if (tem && COMPARISON_CLASS_P (tem))
14119 {
14120 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14121 if (tem)
14122 return tem;
14123 }
14124 }
14125
14126 /* If the second operand is simpler than the third, swap them
14127 since that produces better jump optimization results. */
14128 if (truth_value_p (TREE_CODE (arg0))
14129 && tree_swap_operands_p (op1, op2, false))
14130 {
14131 location_t loc0 = expr_location_or (arg0, loc);
14132 /* See if this can be inverted. If it can't, possibly because
14133 it was a floating-point inequality comparison, don't do
14134 anything. */
14135 tem = fold_invert_truthvalue (loc0, arg0);
14136 if (tem)
14137 return fold_build3_loc (loc, code, type, tem, op2, op1);
14138 }
14139
14140 /* Convert A ? 1 : 0 to simply A. */
14141 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14142 : (integer_onep (op1)
14143 && !VECTOR_TYPE_P (type)))
14144 && integer_zerop (op2)
14145 /* If we try to convert OP0 to our type, the
14146 call to fold will try to move the conversion inside
14147 a COND, which will recurse. In that case, the COND_EXPR
14148 is probably the best choice, so leave it alone. */
14149 && type == TREE_TYPE (arg0))
14150 return pedantic_non_lvalue_loc (loc, arg0);
14151
14152 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14153 over COND_EXPR in cases such as floating point comparisons. */
14154 if (integer_zerop (op1)
14155 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14156 : (integer_onep (op2)
14157 && !VECTOR_TYPE_P (type)))
14158 && truth_value_p (TREE_CODE (arg0)))
14159 return pedantic_non_lvalue_loc (loc,
14160 fold_convert_loc (loc, type,
14161 invert_truthvalue_loc (loc,
14162 arg0)));
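/* Illustrations of the two folds above: a ? 1 : 0 becomes simply a,
   and a ? 0 : 1 becomes !a, when a is already a truth value of the
   right type.  */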
14163
14164 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14165 if (TREE_CODE (arg0) == LT_EXPR
14166 && integer_zerop (TREE_OPERAND (arg0, 1))
14167 && integer_zerop (op2)
14168 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14169 {
14170 /* sign_bit_p only checks ARG1 bits within A's precision.
14171 If <sign bit of A> has a wider type than A, bits outside
14172 of A's precision in <sign bit of A> need to be checked.
14173 If they are all 0, this optimization needs to be done
14174 in unsigned A's type; if they are all 1, in signed A's type;
14175 otherwise this can't be done. */
14176 if (TYPE_PRECISION (TREE_TYPE (tem))
14177 < TYPE_PRECISION (TREE_TYPE (arg1))
14178 && TYPE_PRECISION (TREE_TYPE (tem))
14179 < TYPE_PRECISION (type))
14180 {
14181 unsigned HOST_WIDE_INT mask_lo;
14182 HOST_WIDE_INT mask_hi;
14183 int inner_width, outer_width;
14184 tree tem_type;
14185
14186 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14187 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14188 if (outer_width > TYPE_PRECISION (type))
14189 outer_width = TYPE_PRECISION (type);
14190
14191 if (outer_width > HOST_BITS_PER_WIDE_INT)
14192 {
14193 mask_hi = (HOST_WIDE_INT_M1U
14194 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14195 mask_lo = -1;
14196 }
14197 else
14198 {
14199 mask_hi = 0;
14200 mask_lo = (HOST_WIDE_INT_M1U
14201 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14202 }
14203 if (inner_width > HOST_BITS_PER_WIDE_INT)
14204 {
14205 mask_hi &= ~(HOST_WIDE_INT_M1U
14206 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14207 mask_lo = 0;
14208 }
14209 else
14210 mask_lo &= ~(HOST_WIDE_INT_M1U
14211 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14212
14213 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14214 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14215 {
14216 tem_type = signed_type_for (TREE_TYPE (tem));
14217 tem = fold_convert_loc (loc, tem_type, tem);
14218 }
14219 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14220 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14221 {
14222 tem_type = unsigned_type_for (TREE_TYPE (tem));
14223 tem = fold_convert_loc (loc, tem_type, tem);
14224 }
14225 else
14226 tem = NULL;
14227 }
14228
14229 if (tem)
14230 return
14231 fold_convert_loc (loc, type,
14232 fold_build2_loc (loc, BIT_AND_EXPR,
14233 TREE_TYPE (tem), tem,
14234 fold_convert_loc (loc,
14235 TREE_TYPE (tem),
14236 arg1)));
14237 }
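/* Illustration: for int x, x < 0 ? INT_MIN : 0 folds to
   x & INT_MIN -- the sign bit is set exactly when x is negative.  */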
14238
14239 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14240 already handled above. */
14241 if (TREE_CODE (arg0) == BIT_AND_EXPR
14242 && integer_onep (TREE_OPERAND (arg0, 1))
14243 && integer_zerop (op2)
14244 && integer_pow2p (arg1))
14245 {
14246 tree tem = TREE_OPERAND (arg0, 0);
14247 STRIP_NOPS (tem);
14248 if (TREE_CODE (tem) == RSHIFT_EXPR
14249 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14250 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
14251 == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14252 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14253 TREE_OPERAND (tem, 0), arg1);
14254 }
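/* Illustration: (a >> 3) & 1 ? 8 : 0 folds to a & 8, since testing
   bit 3 and then materializing 1 << 3 is just masking bit 3.  */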
14255
14256 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14257 is probably obsolete because the first operand should be a
14258 truth value (that's why we have the two cases above), but let's
14259 leave it in until we can confirm this for all front-ends. */
14260 if (integer_zerop (op2)
14261 && TREE_CODE (arg0) == NE_EXPR
14262 && integer_zerop (TREE_OPERAND (arg0, 1))
14263 && integer_pow2p (arg1)
14264 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14265 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14266 arg1, OEP_ONLY_CONST))
14267 return pedantic_non_lvalue_loc (loc,
14268 fold_convert_loc (loc, type,
14269 TREE_OPERAND (arg0, 0)));
14270
14271 /* Disable the transformations below for vectors, since
14272 fold_binary_op_with_conditional_arg may undo them immediately,
14273 yielding an infinite loop. */
14274 if (code == VEC_COND_EXPR)
14275 return NULL_TREE;
14276
14277 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14278 if (integer_zerop (op2)
14279 && truth_value_p (TREE_CODE (arg0))
14280 && truth_value_p (TREE_CODE (arg1))
14281 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14282 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14283 : TRUTH_ANDIF_EXPR,
14284 type, fold_convert_loc (loc, type, arg0), arg1);
14285
14286 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14287 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14288 && truth_value_p (TREE_CODE (arg0))
14289 && truth_value_p (TREE_CODE (arg1))
14290 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14291 {
14292 location_t loc0 = expr_location_or (arg0, loc);
14293 /* Only perform transformation if ARG0 is easily inverted. */
14294 tem = fold_invert_truthvalue (loc0, arg0);
14295 if (tem)
14296 return fold_build2_loc (loc, code == VEC_COND_EXPR
14297 ? BIT_IOR_EXPR
14298 : TRUTH_ORIF_EXPR,
14299 type, fold_convert_loc (loc, type, tem),
14300 arg1);
14301 }
14302
14303 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14304 if (integer_zerop (arg1)
14305 && truth_value_p (TREE_CODE (arg0))
14306 && truth_value_p (TREE_CODE (op2))
14307 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14308 {
14309 location_t loc0 = expr_location_or (arg0, loc);
14310 /* Only perform transformation if ARG0 is easily inverted. */
14311 tem = fold_invert_truthvalue (loc0, arg0);
14312 if (tem)
14313 return fold_build2_loc (loc, code == VEC_COND_EXPR
14314 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14315 type, fold_convert_loc (loc, type, tem),
14316 op2);
14317 }
14318
14319 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14320 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14321 && truth_value_p (TREE_CODE (arg0))
14322 && truth_value_p (TREE_CODE (op2))
14323 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14324 return fold_build2_loc (loc, code == VEC_COND_EXPR
14325 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14326 type, fold_convert_loc (loc, type, arg0), op2);
14327
14328 return NULL_TREE;
14329
14330 case CALL_EXPR:
14331 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14332 of fold_ternary on them. */
14333 gcc_unreachable ();
14334
14335 case BIT_FIELD_REF:
14336 if ((TREE_CODE (arg0) == VECTOR_CST
14337 || (TREE_CODE (arg0) == CONSTRUCTOR
14338 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14339 && (type == TREE_TYPE (TREE_TYPE (arg0))
14340 || (TREE_CODE (type) == VECTOR_TYPE
14341 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14342 {
14343 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14344 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14345 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14346 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14347
14348 if (n != 0
14349 && (idx % width) == 0
14350 && (n % width) == 0
14351 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14352 {
14353 idx = idx / width;
14354 n = n / width;
14355
14356 if (TREE_CODE (arg0) == VECTOR_CST)
14357 {
14358 if (n == 1)
14359 return VECTOR_CST_ELT (arg0, idx);
14360
14361 tree *vals = XALLOCAVEC (tree, n);
14362 for (unsigned i = 0; i < n; ++i)
14363 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14364 return build_vector (type, vals);
14365 }
14366
14367 /* Constructor elements can be subvectors. */
14368 unsigned HOST_WIDE_INT k = 1;
14369 if (CONSTRUCTOR_NELTS (arg0) != 0)
14370 {
14371 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14372 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14373 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14374 }
14375
14376 /* We keep an exact subset of the constructor elements. */
14377 if ((idx % k) == 0 && (n % k) == 0)
14378 {
14379 if (CONSTRUCTOR_NELTS (arg0) == 0)
14380 return build_constructor (type, NULL);
14381 idx /= k;
14382 n /= k;
14383 if (n == 1)
14384 {
14385 if (idx < CONSTRUCTOR_NELTS (arg0))
14386 return CONSTRUCTOR_ELT (arg0, idx)->value;
14387 return build_zero_cst (type);
14388 }
14389
14390 vec<constructor_elt, va_gc> *vals;
14391 vec_alloc (vals, n);
14392 for (unsigned i = 0;
14393 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14394 ++i)
14395 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14396 CONSTRUCTOR_ELT
14397 (arg0, idx + i)->value);
14398 return build_constructor (type, vals);
14399 }
14400 /* The bitfield references a single constructor element. */
14401 else if (idx + n <= (idx / k + 1) * k)
14402 {
14403 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14404 return build_zero_cst (type);
14405 else if (n == k)
14406 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14407 else
14408 return fold_build3_loc (loc, code, type,
14409 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14410 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14411 }
14412 }
14413 }
14414
14415 /* A bit-field-ref that references the full argument can be stripped. */
14416 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14417 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14418 && integer_zerop (op2))
14419 return fold_convert_loc (loc, type, arg0);
14420
14421 /* On constants we can use native encode/interpret to constant
14422 fold (nearly) all BIT_FIELD_REFs. */
14423 if (CONSTANT_CLASS_P (arg0)
14424 && can_native_interpret_type_p (type)
14425 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14426 /* This limitation should not be necessary, we just need to
14427 round this up to mode size. */
14428 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14429 /* Need bit-shifting of the buffer to relax the following. */
14430 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14431 {
14432 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14433 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14434 unsigned HOST_WIDE_INT clen;
14435 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14436 /* ??? We cannot tell native_encode_expr to start at
14437 some random byte only. So limit us to a reasonable amount
14438 of work. */
14439 if (clen <= 4096)
14440 {
14441 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14442 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14443 if (len > 0
14444 && len * BITS_PER_UNIT >= bitpos + bitsize)
14445 {
14446 tree v = native_interpret_expr (type,
14447 b + bitpos / BITS_PER_UNIT,
14448 bitsize / BITS_PER_UNIT);
14449 if (v)
14450 return v;
14451 }
14452 }
14453 }
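/* Illustration: on a little-endian target, a BIT_FIELD_REF of the
   32-bit constant 0x11223344 with size 8 and offset 0 encodes the
   bytes 44 33 22 11 and folds to 0x44.  */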
14454
14455 return NULL_TREE;
14456
14457 case FMA_EXPR:
14458 /* For integers we can decompose the FMA if possible. */
14459 if (TREE_CODE (arg0) == INTEGER_CST
14460 && TREE_CODE (arg1) == INTEGER_CST)
14461 return fold_build2_loc (loc, PLUS_EXPR, type,
14462 const_binop (MULT_EXPR, arg0, arg1), arg2);
14463 if (integer_zerop (arg2))
14464 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14465
14466 return fold_fma (loc, type, arg0, arg1, arg2);
14467
14468 case VEC_PERM_EXPR:
14469 if (TREE_CODE (arg2) == VECTOR_CST)
14470 {
14471 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14472 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14473 tree t;
14474 bool need_mask_canon = false;
14475 bool all_in_vec0 = true;
14476 bool all_in_vec1 = true;
14477 bool maybe_identity = true;
14478 bool single_arg = (op0 == op1);
14479 bool changed = false;
14480
14481 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14482 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14483 for (i = 0; i < nelts; i++)
14484 {
14485 tree val = VECTOR_CST_ELT (arg2, i);
14486 if (TREE_CODE (val) != INTEGER_CST)
14487 return NULL_TREE;
14488
14489 sel[i] = TREE_INT_CST_LOW (val) & mask;
14490 if (TREE_INT_CST_HIGH (val)
14491 || ((unsigned HOST_WIDE_INT)
14492 TREE_INT_CST_LOW (val) != sel[i]))
14493 need_mask_canon = true;
14494
14495 if (sel[i] < nelts)
14496 all_in_vec1 = false;
14497 else
14498 all_in_vec0 = false;
14499
14500 if ((sel[i] & (nelts-1)) != i)
14501 maybe_identity = false;
14502 }
14503
14504 if (maybe_identity)
14505 {
14506 if (all_in_vec0)
14507 return op0;
14508 if (all_in_vec1)
14509 return op1;
14510 }
14511
14512 if (all_in_vec0)
14513 op1 = op0;
14514 else if (all_in_vec1)
14515 {
14516 op0 = op1;
14517 for (i = 0; i < nelts; i++)
14518 sel[i] -= nelts;
14519 need_mask_canon = true;
14520 }
14521
14522 if ((TREE_CODE (op0) == VECTOR_CST
14523 || TREE_CODE (op0) == CONSTRUCTOR)
14524 && (TREE_CODE (op1) == VECTOR_CST
14525 || TREE_CODE (op1) == CONSTRUCTOR))
14526 {
14527 t = fold_vec_perm (type, op0, op1, sel);
14528 if (t != NULL_TREE)
14529 return t;
14530 }
14531
14532 if (op0 == op1 && !single_arg)
14533 changed = true;
14534
14535 if (need_mask_canon && arg2 == op2)
14536 {
14537 tree *tsel = XALLOCAVEC (tree, nelts);
14538 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14539 for (i = 0; i < nelts; i++)
14540 tsel[i] = build_int_cst (eltype, sel[i]);
14541 op2 = build_vector (TREE_TYPE (arg2), tsel);
14542 changed = true;
14543 }
14544
14545 if (changed)
14546 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14547 }
14548 return NULL_TREE;
14549
14550 default:
14551 return NULL_TREE;
14552 } /* switch (code) */
14553 }
14554
14555 /* Perform constant folding and related simplification of EXPR.
14556 The related simplifications include x*1 => x, x*0 => 0, etc.,
14557 and application of the associative law.
14558 NOP_EXPR conversions may be removed freely (as long as we
14559 are careful not to change the type of the overall expression).
14560 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14561 but we can constant-fold them if they have constant operands. */
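/* For illustration: fold of the tree for 1 + 2 returns the
   INTEGER_CST 3, fold of x * 1 returns x, and a tree that cannot
   be simplified is returned unchanged.  */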
14562
14563 #ifdef ENABLE_FOLD_CHECKING
14564 # define fold(x) fold_1 (x)
14565 static tree fold_1 (tree);
14566 static
14567 #endif
14568 tree
14569 fold (tree expr)
14570 {
14571 const tree t = expr;
14572 enum tree_code code = TREE_CODE (t);
14573 enum tree_code_class kind = TREE_CODE_CLASS (code);
14574 tree tem;
14575 location_t loc = EXPR_LOCATION (expr);
14576
14577 /* Return right away if a constant. */
14578 if (kind == tcc_constant)
14579 return t;
14580
14581 /* CALL_EXPR-like objects with variable numbers of operands are
14582 treated specially. */
14583 if (kind == tcc_vl_exp)
14584 {
14585 if (code == CALL_EXPR)
14586 {
14587 tem = fold_call_expr (loc, expr, false);
14588 return tem ? tem : expr;
14589 }
14590 return expr;
14591 }
14592
14593 if (IS_EXPR_CODE_CLASS (kind))
14594 {
14595 tree type = TREE_TYPE (t);
14596 tree op0, op1, op2;
14597
14598 switch (TREE_CODE_LENGTH (code))
14599 {
14600 case 1:
14601 op0 = TREE_OPERAND (t, 0);
14602 tem = fold_unary_loc (loc, code, type, op0);
14603 return tem ? tem : expr;
14604 case 2:
14605 op0 = TREE_OPERAND (t, 0);
14606 op1 = TREE_OPERAND (t, 1);
14607 tem = fold_binary_loc (loc, code, type, op0, op1);
14608 return tem ? tem : expr;
14609 case 3:
14610 op0 = TREE_OPERAND (t, 0);
14611 op1 = TREE_OPERAND (t, 1);
14612 op2 = TREE_OPERAND (t, 2);
14613 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14614 return tem ? tem : expr;
14615 default:
14616 break;
14617 }
14618 }
14619
14620 switch (code)
14621 {
14622 case ARRAY_REF:
14623 {
14624 tree op0 = TREE_OPERAND (t, 0);
14625 tree op1 = TREE_OPERAND (t, 1);
14626
14627 if (TREE_CODE (op1) == INTEGER_CST
14628 && TREE_CODE (op0) == CONSTRUCTOR
14629 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14630 {
14631 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14632 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14633 unsigned HOST_WIDE_INT begin = 0;
14634
14635 /* Find a matching index by means of a binary search. */
14636 while (begin != end)
14637 {
14638 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14639 tree index = (*elts)[middle].index;
14640
14641 if (TREE_CODE (index) == INTEGER_CST
14642 && tree_int_cst_lt (index, op1))
14643 begin = middle + 1;
14644 else if (TREE_CODE (index) == INTEGER_CST
14645 && tree_int_cst_lt (op1, index))
14646 end = middle;
14647 else if (TREE_CODE (index) == RANGE_EXPR
14648 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14649 begin = middle + 1;
14650 else if (TREE_CODE (index) == RANGE_EXPR
14651 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14652 end = middle;
14653 else
14654 return (*elts)[middle].value;
14655 }
14656 }
14657
14658 return t;
14659 }
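/* Illustration: folding the ARRAY_REF a[2], where a's CONSTRUCTOR
   is { 10, 20, 30 }, binary-searches the element indices and
   returns the INTEGER_CST 30.  */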
14660
14661 /* Return a VECTOR_CST if possible. */
14662 case CONSTRUCTOR:
14663 {
14664 tree type = TREE_TYPE (t);
14665 if (TREE_CODE (type) != VECTOR_TYPE)
14666 return t;
14667
14668 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14669 unsigned HOST_WIDE_INT idx, pos = 0;
14670 tree value;
14671
14672 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14673 {
14674 if (!CONSTANT_CLASS_P (value))
14675 return t;
14676 if (TREE_CODE (value) == VECTOR_CST)
14677 {
14678 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14679 vec[pos++] = VECTOR_CST_ELT (value, i);
14680 }
14681 else
14682 vec[pos++] = value;
14683 }
14684 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14685 vec[pos] = build_zero_cst (TREE_TYPE (type));
14686
14687 return build_vector (type, vec);
14688 }
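/* Illustration: a CONSTRUCTOR { 1, 2 } of constants for a
   four-element vector type becomes the VECTOR_CST { 1, 2, 0, 0 };
   trailing elements are zero-filled.  */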
14689
14690 case CONST_DECL:
14691 return fold (DECL_INITIAL (t));
14692
14693 default:
14694 return t;
14695 } /* switch (code) */
14696 }
14697
14698 #ifdef ENABLE_FOLD_CHECKING
14699 #undef fold
14700
14701 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14702 hash_table <pointer_hash <tree_node> >);
14703 static void fold_check_failed (const_tree, const_tree);
14704 void print_fold_checksum (const_tree);
14705
14706 /* When --enable-checking=fold, compute a digest of expr before
14707 and after the actual fold call to verify that fold did not
14708 accidentally change the original expr. */
14709
14710 tree
14711 fold (tree expr)
14712 {
14713 tree ret;
14714 struct md5_ctx ctx;
14715 unsigned char checksum_before[16], checksum_after[16];
14716 hash_table <pointer_hash <tree_node> > ht;
14717
14718 ht.create (32);
14719 md5_init_ctx (&ctx);
14720 fold_checksum_tree (expr, &ctx, ht);
14721 md5_finish_ctx (&ctx, checksum_before);
14722 ht.empty ();
14723
14724 ret = fold_1 (expr);
14725
14726 md5_init_ctx (&ctx);
14727 fold_checksum_tree (expr, &ctx, ht);
14728 md5_finish_ctx (&ctx, checksum_after);
14729 ht.dispose ();
14730
14731 if (memcmp (checksum_before, checksum_after, 16))
14732 fold_check_failed (expr, ret);
14733
14734 return ret;
14735 }
14736
14737 void
14738 print_fold_checksum (const_tree expr)
14739 {
14740 struct md5_ctx ctx;
14741 unsigned char checksum[16], cnt;
14742 hash_table <pointer_hash <tree_node> > ht;
14743
14744 ht.create (32);
14745 md5_init_ctx (&ctx);
14746 fold_checksum_tree (expr, &ctx, ht);
14747 md5_finish_ctx (&ctx, checksum);
14748 ht.dispose ();
14749 for (cnt = 0; cnt < 16; ++cnt)
14750 fprintf (stderr, "%02x", checksum[cnt]);
14751 putc ('\n', stderr);
14752 }
14753
14754 static void
14755 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14756 {
14757 internal_error ("fold check: original tree changed by fold");
14758 }
14759
14760 static void
14761 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14762 hash_table <pointer_hash <tree_node> > ht)
14763 {
14764 tree_node **slot;
14765 enum tree_code code;
14766 union tree_node buf;
14767 int i, len;
14768
14769 recursive_label:
14770 if (expr == NULL)
14771 return;
14772 slot = ht.find_slot (expr, INSERT);
14773 if (*slot != NULL)
14774 return;
14775 *slot = CONST_CAST_TREE (expr);
14776 code = TREE_CODE (expr);
14777 if (TREE_CODE_CLASS (code) == tcc_declaration
14778 && DECL_ASSEMBLER_NAME_SET_P (expr))
14779 {
14780 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14781 memcpy ((char *) &buf, expr, tree_size (expr));
14782 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14783 expr = (tree) &buf;
14784 }
14785 else if (TREE_CODE_CLASS (code) == tcc_type
14786 && (TYPE_POINTER_TO (expr)
14787 || TYPE_REFERENCE_TO (expr)
14788 || TYPE_CACHED_VALUES_P (expr)
14789 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14790 || TYPE_NEXT_VARIANT (expr)))
14791 {
14792 /* Allow these fields to be modified. */
14793 tree tmp;
14794 memcpy ((char *) &buf, expr, tree_size (expr));
14795 expr = tmp = (tree) &buf;
14796 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14797 TYPE_POINTER_TO (tmp) = NULL;
14798 TYPE_REFERENCE_TO (tmp) = NULL;
14799 TYPE_NEXT_VARIANT (tmp) = NULL;
14800 if (TYPE_CACHED_VALUES_P (tmp))
14801 {
14802 TYPE_CACHED_VALUES_P (tmp) = 0;
14803 TYPE_CACHED_VALUES (tmp) = NULL;
14804 }
14805 }
14806 md5_process_bytes (expr, tree_size (expr), ctx);
14807 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14808 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14809 if (TREE_CODE_CLASS (code) != tcc_type
14810 && TREE_CODE_CLASS (code) != tcc_declaration
14811 && code != TREE_LIST
14812 && code != SSA_NAME
14813 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14814 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14815 switch (TREE_CODE_CLASS (code))
14816 {
14817 case tcc_constant:
14818 switch (code)
14819 {
14820 case STRING_CST:
14821 md5_process_bytes (TREE_STRING_POINTER (expr),
14822 TREE_STRING_LENGTH (expr), ctx);
14823 break;
14824 case COMPLEX_CST:
14825 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14826 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14827 break;
14828 case VECTOR_CST:
14829 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14830 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14831 break;
14832 default:
14833 break;
14834 }
14835 break;
14836 case tcc_exceptional:
14837 switch (code)
14838 {
14839 case TREE_LIST:
14840 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14841 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14842 expr = TREE_CHAIN (expr);
14843 goto recursive_label;
14844 break;
14845 case TREE_VEC:
14846 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14847 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14848 break;
14849 default:
14850 break;
14851 }
14852 break;
14853 case tcc_expression:
14854 case tcc_reference:
14855 case tcc_comparison:
14856 case tcc_unary:
14857 case tcc_binary:
14858 case tcc_statement:
14859 case tcc_vl_exp:
14860 len = TREE_OPERAND_LENGTH (expr);
14861 for (i = 0; i < len; ++i)
14862 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14863 break;
14864 case tcc_declaration:
14865 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14866 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14867 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14868 {
14869 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14870 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14871 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14872 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14873 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14874 }
14875 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14876 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14877
14878 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14879 {
14880 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14881 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14882 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14883 }
14884 break;
14885 case tcc_type:
14886 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14887 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14888 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14889 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14890 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14891 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14892 if (INTEGRAL_TYPE_P (expr)
14893 || SCALAR_FLOAT_TYPE_P (expr))
14894 {
14895 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14896 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14897 }
14898 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14899 if (TREE_CODE (expr) == RECORD_TYPE
14900 || TREE_CODE (expr) == UNION_TYPE
14901 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14902 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14903 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14904 break;
14905 default:
14906 break;
14907 }
14908 }
14909
14910 /* Helper function for outputting the checksum of a tree T. When
14911 debugging with gdb, you can "define mynext" to be "next" followed
14912 by "call debug_fold_checksum (op0)", then just trace down till the
14913 outputs differ. */
14914
14915 DEBUG_FUNCTION void
14916 debug_fold_checksum (const_tree t)
14917 {
14918 int i;
14919 unsigned char checksum[16];
14920 struct md5_ctx ctx;
14921 hash_table <pointer_hash <tree_node> > ht;
14922 ht.create (32);
14923
14924 md5_init_ctx (&ctx);
14925 fold_checksum_tree (t, &ctx, ht);
14926 md5_finish_ctx (&ctx, checksum);
14927 ht.empty ();
14928
14929 for (i = 0; i < 16; i++)
14930 fprintf (stderr, "%d ", checksum[i]);
14931
14932 fprintf (stderr, "\n");
14933 }
14934
14935 #endif
14936
14937 /* Fold a unary tree expression with code CODE of type TYPE with an
14938 operand OP0. LOC is the location of the resulting expression.
14939 Return a folded expression if successful. Otherwise, return a tree
14940 expression with code CODE of type TYPE with an operand OP0. */
14941
14942 tree
14943 fold_build1_stat_loc (location_t loc,
14944 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14945 {
14946 tree tem;
14947 #ifdef ENABLE_FOLD_CHECKING
14948 unsigned char checksum_before[16], checksum_after[16];
14949 struct md5_ctx ctx;
14950 hash_table <pointer_hash <tree_node> > ht;
14951
14952 ht.create (32);
14953 md5_init_ctx (&ctx);
14954 fold_checksum_tree (op0, &ctx, ht);
14955 md5_finish_ctx (&ctx, checksum_before);
14956 ht.empty ();
14957 #endif
14958
14959 tem = fold_unary_loc (loc, code, type, op0);
14960 if (!tem)
14961 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14962
14963 #ifdef ENABLE_FOLD_CHECKING
14964 md5_init_ctx (&ctx);
14965 fold_checksum_tree (op0, &ctx, ht);
14966 md5_finish_ctx (&ctx, checksum_after);
14967 ht.dispose ();
14968
14969 if (memcmp (checksum_before, checksum_after, 16))
14970 fold_check_failed (op0, tem);
14971 #endif
14972 return tem;
14973 }
14974
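/* An illustrative, self-contained sketch (plain C, not GCC internals)
   of the ENABLE_FOLD_CHECKING pattern used above: digest an input
   before and after a call that must not modify it, and abort if the
   two digests differ.  A toy multiplicative hash stands in for md5,
   and all names here are hypothetical.  */

static unsigned int
example_digest (const unsigned char *buf, size_t len)
{
  unsigned int h = 0;
  while (len--)
    h = h * 31 + *buf++;
  return h;
}

static void
example_checked_call (unsigned char *buf, size_t len,
                      void (*fn) (unsigned char *, size_t))
{
  unsigned int before = example_digest (buf, len);
  fn (buf, len);            /* FN is required to leave BUF intact.  */
  if (before != example_digest (buf, len))
    abort ();               /* the analogue of fold_check_failed.  */
}
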
14975 /* Fold a binary tree expression with code CODE of type TYPE with
14976 operands OP0 and OP1. LOC is the location of the resulting
14977 expression. Return a folded expression if successful. Otherwise,
14978 return a tree expression with code CODE of type TYPE with operands
14979 OP0 and OP1. */
14980
14981 tree
14982 fold_build2_stat_loc (location_t loc,
14983 enum tree_code code, tree type, tree op0, tree op1
14984 MEM_STAT_DECL)
14985 {
14986 tree tem;
14987 #ifdef ENABLE_FOLD_CHECKING
14988 unsigned char checksum_before_op0[16],
14989 checksum_before_op1[16],
14990 checksum_after_op0[16],
14991 checksum_after_op1[16];
14992 struct md5_ctx ctx;
14993 hash_table <pointer_hash <tree_node> > ht;
14994
14995 ht.create (32);
14996 md5_init_ctx (&ctx);
14997 fold_checksum_tree (op0, &ctx, ht);
14998 md5_finish_ctx (&ctx, checksum_before_op0);
14999 ht.empty ();
15000
15001 md5_init_ctx (&ctx);
15002 fold_checksum_tree (op1, &ctx, ht);
15003 md5_finish_ctx (&ctx, checksum_before_op1);
15004 ht.empty ();
15005 #endif
15006
15007 tem = fold_binary_loc (loc, code, type, op0, op1);
15008 if (!tem)
15009 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15010
15011 #ifdef ENABLE_FOLD_CHECKING
15012 md5_init_ctx (&ctx);
15013 fold_checksum_tree (op0, &ctx, ht);
15014 md5_finish_ctx (&ctx, checksum_after_op0);
15015 ht.empty ();
15016
15017 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15018 fold_check_failed (op0, tem);
15019
15020 md5_init_ctx (&ctx);
15021 fold_checksum_tree (op1, &ctx, ht);
15022 md5_finish_ctx (&ctx, checksum_after_op1);
15023 ht.dispose ();
15024
15025 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15026 fold_check_failed (op1, tem);
15027 #endif
15028 return tem;
15029 }
15030
15031 /* Fold a ternary tree expression with code CODE of type TYPE with
15032 operands OP0, OP1, and OP2. Return a folded expression if
15033 successful. Otherwise, return a tree expression with code CODE of
15034 type TYPE with operands OP0, OP1, and OP2. */
15035
15036 tree
15037 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15038 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15039 {
15040 tree tem;
15041 #ifdef ENABLE_FOLD_CHECKING
15042 unsigned char checksum_before_op0[16],
15043 checksum_before_op1[16],
15044 checksum_before_op2[16],
15045 checksum_after_op0[16],
15046 checksum_after_op1[16],
15047 checksum_after_op2[16];
15048 struct md5_ctx ctx;
15049 hash_table <pointer_hash <tree_node> > ht;
15050
15051 ht.create (32);
15052 md5_init_ctx (&ctx);
15053 fold_checksum_tree (op0, &ctx, ht);
15054 md5_finish_ctx (&ctx, checksum_before_op0);
15055 ht.empty ();
15056
15057 md5_init_ctx (&ctx);
15058 fold_checksum_tree (op1, &ctx, ht);
15059 md5_finish_ctx (&ctx, checksum_before_op1);
15060 ht.empty ();
15061
15062 md5_init_ctx (&ctx);
15063 fold_checksum_tree (op2, &ctx, ht);
15064 md5_finish_ctx (&ctx, checksum_before_op2);
15065 ht.empty ();
15066 #endif
15067
15068 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15069 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15070 if (!tem)
15071 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15072
15073 #ifdef ENABLE_FOLD_CHECKING
15074 md5_init_ctx (&ctx);
15075 fold_checksum_tree (op0, &ctx, ht);
15076 md5_finish_ctx (&ctx, checksum_after_op0);
15077 ht.empty ();
15078
15079 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15080 fold_check_failed (op0, tem);
15081
15082 md5_init_ctx (&ctx);
15083 fold_checksum_tree (op1, &ctx, ht);
15084 md5_finish_ctx (&ctx, checksum_after_op1);
15085 ht.empty ();
15086
15087 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15088 fold_check_failed (op1, tem);
15089
15090 md5_init_ctx (&ctx);
15091 fold_checksum_tree (op2, &ctx, ht);
15092 md5_finish_ctx (&ctx, checksum_after_op2);
15093 ht.dispose ();
15094
15095 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15096 fold_check_failed (op2, tem);
15097 #endif
15098 return tem;
15099 }
15100
15101 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
15102 arguments in ARGARRAY, and a null static chain.
15103 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15104 of type TYPE from the given operands as constructed by build_call_array. */
15105
15106 tree
15107 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15108 int nargs, tree *argarray)
15109 {
15110 tree tem;
15111 #ifdef ENABLE_FOLD_CHECKING
15112 unsigned char checksum_before_fn[16],
15113 checksum_before_arglist[16],
15114 checksum_after_fn[16],
15115 checksum_after_arglist[16];
15116 struct md5_ctx ctx;
15117 hash_table <pointer_hash <tree_node> > ht;
15118 int i;
15119
15120 ht.create (32);
15121 md5_init_ctx (&ctx);
15122 fold_checksum_tree (fn, &ctx, ht);
15123 md5_finish_ctx (&ctx, checksum_before_fn);
15124 ht.empty ();
15125
15126 md5_init_ctx (&ctx);
15127 for (i = 0; i < nargs; i++)
15128 fold_checksum_tree (argarray[i], &ctx, ht);
15129 md5_finish_ctx (&ctx, checksum_before_arglist);
15130 ht.empty ();
15131 #endif
15132
15133 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15134
15135 #ifdef ENABLE_FOLD_CHECKING
15136 md5_init_ctx (&ctx);
15137 fold_checksum_tree (fn, &ctx, ht);
15138 md5_finish_ctx (&ctx, checksum_after_fn);
15139 ht.empty ();
15140
15141 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15142 fold_check_failed (fn, tem);
15143
15144 md5_init_ctx (&ctx);
15145 for (i = 0; i < nargs; i++)
15146 fold_checksum_tree (argarray[i], &ctx, ht);
15147 md5_finish_ctx (&ctx, checksum_after_arglist);
15148 ht.dispose ();
15149
15150 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15151 fold_check_failed (NULL_TREE, tem);
15152 #endif
15153 return tem;
15154 }
15155
15156 /* Perform constant folding and related simplification of an initializer
15157 expression. These functions behave identically to "fold_buildN" but ignore
15158 potential run-time traps and exceptions that fold must preserve. */
15159
15160 #define START_FOLD_INIT \
15161 int saved_signaling_nans = flag_signaling_nans;\
15162 int saved_trapping_math = flag_trapping_math;\
15163 int saved_rounding_math = flag_rounding_math;\
15164 int saved_trapv = flag_trapv;\
15165 int saved_folding_initializer = folding_initializer;\
15166 flag_signaling_nans = 0;\
15167 flag_trapping_math = 0;\
15168 flag_rounding_math = 0;\
15169 flag_trapv = 0;\
15170 folding_initializer = 1;
15171
15172 #define END_FOLD_INIT \
15173 flag_signaling_nans = saved_signaling_nans;\
15174 flag_trapping_math = saved_trapping_math;\
15175 flag_rounding_math = saved_rounding_math;\
15176 flag_trapv = saved_trapv;\
15177 folding_initializer = saved_folding_initializer;
15178
15179 tree
15180 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15181 tree type, tree op)
15182 {
15183 tree result;
15184 START_FOLD_INIT;
15185
15186 result = fold_build1_loc (loc, code, type, op);
15187
15188 END_FOLD_INIT;
15189 return result;
15190 }
15191
15192 tree
15193 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15194 tree type, tree op0, tree op1)
15195 {
15196 tree result;
15197 START_FOLD_INIT;
15198
15199 result = fold_build2_loc (loc, code, type, op0, op1);
15200
15201 END_FOLD_INIT;
15202 return result;
15203 }
15204
15205 tree
15206 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15207 tree type, tree op0, tree op1, tree op2)
15208 {
15209 tree result;
15210 START_FOLD_INIT;
15211
15212 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15213
15214 END_FOLD_INIT;
15215 return result;
15216 }
15217
15218 tree
15219 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15220 int nargs, tree *argarray)
15221 {
15222 tree result;
15223 START_FOLD_INIT;
15224
15225 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15226
15227 END_FOLD_INIT;
15228 return result;
15229 }
15230
15231 #undef START_FOLD_INIT
15232 #undef END_FOLD_INIT
15233
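/* An illustrative sketch (hypothetical helper, not an existing GCC
   entry point) of how a front end might fold a constant initializer
   with the functions above.  */

static tree
example_fold_static_init (void)
{
  tree lhs = build_int_cst (integer_type_node, 2);
  tree rhs = build_int_cst (integer_type_node, 3);
  /* Folds to the INTEGER_CST 5; signaling NaN, trapping, rounding and
     trapv flags are all suppressed for the duration of the call.  */
  return fold_build2_initializer_loc (UNKNOWN_LOCATION, PLUS_EXPR,
                                      integer_type_node, lhs, rhs);
}
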
15234 /* Determine if the first argument is a multiple of the second argument.
15235 Return 0 if it is not, or if we cannot easily determine it to be.
15236
15237 An example of the sort of thing we care about (at this point; this routine
15238 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15239 fold cases do now) is discovering that
15240
15241 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15242
15243 is a multiple of
15244
15245 SAVE_EXPR (J * 8)
15246
15247 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15248
15249 This code also handles discovering that
15250
15251 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15252
15253 is a multiple of 8 so we don't have to worry about dealing with a
15254 possible remainder.
15255
15256 Note that we *look* inside a SAVE_EXPR only to determine how it was
15257 calculated; it is not safe for fold to do much of anything else with the
15258 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15259 at run time. For example, the latter example above *cannot* be implemented
15260 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15261 evaluation time of the original SAVE_EXPR is not necessarily the same at
15262 the time the new expression is evaluated. The only optimization of this
15263 sort that would be valid is changing
15264
15265 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15266
15267 divided by 8 to
15268
15269 SAVE_EXPR (I) * SAVE_EXPR (J)
15270
15271 (where the same SAVE_EXPR (J) is used in the original and the
15272 transformed version). */
15273
15274 int
15275 multiple_of_p (tree type, const_tree top, const_tree bottom)
15276 {
15277 if (operand_equal_p (top, bottom, 0))
15278 return 1;
15279
15280 if (TREE_CODE (type) != INTEGER_TYPE)
15281 return 0;
15282
15283 switch (TREE_CODE (top))
15284 {
15285 case BIT_AND_EXPR:
15286 /* A bitwise AND gives a power-of-two multiple: if BOTTOM is a power
15287 of two and either operand is a multiple of BOTTOM, so is TOP. */
15288 if (!integer_pow2p (bottom))
15289 return 0;
15290 /* FALLTHRU */
15291
15292 case MULT_EXPR:
15293 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15294 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15295
15296 case PLUS_EXPR:
15297 case MINUS_EXPR:
15298 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15299 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15300
15301 case LSHIFT_EXPR:
15302 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15303 {
15304 tree op1, t1;
15305
15306 op1 = TREE_OPERAND (top, 1);
15307 /* const_binop may not detect overflow correctly,
15308 so check for it explicitly here. */
15309 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15310 > TREE_INT_CST_LOW (op1)
15311 && TREE_INT_CST_HIGH (op1) == 0
15312 && 0 != (t1 = fold_convert (type,
15313 const_binop (LSHIFT_EXPR,
15314 size_one_node,
15315 op1)))
15316 && !TREE_OVERFLOW (t1))
15317 return multiple_of_p (type, t1, bottom);
15318 }
15319 return 0;
15320
15321 case NOP_EXPR:
15322 /* Can't handle conversions from a non-integral or wider integral type. */
15323 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15324 || (TYPE_PRECISION (type)
15325 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15326 return 0;
15327
15328 /* ... fall through ... */
15329
15330 case SAVE_EXPR:
15331 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15332
15333 case COND_EXPR:
15334 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15335 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15336
15337 case INTEGER_CST:
15338 if (TREE_CODE (bottom) != INTEGER_CST
15339 || integer_zerop (bottom)
15340 || (TYPE_UNSIGNED (type)
15341 && (tree_int_cst_sgn (top) < 0
15342 || tree_int_cst_sgn (bottom) < 0)))
15343 return 0;
15344 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15345 top, bottom));
15346
15347 default:
15348 return 0;
15349 }
15350 }
15351
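/* An illustrative sketch of the query described above; I and J are
   assumed to be arbitrary sizetype trees (e.g. SAVE_EXPRs), and the
   helper itself is hypothetical.  */

static int
example_multiple_query (tree i, tree j)
{
  tree j8 = size_binop (MULT_EXPR, j, size_int (8));
  tree top = size_binop (MULT_EXPR, i, j8);
  /* I * (J * 8) is a multiple of 8 whatever the values of I and J.  */
  return multiple_of_p (sizetype, top, size_int (8));
}
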
15352 /* Return true if CODE or TYPE is known to be non-negative. */
15353
15354 static bool
15355 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15356 {
15357 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15358 && truth_value_p (code))
15359 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15360 have a signed:1 type (where the values are -1 and 0). */
15361 return true;
15362 return false;
15363 }
15364
15365 /* Return true if (CODE OP0) is known to be non-negative. If the return
15366 value is based on the assumption that signed overflow is undefined,
15367 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15368 *STRICT_OVERFLOW_P. */
15369
15370 bool
15371 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15372 bool *strict_overflow_p)
15373 {
15374 if (TYPE_UNSIGNED (type))
15375 return true;
15376
15377 switch (code)
15378 {
15379 case ABS_EXPR:
15380 /* We can't return 1 if flag_wrapv is set because
15381 ABS_EXPR<INT_MIN> = INT_MIN. */
15382 if (!INTEGRAL_TYPE_P (type))
15383 return true;
15384 if (TYPE_OVERFLOW_UNDEFINED (type))
15385 {
15386 *strict_overflow_p = true;
15387 return true;
15388 }
15389 break;
15390
15391 case NON_LVALUE_EXPR:
15392 case FLOAT_EXPR:
15393 case FIX_TRUNC_EXPR:
15394 return tree_expr_nonnegative_warnv_p (op0,
15395 strict_overflow_p);
15396
15397 case NOP_EXPR:
15398 {
15399 tree inner_type = TREE_TYPE (op0);
15400 tree outer_type = type;
15401
15402 if (TREE_CODE (outer_type) == REAL_TYPE)
15403 {
15404 if (TREE_CODE (inner_type) == REAL_TYPE)
15405 return tree_expr_nonnegative_warnv_p (op0,
15406 strict_overflow_p);
15407 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15408 {
15409 if (TYPE_UNSIGNED (inner_type))
15410 return true;
15411 return tree_expr_nonnegative_warnv_p (op0,
15412 strict_overflow_p);
15413 }
15414 }
15415 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15416 {
15417 if (TREE_CODE (inner_type) == REAL_TYPE)
15418 return tree_expr_nonnegative_warnv_p (op0,
15419 strict_overflow_p);
15420 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15421 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15422 && TYPE_UNSIGNED (inner_type);
15423 }
15424 }
15425 break;
15426
15427 default:
15428 return tree_simple_nonnegative_warnv_p (code, type);
15429 }
15430
15431 /* We don't know the sign of `t', so be conservative and return false. */
15432 return false;
15433 }
15434
15435 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15436 value is based on the assumption that signed overflow is undefined,
15437 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15438 *STRICT_OVERFLOW_P. */
15439
15440 bool
15441 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15442 tree op1, bool *strict_overflow_p)
15443 {
15444 if (TYPE_UNSIGNED (type))
15445 return true;
15446
15447 switch (code)
15448 {
15449 case POINTER_PLUS_EXPR:
15450 case PLUS_EXPR:
15451 if (FLOAT_TYPE_P (type))
15452 return (tree_expr_nonnegative_warnv_p (op0,
15453 strict_overflow_p)
15454 && tree_expr_nonnegative_warnv_p (op1,
15455 strict_overflow_p));
15456
15457 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15458 both unsigned and at least 2 bits shorter than the result. */
15459 if (TREE_CODE (type) == INTEGER_TYPE
15460 && TREE_CODE (op0) == NOP_EXPR
15461 && TREE_CODE (op1) == NOP_EXPR)
15462 {
15463 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15464 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15465 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15466 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15467 {
15468 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15469 TYPE_PRECISION (inner2)) + 1;
15470 return prec < TYPE_PRECISION (type);
15471 }
15472 }
15473 break;
15474
15475 case MULT_EXPR:
15476 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15477 {
15478 /* x * x is always non-negative for floating point x
15479 or without overflow. */
15480 if (operand_equal_p (op0, op1, 0)
15481 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15482 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15483 {
15484 if (TYPE_OVERFLOW_UNDEFINED (type))
15485 *strict_overflow_p = true;
15486 return true;
15487 }
15488 }
15489
15490 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15491 both unsigned and their combined precision is less than that of the result. */
15492 if (TREE_CODE (type) == INTEGER_TYPE
15493 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15494 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15495 {
15496 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15497 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15498 : TREE_TYPE (op0);
15499 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15500 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15501 : TREE_TYPE (op1);
15502
15503 bool unsigned0 = TYPE_UNSIGNED (inner0);
15504 bool unsigned1 = TYPE_UNSIGNED (inner1);
15505
15506 if (TREE_CODE (op0) == INTEGER_CST)
15507 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15508
15509 if (TREE_CODE (op1) == INTEGER_CST)
15510 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15511
15512 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15513 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15514 {
15515 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15516 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15517 : TYPE_PRECISION (inner0);
15518
15519 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15520 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15521 : TYPE_PRECISION (inner1);
15522
15523 return precision0 + precision1 < TYPE_PRECISION (type);
15524 }
15525 }
15526 return false;
15527
15528 case BIT_AND_EXPR:
15529 case MAX_EXPR:
15530 return (tree_expr_nonnegative_warnv_p (op0,
15531 strict_overflow_p)
15532 || tree_expr_nonnegative_warnv_p (op1,
15533 strict_overflow_p));
15534
15535 case BIT_IOR_EXPR:
15536 case BIT_XOR_EXPR:
15537 case MIN_EXPR:
15538 case RDIV_EXPR:
15539 case TRUNC_DIV_EXPR:
15540 case CEIL_DIV_EXPR:
15541 case FLOOR_DIV_EXPR:
15542 case ROUND_DIV_EXPR:
15543 return (tree_expr_nonnegative_warnv_p (op0,
15544 strict_overflow_p)
15545 && tree_expr_nonnegative_warnv_p (op1,
15546 strict_overflow_p));
15547
15548 case TRUNC_MOD_EXPR:
15549 case CEIL_MOD_EXPR:
15550 case FLOOR_MOD_EXPR:
15551 case ROUND_MOD_EXPR:
15552 return tree_expr_nonnegative_warnv_p (op0,
15553 strict_overflow_p);
15554 default:
15555 return tree_simple_nonnegative_warnv_p (code, type);
15556 }
15557
15558 /* We don't know the sign of `t', so be conservative and return false. */
15559 return false;
15560 }
15561
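/* An illustrative, self-contained sketch (plain C, not trees) of the
   MULT_EXPR precision test above: two zero-extended 8-bit values need
   at most 8 + 8 = 16 bits of product, so in a 32-bit int the sign bit
   can never be set.  */

static int
example_widened_product (unsigned char x, unsigned char y)
{
  int p = (int) x * (int) y;    /* at most 255 * 255 = 65025 */
  return p >= 0;                /* always true, since 16 < 32 */
}
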
15562 /* Return true if T is known to be non-negative. If the return
15563 value is based on the assumption that signed overflow is undefined,
15564 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15565 *STRICT_OVERFLOW_P. */
15566
15567 bool
15568 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15569 {
15570 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15571 return true;
15572
15573 switch (TREE_CODE (t))
15574 {
15575 case INTEGER_CST:
15576 return tree_int_cst_sgn (t) >= 0;
15577
15578 case REAL_CST:
15579 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15580
15581 case FIXED_CST:
15582 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15583
15584 case COND_EXPR:
15585 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15586 strict_overflow_p)
15587 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15588 strict_overflow_p));
15589 default:
15590 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15591 TREE_TYPE (t));
15592 }
15593 /* We don't know the sign of `t', so be conservative and return false. */
15594 return false;
15595 }
15596
15597 /* Return true if T is known to be non-negative. If the return
15598 value is based on the assumption that signed overflow is undefined,
15599 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15600 *STRICT_OVERFLOW_P. */
15601
15602 bool
15603 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15604 tree arg0, tree arg1, bool *strict_overflow_p)
15605 {
15606 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15607 switch (DECL_FUNCTION_CODE (fndecl))
15608 {
15609 CASE_FLT_FN (BUILT_IN_ACOS):
15610 CASE_FLT_FN (BUILT_IN_ACOSH):
15611 CASE_FLT_FN (BUILT_IN_CABS):
15612 CASE_FLT_FN (BUILT_IN_COSH):
15613 CASE_FLT_FN (BUILT_IN_ERFC):
15614 CASE_FLT_FN (BUILT_IN_EXP):
15615 CASE_FLT_FN (BUILT_IN_EXP10):
15616 CASE_FLT_FN (BUILT_IN_EXP2):
15617 CASE_FLT_FN (BUILT_IN_FABS):
15618 CASE_FLT_FN (BUILT_IN_FDIM):
15619 CASE_FLT_FN (BUILT_IN_HYPOT):
15620 CASE_FLT_FN (BUILT_IN_POW10):
15621 CASE_INT_FN (BUILT_IN_FFS):
15622 CASE_INT_FN (BUILT_IN_PARITY):
15623 CASE_INT_FN (BUILT_IN_POPCOUNT):
15624 CASE_INT_FN (BUILT_IN_CLZ):
15625 CASE_INT_FN (BUILT_IN_CLRSB):
15626 case BUILT_IN_BSWAP32:
15627 case BUILT_IN_BSWAP64:
15628 /* Always true. */
15629 return true;
15630
15631 CASE_FLT_FN (BUILT_IN_SQRT):
15632 /* sqrt(-0.0) is -0.0. */
15633 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15634 return true;
15635 return tree_expr_nonnegative_warnv_p (arg0,
15636 strict_overflow_p);
15637
15638 CASE_FLT_FN (BUILT_IN_ASINH):
15639 CASE_FLT_FN (BUILT_IN_ATAN):
15640 CASE_FLT_FN (BUILT_IN_ATANH):
15641 CASE_FLT_FN (BUILT_IN_CBRT):
15642 CASE_FLT_FN (BUILT_IN_CEIL):
15643 CASE_FLT_FN (BUILT_IN_ERF):
15644 CASE_FLT_FN (BUILT_IN_EXPM1):
15645 CASE_FLT_FN (BUILT_IN_FLOOR):
15646 CASE_FLT_FN (BUILT_IN_FMOD):
15647 CASE_FLT_FN (BUILT_IN_FREXP):
15648 CASE_FLT_FN (BUILT_IN_ICEIL):
15649 CASE_FLT_FN (BUILT_IN_IFLOOR):
15650 CASE_FLT_FN (BUILT_IN_IRINT):
15651 CASE_FLT_FN (BUILT_IN_IROUND):
15652 CASE_FLT_FN (BUILT_IN_LCEIL):
15653 CASE_FLT_FN (BUILT_IN_LDEXP):
15654 CASE_FLT_FN (BUILT_IN_LFLOOR):
15655 CASE_FLT_FN (BUILT_IN_LLCEIL):
15656 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15657 CASE_FLT_FN (BUILT_IN_LLRINT):
15658 CASE_FLT_FN (BUILT_IN_LLROUND):
15659 CASE_FLT_FN (BUILT_IN_LRINT):
15660 CASE_FLT_FN (BUILT_IN_LROUND):
15661 CASE_FLT_FN (BUILT_IN_MODF):
15662 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15663 CASE_FLT_FN (BUILT_IN_RINT):
15664 CASE_FLT_FN (BUILT_IN_ROUND):
15665 CASE_FLT_FN (BUILT_IN_SCALB):
15666 CASE_FLT_FN (BUILT_IN_SCALBLN):
15667 CASE_FLT_FN (BUILT_IN_SCALBN):
15668 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15669 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15670 CASE_FLT_FN (BUILT_IN_SINH):
15671 CASE_FLT_FN (BUILT_IN_TANH):
15672 CASE_FLT_FN (BUILT_IN_TRUNC):
15673 /* True if the 1st argument is nonnegative. */
15674 return tree_expr_nonnegative_warnv_p (arg0,
15675 strict_overflow_p);
15676
15677 CASE_FLT_FN (BUILT_IN_FMAX):
15678 /* True if the 1st OR 2nd arguments are nonnegative. */
15679 return (tree_expr_nonnegative_warnv_p (arg0,
15680 strict_overflow_p)
15681 || (tree_expr_nonnegative_warnv_p (arg1,
15682 strict_overflow_p)));
15683
15684 CASE_FLT_FN (BUILT_IN_FMIN):
15685 /* True if the 1st AND 2nd arguments are nonnegative. */
15686 return (tree_expr_nonnegative_warnv_p (arg0,
15687 strict_overflow_p)
15688 && (tree_expr_nonnegative_warnv_p (arg1,
15689 strict_overflow_p)));
15690
15691 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15692 /* True if the 2nd argument is nonnegative. */
15693 return tree_expr_nonnegative_warnv_p (arg1,
15694 strict_overflow_p);
15695
15696 CASE_FLT_FN (BUILT_IN_POWI):
15697 /* True if the 1st argument is nonnegative or the second
15698 argument is an even integer. */
15699 if (TREE_CODE (arg1) == INTEGER_CST
15700 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15701 return true;
15702 return tree_expr_nonnegative_warnv_p (arg0,
15703 strict_overflow_p);
15704
15705 CASE_FLT_FN (BUILT_IN_POW):
15706 /* True if the 1st argument is nonnegative or the second
15707 argument is an even integer-valued real. */
15708 if (TREE_CODE (arg1) == REAL_CST)
15709 {
15710 REAL_VALUE_TYPE c;
15711 HOST_WIDE_INT n;
15712
15713 c = TREE_REAL_CST (arg1);
15714 n = real_to_integer (&c);
15715 if ((n & 1) == 0)
15716 {
15717 REAL_VALUE_TYPE cint;
15718 real_from_integer (&cint, VOIDmode, n,
15719 n < 0 ? -1 : 0, 0);
15720 if (real_identical (&c, &cint))
15721 return true;
15722 }
15723 }
15724 return tree_expr_nonnegative_warnv_p (arg0,
15725 strict_overflow_p);
15726
15727 default:
15728 break;
15729 }
15730 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15731 type);
15732 }
15733
15734 /* Return true if T is known to be non-negative. If the return
15735 value is based on the assumption that signed overflow is undefined,
15736 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15737 *STRICT_OVERFLOW_P. */
15738
15739 bool
15740 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15741 {
15742 enum tree_code code = TREE_CODE (t);
15743 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15744 return true;
15745
15746 switch (code)
15747 {
15748 case TARGET_EXPR:
15749 {
15750 tree temp = TARGET_EXPR_SLOT (t);
15751 t = TARGET_EXPR_INITIAL (t);
15752
15753 /* If the initializer is non-void, then it's a normal expression
15754 that will be assigned to the slot. */
15755 if (!VOID_TYPE_P (t))
15756 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15757
15758 /* Otherwise, the initializer sets the slot in some way. One common
15759 way is an assignment statement at the end of the initializer. */
15760 while (1)
15761 {
15762 if (TREE_CODE (t) == BIND_EXPR)
15763 t = expr_last (BIND_EXPR_BODY (t));
15764 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15765 || TREE_CODE (t) == TRY_CATCH_EXPR)
15766 t = expr_last (TREE_OPERAND (t, 0));
15767 else if (TREE_CODE (t) == STATEMENT_LIST)
15768 t = expr_last (t);
15769 else
15770 break;
15771 }
15772 if (TREE_CODE (t) == MODIFY_EXPR
15773 && TREE_OPERAND (t, 0) == temp)
15774 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15775 strict_overflow_p);
15776
15777 return false;
15778 }
15779
15780 case CALL_EXPR:
15781 {
15782 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15783 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15784
15785 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15786 get_callee_fndecl (t),
15787 arg0,
15788 arg1,
15789 strict_overflow_p);
15790 }
15791 case COMPOUND_EXPR:
15792 case MODIFY_EXPR:
15793 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15794 strict_overflow_p);
15795 case BIND_EXPR:
15796 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15797 strict_overflow_p);
15798 case SAVE_EXPR:
15799 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15800 strict_overflow_p);
15801
15802 default:
15803 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15804 TREE_TYPE (t));
15805 }
15806
15807 /* We don't know the sign of `t', so be conservative and return false. */
15808 return false;
15809 }
15810
15811 /* Return true if T is known to be non-negative. If the return
15812 value is based on the assumption that signed overflow is undefined,
15813 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15814 *STRICT_OVERFLOW_P. */
15815
15816 bool
15817 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15818 {
15819 enum tree_code code;
15820 if (t == error_mark_node)
15821 return false;
15822
15823 code = TREE_CODE (t);
15824 switch (TREE_CODE_CLASS (code))
15825 {
15826 case tcc_binary:
15827 case tcc_comparison:
15828 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15829 TREE_TYPE (t),
15830 TREE_OPERAND (t, 0),
15831 TREE_OPERAND (t, 1),
15832 strict_overflow_p);
15833
15834 case tcc_unary:
15835 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15836 TREE_TYPE (t),
15837 TREE_OPERAND (t, 0),
15838 strict_overflow_p);
15839
15840 case tcc_constant:
15841 case tcc_declaration:
15842 case tcc_reference:
15843 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15844
15845 default:
15846 break;
15847 }
15848
15849 switch (code)
15850 {
15851 case TRUTH_AND_EXPR:
15852 case TRUTH_OR_EXPR:
15853 case TRUTH_XOR_EXPR:
15854 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15855 TREE_TYPE (t),
15856 TREE_OPERAND (t, 0),
15857 TREE_OPERAND (t, 1),
15858 strict_overflow_p);
15859 case TRUTH_NOT_EXPR:
15860 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15861 TREE_TYPE (t),
15862 TREE_OPERAND (t, 0),
15863 strict_overflow_p);
15864
15865 case COND_EXPR:
15866 case CONSTRUCTOR:
15867 case OBJ_TYPE_REF:
15868 case ASSERT_EXPR:
15869 case ADDR_EXPR:
15870 case WITH_SIZE_EXPR:
15871 case SSA_NAME:
15872 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15873
15874 default:
15875 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15876 }
15877 }
15878
15879 /* Return true if `t' is known to be non-negative. Handle warnings
15880 about undefined signed overflow. */
15881
15882 bool
15883 tree_expr_nonnegative_p (tree t)
15884 {
15885 bool ret, strict_overflow_p;
15886
15887 strict_overflow_p = false;
15888 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15889 if (strict_overflow_p)
15890 fold_overflow_warning (("assuming signed overflow does not occur when "
15891 "determining that expression is always "
15892 "non-negative"),
15893 WARN_STRICT_OVERFLOW_MISC);
15894 return ret;
15895 }
15896
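/* An illustrative sketch (hypothetical helper, not an existing fold
   transformation) of a typical use: dropping an ABS_EXPR whose
   operand is provably non-negative.  */

static tree
example_simplify_abs (location_t loc, tree type, tree arg)
{
  if (tree_expr_nonnegative_p (arg))
    /* |ARG| == ARG; any strict-overflow assumption has already been
       reported through fold_overflow_warning.  */
    return fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}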
15897
15898 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15899 For floating point we further ensure that T is not denormal.
15900 Similar logic is present in nonzero_address_p in rtlanal.c.
15901
15902 If the return value is based on the assumption that signed overflow
15903 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15904 change *STRICT_OVERFLOW_P. */
15905
15906 bool
15907 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15908 bool *strict_overflow_p)
15909 {
15910 switch (code)
15911 {
15912 case ABS_EXPR:
15913 return tree_expr_nonzero_warnv_p (op0,
15914 strict_overflow_p);
15915
15916 case NOP_EXPR:
15917 {
15918 tree inner_type = TREE_TYPE (op0);
15919 tree outer_type = type;
15920
15921 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15922 && tree_expr_nonzero_warnv_p (op0,
15923 strict_overflow_p));
15924 }
15925 break;
15926
15927 case NON_LVALUE_EXPR:
15928 return tree_expr_nonzero_warnv_p (op0,
15929 strict_overflow_p);
15930
15931 default:
15932 break;
15933 }
15934
15935 return false;
15936 }
15937
15938 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15939 For floating point we further ensure that T is not denormal.
15940 Similar logic is present in nonzero_address_p in rtlanal.c.
15941
15942 If the return value is based on the assumption that signed overflow
15943 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15944 change *STRICT_OVERFLOW_P. */
15945
15946 bool
15947 tree_binary_nonzero_warnv_p (enum tree_code code,
15948 tree type,
15949 tree op0,
15950 tree op1, bool *strict_overflow_p)
15951 {
15952 bool sub_strict_overflow_p;
15953 switch (code)
15954 {
15955 case POINTER_PLUS_EXPR:
15956 case PLUS_EXPR:
15957 if (TYPE_OVERFLOW_UNDEFINED (type))
15958 {
15959 /* In the presence of negative values it is hard
15960 to say anything definite. */
15961 sub_strict_overflow_p = false;
15962 if (!tree_expr_nonnegative_warnv_p (op0,
15963 &sub_strict_overflow_p)
15964 || !tree_expr_nonnegative_warnv_p (op1,
15965 &sub_strict_overflow_p))
15966 return false;
15967 /* One of the operands must be positive and the other non-negative. */
15968 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15969 overflows, on a twos-complement machine the sum of two
15970 nonnegative numbers can never be zero. */
15971 return (tree_expr_nonzero_warnv_p (op0,
15972 strict_overflow_p)
15973 || tree_expr_nonzero_warnv_p (op1,
15974 strict_overflow_p));
15975 }
15976 break;
15977
15978 case MULT_EXPR:
15979 if (TYPE_OVERFLOW_UNDEFINED (type))
15980 {
15981 if (tree_expr_nonzero_warnv_p (op0,
15982 strict_overflow_p)
15983 && tree_expr_nonzero_warnv_p (op1,
15984 strict_overflow_p))
15985 {
15986 *strict_overflow_p = true;
15987 return true;
15988 }
15989 }
15990 break;
15991
15992 case MIN_EXPR:
15993 sub_strict_overflow_p = false;
15994 if (tree_expr_nonzero_warnv_p (op0,
15995 &sub_strict_overflow_p)
15996 && tree_expr_nonzero_warnv_p (op1,
15997 &sub_strict_overflow_p))
15998 {
15999 if (sub_strict_overflow_p)
16000 *strict_overflow_p = true;
16001 }
16002 break;
16003
16004 case MAX_EXPR:
16005 sub_strict_overflow_p = false;
16006 if (tree_expr_nonzero_warnv_p (op0,
16007 &sub_strict_overflow_p))
16008 {
16009 if (sub_strict_overflow_p)
16010 *strict_overflow_p = true;
16011
16012 /* When both operands are nonzero, MAX must be too. */
16013 if (tree_expr_nonzero_warnv_p (op1,
16014 strict_overflow_p))
16015 return true;
16016
16017 /* MAX where operand 0 is positive is positive. */
16018 return tree_expr_nonnegative_warnv_p (op0,
16019 strict_overflow_p);
16020 }
16021 /* MAX where operand 1 is positive is positive. */
16022 else if (tree_expr_nonzero_warnv_p (op1,
16023 &sub_strict_overflow_p)
16024 && tree_expr_nonnegative_warnv_p (op1,
16025 &sub_strict_overflow_p))
16026 {
16027 if (sub_strict_overflow_p)
16028 *strict_overflow_p = true;
16029 return true;
16030 }
16031 break;
16032
16033 case BIT_IOR_EXPR:
16034 return (tree_expr_nonzero_warnv_p (op1,
16035 strict_overflow_p)
16036 || tree_expr_nonzero_warnv_p (op0,
16037 strict_overflow_p));
16038
16039 default:
16040 break;
16041 }
16042
16043 return false;
16044 }
16045
16046 /* Return true when T is an address and is known to be nonzero.
16047 For floating point we further ensure that T is not denormal.
16048 Similar logic is present in nonzero_address_p in rtlanal.c.
16049
16050 If the return value is based on the assumption that signed overflow
16051 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16052 change *STRICT_OVERFLOW_P. */
16053
16054 bool
16055 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16056 {
16057 bool sub_strict_overflow_p;
16058 switch (TREE_CODE (t))
16059 {
16060 case INTEGER_CST:
16061 return !integer_zerop (t);
16062
16063 case ADDR_EXPR:
16064 {
16065 tree base = TREE_OPERAND (t, 0);
16066 if (!DECL_P (base))
16067 base = get_base_address (base);
16068
16069 if (!base)
16070 return false;
16071
16072 /* Weak declarations may link to NULL. Other things may also be NULL,
16073 so guard them behind -fdelete-null-pointer-checks; variables
16074 allocated on the stack, however, can never be NULL. */
16075 if (DECL_P (base)
16076 && (flag_delete_null_pointer_checks
16077 || (DECL_CONTEXT (base)
16078 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16079 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16080 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16081
16082 /* Constants are never weak. */
16083 if (CONSTANT_CLASS_P (base))
16084 return true;
16085
16086 return false;
16087 }
16088
16089 case COND_EXPR:
16090 sub_strict_overflow_p = false;
16091 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16092 &sub_strict_overflow_p)
16093 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16094 &sub_strict_overflow_p))
16095 {
16096 if (sub_strict_overflow_p)
16097 *strict_overflow_p = true;
16098 return true;
16099 }
16100 break;
16101
16102 default:
16103 break;
16104 }
16105 return false;
16106 }
16107
16108 /* Return true when T is an address and is known to be nonzero.
16109 For floating point we further ensure that T is not denormal.
16110 Similar logic is present in nonzero_address_p in rtlanal.c.
16111
16112 If the return value is based on the assumption that signed overflow
16113 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16114 change *STRICT_OVERFLOW_P. */
16115
16116 bool
16117 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16118 {
16119 tree type = TREE_TYPE (t);
16120 enum tree_code code;
16121
16122 /* Doing something useful for floating point would need more work. */
16123 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
16124 return false;
16125
16126 code = TREE_CODE (t);
16127 switch (TREE_CODE_CLASS (code))
16128 {
16129 case tcc_unary:
16130 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16131 strict_overflow_p);
16132 case tcc_binary:
16133 case tcc_comparison:
16134 return tree_binary_nonzero_warnv_p (code, type,
16135 TREE_OPERAND (t, 0),
16136 TREE_OPERAND (t, 1),
16137 strict_overflow_p);
16138 case tcc_constant:
16139 case tcc_declaration:
16140 case tcc_reference:
16141 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16142
16143 default:
16144 break;
16145 }
16146
16147 switch (code)
16148 {
16149 case TRUTH_NOT_EXPR:
16150 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16151 strict_overflow_p);
16152
16153 case TRUTH_AND_EXPR:
16154 case TRUTH_OR_EXPR:
16155 case TRUTH_XOR_EXPR:
16156 return tree_binary_nonzero_warnv_p (code, type,
16157 TREE_OPERAND (t, 0),
16158 TREE_OPERAND (t, 1),
16159 strict_overflow_p);
16160
16161 case COND_EXPR:
16162 case CONSTRUCTOR:
16163 case OBJ_TYPE_REF:
16164 case ASSERT_EXPR:
16165 case ADDR_EXPR:
16166 case WITH_SIZE_EXPR:
16167 case SSA_NAME:
16168 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16169
16170 case COMPOUND_EXPR:
16171 case MODIFY_EXPR:
16172 case BIND_EXPR:
16173 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16174 strict_overflow_p);
16175
16176 case SAVE_EXPR:
16177 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16178 strict_overflow_p);
16179
16180 case CALL_EXPR:
16181 return alloca_call_p (t);
16182
16183 default:
16184 break;
16185 }
16186 return false;
16187 }
16188
16189 /* Return true when T is an address and is known to be nonzero.
16190 Handle warnings about undefined signed overflow. */
16191
16192 bool
16193 tree_expr_nonzero_p (tree t)
16194 {
16195 bool ret, strict_overflow_p;
16196
16197 strict_overflow_p = false;
16198 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16199 if (strict_overflow_p)
16200 fold_overflow_warning (("assuming signed overflow does not occur when "
16201 "determining that expression is always "
16202 "non-zero"),
16203 WARN_STRICT_OVERFLOW_MISC);
16204 return ret;
16205 }
16206
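/* An illustrative sketch (hypothetical helper): folding a comparison
   against zero when the operand is provably nonzero.  */

static tree
example_fold_ne_zero (location_t loc, tree expr)
{
  if (tree_expr_nonzero_p (expr))
    return constant_boolean_node (true, boolean_type_node);
  return fold_build2_loc (loc, NE_EXPR, boolean_type_node, expr,
                          build_zero_cst (TREE_TYPE (expr)));
}
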
16207 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16208 attempt to fold the expression to a constant without modifying TYPE,
16209 OP0 or OP1.
16210
16211 If the expression can be simplified to a constant, then return
16212 the constant. If the expression cannot be simplified to a
16213 constant, then return NULL_TREE.
16214
16215 tree
16216 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16217 {
16218 tree tem = fold_binary (code, type, op0, op1);
16219 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16220 }
16221
16222 /* Given the components of a unary expression CODE, TYPE and OP0,
16223 attempt to fold the expression to a constant without modifying
16224 TYPE or OP0.
16225
16226 If the expression can be simplified to a constant, then return
16227 the constant. If the expression cannot be simplified to a
16228 constant, then return NULL_TREE.
16229
16230 tree
16231 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16232 {
16233 tree tem = fold_unary (code, type, op0);
16234 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16235 }
16236
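/* An illustrative sketch of the *_to_constant entry points above:
   they return the folded constant or NULL_TREE, never a partially
   simplified tree.  The helper is hypothetical.  */

static tree
example_constant_only_fold (void)
{
  tree a = build_int_cst (integer_type_node, 20);
  tree b = build_int_cst (integer_type_node, 22);
  /* Yields the INTEGER_CST 42; with a non-constant operand the same
     call would yield NULL_TREE.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);
}
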
16237 /* If EXP represents referencing an element in a constant string
16238 (either via pointer arithmetic or array indexing), return the
16239 tree representing the value accessed, otherwise return NULL. */
16240
16241 tree
16242 fold_read_from_constant_string (tree exp)
16243 {
16244 if ((TREE_CODE (exp) == INDIRECT_REF
16245 || TREE_CODE (exp) == ARRAY_REF)
16246 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16247 {
16248 tree exp1 = TREE_OPERAND (exp, 0);
16249 tree index;
16250 tree string;
16251 location_t loc = EXPR_LOCATION (exp);
16252
16253 if (TREE_CODE (exp) == INDIRECT_REF)
16254 string = string_constant (exp1, &index);
16255 else
16256 {
16257 tree low_bound = array_ref_low_bound (exp);
16258 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16259
16260 /* Optimize the special case of a zero lower bound.
16261
16262 We convert the low_bound to sizetype to avoid some problems
16263 with constant folding. (E.g. suppose the lower bound is 1,
16264 and its mode is QI. Without the conversion, (ARRAY
16265 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16266 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16267 if (! integer_zerop (low_bound))
16268 index = size_diffop_loc (loc, index,
16269 fold_convert_loc (loc, sizetype, low_bound));
16270
16271 string = exp1;
16272 }
16273
16274 if (string
16275 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16276 && TREE_CODE (string) == STRING_CST
16277 && TREE_CODE (index) == INTEGER_CST
16278 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16279 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16280 == MODE_INT)
16281 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16282 return build_int_cst_type (TREE_TYPE (exp),
16283 (TREE_STRING_POINTER (string)
16284 [TREE_INT_CST_LOW (index)]));
16285 }
16286 return NULL;
16287 }
16288
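/* An illustrative sketch; EXP is assumed to be a reference such as
   "abc"[1] -- an ARRAY_REF or INDIRECT_REF into a STRING_CST with a
   constant index -- and the helper is hypothetical.  */

static tree
example_read_string_elt (tree exp)
{
  tree c = fold_read_from_constant_string (exp);
  /* On success C is an INTEGER_CST holding the character ('b' for
     "abc"[1]); otherwise fall back to the original reference.  */
  return c ? c : exp;
}
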
16289 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16290 an integer constant, real, or fixed-point constant.
16291
16292 TYPE is the type of the result. */
16293
16294 static tree
16295 fold_negate_const (tree arg0, tree type)
16296 {
16297 tree t = NULL_TREE;
16298
16299 switch (TREE_CODE (arg0))
16300 {
16301 case INTEGER_CST:
16302 {
16303 double_int val = tree_to_double_int (arg0);
16304 bool overflow;
16305 val = val.neg_with_overflow (&overflow);
16306 t = force_fit_type_double (type, val, 1,
16307 (overflow | TREE_OVERFLOW (arg0))
16308 && !TYPE_UNSIGNED (type));
16309 break;
16310 }
16311
16312 case REAL_CST:
16313 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16314 break;
16315
16316 case FIXED_CST:
16317 {
16318 FIXED_VALUE_TYPE f;
16319 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16320 &(TREE_FIXED_CST (arg0)), NULL,
16321 TYPE_SATURATING (type));
16322 t = build_fixed (type, f);
16323 /* Propagate overflow flags. */
16324 if (overflow_p | TREE_OVERFLOW (arg0))
16325 TREE_OVERFLOW (t) = 1;
16326 break;
16327 }
16328
16329 default:
16330 gcc_unreachable ();
16331 }
16332
16333 return t;
16334 }
16335
16336 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16337 an integer constant or real constant.
16338
16339 TYPE is the type of the result. */
16340
16341 tree
16342 fold_abs_const (tree arg0, tree type)
16343 {
16344 tree t = NULL_TREE;
16345
16346 switch (TREE_CODE (arg0))
16347 {
16348 case INTEGER_CST:
16349 {
16350 double_int val = tree_to_double_int (arg0);
16351
16352 /* If the value is unsigned or non-negative, then the absolute value
16353 is the same as the ordinary value. */
16354 if (TYPE_UNSIGNED (type)
16355 || !val.is_negative ())
16356 t = arg0;
16357
16358 /* If the value is negative, then the absolute value is
16359 its negation. */
16360 else
16361 {
16362 bool overflow;
16363 val = val.neg_with_overflow (&overflow);
16364 t = force_fit_type_double (type, val, -1,
16365 overflow | TREE_OVERFLOW (arg0));
16366 }
16367 }
16368 break;
16369
16370 case REAL_CST:
16371 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16372 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16373 else
16374 t = arg0;
16375 break;
16376
16377 default:
16378 gcc_unreachable ();
16379 }
16380
16381 return t;
16382 }
16383
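/* An illustrative sketch of the overflow handling above: negating
   the most negative integer wraps back to itself, so the absolute
   value is flagged rather than silently wrong.  */

static tree
example_abs_int_min (void)
{
  tree m = TYPE_MIN_VALUE (integer_type_node);
  tree a = fold_abs_const (m, integer_type_node);
  /* A has the same (negative) value as M, with TREE_OVERFLOW set.  */
  return a;
}
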
16384 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16385 constant. TYPE is the type of the result. */
16386
16387 static tree
16388 fold_not_const (const_tree arg0, tree type)
16389 {
16390 double_int val;
16391
16392 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16393
16394 val = ~tree_to_double_int (arg0);
16395 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16396 }
16397
16398 /* Given CODE, a relational operator, the target type, TYPE and two
16399 constant operands OP0 and OP1, return the result of the
16400 relational operation. If the result is not a compile time
16401 constant, then return NULL_TREE. */
16402
16403 static tree
16404 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16405 {
16406 int result, invert;
16407
16408 /* From here on, the only cases we handle are when the result is
16409 known to be a constant. */
16410
16411 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16412 {
16413 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16414 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16415
16416 /* Handle the cases where either operand is a NaN. */
16417 if (real_isnan (c0) || real_isnan (c1))
16418 {
16419 switch (code)
16420 {
16421 case EQ_EXPR:
16422 case ORDERED_EXPR:
16423 result = 0;
16424 break;
16425
16426 case NE_EXPR:
16427 case UNORDERED_EXPR:
16428 case UNLT_EXPR:
16429 case UNLE_EXPR:
16430 case UNGT_EXPR:
16431 case UNGE_EXPR:
16432 case UNEQ_EXPR:
16433 result = 1;
16434 break;
16435
16436 case LT_EXPR:
16437 case LE_EXPR:
16438 case GT_EXPR:
16439 case GE_EXPR:
16440 case LTGT_EXPR:
16441 if (flag_trapping_math)
16442 return NULL_TREE;
16443 result = 0;
16444 break;
16445
16446 default:
16447 gcc_unreachable ();
16448 }
16449
16450 return constant_boolean_node (result, type);
16451 }
16452
16453 return constant_boolean_node (real_compare (code, c0, c1), type);
16454 }
16455
16456 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16457 {
16458 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16459 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16460 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16461 }
16462
16463 /* Handle equality/inequality of complex constants. */
16464 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16465 {
16466 tree rcond = fold_relational_const (code, type,
16467 TREE_REALPART (op0),
16468 TREE_REALPART (op1));
16469 tree icond = fold_relational_const (code, type,
16470 TREE_IMAGPART (op0),
16471 TREE_IMAGPART (op1));
16472 if (code == EQ_EXPR)
16473 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16474 else if (code == NE_EXPR)
16475 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16476 else
16477 return NULL_TREE;
16478 }
16479
16480 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16481 {
16482 unsigned count = VECTOR_CST_NELTS (op0);
16483 tree *elts = XALLOCAVEC (tree, count);
16484 gcc_assert (VECTOR_CST_NELTS (op1) == count
16485 && TYPE_VECTOR_SUBPARTS (type) == count);
16486
16487 for (unsigned i = 0; i < count; i++)
16488 {
16489 tree elem_type = TREE_TYPE (type);
16490 tree elem0 = VECTOR_CST_ELT (op0, i);
16491 tree elem1 = VECTOR_CST_ELT (op1, i);
16492
16493 tree tem = fold_relational_const (code, elem_type,
16494 elem0, elem1);
16495
16496 if (tem == NULL_TREE)
16497 return NULL_TREE;
16498
16499 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16500 }
16501
16502 return build_vector (type, elts);
16503 }
16504
16505 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16506
16507 To compute GT, swap the arguments and do LT.
16508 To compute GE, do LT and invert the result.
16509 To compute LE, swap the arguments, do LT and invert the result.
16510 To compute NE, do EQ and invert the result.
16511
16512 Therefore, the code below must handle only EQ and LT. */
16513
16514 if (code == LE_EXPR || code == GT_EXPR)
16515 {
16516 tree tem = op0;
16517 op0 = op1;
16518 op1 = tem;
16519 code = swap_tree_comparison (code);
16520 }
16521
16522 /* Note that it is safe to invert for real values here because we
16523 have already handled the one case where it matters. */
16524
16525 invert = 0;
16526 if (code == NE_EXPR || code == GE_EXPR)
16527 {
16528 invert = 1;
16529 code = invert_tree_comparison (code, false);
16530 }
16531
16532 /* Compute a result for LT or EQ if the arguments permit;
16533 otherwise return NULL_TREE. */
16534 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16535 {
16536 if (code == EQ_EXPR)
16537 result = tree_int_cst_equal (op0, op1);
16538 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16539 result = INT_CST_LT_UNSIGNED (op0, op1);
16540 else
16541 result = INT_CST_LT (op0, op1);
16542 }
16543 else
16544 return NULL_TREE;
16545
16546 if (invert)
16547 result ^= 1;
16548 return constant_boolean_node (result, type);
16549 }
16550
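/* An illustrative sketch of the NaN rules above: a quiet NaN compares
   unequal to everything, including itself, and that is known at
   compile time.  The helper is hypothetical.  */

static tree
example_nan_self_compare (void)
{
  REAL_VALUE_TYPE r;
  tree nan;

  real_nan (&r, "", 1, TYPE_MODE (double_type_node));
  nan = build_real (double_type_node, r);
  /* Folds to boolean_false_node.  NE_EXPR would fold to true, while
     LT_EXPR is left alone when flag_trapping_math is set.  */
  return fold_binary (EQ_EXPR, boolean_type_node, nan, nan);
}
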
16551 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16552 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16553 itself. */
16554
16555 tree
16556 fold_build_cleanup_point_expr (tree type, tree expr)
16557 {
16558 /* If the expression does not have side effects then we don't have to wrap
16559 it with a cleanup point expression. */
16560 if (!TREE_SIDE_EFFECTS (expr))
16561 return expr;
16562
16563 /* If the expression is a return, check whether the expression inside
16564 the return, or the right-hand side of the modify expression inside
16565 the return, has side effects. If either has none, we don't need to
16566 wrap the expression in a cleanup point expression. Note we don't check
16567 the left-hand side of the modify because it should always be a return decl. */
16568 if (TREE_CODE (expr) == RETURN_EXPR)
16569 {
16570 tree op = TREE_OPERAND (expr, 0);
16571 if (!op || !TREE_SIDE_EFFECTS (op))
16572 return expr;
16573 op = TREE_OPERAND (op, 1);
16574 if (!TREE_SIDE_EFFECTS (op))
16575 return expr;
16576 }
16577
16578 return build1 (CLEANUP_POINT_EXPR, type, expr);
16579 }
16580
16581 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16582 of an indirection through OP0, or NULL_TREE if no simplification is
16583 possible. */
16584
16585 tree
16586 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16587 {
16588 tree sub = op0;
16589 tree subtype;
16590
16591 STRIP_NOPS (sub);
16592 subtype = TREE_TYPE (sub);
16593 if (!POINTER_TYPE_P (subtype))
16594 return NULL_TREE;
16595
16596 if (TREE_CODE (sub) == ADDR_EXPR)
16597 {
16598 tree op = TREE_OPERAND (sub, 0);
16599 tree optype = TREE_TYPE (op);
16600 /* *&CONST_DECL -> to the value of the const decl. */
16601 if (TREE_CODE (op) == CONST_DECL)
16602 return DECL_INITIAL (op);
16603 /* *&p => p; make sure to handle *&"str"[cst] here. */
16604 if (type == optype)
16605 {
16606 tree fop = fold_read_from_constant_string (op);
16607 if (fop)
16608 return fop;
16609 else
16610 return op;
16611 }
16612 /* *(foo *)&fooarray => fooarray[0] */
16613 else if (TREE_CODE (optype) == ARRAY_TYPE
16614 && type == TREE_TYPE (optype)
16615 && (!in_gimple_form
16616 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16617 {
16618 tree type_domain = TYPE_DOMAIN (optype);
16619 tree min_val = size_zero_node;
16620 if (type_domain && TYPE_MIN_VALUE (type_domain))
16621 min_val = TYPE_MIN_VALUE (type_domain);
16622 if (in_gimple_form
16623 && TREE_CODE (min_val) != INTEGER_CST)
16624 return NULL_TREE;
16625 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16626 NULL_TREE, NULL_TREE);
16627 }
16628 /* *(foo *)&complexfoo => __real__ complexfoo */
16629 else if (TREE_CODE (optype) == COMPLEX_TYPE
16630 && type == TREE_TYPE (optype))
16631 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16632 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16633 else if (TREE_CODE (optype) == VECTOR_TYPE
16634 && type == TREE_TYPE (optype))
16635 {
16636 tree part_width = TYPE_SIZE (type);
16637 tree index = bitsize_int (0);
16638 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16639 }
16640 }
16641
16642 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16643 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16644 {
16645 tree op00 = TREE_OPERAND (sub, 0);
16646 tree op01 = TREE_OPERAND (sub, 1);
16647
16648 STRIP_NOPS (op00);
16649 if (TREE_CODE (op00) == ADDR_EXPR)
16650 {
16651 tree op00type;
16652 op00 = TREE_OPERAND (op00, 0);
16653 op00type = TREE_TYPE (op00);
16654
16655 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16656 if (TREE_CODE (op00type) == VECTOR_TYPE
16657 && type == TREE_TYPE (op00type))
16658 {
16659 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16660 tree part_width = TYPE_SIZE (type);
16661 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16662 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16663 tree index = bitsize_int (indexi);
16664
16665 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16666 return fold_build3_loc (loc,
16667 BIT_FIELD_REF, type, op00,
16668 part_width, index);
16669
16670 }
16671 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16672 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16673 && type == TREE_TYPE (op00type))
16674 {
16675 tree size = TYPE_SIZE_UNIT (type);
16676 if (tree_int_cst_equal (size, op01))
16677 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16678 }
16679 /* ((foo *)&fooarray)[1] => fooarray[1] */
16680 else if (TREE_CODE (op00type) == ARRAY_TYPE
16681 && type == TREE_TYPE (op00type))
16682 {
16683 tree type_domain = TYPE_DOMAIN (op00type);
16684 tree min_val = size_zero_node;
16685 if (type_domain && TYPE_MIN_VALUE (type_domain))
16686 min_val = TYPE_MIN_VALUE (type_domain);
16687 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16688 TYPE_SIZE_UNIT (type));
16689 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16690 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16691 NULL_TREE, NULL_TREE);
16692 }
16693 }
16694 }
16695
16696 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16697 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16698 && type == TREE_TYPE (TREE_TYPE (subtype))
16699 && (!in_gimple_form
16700 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16701 {
16702 tree type_domain;
16703 tree min_val = size_zero_node;
16704 sub = build_fold_indirect_ref_loc (loc, sub);
16705 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16706 if (type_domain && TYPE_MIN_VALUE (type_domain))
16707 min_val = TYPE_MIN_VALUE (type_domain);
16708 if (in_gimple_form
16709 && TREE_CODE (min_val) != INTEGER_CST)
16710 return NULL_TREE;
16711 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16712 NULL_TREE);
16713 }
16714
16715 return NULL_TREE;
16716 }
16717
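/* An illustrative sketch (hypothetical helper) of the first ADDR_EXPR
   case above: A is assumed to be a decl of type int[N], and the
   indirection folds to A[0] instead of building an INDIRECT_REF.  */

static tree
example_fold_array_deref (location_t loc, tree a)
{
  tree ptr_type = build_pointer_type (integer_type_node);
  tree addr = fold_convert_loc (loc, ptr_type,
                                build_fold_addr_expr_loc (loc, a));
  return build_fold_indirect_ref_loc (loc, addr);
}
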
16718 /* Builds an expression for an indirection through T, simplifying some
16719 cases. */
16720
16721 tree
16722 build_fold_indirect_ref_loc (location_t loc, tree t)
16723 {
16724 tree type = TREE_TYPE (TREE_TYPE (t));
16725 tree sub = fold_indirect_ref_1 (loc, type, t);
16726
16727 if (sub)
16728 return sub;
16729
16730 return build1_loc (loc, INDIRECT_REF, type, t);
16731 }
16732
16733 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16734
16735 tree
16736 fold_indirect_ref_loc (location_t loc, tree t)
16737 {
16738 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16739
16740 if (sub)
16741 return sub;
16742 else
16743 return t;
16744 }
16745
16746 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16747 whose result is ignored. The type of the returned tree need not be
16748 the same as the original expression. */
16749
16750 tree
16751 fold_ignored_result (tree t)
16752 {
16753 if (!TREE_SIDE_EFFECTS (t))
16754 return integer_zero_node;
16755
16756 for (;;)
16757 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16758 {
16759 case tcc_unary:
16760 t = TREE_OPERAND (t, 0);
16761 break;
16762
16763 case tcc_binary:
16764 case tcc_comparison:
16765 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16766 t = TREE_OPERAND (t, 0);
16767 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16768 t = TREE_OPERAND (t, 1);
16769 else
16770 return t;
16771 break;
16772
16773 case tcc_expression:
16774 switch (TREE_CODE (t))
16775 {
16776 case COMPOUND_EXPR:
16777 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16778 return t;
16779 t = TREE_OPERAND (t, 0);
16780 break;
16781
16782 case COND_EXPR:
16783 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16784 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16785 return t;
16786 t = TREE_OPERAND (t, 0);
16787 break;
16788
16789 default:
16790 return t;
16791 }
16792 break;
16793
16794 default:
16795 return t;
16796 }
16797 }
16798
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, there is
     nothing to do.  Only perform this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

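/* A minimal stand-alone sketch of the power-of-two path above, over
   plain unsigned integers (hypothetical helper, not part of GCC).
   Assuming DIVISOR is a power of two, adding DIVISOR - 1 and masking
   with -DIVISOR rounds up: round_up_sketch (10, 8) == 16, and
   round_up_sketch (16, 8) == 16.  */

static inline unsigned long
round_up_sketch (unsigned long value, unsigned long divisor)
{
  return (value + divisor - 1) & -divisor;
}
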
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, there is
     nothing to do.  Only perform this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

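/* Likewise for rounding down (hypothetical helper, not part of GCC):
   with a power-of-two DIVISOR, clearing the low bits suffices, e.g.
   round_down_sketch (13, 8) == 8.  */

static inline unsigned long
round_down_sketch (unsigned long value, unsigned long divisor)
{
  return value & -divisor;
}
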
/* Returns a pointer to the base of the object addressed by EXP and
   extracts the offset of the access: the constant bit offset is stored
   in *PBITPOS and any variable byte offset in *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

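/* Worked example, not part of the original file: for

     struct S { int i; int j; } s;

   and EXP = &s.j, the ADDR_EXPR path above returns &s as the core,
   stores the constant bit offset of j in *PBITPOS (32 with 4-byte int)
   and leaves *POFFSET as NULL_TREE.  For EXP = &a[i] the variable part
   of the offset, i * sizeof (*a), comes back in *POFFSET instead.  */
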
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

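/* Worked example, not part of the original file: with

     int a[10];

   E1 = &a[3] and E2 = &a[1] share the core &a, the bit offsets are
   byte-aligned, and their difference is 2 * sizeof (int), so the
   function returns true and stores 8 in *DIFF on a target with 4-byte
   int.  If either offset is variable, e.g. E1 = &a[i], it returns
   false.  */
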
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip the copysign function call and return its first
               argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
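
/* Examples, not part of the original file, of what the cases above
   strip when the caller does not care about the sign of the result
   (a NULL_TREE return means no simplification was possible):

     -x              => x
     fabs (x)        => x
     -x * fabs (y)   => x * y    (unless sign-dependent rounding)
     copysign (x, y) => x        (Y is kept only for side effects)
     sin (-x)        => sin (x)  (sin is "odd" per negate_mathfn_p)  */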