/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-ssa.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
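
/* One way to read the encoding above (illustrative only): bit 0
   stands for "less", bit 1 for "equal", bit 2 for "greater" and
   bit 3 for "unordered", so each code is the OR of the outcomes for
   which the comparison holds:

     COMPCODE_LE  == (COMPCODE_LT | COMPCODE_EQ)    (3 == 1 | 2)
     COMPCODE_ORD == (COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT)
     COMPCODE_NE  == (COMPCODE_TRUE ^ COMPCODE_EQ)  (13 == 15 ^ 2)

   With this reading, "a <= b || a > b" combines to
   COMPCODE_LE | COMPCODE_GT == COMPCODE_ORD, the float-safe answer
   (true unless a NaN is involved).  */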

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two, which does
     the correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
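
/* A minimal usage sketch (illustrative only; the constants and the
   sizetype shown are just one possible choice of inputs):

     tree twelve = build_int_cst (sizetype, 12);
     tree four = build_int_cst (sizetype, 4);
     tree q = div_if_zero_remainder (EXACT_DIV_EXPR, twelve, four);
     ... q is an INTEGER_CST holding 3 ...
     tree r = div_if_zero_remainder (EXACT_DIV_EXPR, twelve,
                                     build_int_cst (sizetype, 5));
     ... r is NULL_TREE, since 12 % 5 != 0 ...  */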
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
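
/* Sketch of the intended defer/undefer pairing (illustrative; the
   names res_is_used and stmt are placeholders, not part of this API):

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, a, b);
     ... decide whether RES will actually be used ...
     fold_undefer_overflow_warnings (res_is_used, stmt, 0);

   Passing zero as CODE means "use whatever level was deferred".  */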

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
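
/* Example (illustrative): for a 32-bit signed type the only value the
   predicate above rejects is INT_MIN, whose negation does not fit:

     may_negate_without_overflow_p (build_int_cst (integer_type_node, -5))
       => true
     may_negate_without_overflow_p (TYPE_MIN_VALUE (integer_type_node))
       => false

   (Unsigned types are rejected up front.)  */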

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
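
/* Usage sketch (illustrative):

     negate_expr (build_int_cst (integer_type_node, 7))
       => INTEGER_CST -7        (folded by fold_negate_expr)
     negate_expr (v)            (some VAR_DECL v)
       => NEGATE_EXPR <v>       (no simpler form, so wrap)
     negate_expr (NULL_TREE)
       => NULL_TREE  */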
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
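
/* Worked example (illustrative), with CODE == PLUS_EXPR and
   NEGATE_P == 0:

     IN = a + 4  =>  *litp = 4, *minus_litp = 0, *conp = 0, var = a
     IN = a - 4  =>  *litp = 0, *minus_litp = 4, *conp = 0, var = a
     IN = ~a     =>  *minus_litp = 1, var = -a   (undoes -X - 1 -> ~X)

   The pieces can then be recombined with associate_trees below.  */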

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
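
/* Usage sketch (illustrative):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = int_const_binop (PLUS_EXPR, two, three);
     ... five is an INTEGER_CST holding 5 ...

     int_const_binop (TRUNC_DIV_EXPR, two,
                      build_int_cst (integer_type_node, 0))
       => NULL_TREE (division by zero is never folded)  */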

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!host_integerp (arg2, 1))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
          unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
          unsigned HOST_WIDE_INT innerc
            = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, the compiler always emits VEC_RSHIFT_EXPR;
             for !BYTES_BIG_ENDIAN it picks the first vector element,
             but for BYTES_BIG_ENDIAN the last element of the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
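
/* Worked example for the straightforward complex division above
   (illustrative): (1 + 2i) / (3 + 4i) with flag_complex_method == 0:

     t    = 3*3 + 4*4          = 25
     real = (1*3 + 2*4) / 25   = 11/25 = 0.44
     imag = (2*3 - 1*4) / 25   =  2/25 = 0.08

   so when both operands are COMPLEX_CSTs with REAL_CST parts the
   whole expression folds to 0.44 + 0.08i.  */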

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
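
/* Usage sketch (illustrative; size_binop is the location-less wrapper
   macro around this function):

     tree sz = size_binop (PLUS_EXPR, TYPE_SIZE_UNIT (type),
                           size_int (8));
     ... SZ is an INTEGER_CST whenever TYPE_SIZE_UNIT (type) is one,
     and overflow in the sizetype arithmetic is always tracked ...  */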

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
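
/* Example (illustrative): both operands are unsigned sizetype values,
   but the result is the corresponding signed type, so "underflow" is
   well behaved:

     size_diffop_loc (loc, size_int (4), size_int (12))
       => ssizetype constant -8
     size_diffop_loc (loc, size_int (12), size_int (4))
       => ssizetype constant 8  */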
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
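
/* Saturation example (illustrative), converting to a signed 8-bit
   type whose range is [-128, 127]:

      200.0  =>  127, TREE_OVERFLOW set
     -300.0  => -128, TREE_OVERFLOW set
      NaN    =>    0, TREE_OVERFLOW set
      42.9   =>   42  (FIX_TRUNC_EXPR truncates toward zero)  */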

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to TEMP when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
1873
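/* Illustrative sketch, not part of the original sources: assuming a
   caller inside this file, fold_convert_const collapses a constant
   conversion at compile time, e.g.

     tree five = build_int_cst (integer_type_node, 5);
     tree r = fold_convert_const (FLOAT_EXPR, double_type_node, five);

   leaves r as a REAL_CST holding 5.0, or NULL_TREE had no constant
   folding been possible.  */
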
1874 /* Construct a vector of zero elements of vector type TYPE. */
1875
1876 static tree
1877 build_zero_vector (tree type)
1878 {
1879 tree t;
1880
1881 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1882 return build_vector_from_val (type, t);
1883 }
1884
1885 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1886
1887 bool
1888 fold_convertible_p (const_tree type, const_tree arg)
1889 {
1890 tree orig = TREE_TYPE (arg);
1891
1892 if (type == orig)
1893 return true;
1894
1895 if (TREE_CODE (arg) == ERROR_MARK
1896 || TREE_CODE (type) == ERROR_MARK
1897 || TREE_CODE (orig) == ERROR_MARK)
1898 return false;
1899
1900 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1901 return true;
1902
1903 switch (TREE_CODE (type))
1904 {
1905 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1906 case POINTER_TYPE: case REFERENCE_TYPE:
1907 case OFFSET_TYPE:
1908 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1909 || TREE_CODE (orig) == OFFSET_TYPE)
1910 return true;
1911 return (TREE_CODE (orig) == VECTOR_TYPE
1912 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1913
1914 case REAL_TYPE:
1915 case FIXED_POINT_TYPE:
1916 case COMPLEX_TYPE:
1917 case VECTOR_TYPE:
1918 case VOID_TYPE:
1919 return TREE_CODE (type) == TREE_CODE (orig);
1920
1921 default:
1922 return false;
1923 }
1924 }
1925
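/* Illustrative sketch, not part of the original sources: by the rules
   above, an enumeration converts to an integer type with a plain
   NOP_EXPR, while an integer does not convert to an aggregate.  For
   hypothetical trees ENUM_ARG and INT_ARG:

     fold_convertible_p (integer_type_node, enum_arg)   ==> true
     fold_convertible_p (some_record_type, int_arg)     ==> false  */
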
1926 /* Convert expression ARG to type TYPE. Used by the middle-end for
1927 simple conversions in preference to calling the front-end's convert. */
1928
1929 tree
1930 fold_convert_loc (location_t loc, tree type, tree arg)
1931 {
1932 tree orig = TREE_TYPE (arg);
1933 tree tem;
1934
1935 if (type == orig)
1936 return arg;
1937
1938 if (TREE_CODE (arg) == ERROR_MARK
1939 || TREE_CODE (type) == ERROR_MARK
1940 || TREE_CODE (orig) == ERROR_MARK)
1941 return error_mark_node;
1942
1943 switch (TREE_CODE (type))
1944 {
1945 case POINTER_TYPE:
1946 case REFERENCE_TYPE:
1947 /* Handle conversions between pointers to different address spaces. */
1948 if (POINTER_TYPE_P (orig)
1949 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1950 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1951 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1952 /* fall through */
1953
1954 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1955 case OFFSET_TYPE:
1956 if (TREE_CODE (arg) == INTEGER_CST)
1957 {
1958 tem = fold_convert_const (NOP_EXPR, type, arg);
1959 if (tem != NULL_TREE)
1960 return tem;
1961 }
1962 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1963 || TREE_CODE (orig) == OFFSET_TYPE)
1964 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1965 if (TREE_CODE (orig) == COMPLEX_TYPE)
1966 return fold_convert_loc (loc, type,
1967 fold_build1_loc (loc, REALPART_EXPR,
1968 TREE_TYPE (orig), arg));
1969 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1970 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1971 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1972
1973 case REAL_TYPE:
1974 if (TREE_CODE (arg) == INTEGER_CST)
1975 {
1976 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1977 if (tem != NULL_TREE)
1978 return tem;
1979 }
1980 else if (TREE_CODE (arg) == REAL_CST)
1981 {
1982 tem = fold_convert_const (NOP_EXPR, type, arg);
1983 if (tem != NULL_TREE)
1984 return tem;
1985 }
1986 else if (TREE_CODE (arg) == FIXED_CST)
1987 {
1988 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1989 if (tem != NULL_TREE)
1990 return tem;
1991 }
1992
1993 switch (TREE_CODE (orig))
1994 {
1995 case INTEGER_TYPE:
1996 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1997 case POINTER_TYPE: case REFERENCE_TYPE:
1998 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1999
2000 case REAL_TYPE:
2001 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2002
2003 case FIXED_POINT_TYPE:
2004 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2005
2006 case COMPLEX_TYPE:
2007 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2008 return fold_convert_loc (loc, type, tem);
2009
2010 default:
2011 gcc_unreachable ();
2012 }
2013
2014 case FIXED_POINT_TYPE:
2015 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2016 || TREE_CODE (arg) == REAL_CST)
2017 {
2018 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2019 if (tem != NULL_TREE)
2020 goto fold_convert_exit;
2021 }
2022
2023 switch (TREE_CODE (orig))
2024 {
2025 case FIXED_POINT_TYPE:
2026 case INTEGER_TYPE:
2027 case ENUMERAL_TYPE:
2028 case BOOLEAN_TYPE:
2029 case REAL_TYPE:
2030 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2031
2032 case COMPLEX_TYPE:
2033 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2034 return fold_convert_loc (loc, type, tem);
2035
2036 default:
2037 gcc_unreachable ();
2038 }
2039
2040 case COMPLEX_TYPE:
2041 switch (TREE_CODE (orig))
2042 {
2043 case INTEGER_TYPE:
2044 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2045 case POINTER_TYPE: case REFERENCE_TYPE:
2046 case REAL_TYPE:
2047 case FIXED_POINT_TYPE:
2048 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2049 fold_convert_loc (loc, TREE_TYPE (type), arg),
2050 fold_convert_loc (loc, TREE_TYPE (type),
2051 integer_zero_node));
2052 case COMPLEX_TYPE:
2053 {
2054 tree rpart, ipart;
2055
2056 if (TREE_CODE (arg) == COMPLEX_EXPR)
2057 {
2058 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2059 TREE_OPERAND (arg, 0));
2060 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2061 TREE_OPERAND (arg, 1));
2062 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2063 }
2064
2065 arg = save_expr (arg);
2066 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2067 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2068 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2069 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2070 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2071 }
2072
2073 default:
2074 gcc_unreachable ();
2075 }
2076
2077 case VECTOR_TYPE:
2078 if (integer_zerop (arg))
2079 return build_zero_vector (type);
2080 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2081 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2082 || TREE_CODE (orig) == VECTOR_TYPE);
2083 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2084
2085 case VOID_TYPE:
2086 tem = fold_ignored_result (arg);
2087 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2088
2089 default:
2090 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2091 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2092 gcc_unreachable ();
2093 }
2094 fold_convert_exit:
2095 protected_set_expr_location_unshare (tem, loc);
2096 return tem;
2097 }
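
/* Illustrative sketch, not part of the original sources: converting a
   scalar to a complex type takes the COMPLEX_TYPE arm above and pairs
   the converted value with a zero imaginary part.  For a hypothetical
   integer-valued tree INT_ARG,

     fold_convert_loc (UNKNOWN_LOCATION, complex_double_type_node,
		       int_arg)

   builds COMPLEX_EXPR <(double) int_arg, 0.0>.  */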
2098 \f
2099 /* Return false if expr can be assumed not to be an lvalue, true
2100 otherwise. */
2101
2102 static bool
2103 maybe_lvalue_p (const_tree x)
2104 {
2105 /* We only need to wrap lvalue tree codes. */
2106 switch (TREE_CODE (x))
2107 {
2108 case VAR_DECL:
2109 case PARM_DECL:
2110 case RESULT_DECL:
2111 case LABEL_DECL:
2112 case FUNCTION_DECL:
2113 case SSA_NAME:
2114
2115 case COMPONENT_REF:
2116 case MEM_REF:
2117 case INDIRECT_REF:
2118 case ARRAY_REF:
2119 case ARRAY_RANGE_REF:
2120 case BIT_FIELD_REF:
2121 case OBJ_TYPE_REF:
2122
2123 case REALPART_EXPR:
2124 case IMAGPART_EXPR:
2125 case PREINCREMENT_EXPR:
2126 case PREDECREMENT_EXPR:
2127 case SAVE_EXPR:
2128 case TRY_CATCH_EXPR:
2129 case WITH_CLEANUP_EXPR:
2130 case COMPOUND_EXPR:
2131 case MODIFY_EXPR:
2132 case TARGET_EXPR:
2133 case COND_EXPR:
2134 case BIND_EXPR:
2135 break;
2136
2137 default:
2138 /* Assume the worst for front-end tree codes. */
2139 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2140 break;
2141 return false;
2142 }
2143
2144 return true;
2145 }
2146
2147 /* Return an expr equal to X but certainly not valid as an lvalue. */
2148
2149 tree
2150 non_lvalue_loc (location_t loc, tree x)
2151 {
2152 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2153 us. */
2154 if (in_gimple_form)
2155 return x;
2156
2157 if (! maybe_lvalue_p (x))
2158 return x;
2159 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2160 }
2161
2162 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2163 Zero means allow extended lvalues. */
2164
2165 int pedantic_lvalues;
2166
2167 /* When pedantic, return an expr equal to X but certainly not valid as a
2168 pedantic lvalue. Otherwise, return X. */
2169
2170 static tree
2171 pedantic_non_lvalue_loc (location_t loc, tree x)
2172 {
2173 if (pedantic_lvalues)
2174 return non_lvalue_loc (loc, x);
2175
2176 return protected_set_expr_location_unshare (x, loc);
2177 }
2178 \f
2179 /* Given a tree comparison code, return the code that is the logical inverse.
2180 It is generally not safe to do this for floating-point comparisons, except
2181 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2182 ERROR_MARK in this case. */
2183
2184 enum tree_code
2185 invert_tree_comparison (enum tree_code code, bool honor_nans)
2186 {
2187 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2188 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2189 return ERROR_MARK;
2190
2191 switch (code)
2192 {
2193 case EQ_EXPR:
2194 return NE_EXPR;
2195 case NE_EXPR:
2196 return EQ_EXPR;
2197 case GT_EXPR:
2198 return honor_nans ? UNLE_EXPR : LE_EXPR;
2199 case GE_EXPR:
2200 return honor_nans ? UNLT_EXPR : LT_EXPR;
2201 case LT_EXPR:
2202 return honor_nans ? UNGE_EXPR : GE_EXPR;
2203 case LE_EXPR:
2204 return honor_nans ? UNGT_EXPR : GT_EXPR;
2205 case LTGT_EXPR:
2206 return UNEQ_EXPR;
2207 case UNEQ_EXPR:
2208 return LTGT_EXPR;
2209 case UNGT_EXPR:
2210 return LE_EXPR;
2211 case UNGE_EXPR:
2212 return LT_EXPR;
2213 case UNLT_EXPR:
2214 return GE_EXPR;
2215 case UNLE_EXPR:
2216 return GT_EXPR;
2217 case ORDERED_EXPR:
2218 return UNORDERED_EXPR;
2219 case UNORDERED_EXPR:
2220 return ORDERED_EXPR;
2221 default:
2222 gcc_unreachable ();
2223 }
2224 }
2225
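/* Illustrative sketch, not part of the original sources: when NaNs are
   honored, the inverse of an ordered comparison is the matching
   unordered one, because a < b and a >= b are both false on a NaN:

     invert_tree_comparison (LT_EXPR, false)   ==> GE_EXPR
     invert_tree_comparison (LT_EXPR, true)    ==> UNGE_EXPR

   and with flag_trapping_math set the second call would instead
   return ERROR_MARK, per the early exit above.  */
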
2226 /* Similar, but return the comparison that results if the operands are
2227 swapped. This is safe for floating-point. */
2228
2229 enum tree_code
2230 swap_tree_comparison (enum tree_code code)
2231 {
2232 switch (code)
2233 {
2234 case EQ_EXPR:
2235 case NE_EXPR:
2236 case ORDERED_EXPR:
2237 case UNORDERED_EXPR:
2238 case LTGT_EXPR:
2239 case UNEQ_EXPR:
2240 return code;
2241 case GT_EXPR:
2242 return LT_EXPR;
2243 case GE_EXPR:
2244 return LE_EXPR;
2245 case LT_EXPR:
2246 return GT_EXPR;
2247 case LE_EXPR:
2248 return GE_EXPR;
2249 case UNGT_EXPR:
2250 return UNLT_EXPR;
2251 case UNGE_EXPR:
2252 return UNLE_EXPR;
2253 case UNLT_EXPR:
2254 return UNGT_EXPR;
2255 case UNLE_EXPR:
2256 return UNGE_EXPR;
2257 default:
2258 gcc_unreachable ();
2259 }
2260 }
2261
2262
2263 /* Convert a comparison tree code from an enum tree_code representation
2264 into a compcode bit-based encoding. This function is the inverse of
2265 compcode_to_comparison. */
2266
2267 static enum comparison_code
2268 comparison_to_compcode (enum tree_code code)
2269 {
2270 switch (code)
2271 {
2272 case LT_EXPR:
2273 return COMPCODE_LT;
2274 case EQ_EXPR:
2275 return COMPCODE_EQ;
2276 case LE_EXPR:
2277 return COMPCODE_LE;
2278 case GT_EXPR:
2279 return COMPCODE_GT;
2280 case NE_EXPR:
2281 return COMPCODE_NE;
2282 case GE_EXPR:
2283 return COMPCODE_GE;
2284 case ORDERED_EXPR:
2285 return COMPCODE_ORD;
2286 case UNORDERED_EXPR:
2287 return COMPCODE_UNORD;
2288 case UNLT_EXPR:
2289 return COMPCODE_UNLT;
2290 case UNEQ_EXPR:
2291 return COMPCODE_UNEQ;
2292 case UNLE_EXPR:
2293 return COMPCODE_UNLE;
2294 case UNGT_EXPR:
2295 return COMPCODE_UNGT;
2296 case LTGT_EXPR:
2297 return COMPCODE_LTGT;
2298 case UNGE_EXPR:
2299 return COMPCODE_UNGE;
2300 default:
2301 gcc_unreachable ();
2302 }
2303 }
2304
2305 /* Convert a compcode bit-based encoding of a comparison operator back
2306 to GCC's enum tree_code representation. This function is the
2307 inverse of comparison_to_compcode. */
2308
2309 static enum tree_code
2310 compcode_to_comparison (enum comparison_code code)
2311 {
2312 switch (code)
2313 {
2314 case COMPCODE_LT:
2315 return LT_EXPR;
2316 case COMPCODE_EQ:
2317 return EQ_EXPR;
2318 case COMPCODE_LE:
2319 return LE_EXPR;
2320 case COMPCODE_GT:
2321 return GT_EXPR;
2322 case COMPCODE_NE:
2323 return NE_EXPR;
2324 case COMPCODE_GE:
2325 return GE_EXPR;
2326 case COMPCODE_ORD:
2327 return ORDERED_EXPR;
2328 case COMPCODE_UNORD:
2329 return UNORDERED_EXPR;
2330 case COMPCODE_UNLT:
2331 return UNLT_EXPR;
2332 case COMPCODE_UNEQ:
2333 return UNEQ_EXPR;
2334 case COMPCODE_UNLE:
2335 return UNLE_EXPR;
2336 case COMPCODE_UNGT:
2337 return UNGT_EXPR;
2338 case COMPCODE_LTGT:
2339 return LTGT_EXPR;
2340 case COMPCODE_UNGE:
2341 return UNGE_EXPR;
2342 default:
2343 gcc_unreachable ();
2344 }
2345 }
2346
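/* Illustrative sketch, not part of the original sources: because the
   encoding dedicates one bit each to LT, EQ, GT and UNORD, logical
   combinations of comparisons reduce to bit operations:

     comparison_to_compcode (LT_EXPR) | comparison_to_compcode (EQ_EXPR)
       == COMPCODE_LE
     comparison_to_compcode (LE_EXPR) & comparison_to_compcode (GE_EXPR)
       == COMPCODE_EQ

   combine_comparisons below relies on exactly this property.  */
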
2347 /* Return a tree for the comparison which is the combination of
2348 doing the AND or OR (depending on CODE) of the two operations LCODE
2349 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2350 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2351 if this makes the transformation invalid. */
2352
2353 tree
2354 combine_comparisons (location_t loc,
2355 enum tree_code code, enum tree_code lcode,
2356 enum tree_code rcode, tree truth_type,
2357 tree ll_arg, tree lr_arg)
2358 {
2359 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2360 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2361 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2362 int compcode;
2363
2364 switch (code)
2365 {
2366 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2367 compcode = lcompcode & rcompcode;
2368 break;
2369
2370 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2371 compcode = lcompcode | rcompcode;
2372 break;
2373
2374 default:
2375 return NULL_TREE;
2376 }
2377
2378 if (!honor_nans)
2379 {
2380 /* Eliminate unordered comparisons, as well as LTGT and ORD
2381 which are not used unless the mode has NaNs. */
2382 compcode &= ~COMPCODE_UNORD;
2383 if (compcode == COMPCODE_LTGT)
2384 compcode = COMPCODE_NE;
2385 else if (compcode == COMPCODE_ORD)
2386 compcode = COMPCODE_TRUE;
2387 }
2388 else if (flag_trapping_math)
2389 {
2390 /* Check that the original operation and the optimized ones will trap
2391 under the same condition. */
2392 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2393 && (lcompcode != COMPCODE_EQ)
2394 && (lcompcode != COMPCODE_ORD);
2395 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2396 && (rcompcode != COMPCODE_EQ)
2397 && (rcompcode != COMPCODE_ORD);
2398 bool trap = (compcode & COMPCODE_UNORD) == 0
2399 && (compcode != COMPCODE_EQ)
2400 && (compcode != COMPCODE_ORD);
2401
2402 /* In a short-circuited boolean expression the LHS might be
2403 such that the RHS, if evaluated, will never trap. For
2404 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2405 if neither x nor y is NaN. (This is a mixed blessing: for
2406 example, the expression above will never trap, hence
2407 optimizing it to x < y would be invalid). */
2408 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2409 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2410 rtrap = false;
2411
2412 /* If the comparison was short-circuited, and only the RHS
2413 trapped, we may now generate a spurious trap. */
2414 if (rtrap && !ltrap
2415 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2416 return NULL_TREE;
2417
2418 /* If we changed the conditions that cause a trap, we lose. */
2419 if ((ltrap || rtrap) != trap)
2420 return NULL_TREE;
2421 }
2422
2423 if (compcode == COMPCODE_TRUE)
2424 return constant_boolean_node (true, truth_type);
2425 else if (compcode == COMPCODE_FALSE)
2426 return constant_boolean_node (false, truth_type);
2427 else
2428 {
2429 enum tree_code tcode;
2430
2431 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2432 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2433 }
2434 }
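
/* Illustrative sketch, not part of the original sources: for
   hypothetical operands A and B, folding a < b || a == b into a single
   comparison looks like

     tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ORIF_EXPR,
				   LT_EXPR, EQ_EXPR, boolean_type_node,
				   a, b);

   and t comes back as the tree a <= b, or as NULL_TREE when the
   NaN/trap checks above forbid the transformation.  */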
2435 \f
2436 /* Return nonzero if two operands (typically of the same tree node)
2437 are necessarily equal. If either argument has side-effects this
2438 function returns zero. FLAGS modifies behavior as follows:
2439
2440 If OEP_ONLY_CONST is set, only return nonzero for constants.
2441 This function tests whether the operands are indistinguishable;
2442 it does not test whether they are equal using C's == operation.
2443 The distinction is important for IEEE floating point, because
2444 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2445 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2446
2447 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2448 even though it may hold multiple values during a function.
2449 This is because a GCC tree node guarantees that nothing else is
2450 executed between the evaluation of its "operands" (which may often
2451 be evaluated in arbitrary order). Hence if the operands themselves
2452 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2453 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2454 unset means assuming isochronic (or instantaneous) tree equivalence.
2455 Unless comparing arbitrary expression trees, such as from different
2456 statements, this flag can usually be left unset.
2457
2458 If OEP_PURE_SAME is set, then pure functions with identical arguments
2459 are considered the same. It is used when the caller has other ways
2460 to ensure that global memory is unchanged in between. */
2461
2462 int
2463 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2464 {
2465 /* If either is ERROR_MARK, they aren't equal. */
2466 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2467 || TREE_TYPE (arg0) == error_mark_node
2468 || TREE_TYPE (arg1) == error_mark_node)
2469 return 0;
2470
2471 /* Similar, if either does not have a type (like a released SSA name),
2472 they aren't equal. */
2473 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2474 return 0;
2475
2476 /* Check equality of integer constants before bailing out due to
2477 precision differences. */
2478 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2479 return tree_int_cst_equal (arg0, arg1);
2480
2481 /* If both types don't have the same signedness, then we can't consider
2482 them equal. We must check this before the STRIP_NOPS calls
2483 because they may change the signedness of the arguments. As pointers
2484 strictly don't have a signedness, require either two pointers or
2485 two non-pointers as well. */
2486 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2487 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2488 return 0;
2489
2490 /* We cannot consider pointers to different address space equal. */
2491 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2492 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2493 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2494 return 0;
2495
2496 /* If both types don't have the same precision, then it is not safe
2497 to strip NOPs. */
2498 if (element_precision (TREE_TYPE (arg0))
2499 != element_precision (TREE_TYPE (arg1)))
2500 return 0;
2501
2502 STRIP_NOPS (arg0);
2503 STRIP_NOPS (arg1);
2504
2505 /* In case both args are comparisons but with different comparison
2506 code, try to swap the comparison operands of one arg to produce
2507 a match and compare that variant. */
2508 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2509 && COMPARISON_CLASS_P (arg0)
2510 && COMPARISON_CLASS_P (arg1))
2511 {
2512 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2513
2514 if (TREE_CODE (arg0) == swap_code)
2515 return operand_equal_p (TREE_OPERAND (arg0, 0),
2516 TREE_OPERAND (arg1, 1), flags)
2517 && operand_equal_p (TREE_OPERAND (arg0, 1),
2518 TREE_OPERAND (arg1, 0), flags);
2519 }
2520
2521 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2522 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2523 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2524 return 0;
2525
2526 /* This is needed for conversions and for COMPONENT_REF.
2527 Might as well play it safe and always test this. */
2528 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2529 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2530 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2531 return 0;
2532
2533 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2534 We don't care about side effects in that case because the SAVE_EXPR
2535 takes care of that for us. In all other cases, two expressions are
2536 equal if they have no side effects. If we have two identical
2537 expressions with side effects that should be treated the same due
2538 to the only side effects being identical SAVE_EXPR's, that will
2539 be detected in the recursive calls below.
2540 If we are taking an invariant address of two identical objects
2541 they are necessarily equal as well. */
2542 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2543 && (TREE_CODE (arg0) == SAVE_EXPR
2544 || (flags & OEP_CONSTANT_ADDRESS_OF)
2545 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2546 return 1;
2547
2548 /* Next handle constant cases, those for which we can return 1 even
2549 if ONLY_CONST is set. */
2550 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2551 switch (TREE_CODE (arg0))
2552 {
2553 case INTEGER_CST:
2554 return tree_int_cst_equal (arg0, arg1);
2555
2556 case FIXED_CST:
2557 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2558 TREE_FIXED_CST (arg1));
2559
2560 case REAL_CST:
2561 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2562 TREE_REAL_CST (arg1)))
2563 return 1;
2564
2565
2566 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2567 {
2568 /* If we do not distinguish between signed and unsigned zero,
2569 consider them equal. */
2570 if (real_zerop (arg0) && real_zerop (arg1))
2571 return 1;
2572 }
2573 return 0;
2574
2575 case VECTOR_CST:
2576 {
2577 unsigned i;
2578
2579 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2580 return 0;
2581
2582 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2583 {
2584 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2585 VECTOR_CST_ELT (arg1, i), flags))
2586 return 0;
2587 }
2588 return 1;
2589 }
2590
2591 case COMPLEX_CST:
2592 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2593 flags)
2594 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2595 flags));
2596
2597 case STRING_CST:
2598 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2599 && ! memcmp (TREE_STRING_POINTER (arg0),
2600 TREE_STRING_POINTER (arg1),
2601 TREE_STRING_LENGTH (arg0)));
2602
2603 case ADDR_EXPR:
2604 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2605 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2606 ? OEP_CONSTANT_ADDRESS_OF : 0);
2607 default:
2608 break;
2609 }
2610
2611 if (flags & OEP_ONLY_CONST)
2612 return 0;
2613
2614 /* Define macros to test an operand from arg0 and arg1 for equality and a
2615 variant that allows null and views null as being different from any
2616 non-null value. In the latter case, if either is null, they both
2617 must be; otherwise, do the normal comparison. */
2618 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2619 TREE_OPERAND (arg1, N), flags)
2620
2621 #define OP_SAME_WITH_NULL(N) \
2622 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2623 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2624
2625 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2626 {
2627 case tcc_unary:
2628 /* Two conversions are equal only if signedness and modes match. */
2629 switch (TREE_CODE (arg0))
2630 {
2631 CASE_CONVERT:
2632 case FIX_TRUNC_EXPR:
2633 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2634 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2635 return 0;
2636 break;
2637 default:
2638 break;
2639 }
2640
2641 return OP_SAME (0);
2642
2643
2644 case tcc_comparison:
2645 case tcc_binary:
2646 if (OP_SAME (0) && OP_SAME (1))
2647 return 1;
2648
2649 /* For commutative ops, allow the other order. */
2650 return (commutative_tree_code (TREE_CODE (arg0))
2651 && operand_equal_p (TREE_OPERAND (arg0, 0),
2652 TREE_OPERAND (arg1, 1), flags)
2653 && operand_equal_p (TREE_OPERAND (arg0, 1),
2654 TREE_OPERAND (arg1, 0), flags));
2655
2656 case tcc_reference:
2657 /* If either of the pointer (or reference) expressions we are
2658 dereferencing contain a side effect, these cannot be equal,
2659 but their addresses can be. */
2660 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2661 && (TREE_SIDE_EFFECTS (arg0)
2662 || TREE_SIDE_EFFECTS (arg1)))
2663 return 0;
2664
2665 switch (TREE_CODE (arg0))
2666 {
2667 case INDIRECT_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 return OP_SAME (0);
2670
2671 case REALPART_EXPR:
2672 case IMAGPART_EXPR:
2673 return OP_SAME (0);
2674
2675 case TARGET_MEM_REF:
2676 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2677 /* Require equal extra operands and then fall through to MEM_REF
2678 handling of the two common operands. */
2679 if (!OP_SAME_WITH_NULL (2)
2680 || !OP_SAME_WITH_NULL (3)
2681 || !OP_SAME_WITH_NULL (4))
2682 return 0;
2683 /* Fallthru. */
2684 case MEM_REF:
2685 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2686 /* Require equal access sizes, and similar pointer types.
2687 We can have incomplete types for array references of
2688 variable-sized arrays from the Fortran frontend
2689 though. Also verify the types are compatible. */
2690 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2691 || (TYPE_SIZE (TREE_TYPE (arg0))
2692 && TYPE_SIZE (TREE_TYPE (arg1))
2693 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2694 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2695 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2696 && alias_ptr_types_compatible_p
2697 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2698 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2699 && OP_SAME (0) && OP_SAME (1));
2700
2701 case ARRAY_REF:
2702 case ARRAY_RANGE_REF:
2703 /* Operands 2 and 3 may be null.
2704 Compare the array index by value first if it is constant, as the
2705 indexes may have different types but the same value here. */
2706 if (!OP_SAME (0))
2707 return 0;
2708 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2709 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2710 TREE_OPERAND (arg1, 1))
2711 || OP_SAME (1))
2712 && OP_SAME_WITH_NULL (2)
2713 && OP_SAME_WITH_NULL (3));
2714
2715 case COMPONENT_REF:
2716 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2717 may be NULL when we're called to compare MEM_EXPRs. */
2718 if (!OP_SAME_WITH_NULL (0)
2719 || !OP_SAME (1))
2720 return 0;
2721 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2722 return OP_SAME_WITH_NULL (2);
2723
2724 case BIT_FIELD_REF:
2725 if (!OP_SAME (0))
2726 return 0;
2727 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2728 return OP_SAME (1) && OP_SAME (2);
2729
2730 default:
2731 return 0;
2732 }
2733
2734 case tcc_expression:
2735 switch (TREE_CODE (arg0))
2736 {
2737 case ADDR_EXPR:
2738 case TRUTH_NOT_EXPR:
2739 return OP_SAME (0);
2740
2741 case TRUTH_ANDIF_EXPR:
2742 case TRUTH_ORIF_EXPR:
2743 return OP_SAME (0) && OP_SAME (1);
2744
2745 case FMA_EXPR:
2746 case WIDEN_MULT_PLUS_EXPR:
2747 case WIDEN_MULT_MINUS_EXPR:
2748 if (!OP_SAME (2))
2749 return 0;
2750 /* The multiplication operands are commutative. */
2751 /* FALLTHRU */
2752
2753 case TRUTH_AND_EXPR:
2754 case TRUTH_OR_EXPR:
2755 case TRUTH_XOR_EXPR:
2756 if (OP_SAME (0) && OP_SAME (1))
2757 return 1;
2758
2759 /* Otherwise take into account this is a commutative operation. */
2760 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2761 TREE_OPERAND (arg1, 1), flags)
2762 && operand_equal_p (TREE_OPERAND (arg0, 1),
2763 TREE_OPERAND (arg1, 0), flags));
2764
2765 case COND_EXPR:
2766 case VEC_COND_EXPR:
2767 case DOT_PROD_EXPR:
2768 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2769
2770 default:
2771 return 0;
2772 }
2773
2774 case tcc_vl_exp:
2775 switch (TREE_CODE (arg0))
2776 {
2777 case CALL_EXPR:
2778 /* If the CALL_EXPRs call different functions, then they
2779 clearly cannot be equal. */
2780 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2781 flags))
2782 return 0;
2783
2784 {
2785 unsigned int cef = call_expr_flags (arg0);
2786 if (flags & OEP_PURE_SAME)
2787 cef &= ECF_CONST | ECF_PURE;
2788 else
2789 cef &= ECF_CONST;
2790 if (!cef)
2791 return 0;
2792 }
2793
2794 /* Now see if all the arguments are the same. */
2795 {
2796 const_call_expr_arg_iterator iter0, iter1;
2797 const_tree a0, a1;
2798 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2799 a1 = first_const_call_expr_arg (arg1, &iter1);
2800 a0 && a1;
2801 a0 = next_const_call_expr_arg (&iter0),
2802 a1 = next_const_call_expr_arg (&iter1))
2803 if (! operand_equal_p (a0, a1, flags))
2804 return 0;
2805
2806 /* If we get here and both argument lists are exhausted
2807 then the CALL_EXPRs are equal. */
2808 return ! (a0 || a1);
2809 }
2810 default:
2811 return 0;
2812 }
2813
2814 case tcc_declaration:
2815 /* Consider __builtin_sqrt equal to sqrt. */
2816 return (TREE_CODE (arg0) == FUNCTION_DECL
2817 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2818 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2819 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2820
2821 default:
2822 return 0;
2823 }
2824
2825 #undef OP_SAME
2826 #undef OP_SAME_WITH_NULL
2827 }
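
/* Illustrative sketch, not part of the original sources: for
   hypothetical trees over the same variable X and a non-const call F:

     operand_equal_p (x + 1, 1 + x, 0)         ==> 1  (commutativity)
     operand_equal_p (f () + 1, f () + 1, 0)   ==> 0  (side effects)
     operand_equal_p (x, x, OEP_ONLY_CONST)    ==> 0  (not a constant)

   all following directly from the cases above.  */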
2828 \f
2829 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2830 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2831
2832 When in doubt, return 0. */
2833
2834 static int
2835 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2836 {
2837 int unsignedp1, unsignedpo;
2838 tree primarg0, primarg1, primother;
2839 unsigned int correct_width;
2840
2841 if (operand_equal_p (arg0, arg1, 0))
2842 return 1;
2843
2844 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2845 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2846 return 0;
2847
2848 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2849 and see if the inner values are the same. This removes any
2850 signedness comparison, which doesn't matter here. */
2851 primarg0 = arg0, primarg1 = arg1;
2852 STRIP_NOPS (primarg0);
2853 STRIP_NOPS (primarg1);
2854 if (operand_equal_p (primarg0, primarg1, 0))
2855 return 1;
2856
2857 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2858 actual comparison operand, ARG0.
2859
2860 First throw away any conversions to wider types
2861 already present in the operands. */
2862
2863 primarg1 = get_narrower (arg1, &unsignedp1);
2864 primother = get_narrower (other, &unsignedpo);
2865
2866 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2867 if (unsignedp1 == unsignedpo
2868 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2869 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2870 {
2871 tree type = TREE_TYPE (arg0);
2872
2873 /* Make sure the shorter operand is extended the right way
2874 to match the longer operand. */
2875 primarg1 = fold_convert (signed_or_unsigned_type_for
2876 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2877
2878 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2879 return 1;
2880 }
2881
2882 return 0;
2883 }
2884 \f
2885 /* See if ARG is an expression that is either a comparison or is performing
2886 arithmetic on comparisons. The comparisons must only be comparing
2887 two different values, which will be stored in *CVAL1 and *CVAL2; if
2888 they are nonzero it means that some operands have already been found.
2889 No variables may be used anywhere else in the expression except in the
2890 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2891 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2892
2893 If this is true, return 1. Otherwise, return zero. */
2894
2895 static int
2896 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2897 {
2898 enum tree_code code = TREE_CODE (arg);
2899 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2900
2901 /* We can handle some of the tcc_expression cases here. */
2902 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2903 tclass = tcc_unary;
2904 else if (tclass == tcc_expression
2905 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2906 || code == COMPOUND_EXPR))
2907 tclass = tcc_binary;
2908
2909 else if (tclass == tcc_expression && code == SAVE_EXPR
2910 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2911 {
2912 /* If we've already found a CVAL1 or CVAL2, this expression is
2913 too complex to handle. */
2914 if (*cval1 || *cval2)
2915 return 0;
2916
2917 tclass = tcc_unary;
2918 *save_p = 1;
2919 }
2920
2921 switch (tclass)
2922 {
2923 case tcc_unary:
2924 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2925
2926 case tcc_binary:
2927 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2928 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2929 cval1, cval2, save_p));
2930
2931 case tcc_constant:
2932 return 1;
2933
2934 case tcc_expression:
2935 if (code == COND_EXPR)
2936 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2937 cval1, cval2, save_p)
2938 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2939 cval1, cval2, save_p)
2940 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2941 cval1, cval2, save_p));
2942 return 0;
2943
2944 case tcc_comparison:
2945 /* First see if we can handle the first operand, then the second. For
2946 the second operand, we know *CVAL1 can't be zero. It must be that
2947 one side of the comparison is each of the values; test for the
2948 case where this isn't true by failing if the two operands
2949 are the same. */
2950
2951 if (operand_equal_p (TREE_OPERAND (arg, 0),
2952 TREE_OPERAND (arg, 1), 0))
2953 return 0;
2954
2955 if (*cval1 == 0)
2956 *cval1 = TREE_OPERAND (arg, 0);
2957 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2958 ;
2959 else if (*cval2 == 0)
2960 *cval2 = TREE_OPERAND (arg, 0);
2961 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2962 ;
2963 else
2964 return 0;
2965
2966 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2967 ;
2968 else if (*cval2 == 0)
2969 *cval2 = TREE_OPERAND (arg, 1);
2970 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2971 ;
2972 else
2973 return 0;
2974
2975 return 1;
2976
2977 default:
2978 return 0;
2979 }
2980 }
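
/* Illustrative sketch, not part of the original sources: for an
   expression such as (a < b) | (a == b) over hypothetical trees A and
   B, the walk above returns 1 with *CVAL1 == a and *CVAL2 == b,
   whereas (a < b) | (a < c) returns 0 because a third value C
   appears.  */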
2981 \f
2982 /* ARG is a tree that is known to contain just arithmetic operations and
2983 comparisons. Evaluate the operations in the tree substituting NEW0 for
2984 any occurrence of OLD0 as an operand of a comparison and likewise for
2985 NEW1 and OLD1. */
2986
2987 static tree
2988 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2989 tree old1, tree new1)
2990 {
2991 tree type = TREE_TYPE (arg);
2992 enum tree_code code = TREE_CODE (arg);
2993 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2994
2995 /* We can handle some of the tcc_expression cases here. */
2996 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2997 tclass = tcc_unary;
2998 else if (tclass == tcc_expression
2999 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3000 tclass = tcc_binary;
3001
3002 switch (tclass)
3003 {
3004 case tcc_unary:
3005 return fold_build1_loc (loc, code, type,
3006 eval_subst (loc, TREE_OPERAND (arg, 0),
3007 old0, new0, old1, new1));
3008
3009 case tcc_binary:
3010 return fold_build2_loc (loc, code, type,
3011 eval_subst (loc, TREE_OPERAND (arg, 0),
3012 old0, new0, old1, new1),
3013 eval_subst (loc, TREE_OPERAND (arg, 1),
3014 old0, new0, old1, new1));
3015
3016 case tcc_expression:
3017 switch (code)
3018 {
3019 case SAVE_EXPR:
3020 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3021 old1, new1);
3022
3023 case COMPOUND_EXPR:
3024 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3025 old1, new1);
3026
3027 case COND_EXPR:
3028 return fold_build3_loc (loc, code, type,
3029 eval_subst (loc, TREE_OPERAND (arg, 0),
3030 old0, new0, old1, new1),
3031 eval_subst (loc, TREE_OPERAND (arg, 1),
3032 old0, new0, old1, new1),
3033 eval_subst (loc, TREE_OPERAND (arg, 2),
3034 old0, new0, old1, new1));
3035 default:
3036 break;
3037 }
3038 /* Fall through - ??? */
3039
3040 case tcc_comparison:
3041 {
3042 tree arg0 = TREE_OPERAND (arg, 0);
3043 tree arg1 = TREE_OPERAND (arg, 1);
3044
3045 /* We need to check both for exact equality and tree equality. The
3046 former will be true if the operand has a side-effect. In that
3047 case, we know the operand occurred exactly once. */
3048
3049 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3050 arg0 = new0;
3051 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3052 arg0 = new1;
3053
3054 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3055 arg1 = new0;
3056 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3057 arg1 = new1;
3058
3059 return fold_build2_loc (loc, code, type, arg0, arg1);
3060 }
3061
3062 default:
3063 return arg;
3064 }
3065 }
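
/* Illustrative sketch, not part of the original sources: for
   hypothetical trees, substituting into the comparison old0 < old1
   looks like

     tree t = eval_subst (UNKNOWN_LOCATION, cmp, old0, new0, old1, new1);

   where CMP is the tree old0 < old1, and T comes back as the folded
   comparison new0 < new1.  */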
3066 \f
3067 /* Return a tree for the case when the result of an expression is RESULT
3068 converted to TYPE and OMITTED was previously an operand of the expression
3069 but is now not needed (e.g., we folded OMITTED * 0).
3070
3071 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3072 the conversion of RESULT to TYPE. */
3073
3074 tree
3075 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3076 {
3077 tree t = fold_convert_loc (loc, type, result);
3078
3079 /* If the resulting operand is an empty statement, just return the omitted
3080 statement cast to void. */
3081 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3082 return build1_loc (loc, NOP_EXPR, void_type_node,
3083 fold_ignored_result (omitted));
3084
3085 if (TREE_SIDE_EFFECTS (omitted))
3086 return build2_loc (loc, COMPOUND_EXPR, type,
3087 fold_ignored_result (omitted), t);
3088
3089 return non_lvalue_loc (loc, t);
3090 }
3091
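/* Illustrative sketch, not part of the original sources: when folding
   f () * 0, the call cannot simply be dropped, so with a hypothetical
   tree CALL_TO_F

     omit_one_operand_loc (loc, type, integer_zero_node, call_to_f);

   yields the COMPOUND_EXPR (f (), 0), keeping the side effect while
   the multiplication itself is folded away.  */
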
3092 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3093
3094 static tree
3095 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3096 tree omitted)
3097 {
3098 tree t = fold_convert_loc (loc, type, result);
3099
3100 /* If the resulting operand is an empty statement, just return the omitted
3101 statement cast to void. */
3102 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3103 return build1_loc (loc, NOP_EXPR, void_type_node,
3104 fold_ignored_result (omitted));
3105
3106 if (TREE_SIDE_EFFECTS (omitted))
3107 return build2_loc (loc, COMPOUND_EXPR, type,
3108 fold_ignored_result (omitted), t);
3109
3110 return pedantic_non_lvalue_loc (loc, t);
3111 }
3112
3113 /* Return a tree for the case when the result of an expression is RESULT
3114 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3115 of the expression but are now not needed.
3116
3117 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3118 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3119 evaluated before OMITTED2. Otherwise, if neither has side effects,
3120 just do the conversion of RESULT to TYPE. */
3121
3122 tree
3123 omit_two_operands_loc (location_t loc, tree type, tree result,
3124 tree omitted1, tree omitted2)
3125 {
3126 tree t = fold_convert_loc (loc, type, result);
3127
3128 if (TREE_SIDE_EFFECTS (omitted2))
3129 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3130 if (TREE_SIDE_EFFECTS (omitted1))
3131 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3132
3133 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3134 }
3135
3136 \f
3137 /* Return a simplified tree node for the truth-negation of ARG. This
3138 never alters ARG itself. We assume that ARG is an operation that
3139 returns a truth value (0 or 1).
3140
3141 FIXME: one would think we would fold the result, but it causes
3142 problems with the dominator optimizer. */
3143
3144 static tree
3145 fold_truth_not_expr (location_t loc, tree arg)
3146 {
3147 tree type = TREE_TYPE (arg);
3148 enum tree_code code = TREE_CODE (arg);
3149 location_t loc1, loc2;
3150
3151 /* If this is a comparison, we can simply invert it, except for
3152 floating-point non-equality comparisons, in which case we return
3153 NULL_TREE and let the caller wrap a TRUTH_NOT_EXPR around what we have. */
3154
3155 if (TREE_CODE_CLASS (code) == tcc_comparison)
3156 {
3157 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3158 if (FLOAT_TYPE_P (op_type)
3159 && flag_trapping_math
3160 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3161 && code != NE_EXPR && code != EQ_EXPR)
3162 return NULL_TREE;
3163
3164 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3165 if (code == ERROR_MARK)
3166 return NULL_TREE;
3167
3168 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3169 TREE_OPERAND (arg, 1));
3170 }
3171
3172 switch (code)
3173 {
3174 case INTEGER_CST:
3175 return constant_boolean_node (integer_zerop (arg), type);
3176
3177 case TRUTH_AND_EXPR:
3178 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3179 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3180 return build2_loc (loc, TRUTH_OR_EXPR, type,
3181 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3182 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3183
3184 case TRUTH_OR_EXPR:
3185 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3186 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3187 return build2_loc (loc, TRUTH_AND_EXPR, type,
3188 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3189 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3190
3191 case TRUTH_XOR_EXPR:
3192 /* Here we can invert either operand. We invert the first operand
3193 unless the second operand is a TRUTH_NOT_EXPR in which case our
3194 result is the XOR of the first operand with the inside of the
3195 negation of the second operand. */
3196
3197 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3198 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3199 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3200 else
3201 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3202 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3203 TREE_OPERAND (arg, 1));
3204
3205 case TRUTH_ANDIF_EXPR:
3206 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3207 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3208 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3209 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3210 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3211
3212 case TRUTH_ORIF_EXPR:
3213 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3214 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3215 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3216 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3217 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3218
3219 case TRUTH_NOT_EXPR:
3220 return TREE_OPERAND (arg, 0);
3221
3222 case COND_EXPR:
3223 {
3224 tree arg1 = TREE_OPERAND (arg, 1);
3225 tree arg2 = TREE_OPERAND (arg, 2);
3226
3227 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3228 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3229
3230 /* A COND_EXPR may have a throw as one operand, which
3231 then has void type. Just leave void operands
3232 as they are. */
3233 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3234 VOID_TYPE_P (TREE_TYPE (arg1))
3235 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3236 VOID_TYPE_P (TREE_TYPE (arg2))
3237 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3238 }
3239
3240 case COMPOUND_EXPR:
3241 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3242 return build2_loc (loc, COMPOUND_EXPR, type,
3243 TREE_OPERAND (arg, 0),
3244 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3245
3246 case NON_LVALUE_EXPR:
3247 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3248 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3249
3250 CASE_CONVERT:
3251 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3252 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3253
3254 /* ... fall through ... */
3255
3256 case FLOAT_EXPR:
3257 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3258 return build1_loc (loc, TREE_CODE (arg), type,
3259 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3260
3261 case BIT_AND_EXPR:
3262 if (!integer_onep (TREE_OPERAND (arg, 1)))
3263 return NULL_TREE;
3264 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3265
3266 case SAVE_EXPR:
3267 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3268
3269 case CLEANUP_POINT_EXPR:
3270 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3271 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3272 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3273
3274 default:
3275 return NULL_TREE;
3276 }
3277 }
3278
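/* Illustrative sketch, not part of the original sources: the
   TRUTH_AND_EXPR arm above is De Morgan's law, so for hypothetical
   truth-valued trees A and B,

     fold_truth_not_expr (UNKNOWN_LOCATION, a_and_b)

   with A_AND_B being a && b produces !a || !b, each operand inverted
   recursively through invert_truthvalue_loc.  */
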
3279 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3280 assume that ARG is an operation that returns a truth value (0 or 1
3281 for scalars, 0 or -1 for vectors). Return the folded expression if
3282 folding is successful. Otherwise, return NULL_TREE. */
3283
3284 static tree
3285 fold_invert_truthvalue (location_t loc, tree arg)
3286 {
3287 tree type = TREE_TYPE (arg);
3288 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3289 ? BIT_NOT_EXPR
3290 : TRUTH_NOT_EXPR,
3291 type, arg);
3292 }
3293
3294 /* Return a simplified tree node for the truth-negation of ARG. This
3295 never alters ARG itself. We assume that ARG is an operation that
3296 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3297
3298 tree
3299 invert_truthvalue_loc (location_t loc, tree arg)
3300 {
3301 if (TREE_CODE (arg) == ERROR_MARK)
3302 return arg;
3303
3304 tree type = TREE_TYPE (arg);
3305 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3306 ? BIT_NOT_EXPR
3307 : TRUTH_NOT_EXPR,
3308 type, arg);
3309 }
3310
3311 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3312 operands are another bit-wise operation with a common input. If so,
3313 distribute the bit operations to save an operation and possibly two if
3314 constants are involved. For example, convert
3315 (A | B) & (A | C) into A | (B & C)
3316 Further simplification will occur if B and C are constants.
3317
3318 If this optimization cannot be done, 0 will be returned. */
3319
3320 static tree
3321 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3322 tree arg0, tree arg1)
3323 {
3324 tree common;
3325 tree left, right;
3326
3327 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3328 || TREE_CODE (arg0) == code
3329 || (TREE_CODE (arg0) != BIT_AND_EXPR
3330 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3331 return 0;
3332
3333 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3334 {
3335 common = TREE_OPERAND (arg0, 0);
3336 left = TREE_OPERAND (arg0, 1);
3337 right = TREE_OPERAND (arg1, 1);
3338 }
3339 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3340 {
3341 common = TREE_OPERAND (arg0, 0);
3342 left = TREE_OPERAND (arg0, 1);
3343 right = TREE_OPERAND (arg1, 0);
3344 }
3345 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3346 {
3347 common = TREE_OPERAND (arg0, 1);
3348 left = TREE_OPERAND (arg0, 0);
3349 right = TREE_OPERAND (arg1, 1);
3350 }
3351 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3352 {
3353 common = TREE_OPERAND (arg0, 1);
3354 left = TREE_OPERAND (arg0, 0);
3355 right = TREE_OPERAND (arg1, 0);
3356 }
3357 else
3358 return 0;
3359
3360 common = fold_convert_loc (loc, type, common);
3361 left = fold_convert_loc (loc, type, left);
3362 right = fold_convert_loc (loc, type, right);
3363 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3364 fold_build2_loc (loc, code, type, left, right));
3365 }
3366
3367 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3368 with code CODE. This optimization is unsafe for exact IEEE semantics. */
3369 static tree
3370 distribute_real_division (location_t loc, enum tree_code code, tree type,
3371 tree arg0, tree arg1)
3372 {
3373 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3374 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3375
3376 /* (A / C) +- (B / C) -> (A +- B) / C. */
3377 if (mul0 == mul1
3378 && operand_equal_p (TREE_OPERAND (arg0, 1),
3379 TREE_OPERAND (arg1, 1), 0))
3380 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3381 fold_build2_loc (loc, code, type,
3382 TREE_OPERAND (arg0, 0),
3383 TREE_OPERAND (arg1, 0)),
3384 TREE_OPERAND (arg0, 1));
3385
3386 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3387 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3388 TREE_OPERAND (arg1, 0), 0)
3389 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3390 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3391 {
3392 REAL_VALUE_TYPE r0, r1;
3393 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3394 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3395 if (!mul0)
3396 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3397 if (!mul1)
3398 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3399 real_arithmetic (&r0, code, &r0, &r1);
3400 return fold_build2_loc (loc, MULT_EXPR, type,
3401 TREE_OPERAND (arg0, 0),
3402 build_real (type, r0));
3403 }
3404
3405 return NULL_TREE;
3406 }
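
/* Illustrative sketch, not part of the original sources: under the
   unsafe-math assumption documented above, a / 3.0 + b / 3.0 becomes
   (a + b) / 3.0 by the first pattern, and a / 2.0 + a / 4.0 becomes
   a * 0.75 by the second, since 1/2.0 + 1/4.0 is computed at compile
   time.  */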
3407 \f
3408 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3409 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3410
3411 static tree
3412 make_bit_field_ref (location_t loc, tree inner, tree type,
3413 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3414 {
3415 tree result, bftype;
3416
3417 if (bitpos == 0)
3418 {
3419 tree size = TYPE_SIZE (TREE_TYPE (inner));
3420 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3421 || POINTER_TYPE_P (TREE_TYPE (inner)))
3422 && host_integerp (size, 0)
3423 && tree_low_cst (size, 0) == bitsize)
3424 return fold_convert_loc (loc, type, inner);
3425 }
3426
3427 bftype = type;
3428 if (TYPE_PRECISION (bftype) != bitsize
3429 || TYPE_UNSIGNED (bftype) == !unsignedp)
3430 bftype = build_nonstandard_integer_type (bitsize, 0);
3431
3432 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3433 size_int (bitsize), bitsize_int (bitpos));
3434
3435 if (bftype != type)
3436 result = fold_convert_loc (loc, type, result);
3437
3438 return result;
3439 }
3440
3441 /* Optimize a bit-field compare.
3442
3443 There are two cases: the first is a compare against a constant and the
3444 second is a comparison of two items where the fields are at the same
3445 bit position relative to the start of a chunk (byte, halfword, word)
3446 large enough to contain it. In these cases we can avoid the shift
3447 implicit in bitfield extractions.
3448
3449 For constants, we emit a compare of the shifted constant with the
3450 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3451 compared. For two fields at the same position, we do the ANDs with the
3452 similar mask and compare the result of the ANDs.
3453
3454 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3455 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3456 are the left and right operands of the comparison, respectively.
3457
3458 If the optimization described above can be done, we return the resulting
3459 tree. Otherwise we return zero. */
3460
3461 static tree
3462 optimize_bit_field_compare (location_t loc, enum tree_code code,
3463 tree compare_type, tree lhs, tree rhs)
3464 {
3465 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3466 tree type = TREE_TYPE (lhs);
3467 tree signed_type, unsigned_type;
3468 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3469 enum machine_mode lmode, rmode, nmode;
3470 int lunsignedp, runsignedp;
3471 int lvolatilep = 0, rvolatilep = 0;
3472 tree linner, rinner = NULL_TREE;
3473 tree mask;
3474 tree offset;
3475
3476 /* In the strict volatile bitfields case, doing code changes here may prevent
3477 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3478 if (flag_strict_volatile_bitfields > 0)
3479 return 0;
3480
3481 /* Get all the information about the extractions being done. If the bit size
3482 is the same as the size of the underlying object, we aren't doing an
3483 extraction at all and so can do nothing. We also don't want to
3484 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3485 then will no longer be able to replace it. */
3486 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3487 &lunsignedp, &lvolatilep, false);
3488 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3489 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3490 return 0;
3491
3492 if (!const_p)
3493 {
3494 /* If this is not a constant, we can only do something if bit positions,
3495 sizes, and signedness are the same. */
3496 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3497 &runsignedp, &rvolatilep, false);
3498
3499 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3500 || lunsignedp != runsignedp || offset != 0
3501 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3502 return 0;
3503 }
3504
3505 /* See if we can find a mode to refer to this field. We should be able to,
3506 but fail if we can't. */
3507 if (lvolatilep
3508 && GET_MODE_BITSIZE (lmode) > 0
3509 && flag_strict_volatile_bitfields > 0)
3510 nmode = lmode;
3511 else
3512 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3513 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3514 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3515 TYPE_ALIGN (TREE_TYPE (rinner))),
3516 word_mode, lvolatilep || rvolatilep);
3517 if (nmode == VOIDmode)
3518 return 0;
3519
3520 /* Set signed and unsigned types of the precision of this mode for the
3521 shifts below. */
3522 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3523 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3524
3525 /* Compute the bit position and size for the new reference and our offset
3526 within it. If the new reference is the same size as the original, we
3527 won't optimize anything, so return zero. */
3528 nbitsize = GET_MODE_BITSIZE (nmode);
3529 nbitpos = lbitpos & ~ (nbitsize - 1);
3530 lbitpos -= nbitpos;
3531 if (nbitsize == lbitsize)
3532 return 0;
3533
3534 if (BYTES_BIG_ENDIAN)
3535 lbitpos = nbitsize - lbitsize - lbitpos;
3536
3537 /* Make the mask to be used against the extracted field. */
3538 mask = build_int_cst_type (unsigned_type, -1);
3539 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3540 mask = const_binop (RSHIFT_EXPR, mask,
3541 size_int (nbitsize - lbitsize - lbitpos));
3542
3543 if (! const_p)
3544 /* If not comparing with constant, just rework the comparison
3545 and return. */
3546 return fold_build2_loc (loc, code, compare_type,
3547 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3548 make_bit_field_ref (loc, linner,
3549 unsigned_type,
3550 nbitsize, nbitpos,
3551 1),
3552 mask),
3553 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3554 make_bit_field_ref (loc, rinner,
3555 unsigned_type,
3556 nbitsize, nbitpos,
3557 1),
3558 mask));
3559
3560 /* Otherwise, we are handling the constant case. See if the constant is too
3561 big for the field. Warn and return a tree for 0 (false) if so. We do
3562 this not only for its own sake, but to avoid having to test for this
3563 error case below. If we didn't, we might generate wrong code.
3564
3565 For unsigned fields, the constant shifted right by the field length should
3566 be all zero. For signed fields, the high-order bits should agree with
3567 the sign bit. */
3568
3569 if (lunsignedp)
3570 {
3571 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3572 fold_convert_loc (loc,
3573 unsigned_type, rhs),
3574 size_int (lbitsize))))
3575 {
3576 warning (0, "comparison is always %d due to width of bit-field",
3577 code == NE_EXPR);
3578 return constant_boolean_node (code == NE_EXPR, compare_type);
3579 }
3580 }
3581 else
3582 {
3583 tree tem = const_binop (RSHIFT_EXPR,
3584 fold_convert_loc (loc, signed_type, rhs),
3585 size_int (lbitsize - 1));
3586 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3587 {
3588 warning (0, "comparison is always %d due to width of bit-field",
3589 code == NE_EXPR);
3590 return constant_boolean_node (code == NE_EXPR, compare_type);
3591 }
3592 }
3593
3594 /* Single-bit compares should always be against zero. */
3595 if (lbitsize == 1 && ! integer_zerop (rhs))
3596 {
3597 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3598 rhs = build_int_cst (type, 0);
3599 }
3600
3601 /* Make a new bitfield reference, shift the constant over the
3602 appropriate number of bits and mask it with the computed mask
3603 (in case this was a signed field). If we changed it, make a new one. */
3604 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3605 if (lvolatilep)
3606 {
3607 TREE_SIDE_EFFECTS (lhs) = 1;
3608 TREE_THIS_VOLATILE (lhs) = 1;
3609 }
3610
3611 rhs = const_binop (BIT_AND_EXPR,
3612 const_binop (LSHIFT_EXPR,
3613 fold_convert_loc (loc, unsigned_type, rhs),
3614 size_int (lbitpos)),
3615 mask);
3616
3617 lhs = build2_loc (loc, code, compare_type,
3618 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3619 return lhs;
3620 }
3621 \f
3622 /* Subroutine for fold_truth_andor_1: decode a field reference.
3623
3624 If EXP is a comparison reference, we return the innermost reference.
3625
3626 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3627 set to the starting bit number.
3628
3629 If the innermost field can be completely contained in a mode-sized
3630 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3631
3632 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3633 otherwise it is not changed.
3634
3635 *PUNSIGNEDP is set to the signedness of the field.
3636
3637 *PMASK is set to the mask used. This is either contained in a
3638 BIT_AND_EXPR or derived from the width of the field.
3639
3640 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3641
3642 Return 0 if this is not a component reference or is one that we can't
3643 do anything with. */
3644
3645 static tree
3646 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3647 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3648 int *punsignedp, int *pvolatilep,
3649 tree *pmask, tree *pand_mask)
3650 {
3651 tree outer_type = 0;
3652 tree and_mask = 0;
3653 tree mask, inner, offset;
3654 tree unsigned_type;
3655 unsigned int precision;
3656
3657 /* All the optimizations using this function assume integer fields.
3658 There are problems with FP fields since the type_for_size call
3659 below can fail for, e.g., XFmode. */
3660 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3661 return 0;
3662
3663 /* We are interested in the bare arrangement of bits, so strip everything
3664 that doesn't affect the machine mode. However, record the type of the
3665 outermost expression if it may matter below. */
3666 if (CONVERT_EXPR_P (exp)
3667 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3668 outer_type = TREE_TYPE (exp);
3669 STRIP_NOPS (exp);
3670
3671 if (TREE_CODE (exp) == BIT_AND_EXPR)
3672 {
3673 and_mask = TREE_OPERAND (exp, 1);
3674 exp = TREE_OPERAND (exp, 0);
3675 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3676 if (TREE_CODE (and_mask) != INTEGER_CST)
3677 return 0;
3678 }
3679
3680 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3681 punsignedp, pvolatilep, false);
3682 if ((inner == exp && and_mask == 0)
3683 || *pbitsize < 0 || offset != 0
3684 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3685 return 0;
3686
3687 /* If the number of bits in the reference is the same as the bitsize of
3688 the outer type, then the outer type gives the signedness. Otherwise
3689 (in case of a small bitfield) the signedness is unchanged. */
3690 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3691 *punsignedp = TYPE_UNSIGNED (outer_type);
3692
3693 /* Compute the mask to access the bitfield. */
3694 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3695 precision = TYPE_PRECISION (unsigned_type);
3696
3697 mask = build_int_cst_type (unsigned_type, -1);
3698
3699 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3700 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3701
3702 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3703 if (and_mask != 0)
3704 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3705 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3706
3707 *pmask = mask;
3708 *pand_mask = and_mask;
3709 return inner;
3710 }
3711
3712 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3713 bit positions. */
3714
3715 static int
3716 all_ones_mask_p (const_tree mask, int size)
3717 {
3718 tree type = TREE_TYPE (mask);
3719 unsigned int precision = TYPE_PRECISION (type);
3720 tree tmask;
3721
3722 tmask = build_int_cst_type (signed_type_for (type), -1);
3723
3724 return
3725 tree_int_cst_equal (mask,
3726 const_binop (RSHIFT_EXPR,
3727 const_binop (LSHIFT_EXPR, tmask,
3728 size_int (precision - size)),
3729 size_int (precision - size)));
3730 }
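/* In other words (illustrative widths): for an 8-bit type and
SIZE == 6, a mask of six low-order ones is the value 0b00111111; the
shifts above rebuild that pattern from an all-ones constant and the
result is compared against MASK. */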
3731
3732 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3733 represents the sign bit of EXP's type. If EXP represents a sign
3734 or zero extension, also test VAL against the unextended type.
3735 The return value is the (sub)expression whose sign bit is VAL,
3736 or NULL_TREE otherwise. */
3737
3738 static tree
3739 sign_bit_p (tree exp, const_tree val)
3740 {
3741 unsigned HOST_WIDE_INT mask_lo, lo;
3742 HOST_WIDE_INT mask_hi, hi;
3743 int width;
3744 tree t;
3745
3746 /* Tree EXP must have an integral type. */
3747 t = TREE_TYPE (exp);
3748 if (! INTEGRAL_TYPE_P (t))
3749 return NULL_TREE;
3750
3751 /* Tree VAL must be an integer constant. */
3752 if (TREE_CODE (val) != INTEGER_CST
3753 || TREE_OVERFLOW (val))
3754 return NULL_TREE;
3755
3756 width = TYPE_PRECISION (t);
3757 if (width > HOST_BITS_PER_WIDE_INT)
3758 {
3759 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3760 lo = 0;
3761
3762 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3763 mask_lo = -1;
3764 }
3765 else
3766 {
3767 hi = 0;
3768 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3769
3770 mask_hi = 0;
3771 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3772 }
3773
3774 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3775 treat VAL as if it were unsigned. */
3776 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3777 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3778 return exp;
3779
3780 /* Handle extension from a narrower type. */
3781 if (TREE_CODE (exp) == NOP_EXPR
3782 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3783 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3784
3785 return NULL_TREE;
3786 }
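/* As an illustration (hypothetical types): for a 16-bit signed type,
sign_bit_p matches VAL == 0x8000. If EXP is (int) s, where S has that
16-bit type, the NOP_EXPR case above also accepts 0x8000, the sign bit
of the unextended type. */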
3787
3788 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3789 to be evaluated unconditionally. */
3790
3791 static int
3792 simple_operand_p (const_tree exp)
3793 {
3794 /* Strip any conversions that don't change the machine mode. */
3795 STRIP_NOPS (exp);
3796
3797 return (CONSTANT_CLASS_P (exp)
3798 || TREE_CODE (exp) == SSA_NAME
3799 || (DECL_P (exp)
3800 && ! TREE_ADDRESSABLE (exp)
3801 && ! TREE_THIS_VOLATILE (exp)
3802 && ! DECL_NONLOCAL (exp)
3803 /* Don't regard global variables as simple. They may be
3804 allocated in ways unknown to the compiler (shared memory,
3805 #pragma weak, etc). */
3806 && ! TREE_PUBLIC (exp)
3807 && ! DECL_EXTERNAL (exp)
3808 /* Weakrefs are not safe to read, since they can be NULL.
3809 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3810 have DECL_WEAK flag set. */
3811 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3812 /* Loading a static variable is unduly expensive, but global
3813 registers aren't expensive. */
3814 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3815 }
3816
3817 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3818 to be evaluated unconditionally.
3819 In addition to simple_operand_p, we assume that comparisons, conversions,
3820 and logic-not operations are simple if their operands are, too. */
3821
3822 static bool
3823 simple_operand_p_2 (tree exp)
3824 {
3825 enum tree_code code;
3826
3827 if (TREE_SIDE_EFFECTS (exp)
3828 || tree_could_trap_p (exp))
3829 return false;
3830
3831 while (CONVERT_EXPR_P (exp))
3832 exp = TREE_OPERAND (exp, 0);
3833
3834 code = TREE_CODE (exp);
3835
3836 if (TREE_CODE_CLASS (code) == tcc_comparison)
3837 return (simple_operand_p (TREE_OPERAND (exp, 0))
3838 && simple_operand_p (TREE_OPERAND (exp, 1)));
3839
3840 if (code == TRUTH_NOT_EXPR)
3841 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3842
3843 return simple_operand_p (exp);
3844 }
3845
3846 \f
3847 /* The following functions are subroutines to fold_range_test and allow it to
3848 try to change a logical combination of comparisons into a range test.
3849
3850 For example, both
3851 X == 2 || X == 3 || X == 4 || X == 5
3852 and
3853 X >= 2 && X <= 5
3854 are converted to
3855 (unsigned) (X - 2) <= 3
3856
3857 We describe each set of comparisons as being either inside or outside
3858 a range, using a variable named like IN_P, and then describe the
3859 range with a lower and upper bound. If one of the bounds is omitted,
3860 it represents either the highest or lowest value of the type.
3861
3862 In the comments below, we represent a range by two numbers in brackets
3863 preceded by a "+" to designate being inside that range, or a "-" to
3864 designate being outside that range, so the condition can be inverted by
3865 flipping the prefix. An omitted bound is represented by a "-". For
3866 example, "- [-, 10]" means being outside the range starting at the lowest
3867 possible value and ending at 10, in other words, being greater than 10.
3868 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3869 always false.
3870
3871 We set up things so that the missing bounds are handled in a consistent
3872 manner so neither a missing bound nor "true" and "false" need to be
3873 handled using a special case. */
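/* A minimal source-level sketch of that conversion (the variable and
constants are hypothetical):

before: if (c == 'a' || c == 'b' || c == 'c' || c == 'd') ...
after: if ((unsigned char) (c - 'a') <= 3) ...

The four equality tests are described as "+ ['a', 'd']" and then
emitted as one unsigned range check. */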
3874
3875 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3876 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3877 and UPPER1_P are nonzero if the respective argument is an upper bound
3878 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3879 must be specified for a comparison. ARG1 will be converted to ARG0's
3880 type if both are specified. */
3881
3882 static tree
3883 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3884 tree arg1, int upper1_p)
3885 {
3886 tree tem;
3887 int result;
3888 int sgn0, sgn1;
3889
3890 /* If neither arg represents infinity, do the normal operation.
3891 Else, if not a comparison, return infinity. Else handle the special
3892 comparison rules. Note that most of the cases below won't occur, but
3893 are handled for consistency. */
3894
3895 if (arg0 != 0 && arg1 != 0)
3896 {
3897 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3898 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3899 STRIP_NOPS (tem);
3900 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3901 }
3902
3903 if (TREE_CODE_CLASS (code) != tcc_comparison)
3904 return 0;
3905
3906 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3907 for neither. In real mathematics we cannot assume open-ended ranges
3908 are comparable. But this is computer arithmetic, where numbers are
3909 finite, so we can identify any unbounded bound with a value Z greater
3910 than any representable number. This permits us to treat unbounded
3911 ranges as equal. */
3912 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3913 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3914 switch (code)
3915 {
3916 case EQ_EXPR:
3917 result = sgn0 == sgn1;
3918 break;
3919 case NE_EXPR:
3920 result = sgn0 != sgn1;
3921 break;
3922 case LT_EXPR:
3923 result = sgn0 < sgn1;
3924 break;
3925 case LE_EXPR:
3926 result = sgn0 <= sgn1;
3927 break;
3928 case GT_EXPR:
3929 result = sgn0 > sgn1;
3930 break;
3931 case GE_EXPR:
3932 result = sgn0 >= sgn1;
3933 break;
3934 default:
3935 gcc_unreachable ();
3936 }
3937
3938 return constant_boolean_node (result, type);
3939 }
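/* For instance (hypothetical call): range_binop (LT_EXPR, type,
NULL_TREE, 1, five, 0), where FIVE is an INTEGER_CST of value 5,
compares an omitted upper bound (SGN0 == 1, in effect +infinity)
against the lower bound 5 (SGN1 == 0) and yields false, since
+infinity is not less than any finite value. */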
3940 \f
3941 /* Helper routine for make_range. Perform one step for it, return
3942 new expression if the loop should continue or NULL_TREE if it should
3943 stop. */
3944
3945 tree
3946 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3947 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3948 bool *strict_overflow_p)
3949 {
3950 tree arg0_type = TREE_TYPE (arg0);
3951 tree n_low, n_high, low = *p_low, high = *p_high;
3952 int in_p = *p_in_p, n_in_p;
3953
3954 switch (code)
3955 {
3956 case TRUTH_NOT_EXPR:
3957 /* We can only do something if the range is testing for zero. */
3958 if (low == NULL_TREE || high == NULL_TREE
3959 || ! integer_zerop (low) || ! integer_zerop (high))
3960 return NULL_TREE;
3961 *p_in_p = ! in_p;
3962 return arg0;
3963
3964 case EQ_EXPR: case NE_EXPR:
3965 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3966 /* We can only do something if the range is testing for zero
3967 and if the second operand is an integer constant. Note that
3968 saying something is "in" the range we make is done by
3969 complementing IN_P since it will set in the initial case of
3970 being not equal to zero; "out" is leaving it alone. */
3971 if (low == NULL_TREE || high == NULL_TREE
3972 || ! integer_zerop (low) || ! integer_zerop (high)
3973 || TREE_CODE (arg1) != INTEGER_CST)
3974 return NULL_TREE;
3975
3976 switch (code)
3977 {
3978 case NE_EXPR: /* - [c, c] */
3979 low = high = arg1;
3980 break;
3981 case EQ_EXPR: /* + [c, c] */
3982 in_p = ! in_p, low = high = arg1;
3983 break;
3984 case GT_EXPR: /* - [-, c] */
3985 low = 0, high = arg1;
3986 break;
3987 case GE_EXPR: /* + [c, -] */
3988 in_p = ! in_p, low = arg1, high = 0;
3989 break;
3990 case LT_EXPR: /* - [c, -] */
3991 low = arg1, high = 0;
3992 break;
3993 case LE_EXPR: /* + [-, c] */
3994 in_p = ! in_p, low = 0, high = arg1;
3995 break;
3996 default:
3997 gcc_unreachable ();
3998 }
3999
4000 /* If this is an unsigned comparison, we also know that EXP is
4001 greater than or equal to zero. We base the range tests we make
4002 on that fact, and record it here so we can parse existing
4003 range tests. We test arg0_type since often the return type
4004 of, e.g. EQ_EXPR, is boolean. */
4005 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4006 {
4007 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4008 in_p, low, high, 1,
4009 build_int_cst (arg0_type, 0),
4010 NULL_TREE))
4011 return NULL_TREE;
4012
4013 in_p = n_in_p, low = n_low, high = n_high;
4014
4015 /* If the high bound is missing, but we have a nonzero low
4016 bound, reverse the range so it goes from zero to the low bound
4017 minus 1. */
4018 if (high == 0 && low && ! integer_zerop (low))
4019 {
4020 in_p = ! in_p;
4021 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4022 integer_one_node, 0);
4023 low = build_int_cst (arg0_type, 0);
4024 }
4025 }
4026
4027 *p_low = low;
4028 *p_high = high;
4029 *p_in_p = in_p;
4030 return arg0;
4031
4032 case NEGATE_EXPR:
4033 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4034 low and high are non-NULL, then normalize will DTRT. */
4035 if (!TYPE_UNSIGNED (arg0_type)
4036 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4037 {
4038 if (low == NULL_TREE)
4039 low = TYPE_MIN_VALUE (arg0_type);
4040 if (high == NULL_TREE)
4041 high = TYPE_MAX_VALUE (arg0_type);
4042 }
4043
4044 /* (-x) IN [a,b] -> x in [-b, -a] */
4045 n_low = range_binop (MINUS_EXPR, exp_type,
4046 build_int_cst (exp_type, 0),
4047 0, high, 1);
4048 n_high = range_binop (MINUS_EXPR, exp_type,
4049 build_int_cst (exp_type, 0),
4050 0, low, 0);
4051 if (n_high != 0 && TREE_OVERFLOW (n_high))
4052 return NULL_TREE;
4053 goto normalize;
4054
4055 case BIT_NOT_EXPR:
4056 /* ~ X -> -X - 1 */
4057 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4058 build_int_cst (exp_type, 1));
4059
4060 case PLUS_EXPR:
4061 case MINUS_EXPR:
4062 if (TREE_CODE (arg1) != INTEGER_CST)
4063 return NULL_TREE;
4064
4065 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4066 move a constant to the other side. */
4067 if (!TYPE_UNSIGNED (arg0_type)
4068 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4069 return NULL_TREE;
4070
4071 /* If EXP is signed, any overflow in the computation is undefined,
4072 so we don't worry about it so long as our computations on
4073 the bounds don't overflow. For unsigned, overflow is defined
4074 and this is exactly the right thing. */
4075 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4076 arg0_type, low, 0, arg1, 0);
4077 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4078 arg0_type, high, 1, arg1, 0);
4079 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4080 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4081 return NULL_TREE;
4082
4083 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4084 *strict_overflow_p = true;
4085
4086 normalize:
4087 /* Check for an unsigned range which has wrapped around the maximum
4088 value thus making n_high < n_low, and normalize it. */
4089 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4090 {
4091 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4092 integer_one_node, 0);
4093 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4094 integer_one_node, 0);
4095
4096 /* If the range is of the form +/- [ x+1, x ], we won't
4097 be able to normalize it. But then, it represents the
4098 whole range or the empty set, so make it
4099 +/- [ -, - ]. */
4100 if (tree_int_cst_equal (n_low, low)
4101 && tree_int_cst_equal (n_high, high))
4102 low = high = 0;
4103 else
4104 in_p = ! in_p;
4105 }
4106 else
4107 low = n_low, high = n_high;
4108
4109 *p_low = low;
4110 *p_high = high;
4111 *p_in_p = in_p;
4112 return arg0;
4113
4114 CASE_CONVERT:
4115 case NON_LVALUE_EXPR:
4116 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4117 return NULL_TREE;
4118
4119 if (! INTEGRAL_TYPE_P (arg0_type)
4120 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4121 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4122 return NULL_TREE;
4123
4124 n_low = low, n_high = high;
4125
4126 if (n_low != 0)
4127 n_low = fold_convert_loc (loc, arg0_type, n_low);
4128
4129 if (n_high != 0)
4130 n_high = fold_convert_loc (loc, arg0_type, n_high);
4131
4132 /* If we're converting arg0 from an unsigned type to exp's
4133 signed type, we will be doing the comparison as unsigned.
4134 The tests above have already verified that LOW and HIGH
4135 are both positive.
4136
4137 So we have to ensure that we will handle large unsigned
4138 values the same way that the current signed bounds treat
4139 negative values. */
4140
4141 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4142 {
4143 tree high_positive;
4144 tree equiv_type;
4145 /* For fixed-point modes, we need to pass the saturating flag
4146 as the 2nd parameter. */
4147 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4148 equiv_type
4149 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4150 TYPE_SATURATING (arg0_type));
4151 else
4152 equiv_type
4153 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4154
4155 /* A range without an upper bound is, naturally, unbounded.
4156 Since convert would have cropped a very large value, use
4157 the max value for the destination type. */
4158 high_positive
4159 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4160 : TYPE_MAX_VALUE (arg0_type);
4161
4162 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4163 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4164 fold_convert_loc (loc, arg0_type,
4165 high_positive),
4166 build_int_cst (arg0_type, 1));
4167
4168 /* If the low bound is specified, "and" the range with the
4169 range for which the original unsigned value will be
4170 positive. */
4171 if (low != 0)
4172 {
4173 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4174 1, fold_convert_loc (loc, arg0_type,
4175 integer_zero_node),
4176 high_positive))
4177 return NULL_TREE;
4178
4179 in_p = (n_in_p == in_p);
4180 }
4181 else
4182 {
4183 /* Otherwise, "or" the range with the range of the input
4184 that will be interpreted as negative. */
4185 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4186 1, fold_convert_loc (loc, arg0_type,
4187 integer_zero_node),
4188 high_positive))
4189 return NULL_TREE;
4190
4191 in_p = (in_p != n_in_p);
4192 }
4193 }
4194
4195 *p_low = n_low;
4196 *p_high = n_high;
4197 *p_in_p = in_p;
4198 return arg0;
4199
4200 default:
4201 return NULL_TREE;
4202 }
4203 }
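/* A worked step (hypothetical trees): starting from the initial
description "- [0, 0]" of "x + 10 != 0", where X has a wrapping 16-bit
unsigned type, the PLUS_EXPR case above moves the constant to the
bounds, computing 0 - 10 in that type on both ends, and yields
"- [0xfff6, 0xfff6]" on X itself, i.e. x != 0xfff6. */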
4204
4205 /* Given EXP, a logical expression, set the range it is testing into
4206 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4207 actually being tested. *PLOW and *PHIGH will be made of the same
4208 type as the returned expression. If EXP is not a comparison, we
4209 will most likely not be returning a useful value and range. Set
4210 *STRICT_OVERFLOW_P to true if the return value is only valid
4211 because signed overflow is undefined; otherwise, do not change
4212 *STRICT_OVERFLOW_P. */
4213
4214 tree
4215 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4216 bool *strict_overflow_p)
4217 {
4218 enum tree_code code;
4219 tree arg0, arg1 = NULL_TREE;
4220 tree exp_type, nexp;
4221 int in_p;
4222 tree low, high;
4223 location_t loc = EXPR_LOCATION (exp);
4224
4225 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4226 and see if we can refine the range. Some of the cases below may not
4227 happen, but it doesn't seem worth worrying about this. We "continue"
4228 the outer loop when we've changed something; otherwise we "break"
4229 the switch, which will "break" the while. */
4230
4231 in_p = 0;
4232 low = high = build_int_cst (TREE_TYPE (exp), 0);
4233
4234 while (1)
4235 {
4236 code = TREE_CODE (exp);
4237 exp_type = TREE_TYPE (exp);
4238 arg0 = NULL_TREE;
4239
4240 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4241 {
4242 if (TREE_OPERAND_LENGTH (exp) > 0)
4243 arg0 = TREE_OPERAND (exp, 0);
4244 if (TREE_CODE_CLASS (code) == tcc_binary
4245 || TREE_CODE_CLASS (code) == tcc_comparison
4246 || (TREE_CODE_CLASS (code) == tcc_expression
4247 && TREE_OPERAND_LENGTH (exp) > 1))
4248 arg1 = TREE_OPERAND (exp, 1);
4249 }
4250 if (arg0 == NULL_TREE)
4251 break;
4252
4253 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4254 &high, &in_p, strict_overflow_p);
4255 if (nexp == NULL_TREE)
4256 break;
4257 exp = nexp;
4258 }
4259
4260 /* If EXP is a constant, we can evaluate whether this is true or false. */
4261 if (TREE_CODE (exp) == INTEGER_CST)
4262 {
4263 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4264 exp, 0, low, 0))
4265 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4266 exp, 1, high, 1)));
4267 low = high = 0;
4268 exp = 0;
4269 }
4270
4271 *pin_p = in_p, *plow = low, *phigh = high;
4272 return exp;
4273 }
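/* Illustrative use (hypothetical EXP): for "!(x > 10)", make_range
first applies the TRUTH_NOT_EXPR step, flipping IN_P, then the GT_EXPR
step, and returns X with the description "+ [-, 10]": the test is true
iff x <= 10. */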
4274 \f
4275 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4276 type, TYPE, return an expression to test if EXP is in (or out of, depending
4277 on IN_P) the range. Return 0 if the test couldn't be created. */
4278
4279 tree
4280 build_range_check (location_t loc, tree type, tree exp, int in_p,
4281 tree low, tree high)
4282 {
4283 tree etype = TREE_TYPE (exp), value;
4284
4285 #ifdef HAVE_canonicalize_funcptr_for_compare
4286 /* Disable this optimization for function pointer expressions
4287 on targets that require function pointer canonicalization. */
4288 if (HAVE_canonicalize_funcptr_for_compare
4289 && TREE_CODE (etype) == POINTER_TYPE
4290 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4291 return NULL_TREE;
4292 #endif
4293
4294 if (! in_p)
4295 {
4296 value = build_range_check (loc, type, exp, 1, low, high);
4297 if (value != 0)
4298 return invert_truthvalue_loc (loc, value);
4299
4300 return 0;
4301 }
4302
4303 if (low == 0 && high == 0)
4304 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4305
4306 if (low == 0)
4307 return fold_build2_loc (loc, LE_EXPR, type, exp,
4308 fold_convert_loc (loc, etype, high));
4309
4310 if (high == 0)
4311 return fold_build2_loc (loc, GE_EXPR, type, exp,
4312 fold_convert_loc (loc, etype, low));
4313
4314 if (operand_equal_p (low, high, 0))
4315 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4316 fold_convert_loc (loc, etype, low));
4317
4318 if (integer_zerop (low))
4319 {
4320 if (! TYPE_UNSIGNED (etype))
4321 {
4322 etype = unsigned_type_for (etype);
4323 high = fold_convert_loc (loc, etype, high);
4324 exp = fold_convert_loc (loc, etype, exp);
4325 }
4326 return build_range_check (loc, type, exp, 1, 0, high);
4327 }
4328
4329 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4330 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4331 {
4332 unsigned HOST_WIDE_INT lo;
4333 HOST_WIDE_INT hi;
4334 int prec;
4335
4336 prec = TYPE_PRECISION (etype);
4337 if (prec <= HOST_BITS_PER_WIDE_INT)
4338 {
4339 hi = 0;
4340 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4341 }
4342 else
4343 {
4344 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4345 lo = HOST_WIDE_INT_M1U;
4346 }
4347
4348 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4349 {
4350 if (TYPE_UNSIGNED (etype))
4351 {
4352 tree signed_etype = signed_type_for (etype);
4353 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4354 etype
4355 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4356 else
4357 etype = signed_etype;
4358 exp = fold_convert_loc (loc, etype, exp);
4359 }
4360 return fold_build2_loc (loc, GT_EXPR, type, exp,
4361 build_int_cst (etype, 0));
4362 }
4363 }
4364
4365 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4366 This requires wrap-around arithmetic for the type of the expression.
4367 First make sure that arithmetic in this type is valid, then make sure
4368 that it wraps around. */
4369 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4370 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4371 TYPE_UNSIGNED (etype));
4372
4373 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4374 {
4375 tree utype, minv, maxv;
4376
4377 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4378 for the type in question, as we rely on this here. */
4379 utype = unsigned_type_for (etype);
4380 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4381 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4382 integer_one_node, 1);
4383 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4384
4385 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4386 minv, 1, maxv, 1)))
4387 etype = utype;
4388 else
4389 return 0;
4390 }
4391
4392 high = fold_convert_loc (loc, etype, high);
4393 low = fold_convert_loc (loc, etype, low);
4394 exp = fold_convert_loc (loc, etype, exp);
4395
4396 value = const_binop (MINUS_EXPR, high, low);
4397
4399 if (POINTER_TYPE_P (etype))
4400 {
4401 if (value != 0 && !TREE_OVERFLOW (value))
4402 {
4403 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4404 return build_range_check (loc, type,
4405 fold_build_pointer_plus_loc (loc, exp, low),
4406 1, build_int_cst (etype, 0), value);
4407 }
4408 return 0;
4409 }
4410
4411 if (value != 0 && !TREE_OVERFLOW (value))
4412 return build_range_check (loc, type,
4413 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4414 1, build_int_cst (etype, 0), value);
4415
4416 return 0;
4417 }
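/* Example invocation (hypothetical trees): with IN_P == 1, LOW == 2
and HIGH == 5 for an EXP of unsigned type, the final recursion above
yields the single check

(exp - 2) <= 3

computed in the unsigned type; this is the "(unsigned) (X - 2) <= 3"
form described before the range-test helpers. */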
4418 \f
4419 /* Return the predecessor of VAL in its type, handling the infinite case. */
4420
4421 static tree
4422 range_predecessor (tree val)
4423 {
4424 tree type = TREE_TYPE (val);
4425
4426 if (INTEGRAL_TYPE_P (type)
4427 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4428 return 0;
4429 else
4430 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4431 }
4432
4433 /* Return the successor of VAL in its type, handling the infinite case. */
4434
4435 static tree
4436 range_successor (tree val)
4437 {
4438 tree type = TREE_TYPE (val);
4439
4440 if (INTEGRAL_TYPE_P (type)
4441 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4442 return 0;
4443 else
4444 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4445 }
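/* For example (illustrative values): with an 8-bit unsigned type,
range_successor on 255 returns 0 to signal that no successor exists,
while range_successor on 7 returns 8. This is what lets merge_ranges
below treat adjacent ranges uniformly. */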
4446
4447 /* Given two ranges, see if we can merge them into one. Return 1 if we
4448 can, 0 if we can't. Set the output range into the specified parameters. */
4449
4450 bool
4451 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4452 tree high0, int in1_p, tree low1, tree high1)
4453 {
4454 int no_overlap;
4455 int subset;
4456 int temp;
4457 tree tem;
4458 int in_p;
4459 tree low, high;
4460 int lowequal = ((low0 == 0 && low1 == 0)
4461 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4462 low0, 0, low1, 0)));
4463 int highequal = ((high0 == 0 && high1 == 0)
4464 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4465 high0, 1, high1, 1)));
4466
4467 /* Make range 0 be the range that starts first, or ends last if they
4468 start at the same value. Swap them if it isn't. */
4469 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4470 low0, 0, low1, 0))
4471 || (lowequal
4472 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4473 high1, 1, high0, 1))))
4474 {
4475 temp = in0_p, in0_p = in1_p, in1_p = temp;
4476 tem = low0, low0 = low1, low1 = tem;
4477 tem = high0, high0 = high1, high1 = tem;
4478 }
4479
4480 /* Now flag two cases, whether the ranges are disjoint or whether the
4481 second range is totally subsumed in the first. Note that the tests
4482 below are simplified by the ones above. */
4483 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4484 high0, 1, low1, 0));
4485 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4486 high1, 1, high0, 1));
4487
4488 /* We now have four cases, depending on whether we are including or
4489 excluding the two ranges. */
4490 if (in0_p && in1_p)
4491 {
4492 /* If they don't overlap, the result is false. If the second range
4493 is a subset it is the result. Otherwise, the range is from the start
4494 of the second to the end of the first. */
4495 if (no_overlap)
4496 in_p = 0, low = high = 0;
4497 else if (subset)
4498 in_p = 1, low = low1, high = high1;
4499 else
4500 in_p = 1, low = low1, high = high0;
4501 }
4502
4503 else if (in0_p && ! in1_p)
4504 {
4505 /* If they don't overlap, the result is the first range. If they are
4506 equal, the result is false. If the second range is a subset of the
4507 first, and the ranges begin at the same place, we go from just after
4508 the end of the second range to the end of the first. If the second
4509 range is not a subset of the first, or if it is a subset and both
4510 ranges end at the same place, the range starts at the start of the
4511 first range and ends just before the second range.
4512 Otherwise, we can't describe this as a single range. */
4513 if (no_overlap)
4514 in_p = 1, low = low0, high = high0;
4515 else if (lowequal && highequal)
4516 in_p = 0, low = high = 0;
4517 else if (subset && lowequal)
4518 {
4519 low = range_successor (high1);
4520 high = high0;
4521 in_p = 1;
4522 if (low == 0)
4523 {
4524 /* We are in the weird situation where high0 > high1 but
4525 high1 has no successor. Punt. */
4526 return 0;
4527 }
4528 }
4529 else if (! subset || highequal)
4530 {
4531 low = low0;
4532 high = range_predecessor (low1);
4533 in_p = 1;
4534 if (high == 0)
4535 {
4536 /* low0 < low1 but low1 has no predecessor. Punt. */
4537 return 0;
4538 }
4539 }
4540 else
4541 return 0;
4542 }
4543
4544 else if (! in0_p && in1_p)
4545 {
4546 /* If they don't overlap, the result is the second range. If the second
4547 is a subset of the first, the result is false. Otherwise,
4548 the range starts just after the first range and ends at the
4549 end of the second. */
4550 if (no_overlap)
4551 in_p = 1, low = low1, high = high1;
4552 else if (subset || highequal)
4553 in_p = 0, low = high = 0;
4554 else
4555 {
4556 low = range_successor (high0);
4557 high = high1;
4558 in_p = 1;
4559 if (low == 0)
4560 {
4561 /* high1 > high0 but high0 has no successor. Punt. */
4562 return 0;
4563 }
4564 }
4565 }
4566
4567 else
4568 {
4569 /* The case where we are excluding both ranges. Here the complex case
4570 is if they don't overlap. In that case, the only time we have a
4571 range is if they are adjacent. If the second is a subset of the
4572 first, the result is the first. Otherwise, the range to exclude
4573 starts at the beginning of the first range and ends at the end of the
4574 second. */
4575 if (no_overlap)
4576 {
4577 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4578 range_successor (high0),
4579 1, low1, 0)))
4580 in_p = 0, low = low0, high = high1;
4581 else
4582 {
4583 /* Canonicalize - [min, x] into - [-, x]. */
4584 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4585 switch (TREE_CODE (TREE_TYPE (low0)))
4586 {
4587 case ENUMERAL_TYPE:
4588 if (TYPE_PRECISION (TREE_TYPE (low0))
4589 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4590 break;
4591 /* FALLTHROUGH */
4592 case INTEGER_TYPE:
4593 if (tree_int_cst_equal (low0,
4594 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4595 low0 = 0;
4596 break;
4597 case POINTER_TYPE:
4598 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4599 && integer_zerop (low0))
4600 low0 = 0;
4601 break;
4602 default:
4603 break;
4604 }
4605
4606 /* Canonicalize - [x, max] into - [x, -]. */
4607 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4608 switch (TREE_CODE (TREE_TYPE (high1)))
4609 {
4610 case ENUMERAL_TYPE:
4611 if (TYPE_PRECISION (TREE_TYPE (high1))
4612 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4613 break;
4614 /* FALLTHROUGH */
4615 case INTEGER_TYPE:
4616 if (tree_int_cst_equal (high1,
4617 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4618 high1 = 0;
4619 break;
4620 case POINTER_TYPE:
4621 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4622 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4623 high1, 1,
4624 integer_one_node, 1)))
4625 high1 = 0;
4626 break;
4627 default:
4628 break;
4629 }
4630
4631 /* The ranges might also be adjacent between the maximum and
4632 minimum values of the given type. For
4633 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4634 return + [x + 1, y - 1]. */
4635 if (low0 == 0 && high1 == 0)
4636 {
4637 low = range_successor (high0);
4638 high = range_predecessor (low1);
4639 if (low == 0 || high == 0)
4640 return 0;
4641
4642 in_p = 1;
4643 }
4644 else
4645 return 0;
4646 }
4647 }
4648 else if (subset)
4649 in_p = 0, low = low0, high = high0;
4650 else
4651 in_p = 0, low = low0, high = high1;
4652 }
4653
4654 *pin_p = in_p, *plow = low, *phigh = high;
4655 return 1;
4656 }
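/* A worked merge (illustrative bounds): combining "+ [2, 5]" with
"+ [4, 9]" takes the in0_p && in1_p case above; the ranges overlap and
the second is not a subset of the first, so the result is the
intersection "+ [4, 5]", from the start of the second range to the end
of the first. */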
4657 \f
4658
4659 /* Subroutine of fold, looking inside expressions of the form
4660 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4661 of the COND_EXPR. This function is being used also to optimize
4662 A op B ? C : A, by reversing the comparison first.
4663
4664 Return a folded expression whose code is not a COND_EXPR
4665 anymore, or NULL_TREE if no folding opportunity is found. */
4666
4667 static tree
4668 fold_cond_expr_with_comparison (location_t loc, tree type,
4669 tree arg0, tree arg1, tree arg2)
4670 {
4671 enum tree_code comp_code = TREE_CODE (arg0);
4672 tree arg00 = TREE_OPERAND (arg0, 0);
4673 tree arg01 = TREE_OPERAND (arg0, 1);
4674 tree arg1_type = TREE_TYPE (arg1);
4675 tree tem;
4676
4677 STRIP_NOPS (arg1);
4678 STRIP_NOPS (arg2);
4679
4680 /* If we have A op 0 ? A : -A, consider applying the following
4681 transformations:
4682
4683 A == 0? A : -A same as -A
4684 A != 0? A : -A same as A
4685 A >= 0? A : -A same as abs (A)
4686 A > 0? A : -A same as abs (A)
4687 A <= 0? A : -A same as -abs (A)
4688 A < 0? A : -A same as -abs (A)
4689
4690 None of these transformations work for modes with signed
4691 zeros. If A is +/-0, the first two transformations will
4692 change the sign of the result (from +0 to -0, or vice
4693 versa). The last four will fix the sign of the result,
4694 even though the original expressions could be positive or
4695 negative, depending on the sign of A.
4696
4697 Note that all these transformations are correct if A is
4698 NaN, since the two alternatives (A and -A) are also NaNs. */
4699 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4700 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4701 ? real_zerop (arg01)
4702 : integer_zerop (arg01))
4703 && ((TREE_CODE (arg2) == NEGATE_EXPR
4704 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4705 /* In the case that A is of the form X-Y, '-A' (arg2) may
4706 have already been folded to Y-X; check for that. */
4707 || (TREE_CODE (arg1) == MINUS_EXPR
4708 && TREE_CODE (arg2) == MINUS_EXPR
4709 && operand_equal_p (TREE_OPERAND (arg1, 0),
4710 TREE_OPERAND (arg2, 1), 0)
4711 && operand_equal_p (TREE_OPERAND (arg1, 1),
4712 TREE_OPERAND (arg2, 0), 0))))
4713 switch (comp_code)
4714 {
4715 case EQ_EXPR:
4716 case UNEQ_EXPR:
4717 tem = fold_convert_loc (loc, arg1_type, arg1);
4718 return pedantic_non_lvalue_loc (loc,
4719 fold_convert_loc (loc, type,
4720 negate_expr (tem)));
4721 case NE_EXPR:
4722 case LTGT_EXPR:
4723 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4724 case UNGE_EXPR:
4725 case UNGT_EXPR:
4726 if (flag_trapping_math)
4727 break;
4728 /* Fall through. */
4729 case GE_EXPR:
4730 case GT_EXPR:
4731 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4732 arg1 = fold_convert_loc (loc, signed_type_for
4733 (TREE_TYPE (arg1)), arg1);
4734 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4735 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4736 case UNLE_EXPR:
4737 case UNLT_EXPR:
4738 if (flag_trapping_math)
4739 break;
4740 case LE_EXPR:
4741 case LT_EXPR:
4742 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4743 arg1 = fold_convert_loc (loc, signed_type_for
4744 (TREE_TYPE (arg1)), arg1);
4745 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4746 return negate_expr (fold_convert_loc (loc, type, tem));
4747 default:
4748 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4749 break;
4750 }
4751
4752 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4753 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4754 both transformations are correct when A is NaN: A != 0
4755 is then true, and A == 0 is false. */
4756
4757 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4758 && integer_zerop (arg01) && integer_zerop (arg2))
4759 {
4760 if (comp_code == NE_EXPR)
4761 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4762 else if (comp_code == EQ_EXPR)
4763 return build_zero_cst (type);
4764 }
4765
4766 /* Try some transformations of A op B ? A : B.
4767
4768 A == B? A : B same as B
4769 A != B? A : B same as A
4770 A >= B? A : B same as max (A, B)
4771 A > B? A : B same as max (B, A)
4772 A <= B? A : B same as min (A, B)
4773 A < B? A : B same as min (B, A)
4774
4775 As above, these transformations don't work in the presence
4776 of signed zeros. For example, if A and B are zeros of
4777 opposite sign, the first two transformations will change
4778 the sign of the result. In the last four, the original
4779 expressions give different results for (A=+0, B=-0) and
4780 (A=-0, B=+0), but the transformed expressions do not.
4781
4782 The first two transformations are correct if either A or B
4783 is a NaN. In the first transformation, the condition will
4784 be false, and B will indeed be chosen. In the case of the
4785 second transformation, the condition A != B will be true,
4786 and A will be chosen.
4787
4788 The conversions to max() and min() are not correct if B is
4789 a number and A is not. The conditions in the original
4790 expressions will be false, so all four give B. The min()
4791 and max() versions would give a NaN instead. */
4792 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4793 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4794 /* Avoid these transformations if the COND_EXPR may be used
4795 as an lvalue in the C++ front-end. PR c++/19199. */
4796 && (in_gimple_form
4797 || VECTOR_TYPE_P (type)
4798 || (strcmp (lang_hooks.name, "GNU C++") != 0
4799 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4800 || ! maybe_lvalue_p (arg1)
4801 || ! maybe_lvalue_p (arg2)))
4802 {
4803 tree comp_op0 = arg00;
4804 tree comp_op1 = arg01;
4805 tree comp_type = TREE_TYPE (comp_op0);
4806
4807 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4808 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4809 {
4810 comp_type = type;
4811 comp_op0 = arg1;
4812 comp_op1 = arg2;
4813 }
4814
4815 switch (comp_code)
4816 {
4817 case EQ_EXPR:
4818 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4819 case NE_EXPR:
4820 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4821 case LE_EXPR:
4822 case LT_EXPR:
4823 case UNLE_EXPR:
4824 case UNLT_EXPR:
4825 /* In C++ a ?: expression can be an lvalue, so put the
4826 operand which will be used if they are equal first
4827 so that we can convert this back to the
4828 corresponding COND_EXPR. */
4829 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4830 {
4831 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4832 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4833 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4834 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4835 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4836 comp_op1, comp_op0);
4837 return pedantic_non_lvalue_loc (loc,
4838 fold_convert_loc (loc, type, tem));
4839 }
4840 break;
4841 case GE_EXPR:
4842 case GT_EXPR:
4843 case UNGE_EXPR:
4844 case UNGT_EXPR:
4845 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4846 {
4847 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4848 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4849 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4850 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4851 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4852 comp_op1, comp_op0);
4853 return pedantic_non_lvalue_loc (loc,
4854 fold_convert_loc (loc, type, tem));
4855 }
4856 break;
4857 case UNEQ_EXPR:
4858 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4859 return pedantic_non_lvalue_loc (loc,
4860 fold_convert_loc (loc, type, arg2));
4861 break;
4862 case LTGT_EXPR:
4863 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4864 return pedantic_non_lvalue_loc (loc,
4865 fold_convert_loc (loc, type, arg1));
4866 break;
4867 default:
4868 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4869 break;
4870 }
4871 }
4872
4873 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4874 we might still be able to simplify this. For example,
4875 if C1 is one less or one more than C2, this might have started
4876 out as a MIN or MAX and been transformed by this function.
4877 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4878
4879 if (INTEGRAL_TYPE_P (type)
4880 && TREE_CODE (arg01) == INTEGER_CST
4881 && TREE_CODE (arg2) == INTEGER_CST)
4882 switch (comp_code)
4883 {
4884 case EQ_EXPR:
4885 if (TREE_CODE (arg1) == INTEGER_CST)
4886 break;
4887 /* We can replace A with C1 in this case. */
4888 arg1 = fold_convert_loc (loc, type, arg01);
4889 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4890
4891 case LT_EXPR:
4892 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4893 MIN_EXPR, to preserve the signedness of the comparison. */
4894 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4895 OEP_ONLY_CONST)
4896 && operand_equal_p (arg01,
4897 const_binop (PLUS_EXPR, arg2,
4898 build_int_cst (type, 1)),
4899 OEP_ONLY_CONST))
4900 {
4901 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4902 fold_convert_loc (loc, TREE_TYPE (arg00),
4903 arg2));
4904 return pedantic_non_lvalue_loc (loc,
4905 fold_convert_loc (loc, type, tem));
4906 }
4907 break;
4908
4909 case LE_EXPR:
4910 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4911 as above. */
4912 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4913 OEP_ONLY_CONST)
4914 && operand_equal_p (arg01,
4915 const_binop (MINUS_EXPR, arg2,
4916 build_int_cst (type, 1)),
4917 OEP_ONLY_CONST))
4918 {
4919 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4920 fold_convert_loc (loc, TREE_TYPE (arg00),
4921 arg2));
4922 return pedantic_non_lvalue_loc (loc,
4923 fold_convert_loc (loc, type, tem));
4924 }
4925 break;
4926
4927 case GT_EXPR:
4928 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4929 MAX_EXPR, to preserve the signedness of the comparison. */
4930 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4931 OEP_ONLY_CONST)
4932 && operand_equal_p (arg01,
4933 const_binop (MINUS_EXPR, arg2,
4934 build_int_cst (type, 1)),
4935 OEP_ONLY_CONST))
4936 {
4937 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4938 fold_convert_loc (loc, TREE_TYPE (arg00),
4939 arg2));
4940 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4941 }
4942 break;
4943
4944 case GE_EXPR:
4945 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4946 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4947 OEP_ONLY_CONST)
4948 && operand_equal_p (arg01,
4949 const_binop (PLUS_EXPR, arg2,
4950 build_int_cst (type, 1)),
4951 OEP_ONLY_CONST))
4952 {
4953 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4954 fold_convert_loc (loc, TREE_TYPE (arg00),
4955 arg2));
4956 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4957 }
4958 break;
4959 case NE_EXPR:
4960 break;
4961 default:
4962 gcc_unreachable ();
4963 }
4964
4965 return NULL_TREE;
4966 }
4967
4968
4969 \f
4970 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4971 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4972 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4973 false) >= 2)
4974 #endif
4975
4976 /* EXP is some logical combination of boolean tests. See if we can
4977 merge it into some range test. Return the new tree if so. */
4978
4979 static tree
4980 fold_range_test (location_t loc, enum tree_code code, tree type,
4981 tree op0, tree op1)
4982 {
4983 int or_op = (code == TRUTH_ORIF_EXPR
4984 || code == TRUTH_OR_EXPR);
4985 int in0_p, in1_p, in_p;
4986 tree low0, low1, low, high0, high1, high;
4987 bool strict_overflow_p = false;
4988 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4989 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4990 tree tem;
4991 const char * const warnmsg = G_("assuming signed overflow does not occur "
4992 "when simplifying range test");
4993
4994 /* If this is an OR operation, invert both sides; we will invert
4995 again at the end. */
4996 if (or_op)
4997 in0_p = ! in0_p, in1_p = ! in1_p;
4998
4999 /* If both expressions are the same, if we can merge the ranges, and we
5000 can build the range test, return it or its inversion. If one of the
5001 ranges is always true or always false, consider it to be the same
5002 expression as the other. */
5003 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5004 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5005 in1_p, low1, high1)
5006 && 0 != (tem = (build_range_check (loc, type,
5007 lhs != 0 ? lhs
5008 : rhs != 0 ? rhs : integer_zero_node,
5009 in_p, low, high))))
5010 {
5011 if (strict_overflow_p)
5012 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5013 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5014 }
5015
5016 /* On machines where the branch cost is expensive, if this is a
5017 short-circuited branch and the underlying object on both sides
5018 is the same, make a non-short-circuit operation. */
5019 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5020 && lhs != 0 && rhs != 0
5021 && (code == TRUTH_ANDIF_EXPR
5022 || code == TRUTH_ORIF_EXPR)
5023 && operand_equal_p (lhs, rhs, 0))
5024 {
5025 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5026 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5027 which case we can't do this. */
5028 if (simple_operand_p (lhs))
5029 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5030 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5031 type, op0, op1);
5032
5033 else if (!lang_hooks.decls.global_bindings_p ()
5034 && !CONTAINS_PLACEHOLDER_P (lhs))
5035 {
5036 tree common = save_expr (lhs);
5037
5038 if (0 != (lhs = build_range_check (loc, type, common,
5039 or_op ? ! in0_p : in0_p,
5040 low0, high0))
5041 && (0 != (rhs = build_range_check (loc, type, common,
5042 or_op ? ! in1_p : in1_p,
5043 low1, high1))))
5044 {
5045 if (strict_overflow_p)
5046 fold_overflow_warning (warnmsg,
5047 WARN_STRICT_OVERFLOW_COMPARISON);
5048 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5049 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5050 type, lhs, rhs);
5051 }
5052 }
5053 }
5054
5055 return 0;
5056 }
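/* Putting the helpers together (hypothetical CH): for
"ch >= '0' && ch <= '9'", both operands describe ranges of the same
CH, merge_ranges combines them into "+ ['0', '9']", and
build_range_check then emits the branch-free form

(unsigned char) (ch - '0') <= 9

assuming CH is a char-typed variable. */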
5057 \f
5058 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as
5059 a P-bit value. Arrange things so the extra bits will be set to zero if
5060 and only if C is sign-extended to its full width. If MASK is nonzero,
5061 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5062
5063 static tree
5064 unextend (tree c, int p, int unsignedp, tree mask)
5065 {
5066 tree type = TREE_TYPE (c);
5067 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5068 tree temp;
5069
5070 if (p == modesize || unsignedp)
5071 return c;
5072
5073 /* We work by getting just the sign bit into the low-order bit, then
5074 into the high-order bit, then sign-extend. We then XOR that value
5075 with C. */
5076 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5077 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5078
5079 /* We must use a signed type in order to get an arithmetic right shift.
5080 However, we must also avoid introducing accidental overflows, so that
5081 a subsequent call to integer_zerop will work. Hence we must
5082 do the type conversion here. At this point, the constant is either
5083 zero or one, and the conversion to a signed type can never overflow.
5084 We could get an overflow if this conversion is done anywhere else. */
5085 if (TYPE_UNSIGNED (type))
5086 temp = fold_convert (signed_type_for (type), temp);
5087
5088 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5089 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5090 if (mask != 0)
5091 temp = const_binop (BIT_AND_EXPR, temp,
5092 fold_convert (TREE_TYPE (c), mask));
5093 /* If necessary, convert the type back to match the type of C. */
5094 if (TYPE_UNSIGNED (type))
5095 temp = fold_convert (type, temp);
5096
5097 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5098 }
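/* Worked arithmetic (illustrative, assuming UNSIGNEDP zero and MASK
null): with an 8-bit mode and P == 4, unextend of C == 10 (binary
1010, i.e. -6 as a 4-bit signed value) computes TEMP == 0xf0 and
returns C ^ TEMP == 0xfa, the 8-bit sign extension of the 4-bit value;
a nonnegative C such as 5 comes back unchanged. */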
5099 \f
5100 /* For an expression that has the form
5101 (A && B) || ~B
5102 or
5103 (A || B) && ~B,
5104 we can drop one of the inner expressions and simplify to
5105 A || ~B
5106 or
5107 A && ~B
5108 LOC is the location of the resulting expression. OP is the inner
5109 logical operation; the left-hand side in the examples above, while CMPOP
5110 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5111 removing a condition that guards another, as in
5112 (A != NULL && A->...) || A == NULL
5113 which we must not transform. If RHS_ONLY is true, only eliminate the
5114 right-most operand of the inner logical operation. */
5115
5116 static tree
5117 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5118 bool rhs_only)
5119 {
5120 tree type = TREE_TYPE (cmpop);
5121 enum tree_code code = TREE_CODE (cmpop);
5122 enum tree_code truthop_code = TREE_CODE (op);
5123 tree lhs = TREE_OPERAND (op, 0);
5124 tree rhs = TREE_OPERAND (op, 1);
5125 tree orig_lhs = lhs, orig_rhs = rhs;
5126 enum tree_code rhs_code = TREE_CODE (rhs);
5127 enum tree_code lhs_code = TREE_CODE (lhs);
5128 enum tree_code inv_code;
5129
5130 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5131 return NULL_TREE;
5132
5133 if (TREE_CODE_CLASS (code) != tcc_comparison)
5134 return NULL_TREE;
5135
5136 if (rhs_code == truthop_code)
5137 {
5138 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5139 if (newrhs != NULL_TREE)
5140 {
5141 rhs = newrhs;
5142 rhs_code = TREE_CODE (rhs);
5143 }
5144 }
5145 if (lhs_code == truthop_code && !rhs_only)
5146 {
5147 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5148 if (newlhs != NULL_TREE)
5149 {
5150 lhs = newlhs;
5151 lhs_code = TREE_CODE (lhs);
5152 }
5153 }
5154
5155 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5156 if (inv_code == rhs_code
5157 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5158 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5159 return lhs;
5160 if (!rhs_only && inv_code == lhs_code
5161 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5162 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5163 return rhs;
5164 if (rhs != orig_rhs || lhs != orig_lhs)
5165 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5166 lhs, rhs);
5167 return NULL_TREE;
5168 }
5169
5170 /* Find ways of folding logical expressions of LHS and RHS:
5171 Try to merge two comparisons to the same innermost item.
5172 Look for range tests like "ch >= '0' && ch <= '9'".
5173 Look for combinations of simple terms on machines with expensive branches
5174 and evaluate the RHS unconditionally.
5175
5176 For example, if we have p->a == 2 && p->b == 4 and we can make an
5177 object large enough to span both A and B, we can do this with a comparison
5178 against the object ANDed with the a mask.
5179
5180 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5181 operations to do this with one comparison.
5182
5183 We check for both normal comparisons and the BIT_AND_EXPRs made by
5184 this function and the one above.
5185
5186 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5187 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5188
5189 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5190 two operands.
5191
5192 We return the simplified tree or 0 if no optimization is possible. */
5193
5194 static tree
5195 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5196 tree lhs, tree rhs)
5197 {
5198 /* If this is the "or" of two comparisons, we can do something if
5199 the comparisons are NE_EXPR. If this is the "and", we can do something
5200 if the comparisons are EQ_EXPR. I.e.,
5201 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5202
5203 WANTED_CODE is this operation code. For single bit fields, we can
5204 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5205 comparison for one-bit fields. */
5206
5207 enum tree_code wanted_code;
5208 enum tree_code lcode, rcode;
5209 tree ll_arg, lr_arg, rl_arg, rr_arg;
5210 tree ll_inner, lr_inner, rl_inner, rr_inner;
5211 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5212 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5213 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5214 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5215 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5216 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5217 enum machine_mode lnmode, rnmode;
5218 tree ll_mask, lr_mask, rl_mask, rr_mask;
5219 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5220 tree l_const, r_const;
5221 tree lntype, rntype, result;
5222 HOST_WIDE_INT first_bit, end_bit;
5223 int volatilep;
5224
5225 /* Start by getting the comparison codes. Fail if anything is volatile.
5226 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5227 it were surrounded with a NE_EXPR. */
5228
5229 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5230 return 0;
5231
5232 lcode = TREE_CODE (lhs);
5233 rcode = TREE_CODE (rhs);
5234
5235 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5236 {
5237 lhs = build2 (NE_EXPR, truth_type, lhs,
5238 build_int_cst (TREE_TYPE (lhs), 0));
5239 lcode = NE_EXPR;
5240 }
5241
5242 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5243 {
5244 rhs = build2 (NE_EXPR, truth_type, rhs,
5245 build_int_cst (TREE_TYPE (rhs), 0));
5246 rcode = NE_EXPR;
5247 }
5248
5249 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5250 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5251 return 0;
5252
5253 ll_arg = TREE_OPERAND (lhs, 0);
5254 lr_arg = TREE_OPERAND (lhs, 1);
5255 rl_arg = TREE_OPERAND (rhs, 0);
5256 rr_arg = TREE_OPERAND (rhs, 1);
5257
5258 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5259 if (simple_operand_p (ll_arg)
5260 && simple_operand_p (lr_arg))
5261 {
5262 if (operand_equal_p (ll_arg, rl_arg, 0)
5263 && operand_equal_p (lr_arg, rr_arg, 0))
5264 {
5265 result = combine_comparisons (loc, code, lcode, rcode,
5266 truth_type, ll_arg, lr_arg);
5267 if (result)
5268 return result;
5269 }
5270 else if (operand_equal_p (ll_arg, rr_arg, 0)
5271 && operand_equal_p (lr_arg, rl_arg, 0))
5272 {
5273 result = combine_comparisons (loc, code, lcode,
5274 swap_tree_comparison (rcode),
5275 truth_type, ll_arg, lr_arg);
5276 if (result)
5277 return result;
5278 }
5279 }
5280
5281 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5282 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5283
5284 /* If the RHS can be evaluated unconditionally and its operands are
5285 simple, it wins to evaluate the RHS unconditionally on machines
5286 with expensive branches. In this case, this isn't a comparison
5287 that can be merged. */
5288
5289 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5290 false) >= 2
5291 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5292 && simple_operand_p (rl_arg)
5293 && simple_operand_p (rr_arg))
5294 {
5295 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5296 if (code == TRUTH_OR_EXPR
5297 && lcode == NE_EXPR && integer_zerop (lr_arg)
5298 && rcode == NE_EXPR && integer_zerop (rr_arg)
5299 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5300 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5301 return build2_loc (loc, NE_EXPR, truth_type,
5302 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5303 ll_arg, rl_arg),
5304 build_int_cst (TREE_TYPE (ll_arg), 0));
5305
5306 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5307 if (code == TRUTH_AND_EXPR
5308 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5309 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5310 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5311 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5312 return build2_loc (loc, EQ_EXPR, truth_type,
5313 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5314 ll_arg, rl_arg),
5315 build_int_cst (TREE_TYPE (ll_arg), 0));
5316 }
5317
5318 /* See if the comparisons can be merged. Then get all the parameters for
5319 each side. */
5320
5321 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5322 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5323 return 0;
5324
5325 volatilep = 0;
5326 ll_inner = decode_field_reference (loc, ll_arg,
5327 &ll_bitsize, &ll_bitpos, &ll_mode,
5328 &ll_unsignedp, &volatilep, &ll_mask,
5329 &ll_and_mask);
5330 lr_inner = decode_field_reference (loc, lr_arg,
5331 &lr_bitsize, &lr_bitpos, &lr_mode,
5332 &lr_unsignedp, &volatilep, &lr_mask,
5333 &lr_and_mask);
5334 rl_inner = decode_field_reference (loc, rl_arg,
5335 &rl_bitsize, &rl_bitpos, &rl_mode,
5336 &rl_unsignedp, &volatilep, &rl_mask,
5337 &rl_and_mask);
5338 rr_inner = decode_field_reference (loc, rr_arg,
5339 &rr_bitsize, &rr_bitpos, &rr_mode,
5340 &rr_unsignedp, &volatilep, &rr_mask,
5341 &rr_and_mask);
5342
5343 /* The inner operation on the lhs of each comparison must be
5344 the same if we are to be able to do anything. Then see if
5345 we have constants. If not, the same must be true for the
5346 rhs's. */
5347 if (volatilep || ll_inner == 0 || rl_inner == 0
5348 || ! operand_equal_p (ll_inner, rl_inner, 0))
5349 return 0;
5350
5351 if (TREE_CODE (lr_arg) == INTEGER_CST
5352 && TREE_CODE (rr_arg) == INTEGER_CST)
5353 l_const = lr_arg, r_const = rr_arg;
5354 else if (lr_inner == 0 || rr_inner == 0
5355 || ! operand_equal_p (lr_inner, rr_inner, 0))
5356 return 0;
5357 else
5358 l_const = r_const = 0;
5359
5360 /* If either comparison code is not correct for our logical operation,
5361 fail. However, we can convert a one-bit comparison against zero into
5362 the opposite comparison against that bit being set in the field. */
5363
5364 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5365 if (lcode != wanted_code)
5366 {
5367 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5368 {
5369 /* Make the left operand unsigned, since we are only interested
5370 in the value of one bit. Otherwise we are doing the wrong
5371 thing below. */
5372 ll_unsignedp = 1;
5373 l_const = ll_mask;
5374 }
5375 else
5376 return 0;
5377 }
5378
5379 /* This is analogous to the code for l_const above. */
5380 if (rcode != wanted_code)
5381 {
5382 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5383 {
5384 rl_unsignedp = 1;
5385 r_const = rl_mask;
5386 }
5387 else
5388 return 0;
5389 }
5390
5391 /* See if we can find a mode that contains both fields being compared on
5392 the left. If we can't, fail. Otherwise, update all constants and masks
5393 to be relative to a field of that size. */
5394 first_bit = MIN (ll_bitpos, rl_bitpos);
5395 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5396 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5397 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5398 volatilep);
5399 if (lnmode == VOIDmode)
5400 return 0;
5401
5402 lnbitsize = GET_MODE_BITSIZE (lnmode);
5403 lnbitpos = first_bit & ~ (lnbitsize - 1);
5404 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5405 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5406
5407 if (BYTES_BIG_ENDIAN)
5408 {
5409 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5410 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5411 }
5412
5413 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5414 size_int (xll_bitpos));
5415 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5416 size_int (xrl_bitpos));
5417
5418 if (l_const)
5419 {
5420 l_const = fold_convert_loc (loc, lntype, l_const);
5421 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5422 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5423 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5424 fold_build1_loc (loc, BIT_NOT_EXPR,
5425 lntype, ll_mask))))
5426 {
5427 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5428
5429 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5430 }
5431 }
5432 if (r_const)
5433 {
5434 r_const = fold_convert_loc (loc, lntype, r_const);
5435 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5436 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5437 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5438 fold_build1_loc (loc, BIT_NOT_EXPR,
5439 lntype, rl_mask))))
5440 {
5441 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5442
5443 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5444 }
5445 }
5446
5447 /* If the right sides are not constant, do the same for them. Also,
5448 disallow this optimization if a size or signedness mismatch occurs
5449 between the left and right sides. */
5450 if (l_const == 0)
5451 {
5452 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5453 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5454 /* Make sure the two fields on the right
5455 correspond to the left without being swapped. */
5456 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5457 return 0;
5458
5459 first_bit = MIN (lr_bitpos, rr_bitpos);
5460 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5461 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5462 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5463 volatilep);
5464 if (rnmode == VOIDmode)
5465 return 0;
5466
5467 rnbitsize = GET_MODE_BITSIZE (rnmode);
5468 rnbitpos = first_bit & ~ (rnbitsize - 1);
5469 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5470 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5471
5472 if (BYTES_BIG_ENDIAN)
5473 {
5474 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5475 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5476 }
5477
5478 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5479 rntype, lr_mask),
5480 size_int (xlr_bitpos));
5481 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5482 rntype, rr_mask),
5483 size_int (xrr_bitpos));
5484
5485 /* Make a mask that corresponds to both fields being compared.
5486 Do this for both items being compared. If the operands are the
5487 same size and the bits being compared are in the same position
5488 then we can do this by masking both and comparing the masked
5489 results. */
5490 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5491 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5492 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5493 {
5494 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5495 ll_unsignedp || rl_unsignedp);
5496 if (! all_ones_mask_p (ll_mask, lnbitsize))
5497 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5498
5499 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5500 lr_unsignedp || rr_unsignedp);
5501 if (! all_ones_mask_p (lr_mask, rnbitsize))
5502 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5503
5504 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5505 }
5506
5507 /* There is still another way we can do something: If both pairs of
5508 fields being compared are adjacent, we may be able to make a wider
5509 field containing them both.
5510
5511 Note that we still must mask the lhs/rhs expressions. Furthermore,
5512 the mask must be shifted to account for the shift done by
5513 make_bit_field_ref. */
5514 if ((ll_bitsize + ll_bitpos == rl_bitpos
5515 && lr_bitsize + lr_bitpos == rr_bitpos)
5516 || (ll_bitpos == rl_bitpos + rl_bitsize
5517 && lr_bitpos == rr_bitpos + rr_bitsize))
5518 {
5519 tree type;
5520
5521 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5522 ll_bitsize + rl_bitsize,
5523 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5524 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5525 lr_bitsize + rr_bitsize,
5526 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5527
5528 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5529 size_int (MIN (xll_bitpos, xrl_bitpos)));
5530 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5531 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5532
5533 /* Convert to the smaller type before masking out unwanted bits. */
5534 type = lntype;
5535 if (lntype != rntype)
5536 {
5537 if (lnbitsize > rnbitsize)
5538 {
5539 lhs = fold_convert_loc (loc, rntype, lhs);
5540 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5541 type = rntype;
5542 }
5543 else if (lnbitsize < rnbitsize)
5544 {
5545 rhs = fold_convert_loc (loc, lntype, rhs);
5546 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5547 type = lntype;
5548 }
5549 }
5550
5551 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5552 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5553
5554 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5555 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5556
5557 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5558 }
5559
5560 return 0;
5561 }
5562
5563 /* Handle the case of comparisons with constants. If there is something in
5564 common between the masks, those bits of the constants must be the same.
5565 If not, the condition is always false. Test for this to avoid generating
5566 incorrect code below. */
5567 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5568 if (! integer_zerop (result)
5569 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5570 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5571 {
5572 if (wanted_code == NE_EXPR)
5573 {
5574 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5575 return constant_boolean_node (true, truth_type);
5576 }
5577 else
5578 {
5579 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5580 return constant_boolean_node (false, truth_type);
5581 }
5582 }
5583
5584 /* Construct the expression we will return. First get the component
5585 reference we will make. Unless the mask is all ones for the width
5586 of that field, perform the mask operation. Then compare with the
5587 merged constant. */
5588 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5589 ll_unsignedp || rl_unsignedp);
5590
5591 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5592 if (! all_ones_mask_p (ll_mask, lnbitsize))
5593 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5594
5595 return build2_loc (loc, wanted_code, truth_type, result,
5596 const_binop (BIT_IOR_EXPR, l_const, r_const));
5597 }
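
/* Illustrative sketch, not part of this file: the source-level shape
   of the rewrite fold_truth_andor_1 performs.  The struct, the helper
   names and the merged constant are invented for illustration; the
   exact constant depends on endianness and padding.  */

struct example_pair { unsigned char a, b; };

/* Before folding: two loads and two compares.  */
static int
example_before (struct example_pair *p)
{
  return p->a == 2 && p->b == 4;
}

/* After folding, assuming a little-endian target with no padding:
   one 16-bit load compared against the merged constant 0x0402.  */
static int
example_after (struct example_pair *p)
{
  unsigned short word;
  __builtin_memcpy (&word, p, sizeof word);
  return word == 0x0402;
}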
5598 \f
5599 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5600 constant. */
5601
5602 static tree
5603 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5604 tree op0, tree op1)
5605 {
5606 tree arg0 = op0;
5607 enum tree_code op_code;
5608 tree comp_const;
5609 tree minmax_const;
5610 int consts_equal, consts_lt;
5611 tree inner;
5612
5613 STRIP_SIGN_NOPS (arg0);
5614
5615 op_code = TREE_CODE (arg0);
5616 minmax_const = TREE_OPERAND (arg0, 1);
5617 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5618 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5619 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5620 inner = TREE_OPERAND (arg0, 0);
5621
5622 /* If something does not permit us to optimize, return NULL_TREE. */
5623 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5624 || TREE_CODE (comp_const) != INTEGER_CST
5625 || TREE_OVERFLOW (comp_const)
5626 || TREE_CODE (minmax_const) != INTEGER_CST
5627 || TREE_OVERFLOW (minmax_const))
5628 return NULL_TREE;
5629
5630 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5631 and GT_EXPR, doing the rest with recursive calls using logical
5632 simplifications. */
5633 switch (code)
5634 {
5635 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5636 {
5637 tree tem
5638 = optimize_minmax_comparison (loc,
5639 invert_tree_comparison (code, false),
5640 type, op0, op1);
5641 if (tem)
5642 return invert_truthvalue_loc (loc, tem);
5643 return NULL_TREE;
5644 }
5645
5646 case GE_EXPR:
5647 return
5648 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5649 optimize_minmax_comparison
5650 (loc, EQ_EXPR, type, arg0, comp_const),
5651 optimize_minmax_comparison
5652 (loc, GT_EXPR, type, arg0, comp_const));
5653
5654 case EQ_EXPR:
5655 if (op_code == MAX_EXPR && consts_equal)
5656 /* MAX (X, 0) == 0 -> X <= 0 */
5657 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5658
5659 else if (op_code == MAX_EXPR && consts_lt)
5660 /* MAX (X, 0) == 5 -> X == 5 */
5661 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5662
5663 else if (op_code == MAX_EXPR)
5664 /* MAX (X, 0) == -1 -> false */
5665 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5666
5667 else if (consts_equal)
5668 /* MIN (X, 0) == 0 -> X >= 0 */
5669 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5670
5671 else if (consts_lt)
5672 /* MIN (X, 0) == 5 -> false */
5673 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5674
5675 else
5676 /* MIN (X, 0) == -1 -> X == -1 */
5677 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5678
5679 case GT_EXPR:
5680 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5681 /* MAX (X, 0) > 0 -> X > 0
5682 MAX (X, 0) > 5 -> X > 5 */
5683 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5684
5685 else if (op_code == MAX_EXPR)
5686 /* MAX (X, 0) > -1 -> true */
5687 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5688
5689 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5690 /* MIN (X, 0) > 0 -> false
5691 MIN (X, 0) > 5 -> false */
5692 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5693
5694 else
5695 /* MIN (X, 0) > -1 -> X > -1 */
5696 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5697
5698 default:
5699 return NULL_TREE;
5700 }
5701 }
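
/* Illustrative sketch, not part of this file: concrete instances of
   the rewrites above.  The MAX/MIN macros and the checker are invented
   for illustration; the function returns nonzero iff every equivalence
   holds for this X, so exercising it over the int range validates the
   folds.  */

#define EXAMPLE_MAX(a, b) ((a) > (b) ? (a) : (b))
#define EXAMPLE_MIN(a, b) ((a) < (b) ? (a) : (b))

static int
check_minmax_folds (int x)
{
  return ((EXAMPLE_MAX (x, 4) == 4) == (x <= 4))	/* MAX == C, consts equal */
	 && ((EXAMPLE_MAX (x, 4) == 9) == (x == 9))	/* MAX == C, C larger */
	 && ((EXAMPLE_MAX (x, 4) == 1) == 0)		/* MAX == C, C smaller */
	 && ((EXAMPLE_MIN (x, 4) > 1) == (x > 1))	/* MIN > C, C smaller */
	 && ((EXAMPLE_MIN (x, 4) > 9) == 0);		/* MIN > C, C larger */
}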
5702 \f
5703 /* T is an integer expression that is being multiplied by, divided by,
5704 or taken modulo (CODE says which and what kind of divide or modulus) a
5705 constant C. See if we can eliminate that operation by folding it with
5706 other operations already in T. WIDE_TYPE, if non-null, is a type that
5707 should be used for the computation if wider than our type.
5708
5709 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5710 (X * 2) + (Y * 4). We must, however, be assured that either the original
5711 expression would not overflow or that overflow is undefined for the type
5712 in the language in question.
5713
5714 If we return a non-null expression, it is an equivalent form of the
5715 original computation, but need not be in the original type.
5716
5717 We set *STRICT_OVERFLOW_P to true if the return value depends on
5718 signed overflow being undefined. Otherwise we do not change
5719 *STRICT_OVERFLOW_P. */
5720
5721 static tree
5722 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5723 bool *strict_overflow_p)
5724 {
5725 /* To avoid exponential search depth, refuse to allow recursion past
5726 three levels. Beyond that (1) it's highly unlikely that we'll find
5727 something interesting and (2) we've probably processed it before
5728 when we built the inner expression. */
5729
5730 static int depth;
5731 tree ret;
5732
5733 if (depth > 3)
5734 return NULL;
5735
5736 depth++;
5737 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5738 depth--;
5739
5740 return ret;
5741 }
5742
5743 static tree
5744 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5745 bool *strict_overflow_p)
5746 {
5747 tree type = TREE_TYPE (t);
5748 enum tree_code tcode = TREE_CODE (t);
5749 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5750 > GET_MODE_SIZE (TYPE_MODE (type)))
5751 ? wide_type : type);
5752 tree t1, t2;
5753 int same_p = tcode == code;
5754 tree op0 = NULL_TREE, op1 = NULL_TREE;
5755 bool sub_strict_overflow_p;
5756
5757 /* Don't deal with constants of zero here; they confuse the code below. */
5758 if (integer_zerop (c))
5759 return NULL_TREE;
5760
5761 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5762 op0 = TREE_OPERAND (t, 0);
5763
5764 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5765 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5766
5767 /* Note that we need not handle conditional operations here since fold
5768 already handles those cases. So just do arithmetic here. */
5769 switch (tcode)
5770 {
5771 case INTEGER_CST:
5772 /* For a constant, we can always simplify if we are a multiply
5773 or (for divide and modulus) if it is a multiple of our constant. */
5774 if (code == MULT_EXPR
5775 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5776 return const_binop (code, fold_convert (ctype, t),
5777 fold_convert (ctype, c));
5778 break;
5779
5780 CASE_CONVERT: case NON_LVALUE_EXPR:
5781 /* If op0 is an expression ... */
5782 if ((COMPARISON_CLASS_P (op0)
5783 || UNARY_CLASS_P (op0)
5784 || BINARY_CLASS_P (op0)
5785 || VL_EXP_CLASS_P (op0)
5786 || EXPRESSION_CLASS_P (op0))
5787 /* ... and has wrapping overflow, and its type is smaller
5788 than ctype, then we cannot pass through as widening. */
5789 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5790 && (TYPE_PRECISION (ctype)
5791 > TYPE_PRECISION (TREE_TYPE (op0))))
5792 /* ... or this is a truncation (t is narrower than op0),
5793 then we cannot pass through this narrowing. */
5794 || (TYPE_PRECISION (type)
5795 < TYPE_PRECISION (TREE_TYPE (op0)))
5796 /* ... or signedness changes for division or modulus,
5797 then we cannot pass through this conversion. */
5798 || (code != MULT_EXPR
5799 && (TYPE_UNSIGNED (ctype)
5800 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5801 /* ... or has undefined overflow while the converted to
5802 type has not, we cannot do the operation in the inner type
5803 as that would introduce undefined overflow. */
5804 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5805 && !TYPE_OVERFLOW_UNDEFINED (type))))
5806 break;
5807
5808 /* Pass the constant down and see if we can make a simplification. If
5809 we can, replace this expression with the inner simplification for
5810 possible later conversion to our or some other type. */
5811 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5812 && TREE_CODE (t2) == INTEGER_CST
5813 && !TREE_OVERFLOW (t2)
5814 && (0 != (t1 = extract_muldiv (op0, t2, code,
5815 code == MULT_EXPR
5816 ? ctype : NULL_TREE,
5817 strict_overflow_p))))
5818 return t1;
5819 break;
5820
5821 case ABS_EXPR:
5822 /* If widening the type changes it from signed to unsigned, then we
5823 must avoid building ABS_EXPR itself as unsigned. */
5824 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5825 {
5826 tree cstype = (*signed_type_for) (ctype);
5827 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5828 != 0)
5829 {
5830 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5831 return fold_convert (ctype, t1);
5832 }
5833 break;
5834 }
5835 /* If the constant is negative, we cannot simplify this. */
5836 if (tree_int_cst_sgn (c) == -1)
5837 break;
5838 /* FALLTHROUGH */
5839 case NEGATE_EXPR:
5840 /* For division and modulus, type can't be unsigned, as e.g.
5841 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5842 For signed types, even with wrapping overflow, this is fine. */
5843 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5844 break;
5845 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5846 != 0)
5847 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5848 break;
5849
5850 case MIN_EXPR: case MAX_EXPR:
5851 /* If widening the type changes the signedness, then we can't perform
5852 this optimization as that changes the result. */
5853 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5854 break;
5855
5856 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5857 sub_strict_overflow_p = false;
5858 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5859 &sub_strict_overflow_p)) != 0
5860 && (t2 = extract_muldiv (op1, c, code, wide_type,
5861 &sub_strict_overflow_p)) != 0)
5862 {
5863 if (tree_int_cst_sgn (c) < 0)
5864 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5865 if (sub_strict_overflow_p)
5866 *strict_overflow_p = true;
5867 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5868 fold_convert (ctype, t2));
5869 }
5870 break;
5871
5872 case LSHIFT_EXPR: case RSHIFT_EXPR:
5873 /* If the second operand is constant, this is a multiplication
5874 or floor division by a power of two, so we can treat it that
5875 way unless the multiplier or divisor overflows. Signed
5876 left-shift overflow is implementation-defined rather than
5877 undefined in C90, so do not convert signed left shift into
5878 multiplication. */
5879 if (TREE_CODE (op1) == INTEGER_CST
5880 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5881 /* const_binop may not detect overflow correctly,
5882 so check for it explicitly here. */
5883 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5884 && TREE_INT_CST_HIGH (op1) == 0
5885 && 0 != (t1 = fold_convert (ctype,
5886 const_binop (LSHIFT_EXPR,
5887 size_one_node,
5888 op1)))
5889 && !TREE_OVERFLOW (t1))
5890 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5891 ? MULT_EXPR : FLOOR_DIV_EXPR,
5892 ctype,
5893 fold_convert (ctype, op0),
5894 t1),
5895 c, code, wide_type, strict_overflow_p);
5896 break;
5897
5898 case PLUS_EXPR: case MINUS_EXPR:
5899 /* See if we can eliminate the operation on both sides. If we can, we
5900 can return a new PLUS or MINUS. If we can't, the only remaining
5901 cases where we can do anything are if the second operand is a
5902 constant. */
5903 sub_strict_overflow_p = false;
5904 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5905 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5906 if (t1 != 0 && t2 != 0
5907 && (code == MULT_EXPR
5908 /* If not multiplication, we can only do this if both operands
5909 are divisible by c. */
5910 || (multiple_of_p (ctype, op0, c)
5911 && multiple_of_p (ctype, op1, c))))
5912 {
5913 if (sub_strict_overflow_p)
5914 *strict_overflow_p = true;
5915 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5916 fold_convert (ctype, t2));
5917 }
5918
5919 /* If this was a subtraction, negate OP1 and set it to be an addition.
5920 This simplifies the logic below. */
5921 if (tcode == MINUS_EXPR)
5922 {
5923 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5924 /* If OP1 was not easily negatable, the constant may be OP0. */
5925 if (TREE_CODE (op0) == INTEGER_CST)
5926 {
5927 tree tem = op0;
5928 op0 = op1;
5929 op1 = tem;
5930 tem = t1;
5931 t1 = t2;
5932 t2 = tem;
5933 }
5934 }
5935
5936 if (TREE_CODE (op1) != INTEGER_CST)
5937 break;
5938
5939 /* If either OP1 or C are negative, this optimization is not safe for
5940 some of the division and remainder types while for others we need
5941 to change the code. */
5942 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5943 {
5944 if (code == CEIL_DIV_EXPR)
5945 code = FLOOR_DIV_EXPR;
5946 else if (code == FLOOR_DIV_EXPR)
5947 code = CEIL_DIV_EXPR;
5948 else if (code != MULT_EXPR
5949 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5950 break;
5951 }
5952
5953 /* If it's a multiply or a division/modulus operation of a multiple
5954 of our constant, do the operation and verify it doesn't overflow. */
5955 if (code == MULT_EXPR
5956 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5957 {
5958 op1 = const_binop (code, fold_convert (ctype, op1),
5959 fold_convert (ctype, c));
5960 /* We allow the constant to overflow with wrapping semantics. */
5961 if (op1 == 0
5962 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5963 break;
5964 }
5965 else
5966 break;
5967
5968 /* If we have an unsigned type, we cannot widen the operation since it
5969 will change the result if the original computation overflowed. */
5970 if (TYPE_UNSIGNED (ctype) && ctype != type)
5971 break;
5972
5973 /* If we were able to eliminate our operation from the first side,
5974 apply our operation to the second side and reform the PLUS. */
5975 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5976 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5977
5978 /* The last case is if we are a multiply. In that case, we can
5979 apply the distributive law to commute the multiply and addition
5980 if the multiplication of the constants doesn't overflow
5981 and overflow is defined. With undefined overflow
5982 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5983 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5984 return fold_build2 (tcode, ctype,
5985 fold_build2 (code, ctype,
5986 fold_convert (ctype, op0),
5987 fold_convert (ctype, c)),
5988 op1);
5989
5990 break;
5991
5992 case MULT_EXPR:
5993 /* We have a special case here if we are doing something like
5994 (C * 8) % 4 since we know that's zero. */
5995 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5996 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5997 /* If the multiplication can overflow we cannot optimize this. */
5998 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5999 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6000 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6001 {
6002 *strict_overflow_p = true;
6003 return omit_one_operand (type, integer_zero_node, op0);
6004 }
6005
6006 /* ... fall through ... */
6007
6008 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6009 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6010 /* If we can extract our operation from the LHS, do so and return a
6011 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6012 do something only if the second operand is a constant. */
6013 if (same_p
6014 && (t1 = extract_muldiv (op0, c, code, wide_type,
6015 strict_overflow_p)) != 0)
6016 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6017 fold_convert (ctype, op1));
6018 else if (tcode == MULT_EXPR && code == MULT_EXPR
6019 && (t1 = extract_muldiv (op1, c, code, wide_type,
6020 strict_overflow_p)) != 0)
6021 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6022 fold_convert (ctype, t1));
6023 else if (TREE_CODE (op1) != INTEGER_CST)
6024 return 0;
6025
6026 /* If these are the same operation types, we can associate them
6027 assuming no overflow. */
6028 if (tcode == code)
6029 {
6030 double_int mul;
6031 bool overflow_p;
6032 unsigned prec = TYPE_PRECISION (ctype);
6033 bool uns = TYPE_UNSIGNED (ctype);
6034 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6035 double_int dic = tree_to_double_int (c).ext (prec, uns);
6036 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6037 overflow_p = ((!uns && overflow_p)
6038 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6039 if (!double_int_fits_to_tree_p (ctype, mul)
6040 && ((uns && tcode != MULT_EXPR) || !uns))
6041 overflow_p = 1;
6042 if (!overflow_p)
6043 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6044 double_int_to_tree (ctype, mul));
6045 }
6046
6047 /* If these operations "cancel" each other, we have the main
6048 optimizations of this pass, which occur when either constant is a
6049 multiple of the other, in which case we replace this with either an
6050 operation of CODE or TCODE.
6051
6052 If we have an unsigned type, we cannot do this since it will change
6053 the result if the original computation overflowed. */
6054 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6055 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6056 || (tcode == MULT_EXPR
6057 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6058 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6059 && code != MULT_EXPR)))
6060 {
6061 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6062 {
6063 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6064 *strict_overflow_p = true;
6065 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6066 fold_convert (ctype,
6067 const_binop (TRUNC_DIV_EXPR,
6068 op1, c)));
6069 }
6070 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6071 {
6072 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6073 *strict_overflow_p = true;
6074 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6075 fold_convert (ctype,
6076 const_binop (TRUNC_DIV_EXPR,
6077 c, op1)));
6078 }
6079 }
6080 break;
6081
6082 default:
6083 break;
6084 }
6085
6086 return 0;
6087 }
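
/* Illustrative sketch, not part of this file: the headline
   extract_muldiv rewrite, with invented helper names.  The fold is
   valid because signed overflow is undefined, so the compiler may
   assume the original sum does not overflow.  */

/* Before: multiply both terms, add, then divide.  */
static int
muldiv_before (int x, int y)
{
  return (x * 8 + y * 16) / 4;
}

/* After: the division has been folded into the inner multiplications.  */
static int
muldiv_after (int x, int y)
{
  return x * 2 + y * 4;
}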
6088 \f
6089 /* Return a node which has the indicated constant VALUE (either 0 or
6090 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6091 and is of the indicated TYPE. */
6092
6093 tree
6094 constant_boolean_node (bool value, tree type)
6095 {
6096 if (type == integer_type_node)
6097 return value ? integer_one_node : integer_zero_node;
6098 else if (type == boolean_type_node)
6099 return value ? boolean_true_node : boolean_false_node;
6100 else if (TREE_CODE (type) == VECTOR_TYPE)
6101 return build_vector_from_val (type,
6102 build_int_cst (TREE_TYPE (type),
6103 value ? -1 : 0));
6104 else
6105 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6106 }
6107
6108
6109 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6110 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6111 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6112 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6113 COND is the first argument to CODE; otherwise (as in the example
6114 given here), it is the second argument. TYPE is the type of the
6115 original expression. Return NULL_TREE if no simplification is
6116 possible. */
6117
6118 static tree
6119 fold_binary_op_with_conditional_arg (location_t loc,
6120 enum tree_code code,
6121 tree type, tree op0, tree op1,
6122 tree cond, tree arg, int cond_first_p)
6123 {
6124 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6125 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6126 tree test, true_value, false_value;
6127 tree lhs = NULL_TREE;
6128 tree rhs = NULL_TREE;
6129 enum tree_code cond_code = COND_EXPR;
6130
6131 if (TREE_CODE (cond) == COND_EXPR
6132 || TREE_CODE (cond) == VEC_COND_EXPR)
6133 {
6134 test = TREE_OPERAND (cond, 0);
6135 true_value = TREE_OPERAND (cond, 1);
6136 false_value = TREE_OPERAND (cond, 2);
6137 /* If this operand is an expression with void type (e.g. a throw),
6138 then it does not make sense to try to perform a logical or
6139 arithmetic operation involving it. */
6140 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6141 lhs = true_value;
6142 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6143 rhs = false_value;
6144 }
6145 else
6146 {
6147 tree testtype = TREE_TYPE (cond);
6148 test = cond;
6149 true_value = constant_boolean_node (true, testtype);
6150 false_value = constant_boolean_node (false, testtype);
6151 }
6152
6153 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6154 cond_code = VEC_COND_EXPR;
6155
6156 /* This transformation is only worthwhile if we don't have to wrap ARG
6157 in a SAVE_EXPR and the operation can be simplified without recursing
6158 on at least one of the branches once it's pushed inside the COND_EXPR. */
6159 if (!TREE_CONSTANT (arg)
6160 && (TREE_SIDE_EFFECTS (arg)
6161 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6162 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6163 return NULL_TREE;
6164
6165 arg = fold_convert_loc (loc, arg_type, arg);
6166 if (lhs == 0)
6167 {
6168 true_value = fold_convert_loc (loc, cond_type, true_value);
6169 if (cond_first_p)
6170 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6171 else
6172 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6173 }
6174 if (rhs == 0)
6175 {
6176 false_value = fold_convert_loc (loc, cond_type, false_value);
6177 if (cond_first_p)
6178 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6179 else
6180 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6181 }
6182
6183 /* Check that we have simplified at least one of the branches. */
6184 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6185 return NULL_TREE;
6186
6187 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6188 }
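
/* Illustrative sketch, not part of this file: the distribution done
   above, with invented helper names.  The fold is only kept when at
   least one resulting arm simplifies, as the final check in the
   function enforces.  */

/* Before: add a flag that is itself a comparison.  */
static int
cond_arg_before (int a, int x, int y)
{
  return a + (x < y);
}

/* After: the addition is pushed into both arms of the condition.  */
static int
cond_arg_after (int a, int x, int y)
{
  return (x < y) ? a + 1 : a + 0;
}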
6189
6190 \f
6191 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6192
6193 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6194 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6195 ADDEND is the same as X.
6196
6197 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6198 and finite. The problematic cases are when X is zero, and its mode
6199 has signed zeros. In the case of rounding towards -infinity,
6200 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6201 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6202
6203 bool
6204 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6205 {
6206 if (!real_zerop (addend))
6207 return false;
6208
6209 /* Don't allow the fold with -fsignaling-nans. */
6210 if (HONOR_SNANS (TYPE_MODE (type)))
6211 return false;
6212
6213 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6214 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6215 return true;
6216
6217 /* In a vector or complex, we would need to check the sign of all zeros. */
6218 if (TREE_CODE (addend) != REAL_CST)
6219 return false;
6220
6221 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6222 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6223 negate = !negate;
6224
6225 /* The mode has signed zeros, and we have to honor their sign.
6226 In this situation, there is only one case we can return true for.
6227 X - 0 is the same as X unless rounding towards -infinity is
6228 supported. */
6229 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6230 }
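
/* Illustrative sketch, not part of this file: why the sign of zero
   matters.  With IEEE signed zeros in round-to-nearest, -0.0 + 0.0
   yields +0.0, so folding X + 0.0 to X would be wrong when X is -0.0;
   X - 0.0 is safe there because -0.0 - 0.0 yields -0.0.  */

static double
add_zero (double x)	/* Not foldable to X when zeros are honored.  */
{
  return x + 0.0;	/* add_zero (-0.0) is +0.0, not -0.0.  */
}

static double
sub_zero (double x)	/* Foldable to X in the default rounding mode.  */
{
  return x - 0.0;
}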
6231
6232 /* Subroutine of fold() that checks comparisons of built-in math
6233 functions against real constants.
6234
6235 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6236 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6237 is the type of the result and ARG0 and ARG1 are the operands of the
6238 comparison. ARG1 must be a TREE_REAL_CST.
6239
6240 The function returns the constant folded tree if a simplification
6241 can be made, and NULL_TREE otherwise. */
6242
6243 static tree
6244 fold_mathfn_compare (location_t loc,
6245 enum built_in_function fcode, enum tree_code code,
6246 tree type, tree arg0, tree arg1)
6247 {
6248 REAL_VALUE_TYPE c;
6249
6250 if (BUILTIN_SQRT_P (fcode))
6251 {
6252 tree arg = CALL_EXPR_ARG (arg0, 0);
6253 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6254
6255 c = TREE_REAL_CST (arg1);
6256 if (REAL_VALUE_NEGATIVE (c))
6257 {
6258 /* sqrt(x) < y (likewise == y and <= y) is always false, if y is negative. */
6259 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6260 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6261
6262 /* sqrt(x) > y is always true, if y is negative and we
6263 don't care about NaNs, i.e. negative values of x. */
6264 if (code == NE_EXPR || !HONOR_NANS (mode))
6265 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6266
6267 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6268 return fold_build2_loc (loc, GE_EXPR, type, arg,
6269 build_real (TREE_TYPE (arg), dconst0));
6270 }
6271 else if (code == GT_EXPR || code == GE_EXPR)
6272 {
6273 REAL_VALUE_TYPE c2;
6274
6275 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6276 real_convert (&c2, mode, &c2);
6277
6278 if (REAL_VALUE_ISINF (c2))
6279 {
6280 /* sqrt(x) > y is x == +Inf, when y is very large. */
6281 if (HONOR_INFINITIES (mode))
6282 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6283 build_real (TREE_TYPE (arg), c2));
6284
6285 /* sqrt(x) > y is always false, when y is very large
6286 and we don't care about infinities. */
6287 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6288 }
6289
6290 /* sqrt(x) > c is the same as x > c*c. */
6291 return fold_build2_loc (loc, code, type, arg,
6292 build_real (TREE_TYPE (arg), c2));
6293 }
6294 else if (code == LT_EXPR || code == LE_EXPR)
6295 {
6296 REAL_VALUE_TYPE c2;
6297
6298 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6299 real_convert (&c2, mode, &c2);
6300
6301 if (REAL_VALUE_ISINF (c2))
6302 {
6303 /* sqrt(x) < y is always true, when y is a very large
6304 value and we don't care about NaNs or Infinities. */
6305 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6306 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6307
6308 /* sqrt(x) < y is x != +Inf when y is very large and we
6309 don't care about NaNs. */
6310 if (! HONOR_NANS (mode))
6311 return fold_build2_loc (loc, NE_EXPR, type, arg,
6312 build_real (TREE_TYPE (arg), c2));
6313
6314 /* sqrt(x) < y is x >= 0 when y is very large and we
6315 don't care about Infinities. */
6316 if (! HONOR_INFINITIES (mode))
6317 return fold_build2_loc (loc, GE_EXPR, type, arg,
6318 build_real (TREE_TYPE (arg), dconst0));
6319
6320 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6321 arg = save_expr (arg);
6322 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6323 fold_build2_loc (loc, GE_EXPR, type, arg,
6324 build_real (TREE_TYPE (arg),
6325 dconst0)),
6326 fold_build2_loc (loc, NE_EXPR, type, arg,
6327 build_real (TREE_TYPE (arg),
6328 c2)));
6329 }
6330
6331 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6332 if (! HONOR_NANS (mode))
6333 return fold_build2_loc (loc, code, type, arg,
6334 build_real (TREE_TYPE (arg), c2));
6335
6336 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6337 arg = save_expr (arg);
6338 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6339 fold_build2_loc (loc, GE_EXPR, type, arg,
6340 build_real (TREE_TYPE (arg),
6341 dconst0)),
6342 fold_build2_loc (loc, code, type, arg,
6343 build_real (TREE_TYPE (arg),
6344 c2)));
6345 }
6346 }
6347
6348 return NULL_TREE;
6349 }
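
/* Illustrative sketch, not part of this file: the core sqrt rewrite,
   with invented helper names.  For a nonnegative constant c whose
   square does not overflow to infinity, sqrt(x) > c and x > c*c agree
   even for NaN operands (both compares are false).  */

static int
sqrt_cmp_before (double x)
{
  return __builtin_sqrt (x) > 4.0;
}

static int
sqrt_cmp_after (double x)
{
  return x > 16.0;
}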
6350
6351 /* Subroutine of fold() that optimizes comparisons against Infinities,
6352 either +Inf or -Inf.
6353
6354 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6355 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6356 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6357
6358 The function returns the constant folded tree if a simplification
6359 can be made, and NULL_TREE otherwise. */
6360
6361 static tree
6362 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6363 tree arg0, tree arg1)
6364 {
6365 enum machine_mode mode;
6366 REAL_VALUE_TYPE max;
6367 tree temp;
6368 bool neg;
6369
6370 mode = TYPE_MODE (TREE_TYPE (arg0));
6371
6372 /* For negative infinity swap the sense of the comparison. */
6373 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6374 if (neg)
6375 code = swap_tree_comparison (code);
6376
6377 switch (code)
6378 {
6379 case GT_EXPR:
6380 /* x > +Inf is always false, if we ignore sNaNs. */
6381 if (HONOR_SNANS (mode))
6382 return NULL_TREE;
6383 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6384
6385 case LE_EXPR:
6386 /* x <= +Inf is always true, if we don't care about NaNs. */
6387 if (! HONOR_NANS (mode))
6388 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6389
6390 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6391 arg0 = save_expr (arg0);
6392 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6393
6394 case EQ_EXPR:
6395 case GE_EXPR:
6396 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6397 real_maxval (&max, neg, mode);
6398 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6399 arg0, build_real (TREE_TYPE (arg0), max));
6400
6401 case LT_EXPR:
6402 /* x < +Inf is always equal to x <= DBL_MAX. */
6403 real_maxval (&max, neg, mode);
6404 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6405 arg0, build_real (TREE_TYPE (arg0), max));
6406
6407 case NE_EXPR:
6408 /* x != +Inf is always equal to !(x > DBL_MAX). */
6409 real_maxval (&max, neg, mode);
6410 if (! HONOR_NANS (mode))
6411 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6412 arg0, build_real (TREE_TYPE (arg0), max));
6413
6414 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6415 arg0, build_real (TREE_TYPE (arg0), max));
6416 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6417
6418 default:
6419 break;
6420 }
6421
6422 return NULL_TREE;
6423 }
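
/* Illustrative sketch, not part of this file: a comparison against
   +Inf becomes a comparison against the largest finite value, DBL_MAX
   (spelled as a hex literal to stay self-contained).  NaN fails both
   forms, so no NaN caveat is needed for this direction.  */

static int
lt_inf_before (double x)
{
  return x < __builtin_inf ();
}

static int
lt_inf_after (double x)
{
  return x <= 0x1.fffffffffffffp+1023;	/* DBL_MAX */
}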
6424
6425 /* Subroutine of fold() that optimizes comparisons of a division by
6426 a nonzero integer constant against an integer constant, i.e.
6427 X/C1 op C2.
6428
6429 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6430 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6431 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6432
6433 The function returns the constant folded tree if a simplification
6434 can be made, and NULL_TREE otherwise. */
6435
6436 static tree
6437 fold_div_compare (location_t loc,
6438 enum tree_code code, tree type, tree arg0, tree arg1)
6439 {
6440 tree prod, tmp, hi, lo;
6441 tree arg00 = TREE_OPERAND (arg0, 0);
6442 tree arg01 = TREE_OPERAND (arg0, 1);
6443 double_int val;
6444 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6445 bool neg_overflow;
6446 bool overflow;
6447
6448 /* We have to do this the hard way to detect unsigned overflow.
6449 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6450 val = TREE_INT_CST (arg01)
6451 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6452 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6453 neg_overflow = false;
6454
6455 if (unsigned_p)
6456 {
6457 tmp = int_const_binop (MINUS_EXPR, arg01,
6458 build_int_cst (TREE_TYPE (arg01), 1));
6459 lo = prod;
6460
6461 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6462 val = TREE_INT_CST (prod)
6463 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6464 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6465 -1, overflow | TREE_OVERFLOW (prod));
6466 }
6467 else if (tree_int_cst_sgn (arg01) >= 0)
6468 {
6469 tmp = int_const_binop (MINUS_EXPR, arg01,
6470 build_int_cst (TREE_TYPE (arg01), 1));
6471 switch (tree_int_cst_sgn (arg1))
6472 {
6473 case -1:
6474 neg_overflow = true;
6475 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6476 hi = prod;
6477 break;
6478
6479 case 0:
6480 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6481 hi = tmp;
6482 break;
6483
6484 case 1:
6485 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6486 lo = prod;
6487 break;
6488
6489 default:
6490 gcc_unreachable ();
6491 }
6492 }
6493 else
6494 {
6495 /* A negative divisor reverses the relational operators. */
6496 code = swap_tree_comparison (code);
6497
6498 tmp = int_const_binop (PLUS_EXPR, arg01,
6499 build_int_cst (TREE_TYPE (arg01), 1));
6500 switch (tree_int_cst_sgn (arg1))
6501 {
6502 case -1:
6503 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6504 lo = prod;
6505 break;
6506
6507 case 0:
6508 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6509 lo = tmp;
6510 break;
6511
6512 case 1:
6513 neg_overflow = true;
6514 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6515 hi = prod;
6516 break;
6517
6518 default:
6519 gcc_unreachable ();
6520 }
6521 }
6522
6523 switch (code)
6524 {
6525 case EQ_EXPR:
6526 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6527 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6528 if (TREE_OVERFLOW (hi))
6529 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6530 if (TREE_OVERFLOW (lo))
6531 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6532 return build_range_check (loc, type, arg00, 1, lo, hi);
6533
6534 case NE_EXPR:
6535 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6536 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6537 if (TREE_OVERFLOW (hi))
6538 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6539 if (TREE_OVERFLOW (lo))
6540 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6541 return build_range_check (loc, type, arg00, 0, lo, hi);
6542
6543 case LT_EXPR:
6544 if (TREE_OVERFLOW (lo))
6545 {
6546 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6547 return omit_one_operand_loc (loc, type, tmp, arg00);
6548 }
6549 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6550
6551 case LE_EXPR:
6552 if (TREE_OVERFLOW (hi))
6553 {
6554 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6555 return omit_one_operand_loc (loc, type, tmp, arg00);
6556 }
6557 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6558
6559 case GT_EXPR:
6560 if (TREE_OVERFLOW (hi))
6561 {
6562 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6563 return omit_one_operand_loc (loc, type, tmp, arg00);
6564 }
6565 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6566
6567 case GE_EXPR:
6568 if (TREE_OVERFLOW (lo))
6569 {
6570 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6571 return omit_one_operand_loc (loc, type, tmp, arg00);
6572 }
6573 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6574
6575 default:
6576 break;
6577 }
6578
6579 return NULL_TREE;
6580 }
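
/* Illustrative sketch, not part of this file: the division compare
   becomes a range check.  For unsigned X, X / 4 == 5 holds exactly
   for X in [20, 23]; build_range_check later emits the canonical
   subtract-and-compare form.  */

static int
div_cmp_before (unsigned x)
{
  return x / 4 == 5;
}

static int
div_cmp_after (unsigned x)
{
  return x - 20 <= 3;	/* Unsigned rendering of 20 <= x && x <= 23.  */
}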
6581
6582
6583 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6584 equality/inequality test, then return a simplified form of the test
6585 using a sign testing. Otherwise return NULL. TYPE is the desired
6586 result type. */
6587
6588 static tree
6589 fold_single_bit_test_into_sign_test (location_t loc,
6590 enum tree_code code, tree arg0, tree arg1,
6591 tree result_type)
6592 {
6593 /* If this is testing a single bit, we can optimize the test. */
6594 if ((code == NE_EXPR || code == EQ_EXPR)
6595 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6596 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6597 {
6598 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6599 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6600 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6601
6602 if (arg00 != NULL_TREE
6603 /* This is only a win if casting to a signed type is cheap,
6604 i.e. when arg00's type is not a partial mode. */
6605 && TYPE_PRECISION (TREE_TYPE (arg00))
6606 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6607 {
6608 tree stype = signed_type_for (TREE_TYPE (arg00));
6609 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6610 result_type,
6611 fold_convert_loc (loc, stype, arg00),
6612 build_int_cst (stype, 0));
6613 }
6614 }
6615
6616 return NULL_TREE;
6617 }
6618
6619 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6620 equality/inequality test, then return a simplified form of
6621 the test using shifts and logical operations. Otherwise return
6622 NULL. TYPE is the desired result type. */
6623
6624 tree
6625 fold_single_bit_test (location_t loc, enum tree_code code,
6626 tree arg0, tree arg1, tree result_type)
6627 {
6628 /* If this is testing a single bit, we can optimize the test. */
6629 if ((code == NE_EXPR || code == EQ_EXPR)
6630 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6631 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6632 {
6633 tree inner = TREE_OPERAND (arg0, 0);
6634 tree type = TREE_TYPE (arg0);
6635 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6636 enum machine_mode operand_mode = TYPE_MODE (type);
6637 int ops_unsigned;
6638 tree signed_type, unsigned_type, intermediate_type;
6639 tree tem, one;
6640
6641 /* First, see if we can fold the single bit test into a sign-bit
6642 test. */
6643 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6644 result_type);
6645 if (tem)
6646 return tem;
6647
6648 /* Otherwise we have (A & C) != 0 where C is a single bit,
6649 convert that into ((A >> C2) & 1), where C2 = log2(C).
6650 Similarly for (A & C) == 0. */
6651
6652 /* If INNER is a right shift of a constant and it plus BITNUM does
6653 not overflow, adjust BITNUM and INNER. */
6654 if (TREE_CODE (inner) == RSHIFT_EXPR
6655 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6656 && host_integerp (TREE_OPERAND (inner, 1), 1)
6657 && bitnum < TYPE_PRECISION (type)
6658 && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
6659 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6660 {
6661 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6662 inner = TREE_OPERAND (inner, 0);
6663 }
6664
6665 /* If we are going to be able to omit the AND below, we must do our
6666 operations as unsigned. If we must use the AND, we have a choice.
6667 Normally unsigned is faster, but for some machines signed is. */
6668 #ifdef LOAD_EXTEND_OP
6669 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6670 && !flag_syntax_only) ? 0 : 1;
6671 #else
6672 ops_unsigned = 1;
6673 #endif
6674
6675 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6676 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6677 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6678 inner = fold_convert_loc (loc, intermediate_type, inner);
6679
6680 if (bitnum != 0)
6681 inner = build2 (RSHIFT_EXPR, intermediate_type,
6682 inner, size_int (bitnum));
6683
6684 one = build_int_cst (intermediate_type, 1);
6685
6686 if (code == EQ_EXPR)
6687 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6688
6689 /* Put the AND last so it can combine with more things. */
6690 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6691
6692 /* Make sure to return the proper type. */
6693 inner = fold_convert_loc (loc, result_type, inner);
6694
6695 return inner;
6696 }
6697 return NULL_TREE;
6698 }
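
/* Illustrative sketch, not part of this file: the two single-bit
   rewrites, with invented helper names.  */

static int
bit_test (unsigned a)
{
  return (a >> 3) & 1;	/* Folded form of (a & 8) != 0: bit 3 shifted down.  */
}

/* When the tested bit is the sign bit, the sign-test fold applies:
   (a & 0x80000000) != 0 becomes a signed comparison against zero
   (assuming 32-bit int; GCC defines the modular conversion).  */
static int
sign_test (unsigned a)
{
  return (int) a < 0;
}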
6699
6700 /* Check whether we are allowed to reorder operands arg0 and arg1,
6701 such that the evaluation of arg1 occurs before arg0. */
6702
6703 static bool
6704 reorder_operands_p (const_tree arg0, const_tree arg1)
6705 {
6706 if (! flag_evaluation_order)
6707 return true;
6708 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6709 return true;
6710 return ! TREE_SIDE_EFFECTS (arg0)
6711 && ! TREE_SIDE_EFFECTS (arg1);
6712 }
6713
6714 /* Test whether it is preferable to swap two operands, ARG0 and
6715 ARG1, for example because ARG0 is an integer constant and ARG1
6716 isn't. If REORDER is true, only recommend swapping if we can
6717 evaluate the operands in reverse order. */
6718
6719 bool
6720 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6721 {
6722 STRIP_SIGN_NOPS (arg0);
6723 STRIP_SIGN_NOPS (arg1);
6724
6725 if (TREE_CODE (arg1) == INTEGER_CST)
6726 return 0;
6727 if (TREE_CODE (arg0) == INTEGER_CST)
6728 return 1;
6729
6730 if (TREE_CODE (arg1) == REAL_CST)
6731 return 0;
6732 if (TREE_CODE (arg0) == REAL_CST)
6733 return 1;
6734
6735 if (TREE_CODE (arg1) == FIXED_CST)
6736 return 0;
6737 if (TREE_CODE (arg0) == FIXED_CST)
6738 return 1;
6739
6740 if (TREE_CODE (arg1) == COMPLEX_CST)
6741 return 0;
6742 if (TREE_CODE (arg0) == COMPLEX_CST)
6743 return 1;
6744
6745 if (TREE_CONSTANT (arg1))
6746 return 0;
6747 if (TREE_CONSTANT (arg0))
6748 return 1;
6749
6750 if (optimize_function_for_size_p (cfun))
6751 return 0;
6752
6753 if (reorder && flag_evaluation_order
6754 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6755 return 0;
6756
6757 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6758 for commutative and comparison operators. Ensuring a canonical
6759 form allows the optimizers to find additional redundancies without
6760 having to explicitly check for both orderings. */
6761 if (TREE_CODE (arg0) == SSA_NAME
6762 && TREE_CODE (arg1) == SSA_NAME
6763 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6764 return 1;
6765
6766 /* Put SSA_NAMEs last. */
6767 if (TREE_CODE (arg1) == SSA_NAME)
6768 return 0;
6769 if (TREE_CODE (arg0) == SSA_NAME)
6770 return 1;
6771
6772 /* Put variables last. */
6773 if (DECL_P (arg1))
6774 return 0;
6775 if (DECL_P (arg0))
6776 return 1;
6777
6778 return 0;
6779 }
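
/* Illustrative sketch, not part of this file: the canonical ordering
   this predicate produces.  Constants sink to the second operand, so
   5 + x is rewritten as x + 5 and 5 < x as x > 5 (the comparison code
   is swapped along with the operands), letting later passes match
   each expression in a single form.  */

static int
canonical_before (int x) { return 5 < x; }

static int
canonical_after (int x) { return x > 5; }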
6780
6781 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6782 ARG0 is extended to a wider type. */
6783
6784 static tree
6785 fold_widened_comparison (location_t loc, enum tree_code code,
6786 tree type, tree arg0, tree arg1)
6787 {
6788 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6789 tree arg1_unw;
6790 tree shorter_type, outer_type;
6791 tree min, max;
6792 bool above, below;
6793
6794 if (arg0_unw == arg0)
6795 return NULL_TREE;
6796 shorter_type = TREE_TYPE (arg0_unw);
6797
6798 #ifdef HAVE_canonicalize_funcptr_for_compare
6799 /* Disable this optimization if we're casting a function pointer
6800 type on targets that require function pointer canonicalization. */
6801 if (HAVE_canonicalize_funcptr_for_compare
6802 && TREE_CODE (shorter_type) == POINTER_TYPE
6803 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6804 return NULL_TREE;
6805 #endif
6806
6807 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6808 return NULL_TREE;
6809
6810 arg1_unw = get_unwidened (arg1, NULL_TREE);
6811
6812 /* If possible, express the comparison in the shorter mode. */
6813 if ((code == EQ_EXPR || code == NE_EXPR
6814 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6815 && (TREE_TYPE (arg1_unw) == shorter_type
6816 || ((TYPE_PRECISION (shorter_type)
6817 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6818 && (TYPE_UNSIGNED (shorter_type)
6819 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6820 || (TREE_CODE (arg1_unw) == INTEGER_CST
6821 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6822 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6823 && int_fits_type_p (arg1_unw, shorter_type))))
6824 return fold_build2_loc (loc, code, type, arg0_unw,
6825 fold_convert_loc (loc, shorter_type, arg1_unw));
6826
6827 if (TREE_CODE (arg1_unw) != INTEGER_CST
6828 || TREE_CODE (shorter_type) != INTEGER_TYPE
6829 || !int_fits_type_p (arg1_unw, shorter_type))
6830 return NULL_TREE;
6831
6832 /* If we are comparing with an integer that does not fit into the range
6833 of the shorter type, the result is known. */
6834 outer_type = TREE_TYPE (arg1_unw);
6835 min = lower_bound_in_type (outer_type, shorter_type);
6836 max = upper_bound_in_type (outer_type, shorter_type);
6837
6838 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6839 max, arg1_unw));
6840 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6841 arg1_unw, min));
6842
6843 switch (code)
6844 {
6845 case EQ_EXPR:
6846 if (above || below)
6847 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6848 break;
6849
6850 case NE_EXPR:
6851 if (above || below)
6852 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6853 break;
6854
6855 case LT_EXPR:
6856 case LE_EXPR:
6857 if (above)
6858 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6859 else if (below)
6860 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6861 break;
6862 case GT_EXPR:
6863 case GE_EXPR:
6864 if (above)
6865 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6866 else if (below)
6867 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6868       break;
6869 default:
6870 break;
6871 }
6872
6873 return NULL_TREE;
6874 }
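
/* [Editorial sketch -- not part of fold-const.c.]  A standalone
   illustration of the range logic above, in plain ISO C with
   hypothetical demo names: once the constant is known to lie outside
   the range of the shorter type, the widened comparison has a
   constant result.  */
#include <assert.h>

static int
demo_widened_lt (unsigned char c)
{
  /* The shorter type (unsigned char) has range [0, 255]; 300 is
     "above" it, so the LT comparison folds to constant true.  */
  return (int) c < 300;
}

static void
demo_widened_checks (void)
{
  assert (demo_widened_lt (0) == 1);
  assert (demo_widened_lt (255) == 1);	/* true even at the maximum */
}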
6875
6876 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6877 ARG0 just the signedness is changed. */
6878
6879 static tree
6880 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6881 tree arg0, tree arg1)
6882 {
6883 tree arg0_inner;
6884 tree inner_type, outer_type;
6885
6886 if (!CONVERT_EXPR_P (arg0))
6887 return NULL_TREE;
6888
6889 outer_type = TREE_TYPE (arg0);
6890 arg0_inner = TREE_OPERAND (arg0, 0);
6891 inner_type = TREE_TYPE (arg0_inner);
6892
6893 #ifdef HAVE_canonicalize_funcptr_for_compare
6894 /* Disable this optimization if we're casting a function pointer
6895 type on targets that require function pointer canonicalization. */
6896 if (HAVE_canonicalize_funcptr_for_compare
6897 && TREE_CODE (inner_type) == POINTER_TYPE
6898 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6899 return NULL_TREE;
6900 #endif
6901
6902 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6903 return NULL_TREE;
6904
6905 if (TREE_CODE (arg1) != INTEGER_CST
6906 && !(CONVERT_EXPR_P (arg1)
6907 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6908 return NULL_TREE;
6909
6910 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6911 && code != NE_EXPR
6912 && code != EQ_EXPR)
6913 return NULL_TREE;
6914
6915 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6916 return NULL_TREE;
6917
6918 if (TREE_CODE (arg1) == INTEGER_CST)
6919 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6920 0, TREE_OVERFLOW (arg1));
6921 else
6922 arg1 = fold_convert_loc (loc, inner_type, arg1);
6923
6924 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6925 }
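
/* [Editorial sketch -- not part of fold-const.c.]  Why the fold above
   allows sign-changing casts only for EQ_EXPR/NE_EXPR: equality of
   same-precision values is independent of signedness, while ordering
   is not.  Plain ISO C, hypothetical demo name.  */
#include <assert.h>

static void
demo_sign_change (int x)
{
  /* Safe: (unsigned int) x == 5u may be folded to x == 5.  */
  assert (((unsigned int) x == 5u) == (x == 5));
  /* Not safe for ordering: for x == -1, (unsigned int) x < 5u is
     false but x < 5 is true, so LT_EXPR must keep the cast.  */
}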
6926
6927 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6928    the step of the array.  Reconstructs s and delta in the case of s *
6929    delta being an integer constant (and thus already folded).  ADDR is
6930    the address.  OP1 is the multiplicative expression.  If the
6931    function succeeds, the new address expression is returned.
6932    Otherwise NULL_TREE is returned.  LOC is the location of the
6933    resulting expression. */
6934
6935 static tree
6936 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6937 {
6938 tree s, delta, step;
6939 tree ref = TREE_OPERAND (addr, 0), pref;
6940 tree ret, pos;
6941 tree itype;
6942 bool mdim = false;
6943
6944 /* Strip the nops that might be added when converting op1 to sizetype. */
6945 STRIP_NOPS (op1);
6946
6947 /* Canonicalize op1 into a possibly non-constant delta
6948 and an INTEGER_CST s. */
6949 if (TREE_CODE (op1) == MULT_EXPR)
6950 {
6951 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6952
6953 STRIP_NOPS (arg0);
6954 STRIP_NOPS (arg1);
6955
6956 if (TREE_CODE (arg0) == INTEGER_CST)
6957 {
6958 s = arg0;
6959 delta = arg1;
6960 }
6961 else if (TREE_CODE (arg1) == INTEGER_CST)
6962 {
6963 s = arg1;
6964 delta = arg0;
6965 }
6966 else
6967 return NULL_TREE;
6968 }
6969 else if (TREE_CODE (op1) == INTEGER_CST)
6970 {
6971 delta = op1;
6972 s = NULL_TREE;
6973 }
6974 else
6975 {
6976       /* Treat op1 as delta * 1.  */
6977 delta = op1;
6978 s = integer_one_node;
6979 }
6980
6981 /* Handle &x.array the same as we would handle &x.array[0]. */
6982 if (TREE_CODE (ref) == COMPONENT_REF
6983 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6984 {
6985 tree domain;
6986
6987 /* Remember if this was a multi-dimensional array. */
6988 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6989 mdim = true;
6990
6991 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6992 if (! domain)
6993 goto cont;
6994 itype = TREE_TYPE (domain);
6995
6996 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6997 if (TREE_CODE (step) != INTEGER_CST)
6998 goto cont;
6999
7000 if (s)
7001 {
7002 if (! tree_int_cst_equal (step, s))
7003 goto cont;
7004 }
7005 else
7006 {
7007 	      /* Check whether delta is a multiple of step. */
7008 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7009 if (! tmp)
7010 goto cont;
7011 delta = tmp;
7012 }
7013
7014 /* Only fold here if we can verify we do not overflow one
7015 dimension of a multi-dimensional array. */
7016 if (mdim)
7017 {
7018 tree tmp;
7019
7020 if (!TYPE_MIN_VALUE (domain)
7021 || !TYPE_MAX_VALUE (domain)
7022 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7023 goto cont;
7024
7025 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7026 fold_convert_loc (loc, itype,
7027 TYPE_MIN_VALUE (domain)),
7028 fold_convert_loc (loc, itype, delta));
7029 if (TREE_CODE (tmp) != INTEGER_CST
7030 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7031 goto cont;
7032 }
7033
7034 /* We found a suitable component reference. */
7035
7036 pref = TREE_OPERAND (addr, 0);
7037 ret = copy_node (pref);
7038 SET_EXPR_LOCATION (ret, loc);
7039
7040 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7041 fold_build2_loc
7042 (loc, PLUS_EXPR, itype,
7043 fold_convert_loc (loc, itype,
7044 TYPE_MIN_VALUE
7045 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7046 fold_convert_loc (loc, itype, delta)),
7047 NULL_TREE, NULL_TREE);
7048 return build_fold_addr_expr_loc (loc, ret);
7049 }
7050
7051 cont:
7052
7053 for (;; ref = TREE_OPERAND (ref, 0))
7054 {
7055 if (TREE_CODE (ref) == ARRAY_REF)
7056 {
7057 tree domain;
7058
7059 /* Remember if this was a multi-dimensional array. */
7060 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7061 mdim = true;
7062
7063 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7064 if (! domain)
7065 continue;
7066 itype = TREE_TYPE (domain);
7067
7068 step = array_ref_element_size (ref);
7069 if (TREE_CODE (step) != INTEGER_CST)
7070 continue;
7071
7072 if (s)
7073 {
7074 if (! tree_int_cst_equal (step, s))
7075 continue;
7076 }
7077 else
7078 {
7079 	      /* Check whether delta is a multiple of step. */
7080 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7081 if (! tmp)
7082 continue;
7083 delta = tmp;
7084 }
7085
7086 /* Only fold here if we can verify we do not overflow one
7087 dimension of a multi-dimensional array. */
7088 if (mdim)
7089 {
7090 tree tmp;
7091
7092 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7093 || !TYPE_MAX_VALUE (domain)
7094 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7095 continue;
7096
7097 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7098 fold_convert_loc (loc, itype,
7099 TREE_OPERAND (ref, 1)),
7100 fold_convert_loc (loc, itype, delta));
7101 if (!tmp
7102 || TREE_CODE (tmp) != INTEGER_CST
7103 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7104 continue;
7105 }
7106
7107 break;
7108 }
7109 else
7110 mdim = false;
7111
7112 if (!handled_component_p (ref))
7113 return NULL_TREE;
7114 }
7115
7116   /* We found a suitable array reference.  Copy everything up to it,
7117      and replace the index. */
7118
7119 pref = TREE_OPERAND (addr, 0);
7120 ret = copy_node (pref);
7121 SET_EXPR_LOCATION (ret, loc);
7122 pos = ret;
7123
7124 while (pref != ref)
7125 {
7126 pref = TREE_OPERAND (pref, 0);
7127 TREE_OPERAND (pos, 0) = copy_node (pref);
7128 pos = TREE_OPERAND (pos, 0);
7129 }
7130
7131 TREE_OPERAND (pos, 1)
7132 = fold_build2_loc (loc, PLUS_EXPR, itype,
7133 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7134 fold_convert_loc (loc, itype, delta));
7135 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7136 }
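
/* [Editorial sketch -- not part of fold-const.c.]  The address algebra
   behind the fold above, in plain ISO C with a hypothetical demo name:
   adding s * delta bytes to &a[idx] lands on &a[idx + delta] when s is
   the element size (the "step").  */
#include <assert.h>

static void
demo_move_mult_to_index (void)
{
  int a[10];
  /* &a[2] p+ 3 * step, spelled with byte arithmetic ...  */
  char *p = (char *) &a[2] + 3 * sizeof (int);
  /* ... is the same address as &a[2 + 3].  */
  assert ((int *) p == &a[2 + 3]);
}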
7137
7138
7139 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7140 means A >= Y && A != MAX, but in this case we know that
7141 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7142
7143 static tree
7144 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7145 {
7146 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7147
7148 if (TREE_CODE (bound) == LT_EXPR)
7149 a = TREE_OPERAND (bound, 0);
7150 else if (TREE_CODE (bound) == GT_EXPR)
7151 a = TREE_OPERAND (bound, 1);
7152 else
7153 return NULL_TREE;
7154
7155 typea = TREE_TYPE (a);
7156 if (!INTEGRAL_TYPE_P (typea)
7157 && !POINTER_TYPE_P (typea))
7158 return NULL_TREE;
7159
7160 if (TREE_CODE (ineq) == LT_EXPR)
7161 {
7162 a1 = TREE_OPERAND (ineq, 1);
7163 y = TREE_OPERAND (ineq, 0);
7164 }
7165 else if (TREE_CODE (ineq) == GT_EXPR)
7166 {
7167 a1 = TREE_OPERAND (ineq, 0);
7168 y = TREE_OPERAND (ineq, 1);
7169 }
7170 else
7171 return NULL_TREE;
7172
7173 if (TREE_TYPE (a1) != typea)
7174 return NULL_TREE;
7175
7176 if (POINTER_TYPE_P (typea))
7177 {
7178       /* Convert the pointers to a signed integer type before taking the difference. */
7179 tree ta = fold_convert_loc (loc, ssizetype, a);
7180 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7181 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7182 }
7183 else
7184 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7185
7186 if (!diff || !integer_onep (diff))
7187 return NULL_TREE;
7188
7189 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7190 }
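
/* [Editorial sketch -- not part of fold-const.c.]  Why the fold above
   is safe: A + 1 > Y differs from A >= Y only when A + 1 overflows,
   i.e. when A is the maximal value, and the companion bound A < X
   rules that case out.  Plain ISO C, hypothetical demo name.  */
#include <assert.h>

static void
demo_nonsharp_ineq (int a, int x, int y)
{
  if (a < x)	/* BOUND: implies a < INT_MAX, so a + 1 cannot overflow.  */
    assert ((a + 1 > y) == (a >= y));	/* INEQ folds to a >= y.  */
}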
7191
7192 /* Fold a sum or difference in which at least one operand is a multiplication.
7193    Returns the folded tree, or NULL if no simplification could be made. */
7194
7195 static tree
7196 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7197 tree arg0, tree arg1)
7198 {
7199 tree arg00, arg01, arg10, arg11;
7200 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7201
7202 /* (A * C) +- (B * C) -> (A+-B) * C.
7203 (A * C) +- A -> A * (C+-1).
7204 We are most concerned about the case where C is a constant,
7205 but other combinations show up during loop reduction. Since
7206 it is not difficult, try all four possibilities. */
7207
7208 if (TREE_CODE (arg0) == MULT_EXPR)
7209 {
7210 arg00 = TREE_OPERAND (arg0, 0);
7211 arg01 = TREE_OPERAND (arg0, 1);
7212 }
7213 else if (TREE_CODE (arg0) == INTEGER_CST)
7214 {
7215 arg00 = build_one_cst (type);
7216 arg01 = arg0;
7217 }
7218 else
7219 {
7220 /* We cannot generate constant 1 for fract. */
7221 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7222 return NULL_TREE;
7223 arg00 = arg0;
7224 arg01 = build_one_cst (type);
7225 }
7226 if (TREE_CODE (arg1) == MULT_EXPR)
7227 {
7228 arg10 = TREE_OPERAND (arg1, 0);
7229 arg11 = TREE_OPERAND (arg1, 1);
7230 }
7231 else if (TREE_CODE (arg1) == INTEGER_CST)
7232 {
7233 arg10 = build_one_cst (type);
7234       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7235 the purpose of this canonicalization. */
7236 if (TREE_INT_CST_HIGH (arg1) == -1
7237 && negate_expr_p (arg1)
7238 && code == PLUS_EXPR)
7239 {
7240 arg11 = negate_expr (arg1);
7241 code = MINUS_EXPR;
7242 }
7243 else
7244 arg11 = arg1;
7245 }
7246 else
7247 {
7248 /* We cannot generate constant 1 for fract. */
7249 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7250 return NULL_TREE;
7251 arg10 = arg1;
7252 arg11 = build_one_cst (type);
7253 }
7254 same = NULL_TREE;
7255
7256 if (operand_equal_p (arg01, arg11, 0))
7257 same = arg01, alt0 = arg00, alt1 = arg10;
7258 else if (operand_equal_p (arg00, arg10, 0))
7259 same = arg00, alt0 = arg01, alt1 = arg11;
7260 else if (operand_equal_p (arg00, arg11, 0))
7261 same = arg00, alt0 = arg01, alt1 = arg10;
7262 else if (operand_equal_p (arg01, arg10, 0))
7263 same = arg01, alt0 = arg00, alt1 = arg11;
7264
7265 /* No identical multiplicands; see if we can find a common
7266 power-of-two factor in non-power-of-two multiplies. This
7267 can help in multi-dimensional array access. */
7268 else if (host_integerp (arg01, 0)
7269 && host_integerp (arg11, 0))
7270 {
7271 HOST_WIDE_INT int01, int11, tmp;
7272 bool swap = false;
7273 tree maybe_same;
7274 int01 = TREE_INT_CST_LOW (arg01);
7275 int11 = TREE_INT_CST_LOW (arg11);
7276
7277 /* Move min of absolute values to int11. */
7278 if (absu_hwi (int01) < absu_hwi (int11))
7279 {
7280 tmp = int01, int01 = int11, int11 = tmp;
7281 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7282 maybe_same = arg01;
7283 swap = true;
7284 }
7285 else
7286 maybe_same = arg11;
7287
7288 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7289 	  /* The remainder should not be a constant, otherwise we
7290 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7291 	     increase the number of multiplications necessary. */
7292 && TREE_CODE (arg10) != INTEGER_CST)
7293 {
7294 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7295 build_int_cst (TREE_TYPE (arg00),
7296 int01 / int11));
7297 alt1 = arg10;
7298 same = maybe_same;
7299 if (swap)
7300 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7301 }
7302 }
7303
7304 if (same)
7305 return fold_build2_loc (loc, MULT_EXPR, type,
7306 fold_build2_loc (loc, code, type,
7307 fold_convert_loc (loc, type, alt0),
7308 fold_convert_loc (loc, type, alt1)),
7309 fold_convert_loc (loc, type, same));
7310
7311 return NULL_TREE;
7312 }
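
/* [Editorial sketch -- not part of fold-const.c.]  The two shapes
   produced above, written out by hand: factoring out an identical
   multiplicand, and factoring a common power of two out of the
   constants.  Plain ISO C, hypothetical demo name; assumes the
   arithmetic does not overflow.  */
#include <assert.h>

static void
demo_plusminus_mult (int a, int b, int c, int i, int j)
{
  /* (A * C) + (B * C) -> (A + B) * C.  */
  assert (a * c + b * c == (a + b) * c);
  /* i * 4 + j * 2 -> (i * 2 + j) * 2: the common factor 2 is pulled
     out without increasing the number of multiplications.  */
  assert (i * 4 + j * 2 == (i * 2 + j) * 2);
}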
7313
7314 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7315 specified by EXPR into the buffer PTR of length LEN bytes.
7316 Return the number of bytes placed in the buffer, or zero
7317 upon failure. */
7318
7319 static int
7320 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7321 {
7322 tree type = TREE_TYPE (expr);
7323 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7324 int byte, offset, word, words;
7325 unsigned char value;
7326
7327 if (total_bytes > len)
7328 return 0;
7329 words = total_bytes / UNITS_PER_WORD;
7330
7331 for (byte = 0; byte < total_bytes; byte++)
7332 {
7333 int bitpos = byte * BITS_PER_UNIT;
7334 if (bitpos < HOST_BITS_PER_WIDE_INT)
7335 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7336 else
7337 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7338 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7339
7340 if (total_bytes > UNITS_PER_WORD)
7341 {
7342 word = byte / UNITS_PER_WORD;
7343 if (WORDS_BIG_ENDIAN)
7344 word = (words - 1) - word;
7345 offset = word * UNITS_PER_WORD;
7346 if (BYTES_BIG_ENDIAN)
7347 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7348 else
7349 offset += byte % UNITS_PER_WORD;
7350 }
7351 else
7352 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7353 ptr[offset] = value;
7354 }
7355 return total_bytes;
7356 }
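
/* [Editorial sketch -- not part of fold-const.c.]  The byte-extraction
   idiom used above, reduced to plain ISO C for a little-endian layout
   (hypothetical demo name): byte BYTE of the value is the value
   shifted right by BYTE * 8, truncated to 8 bits.  The real routine
   additionally permutes offsets for big-endian bytes and words.  */
#include <stdint.h>

static void
demo_encode_int_le (uint64_t value, unsigned char *ptr, int total_bytes)
{
  int byte;
  for (byte = 0; byte < total_bytes; byte++)
    ptr[byte] = (unsigned char) (value >> (byte * 8));
}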
7357
7358
7359 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7360 specified by EXPR into the buffer PTR of length LEN bytes.
7361 Return the number of bytes placed in the buffer, or zero
7362 upon failure. */
7363
7364 static int
7365 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7366 {
7367 tree type = TREE_TYPE (expr);
7368 enum machine_mode mode = TYPE_MODE (type);
7369 int total_bytes = GET_MODE_SIZE (mode);
7370 FIXED_VALUE_TYPE value;
7371 tree i_value, i_type;
7372
7373 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7374 return 0;
7375
7376 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7377
7378 if (NULL_TREE == i_type
7379       || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7380 return 0;
7381
7382 value = TREE_FIXED_CST (expr);
7383 i_value = double_int_to_tree (i_type, value.data);
7384
7385 return native_encode_int (i_value, ptr, len);
7386 }
7387
7388
7389 /* Subroutine of native_encode_expr. Encode the REAL_CST
7390 specified by EXPR into the buffer PTR of length LEN bytes.
7391 Return the number of bytes placed in the buffer, or zero
7392 upon failure. */
7393
7394 static int
7395 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7396 {
7397 tree type = TREE_TYPE (expr);
7398 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7399 int byte, offset, word, words, bitpos;
7400 unsigned char value;
7401
7402 /* There are always 32 bits in each long, no matter the size of
7403      the host's long.  We handle floating point representations with
7404 up to 192 bits. */
7405 long tmp[6];
7406
7407 if (total_bytes > len)
7408 return 0;
7409 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7410
7411 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7412
7413 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7414 bitpos += BITS_PER_UNIT)
7415 {
7416 byte = (bitpos / BITS_PER_UNIT) & 3;
7417 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7418
7419 if (UNITS_PER_WORD < 4)
7420 {
7421 word = byte / UNITS_PER_WORD;
7422 if (WORDS_BIG_ENDIAN)
7423 word = (words - 1) - word;
7424 offset = word * UNITS_PER_WORD;
7425 if (BYTES_BIG_ENDIAN)
7426 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7427 else
7428 offset += byte % UNITS_PER_WORD;
7429 }
7430 else
7431 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7432 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7433 }
7434 return total_bytes;
7435 }
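
/* [Editorial sketch -- not part of fold-const.c.]  The fixed layout
   real_to_target produces, in plain ISO C with a hypothetical demo
   name: each element of TMP carries exactly 32 significant bits, so
   the byte at BITPOS comes from word TMP[BITPOS / 32] at bit offset
   BITPOS % 32, regardless of the host's long size.  */
static unsigned char
demo_real_byte (const long *tmp, int bitpos)
{
  /* Mirrors the extraction in native_encode_real above.  */
  return (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
}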
7436
7437 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7438 specified by EXPR into the buffer PTR of length LEN bytes.
7439 Return the number of bytes placed in the buffer, or zero
7440 upon failure. */
7441
7442 static int
7443 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7444 {
7445 int rsize, isize;
7446 tree part;
7447
7448 part = TREE_REALPART (expr);
7449 rsize = native_encode_expr (part, ptr, len);
7450 if (rsize == 0)
7451 return 0;
7452 part = TREE_IMAGPART (expr);
7453 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7454 if (isize != rsize)
7455 return 0;
7456 return rsize + isize;
7457 }
7458
7459
7460 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7461 specified by EXPR into the buffer PTR of length LEN bytes.
7462 Return the number of bytes placed in the buffer, or zero
7463 upon failure. */
7464
7465 static int
7466 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7467 {
7468 unsigned i, count;
7469 int size, offset;
7470 tree itype, elem;
7471
7472 offset = 0;
7473 count = VECTOR_CST_NELTS (expr);
7474 itype = TREE_TYPE (TREE_TYPE (expr));
7475 size = GET_MODE_SIZE (TYPE_MODE (itype));
7476 for (i = 0; i < count; i++)
7477 {
7478 elem = VECTOR_CST_ELT (expr, i);
7479 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7480 return 0;
7481 offset += size;
7482 }
7483 return offset;
7484 }
7485
7486
7487 /* Subroutine of native_encode_expr. Encode the STRING_CST
7488 specified by EXPR into the buffer PTR of length LEN bytes.
7489 Return the number of bytes placed in the buffer, or zero
7490 upon failure. */
7491
7492 static int
7493 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7494 {
7495 tree type = TREE_TYPE (expr);
7496 HOST_WIDE_INT total_bytes;
7497
7498 if (TREE_CODE (type) != ARRAY_TYPE
7499 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7500 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7501 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7502 return 0;
7503 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7504 if (total_bytes > len)
7505 return 0;
7506 if (TREE_STRING_LENGTH (expr) < total_bytes)
7507 {
7508 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7509 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7510 total_bytes - TREE_STRING_LENGTH (expr));
7511 }
7512 else
7513 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7514 return total_bytes;
7515 }
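
/* [Editorial sketch -- not part of fold-const.c.]  The padding rule
   above in isolation, in plain ISO C with a hypothetical demo name:
   when the string constant is shorter than its array type, the tail
   is zero-filled, matching the C semantics of initializing a char
   array from a short string literal.  */
#include <string.h>

static void
demo_encode_string_pad (unsigned char *ptr, const char *str,
			int str_len, int total_bytes)
{
  memcpy (ptr, str, str_len);
  memset (ptr + str_len, 0, total_bytes - str_len);
}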
7516
7517
7518 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7519    REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
7520    specified by EXPR into the buffer PTR of length LEN bytes.
7521    Return the number of bytes placed in the buffer, or zero upon failure. */
7522
7523 int
7524 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7525 {
7526 switch (TREE_CODE (expr))
7527 {
7528 case INTEGER_CST:
7529 return native_encode_int (expr, ptr, len);
7530
7531 case REAL_CST:
7532 return native_encode_real (expr, ptr, len);
7533
7534 case FIXED_CST:
7535 return native_encode_fixed (expr, ptr, len);
7536
7537 case COMPLEX_CST:
7538 return native_encode_complex (expr, ptr, len);
7539
7540 case VECTOR_CST:
7541 return native_encode_vector (expr, ptr, len);
7542
7543 case STRING_CST:
7544 return native_encode_string (expr, ptr, len);
7545
7546 default:
7547 return 0;
7548 }
7549 }
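
/* [Editorial sketch -- not part of fold-const.c.]  Typical round-trip
   use of the two entry points, as fold_view_convert_expr does below:
   encode a constant into target bytes, then reinterpret the bytes as
   another type.  EXPR and TYPE are placeholders, so the fragment is
   kept compiled-out.  */
#if 0
  unsigned char buf[64];
  int len = native_encode_expr (expr, buf, sizeof (buf));
  tree punned = len ? native_interpret_expr (type, buf, len) : NULL_TREE;
#endif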
7550
7551
7552 /* Subroutine of native_interpret_expr. Interpret the contents of
7553 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7554 If the buffer cannot be interpreted, return NULL_TREE. */
7555
7556 static tree
7557 native_interpret_int (tree type, const unsigned char *ptr, int len)
7558 {
7559 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7560 double_int result;
7561
7562 if (total_bytes > len
7563 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7564 return NULL_TREE;
7565
7566 result = double_int::from_buffer (ptr, total_bytes);
7567
7568 return double_int_to_tree (type, result);
7569 }
7570
7571
7572 /* Subroutine of native_interpret_expr. Interpret the contents of
7573 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7574 If the buffer cannot be interpreted, return NULL_TREE. */
7575
7576 static tree
7577 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7578 {
7579 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7580 double_int result;
7581 FIXED_VALUE_TYPE fixed_value;
7582
7583 if (total_bytes > len
7584 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7585 return NULL_TREE;
7586
7587 result = double_int::from_buffer (ptr, total_bytes);
7588 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7589
7590 return build_fixed (type, fixed_value);
7591 }
7592
7593
7594 /* Subroutine of native_interpret_expr. Interpret the contents of
7595 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7596 If the buffer cannot be interpreted, return NULL_TREE. */
7597
7598 static tree
7599 native_interpret_real (tree type, const unsigned char *ptr, int len)
7600 {
7601 enum machine_mode mode = TYPE_MODE (type);
7602 int total_bytes = GET_MODE_SIZE (mode);
7603 int byte, offset, word, words, bitpos;
7604 unsigned char value;
7605 /* There are always 32 bits in each long, no matter the size of
7606      the host's long.  We handle floating point representations with
7607 up to 192 bits. */
7608 REAL_VALUE_TYPE r;
7609 long tmp[6];
7610
7612 if (total_bytes > len || total_bytes > 24)
7613 return NULL_TREE;
7614 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7615
7616 memset (tmp, 0, sizeof (tmp));
7617 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7618 bitpos += BITS_PER_UNIT)
7619 {
7620 byte = (bitpos / BITS_PER_UNIT) & 3;
7621 if (UNITS_PER_WORD < 4)
7622 {
7623 word = byte / UNITS_PER_WORD;
7624 if (WORDS_BIG_ENDIAN)
7625 word = (words - 1) - word;
7626 offset = word * UNITS_PER_WORD;
7627 if (BYTES_BIG_ENDIAN)
7628 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7629 else
7630 offset += byte % UNITS_PER_WORD;
7631 }
7632 else
7633 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7634 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7635
7636 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7637 }
7638
7639 real_from_target (&r, tmp, mode);
7640 return build_real (type, r);
7641 }
7642
7643
7644 /* Subroutine of native_interpret_expr. Interpret the contents of
7645 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7646 If the buffer cannot be interpreted, return NULL_TREE. */
7647
7648 static tree
7649 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7650 {
7651 tree etype, rpart, ipart;
7652 int size;
7653
7654 etype = TREE_TYPE (type);
7655 size = GET_MODE_SIZE (TYPE_MODE (etype));
7656 if (size * 2 > len)
7657 return NULL_TREE;
7658 rpart = native_interpret_expr (etype, ptr, size);
7659 if (!rpart)
7660 return NULL_TREE;
7661 ipart = native_interpret_expr (etype, ptr+size, size);
7662 if (!ipart)
7663 return NULL_TREE;
7664 return build_complex (type, rpart, ipart);
7665 }
7666
7667
7668 /* Subroutine of native_interpret_expr. Interpret the contents of
7669 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7670 If the buffer cannot be interpreted, return NULL_TREE. */
7671
7672 static tree
7673 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7674 {
7675 tree etype, elem;
7676 int i, size, count;
7677 tree *elements;
7678
7679 etype = TREE_TYPE (type);
7680 size = GET_MODE_SIZE (TYPE_MODE (etype));
7681 count = TYPE_VECTOR_SUBPARTS (type);
7682 if (size * count > len)
7683 return NULL_TREE;
7684
7685 elements = XALLOCAVEC (tree, count);
7686 for (i = count - 1; i >= 0; i--)
7687 {
7688 elem = native_interpret_expr (etype, ptr+(i*size), size);
7689 if (!elem)
7690 return NULL_TREE;
7691 elements[i] = elem;
7692 }
7693 return build_vector (type, elements);
7694 }
7695
7696
7697 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7698 the buffer PTR of length LEN as a constant of type TYPE. For
7699 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7700    we return a REAL_CST, etc.  If the buffer cannot be interpreted,
7701 return NULL_TREE. */
7702
7703 tree
7704 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7705 {
7706 switch (TREE_CODE (type))
7707 {
7708 case INTEGER_TYPE:
7709 case ENUMERAL_TYPE:
7710 case BOOLEAN_TYPE:
7711 case POINTER_TYPE:
7712 case REFERENCE_TYPE:
7713 return native_interpret_int (type, ptr, len);
7714
7715 case REAL_TYPE:
7716 return native_interpret_real (type, ptr, len);
7717
7718 case FIXED_POINT_TYPE:
7719 return native_interpret_fixed (type, ptr, len);
7720
7721 case COMPLEX_TYPE:
7722 return native_interpret_complex (type, ptr, len);
7723
7724 case VECTOR_TYPE:
7725 return native_interpret_vector (type, ptr, len);
7726
7727 default:
7728 return NULL_TREE;
7729 }
7730 }
7731
7732 /* Returns true if we can interpret the contents of a native encoding
7733 as TYPE. */
7734
7735 static bool
7736 can_native_interpret_type_p (tree type)
7737 {
7738 switch (TREE_CODE (type))
7739 {
7740 case INTEGER_TYPE:
7741 case ENUMERAL_TYPE:
7742 case BOOLEAN_TYPE:
7743 case POINTER_TYPE:
7744 case REFERENCE_TYPE:
7745 case FIXED_POINT_TYPE:
7746 case REAL_TYPE:
7747 case COMPLEX_TYPE:
7748 case VECTOR_TYPE:
7749 return true;
7750 default:
7751 return false;
7752 }
7753 }
7754
7755 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7756 TYPE at compile-time. If we're unable to perform the conversion
7757 return NULL_TREE. */
7758
7759 static tree
7760 fold_view_convert_expr (tree type, tree expr)
7761 {
7762 /* We support up to 512-bit values (for V8DFmode). */
7763 unsigned char buffer[64];
7764 int len;
7765
7766 /* Check that the host and target are sane. */
7767 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7768 return NULL_TREE;
7769
7770 len = native_encode_expr (expr, buffer, sizeof (buffer));
7771 if (len == 0)
7772 return NULL_TREE;
7773
7774 return native_interpret_expr (type, buffer, len);
7775 }
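
/* [Editorial sketch -- not part of fold-const.c.]  What the
   compile-time fold above computes corresponds to the classic memcpy
   type-pun at run time: the same bytes reinterpreted under a new
   type.  Plain ISO C, hypothetical demo name; assumes
   sizeof (float) == sizeof (uint32_t).  */
#include <string.h>
#include <stdint.h>

static uint32_t
demo_view_convert (float f)
{
  uint32_t u;
  memcpy (&u, &f, sizeof u);	/* reinterpret, don't convert */
  return u;
}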
7776
7777 /* Build an expression for the address of T. Folds away INDIRECT_REF
7778 to avoid confusing the gimplify process. */
7779
7780 tree
7781 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7782 {
7783 /* The size of the object is not relevant when talking about its address. */
7784 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7785 t = TREE_OPERAND (t, 0);
7786
7787 if (TREE_CODE (t) == INDIRECT_REF)
7788 {
7789 t = TREE_OPERAND (t, 0);
7790
7791 if (TREE_TYPE (t) != ptrtype)
7792 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7793 }
7794 else if (TREE_CODE (t) == MEM_REF
7795 && integer_zerop (TREE_OPERAND (t, 1)))
7796 return TREE_OPERAND (t, 0);
7797 else if (TREE_CODE (t) == MEM_REF
7798 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7799 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7800 TREE_OPERAND (t, 0),
7801 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7802 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7803 {
7804 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7805
7806 if (TREE_TYPE (t) != ptrtype)
7807 t = fold_convert_loc (loc, ptrtype, t);
7808 }
7809 else
7810 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7811
7812 return t;
7813 }
7814
7815 /* Build an expression for the address of T. */
7816
7817 tree
7818 build_fold_addr_expr_loc (location_t loc, tree t)
7819 {
7820 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7821
7822 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7823 }
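
/* [Editorial sketch -- not part of fold-const.c.]  The INDIRECT_REF
   fold above, in source terms: taking the address of a dereference
   yields the original pointer, so no temporary reference tree needs
   to survive.  Plain ISO C, hypothetical demo name.  */
static int *
demo_addr_of_deref (int *p)
{
  return &*p;	/* folds to just p */
}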
7824
7825 static bool vec_cst_ctor_to_array (tree, tree *);
7826
7827 /* Fold a unary expression of code CODE and type TYPE with operand
7828 OP0. Return the folded expression if folding is successful.
7829 Otherwise, return NULL_TREE. */
7830
7831 tree
7832 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7833 {
7834 tree tem;
7835 tree arg0;
7836 enum tree_code_class kind = TREE_CODE_CLASS (code);
7837
7838 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7839 && TREE_CODE_LENGTH (code) == 1);
7840
7841 arg0 = op0;
7842 if (arg0)
7843 {
7844 if (CONVERT_EXPR_CODE_P (code)
7845 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7846 {
7847 /* Don't use STRIP_NOPS, because signedness of argument type
7848 matters. */
7849 STRIP_SIGN_NOPS (arg0);
7850 }
7851 else
7852 {
7853 /* Strip any conversions that don't change the mode. This
7854 is safe for every expression, except for a comparison
7855 expression because its signedness is derived from its
7856 operands.
7857
7858 Note that this is done as an internal manipulation within
7859 the constant folder, in order to find the simplest
7860 representation of the arguments so that their form can be
7861    studied.  In any case, the appropriate type conversions
7862 should be put back in the tree that will get out of the
7863 constant folder. */
7864 STRIP_NOPS (arg0);
7865 }
7866 }
7867
7868 if (TREE_CODE_CLASS (code) == tcc_unary)
7869 {
7870 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7871 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7872 fold_build1_loc (loc, code, type,
7873 fold_convert_loc (loc, TREE_TYPE (op0),
7874 TREE_OPERAND (arg0, 1))));
7875 else if (TREE_CODE (arg0) == COND_EXPR)
7876 {
7877 tree arg01 = TREE_OPERAND (arg0, 1);
7878 tree arg02 = TREE_OPERAND (arg0, 2);
7879 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7880 arg01 = fold_build1_loc (loc, code, type,
7881 fold_convert_loc (loc,
7882 TREE_TYPE (op0), arg01));
7883 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7884 arg02 = fold_build1_loc (loc, code, type,
7885 fold_convert_loc (loc,
7886 TREE_TYPE (op0), arg02));
7887 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7888 arg01, arg02);
7889
7890 	  /* If this was a conversion, and all we did was to move it
7891 	     inside the COND_EXPR, bring it back out.  But leave it if
7892 it is a conversion from integer to integer and the
7893 result precision is no wider than a word since such a
7894 conversion is cheap and may be optimized away by combine,
7895 while it couldn't if it were outside the COND_EXPR. Then return
7896 so we don't get into an infinite recursion loop taking the
7897 conversion out and then back in. */
7898
7899 if ((CONVERT_EXPR_CODE_P (code)
7900 || code == NON_LVALUE_EXPR)
7901 && TREE_CODE (tem) == COND_EXPR
7902 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7903 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7904 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7905 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7906 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7907 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7908 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7909 && (INTEGRAL_TYPE_P
7910 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7911 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7912 || flag_syntax_only))
7913 tem = build1_loc (loc, code, type,
7914 build3 (COND_EXPR,
7915 TREE_TYPE (TREE_OPERAND
7916 (TREE_OPERAND (tem, 1), 0)),
7917 TREE_OPERAND (tem, 0),
7918 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7919 TREE_OPERAND (TREE_OPERAND (tem, 2),
7920 0)));
7921 return tem;
7922 }
7923 }
7924
7925 switch (code)
7926 {
7927 case PAREN_EXPR:
7928 /* Re-association barriers around constants and other re-association
7929 barriers can be removed. */
7930 if (CONSTANT_CLASS_P (op0)
7931 || TREE_CODE (op0) == PAREN_EXPR)
7932 return fold_convert_loc (loc, type, op0);
7933 return NULL_TREE;
7934
7935 CASE_CONVERT:
7936 case FLOAT_EXPR:
7937 case FIX_TRUNC_EXPR:
7938 if (TREE_TYPE (op0) == type)
7939 return op0;
7940
7941 if (COMPARISON_CLASS_P (op0))
7942 {
7943 /* If we have (type) (a CMP b) and type is an integral type, return
7944 new expression involving the new type. Canonicalize
7945 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7946 non-integral type.
7947 Do not fold the result as that would not simplify further, also
7948 folding again results in recursions. */
7949 if (TREE_CODE (type) == BOOLEAN_TYPE)
7950 return build2_loc (loc, TREE_CODE (op0), type,
7951 TREE_OPERAND (op0, 0),
7952 TREE_OPERAND (op0, 1));
7953 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7954 && TREE_CODE (type) != VECTOR_TYPE)
7955 return build3_loc (loc, COND_EXPR, type, op0,
7956 constant_boolean_node (true, type),
7957 constant_boolean_node (false, type));
7958 }
7959
7960 /* Handle cases of two conversions in a row. */
7961 if (CONVERT_EXPR_P (op0))
7962 {
7963 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7964 tree inter_type = TREE_TYPE (op0);
7965 int inside_int = INTEGRAL_TYPE_P (inside_type);
7966 int inside_ptr = POINTER_TYPE_P (inside_type);
7967 int inside_float = FLOAT_TYPE_P (inside_type);
7968 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7969 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7970 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7971 int inter_int = INTEGRAL_TYPE_P (inter_type);
7972 int inter_ptr = POINTER_TYPE_P (inter_type);
7973 int inter_float = FLOAT_TYPE_P (inter_type);
7974 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7975 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7976 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7977 int final_int = INTEGRAL_TYPE_P (type);
7978 int final_ptr = POINTER_TYPE_P (type);
7979 int final_float = FLOAT_TYPE_P (type);
7980 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7981 unsigned int final_prec = TYPE_PRECISION (type);
7982 int final_unsignedp = TYPE_UNSIGNED (type);
7983
7984 /* In addition to the cases of two conversions in a row
7985 handled below, if we are converting something to its own
7986 type via an object of identical or wider precision, neither
7987 conversion is needed. */
7988 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7989 && (((inter_int || inter_ptr) && final_int)
7990 || (inter_float && final_float))
7991 && inter_prec >= final_prec)
7992 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7993
7994 /* Likewise, if the intermediate and initial types are either both
7995 float or both integer, we don't need the middle conversion if the
7996 former is wider than the latter and doesn't change the signedness
7997 (for integers). Avoid this if the final type is a pointer since
7998 then we sometimes need the middle conversion. Likewise if the
7999 final type has a precision not equal to the size of its mode. */
8000 if (((inter_int && inside_int)
8001 || (inter_float && inside_float)
8002 || (inter_vec && inside_vec))
8003 && inter_prec >= inside_prec
8004 && (inter_float || inter_vec
8005 || inter_unsignedp == inside_unsignedp)
8006 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8007 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8008 && ! final_ptr
8009 && (! final_vec || inter_prec == inside_prec))
8010 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8011
8012 /* If we have a sign-extension of a zero-extended value, we can
8013 replace that by a single zero-extension. Likewise if the
8014 final conversion does not change precision we can drop the
8015 intermediate conversion. */
8016 if (inside_int && inter_int && final_int
8017 && ((inside_prec < inter_prec && inter_prec < final_prec
8018 && inside_unsignedp && !inter_unsignedp)
8019 || final_prec == inter_prec))
8020 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8021
8022 /* Two conversions in a row are not needed unless:
8023 - some conversion is floating-point (overstrict for now), or
8024 - some conversion is a vector (overstrict for now), or
8025 - the intermediate type is narrower than both initial and
8026 final, or
8027 - the intermediate type and innermost type differ in signedness,
8028 and the outermost type is wider than the intermediate, or
8029 - the initial type is a pointer type and the precisions of the
8030 intermediate and final types differ, or
8031 - the final type is a pointer type and the precisions of the
8032 initial and intermediate types differ. */
8033 if (! inside_float && ! inter_float && ! final_float
8034 && ! inside_vec && ! inter_vec && ! final_vec
8035 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8036 && ! (inside_int && inter_int
8037 && inter_unsignedp != inside_unsignedp
8038 && inter_prec < final_prec)
8039 && ((inter_unsignedp && inter_prec > inside_prec)
8040 == (final_unsignedp && final_prec > inter_prec))
8041 && ! (inside_ptr && inter_prec != final_prec)
8042 && ! (final_ptr && inside_prec != inter_prec)
8043 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8044 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8045 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8046 }
8047
8048 /* Handle (T *)&A.B.C for A being of type T and B and C
8049 living at offset zero. This occurs frequently in
8050 C++ upcasting and then accessing the base. */
8051 if (TREE_CODE (op0) == ADDR_EXPR
8052 && POINTER_TYPE_P (type)
8053 && handled_component_p (TREE_OPERAND (op0, 0)))
8054 {
8055 HOST_WIDE_INT bitsize, bitpos;
8056 tree offset;
8057 enum machine_mode mode;
8058 int unsignedp, volatilep;
8059 tree base = TREE_OPERAND (op0, 0);
8060 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8061 &mode, &unsignedp, &volatilep, false);
8062 /* If the reference was to a (constant) zero offset, we can use
8063 the address of the base if it has the same base type
8064 as the result type and the pointer type is unqualified. */
8065 if (! offset && bitpos == 0
8066 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8067 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8068 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8069 return fold_convert_loc (loc, type,
8070 build_fold_addr_expr_loc (loc, base));
8071 }
8072
8073 if (TREE_CODE (op0) == MODIFY_EXPR
8074 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8075 /* Detect assigning a bitfield. */
8076 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8077 && DECL_BIT_FIELD
8078 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8079 {
8080 /* Don't leave an assignment inside a conversion
8081 unless assigning a bitfield. */
8082 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8083 /* First do the assignment, then return converted constant. */
8084 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8085 TREE_NO_WARNING (tem) = 1;
8086 TREE_USED (tem) = 1;
8087 return tem;
8088 }
8089
8090 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8091        constant (if x has signed type, the sign bit cannot be set
8092 in c). This folds extension into the BIT_AND_EXPR.
8093 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8094 very likely don't have maximal range for their precision and this
8095 transformation effectively doesn't preserve non-maximal ranges. */
8096 if (TREE_CODE (type) == INTEGER_TYPE
8097 && TREE_CODE (op0) == BIT_AND_EXPR
8098 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8099 {
8100 tree and_expr = op0;
8101 tree and0 = TREE_OPERAND (and_expr, 0);
8102 tree and1 = TREE_OPERAND (and_expr, 1);
8103 int change = 0;
8104
8105 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8106 || (TYPE_PRECISION (type)
8107 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8108 change = 1;
8109 else if (TYPE_PRECISION (TREE_TYPE (and1))
8110 <= HOST_BITS_PER_WIDE_INT
8111 && host_integerp (and1, 1))
8112 {
8113 unsigned HOST_WIDE_INT cst;
8114
8115 cst = tree_low_cst (and1, 1);
8116 cst &= HOST_WIDE_INT_M1U
8117 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8118 change = (cst == 0);
8119 #ifdef LOAD_EXTEND_OP
8120 if (change
8121 && !flag_syntax_only
8122 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8123 == ZERO_EXTEND))
8124 {
8125 tree uns = unsigned_type_for (TREE_TYPE (and0));
8126 and0 = fold_convert_loc (loc, uns, and0);
8127 and1 = fold_convert_loc (loc, uns, and1);
8128 }
8129 #endif
8130 }
8131 if (change)
8132 {
8133 tem = force_fit_type_double (type, tree_to_double_int (and1),
8134 0, TREE_OVERFLOW (and1));
8135 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8136 fold_convert_loc (loc, type, and0), tem);
8137 }
8138 }
8139
8140 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8141 when one of the new casts will fold away. Conservatively we assume
8142 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8143 if (POINTER_TYPE_P (type)
8144 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8145 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8146 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8147 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8148 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8149 {
8150 tree arg00 = TREE_OPERAND (arg0, 0);
8151 tree arg01 = TREE_OPERAND (arg0, 1);
8152
8153 return fold_build_pointer_plus_loc
8154 (loc, fold_convert_loc (loc, type, arg00), arg01);
8155 }
8156
8157 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8158 of the same precision, and X is an integer type not narrower than
8159 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8160 if (INTEGRAL_TYPE_P (type)
8161 && TREE_CODE (op0) == BIT_NOT_EXPR
8162 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8163 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8164 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8165 {
8166 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8167 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8168 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8169 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8170 fold_convert_loc (loc, type, tem));
8171 }
8172
8173 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8174 type of X and Y (integer types only). */
8175 if (INTEGRAL_TYPE_P (type)
8176 && TREE_CODE (op0) == MULT_EXPR
8177 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8178 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8179 {
8180 /* Be careful not to introduce new overflows. */
8181 tree mult_type;
8182 if (TYPE_OVERFLOW_WRAPS (type))
8183 mult_type = type;
8184 else
8185 mult_type = unsigned_type_for (type);
8186
8187 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8188 {
8189 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8190 fold_convert_loc (loc, mult_type,
8191 TREE_OPERAND (op0, 0)),
8192 fold_convert_loc (loc, mult_type,
8193 TREE_OPERAND (op0, 1)));
8194 return fold_convert_loc (loc, type, tem);
8195 }
8196 }
8197
8198 tem = fold_convert_const (code, type, op0);
8199 return tem ? tem : NULL_TREE;
8200
8201 case ADDR_SPACE_CONVERT_EXPR:
8202 if (integer_zerop (arg0))
8203 return fold_convert_const (code, type, arg0);
8204 return NULL_TREE;
8205
8206 case FIXED_CONVERT_EXPR:
8207 tem = fold_convert_const (code, type, arg0);
8208 return tem ? tem : NULL_TREE;
8209
8210 case VIEW_CONVERT_EXPR:
8211 if (TREE_TYPE (op0) == type)
8212 return op0;
8213 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8214 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8215 type, TREE_OPERAND (op0, 0));
8216 if (TREE_CODE (op0) == MEM_REF)
8217 return fold_build2_loc (loc, MEM_REF, type,
8218 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8219
8220 /* For integral conversions with the same precision or pointer
8221 conversions use a NOP_EXPR instead. */
8222 if ((INTEGRAL_TYPE_P (type)
8223 || POINTER_TYPE_P (type))
8224 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8225 || POINTER_TYPE_P (TREE_TYPE (op0)))
8226 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8227 return fold_convert_loc (loc, type, op0);
8228
8229 /* Strip inner integral conversions that do not change the precision. */
8230 if (CONVERT_EXPR_P (op0)
8231 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8232 || POINTER_TYPE_P (TREE_TYPE (op0)))
8233 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8234 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8235 && (TYPE_PRECISION (TREE_TYPE (op0))
8236 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8237 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8238 type, TREE_OPERAND (op0, 0));
8239
8240 return fold_view_convert_expr (type, op0);
8241
8242 case NEGATE_EXPR:
8243 tem = fold_negate_expr (loc, arg0);
8244 if (tem)
8245 return fold_convert_loc (loc, type, tem);
8246 return NULL_TREE;
8247
8248 case ABS_EXPR:
8249 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8250 return fold_abs_const (arg0, type);
8251 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8252 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8253 /* Convert fabs((double)float) into (double)fabsf(float). */
8254 else if (TREE_CODE (arg0) == NOP_EXPR
8255 && TREE_CODE (type) == REAL_TYPE)
8256 {
8257 tree targ0 = strip_float_extensions (arg0);
8258 if (targ0 != arg0)
8259 return fold_convert_loc (loc, type,
8260 fold_build1_loc (loc, ABS_EXPR,
8261 TREE_TYPE (targ0),
8262 targ0));
8263 }
8264 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8265 else if (TREE_CODE (arg0) == ABS_EXPR)
8266 return arg0;
8267 else if (tree_expr_nonnegative_p (arg0))
8268 return arg0;
8269
8270 /* Strip sign ops from argument. */
8271 if (TREE_CODE (type) == REAL_TYPE)
8272 {
8273 tem = fold_strip_sign_ops (arg0);
8274 if (tem)
8275 return fold_build1_loc (loc, ABS_EXPR, type,
8276 fold_convert_loc (loc, type, tem));
8277 }
8278 return NULL_TREE;
8279
8280 case CONJ_EXPR:
8281 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8282 return fold_convert_loc (loc, type, arg0);
8283 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8284 {
8285 tree itype = TREE_TYPE (type);
8286 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8287 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8288 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8289 negate_expr (ipart));
8290 }
8291 if (TREE_CODE (arg0) == COMPLEX_CST)
8292 {
8293 tree itype = TREE_TYPE (type);
8294 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8295 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8296 return build_complex (type, rpart, negate_expr (ipart));
8297 }
8298 if (TREE_CODE (arg0) == CONJ_EXPR)
8299 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8300 return NULL_TREE;
8301
8302 case BIT_NOT_EXPR:
8303 if (TREE_CODE (arg0) == INTEGER_CST)
8304 return fold_not_const (arg0, type);
8305 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8306 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8307 /* Convert ~ (-A) to A - 1. */
8308 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8309 return fold_build2_loc (loc, MINUS_EXPR, type,
8310 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8311 build_int_cst (type, 1));
8312 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8313 else if (INTEGRAL_TYPE_P (type)
8314 && ((TREE_CODE (arg0) == MINUS_EXPR
8315 && integer_onep (TREE_OPERAND (arg0, 1)))
8316 || (TREE_CODE (arg0) == PLUS_EXPR
8317 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8318 return fold_build1_loc (loc, NEGATE_EXPR, type,
8319 fold_convert_loc (loc, type,
8320 TREE_OPERAND (arg0, 0)));
8321 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8322 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8323 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8324 fold_convert_loc (loc, type,
8325 TREE_OPERAND (arg0, 0)))))
8326 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8327 fold_convert_loc (loc, type,
8328 TREE_OPERAND (arg0, 1)));
8329 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8330 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8331 fold_convert_loc (loc, type,
8332 TREE_OPERAND (arg0, 1)))))
8333 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8334 fold_convert_loc (loc, type,
8335 TREE_OPERAND (arg0, 0)), tem);
8336 /* Perform BIT_NOT_EXPR on each element individually. */
8337 else if (TREE_CODE (arg0) == VECTOR_CST)
8338 {
8339 tree *elements;
8340 tree elem;
8341 unsigned count = VECTOR_CST_NELTS (arg0), i;
8342
8343 elements = XALLOCAVEC (tree, count);
8344 for (i = 0; i < count; i++)
8345 {
8346 elem = VECTOR_CST_ELT (arg0, i);
8347 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8348 if (elem == NULL_TREE)
8349 break;
8350 elements[i] = elem;
8351 }
8352 if (i == count)
8353 return build_vector (type, elements);
8354 }
8355 else if (COMPARISON_CLASS_P (arg0)
8356 && (VECTOR_TYPE_P (type)
8357 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8358 {
8359 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8360 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8361 HONOR_NANS (TYPE_MODE (op_type)));
8362 if (subcode != ERROR_MARK)
8363 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8364 TREE_OPERAND (arg0, 1));
8365 }
8366
8368 return NULL_TREE;
8369
8370 case TRUTH_NOT_EXPR:
8371 /* Note that the operand of this must be an int
8372 and its values must be 0 or 1.
8373 ("true" is a fixed value perhaps depending on the language,
8374 but we don't handle values other than 1 correctly yet.) */
8375 tem = fold_truth_not_expr (loc, arg0);
8376 if (!tem)
8377 return NULL_TREE;
8378 return fold_convert_loc (loc, type, tem);
8379
8380 case REALPART_EXPR:
8381 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8382 return fold_convert_loc (loc, type, arg0);
8383 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8384 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8385 TREE_OPERAND (arg0, 1));
8386 if (TREE_CODE (arg0) == COMPLEX_CST)
8387 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8388 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8389 {
8390 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8391 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8392 fold_build1_loc (loc, REALPART_EXPR, itype,
8393 TREE_OPERAND (arg0, 0)),
8394 fold_build1_loc (loc, REALPART_EXPR, itype,
8395 TREE_OPERAND (arg0, 1)));
8396 return fold_convert_loc (loc, type, tem);
8397 }
8398 if (TREE_CODE (arg0) == CONJ_EXPR)
8399 {
8400 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8401 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8402 TREE_OPERAND (arg0, 0));
8403 return fold_convert_loc (loc, type, tem);
8404 }
8405 if (TREE_CODE (arg0) == CALL_EXPR)
8406 {
8407 tree fn = get_callee_fndecl (arg0);
8408 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8409 switch (DECL_FUNCTION_CODE (fn))
8410 {
8411 CASE_FLT_FN (BUILT_IN_CEXPI):
8412 fn = mathfn_built_in (type, BUILT_IN_COS);
8413 if (fn)
8414 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8415 break;
8416
8417 default:
8418 break;
8419 }
8420 }
8421 return NULL_TREE;
8422
8423 case IMAGPART_EXPR:
8424 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8425 return build_zero_cst (type);
8426 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8427 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8428 TREE_OPERAND (arg0, 0));
8429 if (TREE_CODE (arg0) == COMPLEX_CST)
8430 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8431 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8432 {
8433 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8434 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8435 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8436 TREE_OPERAND (arg0, 0)),
8437 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8438 TREE_OPERAND (arg0, 1)));
8439 return fold_convert_loc (loc, type, tem);
8440 }
8441 if (TREE_CODE (arg0) == CONJ_EXPR)
8442 {
8443 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8444 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8445 return fold_convert_loc (loc, type, negate_expr (tem));
8446 }
8447 if (TREE_CODE (arg0) == CALL_EXPR)
8448 {
8449 tree fn = get_callee_fndecl (arg0);
8450 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8451 switch (DECL_FUNCTION_CODE (fn))
8452 {
8453 CASE_FLT_FN (BUILT_IN_CEXPI):
8454 fn = mathfn_built_in (type, BUILT_IN_SIN);
8455 if (fn)
8456 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8457 break;
8458
8459 default:
8460 break;
8461 }
8462 }
8463 return NULL_TREE;
8464
8465 case INDIRECT_REF:
8466 /* Fold *&X to X if X is an lvalue. */
8467 if (TREE_CODE (op0) == ADDR_EXPR)
8468 {
8469 tree op00 = TREE_OPERAND (op0, 0);
8470 if ((TREE_CODE (op00) == VAR_DECL
8471 || TREE_CODE (op00) == PARM_DECL
8472 || TREE_CODE (op00) == RESULT_DECL)
8473 && !TREE_READONLY (op00))
8474 return op00;
8475 }
8476 return NULL_TREE;
8477
8478 case VEC_UNPACK_LO_EXPR:
8479 case VEC_UNPACK_HI_EXPR:
8480 case VEC_UNPACK_FLOAT_LO_EXPR:
8481 case VEC_UNPACK_FLOAT_HI_EXPR:
8482 {
8483 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8484 tree *elts;
8485 enum tree_code subcode;
8486
8487 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8488 if (TREE_CODE (arg0) != VECTOR_CST)
8489 return NULL_TREE;
8490
8491 elts = XALLOCAVEC (tree, nelts * 2);
8492 if (!vec_cst_ctor_to_array (arg0, elts))
8493 return NULL_TREE;
8494
8495 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8496 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8497 elts += nelts;
8498
8499 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8500 subcode = NOP_EXPR;
8501 else
8502 subcode = FLOAT_EXPR;
8503
8504 for (i = 0; i < nelts; i++)
8505 {
8506 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8507 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8508 return NULL_TREE;
8509 }
8510
8511 return build_vector (type, elts);
8512 }
8513
8514 case REDUC_MIN_EXPR:
8515 case REDUC_MAX_EXPR:
8516 case REDUC_PLUS_EXPR:
8517 {
8518 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8519 tree *elts;
8520 enum tree_code subcode;
8521
8522 if (TREE_CODE (op0) != VECTOR_CST)
8523 return NULL_TREE;
8524
8525 elts = XALLOCAVEC (tree, nelts);
8526 if (!vec_cst_ctor_to_array (op0, elts))
8527 return NULL_TREE;
8528
8529 switch (code)
8530 {
8531 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8532 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8533 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8534 default: gcc_unreachable ();
8535 }
8536
8537 for (i = 1; i < nelts; i++)
8538 {
8539 elts[0] = const_binop (subcode, elts[0], elts[i]);
8540 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8541 return NULL_TREE;
8542 elts[i] = build_zero_cst (TREE_TYPE (type));
8543 }
8544
8545 return build_vector (type, elts);
8546 }
8547
8548 default:
8549 return NULL_TREE;
8550 } /* switch (code) */
8551 }
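
/* [Editorial sketch -- not part of fold-const.c.]  Two of the unary
   folds above, in source form.  Plain ISO C, hypothetical demo
   name.  */
#include <assert.h>

static void
demo_unary_folds (unsigned short s, int x)
{
  /* A sign-extension of a zero-extended value is a single
     zero-extension: the intermediate cast can be dropped.  */
  assert ((long) (unsigned int) s == (long) s);
  /* ~(-A) == A - 1 in wrapping two's-complement arithmetic, which is
     how the BIT_NOT_EXPR case rewrites it (done here in unsigned
     arithmetic to keep the demo well defined).  */
  assert (~(- (unsigned int) x) == (unsigned int) x - 1u);
}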
8552
8553
8554 /* If the operation was a conversion, do _not_ mark a resulting constant
8555 with TREE_OVERFLOW if the original constant was not. These conversions
8556 have implementation defined behavior and retaining the TREE_OVERFLOW
8557 flag here would confuse later passes such as VRP. */
8558 tree
8559 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8560 tree type, tree op0)
8561 {
8562 tree res = fold_unary_loc (loc, code, type, op0);
8563 if (res
8564 && TREE_CODE (res) == INTEGER_CST
8565 && TREE_CODE (op0) == INTEGER_CST
8566 && CONVERT_EXPR_CODE_P (code))
8567 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8568
8569 return res;
8570 }
8571
8572 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8573 operands OP0 and OP1. LOC is the location of the resulting expression.
8574    ARG0 and ARG1 are the NOP-stripped (STRIP_NOPS) results of OP0 and OP1.
8575 Return the folded expression if folding is successful. Otherwise,
8576 return NULL_TREE. */
8577 static tree
8578 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8579 tree arg0, tree arg1, tree op0, tree op1)
8580 {
8581 tree tem;
8582
8583 /* We only do these simplifications if we are optimizing. */
8584 if (!optimize)
8585 return NULL_TREE;
8586
8587 /* Check for things like (A || B) && (A || C). We can convert this
8588 to A || (B && C). Note that either operator can be any of the four
8589 truth and/or operations and the transformation will still be
8590 valid. Also note that we only care about order for the
8591 ANDIF and ORIF operators. If B contains side effects, this
8592 might change the truth-value of A. */
8593 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8594 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8595 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8596 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8597 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8598 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8599 {
8600 tree a00 = TREE_OPERAND (arg0, 0);
8601 tree a01 = TREE_OPERAND (arg0, 1);
8602 tree a10 = TREE_OPERAND (arg1, 0);
8603 tree a11 = TREE_OPERAND (arg1, 1);
8604 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8605 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8606 && (code == TRUTH_AND_EXPR
8607 || code == TRUTH_OR_EXPR));
8608
8609 if (operand_equal_p (a00, a10, 0))
8610 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8611 fold_build2_loc (loc, code, type, a01, a11));
8612 else if (commutative && operand_equal_p (a00, a11, 0))
8613 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8614 fold_build2_loc (loc, code, type, a01, a10));
8615 else if (commutative && operand_equal_p (a01, a10, 0))
8616 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8617 fold_build2_loc (loc, code, type, a00, a11));
8618
8619 /* This case is tricky because we must either have commutative
8620 operators or else A10 must not have side-effects. */
8621
8622 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8623 && operand_equal_p (a01, a11, 0))
8624 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8625 fold_build2_loc (loc, code, type, a00, a10),
8626 a01);
8627 }
8628
8629 /* See if we can build a range comparison. */
8630 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8631 return tem;
8632
8633 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8634 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8635 {
8636 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8637 if (tem)
8638 return fold_build2_loc (loc, code, type, tem, arg1);
8639 }
8640
8641 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8642 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8643 {
8644 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8645 if (tem)
8646 return fold_build2_loc (loc, code, type, arg0, tem);
8647 }
8648
8649 /* Check for the possibility of merging component references. If our
8650 lhs is another similar operation, try to merge its rhs with our
8651 rhs. Then try to merge our lhs and rhs. */
8652 if (TREE_CODE (arg0) == code
8653 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8654 TREE_OPERAND (arg0, 1), arg1)))
8655 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8656
8657 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8658 return tem;
8659
8660 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8661 && (code == TRUTH_AND_EXPR
8662 || code == TRUTH_ANDIF_EXPR
8663 || code == TRUTH_OR_EXPR
8664 || code == TRUTH_ORIF_EXPR))
8665 {
8666 enum tree_code ncode, icode;
8667
8668 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8669 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8670 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8671
8672 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8673 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8674 We don't want to pack more than two leaves into a non-IF
8675 AND/OR expression.
8676 If the tree code of the left-hand operand isn't an AND/OR-IF code
8677 and isn't equal to IF-CODE, then we don't want to add the
8678 right-hand operand. If the inner right-hand side of the left-hand
8679 operand has side-effects, or isn't simple, then we can't add to
8680 it, as otherwise we might destroy the if-sequence. */
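/* Illustrative sketch (assumed typical input): on a
   LOGICAL_OP_NON_SHORT_CIRCUIT target, ((a && b) && c) becomes
   a AND-IF (b AND c): a still short-circuits, while b and c are
   merged into one unconditionally evaluated TRUTH_AND_EXPR. */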
8681 if (TREE_CODE (arg0) == icode
8682 && simple_operand_p_2 (arg1)
8683 /* Needed for sequence points, to handle trapping
8684 and side-effects. */
8685 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8686 {
8687 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8688 arg1);
8689 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8690 tem);
8691 }
8692 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8693 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8694 else if (TREE_CODE (arg1) == icode
8695 && simple_operand_p_2 (arg0)
8696 /* Needed for sequence points, to handle trapping
8697 and side-effects. */
8698 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8699 {
8700 tem = fold_build2_loc (loc, ncode, type,
8701 arg0, TREE_OPERAND (arg1, 0));
8702 return fold_build2_loc (loc, icode, type, tem,
8703 TREE_OPERAND (arg1, 1));
8704 }
8705 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8706 into (A OR B).
8707 For sequence point consistency, we need to check for trapping
8708 and side-effects. */
8709 else if (code == icode && simple_operand_p_2 (arg0)
8710 && simple_operand_p_2 (arg1))
8711 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8712 }
8713
8714 return NULL_TREE;
8715 }
8716
8717 /* Fold a binary expression of code CODE and type TYPE with operands
8718 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8719 Return the folded expression if folding is successful. Otherwise,
8720 return NULL_TREE. */
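/* E.g. (illustrative): MIN (MAX (a, b), b) folds to b, and
   MIN (a, MAX (a, b)) folds to a; omit_one_operand_loc keeps the
   discarded operand alive when it has side-effects. */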
8721
8722 static tree
8723 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8724 {
8725 enum tree_code compl_code;
8726
8727 if (code == MIN_EXPR)
8728 compl_code = MAX_EXPR;
8729 else if (code == MAX_EXPR)
8730 compl_code = MIN_EXPR;
8731 else
8732 gcc_unreachable ();
8733
8734 /* MIN (MAX (a, b), b) == b. */
8735 if (TREE_CODE (op0) == compl_code
8736 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8737 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8738
8739 /* MIN (MAX (b, a), b) == b. */
8740 if (TREE_CODE (op0) == compl_code
8741 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8742 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8743 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8744
8745 /* MIN (a, MAX (a, b)) == a. */
8746 if (TREE_CODE (op1) == compl_code
8747 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8748 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8749 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8750
8751 /* MIN (a, MAX (b, a)) == a. */
8752 if (TREE_CODE (op1) == compl_code
8753 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8754 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8755 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8756
8757 return NULL_TREE;
8758 }
8759
8760 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8761 by changing CODE to reduce the magnitude of constants involved in
8762 ARG0 of the comparison.
8763 Returns a canonicalized comparison tree if a simplification was
8764 possible, otherwise returns NULL_TREE.
8765 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8766 valid if signed overflow is undefined. */
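/* Example (illustrative only): for signed x, "x + 2 <= y" is
   canonicalized to "x + 1 < y", shrinking the constant; this relies
   on signed overflow being undefined, hence *STRICT_OVERFLOW_P. */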
8767
8768 static tree
8769 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8770 tree arg0, tree arg1,
8771 bool *strict_overflow_p)
8772 {
8773 enum tree_code code0 = TREE_CODE (arg0);
8774 tree t, cst0 = NULL_TREE;
8775 int sgn0;
8776 bool swap = false;
8777
8778 /* Match A +- CST code arg1 and CST code arg1. We can change the
8779 first form only if overflow is undefined. */
8780 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8781 /* In principle pointers also have undefined overflow behavior,
8782 but that causes problems elsewhere. */
8783 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8784 && (code0 == MINUS_EXPR
8785 || code0 == PLUS_EXPR)
8786 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8787 || code0 == INTEGER_CST))
8788 return NULL_TREE;
8789
8790 /* Identify the constant in arg0 and its sign. */
8791 if (code0 == INTEGER_CST)
8792 cst0 = arg0;
8793 else
8794 cst0 = TREE_OPERAND (arg0, 1);
8795 sgn0 = tree_int_cst_sgn (cst0);
8796
8797 /* Overflowed constants and zero will cause problems. */
8798 if (integer_zerop (cst0)
8799 || TREE_OVERFLOW (cst0))
8800 return NULL_TREE;
8801
8802 /* See if we can reduce the magnitude of the constant in
8803 arg0 by changing the comparison code. */
8804 if (code0 == INTEGER_CST)
8805 {
8806 /* CST <= arg1 -> CST-1 < arg1. */
8807 if (code == LE_EXPR && sgn0 == 1)
8808 code = LT_EXPR;
8809 /* -CST < arg1 -> -CST-1 <= arg1. */
8810 else if (code == LT_EXPR && sgn0 == -1)
8811 code = LE_EXPR;
8812 /* CST > arg1 -> CST-1 >= arg1. */
8813 else if (code == GT_EXPR && sgn0 == 1)
8814 code = GE_EXPR;
8815 /* -CST >= arg1 -> -CST-1 > arg1. */
8816 else if (code == GE_EXPR && sgn0 == -1)
8817 code = GT_EXPR;
8818 else
8819 return NULL_TREE;
8820 /* arg1 code' CST' might be more canonical. */
8821 swap = true;
8822 }
8823 else
8824 {
8825 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8826 if (code == LT_EXPR
8827 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8828 code = LE_EXPR;
8829 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8830 else if (code == GT_EXPR
8831 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8832 code = GE_EXPR;
8833 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8834 else if (code == LE_EXPR
8835 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8836 code = LT_EXPR;
8837 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8838 else if (code == GE_EXPR
8839 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8840 code = GT_EXPR;
8841 else
8842 return NULL_TREE;
8843 *strict_overflow_p = true;
8844 }
8845
8846 /* Now build the constant reduced in magnitude. But not if that
8847 would produce one outside of its type's range. */
8848 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8849 && ((sgn0 == 1
8850 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8851 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8852 || (sgn0 == -1
8853 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8854 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8855 /* We cannot swap the comparison here as that would cause us to
8856 endlessly recurse. */
8857 return NULL_TREE;
8858
8859 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8860 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8861 if (code0 != INTEGER_CST)
8862 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8863 t = fold_convert (TREE_TYPE (arg1), t);
8864
8865 /* If swapping might yield a more canonical form, do so. */
8866 if (swap)
8867 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8868 else
8869 return fold_build2_loc (loc, code, type, t, arg1);
8870 }
8871
8872 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8873 overflow further. Try to decrease the magnitude of constants involved
8874 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8875 and put sole constants at the second argument position.
8876 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8877
8878 static tree
8879 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8880 tree arg0, tree arg1)
8881 {
8882 tree t;
8883 bool strict_overflow_p;
8884 const char * const warnmsg = G_("assuming signed overflow does not occur "
8885 "when reducing constant in comparison");
8886
8887 /* Try canonicalization by simplifying arg0. */
8888 strict_overflow_p = false;
8889 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8890 &strict_overflow_p);
8891 if (t)
8892 {
8893 if (strict_overflow_p)
8894 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8895 return t;
8896 }
8897
8898 /* Try canonicalization by simplifying arg1 using the swapped
8899 comparison. */
8900 code = swap_tree_comparison (code);
8901 strict_overflow_p = false;
8902 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8903 &strict_overflow_p);
8904 if (t && strict_overflow_p)
8905 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8906 return t;
8907 }
8908
8909 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8910 space. This is used to avoid issuing overflow warnings for
8911 expressions like &p->x which cannot wrap. */
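/* Sketch of the idea (sizes below are illustrative assumptions):
   given "struct S { int a; int b; } *p" with 4-byte int, &p->b is
   p + 4; the offset 4 is within the 8-byte size of struct S, so the
   addition cannot wrap and this returns false. */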
8912
8913 static bool
8914 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8915 {
8916 double_int di_offset, total;
8917
8918 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8919 return true;
8920
8921 if (bitpos < 0)
8922 return true;
8923
8924 if (offset == NULL_TREE)
8925 di_offset = double_int_zero;
8926 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8927 return true;
8928 else
8929 di_offset = TREE_INT_CST (offset);
8930
8931 bool overflow;
8932 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8933 total = di_offset.add_with_sign (units, true, &overflow);
8934 if (overflow)
8935 return true;
8936
8937 if (total.high != 0)
8938 return true;
8939
8940 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8941 if (size <= 0)
8942 return true;
8943
8944 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8945 array. */
8946 if (TREE_CODE (base) == ADDR_EXPR)
8947 {
8948 HOST_WIDE_INT base_size;
8949
8950 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8951 if (base_size > 0 && size < base_size)
8952 size = base_size;
8953 }
8954
8955 return total.low > (unsigned HOST_WIDE_INT) size;
8956 }
8957
8958 /* Subroutine of fold_binary. This routine performs all of the
8959 transformations that are common to the equality/inequality
8960 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8961 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8962 fold_binary should go through fold_binary instead. Fold a comparison with
8963 tree code CODE and type TYPE with operands OP0 and OP1. Return
8964 the folded comparison or NULL_TREE. */
8965
8966 static tree
8967 fold_comparison (location_t loc, enum tree_code code, tree type,
8968 tree op0, tree op1)
8969 {
8970 tree arg0, arg1, tem;
8971
8972 arg0 = op0;
8973 arg1 = op1;
8974
8975 STRIP_SIGN_NOPS (arg0);
8976 STRIP_SIGN_NOPS (arg1);
8977
8978 tem = fold_relational_const (code, type, arg0, arg1);
8979 if (tem != NULL_TREE)
8980 return tem;
8981
8982 /* If one arg is a real or integer constant, put it last. */
8983 if (tree_swap_operands_p (arg0, arg1, true))
8984 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8985
8986 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
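/* E.g. (illustrative, signed overflow assumed undefined):
   "x + 5 < 10" becomes "x < 5"; if computing C2 +- C1 overflows,
   the code below folds the whole comparison to a constant. */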
8987 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8988 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8989 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8990 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8991 && (TREE_CODE (arg1) == INTEGER_CST
8992 && !TREE_OVERFLOW (arg1)))
8993 {
8994 tree const1 = TREE_OPERAND (arg0, 1);
8995 tree const2 = arg1;
8996 tree variable = TREE_OPERAND (arg0, 0);
8997 tree lhs;
8998 int lhs_add;
8999 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9000
9001 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9002 TREE_TYPE (arg1), const2, const1);
9003
9004 /* If the constant operation overflowed this can be
9005 simplified as a comparison against INT_MAX/INT_MIN. */
9006 if (TREE_CODE (lhs) == INTEGER_CST
9007 && TREE_OVERFLOW (lhs))
9008 {
9009 int const1_sgn = tree_int_cst_sgn (const1);
9010 enum tree_code code2 = code;
9011
9012 /* Get the sign of the constant on the lhs if the
9013 operation were VARIABLE + CONST1. */
9014 if (TREE_CODE (arg0) == MINUS_EXPR)
9015 const1_sgn = -const1_sgn;
9016
9017 /* The sign of the constant determines if we overflowed
9018 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9019 Canonicalize to the INT_MIN overflow by swapping the comparison
9020 if necessary. */
9021 if (const1_sgn == -1)
9022 code2 = swap_tree_comparison (code);
9023
9024 /* We now can look at the canonicalized case
9025 VARIABLE + 1 CODE2 INT_MIN
9026 and decide on the result. */
9027 if (code2 == LT_EXPR
9028 || code2 == LE_EXPR
9029 || code2 == EQ_EXPR)
9030 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9031 else if (code2 == NE_EXPR
9032 || code2 == GE_EXPR
9033 || code2 == GT_EXPR)
9034 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9035 }
9036
9037 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9038 && (TREE_CODE (lhs) != INTEGER_CST
9039 || !TREE_OVERFLOW (lhs)))
9040 {
9041 if (code != EQ_EXPR && code != NE_EXPR)
9042 fold_overflow_warning ("assuming signed overflow does not occur "
9043 "when changing X +- C1 cmp C2 to "
9044 "X cmp C1 +- C2",
9045 WARN_STRICT_OVERFLOW_COMPARISON);
9046 return fold_build2_loc (loc, code, type, variable, lhs);
9047 }
9048 }
9049
9050 /* For comparisons of pointers we can decompose them into a compile-time
9051 comparison of the base objects and the offsets into the object.
9052 This requires at least one operand being an ADDR_EXPR or a
9053 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
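/* Illustrative case (not from the original source): given
   "int a[8];" with 4-byte int, &a[1] < &a[3] has equal bases and
   constant bit positions 32 and 96, so it folds to true through
   the bitpos0 < bitpos1 case below. */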
9054 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9055 && (TREE_CODE (arg0) == ADDR_EXPR
9056 || TREE_CODE (arg1) == ADDR_EXPR
9057 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9058 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9059 {
9060 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9061 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9062 enum machine_mode mode;
9063 int volatilep, unsignedp;
9064 bool indirect_base0 = false, indirect_base1 = false;
9065
9066 /* Get base and offset for the access. Strip ADDR_EXPR for
9067 get_inner_reference, but put it back by stripping INDIRECT_REF
9068 off the base object if possible. indirect_baseN will be true
9069 if baseN is not an address but refers to the object itself. */
9070 base0 = arg0;
9071 if (TREE_CODE (arg0) == ADDR_EXPR)
9072 {
9073 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9074 &bitsize, &bitpos0, &offset0, &mode,
9075 &unsignedp, &volatilep, false);
9076 if (TREE_CODE (base0) == INDIRECT_REF)
9077 base0 = TREE_OPERAND (base0, 0);
9078 else
9079 indirect_base0 = true;
9080 }
9081 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9082 {
9083 base0 = TREE_OPERAND (arg0, 0);
9084 STRIP_SIGN_NOPS (base0);
9085 if (TREE_CODE (base0) == ADDR_EXPR)
9086 {
9087 base0 = TREE_OPERAND (base0, 0);
9088 indirect_base0 = true;
9089 }
9090 offset0 = TREE_OPERAND (arg0, 1);
9091 if (host_integerp (offset0, 0))
9092 {
9093 HOST_WIDE_INT off = size_low_cst (offset0);
9094 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9095 * BITS_PER_UNIT)
9096 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9097 {
9098 bitpos0 = off * BITS_PER_UNIT;
9099 offset0 = NULL_TREE;
9100 }
9101 }
9102 }
9103
9104 base1 = arg1;
9105 if (TREE_CODE (arg1) == ADDR_EXPR)
9106 {
9107 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9108 &bitsize, &bitpos1, &offset1, &mode,
9109 &unsignedp, &volatilep, false);
9110 if (TREE_CODE (base1) == INDIRECT_REF)
9111 base1 = TREE_OPERAND (base1, 0);
9112 else
9113 indirect_base1 = true;
9114 }
9115 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9116 {
9117 base1 = TREE_OPERAND (arg1, 0);
9118 STRIP_SIGN_NOPS (base1);
9119 if (TREE_CODE (base1) == ADDR_EXPR)
9120 {
9121 base1 = TREE_OPERAND (base1, 0);
9122 indirect_base1 = true;
9123 }
9124 offset1 = TREE_OPERAND (arg1, 1);
9125 if (host_integerp (offset1, 0))
9126 {
9127 HOST_WIDE_INT off = size_low_cst (offset1);
9128 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9129 * BITS_PER_UNIT)
9130 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9131 {
9132 bitpos1 = off * BITS_PER_UNIT;
9133 offset1 = NULL_TREE;
9134 }
9135 }
9136 }
9137
9138 /* A local variable can never be pointed to by
9139 the default SSA name of an incoming parameter. */
9140 if ((TREE_CODE (arg0) == ADDR_EXPR
9141 && indirect_base0
9142 && TREE_CODE (base0) == VAR_DECL
9143 && auto_var_in_fn_p (base0, current_function_decl)
9144 && !indirect_base1
9145 && TREE_CODE (base1) == SSA_NAME
9146 && SSA_NAME_IS_DEFAULT_DEF (base1)
9147 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9148 || (TREE_CODE (arg1) == ADDR_EXPR
9149 && indirect_base1
9150 && TREE_CODE (base1) == VAR_DECL
9151 && auto_var_in_fn_p (base1, current_function_decl)
9152 && !indirect_base0
9153 && TREE_CODE (base0) == SSA_NAME
9154 && SSA_NAME_IS_DEFAULT_DEF (base0)
9155 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9156 {
9157 if (code == NE_EXPR)
9158 return constant_boolean_node (1, type);
9159 else if (code == EQ_EXPR)
9160 return constant_boolean_node (0, type);
9161 }
9162 /* If we have equivalent bases we might be able to simplify. */
9163 else if (indirect_base0 == indirect_base1
9164 && operand_equal_p (base0, base1, 0))
9165 {
9166 /* We can fold this expression to a constant if the non-constant
9167 offset parts are equal. */
9168 if ((offset0 == offset1
9169 || (offset0 && offset1
9170 && operand_equal_p (offset0, offset1, 0)))
9171 && (code == EQ_EXPR
9172 || code == NE_EXPR
9173 || (indirect_base0 && DECL_P (base0))
9174 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9175
9176 {
9177 if (code != EQ_EXPR
9178 && code != NE_EXPR
9179 && bitpos0 != bitpos1
9180 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9181 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9182 fold_overflow_warning (("assuming pointer wraparound does not "
9183 "occur when comparing P +- C1 with "
9184 "P +- C2"),
9185 WARN_STRICT_OVERFLOW_CONDITIONAL);
9186
9187 switch (code)
9188 {
9189 case EQ_EXPR:
9190 return constant_boolean_node (bitpos0 == bitpos1, type);
9191 case NE_EXPR:
9192 return constant_boolean_node (bitpos0 != bitpos1, type);
9193 case LT_EXPR:
9194 return constant_boolean_node (bitpos0 < bitpos1, type);
9195 case LE_EXPR:
9196 return constant_boolean_node (bitpos0 <= bitpos1, type);
9197 case GE_EXPR:
9198 return constant_boolean_node (bitpos0 >= bitpos1, type);
9199 case GT_EXPR:
9200 return constant_boolean_node (bitpos0 > bitpos1, type);
9201 default:;
9202 }
9203 }
9204 /* We can simplify the comparison to a comparison of the variable
9205 offset parts if the constant offset parts are equal.
9206 Be careful to use signed sizetype here because otherwise we
9207 mess with array offsets in the wrong way. This is possible
9208 because pointer arithmetic is restricted to remain within an
9209 object and overflow on pointer differences is undefined as of
9210 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9211 else if (bitpos0 == bitpos1
9212 && ((code == EQ_EXPR || code == NE_EXPR)
9213 || (indirect_base0 && DECL_P (base0))
9214 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9215 {
9216 /* By converting to signed sizetype we cover middle-end pointer
9217 arithmetic which operates on unsigned pointer types of size
9218 type size and ARRAY_REF offsets which are properly sign or
9219 zero extended from their type in case it is narrower than
9220 sizetype. */
9221 if (offset0 == NULL_TREE)
9222 offset0 = build_int_cst (ssizetype, 0);
9223 else
9224 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9225 if (offset1 == NULL_TREE)
9226 offset1 = build_int_cst (ssizetype, 0);
9227 else
9228 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9229
9230 if (code != EQ_EXPR
9231 && code != NE_EXPR
9232 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9233 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9234 fold_overflow_warning (("assuming pointer wraparound does not "
9235 "occur when comparing P +- C1 with "
9236 "P +- C2"),
9237 WARN_STRICT_OVERFLOW_COMPARISON);
9238
9239 return fold_build2_loc (loc, code, type, offset0, offset1);
9240 }
9241 }
9242 /* For non-equal bases we can simplify if they are addresses
9243 of local binding decls or constants. */
9244 else if (indirect_base0 && indirect_base1
9245 /* We know that !operand_equal_p (base0, base1, 0)
9246 because the if condition was false. But make
9247 sure two decls are not the same. */
9248 && base0 != base1
9249 && TREE_CODE (arg0) == ADDR_EXPR
9250 && TREE_CODE (arg1) == ADDR_EXPR
9251 && (((TREE_CODE (base0) == VAR_DECL
9252 || TREE_CODE (base0) == PARM_DECL)
9253 && (targetm.binds_local_p (base0)
9254 || CONSTANT_CLASS_P (base1)))
9255 || CONSTANT_CLASS_P (base0))
9256 && (((TREE_CODE (base1) == VAR_DECL
9257 || TREE_CODE (base1) == PARM_DECL)
9258 && (targetm.binds_local_p (base1)
9259 || CONSTANT_CLASS_P (base0)))
9260 || CONSTANT_CLASS_P (base1)))
9261 {
9262 if (code == EQ_EXPR)
9263 return omit_two_operands_loc (loc, type, boolean_false_node,
9264 arg0, arg1);
9265 else if (code == NE_EXPR)
9266 return omit_two_operands_loc (loc, type, boolean_true_node,
9267 arg0, arg1);
9268 }
9269 /* For equal offsets we can simplify to a comparison of the
9270 base addresses. */
9271 else if (bitpos0 == bitpos1
9272 && (indirect_base0
9273 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9274 && (indirect_base1
9275 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9276 && ((offset0 == offset1)
9277 || (offset0 && offset1
9278 && operand_equal_p (offset0, offset1, 0))))
9279 {
9280 if (indirect_base0)
9281 base0 = build_fold_addr_expr_loc (loc, base0);
9282 if (indirect_base1)
9283 base1 = build_fold_addr_expr_loc (loc, base1);
9284 return fold_build2_loc (loc, code, type, base0, base1);
9285 }
9286 }
9287
9288 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9289 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9290 the resulting offset is smaller in absolute value than the
9291 original one. */
9292 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9293 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9294 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9295 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9296 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9297 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9298 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9299 {
9300 tree const1 = TREE_OPERAND (arg0, 1);
9301 tree const2 = TREE_OPERAND (arg1, 1);
9302 tree variable1 = TREE_OPERAND (arg0, 0);
9303 tree variable2 = TREE_OPERAND (arg1, 0);
9304 tree cst;
9305 const char * const warnmsg = G_("assuming signed overflow does not "
9306 "occur when combining constants around "
9307 "a comparison");
9308
9309 /* Put the constant on the side where it doesn't overflow and is
9310 of lower absolute value than before. */
9311 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9312 ? MINUS_EXPR : PLUS_EXPR,
9313 const2, const1);
9314 if (!TREE_OVERFLOW (cst)
9315 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9316 {
9317 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9318 return fold_build2_loc (loc, code, type,
9319 variable1,
9320 fold_build2_loc (loc,
9321 TREE_CODE (arg1), TREE_TYPE (arg1),
9322 variable2, cst));
9323 }
9324
9325 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9326 ? MINUS_EXPR : PLUS_EXPR,
9327 const1, const2);
9328 if (!TREE_OVERFLOW (cst)
9329 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9330 {
9331 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9332 return fold_build2_loc (loc, code, type,
9333 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9334 variable1, cst),
9335 variable2);
9336 }
9337 }
9338
9339 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9340 signed arithmetic case. That form is created by the compiler
9341 often enough for folding it to be of value. One example is in
9342 computing loop trip counts after Operator Strength Reduction. */
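/* E.g. (illustrative, signed x): "x * 4 > 0" folds to "x > 0",
   and "x * -4 > 0" folds to "x < 0", since a negative constant
   swaps the comparison sense. */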
9343 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9344 && TREE_CODE (arg0) == MULT_EXPR
9345 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9346 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9347 && integer_zerop (arg1))
9348 {
9349 tree const1 = TREE_OPERAND (arg0, 1);
9350 tree const2 = arg1; /* zero */
9351 tree variable1 = TREE_OPERAND (arg0, 0);
9352 enum tree_code cmp_code = code;
9353
9354 /* Handle unfolded multiplication by zero. */
9355 if (integer_zerop (const1))
9356 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9357
9358 fold_overflow_warning (("assuming signed overflow does not occur when "
9359 "eliminating multiplication in comparison "
9360 "with zero"),
9361 WARN_STRICT_OVERFLOW_COMPARISON);
9362
9363 /* If const1 is negative we swap the sense of the comparison. */
9364 if (tree_int_cst_sgn (const1) < 0)
9365 cmp_code = swap_tree_comparison (cmp_code);
9366
9367 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9368 }
9369
9370 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9371 if (tem)
9372 return tem;
9373
9374 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9375 {
9376 tree targ0 = strip_float_extensions (arg0);
9377 tree targ1 = strip_float_extensions (arg1);
9378 tree newtype = TREE_TYPE (targ0);
9379
9380 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9381 newtype = TREE_TYPE (targ1);
9382
9383 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9384 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9385 return fold_build2_loc (loc, code, type,
9386 fold_convert_loc (loc, newtype, targ0),
9387 fold_convert_loc (loc, newtype, targ1));
9388
9389 /* (-a) CMP (-b) -> b CMP a */
9390 if (TREE_CODE (arg0) == NEGATE_EXPR
9391 && TREE_CODE (arg1) == NEGATE_EXPR)
9392 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9393 TREE_OPERAND (arg0, 0));
9394
9395 if (TREE_CODE (arg1) == REAL_CST)
9396 {
9397 REAL_VALUE_TYPE cst;
9398 cst = TREE_REAL_CST (arg1);
9399
9400 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9401 if (TREE_CODE (arg0) == NEGATE_EXPR)
9402 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9403 TREE_OPERAND (arg0, 0),
9404 build_real (TREE_TYPE (arg1),
9405 real_value_negate (&cst)));
9406
9407 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9408 /* a CMP (-0) -> a CMP 0 */
9409 if (REAL_VALUE_MINUS_ZERO (cst))
9410 return fold_build2_loc (loc, code, type, arg0,
9411 build_real (TREE_TYPE (arg1), dconst0));
9412
9413 /* x != NaN is always true, other ops are always false. */
9414 if (REAL_VALUE_ISNAN (cst)
9415 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9416 {
9417 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9418 return omit_one_operand_loc (loc, type, tem, arg0);
9419 }
9420
9421 /* Fold comparisons against infinity. */
9422 if (REAL_VALUE_ISINF (cst)
9423 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9424 {
9425 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9426 if (tem != NULL_TREE)
9427 return tem;
9428 }
9429 }
9430
9431 /* If this is a comparison of a real constant with a PLUS_EXPR
9432 or a MINUS_EXPR of a real constant, we can convert it into a
9433 comparison with a revised real constant as long as no overflow
9434 occurs when unsafe_math_optimizations are enabled. */
9435 if (flag_unsafe_math_optimizations
9436 && TREE_CODE (arg1) == REAL_CST
9437 && (TREE_CODE (arg0) == PLUS_EXPR
9438 || TREE_CODE (arg0) == MINUS_EXPR)
9439 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9440 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9441 ? MINUS_EXPR : PLUS_EXPR,
9442 arg1, TREE_OPERAND (arg0, 1)))
9443 && !TREE_OVERFLOW (tem))
9444 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9445
9446 /* Likewise, we can simplify a comparison of a real constant with
9447 a MINUS_EXPR whose first operand is also a real constant, i.e.
9448 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9449 floating-point types only if -fassociative-math is set. */
9450 if (flag_associative_math
9451 && TREE_CODE (arg1) == REAL_CST
9452 && TREE_CODE (arg0) == MINUS_EXPR
9453 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9454 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9455 arg1))
9456 && !TREE_OVERFLOW (tem))
9457 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9458 TREE_OPERAND (arg0, 1), tem);
9459
9460 /* Fold comparisons against built-in math functions. */
9461 if (TREE_CODE (arg1) == REAL_CST
9462 && flag_unsafe_math_optimizations
9463 && ! flag_errno_math)
9464 {
9465 enum built_in_function fcode = builtin_mathfn_code (arg0);
9466
9467 if (fcode != END_BUILTINS)
9468 {
9469 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9470 if (tem != NULL_TREE)
9471 return tem;
9472 }
9473 }
9474 }
9475
9476 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9477 && CONVERT_EXPR_P (arg0))
9478 {
9479 /* If we are widening one operand of an integer comparison,
9480 see if the other operand is similarly being widened. Perhaps we
9481 can do the comparison in the narrower type. */
9482 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9483 if (tem)
9484 return tem;
9485
9486 /* Or if we are changing signedness. */
9487 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9488 if (tem)
9489 return tem;
9490 }
9491
9492 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9493 constant, we can simplify it. */
9494 if (TREE_CODE (arg1) == INTEGER_CST
9495 && (TREE_CODE (arg0) == MIN_EXPR
9496 || TREE_CODE (arg0) == MAX_EXPR)
9497 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9498 {
9499 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9500 if (tem)
9501 return tem;
9502 }
9503
9504 /* Simplify comparison of something with itself. (For IEEE
9505 floating-point, we can only do some of these simplifications.) */
9506 if (operand_equal_p (arg0, arg1, 0))
9507 {
9508 switch (code)
9509 {
9510 case EQ_EXPR:
9511 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9512 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9513 return constant_boolean_node (1, type);
9514 break;
9515
9516 case GE_EXPR:
9517 case LE_EXPR:
9518 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9519 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9520 return constant_boolean_node (1, type);
9521 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9522
9523 case NE_EXPR:
9524 /* For NE, we can only do this simplification if the operands are
9525 integer or we don't honor IEEE floating-point NaNs. */
9526 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9527 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9528 break;
9529 /* ... fall through ... */
9530 case GT_EXPR:
9531 case LT_EXPR:
9532 return constant_boolean_node (0, type);
9533 default:
9534 gcc_unreachable ();
9535 }
9536 }
9537
9538 /* If we are comparing an expression that just has comparisons
9539 of two integer values, arithmetic expressions of those comparisons,
9540 and constants, we can simplify it. There are only three cases
9541 to check: the two values can either be equal, the first can be
9542 greater, or the second can be greater. Fold the expression for
9543 those three values. Since each value must be 0 or 1, we have
9544 eight possibilities, each of which corresponds to the constant 0
9545 or 1 or one of the six possible comparisons.
9546
9547 This handles common cases like (a > b) == 0 but also handles
9548 expressions like ((x > y) - (y > x)) > 0, which supposedly
9549 occur in macroized code. */
9550
9551 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9552 {
9553 tree cval1 = 0, cval2 = 0;
9554 int save_p = 0;
9555
9556 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9557 /* Don't handle degenerate cases here; they should already
9558 have been handled anyway. */
9559 && cval1 != 0 && cval2 != 0
9560 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9561 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9562 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9563 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9564 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9565 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9566 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9567 {
9568 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9569 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9570
9571 /* We can't just pass T to eval_subst in case cval1 or cval2
9572 was the same as ARG1. */
9573
9574 tree high_result
9575 = fold_build2_loc (loc, code, type,
9576 eval_subst (loc, arg0, cval1, maxval,
9577 cval2, minval),
9578 arg1);
9579 tree equal_result
9580 = fold_build2_loc (loc, code, type,
9581 eval_subst (loc, arg0, cval1, maxval,
9582 cval2, maxval),
9583 arg1);
9584 tree low_result
9585 = fold_build2_loc (loc, code, type,
9586 eval_subst (loc, arg0, cval1, minval,
9587 cval2, maxval),
9588 arg1);
9589
9590 /* All three of these results should be 0 or 1. Confirm they are.
9591 Then use those values to select the proper code to use. */
9592
9593 if (TREE_CODE (high_result) == INTEGER_CST
9594 && TREE_CODE (equal_result) == INTEGER_CST
9595 && TREE_CODE (low_result) == INTEGER_CST)
9596 {
9597 /* Make a 3-bit mask with the high-order bit being the
9598 value for `>', the next for '=', and the low for '<'. */
9599 switch ((integer_onep (high_result) * 4)
9600 + (integer_onep (equal_result) * 2)
9601 + integer_onep (low_result))
9602 {
9603 case 0:
9604 /* Always false. */
9605 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9606 case 1:
9607 code = LT_EXPR;
9608 break;
9609 case 2:
9610 code = EQ_EXPR;
9611 break;
9612 case 3:
9613 code = LE_EXPR;
9614 break;
9615 case 4:
9616 code = GT_EXPR;
9617 break;
9618 case 5:
9619 code = NE_EXPR;
9620 break;
9621 case 6:
9622 code = GE_EXPR;
9623 break;
9624 case 7:
9625 /* Always true. */
9626 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9627 }
9628
9629 if (save_p)
9630 {
9631 tem = save_expr (build2 (code, type, cval1, cval2));
9632 SET_EXPR_LOCATION (tem, loc);
9633 return tem;
9634 }
9635 return fold_build2_loc (loc, code, type, cval1, cval2);
9636 }
9637 }
9638 }
9639
9640 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9641 into a single range test. */
9642 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9643 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9644 && TREE_CODE (arg1) == INTEGER_CST
9645 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9646 && !integer_zerop (TREE_OPERAND (arg0, 1))
9647 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9648 && !TREE_OVERFLOW (arg1))
9649 {
9650 tem = fold_div_compare (loc, code, type, arg0, arg1);
9651 if (tem != NULL_TREE)
9652 return tem;
9653 }
9654
9655 /* Fold ~X op ~Y as Y op X. */
9656 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9657 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9658 {
9659 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9660 return fold_build2_loc (loc, code, type,
9661 fold_convert_loc (loc, cmp_type,
9662 TREE_OPERAND (arg1, 0)),
9663 TREE_OPERAND (arg0, 0));
9664 }
9665
9666 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9667 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9668 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9669 {
9670 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9671 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9672 TREE_OPERAND (arg0, 0),
9673 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9674 fold_convert_loc (loc, cmp_type, arg1)));
9675 }
9676
9677 return NULL_TREE;
9678 }
9679
9680
9681 /* Subroutine of fold_binary. Optimize complex multiplications of the
9682 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9683 argument EXPR represents the expression "z" of type TYPE. */
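/* For z = a + b*i this yields (a*a + b*b) + 0*i; e.g.
   (illustrative) (3 + 4i) * conj (3 + 4i) folds to 25 + 0i. */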
9684
9685 static tree
9686 fold_mult_zconjz (location_t loc, tree type, tree expr)
9687 {
9688 tree itype = TREE_TYPE (type);
9689 tree rpart, ipart, tem;
9690
9691 if (TREE_CODE (expr) == COMPLEX_EXPR)
9692 {
9693 rpart = TREE_OPERAND (expr, 0);
9694 ipart = TREE_OPERAND (expr, 1);
9695 }
9696 else if (TREE_CODE (expr) == COMPLEX_CST)
9697 {
9698 rpart = TREE_REALPART (expr);
9699 ipart = TREE_IMAGPART (expr);
9700 }
9701 else
9702 {
9703 expr = save_expr (expr);
9704 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9705 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9706 }
9707
9708 rpart = save_expr (rpart);
9709 ipart = save_expr (ipart);
9710 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9711 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9712 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9713 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9714 build_zero_cst (itype));
9715 }
9716
9717
9718 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9719 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9720 guarantees that P and N have the same least significant log2(M) bits.
9721 N is not otherwise constrained. In particular, N is not normalized to
9722 0 <= N < M as is common. In general, the precise value of P is unknown.
9723 M is chosen as large as possible such that constant N can be determined.
9724
9725 Returns M and sets *RESIDUE to N.
9726
9727 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9728 account. This is not always possible due to PR 35705.
9729 */
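/* Example (the alignment below is an illustrative assumption): for
   "char buf[16] __attribute__ ((aligned (8)));" and EXPR = &buf[3],
   the ADDR_EXPR case returns modulus 8 with *RESIDUE = 3, i.e. the
   pointer value is congruent to 3 modulo 8. */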
9730
9731 static unsigned HOST_WIDE_INT
9732 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9733 bool allow_func_align)
9734 {
9735 enum tree_code code;
9736
9737 *residue = 0;
9738
9739 code = TREE_CODE (expr);
9740 if (code == ADDR_EXPR)
9741 {
9742 unsigned int bitalign;
9743 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9744 *residue /= BITS_PER_UNIT;
9745 return bitalign / BITS_PER_UNIT;
9746 }
9747 else if (code == POINTER_PLUS_EXPR)
9748 {
9749 tree op0, op1;
9750 unsigned HOST_WIDE_INT modulus;
9751 enum tree_code inner_code;
9752
9753 op0 = TREE_OPERAND (expr, 0);
9754 STRIP_NOPS (op0);
9755 modulus = get_pointer_modulus_and_residue (op0, residue,
9756 allow_func_align);
9757
9758 op1 = TREE_OPERAND (expr, 1);
9759 STRIP_NOPS (op1);
9760 inner_code = TREE_CODE (op1);
9761 if (inner_code == INTEGER_CST)
9762 {
9763 *residue += TREE_INT_CST_LOW (op1);
9764 return modulus;
9765 }
9766 else if (inner_code == MULT_EXPR)
9767 {
9768 op1 = TREE_OPERAND (op1, 1);
9769 if (TREE_CODE (op1) == INTEGER_CST)
9770 {
9771 unsigned HOST_WIDE_INT align;
9772
9773 /* Compute the greatest power-of-2 divisor of op1. */
9774 align = TREE_INT_CST_LOW (op1);
9775 align &= -align;
9776
9777 /* If align is non-zero and less than *modulus, replace
9778 *modulus with align. If align is 0, then either op1 is 0
9779 or the greatest power-of-2 divisor of op1 doesn't fit in an
9780 unsigned HOST_WIDE_INT. In either case, no additional
9781 constraint is imposed. */
9782 if (align)
9783 modulus = MIN (modulus, align);
9784
9785 return modulus;
9786 }
9787 }
9788 }
9789
9790 /* If we get here, we were unable to determine anything useful about the
9791 expression. */
9792 return 1;
9793 }
9794
9795 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9796 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9797
9798 static bool
9799 vec_cst_ctor_to_array (tree arg, tree *elts)
9800 {
9801 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9802
9803 if (TREE_CODE (arg) == VECTOR_CST)
9804 {
9805 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9806 elts[i] = VECTOR_CST_ELT (arg, i);
9807 }
9808 else if (TREE_CODE (arg) == CONSTRUCTOR)
9809 {
9810 constructor_elt *elt;
9811
9812 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9813 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9814 return false;
9815 else
9816 elts[i] = elt->value;
9817 }
9818 else
9819 return false;
9820 for (; i < nelts; i++)
9821 elts[i]
9822 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9823 return true;
9824 }
9825
9826 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9827 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9828 NULL_TREE otherwise. */
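/* E.g. (illustrative): for four-element vectors ARG0 = {a,b,c,d},
   ARG1 = {e,f,g,h} and SEL = {0,4,1,5}, the result is {a,e,b,f};
   selector values below NELTS pick from ARG0, the rest from ARG1. */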
9829
9830 static tree
9831 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9832 {
9833 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9834 tree *elts;
9835 bool need_ctor = false;
9836
9837 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9838 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9839 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9840 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9841 return NULL_TREE;
9842
9843 elts = XALLOCAVEC (tree, nelts * 3);
9844 if (!vec_cst_ctor_to_array (arg0, elts)
9845 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9846 return NULL_TREE;
9847
9848 for (i = 0; i < nelts; i++)
9849 {
9850 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9851 need_ctor = true;
9852 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9853 }
9854
9855 if (need_ctor)
9856 {
9857 vec<constructor_elt, va_gc> *v;
9858 vec_alloc (v, nelts);
9859 for (i = 0; i < nelts; i++)
9860 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9861 return build_constructor (type, v);
9862 }
9863 else
9864 return build_vector (type, &elts[2 * nelts]);
9865 }
9866
9867 /* Try to fold a pointer difference of type TYPE between two address
9868 expressions of array references AREF0 and AREF1 using location LOC. Return a
9869 simplified expression for the difference or NULL_TREE. */
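/* E.g. (illustrative): for "int a[8];" with 4-byte int, the
   difference &a[i] - &a[j] folds to (i - j) * 4: the bases are
   equal, so the result is the index difference scaled by the
   element size. */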
9870
9871 static tree
9872 fold_addr_of_array_ref_difference (location_t loc, tree type,
9873 tree aref0, tree aref1)
9874 {
9875 tree base0 = TREE_OPERAND (aref0, 0);
9876 tree base1 = TREE_OPERAND (aref1, 0);
9877 tree base_offset = build_int_cst (type, 0);
9878
9879 /* If the bases are array references as well, recurse. If the bases
9880 are pointer indirections compute the difference of the pointers.
9881 If the bases are equal, we are set. */
9882 if ((TREE_CODE (base0) == ARRAY_REF
9883 && TREE_CODE (base1) == ARRAY_REF
9884 && (base_offset
9885 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9886 || (INDIRECT_REF_P (base0)
9887 && INDIRECT_REF_P (base1)
9888 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9889 TREE_OPERAND (base0, 0),
9890 TREE_OPERAND (base1, 0))))
9891 || operand_equal_p (base0, base1, 0))
9892 {
9893 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9894 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9895 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9896 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9897 return fold_build2_loc (loc, PLUS_EXPR, type,
9898 base_offset,
9899 fold_build2_loc (loc, MULT_EXPR, type,
9900 diff, esz));
9901 }
9902 return NULL_TREE;
9903 }
9904
9905 /* If the real or vector real constant CST of type TYPE has an exact
9906 inverse, return it, else return NULL. */
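/* E.g. (illustrative): 4.0 yields 0.25, an exact binary inverse;
   3.0 yields NULL_TREE because 1/3 is not exactly representable
   in binary floating point. */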
9907
9908 static tree
9909 exact_inverse (tree type, tree cst)
9910 {
9911 REAL_VALUE_TYPE r;
9912 tree unit_type, *elts;
9913 enum machine_mode mode;
9914 unsigned vec_nelts, i;
9915
9916 switch (TREE_CODE (cst))
9917 {
9918 case REAL_CST:
9919 r = TREE_REAL_CST (cst);
9920
9921 if (exact_real_inverse (TYPE_MODE (type), &r))
9922 return build_real (type, r);
9923
9924 return NULL_TREE;
9925
9926 case VECTOR_CST:
9927 vec_nelts = VECTOR_CST_NELTS (cst);
9928 elts = XALLOCAVEC (tree, vec_nelts);
9929 unit_type = TREE_TYPE (type);
9930 mode = TYPE_MODE (unit_type);
9931
9932 for (i = 0; i < vec_nelts; i++)
9933 {
9934 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9935 if (!exact_real_inverse (mode, &r))
9936 return NULL_TREE;
9937 elts[i] = build_real (unit_type, r);
9938 }
9939
9940 return build_vector (type, elts);
9941
9942 default:
9943 return NULL_TREE;
9944 }
9945 }
9946
9947 /* Mask out the tz least significant bits of X of type TYPE where
9948 tz is the number of trailing zeroes in Y. */
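/* E.g. (illustrative): for Y = 24 (binary ...11000, three trailing
   zeros) the mask is ~7, so the three low bits of X are cleared. */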
9949 static double_int
9950 mask_with_tz (tree type, double_int x, double_int y)
9951 {
9952 int tz = y.trailing_zeros ();
9953
9954 if (tz > 0)
9955 {
9956 double_int mask;
9957
9958 mask = ~double_int::mask (tz);
9959 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9960 return mask & x;
9961 }
9962 return x;
9963 }
9964
9965 /* Fold a binary expression of code CODE and type TYPE with operands
9966 OP0 and OP1. LOC is the location of the resulting expression.
9967 Return the folded expression if folding is successful. Otherwise,
9968 return NULL_TREE. */
9969
9970 tree
9971 fold_binary_loc (location_t loc,
9972 enum tree_code code, tree type, tree op0, tree op1)
9973 {
9974 enum tree_code_class kind = TREE_CODE_CLASS (code);
9975 tree arg0, arg1, tem;
9976 tree t1 = NULL_TREE;
9977 bool strict_overflow_p;
9978 unsigned int prec;
9979
9980 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9981 && TREE_CODE_LENGTH (code) == 2
9982 && op0 != NULL_TREE
9983 && op1 != NULL_TREE);
9984
9985 arg0 = op0;
9986 arg1 = op1;
9987
9988 /* Strip any conversions that don't change the mode. This is
9989 safe for every expression, except for a comparison expression
9990 because its signedness is derived from its operands. So, in
9991 the latter case, only strip conversions that don't change the
9992 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9993 preserved.
9994
9995 Note that this is done as an internal manipulation within the
9996 constant folder, in order to find the simplest representation
9997 of the arguments so that their form can be studied. In any
9998 cases, the appropriate type conversions should be put back in
9999 the tree that will get out of the constant folder. */
10000
10001 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10002 {
10003 STRIP_SIGN_NOPS (arg0);
10004 STRIP_SIGN_NOPS (arg1);
10005 }
10006 else
10007 {
10008 STRIP_NOPS (arg0);
10009 STRIP_NOPS (arg1);
10010 }
10011
10012 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10013 constant but we can't do arithmetic on them. */
10014 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10015 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10016 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10017 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10018 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10019 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10020 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10021 {
10022 if (kind == tcc_binary)
10023 {
10024 /* Make sure type and arg0 have the same saturating flag. */
10025 gcc_assert (TYPE_SATURATING (type)
10026 == TYPE_SATURATING (TREE_TYPE (arg0)));
10027 tem = const_binop (code, arg0, arg1);
10028 }
10029 else if (kind == tcc_comparison)
10030 tem = fold_relational_const (code, type, arg0, arg1);
10031 else
10032 tem = NULL_TREE;
10033
10034 if (tem != NULL_TREE)
10035 {
10036 if (TREE_TYPE (tem) != type)
10037 tem = fold_convert_loc (loc, type, tem);
10038 return tem;
10039 }
10040 }
10041
10042 /* If this is a commutative operation, and ARG0 is a constant, move it
10043 to ARG1 to reduce the number of tests below. */
10044 if (commutative_tree_code (code)
10045 && tree_swap_operands_p (arg0, arg1, true))
10046 return fold_build2_loc (loc, code, type, op1, op0);
10047
10048 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10049
10050 First check for cases where an arithmetic operation is applied to a
10051 compound, conditional, or comparison operation. Push the arithmetic
10052 operation inside the compound or conditional to see if any folding
10053 can then be done. Convert comparison to conditional for this purpose.
10054 This also optimizes non-constant cases that used to be done in
10055 expand_expr.
10056
10057 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10058 one of the operands is a comparison and the other is a comparison, a
10059 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10060 code below would make the expression more complex. Change it to a
10061 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10062 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10063
10064 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10065 || code == EQ_EXPR || code == NE_EXPR)
10066 && TREE_CODE (type) != VECTOR_TYPE
10067 && ((truth_value_p (TREE_CODE (arg0))
10068 && (truth_value_p (TREE_CODE (arg1))
10069 || (TREE_CODE (arg1) == BIT_AND_EXPR
10070 && integer_onep (TREE_OPERAND (arg1, 1)))))
10071 || (truth_value_p (TREE_CODE (arg1))
10072 && (truth_value_p (TREE_CODE (arg0))
10073 || (TREE_CODE (arg0) == BIT_AND_EXPR
10074 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10075 {
10076 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10077 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10078 : TRUTH_XOR_EXPR,
10079 boolean_type_node,
10080 fold_convert_loc (loc, boolean_type_node, arg0),
10081 fold_convert_loc (loc, boolean_type_node, arg1));
10082
10083 if (code == EQ_EXPR)
10084 tem = invert_truthvalue_loc (loc, tem);
10085
10086 return fold_convert_loc (loc, type, tem);
10087 }
10088
10089 if (TREE_CODE_CLASS (code) == tcc_binary
10090 || TREE_CODE_CLASS (code) == tcc_comparison)
10091 {
10092 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10093 {
10094 tem = fold_build2_loc (loc, code, type,
10095 fold_convert_loc (loc, TREE_TYPE (op0),
10096 TREE_OPERAND (arg0, 1)), op1);
10097 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10098 tem);
10099 }
10100 if (TREE_CODE (arg1) == COMPOUND_EXPR
10101 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10102 {
10103 tem = fold_build2_loc (loc, code, type, op0,
10104 fold_convert_loc (loc, TREE_TYPE (op1),
10105 TREE_OPERAND (arg1, 1)));
10106 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10107 tem);
10108 }
10109
10110 if (TREE_CODE (arg0) == COND_EXPR
10111 || TREE_CODE (arg0) == VEC_COND_EXPR
10112 || COMPARISON_CLASS_P (arg0))
10113 {
10114 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10115 arg0, arg1,
10116 /*cond_first_p=*/1);
10117 if (tem != NULL_TREE)
10118 return tem;
10119 }
10120
10121 if (TREE_CODE (arg1) == COND_EXPR
10122 || TREE_CODE (arg1) == VEC_COND_EXPR
10123 || COMPARISON_CLASS_P (arg1))
10124 {
10125 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10126 arg1, arg0,
10127 /*cond_first_p=*/0);
10128 if (tem != NULL_TREE)
10129 return tem;
10130 }
10131 }
10132
10133 switch (code)
10134 {
10135 case MEM_REF:
10136 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10137 if (TREE_CODE (arg0) == ADDR_EXPR
10138 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10139 {
10140 tree iref = TREE_OPERAND (arg0, 0);
10141 return fold_build2 (MEM_REF, type,
10142 TREE_OPERAND (iref, 0),
10143 int_const_binop (PLUS_EXPR, arg1,
10144 TREE_OPERAND (iref, 1)));
10145 }
10146
10147 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10148 if (TREE_CODE (arg0) == ADDR_EXPR
10149 && handled_component_p (TREE_OPERAND (arg0, 0)))
10150 {
10151 tree base;
10152 HOST_WIDE_INT coffset;
10153 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10154 &coffset);
10155 if (!base)
10156 return NULL_TREE;
10157 return fold_build2 (MEM_REF, type,
10158 build_fold_addr_expr (base),
10159 int_const_binop (PLUS_EXPR, arg1,
10160 size_int (coffset)));
10161 }
10162
10163 return NULL_TREE;
10164
10165 case POINTER_PLUS_EXPR:
10166 /* 0 +p index -> (type)index */
10167 if (integer_zerop (arg0))
10168 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10169
10170 /* PTR +p 0 -> PTR */
10171 if (integer_zerop (arg1))
10172 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10173
10174 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10175 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10176 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10177 return fold_convert_loc (loc, type,
10178 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10179 fold_convert_loc (loc, sizetype,
10180 arg1),
10181 fold_convert_loc (loc, sizetype,
10182 arg0)));
10183
10184 /* (PTR +p B) +p A -> PTR +p (B + A) */
10185 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10186 {
10187 tree inner;
10188 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10189 tree arg00 = TREE_OPERAND (arg0, 0);
10190 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10191 arg01, fold_convert_loc (loc, sizetype, arg1));
10192 return fold_convert_loc (loc, type,
10193 fold_build_pointer_plus_loc (loc,
10194 arg00, inner));
10195 }
10196
10197 /* PTR_CST +p CST -> CST1 */
10198 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10199 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10200 fold_convert_loc (loc, type, arg1));
10201
10202 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10203 of the array. The loop optimizer sometimes produces this type of
10204 expression. */
10205 if (TREE_CODE (arg0) == ADDR_EXPR)
10206 {
10207 tem = try_move_mult_to_index (loc, arg0,
10208 fold_convert_loc (loc,
10209 ssizetype, arg1));
10210 if (tem)
10211 return fold_convert_loc (loc, type, tem);
10212 }
10213
10214 return NULL_TREE;
10215
10216 case PLUS_EXPR:
10217 /* A + (-B) -> A - B */
10218 if (TREE_CODE (arg1) == NEGATE_EXPR)
10219 return fold_build2_loc (loc, MINUS_EXPR, type,
10220 fold_convert_loc (loc, type, arg0),
10221 fold_convert_loc (loc, type,
10222 TREE_OPERAND (arg1, 0)));
10223 /* (-A) + B -> B - A */
10224 if (TREE_CODE (arg0) == NEGATE_EXPR
10225 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10226 return fold_build2_loc (loc, MINUS_EXPR, type,
10227 fold_convert_loc (loc, type, arg1),
10228 fold_convert_loc (loc, type,
10229 TREE_OPERAND (arg0, 0)));
10230
10231 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10232 {
10233 /* Convert ~A + 1 to -A. */
10234 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10235 && integer_onep (arg1))
10236 return fold_build1_loc (loc, NEGATE_EXPR, type,
10237 fold_convert_loc (loc, type,
10238 TREE_OPERAND (arg0, 0)));
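/* E.g. ~x + 1 becomes -x, the usual two's-complement negation
identity.  */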
10239
10240 /* ~X + X is -1. */
10241 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10242 && !TYPE_OVERFLOW_TRAPS (type))
10243 {
10244 tree tem = TREE_OPERAND (arg0, 0);
10245
10246 STRIP_NOPS (tem);
10247 if (operand_equal_p (tem, arg1, 0))
10248 {
10249 t1 = build_all_ones_cst (type);
10250 return omit_one_operand_loc (loc, type, t1, arg1);
10251 }
10252 }
10253
10254 /* X + ~X is -1. */
10255 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10256 && !TYPE_OVERFLOW_TRAPS (type))
10257 {
10258 tree tem = TREE_OPERAND (arg1, 0);
10259
10260 STRIP_NOPS (tem);
10261 if (operand_equal_p (arg0, tem, 0))
10262 {
10263 t1 = build_all_ones_cst (type);
10264 return omit_one_operand_loc (loc, type, t1, arg0);
10265 }
10266 }
10267
10268 /* X + (X / CST) * -CST is X % CST. */
10269 if (TREE_CODE (arg1) == MULT_EXPR
10270 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10271 && operand_equal_p (arg0,
10272 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10273 {
10274 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10275 tree cst1 = TREE_OPERAND (arg1, 1);
10276 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10277 cst1, cst0);
10278 if (sum && integer_zerop (sum))
10279 return fold_convert_loc (loc, type,
10280 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10281 TREE_TYPE (arg0), arg0,
10282 cst0));
10283 }
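/* Worked example: x + (x / 4) * -4 folds to x % 4, since with
truncating division x == (x / 4) * 4 + x % 4.  */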
10284 }
10285
10286 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10287 one of them being 1. Make sure the type is not saturating and has the
10288 signedness of the stripped operands, as fold_plusminus_mult_expr will re-associate.
10289 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10290 if ((TREE_CODE (arg0) == MULT_EXPR
10291 || TREE_CODE (arg1) == MULT_EXPR)
10292 && !TYPE_SATURATING (type)
10293 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10294 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10295 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10296 {
10297 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10298 if (tem)
10299 return tem;
10300 }
10301
10302 if (! FLOAT_TYPE_P (type))
10303 {
10304 if (integer_zerop (arg1))
10305 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10306
10307 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10308 with a constant, and the two constants have no bits in common,
10309 we should treat this as a BIT_IOR_EXPR since this may produce more
10310 simplifications. */
10311 if (TREE_CODE (arg0) == BIT_AND_EXPR
10312 && TREE_CODE (arg1) == BIT_AND_EXPR
10313 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10314 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10315 && integer_zerop (const_binop (BIT_AND_EXPR,
10316 TREE_OPERAND (arg0, 1),
10317 TREE_OPERAND (arg1, 1))))
10318 {
10319 code = BIT_IOR_EXPR;
10320 goto bit_ior;
10321 }
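/* E.g. (x & 0xF0) + (y & 0x0F) is handled as
(x & 0xF0) | (y & 0x0F): the masks share no bits, so no carries
can occur and the addition really is an OR.  */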
10322
10323 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10324 (plus (plus (mult) (mult)) (foo)) so that we can
10325 take advantage of the factoring cases below. */
10326 if (TYPE_OVERFLOW_WRAPS (type)
10327 && (((TREE_CODE (arg0) == PLUS_EXPR
10328 || TREE_CODE (arg0) == MINUS_EXPR)
10329 && TREE_CODE (arg1) == MULT_EXPR)
10330 || ((TREE_CODE (arg1) == PLUS_EXPR
10331 || TREE_CODE (arg1) == MINUS_EXPR)
10332 && TREE_CODE (arg0) == MULT_EXPR)))
10333 {
10334 tree parg0, parg1, parg, marg;
10335 enum tree_code pcode;
10336
10337 if (TREE_CODE (arg1) == MULT_EXPR)
10338 parg = arg0, marg = arg1;
10339 else
10340 parg = arg1, marg = arg0;
10341 pcode = TREE_CODE (parg);
10342 parg0 = TREE_OPERAND (parg, 0);
10343 parg1 = TREE_OPERAND (parg, 1);
10344 STRIP_NOPS (parg0);
10345 STRIP_NOPS (parg1);
10346
10347 if (TREE_CODE (parg0) == MULT_EXPR
10348 && TREE_CODE (parg1) != MULT_EXPR)
10349 return fold_build2_loc (loc, pcode, type,
10350 fold_build2_loc (loc, PLUS_EXPR, type,
10351 fold_convert_loc (loc, type,
10352 parg0),
10353 fold_convert_loc (loc, type,
10354 marg)),
10355 fold_convert_loc (loc, type, parg1));
10356 if (TREE_CODE (parg0) != MULT_EXPR
10357 && TREE_CODE (parg1) == MULT_EXPR)
10358 return
10359 fold_build2_loc (loc, PLUS_EXPR, type,
10360 fold_convert_loc (loc, type, parg0),
10361 fold_build2_loc (loc, pcode, type,
10362 fold_convert_loc (loc, type, marg),
10363 fold_convert_loc (loc, type,
10364 parg1)));
10365 }
10366 }
10367 else
10368 {
10369 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10370 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10371 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10372
10373 /* Likewise if the operands are reversed. */
10374 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10375 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10376
10377 /* Convert X + -C into X - C. */
10378 if (TREE_CODE (arg1) == REAL_CST
10379 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10380 {
10381 tem = fold_negate_const (arg1, type);
10382 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10383 return fold_build2_loc (loc, MINUS_EXPR, type,
10384 fold_convert_loc (loc, type, arg0),
10385 fold_convert_loc (loc, type, tem));
10386 }
10387
10388 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10389 to __complex__ ( x, y ). This is not the same for SNaNs or
10390 if signed zeros are involved. */
10391 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10392 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10393 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10394 {
10395 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10396 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10397 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10398 bool arg0rz = false, arg0iz = false;
10399 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10400 || (arg0i && (arg0iz = real_zerop (arg0i))))
10401 {
10402 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10403 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10404 if (arg0rz && arg1i && real_zerop (arg1i))
10405 {
10406 tree rp = arg1r ? arg1r
10407 : build1 (REALPART_EXPR, rtype, arg1);
10408 tree ip = arg0i ? arg0i
10409 : build1 (IMAGPART_EXPR, rtype, arg0);
10410 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10411 }
10412 else if (arg0iz && arg1r && real_zerop (arg1r))
10413 {
10414 tree rp = arg0r ? arg0r
10415 : build1 (REALPART_EXPR, rtype, arg0);
10416 tree ip = arg1i ? arg1i
10417 : build1 (IMAGPART_EXPR, rtype, arg1);
10418 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10419 }
10420 }
10421 }
10422
10423 if (flag_unsafe_math_optimizations
10424 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10425 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10426 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10427 return tem;
10428
10429 /* Convert x+x into x*2.0. */
10430 if (operand_equal_p (arg0, arg1, 0)
10431 && SCALAR_FLOAT_TYPE_P (type))
10432 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10433 build_real (type, dconst2));
10434
10435 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10436 We associate floats only if the user has specified
10437 -fassociative-math. */
10438 if (flag_associative_math
10439 && TREE_CODE (arg1) == PLUS_EXPR
10440 && TREE_CODE (arg0) != MULT_EXPR)
10441 {
10442 tree tree10 = TREE_OPERAND (arg1, 0);
10443 tree tree11 = TREE_OPERAND (arg1, 1);
10444 if (TREE_CODE (tree11) == MULT_EXPR
10445 && TREE_CODE (tree10) == MULT_EXPR)
10446 {
10447 tree tree0;
10448 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10449 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10450 }
10451 }
10452 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10453 We associate floats only if the user has specified
10454 -fassociative-math. */
10455 if (flag_associative_math
10456 && TREE_CODE (arg0) == PLUS_EXPR
10457 && TREE_CODE (arg1) != MULT_EXPR)
10458 {
10459 tree tree00 = TREE_OPERAND (arg0, 0);
10460 tree tree01 = TREE_OPERAND (arg0, 1);
10461 if (TREE_CODE (tree01) == MULT_EXPR
10462 && TREE_CODE (tree00) == MULT_EXPR)
10463 {
10464 tree tree0;
10465 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10466 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10467 }
10468 }
10469 }
10470
10471 bit_rotate:
10472 /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size of A,
10473 is a rotate of A by C1 bits. */
10474 /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size of A,
10475 is a rotate of A by B bits. */
10476 {
10477 enum tree_code code0, code1;
10478 tree rtype;
10479 code0 = TREE_CODE (arg0);
10480 code1 = TREE_CODE (arg1);
10481 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10482 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10483 && operand_equal_p (TREE_OPERAND (arg0, 0),
10484 TREE_OPERAND (arg1, 0), 0)
10485 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10486 TYPE_UNSIGNED (rtype))
10487 /* Only create rotates in complete modes. Other cases are not
10488 expanded properly. */
10489 && (element_precision (rtype)
10490 == element_precision (TYPE_MODE (rtype))))
10491 {
10492 tree tree01, tree11;
10493 enum tree_code code01, code11;
10494
10495 tree01 = TREE_OPERAND (arg0, 1);
10496 tree11 = TREE_OPERAND (arg1, 1);
10497 STRIP_NOPS (tree01);
10498 STRIP_NOPS (tree11);
10499 code01 = TREE_CODE (tree01);
10500 code11 = TREE_CODE (tree11);
10501 if (code01 == INTEGER_CST
10502 && code11 == INTEGER_CST
10503 && TREE_INT_CST_HIGH (tree01) == 0
10504 && TREE_INT_CST_HIGH (tree11) == 0
10505 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10506 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10507 {
10508 tem = build2_loc (loc, LROTATE_EXPR,
10509 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10510 TREE_OPERAND (arg0, 0),
10511 code0 == LSHIFT_EXPR ? tree01 : tree11);
10512 return fold_convert_loc (loc, type, tem);
10513 }
10514 else if (code11 == MINUS_EXPR)
10515 {
10516 tree tree110, tree111;
10517 tree110 = TREE_OPERAND (tree11, 0);
10518 tree111 = TREE_OPERAND (tree11, 1);
10519 STRIP_NOPS (tree110);
10520 STRIP_NOPS (tree111);
10521 if (TREE_CODE (tree110) == INTEGER_CST
10522 && 0 == compare_tree_int (tree110,
10523 element_precision
10524 (TREE_TYPE (TREE_OPERAND
10525 (arg0, 0))))
10526 && operand_equal_p (tree01, tree111, 0))
10527 return
10528 fold_convert_loc (loc, type,
10529 build2 ((code0 == LSHIFT_EXPR
10530 ? LROTATE_EXPR
10531 : RROTATE_EXPR),
10532 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10533 TREE_OPERAND (arg0, 0), tree01));
10534 }
10535 else if (code01 == MINUS_EXPR)
10536 {
10537 tree tree010, tree011;
10538 tree010 = TREE_OPERAND (tree01, 0);
10539 tree011 = TREE_OPERAND (tree01, 1);
10540 STRIP_NOPS (tree010);
10541 STRIP_NOPS (tree011);
10542 if (TREE_CODE (tree010) == INTEGER_CST
10543 && 0 == compare_tree_int (tree010,
10544 element_precision
10545 (TREE_TYPE (TREE_OPERAND
10546 (arg0, 0))))
10547 && operand_equal_p (tree11, tree011, 0))
10548 return fold_convert_loc
10549 (loc, type,
10550 build2 ((code0 != LSHIFT_EXPR
10551 ? LROTATE_EXPR
10552 : RROTATE_EXPR),
10553 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10554 TREE_OPERAND (arg0, 0), tree11));
10555 }
10556 }
10557 }
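/* As an illustration (assuming a 32-bit unsigned A in a complete
mode), both (A << 3) + (A >> 29) and (A << B) + (A >> (32 - B))
are recognized here and rewritten as rotates of A.  */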
10558
10559 associate:
10560 /* In most languages, we can't associate operations on floats through
10561 parentheses. Rather than remember where the parentheses were, we
10562 don't associate floats at all, unless the user has specified
10563 -fassociative-math.
10564 We also need to make sure the type is not saturating. */
10565
10566 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10567 && !TYPE_SATURATING (type))
10568 {
10569 tree var0, con0, lit0, minus_lit0;
10570 tree var1, con1, lit1, minus_lit1;
10571 tree atype = type;
10572 bool ok = true;
10573
10574 /* Split both trees into variables, constants, and literals. Then
10575 associate each group together, the constants with literals,
10576 then the result with variables. This increases the chances of
10577 literals being recombined later and of generating relocatable
10578 expressions for the sum of a constant and literal. */
10579 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10580 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10581 code == MINUS_EXPR);
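/* For example, (x + 3) + (y + 5) splits into variables x, y and
literals 3, 5; re-associating groups the literals so they can
fold into (x + y) + 8.  */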
10582
10583 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10584 if (code == MINUS_EXPR)
10585 code = PLUS_EXPR;
10586
10587 /* With undefined overflow prefer doing association in a type
10588 which wraps on overflow, if that is one of the operand types. */
10589 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10590 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10591 {
10592 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10593 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10594 atype = TREE_TYPE (arg0);
10595 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10596 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10597 atype = TREE_TYPE (arg1);
10598 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10599 }
10600
10601 /* With undefined overflow we can only associate constants with one
10602 variable, and constants whose association doesn't overflow. */
10603 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10604 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10605 {
10606 if (var0 && var1)
10607 {
10608 tree tmp0 = var0;
10609 tree tmp1 = var1;
10610
10611 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10612 tmp0 = TREE_OPERAND (tmp0, 0);
10613 if (CONVERT_EXPR_P (tmp0)
10614 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10615 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10616 <= TYPE_PRECISION (atype)))
10617 tmp0 = TREE_OPERAND (tmp0, 0);
10618 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10619 tmp1 = TREE_OPERAND (tmp1, 0);
10620 if (CONVERT_EXPR_P (tmp1)
10621 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10622 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10623 <= TYPE_PRECISION (atype)))
10624 tmp1 = TREE_OPERAND (tmp1, 0);
10625 /* The only case we can still associate with two variables
10626 is if they are the same, modulo negation and bit-pattern
10627 preserving conversions. */
10628 if (!operand_equal_p (tmp0, tmp1, 0))
10629 ok = false;
10630 }
10631 }
10632
10633 /* Only do something if we found more than two objects. Otherwise,
10634 nothing has changed and we risk infinite recursion. */
10635 if (ok
10636 && (2 < ((var0 != 0) + (var1 != 0)
10637 + (con0 != 0) + (con1 != 0)
10638 + (lit0 != 0) + (lit1 != 0)
10639 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10640 {
10641 bool any_overflows = false;
10642 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10643 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10644 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10645 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10646 var0 = associate_trees (loc, var0, var1, code, atype);
10647 con0 = associate_trees (loc, con0, con1, code, atype);
10648 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10649 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10650 code, atype);
10651
10652 /* Preserve the MINUS_EXPR if the negative part of the literal is
10653 greater than the positive part. Otherwise, the multiplicative
10654 folding code (i.e. extract_muldiv) may be fooled when
10655 unsigned constants are subtracted, as in the following
10656 example: ((X*2 + 4) - 8U)/2. */
10657 if (minus_lit0 && lit0)
10658 {
10659 if (TREE_CODE (lit0) == INTEGER_CST
10660 && TREE_CODE (minus_lit0) == INTEGER_CST
10661 && tree_int_cst_lt (lit0, minus_lit0))
10662 {
10663 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10664 MINUS_EXPR, atype);
10665 lit0 = 0;
10666 }
10667 else
10668 {
10669 lit0 = associate_trees (loc, lit0, minus_lit0,
10670 MINUS_EXPR, atype);
10671 minus_lit0 = 0;
10672 }
10673 }
10674
10675 /* Don't introduce overflows through reassociation. */
10676 if (!any_overflows
10677 && ((lit0 && TREE_OVERFLOW (lit0))
10678 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10679 return NULL_TREE;
10680
10681 if (minus_lit0)
10682 {
10683 if (con0 == 0)
10684 return
10685 fold_convert_loc (loc, type,
10686 associate_trees (loc, var0, minus_lit0,
10687 MINUS_EXPR, atype));
10688 else
10689 {
10690 con0 = associate_trees (loc, con0, minus_lit0,
10691 MINUS_EXPR, atype);
10692 return
10693 fold_convert_loc (loc, type,
10694 associate_trees (loc, var0, con0,
10695 PLUS_EXPR, atype));
10696 }
10697 }
10698
10699 con0 = associate_trees (loc, con0, lit0, code, atype);
10700 return
10701 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10702 code, atype));
10703 }
10704 }
10705
10706 return NULL_TREE;
10707
10708 case MINUS_EXPR:
10709 /* Pointer simplifications for subtraction, simple reassociations. */
10710 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10711 {
10712 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10713 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10714 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10715 {
10716 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10717 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10718 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10719 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10720 return fold_build2_loc (loc, PLUS_EXPR, type,
10721 fold_build2_loc (loc, MINUS_EXPR, type,
10722 arg00, arg10),
10723 fold_build2_loc (loc, MINUS_EXPR, type,
10724 arg01, arg11));
10725 }
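/* E.g. (p p+ i) - (p p+ j) becomes (p - p) + (i - j), which
further folding reduces to i - j.  */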
10726 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10727 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10728 {
10729 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10730 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10731 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10732 fold_convert_loc (loc, type, arg1));
10733 if (tmp)
10734 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10735 }
10736 }
10737 /* A - (-B) -> A + B */
10738 if (TREE_CODE (arg1) == NEGATE_EXPR)
10739 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10740 fold_convert_loc (loc, type,
10741 TREE_OPERAND (arg1, 0)));
10742 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10743 if (TREE_CODE (arg0) == NEGATE_EXPR
10744 && negate_expr_p (arg1)
10745 && reorder_operands_p (arg0, arg1))
10746 return fold_build2_loc (loc, MINUS_EXPR, type,
10747 fold_convert_loc (loc, type,
10748 negate_expr (arg1)),
10749 fold_convert_loc (loc, type,
10750 TREE_OPERAND (arg0, 0)));
10751 /* Convert -A - 1 to ~A. */
10752 if (TREE_CODE (type) != COMPLEX_TYPE
10753 && TREE_CODE (arg0) == NEGATE_EXPR
10754 && integer_onep (arg1)
10755 && !TYPE_OVERFLOW_TRAPS (type))
10756 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10757 fold_convert_loc (loc, type,
10758 TREE_OPERAND (arg0, 0)));
10759
10760 /* Convert -1 - A to ~A. */
10761 if (TREE_CODE (type) != COMPLEX_TYPE
10762 && integer_all_onesp (arg0))
10763 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10764
10765
10766 /* X - (X / Y) * Y is X % Y. */
10767 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10768 && TREE_CODE (arg1) == MULT_EXPR
10769 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10770 && operand_equal_p (arg0,
10771 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10772 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10773 TREE_OPERAND (arg1, 1), 0))
10774 return
10775 fold_convert_loc (loc, type,
10776 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10777 arg0, TREE_OPERAND (arg1, 1)));
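/* E.g. x - (x / 8) * 8 folds to x % 8.  */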
10778
10779 if (! FLOAT_TYPE_P (type))
10780 {
10781 if (integer_zerop (arg0))
10782 return negate_expr (fold_convert_loc (loc, type, arg1));
10783 if (integer_zerop (arg1))
10784 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10785
10786 /* Fold A - (A & B) into ~B & A. */
10787 if (!TREE_SIDE_EFFECTS (arg0)
10788 && TREE_CODE (arg1) == BIT_AND_EXPR)
10789 {
10790 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10791 {
10792 tree arg10 = fold_convert_loc (loc, type,
10793 TREE_OPERAND (arg1, 0));
10794 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10795 fold_build1_loc (loc, BIT_NOT_EXPR,
10796 type, arg10),
10797 fold_convert_loc (loc, type, arg0));
10798 }
10799 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10800 {
10801 tree arg11 = fold_convert_loc (loc,
10802 type, TREE_OPERAND (arg1, 1));
10803 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10804 fold_build1_loc (loc, BIT_NOT_EXPR,
10805 type, arg11),
10806 fold_convert_loc (loc, type, arg0));
10807 }
10808 }
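/* Sanity check on the fold above, looking at four bits:
a = 0b1100, b = 0b1010 gives a - (a & b) = 0b0100, and
~b & a = 0b0101 & 0b1100 = 0b0100 as well.  */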
10809
10810 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10811 any power of 2 minus 1. */
10812 if (TREE_CODE (arg0) == BIT_AND_EXPR
10813 && TREE_CODE (arg1) == BIT_AND_EXPR
10814 && operand_equal_p (TREE_OPERAND (arg0, 0),
10815 TREE_OPERAND (arg1, 0), 0))
10816 {
10817 tree mask0 = TREE_OPERAND (arg0, 1);
10818 tree mask1 = TREE_OPERAND (arg1, 1);
10819 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10820
10821 if (operand_equal_p (tem, mask1, 0))
10822 {
10823 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10824 TREE_OPERAND (arg0, 0), mask1);
10825 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10826 }
10827 }
10828 }
10829
10830 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10831 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10832 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10833
10834 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10835 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10836 (-ARG1 + ARG0) reduces to -ARG1. */
10837 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10838 return negate_expr (fold_convert_loc (loc, type, arg1));
10839
10840 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10841 __complex__ ( x, -y ). This is not the same for SNaNs or if
10842 signed zeros are involved. */
10843 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10844 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10845 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10846 {
10847 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10848 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10849 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10850 bool arg0rz = false, arg0iz = false;
10851 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10852 || (arg0i && (arg0iz = real_zerop (arg0i))))
10853 {
10854 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10855 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10856 if (arg0rz && arg1i && real_zerop (arg1i))
10857 {
10858 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10859 arg1r ? arg1r
10860 : build1 (REALPART_EXPR, rtype, arg1));
10861 tree ip = arg0i ? arg0i
10862 : build1 (IMAGPART_EXPR, rtype, arg0);
10863 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10864 }
10865 else if (arg0iz && arg1r && real_zerop (arg1r))
10866 {
10867 tree rp = arg0r ? arg0r
10868 : build1 (REALPART_EXPR, rtype, arg0);
10869 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10870 arg1i ? arg1i
10871 : build1 (IMAGPART_EXPR, rtype, arg1));
10872 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10873 }
10874 }
10875 }
10876
10877 /* Fold &x - &x. This can happen from &x.foo - &x.
10878 This is unsafe for certain floats even in non-IEEE formats.
10879 In IEEE, it is unsafe because it does the wrong thing for NaNs.
10880 Also note that operand_equal_p is always false if an operand
10881 is volatile. */
10882
10883 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10884 && operand_equal_p (arg0, arg1, 0))
10885 return build_zero_cst (type);
10886
10887 /* A - B -> A + (-B) if B is easily negatable. */
10888 if (negate_expr_p (arg1)
10889 && ((FLOAT_TYPE_P (type)
10890 /* Avoid this transformation if B is a positive REAL_CST. */
10891 && (TREE_CODE (arg1) != REAL_CST
10892 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10893 || INTEGRAL_TYPE_P (type)))
10894 return fold_build2_loc (loc, PLUS_EXPR, type,
10895 fold_convert_loc (loc, type, arg0),
10896 fold_convert_loc (loc, type,
10897 negate_expr (arg1)));
10898
10899 /* Try folding difference of addresses. */
10900 {
10901 HOST_WIDE_INT diff;
10902
10903 if ((TREE_CODE (arg0) == ADDR_EXPR
10904 || TREE_CODE (arg1) == ADDR_EXPR)
10905 && ptr_difference_const (arg0, arg1, &diff))
10906 return build_int_cst_type (type, diff);
10907 }
10908
10909 /* Fold &a[i] - &a[j] to i-j. */
10910 if (TREE_CODE (arg0) == ADDR_EXPR
10911 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10912 && TREE_CODE (arg1) == ADDR_EXPR
10913 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10914 {
10915 tree tem = fold_addr_of_array_ref_difference (loc, type,
10916 TREE_OPERAND (arg0, 0),
10917 TREE_OPERAND (arg1, 0));
10918 if (tem)
10919 return tem;
10920 }
10921
10922 if (FLOAT_TYPE_P (type)
10923 && flag_unsafe_math_optimizations
10924 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10925 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10926 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10927 return tem;
10928
10929 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10930 one of them being 1. Make sure the type is not saturating and has the
10931 signedness of the stripped operands, as fold_plusminus_mult_expr will re-associate.
10932 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10933 if ((TREE_CODE (arg0) == MULT_EXPR
10934 || TREE_CODE (arg1) == MULT_EXPR)
10935 && !TYPE_SATURATING (type)
10936 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10937 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10938 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10939 {
10940 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10941 if (tem)
10942 return tem;
10943 }
10944
10945 goto associate;
10946
10947 case MULT_EXPR:
10948 /* (-A) * (-B) -> A * B */
10949 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10950 return fold_build2_loc (loc, MULT_EXPR, type,
10951 fold_convert_loc (loc, type,
10952 TREE_OPERAND (arg0, 0)),
10953 fold_convert_loc (loc, type,
10954 negate_expr (arg1)));
10955 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10956 return fold_build2_loc (loc, MULT_EXPR, type,
10957 fold_convert_loc (loc, type,
10958 negate_expr (arg0)),
10959 fold_convert_loc (loc, type,
10960 TREE_OPERAND (arg1, 0)));
10961
10962 if (! FLOAT_TYPE_P (type))
10963 {
10964 if (integer_zerop (arg1))
10965 return omit_one_operand_loc (loc, type, arg1, arg0);
10966 if (integer_onep (arg1))
10967 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10968 /* Transform x * -1 into -x. Make sure to do the negation
10969 on the original operand with conversions not stripped
10970 because we can only strip non-sign-changing conversions. */
10971 if (integer_minus_onep (arg1))
10972 return fold_convert_loc (loc, type, negate_expr (op0));
10973 /* Transform x * -C into -x * C if x is easily negatable. */
10974 if (TREE_CODE (arg1) == INTEGER_CST
10975 && tree_int_cst_sgn (arg1) == -1
10976 && negate_expr_p (arg0)
10977 && (tem = negate_expr (arg1)) != arg1
10978 && !TREE_OVERFLOW (tem))
10979 return fold_build2_loc (loc, MULT_EXPR, type,
10980 fold_convert_loc (loc, type,
10981 negate_expr (arg0)),
10982 tem);
10983
10984 /* (a * (1 << b)) is (a << b) */
10985 if (TREE_CODE (arg1) == LSHIFT_EXPR
10986 && integer_onep (TREE_OPERAND (arg1, 0)))
10987 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10988 TREE_OPERAND (arg1, 1));
10989 if (TREE_CODE (arg0) == LSHIFT_EXPR
10990 && integer_onep (TREE_OPERAND (arg0, 0)))
10991 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10992 TREE_OPERAND (arg0, 1));
10993
10994 /* (A + A) * C -> A * 2 * C */
10995 if (TREE_CODE (arg0) == PLUS_EXPR
10996 && TREE_CODE (arg1) == INTEGER_CST
10997 && operand_equal_p (TREE_OPERAND (arg0, 0),
10998 TREE_OPERAND (arg0, 1), 0))
10999 return fold_build2_loc (loc, MULT_EXPR, type,
11000 omit_one_operand_loc (loc, type,
11001 TREE_OPERAND (arg0, 0),
11002 TREE_OPERAND (arg0, 1)),
11003 fold_build2_loc (loc, MULT_EXPR, type,
11004 build_int_cst (type, 2) , arg1));
11005
11006 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11007 sign-changing only. */
11008 if (TREE_CODE (arg1) == INTEGER_CST
11009 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11010 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11011 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
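/* E.g. ((long) (x /[ex] 4)) * 4 folds to (long) x: /[ex]
asserts the division is exact, so multiplying back by the same
constant recovers X.  */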
11012
11013 strict_overflow_p = false;
11014 if (TREE_CODE (arg1) == INTEGER_CST
11015 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11016 &strict_overflow_p)))
11017 {
11018 if (strict_overflow_p)
11019 fold_overflow_warning (("assuming signed overflow does not "
11020 "occur when simplifying "
11021 "multiplication"),
11022 WARN_STRICT_OVERFLOW_MISC);
11023 return fold_convert_loc (loc, type, tem);
11024 }
11025
11026 /* Optimize z * conj(z) for integer complex numbers. */
11027 if (TREE_CODE (arg0) == CONJ_EXPR
11028 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11029 return fold_mult_zconjz (loc, type, arg1);
11030 if (TREE_CODE (arg1) == CONJ_EXPR
11031 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11032 return fold_mult_zconjz (loc, type, arg0);
11033 }
11034 else
11035 {
11036 /* Maybe fold x * 0 to 0. The expressions aren't the same
11037 when x is NaN, since x * 0 is also NaN. Nor are they the
11038 same in modes with signed zeros, since multiplying a
11039 negative value by 0 gives -0, not +0. */
11040 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11041 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11042 && real_zerop (arg1))
11043 return omit_one_operand_loc (loc, type, arg1, arg0);
11044 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
11045 Likewise for complex arithmetic with signed zeros. */
11046 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11047 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11048 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11049 && real_onep (arg1))
11050 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11051
11052 /* Transform x * -1.0 into -x. */
11053 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11054 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11055 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11056 && real_minus_onep (arg1))
11057 return fold_convert_loc (loc, type, negate_expr (arg0));
11058
11059 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11060 the result for floating point types due to rounding, so it is applied
11061 only if -fassociative-math was specified. */
11062 if (flag_associative_math
11063 && TREE_CODE (arg0) == RDIV_EXPR
11064 && TREE_CODE (arg1) == REAL_CST
11065 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11066 {
11067 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11068 arg1);
11069 if (tem)
11070 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11071 TREE_OPERAND (arg0, 1));
11072 }
11073
11074 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11075 if (operand_equal_p (arg0, arg1, 0))
11076 {
11077 tree tem = fold_strip_sign_ops (arg0);
11078 if (tem != NULL_TREE)
11079 {
11080 tem = fold_convert_loc (loc, type, tem);
11081 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11082 }
11083 }
11084
11085 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11086 This is not the same for NaNs or if signed zeros are
11087 involved. */
11088 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11089 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11090 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11091 && TREE_CODE (arg1) == COMPLEX_CST
11092 && real_zerop (TREE_REALPART (arg1)))
11093 {
11094 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11095 if (real_onep (TREE_IMAGPART (arg1)))
11096 return
11097 fold_build2_loc (loc, COMPLEX_EXPR, type,
11098 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11099 rtype, arg0)),
11100 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11101 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11102 return
11103 fold_build2_loc (loc, COMPLEX_EXPR, type,
11104 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11105 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11106 rtype, arg0)));
11107 }
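/* E.g. (a + b*I) * I folds to __complex__ (-b, a), matching the
algebraic identity (a + bi) * i = -b + ai.  */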
11108
11109 /* Optimize z * conj(z) for floating point complex numbers.
11110 Guarded by flag_unsafe_math_optimizations as non-finite
11111 imaginary components don't produce scalar results. */
11112 if (flag_unsafe_math_optimizations
11113 && TREE_CODE (arg0) == CONJ_EXPR
11114 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11115 return fold_mult_zconjz (loc, type, arg1);
11116 if (flag_unsafe_math_optimizations
11117 && TREE_CODE (arg1) == CONJ_EXPR
11118 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11119 return fold_mult_zconjz (loc, type, arg0);
11120
11121 if (flag_unsafe_math_optimizations)
11122 {
11123 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11124 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11125
11126 /* Optimizations of root(...)*root(...). */
11127 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11128 {
11129 tree rootfn, arg;
11130 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11131 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11132
11133 /* Optimize sqrt(x)*sqrt(x) as x. */
11134 if (BUILTIN_SQRT_P (fcode0)
11135 && operand_equal_p (arg00, arg10, 0)
11136 && ! HONOR_SNANS (TYPE_MODE (type)))
11137 return arg00;
11138
11139 /* Optimize root(x)*root(y) as root(x*y). */
11140 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11141 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11142 return build_call_expr_loc (loc, rootfn, 1, arg);
11143 }
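/* E.g. sqrt (x) * sqrt (y) becomes sqrt (x * y).  This is unsafe
in general: for x = y = -1 the left-hand side is NaN * NaN while
sqrt (1) is 1, hence the flag_unsafe_math_optimizations guard.  */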
11144
11145 /* Optimize expN(x)*expN(y) as expN(x+y). */
11146 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11147 {
11148 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11149 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11150 CALL_EXPR_ARG (arg0, 0),
11151 CALL_EXPR_ARG (arg1, 0));
11152 return build_call_expr_loc (loc, expfn, 1, arg);
11153 }
11154
11155 /* Optimizations of pow(...)*pow(...). */
11156 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11157 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11158 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11159 {
11160 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11161 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11162 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11163 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11164
11165 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11166 if (operand_equal_p (arg01, arg11, 0))
11167 {
11168 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11169 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11170 arg00, arg10);
11171 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11172 }
11173
11174 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11175 if (operand_equal_p (arg00, arg10, 0))
11176 {
11177 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11178 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11179 arg01, arg11);
11180 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11181 }
11182 }
11183
11184 /* Optimize tan(x)*cos(x) as sin(x). */
11185 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11186 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11187 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11188 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11189 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11190 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11191 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11192 CALL_EXPR_ARG (arg1, 0), 0))
11193 {
11194 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11195
11196 if (sinfn != NULL_TREE)
11197 return build_call_expr_loc (loc, sinfn, 1,
11198 CALL_EXPR_ARG (arg0, 0));
11199 }
11200
11201 /* Optimize x*pow(x,c) as pow(x,c+1). */
11202 if (fcode1 == BUILT_IN_POW
11203 || fcode1 == BUILT_IN_POWF
11204 || fcode1 == BUILT_IN_POWL)
11205 {
11206 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11207 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11208 if (TREE_CODE (arg11) == REAL_CST
11209 && !TREE_OVERFLOW (arg11)
11210 && operand_equal_p (arg0, arg10, 0))
11211 {
11212 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11213 REAL_VALUE_TYPE c;
11214 tree arg;
11215
11216 c = TREE_REAL_CST (arg11);
11217 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11218 arg = build_real (type, c);
11219 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11220 }
11221 }
11222
11223 /* Optimize pow(x,c)*x as pow(x,c+1). */
11224 if (fcode0 == BUILT_IN_POW
11225 || fcode0 == BUILT_IN_POWF
11226 || fcode0 == BUILT_IN_POWL)
11227 {
11228 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11229 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11230 if (TREE_CODE (arg01) == REAL_CST
11231 && !TREE_OVERFLOW (arg01)
11232 && operand_equal_p (arg1, arg00, 0))
11233 {
11234 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11235 REAL_VALUE_TYPE c;
11236 tree arg;
11237
11238 c = TREE_REAL_CST (arg01);
11239 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11240 arg = build_real (type, c);
11241 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11242 }
11243 }
11244
11245 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11246 if (!in_gimple_form
11247 && optimize
11248 && operand_equal_p (arg0, arg1, 0))
11249 {
11250 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11251
11252 if (powfn)
11253 {
11254 tree arg = build_real (type, dconst2);
11255 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11256 }
11257 }
11258 }
11259 }
11260 goto associate;
11261
11262 case BIT_IOR_EXPR:
11263 bit_ior:
11264 if (integer_all_onesp (arg1))
11265 return omit_one_operand_loc (loc, type, arg1, arg0);
11266 if (integer_zerop (arg1))
11267 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11268 if (operand_equal_p (arg0, arg1, 0))
11269 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11270
11271 /* ~X | X is -1. */
11272 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11273 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11274 {
11275 t1 = build_zero_cst (type);
11276 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11277 return omit_one_operand_loc (loc, type, t1, arg1);
11278 }
11279
11280 /* X | ~X is -1. */
11281 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11282 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11283 {
11284 t1 = build_zero_cst (type);
11285 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11286 return omit_one_operand_loc (loc, type, t1, arg0);
11287 }
11288
11289 /* Canonicalize (X & C1) | C2. */
11290 if (TREE_CODE (arg0) == BIT_AND_EXPR
11291 && TREE_CODE (arg1) == INTEGER_CST
11292 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11293 {
11294 double_int c1, c2, c3, msk;
11295 int width = TYPE_PRECISION (type), w;
11296 bool try_simplify = true;
11297
11298 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11299 c2 = tree_to_double_int (arg1);
11300
11301 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11302 if ((c1 & c2) == c1)
11303 return omit_one_operand_loc (loc, type, arg1,
11304 TREE_OPERAND (arg0, 0));
11305
11306 msk = double_int::mask (width);
11307
11308 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11309 if (msk.and_not (c1 | c2).is_zero ())
11310 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11311 TREE_OPERAND (arg0, 0), arg1);
11312
11313 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11314 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11315 mode which allows further optimizations. */
11316 c1 &= msk;
11317 c2 &= msk;
11318 c3 = c1.and_not (c2);
11319 for (w = BITS_PER_UNIT;
11320 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11321 w <<= 1)
11322 {
11323 unsigned HOST_WIDE_INT mask
11324 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11325 if (((c1.low | c2.low) & mask) == mask
11326 && (c1.low & ~mask) == 0 && c1.high == 0)
11327 {
11328 c3 = double_int::from_uhwi (mask);
11329 break;
11330 }
11331 }
11332
11333 /* If X is a tree of the form (Y * K1) & K2, this might conflict
11334 with the corresponding optimization in the BIT_AND_EXPR case,
11335 which could end up in infinite recursion. */
11336 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11337 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11338 == INTEGER_CST)
11339 {
11340 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11341 double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
11342
11343 try_simplify = (masked != c1);
11344 }
11345
11346 if (try_simplify && c3 != c1)
11347 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11348 fold_build2_loc (loc, BIT_AND_EXPR, type,
11349 TREE_OPERAND (arg0, 0),
11350 double_int_to_tree (type,
11351 c3)),
11352 arg1);
11353 }
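/* Two instances of the canonicalization above: (x & 3) | 15
becomes just 15, since 3 & 15 == 3; and, for an 8-bit type,
(x & 0xF0) | 0x0F becomes x | 0x0F, since 0xF0 | 0x0F covers
every bit.  */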
11354
11355 /* (X & Y) | Y is (X, Y). */
11356 if (TREE_CODE (arg0) == BIT_AND_EXPR
11357 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11358 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11359 /* (X & Y) | X is (Y, X). */
11360 if (TREE_CODE (arg0) == BIT_AND_EXPR
11361 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11362 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11363 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11364 /* X | (X & Y) is (Y, X). */
11365 if (TREE_CODE (arg1) == BIT_AND_EXPR
11366 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11367 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11368 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11369 /* X | (Y & X) is (Y, X). */
11370 if (TREE_CODE (arg1) == BIT_AND_EXPR
11371 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11372 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11373 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11374
11375 /* (X & ~Y) | (~X & Y) is X ^ Y */
11376 if (TREE_CODE (arg0) == BIT_AND_EXPR
11377 && TREE_CODE (arg1) == BIT_AND_EXPR)
11378 {
11379 tree a0, a1, l0, l1, n0, n1;
11380
11381 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11382 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11383
11384 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11385 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11386
11387 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11388 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11389
11390 if ((operand_equal_p (n0, a0, 0)
11391 && operand_equal_p (n1, a1, 0))
11392 || (operand_equal_p (n0, a1, 0)
11393 && operand_equal_p (n1, a0, 0)))
11394 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11395 }
11396
11397 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11398 if (t1 != NULL_TREE)
11399 return t1;
11400
11401 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11402
11403 This results in more efficient code for machines without a NAND
11404 instruction. Combine will canonicalize to the first form
11405 which will allow use of NAND instructions provided by the
11406 backend if they exist. */
11407 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11408 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11409 {
11410 return
11411 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11412 build2 (BIT_AND_EXPR, type,
11413 fold_convert_loc (loc, type,
11414 TREE_OPERAND (arg0, 0)),
11415 fold_convert_loc (loc, type,
11416 TREE_OPERAND (arg1, 0))));
11417 }
11418
11419 /* See if this can be simplified into a rotate first. If that
11420 is unsuccessful continue in the association code. */
11421 goto bit_rotate;
11422
11423 case BIT_XOR_EXPR:
11424 if (integer_zerop (arg1))
11425 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11426 if (integer_all_onesp (arg1))
11427 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11428 if (operand_equal_p (arg0, arg1, 0))
11429 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11430
11431 /* ~X ^ X is -1. */
11432 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11433 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11434 {
11435 t1 = build_zero_cst (type);
11436 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11437 return omit_one_operand_loc (loc, type, t1, arg1);
11438 }
11439
11440 /* X ^ ~X is -1. */
11441 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11442 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11443 {
11444 t1 = build_zero_cst (type);
11445 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11446 return omit_one_operand_loc (loc, type, t1, arg0);
11447 }
11448
11449 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11450 with a constant, and the two constants have no bits in common,
11451 we should treat this as a BIT_IOR_EXPR since this may produce more
11452 simplifications. */
11453 if (TREE_CODE (arg0) == BIT_AND_EXPR
11454 && TREE_CODE (arg1) == BIT_AND_EXPR
11455 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11456 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11457 && integer_zerop (const_binop (BIT_AND_EXPR,
11458 TREE_OPERAND (arg0, 1),
11459 TREE_OPERAND (arg1, 1))))
11460 {
11461 code = BIT_IOR_EXPR;
11462 goto bit_ior;
11463 }
11464
11465 /* (X | Y) ^ X -> Y & ~X */
11466 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11467 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11468 {
11469 tree t2 = TREE_OPERAND (arg0, 1);
11470 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11471 arg1);
11472 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11473 fold_convert_loc (loc, type, t2),
11474 fold_convert_loc (loc, type, t1));
11475 return t1;
11476 }
11477
11478 /* (Y | X) ^ X -> Y & ~X */
11479 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11480 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11481 {
11482 tree t2 = TREE_OPERAND (arg0, 0);
11483 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11484 arg1);
11485 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11486 fold_convert_loc (loc, type, t2),
11487 fold_convert_loc (loc, type, t1));
11488 return t1;
11489 }
11490
11491 /* X ^ (X | Y) -> Y & ~X */
11492 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11493 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11494 {
11495 tree t2 = TREE_OPERAND (arg1, 1);
11496 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11497 arg0);
11498 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11499 fold_convert_loc (loc, type, t2),
11500 fold_convert_loc (loc, type, t1));
11501 return t1;
11502 }
11503
11504 /* X ^ (Y | X) -> Y & ~X */
11505 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11506 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11507 {
11508 tree t2 = TREE_OPERAND (arg1, 0);
11509 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11510 arg0);
11511 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11512 fold_convert_loc (loc, type, t2),
11513 fold_convert_loc (loc, type, t1));
11514 return t1;
11515 }
11516
11517 /* Convert ~X ^ ~Y to X ^ Y. */
11518 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11519 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11520 return fold_build2_loc (loc, code, type,
11521 fold_convert_loc (loc, type,
11522 TREE_OPERAND (arg0, 0)),
11523 fold_convert_loc (loc, type,
11524 TREE_OPERAND (arg1, 0)));
11525
11526 /* Convert ~X ^ C to X ^ ~C. */
11527 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11528 && TREE_CODE (arg1) == INTEGER_CST)
11529 return fold_build2_loc (loc, code, type,
11530 fold_convert_loc (loc, type,
11531 TREE_OPERAND (arg0, 0)),
11532 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11533
11534 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11535 if (TREE_CODE (arg0) == BIT_AND_EXPR
11536 && integer_onep (TREE_OPERAND (arg0, 1))
11537 && integer_onep (arg1))
11538 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11539 build_zero_cst (TREE_TYPE (arg0)));
11540
11541 /* Fold (X & Y) ^ Y as ~X & Y. */
11542 if (TREE_CODE (arg0) == BIT_AND_EXPR
11543 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11544 {
11545 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11546 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11547 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11548 fold_convert_loc (loc, type, arg1));
11549 }
11550 /* Fold (X & Y) ^ X as ~Y & X. */
11551 if (TREE_CODE (arg0) == BIT_AND_EXPR
11552 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11553 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11554 {
11555 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11556 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11557 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11558 fold_convert_loc (loc, type, arg1));
11559 }
11560 /* Fold X ^ (X & Y) as X & ~Y. */
11561 if (TREE_CODE (arg1) == BIT_AND_EXPR
11562 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11563 {
11564 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11565 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11566 fold_convert_loc (loc, type, arg0),
11567 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11568 }
11569 /* Fold X ^ (Y & X) as ~Y & X. */
11570 if (TREE_CODE (arg1) == BIT_AND_EXPR
11571 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11572 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11573 {
11574 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11575 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11576 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11577 fold_convert_loc (loc, type, arg0));
11578 }
11579
11580 /* See if this can be simplified into a rotate first. If that
11581 is unsuccessful continue in the association code. */
11582 goto bit_rotate;
11583
11584 case BIT_AND_EXPR:
11585 if (integer_all_onesp (arg1))
11586 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11587 if (integer_zerop (arg1))
11588 return omit_one_operand_loc (loc, type, arg1, arg0);
11589 if (operand_equal_p (arg0, arg1, 0))
11590 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11591
11592 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11593 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11594 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11595 || (TREE_CODE (arg0) == EQ_EXPR
11596 && integer_zerop (TREE_OPERAND (arg0, 1))))
11597 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11598 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11599
11600 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11601 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11602 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11603 || (TREE_CODE (arg1) == EQ_EXPR
11604 && integer_zerop (TREE_OPERAND (arg1, 1))))
11605 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11606 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11607
11608 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11609 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11610 && TREE_CODE (arg1) == INTEGER_CST
11611 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11612 {
11613 tree tmp1 = fold_convert_loc (loc, type, arg1);
11614 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11615 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11616 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11617 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11618 return
11619 fold_convert_loc (loc, type,
11620 fold_build2_loc (loc, BIT_IOR_EXPR,
11621 type, tmp2, tmp3));
11622 }
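/* E.g. (x | 0x0C) & 0x0A distributes to (x & 0x0A) | 0x08,
because 0x0C & 0x0A == 0x08.  */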
11623
11624 /* (X | Y) & Y is (X, Y). */
11625 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11626 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11627 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11628 /* (X | Y) & X is (Y, X). */
11629 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11630 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11631 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11632 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11633 /* X & (X | Y) is (Y, X). */
11634 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11635 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11636 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11637 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11638 /* X & (Y | X) is (Y, X). */
11639 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11640 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11641 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11642 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11643
11644 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11645 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11646 && integer_onep (TREE_OPERAND (arg0, 1))
11647 && integer_onep (arg1))
11648 {
11649 tree tem2;
11650 tem = TREE_OPERAND (arg0, 0);
11651 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11652 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11653 tem, tem2);
11654 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11655 build_zero_cst (TREE_TYPE (tem)));
11656 }
11657 /* Fold ~X & 1 as (X & 1) == 0. */
11658 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11659 && integer_onep (arg1))
11660 {
11661 tree tem2;
11662 tem = TREE_OPERAND (arg0, 0);
11663 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11664 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11665 tem, tem2);
11666 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11667 build_zero_cst (TREE_TYPE (tem)));
11668 }
11669 /* Fold !X & 1 as X == 0. */
11670 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11671 && integer_onep (arg1))
11672 {
11673 tem = TREE_OPERAND (arg0, 0);
11674 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11675 build_zero_cst (TREE_TYPE (tem)));
11676 }
11677
11678 /* Fold (X ^ Y) & Y as ~X & Y. */
11679 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11680 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11681 {
11682 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11683 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11684 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11685 fold_convert_loc (loc, type, arg1));
11686 }
11687 /* Fold (X ^ Y) & X as ~Y & X. */
11688 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11689 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11690 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11691 {
11692 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11693 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11694 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11695 fold_convert_loc (loc, type, arg1));
11696 }
11697 /* Fold X & (X ^ Y) as X & ~Y. */
11698 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11699 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11700 {
11701 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11702 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11703 fold_convert_loc (loc, type, arg0),
11704 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11705 }
11706 /* Fold X & (Y ^ X) as ~Y & X. */
11707 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11708 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11709 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11710 {
11711 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11712 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11713 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11714 fold_convert_loc (loc, type, arg0));
11715 }
11716
11717 /* Fold (X * Y) & -(1 << CST) to X * Y if X * Y is known to be
11718 a multiple of 1 << CST. */
11719 if (TREE_CODE (arg1) == INTEGER_CST)
11720 {
11721 double_int cst1 = tree_to_double_int (arg1);
11722 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11723 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11724 if ((cst1 & ncst1) == ncst1
11725 && multiple_of_p (type, arg0,
11726 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11727 return fold_convert_loc (loc, type, arg0);
11728 }
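/* E.g. (x * 8) & -8 folds to x * 8: the product is always a
multiple of 8 == 1 << 3, so the mask clears nothing.  */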
11729
11730 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11731 bits from CST2. */
11732 if (TREE_CODE (arg1) == INTEGER_CST
11733 && TREE_CODE (arg0) == MULT_EXPR
11734 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11735 {
11736 double_int masked
11737 = mask_with_tz (type, tree_to_double_int (arg1),
11738 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11739
11740 if (masked.is_zero ())
11741 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11742 arg0, arg1);
11743 else if (masked != tree_to_double_int (arg1))
11744 return fold_build2_loc (loc, code, type, op0,
11745 double_int_to_tree (type, masked));
11746 }
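/* E.g. (x * 4) & 1 folds to 0, and (x * 4) & 7 shrinks to
(x * 4) & 4, since the product always has its two low bits
clear.  */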
11747
11748 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11749 ((A & N) + B) & M -> (A + B) & M
11750 Similarly if (N & M) == 0,
11751 ((A | N) + B) & M -> (A + B) & M
11752 and for - instead of + (or unary - instead of +)
11753 and/or ^ instead of |.
11754 If B is constant and (B & M) == 0, fold into A & M. */
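/* Concretely, with M == 7: ((A & 0xFF) + B) & 7 -> (A + B) & 7
since 0xFF & 7 == 7; ((A | 8) + B) & 7 -> (A + B) & 7 since
8 & 7 == 0; and (A + 16) & 7 -> A & 7 since 16 & 7 == 0.  */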
11755 if (host_integerp (arg1, 1))
11756 {
11757 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11758 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11759 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11760 && (TREE_CODE (arg0) == PLUS_EXPR
11761 || TREE_CODE (arg0) == MINUS_EXPR
11762 || TREE_CODE (arg0) == NEGATE_EXPR)
11763 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11764 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11765 {
11766 tree pmop[2];
11767 int which = 0;
11768 unsigned HOST_WIDE_INT cst0;
11769
11770 /* Now we know that arg0 is (C + D) or (C - D) or
11771 -C and arg1 (M) is (1LL << cst) - 1.
11772 Store C into PMOP[0] and D into PMOP[1]. */
11773 pmop[0] = TREE_OPERAND (arg0, 0);
11774 pmop[1] = NULL;
11775 if (TREE_CODE (arg0) != NEGATE_EXPR)
11776 {
11777 pmop[1] = TREE_OPERAND (arg0, 1);
11778 which = 1;
11779 }
11780
11781 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11782 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11783 & cst1) != cst1)
11784 which = -1;
11785
11786 for (; which >= 0; which--)
11787 switch (TREE_CODE (pmop[which]))
11788 {
11789 case BIT_AND_EXPR:
11790 case BIT_IOR_EXPR:
11791 case BIT_XOR_EXPR:
11792 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11793 != INTEGER_CST)
11794 break;
11795 /* tree_low_cst not used, because we don't care about
11796 the upper bits. */
11797 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11798 cst0 &= cst1;
11799 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11800 {
11801 if (cst0 != cst1)
11802 break;
11803 }
11804 else if (cst0 != 0)
11805 break;
11806 /* If C or D is of the form (A & N) where
11807 (N & M) == M, or of the form (A | N) or
11808 (A ^ N) where (N & M) == 0, replace it with A. */
11809 pmop[which] = TREE_OPERAND (pmop[which], 0);
11810 break;
11811 case INTEGER_CST:
11812 /* If C or D is a constant N where (N & M) == 0, it can be
11813 omitted (assumed 0). */
11814 if ((TREE_CODE (arg0) == PLUS_EXPR
11815 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11816 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11817 pmop[which] = NULL;
11818 break;
11819 default:
11820 break;
11821 }
11822
11823 /* Only build anything new if we optimized one or both arguments
11824 above. */
11825 if (pmop[0] != TREE_OPERAND (arg0, 0)
11826 || (TREE_CODE (arg0) != NEGATE_EXPR
11827 && pmop[1] != TREE_OPERAND (arg0, 1)))
11828 {
11829 tree utype = TREE_TYPE (arg0);
11830 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11831 {
11832 /* Perform the operations in a type that has defined
11833 overflow behavior. */
11834 utype = unsigned_type_for (TREE_TYPE (arg0));
11835 if (pmop[0] != NULL)
11836 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11837 if (pmop[1] != NULL)
11838 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11839 }
11840
11841 if (TREE_CODE (arg0) == NEGATE_EXPR)
11842 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11843 else if (TREE_CODE (arg0) == PLUS_EXPR)
11844 {
11845 if (pmop[0] != NULL && pmop[1] != NULL)
11846 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11847 pmop[0], pmop[1]);
11848 else if (pmop[0] != NULL)
11849 tem = pmop[0];
11850 else if (pmop[1] != NULL)
11851 tem = pmop[1];
11852 else
11853 return build_int_cst (type, 0);
11854 }
11855 else if (pmop[0] == NULL)
11856 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11857 else
11858 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11859 pmop[0], pmop[1]);
11860 /* TEM is now the new binary +, - or unary - replacement. */
11861 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11862 fold_convert_loc (loc, utype, arg1));
11863 return fold_convert_loc (loc, type, tem);
11864 }
11865 }
11866 }
11867
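/* Concrete instances of the rules above (illustrative only; unsigned
   32-bit arithmetic, with M == 0xff, i.e. cst == 8):

     ((a & 0x1ff) + b) & 0xff  ->  (a + b) & 0xff   as 0x1ff & 0xff == 0xff
     ((a | 0x300) + b) & 0xff  ->  (a + b) & 0xff   as 0x300 & 0xff == 0
     (a + 0x300) & 0xff        ->  a & 0xff         as 0x300 & 0xff == 0

   Each equality holds modulo 0x100, which is all the final mask keeps.  */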
11868 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11869 if (t1 != NULL_TREE)
11870 return t1;
11871 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11872 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11873 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11874 {
11875 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11876
11877 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11878 && (~TREE_INT_CST_LOW (arg1)
11879 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11880 return
11881 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11882 }
11883
11884 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11885
11886 This results in more efficient code for machines without a NOR
11887 instruction. Combine will canonicalize to the first form,
11888 which will allow use of NOR instructions provided by the
11889 backend if they exist. */
11890 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11891 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11892 {
11893 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11894 build2 (BIT_IOR_EXPR, type,
11895 fold_convert_loc (loc, type,
11896 TREE_OPERAND (arg0, 0)),
11897 fold_convert_loc (loc, type,
11898 TREE_OPERAND (arg1, 0))));
11899 }
11900
11901 /* If arg0 is derived from the address of an object or function, we may
11902 be able to fold this expression using the object or function's
11903 alignment. */
11904 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11905 {
11906 unsigned HOST_WIDE_INT modulus, residue;
11907 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11908
11909 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11910 integer_onep (arg1));
11911
11912 /* This works because modulus is a power of 2. If this weren't the
11913 case, we'd have to replace it by its greatest power-of-2
11914 divisor: modulus & -modulus. */
11915 if (low < modulus)
11916 return build_int_cst (type, residue & low);
11917 }
11918
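/* For example (a hedged sketch, not part of this file; assumes unsigned
   long is pointer-sized on the target):

     static int v __attribute__ ((aligned (8)));
     unsigned long f (void) { return (unsigned long) &v & 7; }

   folds to 0: the modulus derived from &v is 8, the residue is 0, and
   the mask 7 is less than the modulus.  */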
11919 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11920 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11921 if the new mask might be further optimized. */
11922 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11923 || TREE_CODE (arg0) == RSHIFT_EXPR)
11924 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11925 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11926 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11927 < TYPE_PRECISION (TREE_TYPE (arg0))
11928 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11929 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11930 {
11931 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11932 unsigned HOST_WIDE_INT mask
11933 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11934 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11935 tree shift_type = TREE_TYPE (arg0);
11936
11937 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11938 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11939 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11940 && TYPE_PRECISION (TREE_TYPE (arg0))
11941 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11942 {
11943 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11944 tree arg00 = TREE_OPERAND (arg0, 0);
11945 /* See if more bits can be proven as zero because of
11946 zero extension. */
11947 if (TREE_CODE (arg00) == NOP_EXPR
11948 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11949 {
11950 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11951 if (TYPE_PRECISION (inner_type)
11952 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11953 && TYPE_PRECISION (inner_type) < prec)
11954 {
11955 prec = TYPE_PRECISION (inner_type);
11956 /* See if we can shorten the right shift. */
11957 if (shiftc < prec)
11958 shift_type = inner_type;
11959 }
11960 }
11961 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11962 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11963 zerobits <<= prec - shiftc;
11964 /* For an arithmetic shift, if the sign bit could be set, zerobits
11965 can actually contain sign bits, so no transformation is
11966 possible, unless MASK masks them all away. In that
11967 case the shift needs to be converted into logical shift. */
11968 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11969 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11970 {
11971 if ((mask & zerobits) == 0)
11972 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11973 else
11974 zerobits = 0;
11975 }
11976 }
11977
11978 /* ((X << 16) & 0xff00) is (X, 0). */
11979 if ((mask & zerobits) == mask)
11980 return omit_one_operand_loc (loc, type,
11981 build_int_cst (type, 0), arg0);
11982
11983 newmask = mask | zerobits;
11984 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11985 {
11986 /* Only do the transformation if NEWMASK is some integer
11987 mode's mask. */
11988 for (prec = BITS_PER_UNIT;
11989 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11990 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11991 break;
11992 if (prec < HOST_BITS_PER_WIDE_INT
11993 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11994 {
11995 tree newmaskt;
11996
11997 if (shift_type != TREE_TYPE (arg0))
11998 {
11999 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12000 fold_convert_loc (loc, shift_type,
12001 TREE_OPERAND (arg0, 0)),
12002 TREE_OPERAND (arg0, 1));
12003 tem = fold_convert_loc (loc, type, tem);
12004 }
12005 else
12006 tem = op0;
12007 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12008 if (!tree_int_cst_equal (newmaskt, arg1))
12009 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12010 }
12011 }
12012 }
12013
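/* Two worked instances (editor's examples, 32-bit unsigned int):

     (x << 16) & 0xff00  ->  (x, 0)      the low 16 bits of x << 16 are
                                          zero, so the mask selects nothing;
     (x >> 24) & 0xff    ->  x >> 24     the logical shift already clears
                                          the top 24 bits, so NEWMASK is all
                                          ones and the AND later folds away.  */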
12014 goto associate;
12015
12016 case RDIV_EXPR:
12017 /* Don't touch a floating-point divide by zero unless the mode
12018 of the constant can represent infinity. */
12019 if (TREE_CODE (arg1) == REAL_CST
12020 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12021 && real_zerop (arg1))
12022 return NULL_TREE;
12023
12024 /* Optimize A / A to 1.0 if we don't care about
12025 NaNs or Infinities. Skip the transformation
12026 for non-real operands. */
12027 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12028 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12029 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12030 && operand_equal_p (arg0, arg1, 0))
12031 {
12032 tree r = build_real (TREE_TYPE (arg0), dconst1);
12033
12034 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12035 }
12036
12037 /* The complex version of the above A / A optimization. */
12038 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12039 && operand_equal_p (arg0, arg1, 0))
12040 {
12041 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12042 if (! HONOR_NANS (TYPE_MODE (elem_type))
12043 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12044 {
12045 tree r = build_real (elem_type, dconst1);
12046 /* omit_two_operands will call fold_convert for us. */
12047 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12048 }
12049 }
12050
12051 /* (-A) / (-B) -> A / B */
12052 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12053 return fold_build2_loc (loc, RDIV_EXPR, type,
12054 TREE_OPERAND (arg0, 0),
12055 negate_expr (arg1));
12056 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12057 return fold_build2_loc (loc, RDIV_EXPR, type,
12058 negate_expr (arg0),
12059 TREE_OPERAND (arg1, 0));
12060
12061 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12062 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12063 && real_onep (arg1))
12064 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12065
12066 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12067 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12068 && real_minus_onep (arg1))
12069 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12070 negate_expr (arg0)));
12071
12072 /* If ARG1 is a constant, we can convert this to a multiply by the
12073 reciprocal. This does not have the same rounding properties,
12074 so only do this if -freciprocal-math. We can actually
12075 always safely do it if ARG1 is a power of two, but it's hard to
12076 tell if it is or not in a portable manner. */
12077 if (optimize
12078 && (TREE_CODE (arg1) == REAL_CST
12079 || (TREE_CODE (arg1) == COMPLEX_CST
12080 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12081 || (TREE_CODE (arg1) == VECTOR_CST
12082 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12083 {
12084 if (flag_reciprocal_math
12085 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12086 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12087 /* Find the reciprocal if optimizing and the result is exact.
12088 TODO: Complex reciprocal not implemented. */
12089 if (TREE_CODE (arg1) != COMPLEX_CST)
12090 {
12091 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12092
12093 if (inverse)
12094 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12095 }
12096 }
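/* E.g. (illustrative, double precision):

     x / 5.0  ->  x * (1.0 / 5.0)   only under -freciprocal-math, since
                                     the rounding can differ;
     x / 4.0  ->  x * 0.25          whenever optimizing, since 1/4 is
                                     exact in binary floating point.  */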
12097 /* Convert A/B/C to A/(B*C). */
12098 if (flag_reciprocal_math
12099 && TREE_CODE (arg0) == RDIV_EXPR)
12100 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12101 fold_build2_loc (loc, MULT_EXPR, type,
12102 TREE_OPERAND (arg0, 1), arg1));
12103
12104 /* Convert A/(B/C) to (A/B)*C. */
12105 if (flag_reciprocal_math
12106 && TREE_CODE (arg1) == RDIV_EXPR)
12107 return fold_build2_loc (loc, MULT_EXPR, type,
12108 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12109 TREE_OPERAND (arg1, 0)),
12110 TREE_OPERAND (arg1, 1));
12111
12112 /* Convert C1/(X*C2) into (C1/C2)/X. */
12113 if (flag_reciprocal_math
12114 && TREE_CODE (arg1) == MULT_EXPR
12115 && TREE_CODE (arg0) == REAL_CST
12116 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12117 {
12118 tree tem = const_binop (RDIV_EXPR, arg0,
12119 TREE_OPERAND (arg1, 1));
12120 if (tem)
12121 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12122 TREE_OPERAND (arg1, 0));
12123 }
12124
12125 if (flag_unsafe_math_optimizations)
12126 {
12127 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12128 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12129
12130 /* Optimize sin(x)/cos(x) as tan(x). */
12131 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12132 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12133 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12134 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12135 CALL_EXPR_ARG (arg1, 0), 0))
12136 {
12137 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12138
12139 if (tanfn != NULL_TREE)
12140 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12141 }
12142
12143 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12144 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12145 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12146 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12147 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12148 CALL_EXPR_ARG (arg1, 0), 0))
12149 {
12150 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12151
12152 if (tanfn != NULL_TREE)
12153 {
12154 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12155 CALL_EXPR_ARG (arg0, 0));
12156 return fold_build2_loc (loc, RDIV_EXPR, type,
12157 build_real (type, dconst1), tmp);
12158 }
12159 }
12160
12161 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12162 NaNs or Infinities. */
12163 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12164 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12165 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12166 {
12167 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12168 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12169
12170 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12171 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12172 && operand_equal_p (arg00, arg01, 0))
12173 {
12174 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12175
12176 if (cosfn != NULL_TREE)
12177 return build_call_expr_loc (loc, cosfn, 1, arg00);
12178 }
12179 }
12180
12181 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12182 NaNs or Infinities. */
12183 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12184 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12185 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12186 {
12187 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12188 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12189
12190 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12191 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12192 && operand_equal_p (arg00, arg01, 0))
12193 {
12194 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12195
12196 if (cosfn != NULL_TREE)
12197 {
12198 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12199 return fold_build2_loc (loc, RDIV_EXPR, type,
12200 build_real (type, dconst1),
12201 tmp);
12202 }
12203 }
12204 }
12205
12206 /* Optimize pow(x,c)/x as pow(x,c-1). */
12207 if (fcode0 == BUILT_IN_POW
12208 || fcode0 == BUILT_IN_POWF
12209 || fcode0 == BUILT_IN_POWL)
12210 {
12211 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12212 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12213 if (TREE_CODE (arg01) == REAL_CST
12214 && !TREE_OVERFLOW (arg01)
12215 && operand_equal_p (arg1, arg00, 0))
12216 {
12217 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12218 REAL_VALUE_TYPE c;
12219 tree arg;
12220
12221 c = TREE_REAL_CST (arg01);
12222 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12223 arg = build_real (type, c);
12224 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12225 }
12226 }
12227
12228 /* Optimize a/root(b/c) into a*root(c/b). */
12229 if (BUILTIN_ROOT_P (fcode1))
12230 {
12231 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12232
12233 if (TREE_CODE (rootarg) == RDIV_EXPR)
12234 {
12235 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12236 tree b = TREE_OPERAND (rootarg, 0);
12237 tree c = TREE_OPERAND (rootarg, 1);
12238
12239 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12240
12241 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12242 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12243 }
12244 }
12245
12246 /* Optimize x/expN(y) into x*expN(-y). */
12247 if (BUILTIN_EXPONENT_P (fcode1))
12248 {
12249 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12250 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12251 arg1 = build_call_expr_loc (loc,
12252 expfn, 1,
12253 fold_convert_loc (loc, type, arg));
12254 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12255 }
12256
12257 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12258 if (fcode1 == BUILT_IN_POW
12259 || fcode1 == BUILT_IN_POWF
12260 || fcode1 == BUILT_IN_POWL)
12261 {
12262 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12263 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12264 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12265 tree neg11 = fold_convert_loc (loc, type,
12266 negate_expr (arg11));
12267 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12268 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12269 }
12270 }
12271 return NULL_TREE;
12272
12273 case TRUNC_DIV_EXPR:
12274 /* Optimize (X & (-A)) / A where A is a power of 2,
12275 to X >> log2 (A). */
12276 if (TREE_CODE (arg0) == BIT_AND_EXPR
12277 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12278 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12279 {
12280 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12281 arg1, TREE_OPERAND (arg0, 1));
12282 if (sum && integer_zerop (sum))
{
12283 unsigned long pow2;
12284
12285 if (TREE_INT_CST_LOW (arg1))
12286 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12287 else
12288 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12289 + HOST_BITS_PER_WIDE_INT;
12290
12291 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12292 TREE_OPERAND (arg0, 0),
12293 build_int_cst (integer_type_node, pow2));
12294 }
12295 }
12296
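/* Sketch (editor's example, signed 32-bit int), with A == 8:

     int f (int x) { return (x & -8) / 8; }

   folds to x >> 3: x & -8 is always a multiple of 8, so the division is
   exact and agrees with the arithmetic right shift even for negative x.  */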
12297 /* Fall through */
12298
12299 case FLOOR_DIV_EXPR:
12300 /* Simplify A / (B << N) where A and B are positive and B is
12301 a power of 2, to A >> (N + log2(B)). */
12302 strict_overflow_p = false;
12303 if (TREE_CODE (arg1) == LSHIFT_EXPR
12304 && (TYPE_UNSIGNED (type)
12305 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12306 {
12307 tree sval = TREE_OPERAND (arg1, 0);
12308 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12309 {
12310 tree sh_cnt = TREE_OPERAND (arg1, 1);
12311 unsigned long pow2;
12312
12313 if (TREE_INT_CST_LOW (sval))
12314 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12315 else
12316 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12317 + HOST_BITS_PER_WIDE_INT;
12318
12319 if (strict_overflow_p)
12320 fold_overflow_warning (("assuming signed overflow does not "
12321 "occur when simplifying A / (B << N)"),
12322 WARN_STRICT_OVERFLOW_MISC);
12323
12324 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12325 sh_cnt,
12326 build_int_cst (TREE_TYPE (sh_cnt),
12327 pow2));
12328 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12329 fold_convert_loc (loc, type, arg0), sh_cnt);
12330 }
12331 }
12332
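/* For instance (illustrative, unsigned 32-bit int, assuming n keeps the
   shift in range):

     unsigned f (unsigned a, int n) { return a / (4u << n); }

   becomes a >> (n + 2), folding log2 (4) == 2 into the shift count.  */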
12333 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12334 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12335 if (INTEGRAL_TYPE_P (type)
12336 && TYPE_UNSIGNED (type)
12337 && code == FLOOR_DIV_EXPR)
12338 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12339
12340 /* Fall through */
12341
12342 case ROUND_DIV_EXPR:
12343 case CEIL_DIV_EXPR:
12344 case EXACT_DIV_EXPR:
12345 if (integer_onep (arg1))
12346 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12347 if (integer_zerop (arg1))
12348 return NULL_TREE;
12349 /* X / -1 is -X. */
12350 if (!TYPE_UNSIGNED (type)
12351 && TREE_CODE (arg1) == INTEGER_CST
12352 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12353 && TREE_INT_CST_HIGH (arg1) == -1)
12354 return fold_convert_loc (loc, type, negate_expr (arg0));
12355
12356 /* Convert -A / -B to A / B when the type is signed and overflow is
12357 undefined. */
12358 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12359 && TREE_CODE (arg0) == NEGATE_EXPR
12360 && negate_expr_p (arg1))
12361 {
12362 if (INTEGRAL_TYPE_P (type))
12363 fold_overflow_warning (("assuming signed overflow does not occur "
12364 "when distributing negation across "
12365 "division"),
12366 WARN_STRICT_OVERFLOW_MISC);
12367 return fold_build2_loc (loc, code, type,
12368 fold_convert_loc (loc, type,
12369 TREE_OPERAND (arg0, 0)),
12370 fold_convert_loc (loc, type,
12371 negate_expr (arg1)));
12372 }
12373 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12374 && TREE_CODE (arg1) == NEGATE_EXPR
12375 && negate_expr_p (arg0))
12376 {
12377 if (INTEGRAL_TYPE_P (type))
12378 fold_overflow_warning (("assuming signed overflow does not occur "
12379 "when distributing negation across "
12380 "division"),
12381 WARN_STRICT_OVERFLOW_MISC);
12382 return fold_build2_loc (loc, code, type,
12383 fold_convert_loc (loc, type,
12384 negate_expr (arg0)),
12385 fold_convert_loc (loc, type,
12386 TREE_OPERAND (arg1, 0)));
12387 }
12388
12389 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12390 operation, EXACT_DIV_EXPR.
12391
12392 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12393 At one time others generated faster code, but it's not clear whether they
12394 still do after the last round of changes to the DIV code in expmed.c. */
12395 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12396 && multiple_of_p (type, arg0, arg1))
12397 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12398
12399 strict_overflow_p = false;
12400 if (TREE_CODE (arg1) == INTEGER_CST
12401 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12402 &strict_overflow_p)))
12403 {
12404 if (strict_overflow_p)
12405 fold_overflow_warning (("assuming signed overflow does not occur "
12406 "when simplifying division"),
12407 WARN_STRICT_OVERFLOW_MISC);
12408 return fold_convert_loc (loc, type, tem);
12409 }
12410
12411 return NULL_TREE;
12412
12413 case CEIL_MOD_EXPR:
12414 case FLOOR_MOD_EXPR:
12415 case ROUND_MOD_EXPR:
12416 case TRUNC_MOD_EXPR:
12417 /* X % 1 is always zero, but be sure to preserve any side
12418 effects in X. */
12419 if (integer_onep (arg1))
12420 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12421
12422 /* For X % 0, return X % 0 unchanged so that we get the
12423 proper warnings and errors. */
12424 if (integer_zerop (arg1))
12425 return NULL_TREE;
12426
12427 /* 0 % X is always zero, but be sure to preserve any side
12428 effects in X. Place this after checking for X == 0. */
12429 if (integer_zerop (arg0))
12430 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12431
12432 /* X % -1 is zero. */
12433 if (!TYPE_UNSIGNED (type)
12434 && TREE_CODE (arg1) == INTEGER_CST
12435 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12436 && TREE_INT_CST_HIGH (arg1) == -1)
12437 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12438
12439 /* X % -C is the same as X % C. */
12440 if (code == TRUNC_MOD_EXPR
12441 && !TYPE_UNSIGNED (type)
12442 && TREE_CODE (arg1) == INTEGER_CST
12443 && !TREE_OVERFLOW (arg1)
12444 && TREE_INT_CST_HIGH (arg1) < 0
12445 && !TYPE_OVERFLOW_TRAPS (type)
12446 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12447 && !sign_bit_p (arg1, arg1))
12448 return fold_build2_loc (loc, code, type,
12449 fold_convert_loc (loc, type, arg0),
12450 fold_convert_loc (loc, type,
12451 negate_expr (arg1)));
12452
12453 /* X % -Y is the same as X % Y. */
12454 if (code == TRUNC_MOD_EXPR
12455 && !TYPE_UNSIGNED (type)
12456 && TREE_CODE (arg1) == NEGATE_EXPR
12457 && !TYPE_OVERFLOW_TRAPS (type))
12458 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12459 fold_convert_loc (loc, type,
12460 TREE_OPERAND (arg1, 0)));
12461
12462 strict_overflow_p = false;
12463 if (TREE_CODE (arg1) == INTEGER_CST
12464 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12465 &strict_overflow_p)))
12466 {
12467 if (strict_overflow_p)
12468 fold_overflow_warning (("assuming signed overflow does not occur "
12469 "when simplifying modulus"),
12470 WARN_STRICT_OVERFLOW_MISC);
12471 return fold_convert_loc (loc, type, tem);
12472 }
12473
12474 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12475 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12476 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12477 && (TYPE_UNSIGNED (type)
12478 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12479 {
12480 tree c = arg1;
12481 /* Also optimize A % (C << N) where C is a power of 2,
12482 to A & ((C << N) - 1). */
12483 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12484 c = TREE_OPERAND (arg1, 0);
12485
12486 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12487 {
12488 tree mask
12489 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12490 build_int_cst (TREE_TYPE (arg1), 1));
12491 if (strict_overflow_p)
12492 fold_overflow_warning (("assuming signed overflow does not "
12493 "occur when simplifying "
12494 "X % (power of two)"),
12495 WARN_STRICT_OVERFLOW_MISC);
12496 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12497 fold_convert_loc (loc, type, arg0),
12498 fold_convert_loc (loc, type, mask));
12499 }
12500 }
12501
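/* E.g. (illustrative, unsigned arithmetic):

     x % 8          ->  x & 7
     a % (2u << n)  ->  a & ((2u << n) - 1)

   For signed operands the rewrite is only done when X is known to be
   nonnegative, as checked above.  */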
12502 return NULL_TREE;
12503
12504 case LROTATE_EXPR:
12505 case RROTATE_EXPR:
12506 if (integer_all_onesp (arg0))
12507 return omit_one_operand_loc (loc, type, arg0, arg1);
12508 goto shift;
12509
12510 case RSHIFT_EXPR:
12511 /* Optimize -1 >> x for arithmetic right shifts. */
12512 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12513 && tree_expr_nonnegative_p (arg1))
12514 return omit_one_operand_loc (loc, type, arg0, arg1);
12515 /* ... fall through ... */
12516
12517 case LSHIFT_EXPR:
12518 shift:
12519 if (integer_zerop (arg1))
12520 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12521 if (integer_zerop (arg0))
12522 return omit_one_operand_loc (loc, type, arg0, arg1);
12523
12524 /* Prefer vector1 << scalar to vector1 << vector2
12525 if vector2 is uniform. */
12526 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12527 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12528 return fold_build2_loc (loc, code, type, op0, tem);
12529
12530 /* Since a negative shift count is not well-defined,
12531 don't try to compute it in the compiler. */
12532 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12533 return NULL_TREE;
12534
12535 prec = element_precision (type);
12536
12537 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12538 if (TREE_CODE (op0) == code && host_integerp (arg1, true)
12539 && TREE_INT_CST_LOW (arg1) < prec
12540 && host_integerp (TREE_OPERAND (arg0, 1), true)
12541 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12542 {
12543 unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12544 + TREE_INT_CST_LOW (arg1));
12545
12546 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12547 being well defined. */
12548 if (low >= prec)
12549 {
12550 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12551 low = low % prec;
12552 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12553 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12554 TREE_OPERAND (arg0, 0));
12555 else
12556 low = prec - 1;
12557 }
12558
12559 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12560 build_int_cst (TREE_TYPE (arg1), low));
12561 }
12562
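/* Examples (editor's illustration, 32-bit types):

     (x >> 3) >> 5    ->  x >> 8
     (x >> 20) >> 20  ->  0 for unsigned x, since the combined count
                           reaches past the precision;
   for rotates the two counts are simply added modulo the precision.  */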
12563 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12564 into x & ((unsigned)-1 >> c) for unsigned types. */
12565 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12566 || (TYPE_UNSIGNED (type)
12567 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12568 && host_integerp (arg1, false)
12569 && TREE_INT_CST_LOW (arg1) < prec
12570 && host_integerp (TREE_OPERAND (arg0, 1), false)
12571 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12572 {
12573 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12574 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12575 tree lshift;
12576 tree arg00;
12577
12578 if (low0 == low1)
12579 {
12580 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12581
12582 lshift = build_minus_one_cst (type);
12583 lshift = const_binop (code, lshift, arg1);
12584
12585 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12586 }
12587 }
12588
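/* Concretely (illustrative, 32-bit unsigned int):

     (x >> 4) << 4  ->  x & 0xfffffff0     i.e. x & (-1 << 4)
     (x << 4) >> 4  ->  x & 0x0fffffff     i.e. x & (~0u >> 4)  */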
12589 /* Rewrite an LROTATE_EXPR by a constant into an
12590 RROTATE_EXPR by a new constant. */
12591 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12592 {
12593 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12594 tem = const_binop (MINUS_EXPR, tem, arg1);
12595 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12596 }
12597
12598 /* If we have a rotate of a bit operation with the rotate count and
12599 the second operand of the bit operation both constant,
12600 permute the two operations. */
12601 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12602 && (TREE_CODE (arg0) == BIT_AND_EXPR
12603 || TREE_CODE (arg0) == BIT_IOR_EXPR
12604 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12605 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12606 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12607 fold_build2_loc (loc, code, type,
12608 TREE_OPERAND (arg0, 0), arg1),
12609 fold_build2_loc (loc, code, type,
12610 TREE_OPERAND (arg0, 1), arg1));
12611
12612 /* Two consecutive rotates adding up to the precision of the
12613 type can be ignored. */
12614 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12615 && TREE_CODE (arg0) == RROTATE_EXPR
12616 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12617 && TREE_INT_CST_HIGH (arg1) == 0
12618 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12619 && ((TREE_INT_CST_LOW (arg1)
12620 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12621 == prec))
12622 return TREE_OPERAND (arg0, 0);
12623
12624 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12625 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12626 if the latter can be further optimized. */
12627 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12628 && TREE_CODE (arg0) == BIT_AND_EXPR
12629 && TREE_CODE (arg1) == INTEGER_CST
12630 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12631 {
12632 tree mask = fold_build2_loc (loc, code, type,
12633 fold_convert_loc (loc, type,
12634 TREE_OPERAND (arg0, 1)),
12635 arg1);
12636 tree shift = fold_build2_loc (loc, code, type,
12637 fold_convert_loc (loc, type,
12638 TREE_OPERAND (arg0, 0)),
12639 arg1);
12640 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12641 if (tem)
12642 return tem;
12643 }
12644
12645 return NULL_TREE;
12646
12647 case MIN_EXPR:
12648 if (operand_equal_p (arg0, arg1, 0))
12649 return omit_one_operand_loc (loc, type, arg0, arg1);
12650 if (INTEGRAL_TYPE_P (type)
12651 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12652 return omit_one_operand_loc (loc, type, arg1, arg0);
12653 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12654 if (tem)
12655 return tem;
12656 goto associate;
12657
12658 case MAX_EXPR:
12659 if (operand_equal_p (arg0, arg1, 0))
12660 return omit_one_operand_loc (loc, type, arg0, arg1);
12661 if (INTEGRAL_TYPE_P (type)
12662 && TYPE_MAX_VALUE (type)
12663 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12664 return omit_one_operand_loc (loc, type, arg1, arg0);
12665 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12666 if (tem)
12667 return tem;
12668 goto associate;
12669
12670 case TRUTH_ANDIF_EXPR:
12671 /* Note that the operands of this must be ints
12672 and their values must be 0 or 1.
12673 ("true" is a fixed value perhaps depending on the language.) */
12674 /* If first arg is constant zero, return it. */
12675 if (integer_zerop (arg0))
12676 return fold_convert_loc (loc, type, arg0);
12677 case TRUTH_AND_EXPR:
12678 /* If either arg is constant true, drop it. */
12679 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12680 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12681 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12682 /* Preserve sequence points. */
12683 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12684 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12685 /* If second arg is constant zero, result is zero, but first arg
12686 must be evaluated. */
12687 if (integer_zerop (arg1))
12688 return omit_one_operand_loc (loc, type, arg1, arg0);
12689 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12690 case will be handled here. */
12691 if (integer_zerop (arg0))
12692 return omit_one_operand_loc (loc, type, arg0, arg1);
12693
12694 /* !X && X is always false. */
12695 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12696 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12697 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12698 /* X && !X is always false. */
12699 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12700 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12701 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12702
12703 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12704 means A >= Y && A != MAX, but in this case we know that
12705 A < X <= MAX. */
12706
12707 if (!TREE_SIDE_EFFECTS (arg0)
12708 && !TREE_SIDE_EFFECTS (arg1))
12709 {
12710 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12711 if (tem && !operand_equal_p (tem, arg0, 0))
12712 return fold_build2_loc (loc, code, type, tem, arg1);
12713
12714 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12715 if (tem && !operand_equal_p (tem, arg1, 0))
12716 return fold_build2_loc (loc, code, type, arg0, tem);
12717 }
12718
12719 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12720 != NULL_TREE)
12721 return tem;
12722
12723 return NULL_TREE;
12724
12725 case TRUTH_ORIF_EXPR:
12726 /* Note that the operands of this must be ints
12727 and their values must be 0 or 1.
12728 ("true" is a fixed value perhaps depending on the language.) */
12729 /* If first arg is constant true, return it. */
12730 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12731 return fold_convert_loc (loc, type, arg0);
12732 case TRUTH_OR_EXPR:
12733 /* If either arg is constant zero, drop it. */
12734 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12735 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12736 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12737 /* Preserve sequence points. */
12738 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12739 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12740 /* If second arg is constant true, result is true, but we must
12741 evaluate first arg. */
12742 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12743 return omit_one_operand_loc (loc, type, arg1, arg0);
12744 /* Likewise for first arg, but note this only occurs here for
12745 TRUTH_OR_EXPR. */
12746 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12747 return omit_one_operand_loc (loc, type, arg0, arg1);
12748
12749 /* !X || X is always true. */
12750 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12751 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12752 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12753 /* X || !X is always true. */
12754 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12755 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12756 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12757
12758 /* (X && !Y) || (!X && Y) is X ^ Y */
12759 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12760 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12761 {
12762 tree a0, a1, l0, l1, n0, n1;
12763
12764 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12765 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12766
12767 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12768 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12769
12770 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12771 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12772
12773 if ((operand_equal_p (n0, a0, 0)
12774 && operand_equal_p (n1, a1, 0))
12775 || (operand_equal_p (n0, a1, 0)
12776 && operand_equal_p (n1, a0, 0)))
12777 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12778 }
12779
12780 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12781 != NULL_TREE)
12782 return tem;
12783
12784 return NULL_TREE;
12785
12786 case TRUTH_XOR_EXPR:
12787 /* If the second arg is constant zero, drop it. */
12788 if (integer_zerop (arg1))
12789 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12790 /* If the second arg is constant true, this is a logical inversion. */
12791 if (integer_onep (arg1))
12792 {
12793 tem = invert_truthvalue_loc (loc, arg0);
12794 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12795 }
12796 /* Identical arguments cancel to zero. */
12797 if (operand_equal_p (arg0, arg1, 0))
12798 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12799
12800 /* !X ^ X is always true. */
12801 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12802 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12803 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12804
12805 /* X ^ !X is always true. */
12806 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12807 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12808 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12809
12810 return NULL_TREE;
12811
12812 case EQ_EXPR:
12813 case NE_EXPR:
12814 STRIP_NOPS (arg0);
12815 STRIP_NOPS (arg1);
12816
12817 tem = fold_comparison (loc, code, type, op0, op1);
12818 if (tem != NULL_TREE)
12819 return tem;
12820
12821 /* bool_var != 0 becomes bool_var. */
12822 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12823 && code == NE_EXPR)
12824 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12825
12826 /* bool_var == 1 becomes bool_var. */
12827 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12828 && code == EQ_EXPR)
12829 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12830
12831 /* bool_var != 1 becomes !bool_var. */
12832 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12833 && code == NE_EXPR)
12834 return fold_convert_loc (loc, type,
12835 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12836 TREE_TYPE (arg0), arg0));
12837
12838 /* bool_var == 0 becomes !bool_var. */
12839 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12840 && code == EQ_EXPR)
12841 return fold_convert_loc (loc, type,
12842 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12843 TREE_TYPE (arg0), arg0));
12844
12845 /* !exp != 0 becomes !exp. */
12846 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12847 && code == NE_EXPR)
12848 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12849
12850 /* If this is an equality comparison of the address of two non-weak,
12851 unaliased symbols, neither of which is extern (since we do not
12852 have access to attributes for externs), then we know the result. */
12853 if (TREE_CODE (arg0) == ADDR_EXPR
12854 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12855 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12856 && ! lookup_attribute ("alias",
12857 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12858 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12859 && TREE_CODE (arg1) == ADDR_EXPR
12860 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12861 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12862 && ! lookup_attribute ("alias",
12863 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12864 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12865 {
12866 /* We know that we're looking at the address of two
12867 non-weak, unaliased, static _DECL nodes.
12868
12869 It is both wasteful and incorrect to call operand_equal_p
12870 to compare the two ADDR_EXPR nodes. It is wasteful in that
12871 all we need to do is test pointer equality for the arguments
12872 to the two ADDR_EXPR nodes. It is incorrect to use
12873 operand_equal_p as that function is NOT equivalent to a
12874 C equality test. It can in fact return false for two
12875 objects which would test as equal using the C equality
12876 operator. */
12877 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12878 return constant_boolean_node (equal
12879 ? code == EQ_EXPR : code != EQ_EXPR,
12880 type);
12881 }
12882
12883 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12884 a MINUS_EXPR of a constant, we can convert it into a comparison with
12885 a revised constant as long as no overflow occurs. */
12886 if (TREE_CODE (arg1) == INTEGER_CST
12887 && (TREE_CODE (arg0) == PLUS_EXPR
12888 || TREE_CODE (arg0) == MINUS_EXPR)
12889 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12890 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12891 ? MINUS_EXPR : PLUS_EXPR,
12892 fold_convert_loc (loc, TREE_TYPE (arg0),
12893 arg1),
12894 TREE_OPERAND (arg0, 1)))
12895 && !TREE_OVERFLOW (tem))
12896 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12897
12898 /* Similarly for a NEGATE_EXPR. */
12899 if (TREE_CODE (arg0) == NEGATE_EXPR
12900 && TREE_CODE (arg1) == INTEGER_CST
12901 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12902 arg1)))
12903 && TREE_CODE (tem) == INTEGER_CST
12904 && !TREE_OVERFLOW (tem))
12905 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12906
12907 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12908 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12909 && TREE_CODE (arg1) == INTEGER_CST
12910 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12911 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12912 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12913 fold_convert_loc (loc,
12914 TREE_TYPE (arg0),
12915 arg1),
12916 TREE_OPERAND (arg0, 1)));
12917
12918 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12919 if ((TREE_CODE (arg0) == PLUS_EXPR
12920 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12921 || TREE_CODE (arg0) == MINUS_EXPR)
12922 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12923 0)),
12924 arg1, 0)
12925 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12926 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12927 {
12928 tree val = TREE_OPERAND (arg0, 1);
12929 return omit_two_operands_loc (loc, type,
12930 fold_build2_loc (loc, code, type,
12931 val,
12932 build_int_cst (TREE_TYPE (val),
12933 0)),
12934 TREE_OPERAND (arg0, 0), arg1);
12935 }
12936
12937 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12938 if (TREE_CODE (arg0) == MINUS_EXPR
12939 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12940 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12941 1)),
12942 arg1, 0)
12943 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12944 {
12945 return omit_two_operands_loc (loc, type,
12946 code == NE_EXPR
12947 ? boolean_true_node : boolean_false_node,
12948 TREE_OPERAND (arg0, 1), arg1);
12949 }
12950
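/* Example (editor's illustration, 32-bit int):

     (7 - x) == x

   folds to false (and != to true): it would require 2 * x == 7, but
   2 * x is even modulo 2^32 while 7 is odd, so no x satisfies it,
   with or without wraparound.  */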
12951 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12952 for !=. Don't do this for ordered comparisons due to overflow. */
12953 if (TREE_CODE (arg0) == MINUS_EXPR
12954 && integer_zerop (arg1))
12955 return fold_build2_loc (loc, code, type,
12956 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12957
12958 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12959 if (TREE_CODE (arg0) == ABS_EXPR
12960 && (integer_zerop (arg1) || real_zerop (arg1)))
12961 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12962
12963 /* If this is an EQ or NE comparison with zero and ARG0 is
12964 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12965 two operations, but the latter can be done in one less insn
12966 on machines that have only two-operand insns or on which a
12967 constant cannot be the first operand. */
12968 if (TREE_CODE (arg0) == BIT_AND_EXPR
12969 && integer_zerop (arg1))
12970 {
12971 tree arg00 = TREE_OPERAND (arg0, 0);
12972 tree arg01 = TREE_OPERAND (arg0, 1);
12973 if (TREE_CODE (arg00) == LSHIFT_EXPR
12974 && integer_onep (TREE_OPERAND (arg00, 0)))
12975 {
12976 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12977 arg01, TREE_OPERAND (arg00, 1));
12978 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12979 build_int_cst (TREE_TYPE (arg0), 1));
12980 return fold_build2_loc (loc, code, type,
12981 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12982 arg1);
12983 }
12984 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12985 && integer_onep (TREE_OPERAND (arg01, 0)))
12986 {
12987 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12988 arg00, TREE_OPERAND (arg01, 1));
12989 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12990 build_int_cst (TREE_TYPE (arg0), 1));
12991 return fold_build2_loc (loc, code, type,
12992 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12993 arg1);
12994 }
12995 }
12996
12997 /* If this is an NE or EQ comparison of zero against the result of a
12998 signed MOD operation whose second operand is a power of 2, make
12999 the MOD operation unsigned since it is simpler and equivalent. */
13000 if (integer_zerop (arg1)
13001 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13002 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13003 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13004 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13005 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13006 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13007 {
13008 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13009 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13010 fold_convert_loc (loc, newtype,
13011 TREE_OPERAND (arg0, 0)),
13012 fold_convert_loc (loc, newtype,
13013 TREE_OPERAND (arg0, 1)));
13014
13015 return fold_build2_loc (loc, code, type, newmod,
13016 fold_convert_loc (loc, newtype, arg1));
13017 }
13018
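/* E.g. (illustrative, 32-bit int):

     (int) x % 4 == 0  ->  (unsigned) x % 4 == 0

   which in turn folds to (x & 3) == 0; the two tests agree because
   2^32 is itself divisible by 4.  */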
13019 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13020 C1 is a valid shift constant, and C2 is a power of two, i.e.
13021 a single bit. */
13022 if (TREE_CODE (arg0) == BIT_AND_EXPR
13023 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13024 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13025 == INTEGER_CST
13026 && integer_pow2p (TREE_OPERAND (arg0, 1))
13027 && integer_zerop (arg1))
13028 {
13029 tree itype = TREE_TYPE (arg0);
13030 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13031 prec = TYPE_PRECISION (itype);
13032
13033 /* Check for a valid shift count. */
13034 if (TREE_INT_CST_HIGH (arg001) == 0
13035 && TREE_INT_CST_LOW (arg001) < prec)
13036 {
13037 tree arg01 = TREE_OPERAND (arg0, 1);
13038 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13039 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13040 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13041 can be rewritten as (X & (C2 << C1)) != 0. */
13042 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13043 {
13044 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13045 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13046 return fold_build2_loc (loc, code, type, tem,
13047 fold_convert_loc (loc, itype, arg1));
13048 }
13049 /* Otherwise, for signed (arithmetic) shifts,
13050 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13051 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13052 else if (!TYPE_UNSIGNED (itype))
13053 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13054 arg000, build_int_cst (itype, 0));
13055 /* Otherwise, for unsigned (logical) shifts,
13056 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13057 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13058 else
13059 return omit_one_operand_loc (loc, type,
13060 code == EQ_EXPR ? integer_one_node
13061 : integer_zero_node,
13062 arg000);
13063 }
13064 }
13065
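/* Worked instances (editor's examples, 32-bit int):

     ((x >> 3) & 4) != 0    ->  (x & 32) != 0   since 4 << 3 == 32 stays
                                                 within the precision;
     ((x >> 30) & 4) != 0   ->  x < 0           for signed x, the tested
                                                 bit is a copy of the sign
                                                 bit;
     ((x >> 30) & 4) != 0   ->  (x, false)      for unsigned x, the tested
                                                 bit is always zero.  */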
13066 /* If we have (A & C) == C where C is a power of 2, convert this into
13067 (A & C) != 0. Similarly for NE_EXPR. */
13068 if (TREE_CODE (arg0) == BIT_AND_EXPR
13069 && integer_pow2p (TREE_OPERAND (arg0, 1))
13070 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13071 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13072 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13073 integer_zero_node));
13074
13075 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13076 bit, then fold the expression into A < 0 or A >= 0. */
13077 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13078 if (tem)
13079 return tem;
13080
13081 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13082 Similarly for NE_EXPR. */
13083 if (TREE_CODE (arg0) == BIT_AND_EXPR
13084 && TREE_CODE (arg1) == INTEGER_CST
13085 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13086 {
13087 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13088 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13089 TREE_OPERAND (arg0, 1));
13090 tree dandnotc
13091 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13092 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13093 notc);
13094 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13095 if (integer_nonzerop (dandnotc))
13096 return omit_one_operand_loc (loc, type, rslt, arg0);
13097 }
13098
13099 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13100 Similarly for NE_EXPR. */
13101 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13102 && TREE_CODE (arg1) == INTEGER_CST
13103 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13104 {
13105 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13106 tree candnotd
13107 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13108 TREE_OPERAND (arg0, 1),
13109 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13110 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13111 if (integer_nonzerop (candnotd))
13112 return omit_one_operand_loc (loc, type, rslt, arg0);
13113 }
13114
13115 /* If this is a comparison of a field, we may be able to simplify it. */
13116 if ((TREE_CODE (arg0) == COMPONENT_REF
13117 || TREE_CODE (arg0) == BIT_FIELD_REF)
13118 /* Handle the constant case even without -O
13119 to make sure the warnings are given. */
13120 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13121 {
13122 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13123 if (t1)
13124 return t1;
13125 }
13126
13127 /* Optimize comparisons of strlen vs zero to a compare of the
13128 first character of the string vs zero. To wit,
13129 strlen(ptr) == 0 => *ptr == 0
13130 strlen(ptr) != 0 => *ptr != 0
13131 Other cases should reduce to one of these two (or a constant)
13132 due to the return value of strlen being unsigned. */
13133 if (TREE_CODE (arg0) == CALL_EXPR
13134 && integer_zerop (arg1))
13135 {
13136 tree fndecl = get_callee_fndecl (arg0);
13137
13138 if (fndecl
13139 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13140 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13141 && call_expr_nargs (arg0) == 1
13142 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13143 {
13144 tree iref = build_fold_indirect_ref_loc (loc,
13145 CALL_EXPR_ARG (arg0, 0));
13146 return fold_build2_loc (loc, code, type, iref,
13147 build_int_cst (TREE_TYPE (iref), 0));
13148 }
13149 }
13150
13151 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13152 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13153 if (TREE_CODE (arg0) == RSHIFT_EXPR
13154 && integer_zerop (arg1)
13155 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13156 {
13157 tree arg00 = TREE_OPERAND (arg0, 0);
13158 tree arg01 = TREE_OPERAND (arg0, 1);
13159 tree itype = TREE_TYPE (arg00);
13160 if (TREE_INT_CST_HIGH (arg01) == 0
13161 && TREE_INT_CST_LOW (arg01)
13162 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13163 {
13164 if (TYPE_UNSIGNED (itype))
13165 {
13166 itype = signed_type_for (itype);
13167 arg00 = fold_convert_loc (loc, itype, arg00);
13168 }
13169 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13170 type, arg00, build_zero_cst (itype));
13171 }
13172 }
13173
13174 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13175 if (integer_zerop (arg1)
13176 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13177 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13178 TREE_OPERAND (arg0, 1));
13179
13180 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13181 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13182 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13183 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13184 build_zero_cst (TREE_TYPE (arg0)));
13185 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13186 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13187 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13188 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13189 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13190 build_zero_cst (TREE_TYPE (arg0)));
13191
13192 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13193 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13194 && TREE_CODE (arg1) == INTEGER_CST
13195 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13196 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13197 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13198 TREE_OPERAND (arg0, 1), arg1));
13199
13200 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13201 (X & C) == 0 when C is a single bit. */
13202 if (TREE_CODE (arg0) == BIT_AND_EXPR
13203 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13204 && integer_zerop (arg1)
13205 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13206 {
13207 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13208 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13209 TREE_OPERAND (arg0, 1));
13210 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13211 type, tem,
13212 fold_convert_loc (loc, TREE_TYPE (arg0),
13213 arg1));
13214 }
13215
13216 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13217 constant C is a power of two, i.e. a single bit. */
13218 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13219 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13220 && integer_zerop (arg1)
13221 && integer_pow2p (TREE_OPERAND (arg0, 1))
13222 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13223 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13224 {
13225 tree arg00 = TREE_OPERAND (arg0, 0);
13226 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13227 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13228 }
13229
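/* E.g. with C == 8 (illustrative):

     ((x & 8) ^ 8) == 0  ->  (x & 8) != 0

   XORing with the single bit C flips exactly that bit, so comparing the
   result against zero inverts the sense of the bit test; the mirrored
   form ((X ^ C) & C) is handled just below.  */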
13230 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13231 when C is a power of two, i.e. a single bit. */
13232 if (TREE_CODE (arg0) == BIT_AND_EXPR
13233 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13234 && integer_zerop (arg1)
13235 && integer_pow2p (TREE_OPERAND (arg0, 1))
13236 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13237 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13238 {
13239 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13240 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13241 arg000, TREE_OPERAND (arg0, 1));
13242 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13243 tem, build_int_cst (TREE_TYPE (tem), 0));
13244 }
13245
13246 if (integer_zerop (arg1)
13247 && tree_expr_nonzero_p (arg0))
13248 {
13249 tree res = constant_boolean_node (code == NE_EXPR, type);
13250 return omit_one_operand_loc (loc, type, res, arg0);
13251 }
13252
13253 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13254 if (TREE_CODE (arg0) == NEGATE_EXPR
13255 && TREE_CODE (arg1) == NEGATE_EXPR)
13256 return fold_build2_loc (loc, code, type,
13257 TREE_OPERAND (arg0, 0),
13258 fold_convert_loc (loc, TREE_TYPE (arg0),
13259 TREE_OPERAND (arg1, 0)));
13260
13261 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
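/* E.g. (X & M) == (Y & M) becomes ((X ^ Y) & M) == 0, using the
   identity (X & M) ^ (Y & M) == (X ^ Y) & M.  */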
13262 if (TREE_CODE (arg0) == BIT_AND_EXPR
13263 && TREE_CODE (arg1) == BIT_AND_EXPR)
13264 {
13265 tree arg00 = TREE_OPERAND (arg0, 0);
13266 tree arg01 = TREE_OPERAND (arg0, 1);
13267 tree arg10 = TREE_OPERAND (arg1, 0);
13268 tree arg11 = TREE_OPERAND (arg1, 1);
13269 tree itype = TREE_TYPE (arg0);
13270
13271 if (operand_equal_p (arg01, arg11, 0))
13272 return fold_build2_loc (loc, code, type,
13273 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13274 fold_build2_loc (loc,
13275 BIT_XOR_EXPR, itype,
13276 arg00, arg10),
13277 arg01),
13278 build_zero_cst (itype));
13279
13280 if (operand_equal_p (arg01, arg10, 0))
13281 return fold_build2_loc (loc, code, type,
13282 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13283 fold_build2_loc (loc,
13284 BIT_XOR_EXPR, itype,
13285 arg00, arg11),
13286 arg01),
13287 build_zero_cst (itype));
13288
13289 if (operand_equal_p (arg00, arg11, 0))
13290 return fold_build2_loc (loc, code, type,
13291 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13292 fold_build2_loc (loc,
13293 BIT_XOR_EXPR, itype,
13294 arg01, arg10),
13295 arg00),
13296 build_zero_cst (itype));
13297
13298 if (operand_equal_p (arg00, arg10, 0))
13299 return fold_build2_loc (loc, code, type,
13300 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13301 fold_build2_loc (loc,
13302 BIT_XOR_EXPR, itype,
13303 arg01, arg11),
13304 arg00),
13305 build_zero_cst (itype));
13306 }
13307
13308 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13309 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13310 {
13311 tree arg00 = TREE_OPERAND (arg0, 0);
13312 tree arg01 = TREE_OPERAND (arg0, 1);
13313 tree arg10 = TREE_OPERAND (arg1, 0);
13314 tree arg11 = TREE_OPERAND (arg1, 1);
13315 tree itype = TREE_TYPE (arg0);
13316
13317 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13318 operand_equal_p guarantees no side-effects so we don't need
13319 to use omit_one_operand on Z. */
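/* E.g. (X ^ Z) == (Y ^ Z) becomes X == Y, since XOR-ing both sides
   with Z cancels it out.  */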
13320 if (operand_equal_p (arg01, arg11, 0))
13321 return fold_build2_loc (loc, code, type, arg00,
13322 fold_convert_loc (loc, TREE_TYPE (arg00),
13323 arg10));
13324 if (operand_equal_p (arg01, arg10, 0))
13325 return fold_build2_loc (loc, code, type, arg00,
13326 fold_convert_loc (loc, TREE_TYPE (arg00),
13327 arg11));
13328 if (operand_equal_p (arg00, arg11, 0))
13329 return fold_build2_loc (loc, code, type, arg01,
13330 fold_convert_loc (loc, TREE_TYPE (arg01),
13331 arg10));
13332 if (operand_equal_p (arg00, arg10, 0))
13333 return fold_build2_loc (loc, code, type, arg01,
13334 fold_convert_loc (loc, TREE_TYPE (arg01),
13335 arg11));
13336
13337 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
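/* E.g. (X ^ 1) == (Y ^ 2) becomes (X ^ 3) == Y.  */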
13338 if (TREE_CODE (arg01) == INTEGER_CST
13339 && TREE_CODE (arg11) == INTEGER_CST)
13340 {
13341 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13342 fold_convert_loc (loc, itype, arg11));
13343 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13344 return fold_build2_loc (loc, code, type, tem,
13345 fold_convert_loc (loc, itype, arg10));
13346 }
13347 }
13348
13349 /* Attempt to simplify equality/inequality comparisons of complex
13350 values. Only lower the comparison if the result is known or
13351 can be simplified to a single scalar comparison. */
13352 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13353 || TREE_CODE (arg0) == COMPLEX_CST)
13354 && (TREE_CODE (arg1) == COMPLEX_EXPR
13355 || TREE_CODE (arg1) == COMPLEX_CST))
13356 {
13357 tree real0, imag0, real1, imag1;
13358 tree rcond, icond;
13359
13360 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13361 {
13362 real0 = TREE_OPERAND (arg0, 0);
13363 imag0 = TREE_OPERAND (arg0, 1);
13364 }
13365 else
13366 {
13367 real0 = TREE_REALPART (arg0);
13368 imag0 = TREE_IMAGPART (arg0);
13369 }
13370
13371 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13372 {
13373 real1 = TREE_OPERAND (arg1, 0);
13374 imag1 = TREE_OPERAND (arg1, 1);
13375 }
13376 else
13377 {
13378 real1 = TREE_REALPART (arg1);
13379 imag1 = TREE_IMAGPART (arg1);
13380 }
13381
13382 rcond = fold_binary_loc (loc, code, type, real0, real1);
13383 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13384 {
13385 if (integer_zerop (rcond))
13386 {
13387 if (code == EQ_EXPR)
13388 return omit_two_operands_loc (loc, type, boolean_false_node,
13389 imag0, imag1);
13390 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13391 }
13392 else
13393 {
13394 if (code == NE_EXPR)
13395 return omit_two_operands_loc (loc, type, boolean_true_node,
13396 imag0, imag1);
13397 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13398 }
13399 }
13400
13401 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13402 if (icond && TREE_CODE (icond) == INTEGER_CST)
13403 {
13404 if (integer_zerop (icond))
13405 {
13406 if (code == EQ_EXPR)
13407 return omit_two_operands_loc (loc, type, boolean_false_node,
13408 real0, real1);
13409 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13410 }
13411 else
13412 {
13413 if (code == NE_EXPR)
13414 return omit_two_operands_loc (loc, type, boolean_true_node,
13415 real0, real1);
13416 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13417 }
13418 }
13419 }
13420
13421 return NULL_TREE;
13422
13423 case LT_EXPR:
13424 case GT_EXPR:
13425 case LE_EXPR:
13426 case GE_EXPR:
13427 tem = fold_comparison (loc, code, type, op0, op1);
13428 if (tem != NULL_TREE)
13429 return tem;
13430
13431 /* Transform comparisons of the form X +- C CMP X. */
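/* E.g. when signed overflow is undefined, X + 1 > X folds to true
   and X + 1 <= X folds to false; each such folding may emit a
   -Wstrict-overflow warning.  */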
13432 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13433 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13434 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13435 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13436 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13437 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13438 {
13439 tree arg01 = TREE_OPERAND (arg0, 1);
13440 enum tree_code code0 = TREE_CODE (arg0);
13441 int is_positive;
13442
13443 if (TREE_CODE (arg01) == REAL_CST)
13444 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13445 else
13446 is_positive = tree_int_cst_sgn (arg01);
13447
13448 /* (X - c) > X becomes false. */
13449 if (code == GT_EXPR
13450 && ((code0 == MINUS_EXPR && is_positive >= 0)
13451 || (code0 == PLUS_EXPR && is_positive <= 0)))
13452 {
13453 if (TREE_CODE (arg01) == INTEGER_CST
13454 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13455 fold_overflow_warning (("assuming signed overflow does not "
13456 "occur when assuming that (X - c) > X "
13457 "is always false"),
13458 WARN_STRICT_OVERFLOW_ALL);
13459 return constant_boolean_node (0, type);
13460 }
13461
13462 /* Likewise (X + c) < X becomes false. */
13463 if (code == LT_EXPR
13464 && ((code0 == PLUS_EXPR && is_positive >= 0)
13465 || (code0 == MINUS_EXPR && is_positive <= 0)))
13466 {
13467 if (TREE_CODE (arg01) == INTEGER_CST
13468 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13469 fold_overflow_warning (("assuming signed overflow does not "
13470 "occur when assuming that "
13471 "(X + c) < X is always false"),
13472 WARN_STRICT_OVERFLOW_ALL);
13473 return constant_boolean_node (0, type);
13474 }
13475
13476 /* Convert (X - c) <= X to true. */
13477 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13478 && code == LE_EXPR
13479 && ((code0 == MINUS_EXPR && is_positive >= 0)
13480 || (code0 == PLUS_EXPR && is_positive <= 0)))
13481 {
13482 if (TREE_CODE (arg01) == INTEGER_CST
13483 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13484 fold_overflow_warning (("assuming signed overflow does not "
13485 "occur when assuming that "
13486 "(X - c) <= X is always true"),
13487 WARN_STRICT_OVERFLOW_ALL);
13488 return constant_boolean_node (1, type);
13489 }
13490
13491 /* Convert (X + c) >= X to true. */
13492 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13493 && code == GE_EXPR
13494 && ((code0 == PLUS_EXPR && is_positive >= 0)
13495 || (code0 == MINUS_EXPR && is_positive <= 0)))
13496 {
13497 if (TREE_CODE (arg01) == INTEGER_CST
13498 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13499 fold_overflow_warning (("assuming signed overflow does not "
13500 "occur when assuming that "
13501 "(X + c) >= X is always true"),
13502 WARN_STRICT_OVERFLOW_ALL);
13503 return constant_boolean_node (1, type);
13504 }
13505
13506 if (TREE_CODE (arg01) == INTEGER_CST)
13507 {
13508 /* Convert X + c > X and X - c < X to true for integers. */
13509 if (code == GT_EXPR
13510 && ((code0 == PLUS_EXPR && is_positive > 0)
13511 || (code0 == MINUS_EXPR && is_positive < 0)))
13512 {
13513 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13514 fold_overflow_warning (("assuming signed overflow does "
13515 "not occur when assuming that "
13516 "(X + c) > X is always true"),
13517 WARN_STRICT_OVERFLOW_ALL);
13518 return constant_boolean_node (1, type);
13519 }
13520
13521 if (code == LT_EXPR
13522 && ((code0 == MINUS_EXPR && is_positive > 0)
13523 || (code0 == PLUS_EXPR && is_positive < 0)))
13524 {
13525 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13526 fold_overflow_warning (("assuming signed overflow does "
13527 "not occur when assuming that "
13528 "(X - c) < X is always true"),
13529 WARN_STRICT_OVERFLOW_ALL);
13530 return constant_boolean_node (1, type);
13531 }
13532
13533 /* Convert X + c <= X and X - c >= X to false for integers. */
13534 if (code == LE_EXPR
13535 && ((code0 == PLUS_EXPR && is_positive > 0)
13536 || (code0 == MINUS_EXPR && is_positive < 0)))
13537 {
13538 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13539 fold_overflow_warning (("assuming signed overflow does "
13540 "not occur when assuming that "
13541 "(X + c) <= X is always false"),
13542 WARN_STRICT_OVERFLOW_ALL);
13543 return constant_boolean_node (0, type);
13544 }
13545
13546 if (code == GE_EXPR
13547 && ((code0 == MINUS_EXPR && is_positive > 0)
13548 || (code0 == PLUS_EXPR && is_positive < 0)))
13549 {
13550 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13551 fold_overflow_warning (("assuming signed overflow does "
13552 "not occur when assuming that "
13553 "(X - c) >= X is always false"),
13554 WARN_STRICT_OVERFLOW_ALL);
13555 return constant_boolean_node (0, type);
13556 }
13557 }
13558 }
13559
13560 /* Comparisons with the highest or lowest possible integer of
13561 the specified precision will have known values. */
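/* E.g. for an 8-bit unsigned X, X > 255 folds to false, X <= 255
   folds to true, and X >= 255 folds to X == 255.  */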
13562 {
13563 tree arg1_type = TREE_TYPE (arg1);
13564 unsigned int width = TYPE_PRECISION (arg1_type);
13565
13566 if (TREE_CODE (arg1) == INTEGER_CST
13567 && width <= HOST_BITS_PER_DOUBLE_INT
13568 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13569 {
13570 HOST_WIDE_INT signed_max_hi;
13571 unsigned HOST_WIDE_INT signed_max_lo;
13572 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13573
13574 if (width <= HOST_BITS_PER_WIDE_INT)
13575 {
13576 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13577 - 1;
13578 signed_max_hi = 0;
13579 max_hi = 0;
13580
13581 if (TYPE_UNSIGNED (arg1_type))
13582 {
13583 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13584 min_lo = 0;
13585 min_hi = 0;
13586 }
13587 else
13588 {
13589 max_lo = signed_max_lo;
13590 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13591 min_hi = -1;
13592 }
13593 }
13594 else
13595 {
13596 width -= HOST_BITS_PER_WIDE_INT;
13597 signed_max_lo = -1;
13598 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13599 - 1;
13600 max_lo = -1;
13601 min_lo = 0;
13602
13603 if (TYPE_UNSIGNED (arg1_type))
13604 {
13605 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13606 min_hi = 0;
13607 }
13608 else
13609 {
13610 max_hi = signed_max_hi;
13611 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13612 }
13613 }
13614
13615 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13616 && TREE_INT_CST_LOW (arg1) == max_lo)
13617 switch (code)
13618 {
13619 case GT_EXPR:
13620 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13621
13622 case GE_EXPR:
13623 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13624
13625 case LE_EXPR:
13626 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13627
13628 case LT_EXPR:
13629 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13630
13631 /* The GE_EXPR and LT_EXPR cases above are not normally
13632 reached because of previous transformations. */
13633
13634 default:
13635 break;
13636 }
13637 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13638 == max_hi
13639 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13640 switch (code)
13641 {
13642 case GT_EXPR:
13643 arg1 = const_binop (PLUS_EXPR, arg1,
13644 build_int_cst (TREE_TYPE (arg1), 1));
13645 return fold_build2_loc (loc, EQ_EXPR, type,
13646 fold_convert_loc (loc,
13647 TREE_TYPE (arg1), arg0),
13648 arg1);
13649 case LE_EXPR:
13650 arg1 = const_binop (PLUS_EXPR, arg1,
13651 build_int_cst (TREE_TYPE (arg1), 1));
13652 return fold_build2_loc (loc, NE_EXPR, type,
13653 fold_convert_loc (loc, TREE_TYPE (arg1),
13654 arg0),
13655 arg1);
13656 default:
13657 break;
13658 }
13659 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13660 == min_hi
13661 && TREE_INT_CST_LOW (arg1) == min_lo)
13662 switch (code)
13663 {
13664 case LT_EXPR:
13665 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13666
13667 case LE_EXPR:
13668 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13669
13670 case GE_EXPR:
13671 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13672
13673 case GT_EXPR:
13674 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13675
13676 default:
13677 break;
13678 }
13679 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13680 == min_hi
13681 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13682 switch (code)
13683 {
13684 case GE_EXPR:
13685 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13686 return fold_build2_loc (loc, NE_EXPR, type,
13687 fold_convert_loc (loc,
13688 TREE_TYPE (arg1), arg0),
13689 arg1);
13690 case LT_EXPR:
13691 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13692 return fold_build2_loc (loc, EQ_EXPR, type,
13693 fold_convert_loc (loc, TREE_TYPE (arg1),
13694 arg0),
13695 arg1);
13696 default:
13697 break;
13698 }
13699
13700 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13701 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13702 && TYPE_UNSIGNED (arg1_type)
13703 /* We will flip the signedness of the comparison operator
13704 associated with the mode of arg1, so the sign bit is
13705 specified by this mode. Check that arg1 is the signed
13706 max associated with this sign bit. */
13707 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13708 /* signed_type does not work on pointer types. */
13709 && INTEGRAL_TYPE_P (arg1_type))
13710 {
13711 /* The following case also applies to X < signed_max+1
13712 and X >= signed_max+1 because of previous transformations. */
13713 if (code == LE_EXPR || code == GT_EXPR)
13714 {
13715 tree st;
13716 st = signed_type_for (TREE_TYPE (arg1));
13717 return fold_build2_loc (loc,
13718 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13719 type, fold_convert_loc (loc, st, arg0),
13720 build_int_cst (st, 0));
13721 }
13722 }
13723 }
13724 }
13725
13726 /* If we are comparing an ABS_EXPR with a constant, we can
13727 convert all the cases into explicit comparisons, but they may
13728 well not be faster than doing the ABS and one comparison.
13729 But ABS (X) <= C is a range comparison, which becomes a subtraction
13730 and a comparison, and is probably faster. */
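/* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5.  */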
13731 if (code == LE_EXPR
13732 && TREE_CODE (arg1) == INTEGER_CST
13733 && TREE_CODE (arg0) == ABS_EXPR
13734 && ! TREE_SIDE_EFFECTS (arg0)
13735 && (0 != (tem = negate_expr (arg1)))
13736 && TREE_CODE (tem) == INTEGER_CST
13737 && !TREE_OVERFLOW (tem))
13738 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13739 build2 (GE_EXPR, type,
13740 TREE_OPERAND (arg0, 0), tem),
13741 build2 (LE_EXPR, type,
13742 TREE_OPERAND (arg0, 0), arg1));
13743
13744 /* Convert ABS_EXPR<x> >= 0 to true. */
13745 strict_overflow_p = false;
13746 if (code == GE_EXPR
13747 && (integer_zerop (arg1)
13748 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13749 && real_zerop (arg1)))
13750 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13751 {
13752 if (strict_overflow_p)
13753 fold_overflow_warning (("assuming signed overflow does not occur "
13754 "when simplifying comparison of "
13755 "absolute value and zero"),
13756 WARN_STRICT_OVERFLOW_CONDITIONAL);
13757 return omit_one_operand_loc (loc, type,
13758 constant_boolean_node (true, type),
13759 arg0);
13760 }
13761
13762 /* Convert ABS_EXPR<x> < 0 to false. */
13763 strict_overflow_p = false;
13764 if (code == LT_EXPR
13765 && (integer_zerop (arg1) || real_zerop (arg1))
13766 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13767 {
13768 if (strict_overflow_p)
13769 fold_overflow_warning (("assuming signed overflow does not occur "
13770 "when simplifying comparison of "
13771 "absolute value and zero"),
13772 WARN_STRICT_OVERFLOW_CONDITIONAL);
13773 return omit_one_operand_loc (loc, type,
13774 constant_boolean_node (false, type),
13775 arg0);
13776 }
13777
13778 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13779 and similarly for >= into !=. */
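/* This holds because, for unsigned X, X < (1 << Y) says exactly
   that all bits of X at position Y and above are zero.  */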
13780 if ((code == LT_EXPR || code == GE_EXPR)
13781 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13782 && TREE_CODE (arg1) == LSHIFT_EXPR
13783 && integer_onep (TREE_OPERAND (arg1, 0)))
13784 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13785 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13786 TREE_OPERAND (arg1, 1)),
13787 build_zero_cst (TREE_TYPE (arg0)));
13788
13789 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13790 otherwise Y might be >= # of bits in X's type and thus e.g.
13791 (unsigned char) (1 << Y) for Y == 15 might be 0.
13792 If the cast is widening, then 1 << Y should have unsigned type,
13793 otherwise if Y is number of bits in the signed shift type minus 1,
13794 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13795 Y == 31 might be 0xffffffff80000000. */
13796 if ((code == LT_EXPR || code == GE_EXPR)
13797 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13798 && CONVERT_EXPR_P (arg1)
13799 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13800 && (TYPE_PRECISION (TREE_TYPE (arg1))
13801 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13802 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13803 || (TYPE_PRECISION (TREE_TYPE (arg1))
13804 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13805 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13806 {
13807 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13808 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13809 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13810 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13811 build_zero_cst (TREE_TYPE (arg0)));
13812 }
13813
13814 return NULL_TREE;
13815
13816 case UNORDERED_EXPR:
13817 case ORDERED_EXPR:
13818 case UNLT_EXPR:
13819 case UNLE_EXPR:
13820 case UNGT_EXPR:
13821 case UNGE_EXPR:
13822 case UNEQ_EXPR:
13823 case LTGT_EXPR:
13824 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13825 {
13826 t1 = fold_relational_const (code, type, arg0, arg1);
13827 if (t1 != NULL_TREE)
13828 return t1;
13829 }
13830
13831 /* If the first operand is NaN, the result is constant. */
13832 if (TREE_CODE (arg0) == REAL_CST
13833 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13834 && (code != LTGT_EXPR || ! flag_trapping_math))
13835 {
13836 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13837 ? integer_zero_node
13838 : integer_one_node;
13839 return omit_one_operand_loc (loc, type, t1, arg1);
13840 }
13841
13842 /* If the second operand is NaN, the result is constant. */
13843 if (TREE_CODE (arg1) == REAL_CST
13844 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13845 && (code != LTGT_EXPR || ! flag_trapping_math))
13846 {
13847 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13848 ? integer_zero_node
13849 : integer_one_node;
13850 return omit_one_operand_loc (loc, type, t1, arg0);
13851 }
13852
13853 /* Simplify unordered comparison of something with itself. */
13854 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13855 && operand_equal_p (arg0, arg1, 0))
13856 return constant_boolean_node (1, type);
13857
13858 if (code == LTGT_EXPR
13859 && !flag_trapping_math
13860 && operand_equal_p (arg0, arg1, 0))
13861 return constant_boolean_node (0, type);
13862
13863 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13864 {
13865 tree targ0 = strip_float_extensions (arg0);
13866 tree targ1 = strip_float_extensions (arg1);
13867 tree newtype = TREE_TYPE (targ0);
13868
13869 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13870 newtype = TREE_TYPE (targ1);
13871
13872 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13873 return fold_build2_loc (loc, code, type,
13874 fold_convert_loc (loc, newtype, targ0),
13875 fold_convert_loc (loc, newtype, targ1));
13876 }
13877
13878 return NULL_TREE;
13879
13880 case COMPOUND_EXPR:
13881 /* When pedantic, a compound expression can be neither an lvalue
13882 nor an integer constant expression. */
13883 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13884 return NULL_TREE;
13885 /* Don't let (0, 0) be a null pointer constant. */
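/* The NOP_EXPR wrapper keeps the result from being an integer
   constant expression; in C a null pointer constant must be such
   an expression, which (0, 0) is not.  */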
13886 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13887 : fold_convert_loc (loc, type, arg1);
13888 return pedantic_non_lvalue_loc (loc, tem);
13889
13890 case COMPLEX_EXPR:
13891 if ((TREE_CODE (arg0) == REAL_CST
13892 && TREE_CODE (arg1) == REAL_CST)
13893 || (TREE_CODE (arg0) == INTEGER_CST
13894 && TREE_CODE (arg1) == INTEGER_CST))
13895 return build_complex (type, arg0, arg1);
13896 if (TREE_CODE (arg0) == REALPART_EXPR
13897 && TREE_CODE (arg1) == IMAGPART_EXPR
13898 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13899 && operand_equal_p (TREE_OPERAND (arg0, 0),
13900 TREE_OPERAND (arg1, 0), 0))
13901 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13902 TREE_OPERAND (arg1, 0));
13903 return NULL_TREE;
13904
13905 case ASSERT_EXPR:
13906 /* An ASSERT_EXPR should never be passed to fold_binary. */
13907 gcc_unreachable ();
13908
13909 case VEC_PACK_TRUNC_EXPR:
13910 case VEC_PACK_FIX_TRUNC_EXPR:
13911 {
13912 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13913 tree *elts;
13914
13915 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13916 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13917 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13918 return NULL_TREE;
13919
13920 elts = XALLOCAVEC (tree, nelts);
13921 if (!vec_cst_ctor_to_array (arg0, elts)
13922 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13923 return NULL_TREE;
13924
13925 for (i = 0; i < nelts; i++)
13926 {
13927 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13928 ? NOP_EXPR : FIX_TRUNC_EXPR,
13929 TREE_TYPE (type), elts[i]);
13930 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13931 return NULL_TREE;
13932 }
13933
13934 return build_vector (type, elts);
13935 }
13936
13937 case VEC_WIDEN_MULT_LO_EXPR:
13938 case VEC_WIDEN_MULT_HI_EXPR:
13939 case VEC_WIDEN_MULT_EVEN_EXPR:
13940 case VEC_WIDEN_MULT_ODD_EXPR:
13941 {
13942 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13943 unsigned int out, ofs, scale;
13944 tree *elts;
13945
13946 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13947 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13948 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13949 return NULL_TREE;
13950
13951 elts = XALLOCAVEC (tree, nelts * 4);
13952 if (!vec_cst_ctor_to_array (arg0, elts)
13953 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13954 return NULL_TREE;
13955
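/* LO/HI select the low or high numbered half of each double-length
   input vector (which half is endian-dependent); EVEN/ODD select
   every second element starting at index 0 or 1.  */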
13956 if (code == VEC_WIDEN_MULT_LO_EXPR)
13957 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13958 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13959 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13960 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13961 scale = 1, ofs = 0;
13962 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13963 scale = 1, ofs = 1;
13964
13965 for (out = 0; out < nelts; out++)
13966 {
13967 unsigned int in1 = (out << scale) + ofs;
13968 unsigned int in2 = in1 + nelts * 2;
13969 tree t1, t2;
13970
13971 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13972 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13973
13974 if (t1 == NULL_TREE || t2 == NULL_TREE)
13975 return NULL_TREE;
13976 elts[out] = const_binop (MULT_EXPR, t1, t2);
13977 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13978 return NULL_TREE;
13979 }
13980
13981 return build_vector (type, elts);
13982 }
13983
13984 default:
13985 return NULL_TREE;
13986 } /* switch (code) */
13987 }
13988
13989 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13990 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13991 of GOTO_EXPR. */
13992
13993 static tree
13994 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13995 {
13996 switch (TREE_CODE (*tp))
13997 {
13998 case LABEL_EXPR:
13999 return *tp;
14000
14001 case GOTO_EXPR:
14002 *walk_subtrees = 0;
14003
14004 /* ... fall through ... */
14005
14006 default:
14007 return NULL_TREE;
14008 }
14009 }
14010
14011 /* Return whether the sub-tree ST contains a label which is accessible from
14012 outside the sub-tree. */
14013
14014 static bool
14015 contains_label_p (tree st)
14016 {
14017 return
14018 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
14019 }
14020
14021 /* Fold a ternary expression of code CODE and type TYPE with operands
14022 OP0, OP1, and OP2. Return the folded expression if folding is
14023 successful. Otherwise, return NULL_TREE. */
14024
14025 tree
14026 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14027 tree op0, tree op1, tree op2)
14028 {
14029 tree tem;
14030 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14031 enum tree_code_class kind = TREE_CODE_CLASS (code);
14032
14033 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14034 && TREE_CODE_LENGTH (code) == 3);
14035
14036 /* Strip any conversions that don't change the mode. This is safe
14037 for every expression, except for a comparison expression because
14038 its signedness is derived from its operands. So, in the latter
14039 case, only strip conversions that don't change the signedness.
14040
14041 Note that this is done as an internal manipulation within the
14042 constant folder, in order to find the simplest representation of
14043 the arguments so that their form can be studied. In any case,
14044 the appropriate type conversions should be put back in the tree
14045 that will get out of the constant folder. */
14046 if (op0)
14047 {
14048 arg0 = op0;
14049 STRIP_NOPS (arg0);
14050 }
14051
14052 if (op1)
14053 {
14054 arg1 = op1;
14055 STRIP_NOPS (arg1);
14056 }
14057
14058 if (op2)
14059 {
14060 arg2 = op2;
14061 STRIP_NOPS (arg2);
14062 }
14063
14064 switch (code)
14065 {
14066 case COMPONENT_REF:
14067 if (TREE_CODE (arg0) == CONSTRUCTOR
14068 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14069 {
14070 unsigned HOST_WIDE_INT idx;
14071 tree field, value;
14072 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14073 if (field == arg1)
14074 return value;
14075 }
14076 return NULL_TREE;
14077
14078 case COND_EXPR:
14079 case VEC_COND_EXPR:
14080 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14081 so all simple results must be passed through pedantic_non_lvalue. */
14082 if (TREE_CODE (arg0) == INTEGER_CST)
14083 {
14084 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14085 tem = integer_zerop (arg0) ? op2 : op1;
14086 /* Only optimize constant conditions when the selected branch
14087 has the same type as the COND_EXPR. This avoids optimizing
14088 away "c ? x : throw", where the throw has a void type.
14089 Also avoid throwing away an operand that contains a label. */
14090 if ((!TREE_SIDE_EFFECTS (unused_op)
14091 || !contains_label_p (unused_op))
14092 && (! VOID_TYPE_P (TREE_TYPE (tem))
14093 || VOID_TYPE_P (type)))
14094 return pedantic_non_lvalue_loc (loc, tem);
14095 return NULL_TREE;
14096 }
14097 else if (TREE_CODE (arg0) == VECTOR_CST)
14098 {
14099 if (integer_all_onesp (arg0))
14100 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14101 if (integer_zerop (arg0))
14102 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14103
14104 if ((TREE_CODE (arg1) == VECTOR_CST
14105 || TREE_CODE (arg1) == CONSTRUCTOR)
14106 && (TREE_CODE (arg2) == VECTOR_CST
14107 || TREE_CODE (arg2) == CONSTRUCTOR))
14108 {
14109 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14110 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14111 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14112 for (i = 0; i < nelts; i++)
14113 {
14114 tree val = VECTOR_CST_ELT (arg0, i);
14115 if (integer_all_onesp (val))
14116 sel[i] = i;
14117 else if (integer_zerop (val))
14118 sel[i] = nelts + i;
14119 else /* Currently unreachable. */
14120 return NULL_TREE;
14121 }
14122 tree t = fold_vec_perm (type, arg1, arg2, sel);
14123 if (t != NULL_TREE)
14124 return t;
14125 }
14126 }
14127
14128 if (operand_equal_p (arg1, op2, 0))
14129 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14130
14131 /* If we have A op B ? A : C, we may be able to convert this to a
14132 simpler expression, depending on the operation and the values
14133 of B and C. Signed zeros prevent all of these transformations,
14134 for reasons given above each one.
14135
14136 Also try swapping the arguments and inverting the conditional. */
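/* For instance, fold_cond_expr_with_comparison can turn
   a > b ? a : b into MAX_EXPR <a, b>.  */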
14137 if (COMPARISON_CLASS_P (arg0)
14138 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14139 arg1, TREE_OPERAND (arg0, 1))
14140 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14141 {
14142 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14143 if (tem)
14144 return tem;
14145 }
14146
14147 if (COMPARISON_CLASS_P (arg0)
14148 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14149 op2,
14150 TREE_OPERAND (arg0, 1))
14151 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14152 {
14153 location_t loc0 = expr_location_or (arg0, loc);
14154 tem = fold_invert_truthvalue (loc0, arg0);
14155 if (tem && COMPARISON_CLASS_P (tem))
14156 {
14157 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14158 if (tem)
14159 return tem;
14160 }
14161 }
14162
14163 /* If the second operand is simpler than the third, swap them
14164 since that produces better jump optimization results. */
14165 if (truth_value_p (TREE_CODE (arg0))
14166 && tree_swap_operands_p (op1, op2, false))
14167 {
14168 location_t loc0 = expr_location_or (arg0, loc);
14169 /* See if this can be inverted. If it can't, possibly because
14170 it was a floating-point inequality comparison, don't do
14171 anything. */
14172 tem = fold_invert_truthvalue (loc0, arg0);
14173 if (tem)
14174 return fold_build3_loc (loc, code, type, tem, op2, op1);
14175 }
14176
14177 /* Convert A ? 1 : 0 to simply A. */
14178 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14179 : (integer_onep (op1)
14180 && !VECTOR_TYPE_P (type)))
14181 && integer_zerop (op2)
14182 /* If we try to convert OP0 to our type, the
14183 call to fold will try to move the conversion inside
14184 a COND, which will recurse. In that case, the COND_EXPR
14185 is probably the best choice, so leave it alone. */
14186 && type == TREE_TYPE (arg0))
14187 return pedantic_non_lvalue_loc (loc, arg0);
14188
14189 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14190 over COND_EXPR in cases such as floating point comparisons. */
14191 if (integer_zerop (op1)
14192 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14193 : (integer_onep (op2)
14194 && !VECTOR_TYPE_P (type)))
14195 && truth_value_p (TREE_CODE (arg0)))
14196 return pedantic_non_lvalue_loc (loc,
14197 fold_convert_loc (loc, type,
14198 invert_truthvalue_loc (loc,
14199 arg0)));
14200
14201 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
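/* E.g. for a 32-bit signed A, A < 0 ? 0x80000000 : 0 is simply
   A & 0x80000000, which isolates the sign bit.  */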
14202 if (TREE_CODE (arg0) == LT_EXPR
14203 && integer_zerop (TREE_OPERAND (arg0, 1))
14204 && integer_zerop (op2)
14205 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14206 {
14207 /* sign_bit_p looks through both zero and sign extensions,
14208 but for this optimization only sign extensions are
14209 usable. */
14210 tree tem2 = TREE_OPERAND (arg0, 0);
14211 while (tem != tem2)
14212 {
14213 if (TREE_CODE (tem2) != NOP_EXPR
14214 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14215 {
14216 tem = NULL_TREE;
14217 break;
14218 }
14219 tem2 = TREE_OPERAND (tem2, 0);
14220 }
14221 /* sign_bit_p only checks ARG1 bits within A's precision.
14222 If <sign bit of A> has wider type than A, bits outside
14223 of A's precision in <sign bit of A> need to be checked.
14224 If they are all 0, this optimization needs to be done
14225 in unsigned A's type; if they are all 1, in signed A's type;
14226 otherwise this can't be done. */
14227 if (tem
14228 && TYPE_PRECISION (TREE_TYPE (tem))
14229 < TYPE_PRECISION (TREE_TYPE (arg1))
14230 && TYPE_PRECISION (TREE_TYPE (tem))
14231 < TYPE_PRECISION (type))
14232 {
14233 unsigned HOST_WIDE_INT mask_lo;
14234 HOST_WIDE_INT mask_hi;
14235 int inner_width, outer_width;
14236 tree tem_type;
14237
14238 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14239 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14240 if (outer_width > TYPE_PRECISION (type))
14241 outer_width = TYPE_PRECISION (type);
14242
14243 if (outer_width > HOST_BITS_PER_WIDE_INT)
14244 {
14245 mask_hi = (HOST_WIDE_INT_M1U
14246 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14247 mask_lo = -1;
14248 }
14249 else
14250 {
14251 mask_hi = 0;
14252 mask_lo = (HOST_WIDE_INT_M1U
14253 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14254 }
14255 if (inner_width > HOST_BITS_PER_WIDE_INT)
14256 {
14257 mask_hi &= ~(HOST_WIDE_INT_M1U
14258 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14259 mask_lo = 0;
14260 }
14261 else
14262 mask_lo &= ~(HOST_WIDE_INT_M1U
14263 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14264
14265 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14266 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14267 {
14268 tem_type = signed_type_for (TREE_TYPE (tem));
14269 tem = fold_convert_loc (loc, tem_type, tem);
14270 }
14271 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14272 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14273 {
14274 tem_type = unsigned_type_for (TREE_TYPE (tem));
14275 tem = fold_convert_loc (loc, tem_type, tem);
14276 }
14277 else
14278 tem = NULL;
14279 }
14280
14281 if (tem)
14282 return
14283 fold_convert_loc (loc, type,
14284 fold_build2_loc (loc, BIT_AND_EXPR,
14285 TREE_TYPE (tem), tem,
14286 fold_convert_loc (loc,
14287 TREE_TYPE (tem),
14288 arg1)));
14289 }
14290
14291 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14292 already handled above. */
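/* E.g. (A >> 3) & 1 ? 8 : 0, which isolates bit 3 of A, is simply
   A & 8.  */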
14293 if (TREE_CODE (arg0) == BIT_AND_EXPR
14294 && integer_onep (TREE_OPERAND (arg0, 1))
14295 && integer_zerop (op2)
14296 && integer_pow2p (arg1))
14297 {
14298 tree tem = TREE_OPERAND (arg0, 0);
14299 STRIP_NOPS (tem);
14300 if (TREE_CODE (tem) == RSHIFT_EXPR
14301 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14302 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
14303 == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14304 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14305 TREE_OPERAND (tem, 0), arg1);
14306 }
14307
14308 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14309 is probably obsolete because the first operand should be a
14310 truth value (that's why we have the two cases above), but let's
14311 leave it in until we can confirm this for all front-ends. */
14312 if (integer_zerop (op2)
14313 && TREE_CODE (arg0) == NE_EXPR
14314 && integer_zerop (TREE_OPERAND (arg0, 1))
14315 && integer_pow2p (arg1)
14316 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14317 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14318 arg1, OEP_ONLY_CONST))
14319 return pedantic_non_lvalue_loc (loc,
14320 fold_convert_loc (loc, type,
14321 TREE_OPERAND (arg0, 0)));
14322
14323 /* Disable the transformations below for vectors, since
14324 fold_binary_op_with_conditional_arg may undo them immediately,
14325 yielding an infinite loop. */
14326 if (code == VEC_COND_EXPR)
14327 return NULL_TREE;
14328
14329 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14330 if (integer_zerop (op2)
14331 && truth_value_p (TREE_CODE (arg0))
14332 && truth_value_p (TREE_CODE (arg1))
14333 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14334 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14335 : TRUTH_ANDIF_EXPR,
14336 type, fold_convert_loc (loc, type, arg0), arg1);
14337
14338 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14339 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14340 && truth_value_p (TREE_CODE (arg0))
14341 && truth_value_p (TREE_CODE (arg1))
14342 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14343 {
14344 location_t loc0 = expr_location_or (arg0, loc);
14345 /* Only perform transformation if ARG0 is easily inverted. */
14346 tem = fold_invert_truthvalue (loc0, arg0);
14347 if (tem)
14348 return fold_build2_loc (loc, code == VEC_COND_EXPR
14349 ? BIT_IOR_EXPR
14350 : TRUTH_ORIF_EXPR,
14351 type, fold_convert_loc (loc, type, tem),
14352 arg1);
14353 }
14354
14355 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14356 if (integer_zerop (arg1)
14357 && truth_value_p (TREE_CODE (arg0))
14358 && truth_value_p (TREE_CODE (op2))
14359 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14360 {
14361 location_t loc0 = expr_location_or (arg0, loc);
14362 /* Only perform transformation if ARG0 is easily inverted. */
14363 tem = fold_invert_truthvalue (loc0, arg0);
14364 if (tem)
14365 return fold_build2_loc (loc, code == VEC_COND_EXPR
14366 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14367 type, fold_convert_loc (loc, type, tem),
14368 op2);
14369 }
14370
14371 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14372 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14373 && truth_value_p (TREE_CODE (arg0))
14374 && truth_value_p (TREE_CODE (op2))
14375 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14376 return fold_build2_loc (loc, code == VEC_COND_EXPR
14377 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14378 type, fold_convert_loc (loc, type, arg0), op2);
14379
14380 return NULL_TREE;
14381
14382 case CALL_EXPR:
14383 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14384 of fold_ternary on them. */
14385 gcc_unreachable ();
14386
14387 case BIT_FIELD_REF:
14388 if ((TREE_CODE (arg0) == VECTOR_CST
14389 || (TREE_CODE (arg0) == CONSTRUCTOR
14390 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14391 && (type == TREE_TYPE (TREE_TYPE (arg0))
14392 || (TREE_CODE (type) == VECTOR_TYPE
14393 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14394 {
14395 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14396 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14397 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14398 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14399
14400 if (n != 0
14401 && (idx % width) == 0
14402 && (n % width) == 0
14403 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14404 {
14405 idx = idx / width;
14406 n = n / width;
14407
14408 if (TREE_CODE (arg0) == VECTOR_CST)
14409 {
14410 if (n == 1)
14411 return VECTOR_CST_ELT (arg0, idx);
14412
14413 tree *vals = XALLOCAVEC (tree, n);
14414 for (unsigned i = 0; i < n; ++i)
14415 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14416 return build_vector (type, vals);
14417 }
14418
14419 /* Constructor elements can be subvectors. */
14420 unsigned HOST_WIDE_INT k = 1;
14421 if (CONSTRUCTOR_NELTS (arg0) != 0)
14422 {
14423 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14424 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14425 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14426 }
14427
14428 /* We keep an exact subset of the constructor elements. */
14429 if ((idx % k) == 0 && (n % k) == 0)
14430 {
14431 if (CONSTRUCTOR_NELTS (arg0) == 0)
14432 return build_constructor (type, NULL);
14433 idx /= k;
14434 n /= k;
14435 if (n == 1)
14436 {
14437 if (idx < CONSTRUCTOR_NELTS (arg0))
14438 return CONSTRUCTOR_ELT (arg0, idx)->value;
14439 return build_zero_cst (type);
14440 }
14441
14442 vec<constructor_elt, va_gc> *vals;
14443 vec_alloc (vals, n);
14444 for (unsigned i = 0;
14445 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14446 ++i)
14447 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14448 CONSTRUCTOR_ELT
14449 (arg0, idx + i)->value);
14450 return build_constructor (type, vals);
14451 }
14452 /* The bitfield references a single constructor element. */
14453 else if (idx + n <= (idx / k + 1) * k)
14454 {
14455 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14456 return build_zero_cst (type);
14457 else if (n == k)
14458 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14459 else
14460 return fold_build3_loc (loc, code, type,
14461 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14462 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14463 }
14464 }
14465 }
14466
14467 /* A bit-field-ref that references the full argument can be stripped. */
14468 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14469 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14470 && integer_zerop (op2))
14471 return fold_convert_loc (loc, type, arg0);
14472
14473 /* On constants we can use native encode/interpret to constant
14474 fold (nearly) all BIT_FIELD_REFs. */
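/* E.g. a 4-byte BIT_FIELD_REF of a 16-byte vector constant is
   folded by serializing the constant with native_encode_expr and
   re-reading the selected window with native_interpret_expr.  */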
14475 if (CONSTANT_CLASS_P (arg0)
14476 && can_native_interpret_type_p (type)
14477 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14478 /* This limitation should not be necessary; we just need to
14479 round this up to mode size. */
14480 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14481 /* Need bit-shifting of the buffer to relax the following. */
14482 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14483 {
14484 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14485 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14486 unsigned HOST_WIDE_INT clen;
14487 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14488 /* ??? We cannot tell native_encode_expr to start at
14489 some random byte only. So limit ourselves to a reasonable amount
14490 of work. */
14491 if (clen <= 4096)
14492 {
14493 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14494 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14495 if (len > 0
14496 && len * BITS_PER_UNIT >= bitpos + bitsize)
14497 {
14498 tree v = native_interpret_expr (type,
14499 b + bitpos / BITS_PER_UNIT,
14500 bitsize / BITS_PER_UNIT);
14501 if (v)
14502 return v;
14503 }
14504 }
14505 }
14506
14507 return NULL_TREE;
14508
14509 case FMA_EXPR:
14510 /* For integers we can decompose the FMA if possible. */
14511 if (TREE_CODE (arg0) == INTEGER_CST
14512 && TREE_CODE (arg1) == INTEGER_CST)
14513 return fold_build2_loc (loc, PLUS_EXPR, type,
14514 const_binop (MULT_EXPR, arg0, arg1), arg2);
14515 if (integer_zerop (arg2))
14516 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14517
14518 return fold_fma (loc, type, arg0, arg1, arg2);
14519
14520 case VEC_PERM_EXPR:
14521 if (TREE_CODE (arg2) == VECTOR_CST)
14522 {
14523 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14524 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14525 tree t;
14526 bool need_mask_canon = false;
14527 bool all_in_vec0 = true;
14528 bool all_in_vec1 = true;
14529 bool maybe_identity = true;
14530 bool single_arg = (op0 == op1);
14531 bool changed = false;
14532
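/* Selector elements are canonicalized modulo the number of valid
   input elements: 2 * nelts when two distinct vectors are permuted,
   nelts when both operands are the same vector.  */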
14533 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14534 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14535 for (i = 0; i < nelts; i++)
14536 {
14537 tree val = VECTOR_CST_ELT (arg2, i);
14538 if (TREE_CODE (val) != INTEGER_CST)
14539 return NULL_TREE;
14540
14541 sel[i] = TREE_INT_CST_LOW (val) & mask;
14542 if (TREE_INT_CST_HIGH (val)
14543 || ((unsigned HOST_WIDE_INT)
14544 TREE_INT_CST_LOW (val) != sel[i]))
14545 need_mask_canon = true;
14546
14547 if (sel[i] < nelts)
14548 all_in_vec1 = false;
14549 else
14550 all_in_vec0 = false;
14551
14552 if ((sel[i] & (nelts-1)) != i)
14553 maybe_identity = false;
14554 }
14555
14556 if (maybe_identity)
14557 {
14558 if (all_in_vec0)
14559 return op0;
14560 if (all_in_vec1)
14561 return op1;
14562 }
14563
14564 if (all_in_vec0)
14565 op1 = op0;
14566 else if (all_in_vec1)
14567 {
14568 op0 = op1;
14569 for (i = 0; i < nelts; i++)
14570 sel[i] -= nelts;
14571 need_mask_canon = true;
14572 }
14573
14574 if ((TREE_CODE (op0) == VECTOR_CST
14575 || TREE_CODE (op0) == CONSTRUCTOR)
14576 && (TREE_CODE (op1) == VECTOR_CST
14577 || TREE_CODE (op1) == CONSTRUCTOR))
14578 {
14579 t = fold_vec_perm (type, op0, op1, sel);
14580 if (t != NULL_TREE)
14581 return t;
14582 }
14583
14584 if (op0 == op1 && !single_arg)
14585 changed = true;
14586
14587 if (need_mask_canon && arg2 == op2)
14588 {
14589 tree *tsel = XALLOCAVEC (tree, nelts);
14590 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14591 for (i = 0; i < nelts; i++)
14592 tsel[i] = build_int_cst (eltype, sel[i]);
14593 op2 = build_vector (TREE_TYPE (arg2), tsel);
14594 changed = true;
14595 }
14596
14597 if (changed)
14598 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14599 }
14600 return NULL_TREE;
14601
14602 default:
14603 return NULL_TREE;
14604 } /* switch (code) */
14605 }
14606
14607 /* Perform constant folding and related simplification of EXPR.
14608 The related simplifications include x*1 => x, x*0 => 0, etc.,
14609 and application of the associative law.
14610 NOP_EXPR conversions may be removed freely (as long as we
14611 are careful not to change the type of the overall expression).
14612 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14613 but we can constant-fold them if they have constant operands. */
14614
14615 #ifdef ENABLE_FOLD_CHECKING
14616 # define fold(x) fold_1 (x)
14617 static tree fold_1 (tree);
14618 static
14619 #endif
14620 tree
14621 fold (tree expr)
14622 {
14623 const tree t = expr;
14624 enum tree_code code = TREE_CODE (t);
14625 enum tree_code_class kind = TREE_CODE_CLASS (code);
14626 tree tem;
14627 location_t loc = EXPR_LOCATION (expr);
14628
14629 /* Return right away if a constant. */
14630 if (kind == tcc_constant)
14631 return t;
14632
14633 /* CALL_EXPR-like objects with variable numbers of operands are
14634 treated specially. */
14635 if (kind == tcc_vl_exp)
14636 {
14637 if (code == CALL_EXPR)
14638 {
14639 tem = fold_call_expr (loc, expr, false);
14640 return tem ? tem : expr;
14641 }
14642 return expr;
14643 }
14644
14645 if (IS_EXPR_CODE_CLASS (kind))
14646 {
14647 tree type = TREE_TYPE (t);
14648 tree op0, op1, op2;
14649
14650 switch (TREE_CODE_LENGTH (code))
14651 {
14652 case 1:
14653 op0 = TREE_OPERAND (t, 0);
14654 tem = fold_unary_loc (loc, code, type, op0);
14655 return tem ? tem : expr;
14656 case 2:
14657 op0 = TREE_OPERAND (t, 0);
14658 op1 = TREE_OPERAND (t, 1);
14659 tem = fold_binary_loc (loc, code, type, op0, op1);
14660 return tem ? tem : expr;
14661 case 3:
14662 op0 = TREE_OPERAND (t, 0);
14663 op1 = TREE_OPERAND (t, 1);
14664 op2 = TREE_OPERAND (t, 2);
14665 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14666 return tem ? tem : expr;
14667 default:
14668 break;
14669 }
14670 }
14671
14672 switch (code)
14673 {
14674 case ARRAY_REF:
14675 {
14676 tree op0 = TREE_OPERAND (t, 0);
14677 tree op1 = TREE_OPERAND (t, 1);
14678
14679 if (TREE_CODE (op1) == INTEGER_CST
14680 && TREE_CODE (op0) == CONSTRUCTOR
14681 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14682 {
14683 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14684 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14685 unsigned HOST_WIDE_INT begin = 0;
14686
14687 /* Find a matching index by means of a binary search. */
14688 while (begin != end)
14689 {
14690 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14691 tree index = (*elts)[middle].index;
14692
14693 if (TREE_CODE (index) == INTEGER_CST
14694 && tree_int_cst_lt (index, op1))
14695 begin = middle + 1;
14696 else if (TREE_CODE (index) == INTEGER_CST
14697 && tree_int_cst_lt (op1, index))
14698 end = middle;
14699 else if (TREE_CODE (index) == RANGE_EXPR
14700 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14701 begin = middle + 1;
14702 else if (TREE_CODE (index) == RANGE_EXPR
14703 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14704 end = middle;
14705 else
14706 return (*elts)[middle].value;
14707 }
14708 }
14709
14710 return t;
14711 }
14712
14713 /* Return a VECTOR_CST if possible. */
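/* E.g. { 1, 2 } for a four-element vector type becomes the constant
   vector { 1, 2, 0, 0 }; missing trailing elements are zero-filled.  */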
14714 case CONSTRUCTOR:
14715 {
14716 tree type = TREE_TYPE (t);
14717 if (TREE_CODE (type) != VECTOR_TYPE)
14718 return t;
14719
14720 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14721 unsigned HOST_WIDE_INT idx, pos = 0;
14722 tree value;
14723
14724 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14725 {
14726 if (!CONSTANT_CLASS_P (value))
14727 return t;
14728 if (TREE_CODE (value) == VECTOR_CST)
14729 {
14730 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14731 vec[pos++] = VECTOR_CST_ELT (value, i);
14732 }
14733 else
14734 vec[pos++] = value;
14735 }
14736 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14737 vec[pos] = build_zero_cst (TREE_TYPE (type));
14738
14739 return build_vector (type, vec);
14740 }
14741
14742 case CONST_DECL:
14743 return fold (DECL_INITIAL (t));
14744
14745 default:
14746 return t;
14747 } /* switch (code) */
14748 }
14749
14750 #ifdef ENABLE_FOLD_CHECKING
14751 #undef fold
14752
14753 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14754 hash_table <pointer_hash <tree_node> >);
14755 static void fold_check_failed (const_tree, const_tree);
14756 void print_fold_checksum (const_tree);
14757
14758 /* When --enable-checking=fold, compute a digest of expr before
14759 and after the actual fold call to see whether fold accidentally
14760 changed the original expr. */
14761
14762 tree
14763 fold (tree expr)
14764 {
14765 tree ret;
14766 struct md5_ctx ctx;
14767 unsigned char checksum_before[16], checksum_after[16];
14768 hash_table <pointer_hash <tree_node> > ht;
14769
14770 ht.create (32);
14771 md5_init_ctx (&ctx);
14772 fold_checksum_tree (expr, &ctx, ht);
14773 md5_finish_ctx (&ctx, checksum_before);
14774 ht.empty ();
14775
14776 ret = fold_1 (expr);
14777
14778 md5_init_ctx (&ctx);
14779 fold_checksum_tree (expr, &ctx, ht);
14780 md5_finish_ctx (&ctx, checksum_after);
14781 ht.dispose ();
14782
14783 if (memcmp (checksum_before, checksum_after, 16))
14784 fold_check_failed (expr, ret);
14785
14786 return ret;
14787 }
14788
14789 void
14790 print_fold_checksum (const_tree expr)
14791 {
14792 struct md5_ctx ctx;
14793 unsigned char checksum[16], cnt;
14794 hash_table <pointer_hash <tree_node> > ht;
14795
14796 ht.create (32);
14797 md5_init_ctx (&ctx);
14798 fold_checksum_tree (expr, &ctx, ht);
14799 md5_finish_ctx (&ctx, checksum);
14800 ht.dispose ();
14801 for (cnt = 0; cnt < 16; ++cnt)
14802 fprintf (stderr, "%02x", checksum[cnt]);
14803 putc ('\n', stderr);
14804 }
14805
14806 static void
14807 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14808 {
14809 internal_error ("fold check: original tree changed by fold");
14810 }
14811
14812 static void
14813 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14814 hash_table <pointer_hash <tree_node> > ht)
14815 {
14816 tree_node **slot;
14817 enum tree_code code;
14818 union tree_node buf;
14819 int i, len;
14820
14821 recursive_label:
14822 if (expr == NULL)
14823 return;
14824 slot = ht.find_slot (expr, INSERT);
14825 if (*slot != NULL)
14826 return;
14827 *slot = CONST_CAST_TREE (expr);
14828 code = TREE_CODE (expr);
14829 if (TREE_CODE_CLASS (code) == tcc_declaration
14830 && DECL_ASSEMBLER_NAME_SET_P (expr))
14831 {
14832 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14833 memcpy ((char *) &buf, expr, tree_size (expr));
14834 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14835 expr = (tree) &buf;
14836 }
14837 else if (TREE_CODE_CLASS (code) == tcc_type
14838 && (TYPE_POINTER_TO (expr)
14839 || TYPE_REFERENCE_TO (expr)
14840 || TYPE_CACHED_VALUES_P (expr)
14841 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14842 || TYPE_NEXT_VARIANT (expr)))
14843 {
14844 /* Allow these fields to be modified. */
14845 tree tmp;
14846 memcpy ((char *) &buf, expr, tree_size (expr));
14847 expr = tmp = (tree) &buf;
14848 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14849 TYPE_POINTER_TO (tmp) = NULL;
14850 TYPE_REFERENCE_TO (tmp) = NULL;
14851 TYPE_NEXT_VARIANT (tmp) = NULL;
14852 if (TYPE_CACHED_VALUES_P (tmp))
14853 {
14854 TYPE_CACHED_VALUES_P (tmp) = 0;
14855 TYPE_CACHED_VALUES (tmp) = NULL;
14856 }
14857 }
14858 md5_process_bytes (expr, tree_size (expr), ctx);
14859 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14860 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14861 if (TREE_CODE_CLASS (code) != tcc_type
14862 && TREE_CODE_CLASS (code) != tcc_declaration
14863 && code != TREE_LIST
14864 && code != SSA_NAME
14865 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14866 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14867 switch (TREE_CODE_CLASS (code))
14868 {
14869 case tcc_constant:
14870 switch (code)
14871 {
14872 case STRING_CST:
14873 md5_process_bytes (TREE_STRING_POINTER (expr),
14874 TREE_STRING_LENGTH (expr), ctx);
14875 break;
14876 case COMPLEX_CST:
14877 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14878 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14879 break;
14880 case VECTOR_CST:
14881 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14882 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14883 break;
14884 default:
14885 break;
14886 }
14887 break;
14888 case tcc_exceptional:
14889 switch (code)
14890 {
14891 case TREE_LIST:
14892 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14893 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14894 expr = TREE_CHAIN (expr);
14895 goto recursive_label;
14896 break;
14897 case TREE_VEC:
14898 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14899 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14900 break;
14901 default:
14902 break;
14903 }
14904 break;
14905 case tcc_expression:
14906 case tcc_reference:
14907 case tcc_comparison:
14908 case tcc_unary:
14909 case tcc_binary:
14910 case tcc_statement:
14911 case tcc_vl_exp:
14912 len = TREE_OPERAND_LENGTH (expr);
14913 for (i = 0; i < len; ++i)
14914 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14915 break;
14916 case tcc_declaration:
14917 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14918 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14919 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14920 {
14921 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14922 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14923 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14924 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14925 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14926 }
14927 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14928 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14929
14930 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14931 {
14932 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14933 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14934 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14935 }
14936 break;
14937 case tcc_type:
14938 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14939 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14940 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14941 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14942 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14943 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14944 if (INTEGRAL_TYPE_P (expr)
14945 || SCALAR_FLOAT_TYPE_P (expr))
14946 {
14947 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14948 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14949 }
14950 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14951 if (TREE_CODE (expr) == RECORD_TYPE
14952 || TREE_CODE (expr) == UNION_TYPE
14953 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14954 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14955 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14956 break;
14957 default:
14958 break;
14959 }
14960 }
14961
14962 /* Helper function for outputting the checksum of a tree T. When
14963 debugging with gdb, you can "define mynext" to be "next" followed
14964 by "call debug_fold_checksum (op0)", then just trace down till the
14965 outputs differ. */
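/* A sketch of the gdb definition described above (assumes a compiler
   built with ENABLE_FOLD_CHECKING, so that debug_fold_checksum is
   available):

	(gdb) define mynext
	> next
	> call debug_fold_checksum (op0)
	> end

   Stepping with "mynext" then prints the checksum after every source
   line, so the first statement whose output changes is the one that
   mutated OP0.  */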
14966
14967 DEBUG_FUNCTION void
14968 debug_fold_checksum (const_tree t)
14969 {
14970 int i;
14971 unsigned char checksum[16];
14972 struct md5_ctx ctx;
14973 hash_table <pointer_hash <tree_node> > ht;
14974 ht.create (32);
14975
14976 md5_init_ctx (&ctx);
14977 fold_checksum_tree (t, &ctx, ht);
14978 md5_finish_ctx (&ctx, checksum);
14979 ht.empty ();
14980
14981 for (i = 0; i < 16; i++)
14982 fprintf (stderr, "%d ", checksum[i]);
14983
14984 fprintf (stderr, "\n");
14985 }
14986
14987 #endif
14988
14989 /* Fold a unary tree expression with code CODE of type TYPE with an
14990 operand OP0. LOC is the location of the resulting expression.
14991 Return a folded expression if successful. Otherwise, return a tree
14992 expression with code CODE of type TYPE with an operand OP0. */
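/* Illustrative use (hypothetical caller; fold_build1_loc is the usual
   macro entry point that expands to this function):

	tree neg = fold_build1_loc (loc, NEGATE_EXPR, type, op0);

   If fold_unary_loc can simplify the negation (e.g. -(-x) to x), the
   simplified tree is returned; otherwise a fresh NEGATE_EXPR node is
   built.  */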
14993
14994 tree
14995 fold_build1_stat_loc (location_t loc,
14996 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14997 {
14998 tree tem;
14999 #ifdef ENABLE_FOLD_CHECKING
15000 unsigned char checksum_before[16], checksum_after[16];
15001 struct md5_ctx ctx;
15002 hash_table <pointer_hash <tree_node> > ht;
15003
15004 ht.create (32);
15005 md5_init_ctx (&ctx);
15006 fold_checksum_tree (op0, &ctx, ht);
15007 md5_finish_ctx (&ctx, checksum_before);
15008 ht.empty ();
15009 #endif
15010
15011 tem = fold_unary_loc (loc, code, type, op0);
15012 if (!tem)
15013 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15014
15015 #ifdef ENABLE_FOLD_CHECKING
15016 md5_init_ctx (&ctx);
15017 fold_checksum_tree (op0, &ctx, ht);
15018 md5_finish_ctx (&ctx, checksum_after);
15019 ht.dispose ();
15020
15021 if (memcmp (checksum_before, checksum_after, 16))
15022 fold_check_failed (op0, tem);
15023 #endif
15024 return tem;
15025 }
15026
15027 /* Fold a binary tree expression with code CODE of type TYPE with
15028 operands OP0 and OP1. LOC is the location of the resulting
15029 expression. Return a folded expression if successful. Otherwise,
15030 return a tree expression with code CODE of type TYPE with operands
15031 OP0 and OP1. */
15032
15033 tree
15034 fold_build2_stat_loc (location_t loc,
15035 enum tree_code code, tree type, tree op0, tree op1
15036 MEM_STAT_DECL)
15037 {
15038 tree tem;
15039 #ifdef ENABLE_FOLD_CHECKING
15040 unsigned char checksum_before_op0[16],
15041 checksum_before_op1[16],
15042 checksum_after_op0[16],
15043 checksum_after_op1[16];
15044 struct md5_ctx ctx;
15045 hash_table <pointer_hash <tree_node> > ht;
15046
15047 ht.create (32);
15048 md5_init_ctx (&ctx);
15049 fold_checksum_tree (op0, &ctx, ht);
15050 md5_finish_ctx (&ctx, checksum_before_op0);
15051 ht.empty ();
15052
15053 md5_init_ctx (&ctx);
15054 fold_checksum_tree (op1, &ctx, ht);
15055 md5_finish_ctx (&ctx, checksum_before_op1);
15056 ht.empty ();
15057 #endif
15058
15059 tem = fold_binary_loc (loc, code, type, op0, op1);
15060 if (!tem)
15061 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15062
15063 #ifdef ENABLE_FOLD_CHECKING
15064 md5_init_ctx (&ctx);
15065 fold_checksum_tree (op0, &ctx, ht);
15066 md5_finish_ctx (&ctx, checksum_after_op0);
15067 ht.empty ();
15068
15069 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15070 fold_check_failed (op0, tem);
15071
15072 md5_init_ctx (&ctx);
15073 fold_checksum_tree (op1, &ctx, ht);
15074 md5_finish_ctx (&ctx, checksum_after_op1);
15075 ht.dispose ();
15076
15077 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15078 fold_check_failed (op1, tem);
15079 #endif
15080 return tem;
15081 }
15082
15083 /* Fold a ternary tree expression with code CODE of type TYPE with
15084 operands OP0, OP1, and OP2. Return a folded expression if
15085 successful. Otherwise, return a tree expression with code CODE of
15086 type TYPE with operands OP0, OP1, and OP2. */
15087
15088 tree
15089 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15090 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15091 {
15092 tree tem;
15093 #ifdef ENABLE_FOLD_CHECKING
15094 unsigned char checksum_before_op0[16],
15095 checksum_before_op1[16],
15096 checksum_before_op2[16],
15097 checksum_after_op0[16],
15098 checksum_after_op1[16],
15099 checksum_after_op2[16];
15100 struct md5_ctx ctx;
15101 hash_table <pointer_hash <tree_node> > ht;
15102
15103 ht.create (32);
15104 md5_init_ctx (&ctx);
15105 fold_checksum_tree (op0, &ctx, ht);
15106 md5_finish_ctx (&ctx, checksum_before_op0);
15107 ht.empty ();
15108
15109 md5_init_ctx (&ctx);
15110 fold_checksum_tree (op1, &ctx, ht);
15111 md5_finish_ctx (&ctx, checksum_before_op1);
15112 ht.empty ();
15113
15114 md5_init_ctx (&ctx);
15115 fold_checksum_tree (op2, &ctx, ht);
15116 md5_finish_ctx (&ctx, checksum_before_op2);
15117 ht.empty ();
15118 #endif
15119
15120 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15121 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15122 if (!tem)
15123 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15124
15125 #ifdef ENABLE_FOLD_CHECKING
15126 md5_init_ctx (&ctx);
15127 fold_checksum_tree (op0, &ctx, ht);
15128 md5_finish_ctx (&ctx, checksum_after_op0);
15129 ht.empty ();
15130
15131 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15132 fold_check_failed (op0, tem);
15133
15134 md5_init_ctx (&ctx);
15135 fold_checksum_tree (op1, &ctx, ht);
15136 md5_finish_ctx (&ctx, checksum_after_op1);
15137 ht.empty ();
15138
15139 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15140 fold_check_failed (op1, tem);
15141
15142 md5_init_ctx (&ctx);
15143 fold_checksum_tree (op2, &ctx, ht);
15144 md5_finish_ctx (&ctx, checksum_after_op2);
15145 ht.dispose ();
15146
15147 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15148 fold_check_failed (op2, tem);
15149 #endif
15150 return tem;
15151 }
15152
15153 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
15154 arguments in ARGARRAY, and a null static chain.
15155 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15156 of type TYPE from the given operands as constructed by build_call_array. */
15157
15158 tree
15159 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15160 int nargs, tree *argarray)
15161 {
15162 tree tem;
15163 #ifdef ENABLE_FOLD_CHECKING
15164 unsigned char checksum_before_fn[16],
15165 checksum_before_arglist[16],
15166 checksum_after_fn[16],
15167 checksum_after_arglist[16];
15168 struct md5_ctx ctx;
15169 hash_table <pointer_hash <tree_node> > ht;
15170 int i;
15171
15172 ht.create (32);
15173 md5_init_ctx (&ctx);
15174 fold_checksum_tree (fn, &ctx, ht);
15175 md5_finish_ctx (&ctx, checksum_before_fn);
15176 ht.empty ();
15177
15178 md5_init_ctx (&ctx);
15179 for (i = 0; i < nargs; i++)
15180 fold_checksum_tree (argarray[i], &ctx, ht);
15181 md5_finish_ctx (&ctx, checksum_before_arglist);
15182 ht.empty ();
15183 #endif
15184
15185 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15186
15187 #ifdef ENABLE_FOLD_CHECKING
15188 md5_init_ctx (&ctx);
15189 fold_checksum_tree (fn, &ctx, ht);
15190 md5_finish_ctx (&ctx, checksum_after_fn);
15191 ht.empty ();
15192
15193 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15194 fold_check_failed (fn, tem);
15195
15196 md5_init_ctx (&ctx);
15197 for (i = 0; i < nargs; i++)
15198 fold_checksum_tree (argarray[i], &ctx, ht);
15199 md5_finish_ctx (&ctx, checksum_after_arglist);
15200 ht.dispose ();
15201
15202 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15203 fold_check_failed (NULL_TREE, tem);
15204 #endif
15205 return tem;
15206 }
15207
15208 /* Perform constant folding and related simplification of an initializer
15209 expression. The functions below behave identically to "fold_buildN"
15210 but ignore potential run-time traps and exceptions that fold must preserve. */
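/* For example (a sketch of the intent, not an exhaustive list of the
   affected folds): under -ftrapping-math, fold must not fold away a
   floating-point operation that could trap at run time; a static
   initializer is evaluated once at compile time, so the wrappers below
   temporarily clear flag_trapping_math and the related flags around
   the fold.  */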
15211
15212 #define START_FOLD_INIT \
15213 int saved_signaling_nans = flag_signaling_nans;\
15214 int saved_trapping_math = flag_trapping_math;\
15215 int saved_rounding_math = flag_rounding_math;\
15216 int saved_trapv = flag_trapv;\
15217 int saved_folding_initializer = folding_initializer;\
15218 flag_signaling_nans = 0;\
15219 flag_trapping_math = 0;\
15220 flag_rounding_math = 0;\
15221 flag_trapv = 0;\
15222 folding_initializer = 1;
15223
15224 #define END_FOLD_INIT \
15225 flag_signaling_nans = saved_signaling_nans;\
15226 flag_trapping_math = saved_trapping_math;\
15227 flag_rounding_math = saved_rounding_math;\
15228 flag_trapv = saved_trapv;\
15229 folding_initializer = saved_folding_initializer;
15230
15231 tree
15232 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15233 tree type, tree op)
15234 {
15235 tree result;
15236 START_FOLD_INIT;
15237
15238 result = fold_build1_loc (loc, code, type, op);
15239
15240 END_FOLD_INIT;
15241 return result;
15242 }
15243
15244 tree
15245 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15246 tree type, tree op0, tree op1)
15247 {
15248 tree result;
15249 START_FOLD_INIT;
15250
15251 result = fold_build2_loc (loc, code, type, op0, op1);
15252
15253 END_FOLD_INIT;
15254 return result;
15255 }
15256
15257 tree
15258 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15259 tree type, tree op0, tree op1, tree op2)
15260 {
15261 tree result;
15262 START_FOLD_INIT;
15263
15264 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15265
15266 END_FOLD_INIT;
15267 return result;
15268 }
15269
15270 tree
15271 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15272 int nargs, tree *argarray)
15273 {
15274 tree result;
15275 START_FOLD_INIT;
15276
15277 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15278
15279 END_FOLD_INIT;
15280 return result;
15281 }
15282
15283 #undef START_FOLD_INIT
15284 #undef END_FOLD_INIT
15285
15286 /* Determine if the first argument is a multiple of the second argument.
15287 Return 0 if it is not, or if we cannot easily determine that it is.
15288
15289 An example of the sort of thing we care about (at this point; this routine
15290 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15291 fold cases do now) is discovering that
15292
15293 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15294
15295 is a multiple of
15296
15297 SAVE_EXPR (J * 8)
15298
15299 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15300
15301 This code also handles discovering that
15302
15303 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15304
15305 is a multiple of 8, so we don't have to worry about a possible
15306 remainder.
15307
15308 Note that we *look* inside a SAVE_EXPR only to determine how it was
15309 calculated; it is not safe for fold to do much of anything else with the
15310 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15311 at run time. For example, the latter example above *cannot* be implemented
15312 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15313 evaluation time of the original SAVE_EXPR is not necessarily the same at
15314 the time the new expression is evaluated. The only optimization of this
15315 sort that would be valid is changing
15316
15317 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15318
15319 divided by 8 to
15320
15321 SAVE_EXPR (I) * SAVE_EXPR (J)
15322
15323 (where the same SAVE_EXPR (J) is used in the original and the
15324 transformed version). */
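/* A concrete instance (illustrative): with TYPE = sizetype,
   TOP = J * 24 and BOTTOM = 8, the MULT_EXPR case below asks whether
   either factor is a multiple of 8; the INTEGER_CST case reports that
   24 % 8 == 0, so the whole product is known to be a multiple of 8.  */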
15325
15326 int
15327 multiple_of_p (tree type, const_tree top, const_tree bottom)
15328 {
15329 if (operand_equal_p (top, bottom, 0))
15330 return 1;
15331
15332 if (TREE_CODE (type) != INTEGER_TYPE)
15333 return 0;
15334
15335 switch (TREE_CODE (top))
15336 {
15337 case BIT_AND_EXPR:
15338 /* Bitwise and provides a power of two multiple. If the mask is
15339 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15340 if (!integer_pow2p (bottom))
15341 return 0;
15342 /* FALLTHRU */
15343
15344 case MULT_EXPR:
15345 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15346 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15347
15348 case PLUS_EXPR:
15349 case MINUS_EXPR:
15350 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15351 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15352
15353 case LSHIFT_EXPR:
15354 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15355 {
15356 tree op1, t1;
15357
15358 op1 = TREE_OPERAND (top, 1);
15359 /* const_binop may not detect overflow correctly,
15360 so check for it explicitly here. */
15361 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15362 > TREE_INT_CST_LOW (op1)
15363 && TREE_INT_CST_HIGH (op1) == 0
15364 && 0 != (t1 = fold_convert (type,
15365 const_binop (LSHIFT_EXPR,
15366 size_one_node,
15367 op1)))
15368 && !TREE_OVERFLOW (t1))
15369 return multiple_of_p (type, t1, bottom);
15370 }
15371 return 0;
15372
15373 case NOP_EXPR:
15374 /* Can't handle conversions from non-integral or wider integral type. */
15375 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15376 || (TYPE_PRECISION (type)
15377 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15378 return 0;
15379
15380 /* ... fall through ... */
15381
15382 case SAVE_EXPR:
15383 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15384
15385 case COND_EXPR:
15386 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15387 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15388
15389 case INTEGER_CST:
15390 if (TREE_CODE (bottom) != INTEGER_CST
15391 || integer_zerop (bottom)
15392 || (TYPE_UNSIGNED (type)
15393 && (tree_int_cst_sgn (top) < 0
15394 || tree_int_cst_sgn (bottom) < 0)))
15395 return 0;
15396 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15397 top, bottom));
15398
15399 default:
15400 return 0;
15401 }
15402 }
15403
15404 /* Return true if CODE or TYPE is known to be non-negative. */
15405
15406 static bool
15407 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15408 {
15409 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15410 && truth_value_p (code))
15411 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15412 have a signed:1 type (where the values are -1 and 0). */
15413 return true;
15414 return false;
15415 }
15416
15417 /* Return true if (CODE OP0) is known to be non-negative. If the return
15418 value is based on the assumption that signed overflow is undefined,
15419 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15420 *STRICT_OVERFLOW_P. */
15421
15422 bool
15423 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15424 bool *strict_overflow_p)
15425 {
15426 if (TYPE_UNSIGNED (type))
15427 return true;
15428
15429 switch (code)
15430 {
15431 case ABS_EXPR:
15432 /* We can't return true when signed overflow wraps, because
15433 ABS_EXPR<INT_MIN> = INT_MIN. */
15434 if (!INTEGRAL_TYPE_P (type))
15435 return true;
15436 if (TYPE_OVERFLOW_UNDEFINED (type))
15437 {
15438 *strict_overflow_p = true;
15439 return true;
15440 }
15441 break;
15442
15443 case NON_LVALUE_EXPR:
15444 case FLOAT_EXPR:
15445 case FIX_TRUNC_EXPR:
15446 return tree_expr_nonnegative_warnv_p (op0,
15447 strict_overflow_p);
15448
15449 case NOP_EXPR:
15450 {
15451 tree inner_type = TREE_TYPE (op0);
15452 tree outer_type = type;
15453
15454 if (TREE_CODE (outer_type) == REAL_TYPE)
15455 {
15456 if (TREE_CODE (inner_type) == REAL_TYPE)
15457 return tree_expr_nonnegative_warnv_p (op0,
15458 strict_overflow_p);
15459 if (INTEGRAL_TYPE_P (inner_type))
15460 {
15461 if (TYPE_UNSIGNED (inner_type))
15462 return true;
15463 return tree_expr_nonnegative_warnv_p (op0,
15464 strict_overflow_p);
15465 }
15466 }
15467 else if (INTEGRAL_TYPE_P (outer_type))
15468 {
15469 if (TREE_CODE (inner_type) == REAL_TYPE)
15470 return tree_expr_nonnegative_warnv_p (op0,
15471 strict_overflow_p);
15472 if (INTEGRAL_TYPE_P (inner_type))
15473 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15474 && TYPE_UNSIGNED (inner_type);
15475 }
15476 }
15477 break;
15478
15479 default:
15480 return tree_simple_nonnegative_warnv_p (code, type);
15481 }
15482
15483 /* We don't know the sign of `t', so be conservative and return false. */
15484 return false;
15485 }
15486
15487 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15488 value is based on the assumption that signed overflow is undefined,
15489 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15490 *STRICT_OVERFLOW_P. */
15491
15492 bool
15493 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15494 tree op1, bool *strict_overflow_p)
15495 {
15496 if (TYPE_UNSIGNED (type))
15497 return true;
15498
15499 switch (code)
15500 {
15501 case POINTER_PLUS_EXPR:
15502 case PLUS_EXPR:
15503 if (FLOAT_TYPE_P (type))
15504 return (tree_expr_nonnegative_warnv_p (op0,
15505 strict_overflow_p)
15506 && tree_expr_nonnegative_warnv_p (op1,
15507 strict_overflow_p));
15508
15509 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15510 both unsigned and at least 2 bits shorter than the result. */
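/* Sketch of the bound: if x fits in px bits and y in py bits, then
   x + y <= (2^px - 1) + (2^py - 1) < 2^(MAX (px, py) + 1), so
   requiring MAX (px, py) + 1 < TYPE_PRECISION (type) leaves the sign
   bit clear.  */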
15511 if (TREE_CODE (type) == INTEGER_TYPE
15512 && TREE_CODE (op0) == NOP_EXPR
15513 && TREE_CODE (op1) == NOP_EXPR)
15514 {
15515 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15516 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15517 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15518 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15519 {
15520 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15521 TYPE_PRECISION (inner2)) + 1;
15522 return prec < TYPE_PRECISION (type);
15523 }
15524 }
15525 break;
15526
15527 case MULT_EXPR:
15528 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15529 {
15530 /* x * x is always non-negative for floating point x,
15531 or when signed overflow is undefined. */
15532 if (operand_equal_p (op0, op1, 0)
15533 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15534 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15535 {
15536 if (TYPE_OVERFLOW_UNDEFINED (type))
15537 *strict_overflow_p = true;
15538 return true;
15539 }
15540 }
15541
15542 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15543 both unsigned and the sum of their widths is less than that of the result. */
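/* Sketch of the bound: if x < 2^px and y < 2^py, then
   x * y < 2^(px + py), so requiring px + py < TYPE_PRECISION (type)
   leaves the sign bit clear.  */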
15544 if (TREE_CODE (type) == INTEGER_TYPE
15545 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15546 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15547 {
15548 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15549 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15550 : TREE_TYPE (op0);
15551 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15552 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15553 : TREE_TYPE (op1);
15554
15555 bool unsigned0 = TYPE_UNSIGNED (inner0);
15556 bool unsigned1 = TYPE_UNSIGNED (inner1);
15557
15558 if (TREE_CODE (op0) == INTEGER_CST)
15559 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15560
15561 if (TREE_CODE (op1) == INTEGER_CST)
15562 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15563
15564 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15565 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15566 {
15567 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15568 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15569 : TYPE_PRECISION (inner0);
15570
15571 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15572 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15573 : TYPE_PRECISION (inner1);
15574
15575 return precision0 + precision1 < TYPE_PRECISION (type);
15576 }
15577 }
15578 return false;
15579
15580 case BIT_AND_EXPR:
15581 case MAX_EXPR:
15582 return (tree_expr_nonnegative_warnv_p (op0,
15583 strict_overflow_p)
15584 || tree_expr_nonnegative_warnv_p (op1,
15585 strict_overflow_p));
15586
15587 case BIT_IOR_EXPR:
15588 case BIT_XOR_EXPR:
15589 case MIN_EXPR:
15590 case RDIV_EXPR:
15591 case TRUNC_DIV_EXPR:
15592 case CEIL_DIV_EXPR:
15593 case FLOOR_DIV_EXPR:
15594 case ROUND_DIV_EXPR:
15595 return (tree_expr_nonnegative_warnv_p (op0,
15596 strict_overflow_p)
15597 && tree_expr_nonnegative_warnv_p (op1,
15598 strict_overflow_p));
15599
15600 case TRUNC_MOD_EXPR:
15601 case CEIL_MOD_EXPR:
15602 case FLOOR_MOD_EXPR:
15603 case ROUND_MOD_EXPR:
15604 return tree_expr_nonnegative_warnv_p (op0,
15605 strict_overflow_p);
15606 default:
15607 return tree_simple_nonnegative_warnv_p (code, type);
15608 }
15609
15610 /* We don't know the sign of `t', so be conservative and return false. */
15611 return false;
15612 }
15613
15614 /* Return true if T is known to be non-negative. If the return
15615 value is based on the assumption that signed overflow is undefined,
15616 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15617 *STRICT_OVERFLOW_P. */
15618
15619 bool
15620 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15621 {
15622 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15623 return true;
15624
15625 switch (TREE_CODE (t))
15626 {
15627 case INTEGER_CST:
15628 return tree_int_cst_sgn (t) >= 0;
15629
15630 case REAL_CST:
15631 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15632
15633 case FIXED_CST:
15634 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15635
15636 case COND_EXPR:
15637 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15638 strict_overflow_p)
15639 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15640 strict_overflow_p));
15641 default:
15642 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15643 TREE_TYPE (t));
15644 }
15645 /* We don't know the sign of `t', so be conservative and return false. */
15646 return false;
15647 }
15648
15649 /* Return true if a call to FNDECL with arguments ARG0 and ARG1 is known
15650 to be non-negative. If the return value is based on the assumption
15651 that signed overflow is undefined, set *STRICT_OVERFLOW_P to true;
15652 otherwise, don't change *STRICT_OVERFLOW_P. */
15653
15654 bool
15655 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15656 tree arg0, tree arg1, bool *strict_overflow_p)
15657 {
15658 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15659 switch (DECL_FUNCTION_CODE (fndecl))
15660 {
15661 CASE_FLT_FN (BUILT_IN_ACOS):
15662 CASE_FLT_FN (BUILT_IN_ACOSH):
15663 CASE_FLT_FN (BUILT_IN_CABS):
15664 CASE_FLT_FN (BUILT_IN_COSH):
15665 CASE_FLT_FN (BUILT_IN_ERFC):
15666 CASE_FLT_FN (BUILT_IN_EXP):
15667 CASE_FLT_FN (BUILT_IN_EXP10):
15668 CASE_FLT_FN (BUILT_IN_EXP2):
15669 CASE_FLT_FN (BUILT_IN_FABS):
15670 CASE_FLT_FN (BUILT_IN_FDIM):
15671 CASE_FLT_FN (BUILT_IN_HYPOT):
15672 CASE_FLT_FN (BUILT_IN_POW10):
15673 CASE_INT_FN (BUILT_IN_FFS):
15674 CASE_INT_FN (BUILT_IN_PARITY):
15675 CASE_INT_FN (BUILT_IN_POPCOUNT):
15676 CASE_INT_FN (BUILT_IN_CLZ):
15677 CASE_INT_FN (BUILT_IN_CLRSB):
15678 case BUILT_IN_BSWAP32:
15679 case BUILT_IN_BSWAP64:
15680 /* Always true. */
15681 return true;
15682
15683 CASE_FLT_FN (BUILT_IN_SQRT):
15684 /* sqrt(-0.0) is -0.0. */
15685 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15686 return true;
15687 return tree_expr_nonnegative_warnv_p (arg0,
15688 strict_overflow_p);
15689
15690 CASE_FLT_FN (BUILT_IN_ASINH):
15691 CASE_FLT_FN (BUILT_IN_ATAN):
15692 CASE_FLT_FN (BUILT_IN_ATANH):
15693 CASE_FLT_FN (BUILT_IN_CBRT):
15694 CASE_FLT_FN (BUILT_IN_CEIL):
15695 CASE_FLT_FN (BUILT_IN_ERF):
15696 CASE_FLT_FN (BUILT_IN_EXPM1):
15697 CASE_FLT_FN (BUILT_IN_FLOOR):
15698 CASE_FLT_FN (BUILT_IN_FMOD):
15699 CASE_FLT_FN (BUILT_IN_FREXP):
15700 CASE_FLT_FN (BUILT_IN_ICEIL):
15701 CASE_FLT_FN (BUILT_IN_IFLOOR):
15702 CASE_FLT_FN (BUILT_IN_IRINT):
15703 CASE_FLT_FN (BUILT_IN_IROUND):
15704 CASE_FLT_FN (BUILT_IN_LCEIL):
15705 CASE_FLT_FN (BUILT_IN_LDEXP):
15706 CASE_FLT_FN (BUILT_IN_LFLOOR):
15707 CASE_FLT_FN (BUILT_IN_LLCEIL):
15708 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15709 CASE_FLT_FN (BUILT_IN_LLRINT):
15710 CASE_FLT_FN (BUILT_IN_LLROUND):
15711 CASE_FLT_FN (BUILT_IN_LRINT):
15712 CASE_FLT_FN (BUILT_IN_LROUND):
15713 CASE_FLT_FN (BUILT_IN_MODF):
15714 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15715 CASE_FLT_FN (BUILT_IN_RINT):
15716 CASE_FLT_FN (BUILT_IN_ROUND):
15717 CASE_FLT_FN (BUILT_IN_SCALB):
15718 CASE_FLT_FN (BUILT_IN_SCALBLN):
15719 CASE_FLT_FN (BUILT_IN_SCALBN):
15720 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15721 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15722 CASE_FLT_FN (BUILT_IN_SINH):
15723 CASE_FLT_FN (BUILT_IN_TANH):
15724 CASE_FLT_FN (BUILT_IN_TRUNC):
15725 /* True if the 1st argument is nonnegative. */
15726 return tree_expr_nonnegative_warnv_p (arg0,
15727 strict_overflow_p);
15728
15729 CASE_FLT_FN (BUILT_IN_FMAX):
15730 /* True if the 1st OR 2nd arguments are nonnegative. */
15731 return (tree_expr_nonnegative_warnv_p (arg0,
15732 strict_overflow_p)
15733 || (tree_expr_nonnegative_warnv_p (arg1,
15734 strict_overflow_p)));
15735
15736 CASE_FLT_FN (BUILT_IN_FMIN):
15737 /* True if the 1st AND 2nd arguments are nonnegative. */
15738 return (tree_expr_nonnegative_warnv_p (arg0,
15739 strict_overflow_p)
15740 && (tree_expr_nonnegative_warnv_p (arg1,
15741 strict_overflow_p)));
15742
15743 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15744 /* True if the 2nd argument is nonnegative. */
15745 return tree_expr_nonnegative_warnv_p (arg1,
15746 strict_overflow_p);
15747
15748 CASE_FLT_FN (BUILT_IN_POWI):
15749 /* True if the 1st argument is nonnegative or the second
15750 argument is an even integer. */
15751 if (TREE_CODE (arg1) == INTEGER_CST
15752 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15753 return true;
15754 return tree_expr_nonnegative_warnv_p (arg0,
15755 strict_overflow_p);
15756
15757 CASE_FLT_FN (BUILT_IN_POW):
15758 /* True if the 1st argument is nonnegative or the second
15759 argument is an even integer valued real. */
15760 if (TREE_CODE (arg1) == REAL_CST)
15761 {
15762 REAL_VALUE_TYPE c;
15763 HOST_WIDE_INT n;
15764
15765 c = TREE_REAL_CST (arg1);
15766 n = real_to_integer (&c);
15767 if ((n & 1) == 0)
15768 {
15769 REAL_VALUE_TYPE cint;
15770 real_from_integer (&cint, VOIDmode, n,
15771 n < 0 ? -1 : 0, 0);
15772 if (real_identical (&c, &cint))
15773 return true;
15774 }
15775 }
15776 return tree_expr_nonnegative_warnv_p (arg0,
15777 strict_overflow_p);
15778
15779 default:
15780 break;
15781 }
15782 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15783 type);
15784 }
15785
15786 /* Return true if T is known to be non-negative. If the return
15787 value is based on the assumption that signed overflow is undefined,
15788 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15789 *STRICT_OVERFLOW_P. */
15790
15791 bool
15792 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15793 {
15794 enum tree_code code = TREE_CODE (t);
15795 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15796 return true;
15797
15798 switch (code)
15799 {
15800 case TARGET_EXPR:
15801 {
15802 tree temp = TARGET_EXPR_SLOT (t);
15803 t = TARGET_EXPR_INITIAL (t);
15804
15805 /* If the initializer is non-void, then it's a normal expression
15806 that will be assigned to the slot. */
15807 if (!VOID_TYPE_P (t))
15808 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15809
15810 /* Otherwise, the initializer sets the slot in some way. One common
15811 way is an assignment statement at the end of the initializer. */
15812 while (1)
15813 {
15814 if (TREE_CODE (t) == BIND_EXPR)
15815 t = expr_last (BIND_EXPR_BODY (t));
15816 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15817 || TREE_CODE (t) == TRY_CATCH_EXPR)
15818 t = expr_last (TREE_OPERAND (t, 0));
15819 else if (TREE_CODE (t) == STATEMENT_LIST)
15820 t = expr_last (t);
15821 else
15822 break;
15823 }
15824 if (TREE_CODE (t) == MODIFY_EXPR
15825 && TREE_OPERAND (t, 0) == temp)
15826 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15827 strict_overflow_p);
15828
15829 return false;
15830 }
15831
15832 case CALL_EXPR:
15833 {
15834 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15835 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15836
15837 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15838 get_callee_fndecl (t),
15839 arg0,
15840 arg1,
15841 strict_overflow_p);
15842 }
15843 case COMPOUND_EXPR:
15844 case MODIFY_EXPR:
15845 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15846 strict_overflow_p);
15847 case BIND_EXPR:
15848 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15849 strict_overflow_p);
15850 case SAVE_EXPR:
15851 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15852 strict_overflow_p);
15853
15854 default:
15855 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15856 TREE_TYPE (t));
15857 }
15858
15859 /* We don't know the sign of `t', so be conservative and return false. */
15860 return false;
15861 }
15862
15863 /* Return true if T is known to be non-negative. If the return
15864 value is based on the assumption that signed overflow is undefined,
15865 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15866 *STRICT_OVERFLOW_P. */
15867
15868 bool
15869 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15870 {
15871 enum tree_code code;
15872 if (t == error_mark_node)
15873 return false;
15874
15875 code = TREE_CODE (t);
15876 switch (TREE_CODE_CLASS (code))
15877 {
15878 case tcc_binary:
15879 case tcc_comparison:
15880 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15881 TREE_TYPE (t),
15882 TREE_OPERAND (t, 0),
15883 TREE_OPERAND (t, 1),
15884 strict_overflow_p);
15885
15886 case tcc_unary:
15887 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15888 TREE_TYPE (t),
15889 TREE_OPERAND (t, 0),
15890 strict_overflow_p);
15891
15892 case tcc_constant:
15893 case tcc_declaration:
15894 case tcc_reference:
15895 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15896
15897 default:
15898 break;
15899 }
15900
15901 switch (code)
15902 {
15903 case TRUTH_AND_EXPR:
15904 case TRUTH_OR_EXPR:
15905 case TRUTH_XOR_EXPR:
15906 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15907 TREE_TYPE (t),
15908 TREE_OPERAND (t, 0),
15909 TREE_OPERAND (t, 1),
15910 strict_overflow_p);
15911 case TRUTH_NOT_EXPR:
15912 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15913 TREE_TYPE (t),
15914 TREE_OPERAND (t, 0),
15915 strict_overflow_p);
15916
15917 case COND_EXPR:
15918 case CONSTRUCTOR:
15919 case OBJ_TYPE_REF:
15920 case ASSERT_EXPR:
15921 case ADDR_EXPR:
15922 case WITH_SIZE_EXPR:
15923 case SSA_NAME:
15924 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15925
15926 default:
15927 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15928 }
15929 }
15930
15931 /* Return true if `t' is known to be non-negative. Handle warnings
15932 about undefined signed overflow. */
15933
15934 bool
15935 tree_expr_nonnegative_p (tree t)
15936 {
15937 bool ret, strict_overflow_p;
15938
15939 strict_overflow_p = false;
15940 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15941 if (strict_overflow_p)
15942 fold_overflow_warning (("assuming signed overflow does not occur when "
15943 "determining that expression is always "
15944 "non-negative"),
15945 WARN_STRICT_OVERFLOW_MISC);
15946 return ret;
15947 }
15948
15949
15950 /* Return true when (CODE OP0) is known to be nonzero.
15951 For floating point we further ensure that T is not denormal.
15952 Similar logic is present in nonzero_address_p in rtlanal.c.
15953
15954 If the return value is based on the assumption that signed overflow
15955 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15956 change *STRICT_OVERFLOW_P. */
15957
15958 bool
15959 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15960 bool *strict_overflow_p)
15961 {
15962 switch (code)
15963 {
15964 case ABS_EXPR:
15965 return tree_expr_nonzero_warnv_p (op0,
15966 strict_overflow_p);
15967
15968 case NOP_EXPR:
15969 {
15970 tree inner_type = TREE_TYPE (op0);
15971 tree outer_type = type;
15972
15973 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15974 && tree_expr_nonzero_warnv_p (op0,
15975 strict_overflow_p));
15976 }
15977 break;
15978
15979 case NON_LVALUE_EXPR:
15980 return tree_expr_nonzero_warnv_p (op0,
15981 strict_overflow_p);
15982
15983 default:
15984 break;
15985 }
15986
15987 return false;
15988 }
15989
15990 /* Return true when (CODE OP0 OP1) is known to be nonzero.
15991 For floating point we further ensure that T is not denormal.
15992 Similar logic is present in nonzero_address_p in rtlanal.c.
15993
15994 If the return value is based on the assumption that signed overflow
15995 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15996 change *STRICT_OVERFLOW_P. */
15997
15998 bool
15999 tree_binary_nonzero_warnv_p (enum tree_code code,
16000 tree type,
16001 tree op0,
16002 tree op1, bool *strict_overflow_p)
16003 {
16004 bool sub_strict_overflow_p;
16005 switch (code)
16006 {
16007 case POINTER_PLUS_EXPR:
16008 case PLUS_EXPR:
16009 if (TYPE_OVERFLOW_UNDEFINED (type))
16010 {
16011 /* In the presence of negative values it is hard
16012 to say anything. */
16013 sub_strict_overflow_p = false;
16014 if (!tree_expr_nonnegative_warnv_p (op0,
16015 &sub_strict_overflow_p)
16016 || !tree_expr_nonnegative_warnv_p (op1,
16017 &sub_strict_overflow_p))
16018 return false;
16019 /* One of the operands must be positive and the other non-negative. */
16020 /* We don't set *STRICT_OVERFLOW_P here: even if this value
16021 overflows, on a two's-complement machine the sum of two
16022 nonnegative numbers, at least one nonzero, can never be zero. */
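/* Sketch: two nonnegative values in an n-bit signed type are each at
   most 2^(n-1) - 1, so their sum is at most 2^n - 2 and cannot wrap
   all the way around to zero; it is zero only when both operands are
   zero, which the nonzero test below rules out.  */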
16023 return (tree_expr_nonzero_warnv_p (op0,
16024 strict_overflow_p)
16025 || tree_expr_nonzero_warnv_p (op1,
16026 strict_overflow_p));
16027 }
16028 break;
16029
16030 case MULT_EXPR:
16031 if (TYPE_OVERFLOW_UNDEFINED (type))
16032 {
16033 if (tree_expr_nonzero_warnv_p (op0,
16034 strict_overflow_p)
16035 && tree_expr_nonzero_warnv_p (op1,
16036 strict_overflow_p))
16037 {
16038 *strict_overflow_p = true;
16039 return true;
16040 }
16041 }
16042 break;
16043
16044 case MIN_EXPR:
16045 sub_strict_overflow_p = false;
16046 if (tree_expr_nonzero_warnv_p (op0,
16047 &sub_strict_overflow_p)
16048 && tree_expr_nonzero_warnv_p (op1,
16049 &sub_strict_overflow_p))
16050 {
16051 if (sub_strict_overflow_p)
16052 *strict_overflow_p = true;
16053 }
16054 break;
16055
16056 case MAX_EXPR:
16057 sub_strict_overflow_p = false;
16058 if (tree_expr_nonzero_warnv_p (op0,
16059 &sub_strict_overflow_p))
16060 {
16061 if (sub_strict_overflow_p)
16062 *strict_overflow_p = true;
16063
16064 /* When both operands are nonzero, then MAX must be too. */
16065 if (tree_expr_nonzero_warnv_p (op1,
16066 strict_overflow_p))
16067 return true;
16068
16069 /* MAX where operand 0 is positive is positive. */
16070 return tree_expr_nonnegative_warnv_p (op0,
16071 strict_overflow_p);
16072 }
16073 /* MAX where operand 1 is positive is positive. */
16074 else if (tree_expr_nonzero_warnv_p (op1,
16075 &sub_strict_overflow_p)
16076 && tree_expr_nonnegative_warnv_p (op1,
16077 &sub_strict_overflow_p))
16078 {
16079 if (sub_strict_overflow_p)
16080 *strict_overflow_p = true;
16081 return true;
16082 }
16083 break;
16084
16085 case BIT_IOR_EXPR:
16086 return (tree_expr_nonzero_warnv_p (op1,
16087 strict_overflow_p)
16088 || tree_expr_nonzero_warnv_p (op0,
16089 strict_overflow_p));
16090
16091 default:
16092 break;
16093 }
16094
16095 return false;
16096 }
16097
16098 /* Return true when T is known to be nonzero.
16099 For floating point we further ensure that T is not denormal.
16100 Similar logic is present in nonzero_address_p in rtlanal.c.
16101
16102 If the return value is based on the assumption that signed overflow
16103 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16104 change *STRICT_OVERFLOW_P. */
16105
16106 bool
16107 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16108 {
16109 bool sub_strict_overflow_p;
16110 switch (TREE_CODE (t))
16111 {
16112 case INTEGER_CST:
16113 return !integer_zerop (t);
16114
16115 case ADDR_EXPR:
16116 {
16117 tree base = TREE_OPERAND (t, 0);
16118 if (!DECL_P (base))
16119 base = get_base_address (base);
16120
16121 if (!base)
16122 return false;
16123
16124 /* Weak declarations may link to NULL. Other things may also be NULL
16125 so protect with -fdelete-null-pointer-checks; but not variables
16126 allocated on the stack. */
16127 if (DECL_P (base)
16128 && (flag_delete_null_pointer_checks
16129 || (DECL_CONTEXT (base)
16130 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16131 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16132 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16133
16134 /* Constants are never weak. */
16135 if (CONSTANT_CLASS_P (base))
16136 return true;
16137
16138 return false;
16139 }
16140
16141 case COND_EXPR:
16142 sub_strict_overflow_p = false;
16143 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16144 &sub_strict_overflow_p)
16145 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16146 &sub_strict_overflow_p))
16147 {
16148 if (sub_strict_overflow_p)
16149 *strict_overflow_p = true;
16150 return true;
16151 }
16152 break;
16153
16154 default:
16155 break;
16156 }
16157 return false;
16158 }
16159
16160 /* Return true when T is known to be nonzero.
16161 For floating point we further ensure that T is not denormal.
16162 Similar logic is present in nonzero_address_p in rtlanal.c.
16163
16164 If the return value is based on the assumption that signed overflow
16165 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16166 change *STRICT_OVERFLOW_P. */
16167
16168 bool
16169 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16170 {
16171 tree type = TREE_TYPE (t);
16172 enum tree_code code;
16173
16174 /* Doing something useful for floating point would need more work. */
16175 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
16176 return false;
16177
16178 code = TREE_CODE (t);
16179 switch (TREE_CODE_CLASS (code))
16180 {
16181 case tcc_unary:
16182 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16183 strict_overflow_p);
16184 case tcc_binary:
16185 case tcc_comparison:
16186 return tree_binary_nonzero_warnv_p (code, type,
16187 TREE_OPERAND (t, 0),
16188 TREE_OPERAND (t, 1),
16189 strict_overflow_p);
16190 case tcc_constant:
16191 case tcc_declaration:
16192 case tcc_reference:
16193 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16194
16195 default:
16196 break;
16197 }
16198
16199 switch (code)
16200 {
16201 case TRUTH_NOT_EXPR:
16202 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16203 strict_overflow_p);
16204
16205 case TRUTH_AND_EXPR:
16206 case TRUTH_OR_EXPR:
16207 case TRUTH_XOR_EXPR:
16208 return tree_binary_nonzero_warnv_p (code, type,
16209 TREE_OPERAND (t, 0),
16210 TREE_OPERAND (t, 1),
16211 strict_overflow_p);
16212
16213 case COND_EXPR:
16214 case CONSTRUCTOR:
16215 case OBJ_TYPE_REF:
16216 case ASSERT_EXPR:
16217 case ADDR_EXPR:
16218 case WITH_SIZE_EXPR:
16219 case SSA_NAME:
16220 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16221
16222 case COMPOUND_EXPR:
16223 case MODIFY_EXPR:
16224 case BIND_EXPR:
16225 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16226 strict_overflow_p);
16227
16228 case SAVE_EXPR:
16229 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16230 strict_overflow_p);
16231
16232 case CALL_EXPR:
16233 {
16234 tree fndecl = get_callee_fndecl (t);
16235 if (!fndecl) return false;
16236 if (flag_delete_null_pointer_checks && !flag_check_new
16237 && DECL_IS_OPERATOR_NEW (fndecl)
16238 && !TREE_NOTHROW (fndecl))
16239 return true;
16240 if (flag_delete_null_pointer_checks
16241 && lookup_attribute ("returns_nonnull",
16242 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
16243 return true;
16244 return alloca_call_p (t);
16245 }
16246
16247 default:
16248 break;
16249 }
16250 return false;
16251 }
16252
16253 /* Return true when T is known to be nonzero.
16254 Handle warnings about undefined signed overflow. */
16255
16256 bool
16257 tree_expr_nonzero_p (tree t)
16258 {
16259 bool ret, strict_overflow_p;
16260
16261 strict_overflow_p = false;
16262 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16263 if (strict_overflow_p)
16264 fold_overflow_warning (("assuming signed overflow does not occur when "
16265 "determining that expression is always "
16266 "non-zero"),
16267 WARN_STRICT_OVERFLOW_MISC);
16268 return ret;
16269 }
16270
16271 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16272 attempt to fold the expression to a constant without modifying TYPE,
16273 OP0 or OP1.
16274
16275 If the expression could be simplified to a constant, then return
16276 the constant. If the expression would not be simplified to a
16277 constant, then return NULL_TREE. */
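/* Illustrative use (hypothetical caller):

	tree five
	  = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				     build_int_cst (integer_type_node, 2),
				     build_int_cst (integer_type_node, 3));

   yields the INTEGER_CST 5; by contrast, an operation that merely
   simplifies to a non-constant (e.g. VAR + 0 -> VAR) returns
   NULL_TREE.  */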
16278
16279 tree
16280 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16281 {
16282 tree tem = fold_binary (code, type, op0, op1);
16283 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16284 }
16285
16286 /* Given the components of a unary expression CODE, TYPE and OP0,
16287 attempt to fold the expression to a constant without modifying
16288 TYPE or OP0.
16289
16290 If the expression could be simplified to a constant, then return
16291 the constant. If the expression would not be simplified to a
16292 constant, then return NULL_TREE. */
16293
16294 tree
16295 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16296 {
16297 tree tem = fold_unary (code, type, op0);
16298 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16299 }
16300
16301 /* If EXP represents referencing an element in a constant string
16302 (either via pointer arithmetic or array indexing), return the
16303 tree representing the value accessed, otherwise return NULL. */
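/* For example (illustrative): given EXP representing "abc"[1], this
   returns the INTEGER_CST 98 ('b'), provided the index is a constant
   within the string bounds and the element type has a single-byte
   integer mode, as checked below.  */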
16304
16305 tree
16306 fold_read_from_constant_string (tree exp)
16307 {
16308 if ((TREE_CODE (exp) == INDIRECT_REF
16309 || TREE_CODE (exp) == ARRAY_REF)
16310 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16311 {
16312 tree exp1 = TREE_OPERAND (exp, 0);
16313 tree index;
16314 tree string;
16315 location_t loc = EXPR_LOCATION (exp);
16316
16317 if (TREE_CODE (exp) == INDIRECT_REF)
16318 string = string_constant (exp1, &index);
16319 else
16320 {
16321 tree low_bound = array_ref_low_bound (exp);
16322 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16323
16324 /* Optimize the special-case of a zero lower bound.
16325
16326 We convert the low_bound to sizetype to avoid some problems
16327 with constant folding. (E.g. suppose the lower bound is 1,
16328 and its mode is QI. Without the conversion,l (ARRAY
16329 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16330 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16331 if (! integer_zerop (low_bound))
16332 index = size_diffop_loc (loc, index,
16333 fold_convert_loc (loc, sizetype, low_bound));
16334
16335 string = exp1;
16336 }
16337
16338 if (string
16339 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16340 && TREE_CODE (string) == STRING_CST
16341 && TREE_CODE (index) == INTEGER_CST
16342 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16343 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16344 == MODE_INT)
16345 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16346 return build_int_cst_type (TREE_TYPE (exp),
16347 (TREE_STRING_POINTER (string)
16348 [TREE_INT_CST_LOW (index)]));
16349 }
16350 return NULL;
16351 }
16352
16353 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16354 an integer constant, real, or fixed-point constant.
16355
16356 TYPE is the type of the result. */
16357
16358 static tree
16359 fold_negate_const (tree arg0, tree type)
16360 {
16361 tree t = NULL_TREE;
16362
16363 switch (TREE_CODE (arg0))
16364 {
16365 case INTEGER_CST:
16366 {
16367 double_int val = tree_to_double_int (arg0);
16368 bool overflow;
16369 val = val.neg_with_overflow (&overflow);
16370 t = force_fit_type_double (type, val, 1,
16371 (overflow | TREE_OVERFLOW (arg0))
16372 && !TYPE_UNSIGNED (type));
16373 break;
16374 }
16375
16376 case REAL_CST:
16377 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16378 break;
16379
16380 case FIXED_CST:
16381 {
16382 FIXED_VALUE_TYPE f;
16383 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16384 &(TREE_FIXED_CST (arg0)), NULL,
16385 TYPE_SATURATING (type));
16386 t = build_fixed (type, f);
16387 /* Propagate overflow flags. */
16388 if (overflow_p | TREE_OVERFLOW (arg0))
16389 TREE_OVERFLOW (t) = 1;
16390 break;
16391 }
16392
16393 default:
16394 gcc_unreachable ();
16395 }
16396
16397 return t;
16398 }
16399
16400 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16401 an integer constant or real constant.
16402
16403 TYPE is the type of the result. */
16404
16405 tree
16406 fold_abs_const (tree arg0, tree type)
16407 {
16408 tree t = NULL_TREE;
16409
16410 switch (TREE_CODE (arg0))
16411 {
16412 case INTEGER_CST:
16413 {
16414 double_int val = tree_to_double_int (arg0);
16415
16416 /* If the value is unsigned or non-negative, then the absolute value
16417 is the same as the ordinary value. */
16418 if (TYPE_UNSIGNED (type)
16419 || !val.is_negative ())
16420 t = arg0;
16421
16422 /* If the value is negative, then the absolute value is
16423 its negation. */
16424 else
16425 {
16426 bool overflow;
16427 val = val.neg_with_overflow (&overflow);
16428 t = force_fit_type_double (type, val, -1,
16429 overflow | TREE_OVERFLOW (arg0));
16430 }
16431 }
16432 break;
16433
16434 case REAL_CST:
16435 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16436 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16437 else
16438 t = arg0;
16439 break;
16440
16441 default:
16442 gcc_unreachable ();
16443 }
16444
16445 return t;
16446 }
16447
16448 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16449 constant. TYPE is the type of the result. */
16450
16451 static tree
16452 fold_not_const (const_tree arg0, tree type)
16453 {
16454 double_int val;
16455
16456 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16457
16458 val = ~tree_to_double_int (arg0);
16459 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16460 }
16461
16462 /* Given CODE, a relational operator, the target type, TYPE and two
16463 constant operands OP0 and OP1, return the result of the
16464 relational operation. If the result is not a compile time
16465 constant, then return NULL_TREE. */
16466
16467 static tree
16468 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16469 {
16470 int result, invert;
16471
16472 /* From here on, the only cases we handle are when the result is
16473 known to be a constant. */
16474
16475 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16476 {
16477 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16478 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16479
16480 /* Handle the cases where either operand is a NaN. */
16481 if (real_isnan (c0) || real_isnan (c1))
16482 {
16483 switch (code)
16484 {
16485 case EQ_EXPR:
16486 case ORDERED_EXPR:
16487 result = 0;
16488 break;
16489
16490 case NE_EXPR:
16491 case UNORDERED_EXPR:
16492 case UNLT_EXPR:
16493 case UNLE_EXPR:
16494 case UNGT_EXPR:
16495 case UNGE_EXPR:
16496 case UNEQ_EXPR:
16497 result = 1;
16498 break;
16499
16500 case LT_EXPR:
16501 case LE_EXPR:
16502 case GT_EXPR:
16503 case GE_EXPR:
16504 case LTGT_EXPR:
16505 if (flag_trapping_math)
16506 return NULL_TREE;
16507 result = 0;
16508 break;
16509
16510 default:
16511 gcc_unreachable ();
16512 }
16513
16514 return constant_boolean_node (result, type);
16515 }
16516
16517 return constant_boolean_node (real_compare (code, c0, c1), type);
16518 }
16519
16520 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16521 {
16522 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16523 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16524 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16525 }
16526
16527 /* Handle equality/inequality of complex constants. */
16528 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16529 {
16530 tree rcond = fold_relational_const (code, type,
16531 TREE_REALPART (op0),
16532 TREE_REALPART (op1));
16533 tree icond = fold_relational_const (code, type,
16534 TREE_IMAGPART (op0),
16535 TREE_IMAGPART (op1));
16536 if (code == EQ_EXPR)
16537 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16538 else if (code == NE_EXPR)
16539 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16540 else
16541 return NULL_TREE;
16542 }
16543
16544 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16545 {
16546 unsigned count = VECTOR_CST_NELTS (op0);
16547 tree *elts = XALLOCAVEC (tree, count);
16548 gcc_assert (VECTOR_CST_NELTS (op1) == count
16549 && TYPE_VECTOR_SUBPARTS (type) == count);
16550
16551 for (unsigned i = 0; i < count; i++)
16552 {
16553 tree elem_type = TREE_TYPE (type);
16554 tree elem0 = VECTOR_CST_ELT (op0, i);
16555 tree elem1 = VECTOR_CST_ELT (op1, i);
16556
16557 tree tem = fold_relational_const (code, elem_type,
16558 elem0, elem1);
16559
16560 if (tem == NULL_TREE)
16561 return NULL_TREE;
16562
16563 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16564 }
16565
16566 return build_vector (type, elts);
16567 }
16568
16569 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16570
16571 To compute GT, swap the arguments and do LT.
16572 To compute GE, do LT and invert the result.
16573 To compute LE, swap the arguments, do LT and invert the result.
16574 To compute NE, do EQ and invert the result.
16575
16576 Therefore, the code below must handle only EQ and LT. */
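/* E.g. to evaluate 3 >= 5 below: GE becomes LT with the result
   inverted; 3 < 5 yields 1, and inverting gives 0.  */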
16577
16578 if (code == LE_EXPR || code == GT_EXPR)
16579 {
16580 tree tem = op0;
16581 op0 = op1;
16582 op1 = tem;
16583 code = swap_tree_comparison (code);
16584 }
16585
16586 /* Note that it is safe to invert for real values here because we
16587 have already handled the one case where it matters. */
16588
16589 invert = 0;
16590 if (code == NE_EXPR || code == GE_EXPR)
16591 {
16592 invert = 1;
16593 code = invert_tree_comparison (code, false);
16594 }
16595
16596 /* Compute a result for LT or EQ if args permit;
16597 otherwise return NULL_TREE. */
16598 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16599 {
16600 if (code == EQ_EXPR)
16601 result = tree_int_cst_equal (op0, op1);
16602 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16603 result = INT_CST_LT_UNSIGNED (op0, op1);
16604 else
16605 result = INT_CST_LT (op0, op1);
16606 }
16607 else
16608 return NULL_TREE;
16609
16610 if (invert)
16611 result ^= 1;
16612 return constant_boolean_node (result, type);
16613 }
16614
16615 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16616 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16617 itself. */
16618
16619 tree
16620 fold_build_cleanup_point_expr (tree type, tree expr)
16621 {
16622 /* If the expression does not have side effects then we don't have to wrap
16623 it with a cleanup point expression. */
16624 if (!TREE_SIDE_EFFECTS (expr))
16625 return expr;
16626
16627 /* If the expression is a return, check whether the expression inside
16628 the return, or the right hand side of the modify expression inside
16629 the return, has no side effects. If either has none, we don't need to
16630 wrap the expression in a cleanup point expression. Note we don't check
16631 the left hand side of the modify because it should always be the return decl. */
16632 if (TREE_CODE (expr) == RETURN_EXPR)
16633 {
16634 tree op = TREE_OPERAND (expr, 0);
16635 if (!op || !TREE_SIDE_EFFECTS (op))
16636 return expr;
16637 op = TREE_OPERAND (op, 1);
16638 if (!TREE_SIDE_EFFECTS (op))
16639 return expr;
16640 }
16641
16642 return build1 (CLEANUP_POINT_EXPR, type, expr);
16643 }
16644
16645 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16646 of an indirection through OP0, or NULL_TREE if no simplification is
16647 possible. */
16648
16649 tree
16650 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16651 {
16652 tree sub = op0;
16653 tree subtype;
16654
16655 STRIP_NOPS (sub);
16656 subtype = TREE_TYPE (sub);
16657 if (!POINTER_TYPE_P (subtype))
16658 return NULL_TREE;
16659
16660 if (TREE_CODE (sub) == ADDR_EXPR)
16661 {
16662 tree op = TREE_OPERAND (sub, 0);
16663 tree optype = TREE_TYPE (op);
16664 /* *&CONST_DECL -> to the value of the const decl. */
16665 if (TREE_CODE (op) == CONST_DECL)
16666 return DECL_INITIAL (op);
16667 /* *&p => p; make sure to handle *&"str"[cst] here. */
16668 if (type == optype)
16669 {
16670 tree fop = fold_read_from_constant_string (op);
16671 if (fop)
16672 return fop;
16673 else
16674 return op;
16675 }
16676 /* *(foo *)&fooarray => fooarray[0] */
16677 else if (TREE_CODE (optype) == ARRAY_TYPE
16678 && type == TREE_TYPE (optype)
16679 && (!in_gimple_form
16680 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16681 {
16682 tree type_domain = TYPE_DOMAIN (optype);
16683 tree min_val = size_zero_node;
16684 if (type_domain && TYPE_MIN_VALUE (type_domain))
16685 min_val = TYPE_MIN_VALUE (type_domain);
16686 if (in_gimple_form
16687 && TREE_CODE (min_val) != INTEGER_CST)
16688 return NULL_TREE;
16689 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16690 NULL_TREE, NULL_TREE);
16691 }
16692 /* *(foo *)&complexfoo => __real__ complexfoo */
16693 else if (TREE_CODE (optype) == COMPLEX_TYPE
16694 && type == TREE_TYPE (optype))
16695 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16696 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16697 else if (TREE_CODE (optype) == VECTOR_TYPE
16698 && type == TREE_TYPE (optype))
16699 {
16700 tree part_width = TYPE_SIZE (type);
16701 tree index = bitsize_int (0);
16702 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16703 }
16704 }
16705
16706 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16707 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16708 {
16709 tree op00 = TREE_OPERAND (sub, 0);
16710 tree op01 = TREE_OPERAND (sub, 1);
16711
16712 STRIP_NOPS (op00);
16713 if (TREE_CODE (op00) == ADDR_EXPR)
16714 {
16715 tree op00type;
16716 op00 = TREE_OPERAND (op00, 0);
16717 op00type = TREE_TYPE (op00);
16718
16719 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16720 if (TREE_CODE (op00type) == VECTOR_TYPE
16721 && type == TREE_TYPE (op00type))
16722 {
16723 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16724 tree part_width = TYPE_SIZE (type);
16725 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16726 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16727 tree index = bitsize_int (indexi);
16728
16729 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16730 return fold_build3_loc (loc,
16731 BIT_FIELD_REF, type, op00,
16732 part_width, index);
16733
16734 }
16735 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16736 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16737 && type == TREE_TYPE (op00type))
16738 {
16739 tree size = TYPE_SIZE_UNIT (type);
16740 if (tree_int_cst_equal (size, op01))
16741 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16742 }
16743 /* ((foo *)&fooarray)[1] => fooarray[1] */
16744 else if (TREE_CODE (op00type) == ARRAY_TYPE
16745 && type == TREE_TYPE (op00type))
16746 {
16747 tree type_domain = TYPE_DOMAIN (op00type);
16748 tree min_val = size_zero_node;
16749 if (type_domain && TYPE_MIN_VALUE (type_domain))
16750 min_val = TYPE_MIN_VALUE (type_domain);
16751 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16752 TYPE_SIZE_UNIT (type));
16753 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16754 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16755 NULL_TREE, NULL_TREE);
16756 }
16757 }
16758 }
16759
16760 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16761 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16762 && type == TREE_TYPE (TREE_TYPE (subtype))
16763 && (!in_gimple_form
16764 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16765 {
16766 tree type_domain;
16767 tree min_val = size_zero_node;
16768 sub = build_fold_indirect_ref_loc (loc, sub);
16769 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16770 if (type_domain && TYPE_MIN_VALUE (type_domain))
16771 min_val = TYPE_MIN_VALUE (type_domain);
16772 if (in_gimple_form
16773 && TREE_CODE (min_val) != INTEGER_CST)
16774 return NULL_TREE;
16775 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16776 NULL_TREE);
16777 }
16778
16779 return NULL_TREE;
16780 }
16781
16782 /* Builds an expression for an indirection through T, simplifying some
16783 cases. */
16784
16785 tree
16786 build_fold_indirect_ref_loc (location_t loc, tree t)
16787 {
16788 tree type = TREE_TYPE (TREE_TYPE (t));
16789 tree sub = fold_indirect_ref_1 (loc, type, t);
16790
16791 if (sub)
16792 return sub;
16793
16794 return build1_loc (loc, INDIRECT_REF, type, t);
16795 }
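/* For example, taking an indirection through an address collapses
   instead of producing *&x: build_fold_indirect_ref_loc (loc,
   build_fold_addr_expr_loc (loc, x)) yields X itself when the types
   agree, because fold_indirect_ref_1 folds the ADDR_EXPR case; a
   plain INDIRECT_REF is built only when no simplification applies.  */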
16796
16797 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16798
16799 tree
16800 fold_indirect_ref_loc (location_t loc, tree t)
16801 {
16802 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16803
16804 if (sub)
16805 return sub;
16806 else
16807 return t;
16808 }
16809
16810 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16811 whose result is ignored. The type of the returned tree need not be
16812 the same as the original expression. */
16813
16814 tree
16815 fold_ignored_result (tree t)
16816 {
16817 if (!TREE_SIDE_EFFECTS (t))
16818 return integer_zero_node;
16819
16820 for (;;)
16821 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16822 {
16823 case tcc_unary:
16824 t = TREE_OPERAND (t, 0);
16825 break;
16826
16827 case tcc_binary:
16828 case tcc_comparison:
16829 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16830 t = TREE_OPERAND (t, 0);
16831 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16832 t = TREE_OPERAND (t, 1);
16833 else
16834 return t;
16835 break;
16836
16837 case tcc_expression:
16838 switch (TREE_CODE (t))
16839 {
16840 case COMPOUND_EXPR:
16841 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16842 return t;
16843 t = TREE_OPERAND (t, 0);
16844 break;
16845
16846 case COND_EXPR:
16847 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16848 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16849 return t;
16850 t = TREE_OPERAND (t, 0);
16851 break;
16852
16853 default:
16854 return t;
16855 }
16856 break;
16857
16858 default:
16859 return t;
16860 }
16861 }
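/* Some illustrative cases, written as C source for brevity:
   fold_ignored_result reduces "-(x = f ())" to "x = f ()" via the
   tcc_unary case; "(c++, a + b)" to "c++", because the second
   COMPOUND_EXPR operand is side-effect free; and it returns
   "p ? q++ : r" unchanged, because an arm of the COND_EXPR has side
   effects.  */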
16862
16863 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16864 This can only be applied to objects of a sizetype. */
16865
16866 tree
16867 round_up_loc (location_t loc, tree value, int divisor)
16868 {
16869 tree div = NULL_TREE;
16870
16871 gcc_assert (divisor > 0);
16872 if (divisor == 1)
16873 return value;
16874
16875   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16876      have to do anything.  Only do this when we are not given a
16877      constant, because for a constant the check is more expensive than
16878      simply performing the rounding.  */
16879 if (TREE_CODE (value) != INTEGER_CST)
16880 {
16881 div = build_int_cst (TREE_TYPE (value), divisor);
16882
16883 if (multiple_of_p (TREE_TYPE (value), value, div))
16884 return value;
16885 }
16886
16887 /* If divisor is a power of two, simplify this to bit manipulation. */
16888 if (divisor == (divisor & -divisor))
16889 {
16890 if (TREE_CODE (value) == INTEGER_CST)
16891 {
16892 double_int val = tree_to_double_int (value);
16893 bool overflow_p;
16894
16895 if ((val.low & (divisor - 1)) == 0)
16896 return value;
16897
16898 overflow_p = TREE_OVERFLOW (value);
16899 val.low &= ~(divisor - 1);
16900 val.low += divisor;
16901 if (val.low == 0)
16902 {
16903 val.high++;
16904 if (val.high == 0)
16905 overflow_p = true;
16906 }
16907
16908 return force_fit_type_double (TREE_TYPE (value), val,
16909 -1, overflow_p);
16910 }
16911 else
16912 {
16913 tree t;
16914
16915 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16916 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16917 t = build_int_cst (TREE_TYPE (value), -divisor);
16918 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16919 }
16920 }
16921 else
16922 {
16923 if (!div)
16924 div = build_int_cst (TREE_TYPE (value), divisor);
16925 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16926 value = size_binop_loc (loc, MULT_EXPR, value, div);
16927 }
16928
16929 return value;
16930 }
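/* A minimal sketch, in plain C, of the power-of-two rounding used
   above (the helper name is illustrative, not a GCC interface).  The
   non-constant branch builds exactly this expression out of trees; the
   INTEGER_CST branch computes the equivalent
   (value & ~(divisor - 1)) + divisor for non-multiples while also
   tracking the carry into the high double_int word and TREE_OVERFLOW.  */
#if 0
static unsigned HOST_WIDE_INT
round_up_pow2_sketch (unsigned HOST_WIDE_INT value,
		      unsigned HOST_WIDE_INT divisor)
{
  /* DIVISOR must be a power of two.  Adding DIVISOR - 1 carries VALUE
     past the next multiple unless it already is one; ANDing with
     -DIVISOR (all ones above the low bits) truncates back down to
     that multiple.  E.g. (13 + 7) & ~7 == 16 and (16 + 7) & ~7 == 16.  */
  return (value + divisor - 1) & -divisor;
}
#endif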
16931
16932 /* Likewise, but round down. */
16933
16934 tree
16935 round_down_loc (location_t loc, tree value, int divisor)
16936 {
16937 tree div = NULL_TREE;
16938
16939 gcc_assert (divisor > 0);
16940 if (divisor == 1)
16941 return value;
16942
16943   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16944      have to do anything.  Only do this when we are not given a
16945      constant, because for a constant the check is more expensive than
16946      simply performing the rounding.  */
16947 if (TREE_CODE (value) != INTEGER_CST)
16948 {
16949 div = build_int_cst (TREE_TYPE (value), divisor);
16950
16951 if (multiple_of_p (TREE_TYPE (value), value, div))
16952 return value;
16953 }
16954
16955 /* If divisor is a power of two, simplify this to bit manipulation. */
16956 if (divisor == (divisor & -divisor))
16957 {
16958 tree t;
16959
16960 t = build_int_cst (TREE_TYPE (value), -divisor);
16961 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16962 }
16963 else
16964 {
16965 if (!div)
16966 div = build_int_cst (TREE_TYPE (value), divisor);
16967 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16968 value = size_binop_loc (loc, MULT_EXPR, value, div);
16969 }
16970
16971 return value;
16972 }
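/* Likewise a sketch for rounding down (illustrative only): masking
   with -DIVISOR simply clears the low bits, e.g. 13 & -8 == 8.  No
   addition is needed and no overflow is possible, which is why
   round_down_loc has no double_int carry handling.  */
#if 0
static unsigned HOST_WIDE_INT
round_down_pow2_sketch (unsigned HOST_WIDE_INT value,
			unsigned HOST_WIDE_INT divisor)
{
  return value & -divisor;
}
#endif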
16973
16974 /* Returns a pointer to the base of the object addressed by EXP and
16975    extracts information about the offset of the access, storing it
16976    in *PBITPOS and *POFFSET.  */
16977
16978 static tree
16979 split_address_to_core_and_offset (tree exp,
16980 HOST_WIDE_INT *pbitpos, tree *poffset)
16981 {
16982 tree core;
16983 enum machine_mode mode;
16984 int unsignedp, volatilep;
16985 HOST_WIDE_INT bitsize;
16986 location_t loc = EXPR_LOCATION (exp);
16987
16988 if (TREE_CODE (exp) == ADDR_EXPR)
16989 {
16990 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16991 poffset, &mode, &unsignedp, &volatilep,
16992 false);
16993 core = build_fold_addr_expr_loc (loc, core);
16994 }
16995 else
16996 {
16997 core = exp;
16998 *pbitpos = 0;
16999 *poffset = NULL_TREE;
17000 }
17001
17002 return core;
17003 }
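/* For example, for EXP == &a.f[i], get_inner_reference decomposes the
   access so that the core returned here is &a, *PBITPOS holds the
   constant part of the offset in bits, and *POFFSET holds the
   variable part in bytes (i scaled by the element size), or NULL_TREE
   if there is none.  */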
17004
17005 /* Returns true if the addresses of E1 and E2 differ by a constant
17006    amount, false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
17007
17008 bool
17009 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
17010 {
17011 tree core1, core2;
17012 HOST_WIDE_INT bitpos1, bitpos2;
17013 tree toffset1, toffset2, tdiff, type;
17014
17015 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
17016 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
17017
17018 if (bitpos1 % BITS_PER_UNIT != 0
17019 || bitpos2 % BITS_PER_UNIT != 0
17020 || !operand_equal_p (core1, core2, 0))
17021 return false;
17022
17023 if (toffset1 && toffset2)
17024 {
17025 type = TREE_TYPE (toffset1);
17026 if (type != TREE_TYPE (toffset2))
17027 toffset2 = fold_convert (type, toffset2);
17028
17029 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
17030 if (!cst_and_fits_in_hwi (tdiff))
17031 return false;
17032
17033 *diff = int_cst_value (tdiff);
17034 }
17035 else if (toffset1 || toffset2)
17036 {
17037 /* If only one of the offsets is non-constant, the difference cannot
17038 be a constant. */
17039 return false;
17040 }
17041 else
17042 *diff = 0;
17043
17044 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
17045 return true;
17046 }
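/* A minimal usage sketch; the helper below is hypothetical, not a GCC
   API.  For e1 == &a[4] and e2 == &a[1] both cores are &a, the offsets
   fold to constants, and *DIFF becomes 3 * sizeof (a[0]) -- a byte
   difference, since the bit positions are divided by BITS_PER_UNIT.  */
#if 0
static bool
apart_by_bytes_p (tree e1, tree e2, HOST_WIDE_INT nbytes)
{
  HOST_WIDE_INT diff;
  if (!ptr_difference_const (e1, e2, &diff))
    return false;		/* Not a compile-time constant.  */
  return diff == nbytes;
}
#endif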
17047
17048 /* Simplify the floating-point expression EXP when the sign of the
17049    result is not significant.  Return NULL_TREE if no simplification
17050    is possible.  */
17051
17052 tree
17053 fold_strip_sign_ops (tree exp)
17054 {
17055 tree arg0, arg1;
17056 location_t loc = EXPR_LOCATION (exp);
17057
17058 switch (TREE_CODE (exp))
17059 {
17060 case ABS_EXPR:
17061 case NEGATE_EXPR:
17062 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17063 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
17064
17065 case MULT_EXPR:
17066 case RDIV_EXPR:
17067 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
17068 return NULL_TREE;
17069 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
17070 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17071 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
17072 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
17073 arg0 ? arg0 : TREE_OPERAND (exp, 0),
17074 arg1 ? arg1 : TREE_OPERAND (exp, 1));
17075 break;
17076
17077 case COMPOUND_EXPR:
17078 arg0 = TREE_OPERAND (exp, 0);
17079 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17080 if (arg1)
17081 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
17082 break;
17083
17084 case COND_EXPR:
17085 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
17086 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
17087 if (arg0 || arg1)
17088 return fold_build3_loc (loc,
17089 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
17090 arg0 ? arg0 : TREE_OPERAND (exp, 1),
17091 arg1 ? arg1 : TREE_OPERAND (exp, 2));
17092 break;
17093
17094 case CALL_EXPR:
17095 {
17096 const enum built_in_function fcode = builtin_mathfn_code (exp);
17097 switch (fcode)
17098 {
17099 CASE_FLT_FN (BUILT_IN_COPYSIGN):
17100 	      /* Strip the copysign call; return its first argument, preserving any side effects of the second.  */
17101 arg0 = CALL_EXPR_ARG (exp, 0);
17102 arg1 = CALL_EXPR_ARG (exp, 1);
17103 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
17104
17105 default:
17106 /* Strip sign ops from the argument of "odd" math functions. */
17107 if (negate_mathfn_p (fcode))
17108 {
17109 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
17110 if (arg0)
17111 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
17112 }
17113 break;
17114 }
17115 }
17116 break;
17117
17118 default:
17119 break;
17120 }
17121 return NULL_TREE;
17122 }
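/* Some illustrative simplifications, all valid only because the
   caller has declared the sign of the result insignificant: "-x * y"
   becomes "x * y" (unless sign-dependent rounding must be honored),
   "fabs (x) * y" becomes "x * y", "copysign (x, y)" becomes "x", and
   "sin (-x)" becomes "sin (x)" because sin is odd (negate_mathfn_p).  */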