Implement -fsanitize=signed-integer-overflow.
[gcc.git] gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
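
/* As a simple illustration of the entry points above: handed the tree
   for the expression "1 + 2", fold returns the INTEGER_CST node for 3,
   and handed "x + 0" in an integral type it returns the tree for "x".  */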

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
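
/* To illustrate the encoding: the LT, EQ and GT outcomes occupy one bit
   each (1, 2 and 4) and UNORD occupies bit 8, so the code of a
   disjunction is the bitwise OR of the codes of its parts, e.g.
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ, and
   COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD.  */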

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carry out the division
   of type CODE and return the quotient.
   Otherwise return NULL_TREE.  */
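
/* For instance, with CODE == TRUNC_DIV_EXPR, constants 12 and 4 yield
   the constant 3, while 13 and 4 yield NULL_TREE because the remainder
   is nonzero.  */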

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, which
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */
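
/* A typical usage sketch (how callers such as the loop-iteration
   estimators are expected to pair these calls):

     fold_defer_overflow_warnings ();
     t = fold (expr);
     ... decide whether T is actually used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   where used_p and stmt stand for the caller's own bookkeeping.  */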

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */
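
/* For instance, sin is odd (sin(-x) == -sin(x)), so -sin(x) may be
   folded to sin(-x), whereas cos is even and does not qualify.  */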

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */
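
/* E.g. for a 32-bit signed type every value except INT_MIN
   (-2147483648) can be negated; -INT_MIN is not representable, so the
   function returns false for it.  */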

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
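
/* For example, with CODE == PLUS_EXPR, splitting "x + 3" stores 3 in
   *LITP and returns x, while splitting "x - 3" stores 3 in *MINUS_LITP
   instead; splitting a lone "x" returns x with all three parts null.  */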

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */
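
/* For instance, with CODE == PLUS_EXPR, combining T1 == "a + b" with
   T2 == "-c" builds "(a + b) - c" directly rather than folding
   "(a + b) + (-c)" and recursing.  */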

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
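          /* E.g. for a 32-bit type the full product is formed in the
             64 bits of TMP below and then shifted right by 32, leaving
             the high half of the product in RES.  */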
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;

      /* Check for the case of INT_MIN % -1; return overflow and
         result = 0.  The TImode case is handled properly in
         double-int.  */
      if (TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT
          && !uns
          && op2.is_minus_one ()
          && op1.high == (HOST_WIDE_INT) -1
          && (HOST_WIDE_INT) op1.low
             == (((HOST_WIDE_INT)-1) << (TYPE_PRECISION (type) - 1)))
        {
          overflow = 1;
          res = double_int_zero;
        }
      else
        tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions the compiler always emits VEC_RSHIFT_EXPR;
             for !BYTES_BIG_ENDIAN it picks the first vector element,
             but for BYTES_BIG_ENDIAN the last element of the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, as per int_binop_types_match_p.
   If the operands are constant, so is the result.  */
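
/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds at once to the sizetype constant 12.  */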

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */
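
/* For instance, for the sizetype constants 4 and 12 this yields the
   ssizetype constant -8 rather than a huge unsigned wraparound
   value.  */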

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
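
  /* Concretely, converting the REAL_CST 1.0e10 to a 32-bit int under
     these rules yields INT_MAX (2147483647) with TREE_OVERFLOW set,
     and converting a NaN yields 0, likewise flagged as overflow.  */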

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are nonzero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */
1839
1840 static tree
1841 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1842 {
1843 FIXED_VALUE_TYPE value;
1844 tree t;
1845 bool overflow_p;
1846
1847 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1848 &TREE_REAL_CST (arg1),
1849 TYPE_SATURATING (type));
1850 t = build_fixed (type, value);
1851
1852 /* Propagate overflow flags. */
1853 if (overflow_p | TREE_OVERFLOW (arg1))
1854 TREE_OVERFLOW (t) = 1;
1855 return t;
1856 }
1857
1858 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1859 type TYPE. If no simplification can be done return NULL_TREE. */
1860
1861 static tree
1862 fold_convert_const (enum tree_code code, tree type, tree arg1)
1863 {
1864 if (TREE_TYPE (arg1) == type)
1865 return arg1;
1866
1867 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1868 || TREE_CODE (type) == OFFSET_TYPE)
1869 {
1870 if (TREE_CODE (arg1) == INTEGER_CST)
1871 return fold_convert_const_int_from_int (type, arg1);
1872 else if (TREE_CODE (arg1) == REAL_CST)
1873 return fold_convert_const_int_from_real (code, type, arg1);
1874 else if (TREE_CODE (arg1) == FIXED_CST)
1875 return fold_convert_const_int_from_fixed (type, arg1);
1876 }
1877 else if (TREE_CODE (type) == REAL_TYPE)
1878 {
1879 if (TREE_CODE (arg1) == INTEGER_CST)
1880 return build_real_from_int_cst (type, arg1);
1881 else if (TREE_CODE (arg1) == REAL_CST)
1882 return fold_convert_const_real_from_real (type, arg1);
1883 else if (TREE_CODE (arg1) == FIXED_CST)
1884 return fold_convert_const_real_from_fixed (type, arg1);
1885 }
1886 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1887 {
1888 if (TREE_CODE (arg1) == FIXED_CST)
1889 return fold_convert_const_fixed_from_fixed (type, arg1);
1890 else if (TREE_CODE (arg1) == INTEGER_CST)
1891 return fold_convert_const_fixed_from_int (type, arg1);
1892 else if (TREE_CODE (arg1) == REAL_CST)
1893 return fold_convert_const_fixed_from_real (type, arg1);
1894 }
1895 return NULL_TREE;
1896 }
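
/* A minimal usage sketch for the dispatcher above, assuming the usual
   global type nodes are initialized: folding the conversion of the
   integer constant 42 to double at compile time.

     tree arg = build_int_cst (integer_type_node, 42);
     tree t = fold_convert_const (FLOAT_EXPR, double_type_node, arg);
     if (t != NULL_TREE)
       gcc_assert (TREE_CODE (t) == REAL_CST);

   A NULL_TREE result means no constant simplification applied.  */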
1897
1898 /* Construct a vector of zero elements of vector type TYPE. */
1899
1900 static tree
1901 build_zero_vector (tree type)
1902 {
1903 tree t;
1904
1905 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1906 return build_vector_from_val (type, t);
1907 }
1908
1909 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1910
1911 bool
1912 fold_convertible_p (const_tree type, const_tree arg)
1913 {
1914 tree orig = TREE_TYPE (arg);
1915
1916 if (type == orig)
1917 return true;
1918
1919 if (TREE_CODE (arg) == ERROR_MARK
1920 || TREE_CODE (type) == ERROR_MARK
1921 || TREE_CODE (orig) == ERROR_MARK)
1922 return false;
1923
1924 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1925 return true;
1926
1927 switch (TREE_CODE (type))
1928 {
1929 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1930 case POINTER_TYPE: case REFERENCE_TYPE:
1931 case OFFSET_TYPE:
1932 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1933 || TREE_CODE (orig) == OFFSET_TYPE)
1934 return true;
1935 return (TREE_CODE (orig) == VECTOR_TYPE
1936 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1937
1938 case REAL_TYPE:
1939 case FIXED_POINT_TYPE:
1940 case COMPLEX_TYPE:
1941 case VECTOR_TYPE:
1942 case VOID_TYPE:
1943 return TREE_CODE (type) == TREE_CODE (orig);
1944
1945 default:
1946 return false;
1947 }
1948 }
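
/* Illustrative sketch of the predicate above: an INTEGER_CST of type
   int is NOP-convertible to long because both are INTEGRAL_TYPE_P,
   while a conversion to a hypothetical RECORD_TYPE target falls into
   the default case.

     fold_convertible_p (long_integer_type_node, integer_zero_node)
       => true
     fold_convertible_p (some_record_type, integer_zero_node)
       => false  */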
1949
1950 /* Convert expression ARG to type TYPE. Used by the middle-end for
1951 simple conversions in preference to calling the front-end's convert. */
1952
1953 tree
1954 fold_convert_loc (location_t loc, tree type, tree arg)
1955 {
1956 tree orig = TREE_TYPE (arg);
1957 tree tem;
1958
1959 if (type == orig)
1960 return arg;
1961
1962 if (TREE_CODE (arg) == ERROR_MARK
1963 || TREE_CODE (type) == ERROR_MARK
1964 || TREE_CODE (orig) == ERROR_MARK)
1965 return error_mark_node;
1966
1967 switch (TREE_CODE (type))
1968 {
1969 case POINTER_TYPE:
1970 case REFERENCE_TYPE:
1971 /* Handle conversions between pointers to different address spaces. */
1972 if (POINTER_TYPE_P (orig)
1973 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1974 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1975 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1976 /* fall through */
1977
1978 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1979 case OFFSET_TYPE:
1980 if (TREE_CODE (arg) == INTEGER_CST)
1981 {
1982 tem = fold_convert_const (NOP_EXPR, type, arg);
1983 if (tem != NULL_TREE)
1984 return tem;
1985 }
1986 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1987 || TREE_CODE (orig) == OFFSET_TYPE)
1988 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1989 if (TREE_CODE (orig) == COMPLEX_TYPE)
1990 return fold_convert_loc (loc, type,
1991 fold_build1_loc (loc, REALPART_EXPR,
1992 TREE_TYPE (orig), arg));
1993 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1994 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1995 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1996
1997 case REAL_TYPE:
1998 if (TREE_CODE (arg) == INTEGER_CST)
1999 {
2000 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2001 if (tem != NULL_TREE)
2002 return tem;
2003 }
2004 else if (TREE_CODE (arg) == REAL_CST)
2005 {
2006 tem = fold_convert_const (NOP_EXPR, type, arg);
2007 if (tem != NULL_TREE)
2008 return tem;
2009 }
2010 else if (TREE_CODE (arg) == FIXED_CST)
2011 {
2012 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2013 if (tem != NULL_TREE)
2014 return tem;
2015 }
2016
2017 switch (TREE_CODE (orig))
2018 {
2019 case INTEGER_TYPE:
2020 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2021 case POINTER_TYPE: case REFERENCE_TYPE:
2022 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2023
2024 case REAL_TYPE:
2025 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2026
2027 case FIXED_POINT_TYPE:
2028 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2029
2030 case COMPLEX_TYPE:
2031 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2032 return fold_convert_loc (loc, type, tem);
2033
2034 default:
2035 gcc_unreachable ();
2036 }
2037
2038 case FIXED_POINT_TYPE:
2039 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2040 || TREE_CODE (arg) == REAL_CST)
2041 {
2042 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2043 if (tem != NULL_TREE)
2044 goto fold_convert_exit;
2045 }
2046
2047 switch (TREE_CODE (orig))
2048 {
2049 case FIXED_POINT_TYPE:
2050 case INTEGER_TYPE:
2051 case ENUMERAL_TYPE:
2052 case BOOLEAN_TYPE:
2053 case REAL_TYPE:
2054 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2055
2056 case COMPLEX_TYPE:
2057 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2058 return fold_convert_loc (loc, type, tem);
2059
2060 default:
2061 gcc_unreachable ();
2062 }
2063
2064 case COMPLEX_TYPE:
2065 switch (TREE_CODE (orig))
2066 {
2067 case INTEGER_TYPE:
2068 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2069 case POINTER_TYPE: case REFERENCE_TYPE:
2070 case REAL_TYPE:
2071 case FIXED_POINT_TYPE:
2072 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2073 fold_convert_loc (loc, TREE_TYPE (type), arg),
2074 fold_convert_loc (loc, TREE_TYPE (type),
2075 integer_zero_node));
2076 case COMPLEX_TYPE:
2077 {
2078 tree rpart, ipart;
2079
2080 if (TREE_CODE (arg) == COMPLEX_EXPR)
2081 {
2082 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2083 TREE_OPERAND (arg, 0));
2084 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2085 TREE_OPERAND (arg, 1));
2086 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2087 }
2088
2089 arg = save_expr (arg);
2090 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2091 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2092 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2093 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2094 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2095 }
2096
2097 default:
2098 gcc_unreachable ();
2099 }
2100
2101 case VECTOR_TYPE:
2102 if (integer_zerop (arg))
2103 return build_zero_vector (type);
2104 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2105 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2106 || TREE_CODE (orig) == VECTOR_TYPE);
2107 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2108
2109 case VOID_TYPE:
2110 tem = fold_ignored_result (arg);
2111 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2112
2113 default:
2114 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2115 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2116 gcc_unreachable ();
2117 }
2118 fold_convert_exit:
2119 protected_set_expr_location_unshare (tem, loc);
2120 return tem;
2121 }
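
/* Illustrative sketch, with X a hypothetical tree of type double:
   converting X to complex double pairs the converted value with a
   zero imaginary part, per the COMPLEX_TYPE case above.

     tree c = fold_convert_loc (loc, complex_double_type_node, x);
       => COMPLEX_EXPR <x, 0.0>  */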
2122 \f
2123 /* Return false if X can be assumed not to be an lvalue, true
2124 otherwise. */
2125
2126 static bool
2127 maybe_lvalue_p (const_tree x)
2128 {
2129 /* We only need to wrap lvalue tree codes. */
2130 switch (TREE_CODE (x))
2131 {
2132 case VAR_DECL:
2133 case PARM_DECL:
2134 case RESULT_DECL:
2135 case LABEL_DECL:
2136 case FUNCTION_DECL:
2137 case SSA_NAME:
2138
2139 case COMPONENT_REF:
2140 case MEM_REF:
2141 case INDIRECT_REF:
2142 case ARRAY_REF:
2143 case ARRAY_RANGE_REF:
2144 case BIT_FIELD_REF:
2145 case OBJ_TYPE_REF:
2146
2147 case REALPART_EXPR:
2148 case IMAGPART_EXPR:
2149 case PREINCREMENT_EXPR:
2150 case PREDECREMENT_EXPR:
2151 case SAVE_EXPR:
2152 case TRY_CATCH_EXPR:
2153 case WITH_CLEANUP_EXPR:
2154 case COMPOUND_EXPR:
2155 case MODIFY_EXPR:
2156 case TARGET_EXPR:
2157 case COND_EXPR:
2158 case BIND_EXPR:
2159 break;
2160
2161 default:
2162 /* Assume the worst for front-end tree codes. */
2163 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2164 break;
2165 return false;
2166 }
2167
2168 return true;
2169 }
2170
2171 /* Return an expr equal to X but certainly not valid as an lvalue. */
2172
2173 tree
2174 non_lvalue_loc (location_t loc, tree x)
2175 {
2176 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2177 us. */
2178 if (in_gimple_form)
2179 return x;
2180
2181 if (! maybe_lvalue_p (x))
2182 return x;
2183 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2184 }
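
/* Illustrative sketch of the pair above, with SOME_VAR_DECL a
   hypothetical VAR_DECL: a decl might be an lvalue, so it gets
   wrapped, whereas an INTEGER_CST is rejected by maybe_lvalue_p and
   is returned unchanged.

     non_lvalue_loc (loc, some_var_decl)     => NON_LVALUE_EXPR <var>
     non_lvalue_loc (loc, integer_zero_node) => integer_zero_node  */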
2185
2186 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2187 Zero means allow extended lvalues. */
2188
2189 int pedantic_lvalues;
2190
2191 /* When pedantic, return an expr equal to X but certainly not valid as a
2192 pedantic lvalue. Otherwise, return X. */
2193
2194 static tree
2195 pedantic_non_lvalue_loc (location_t loc, tree x)
2196 {
2197 if (pedantic_lvalues)
2198 return non_lvalue_loc (loc, x);
2199
2200 return protected_set_expr_location_unshare (x, loc);
2201 }
2202 \f
2203 /* Given a tree comparison code, return the code that is the logical inverse.
2204 It is generally not safe to do this for floating-point comparisons, except
2205 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2206 ERROR_MARK in this case. */
2207
2208 enum tree_code
2209 invert_tree_comparison (enum tree_code code, bool honor_nans)
2210 {
2211 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2212 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2213 return ERROR_MARK;
2214
2215 switch (code)
2216 {
2217 case EQ_EXPR:
2218 return NE_EXPR;
2219 case NE_EXPR:
2220 return EQ_EXPR;
2221 case GT_EXPR:
2222 return honor_nans ? UNLE_EXPR : LE_EXPR;
2223 case GE_EXPR:
2224 return honor_nans ? UNLT_EXPR : LT_EXPR;
2225 case LT_EXPR:
2226 return honor_nans ? UNGE_EXPR : GE_EXPR;
2227 case LE_EXPR:
2228 return honor_nans ? UNGT_EXPR : GT_EXPR;
2229 case LTGT_EXPR:
2230 return UNEQ_EXPR;
2231 case UNEQ_EXPR:
2232 return LTGT_EXPR;
2233 case UNGT_EXPR:
2234 return LE_EXPR;
2235 case UNGE_EXPR:
2236 return LT_EXPR;
2237 case UNLT_EXPR:
2238 return GE_EXPR;
2239 case UNLE_EXPR:
2240 return GT_EXPR;
2241 case ORDERED_EXPR:
2242 return UNORDERED_EXPR;
2243 case UNORDERED_EXPR:
2244 return ORDERED_EXPR;
2245 default:
2246 gcc_unreachable ();
2247 }
2248 }
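
/* Illustrative sketch: inverting a < b must account for NaNs, since
   !(a < b) also holds when the operands are unordered.

     invert_tree_comparison (LT_EXPR, false) => GE_EXPR
     invert_tree_comparison (LT_EXPR, true)  => UNGE_EXPR,
       or ERROR_MARK if flag_trapping_math is set, because UNGE_EXPR
       does not trap on unordered operands while LT_EXPR does.  */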
2249
2250 /* Similar, but return the comparison that results if the operands are
2251 swapped. This is safe for floating-point. */
2252
2253 enum tree_code
2254 swap_tree_comparison (enum tree_code code)
2255 {
2256 switch (code)
2257 {
2258 case EQ_EXPR:
2259 case NE_EXPR:
2260 case ORDERED_EXPR:
2261 case UNORDERED_EXPR:
2262 case LTGT_EXPR:
2263 case UNEQ_EXPR:
2264 return code;
2265 case GT_EXPR:
2266 return LT_EXPR;
2267 case GE_EXPR:
2268 return LE_EXPR;
2269 case LT_EXPR:
2270 return GT_EXPR;
2271 case LE_EXPR:
2272 return GE_EXPR;
2273 case UNGT_EXPR:
2274 return UNLT_EXPR;
2275 case UNGE_EXPR:
2276 return UNLE_EXPR;
2277 case UNLT_EXPR:
2278 return UNGT_EXPR;
2279 case UNLE_EXPR:
2280 return UNGE_EXPR;
2281 default:
2282 gcc_unreachable ();
2283 }
2284 }
2285
2286
2287 /* Convert a comparison tree code from an enum tree_code representation
2288 into a compcode bit-based encoding. This function is the inverse of
2289 compcode_to_comparison. */
2290
2291 static enum comparison_code
2292 comparison_to_compcode (enum tree_code code)
2293 {
2294 switch (code)
2295 {
2296 case LT_EXPR:
2297 return COMPCODE_LT;
2298 case EQ_EXPR:
2299 return COMPCODE_EQ;
2300 case LE_EXPR:
2301 return COMPCODE_LE;
2302 case GT_EXPR:
2303 return COMPCODE_GT;
2304 case NE_EXPR:
2305 return COMPCODE_NE;
2306 case GE_EXPR:
2307 return COMPCODE_GE;
2308 case ORDERED_EXPR:
2309 return COMPCODE_ORD;
2310 case UNORDERED_EXPR:
2311 return COMPCODE_UNORD;
2312 case UNLT_EXPR:
2313 return COMPCODE_UNLT;
2314 case UNEQ_EXPR:
2315 return COMPCODE_UNEQ;
2316 case UNLE_EXPR:
2317 return COMPCODE_UNLE;
2318 case UNGT_EXPR:
2319 return COMPCODE_UNGT;
2320 case LTGT_EXPR:
2321 return COMPCODE_LTGT;
2322 case UNGE_EXPR:
2323 return COMPCODE_UNGE;
2324 default:
2325 gcc_unreachable ();
2326 }
2327 }
2328
2329 /* Convert a compcode bit-based encoding of a comparison operator back
2330 to GCC's enum tree_code representation. This function is the
2331 inverse of comparison_to_compcode. */
2332
2333 static enum tree_code
2334 compcode_to_comparison (enum comparison_code code)
2335 {
2336 switch (code)
2337 {
2338 case COMPCODE_LT:
2339 return LT_EXPR;
2340 case COMPCODE_EQ:
2341 return EQ_EXPR;
2342 case COMPCODE_LE:
2343 return LE_EXPR;
2344 case COMPCODE_GT:
2345 return GT_EXPR;
2346 case COMPCODE_NE:
2347 return NE_EXPR;
2348 case COMPCODE_GE:
2349 return GE_EXPR;
2350 case COMPCODE_ORD:
2351 return ORDERED_EXPR;
2352 case COMPCODE_UNORD:
2353 return UNORDERED_EXPR;
2354 case COMPCODE_UNLT:
2355 return UNLT_EXPR;
2356 case COMPCODE_UNEQ:
2357 return UNEQ_EXPR;
2358 case COMPCODE_UNLE:
2359 return UNLE_EXPR;
2360 case COMPCODE_UNGT:
2361 return UNGT_EXPR;
2362 case COMPCODE_LTGT:
2363 return LTGT_EXPR;
2364 case COMPCODE_UNGE:
2365 return UNGE_EXPR;
2366 default:
2367 gcc_unreachable ();
2368 }
2369 }
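
/* Illustrative sketch of the bit-based encoding: LT, EQ and GT occupy
   separate bits, so combining comparison codes is plain bitwise
   arithmetic.

     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
     compcode_to_comparison (COMPCODE_LE) => LE_EXPR  */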
2370
2371 /* Return a tree for the comparison which is the combination of
2372 doing the AND or OR (depending on CODE) of the two operations LCODE
2373 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2374 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2375 if this makes the transformation invalid. */
2376
2377 tree
2378 combine_comparisons (location_t loc,
2379 enum tree_code code, enum tree_code lcode,
2380 enum tree_code rcode, tree truth_type,
2381 tree ll_arg, tree lr_arg)
2382 {
2383 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2384 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2385 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2386 int compcode;
2387
2388 switch (code)
2389 {
2390 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2391 compcode = lcompcode & rcompcode;
2392 break;
2393
2394 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2395 compcode = lcompcode | rcompcode;
2396 break;
2397
2398 default:
2399 return NULL_TREE;
2400 }
2401
2402 if (!honor_nans)
2403 {
2404 /* Eliminate unordered comparisons, as well as LTGT and ORD
2405 which are not used unless the mode has NaNs. */
2406 compcode &= ~COMPCODE_UNORD;
2407 if (compcode == COMPCODE_LTGT)
2408 compcode = COMPCODE_NE;
2409 else if (compcode == COMPCODE_ORD)
2410 compcode = COMPCODE_TRUE;
2411 }
2412 else if (flag_trapping_math)
2413 {
2414 /* Check that the original operation and the optimized ones will trap
2415 under the same condition. */
2416 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2417 && (lcompcode != COMPCODE_EQ)
2418 && (lcompcode != COMPCODE_ORD);
2419 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2420 && (rcompcode != COMPCODE_EQ)
2421 && (rcompcode != COMPCODE_ORD);
2422 bool trap = (compcode & COMPCODE_UNORD) == 0
2423 && (compcode != COMPCODE_EQ)
2424 && (compcode != COMPCODE_ORD);
2425
2426 /* In a short-circuited boolean expression the LHS might be
2427 such that the RHS, if evaluated, will never trap. For
2428 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2429 if neither x nor y is NaN. (This is a mixed blessing: for
2430 example, the expression above will never trap, hence
2431 optimizing it to x < y would be invalid). */
2432 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2433 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2434 rtrap = false;
2435
2436 /* If the comparison was short-circuited, and only the RHS
2437 trapped, we may now generate a spurious trap. */
2438 if (rtrap && !ltrap
2439 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2440 return NULL_TREE;
2441
2442 /* If we changed the conditions that cause a trap, we lose. */
2443 if ((ltrap || rtrap) != trap)
2444 return NULL_TREE;
2445 }
2446
2447 if (compcode == COMPCODE_TRUE)
2448 return constant_boolean_node (true, truth_type);
2449 else if (compcode == COMPCODE_FALSE)
2450 return constant_boolean_node (false, truth_type);
2451 else
2452 {
2453 enum tree_code tcode;
2454
2455 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2456 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2457 }
2458 }
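
/* Illustrative sketch, with X and Y hypothetical integer-typed trees
   (so HONOR_NANS is false and no trapping analysis is needed):
   (X < Y) || (X == Y) combines to X <= Y via the OR of compcodes.

     tree t = combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR,
                                   EQ_EXPR, boolean_type_node, x, y);
       => LE_EXPR <x, y>  */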
2459 \f
2460 /* Return nonzero if two operands (typically of the same tree node)
2461 are necessarily equal. If either argument has side-effects this
2462 function returns zero. FLAGS modifies behavior as follows:
2463
2464 If OEP_ONLY_CONST is set, only return nonzero for constants.
2465 This function tests whether the operands are indistinguishable;
2466 it does not test whether they are equal using C's == operation.
2467 The distinction is important for IEEE floating point, because
2468 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2469 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2470
2471 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2472 even though it may hold multiple values during a function.
2473 This is because a GCC tree node guarantees that nothing else is
2474 executed between the evaluation of its "operands" (which may often
2475 be evaluated in arbitrary order). Hence if the operands themselves
2476 have no side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2477 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2478 unset means assuming isochronic (or instantaneous) tree equivalence.
2479 Unless comparing arbitrary expression trees, such as from different
2480 statements, this flag can usually be left unset.
2481
2482 If OEP_PURE_SAME is set, then pure functions with identical arguments
2483 are considered the same. It is used when the caller has other ways
2484 to ensure that global memory is unchanged in between. */
2485
2486 int
2487 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2488 {
2489 /* If either is ERROR_MARK, they aren't equal. */
2490 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2491 || TREE_TYPE (arg0) == error_mark_node
2492 || TREE_TYPE (arg1) == error_mark_node)
2493 return 0;
2494
2495 /* Similarly, if either does not have a type (like a released SSA name),
2496 they aren't equal. */
2497 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2498 return 0;
2499
2500 /* Check equality of integer constants before bailing out due to
2501 precision differences. */
2502 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2503 return tree_int_cst_equal (arg0, arg1);
2504
2505 /* If the two types don't have the same signedness, then we can't consider
2506 them equal. We must check this before the STRIP_NOPS calls
2507 because they may change the signedness of the arguments. As pointers
2508 strictly don't have a signedness, require either two pointers or
2509 two non-pointers as well. */
2510 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2511 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2512 return 0;
2513
2514 /* We cannot consider pointers to different address space equal. */
2515 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2516 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2517 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2518 return 0;
2519
2520 /* If the two types don't have the same precision, then it is not safe
2521 to strip NOPs. */
2522 if (element_precision (TREE_TYPE (arg0))
2523 != element_precision (TREE_TYPE (arg1)))
2524 return 0;
2525
2526 STRIP_NOPS (arg0);
2527 STRIP_NOPS (arg1);
2528
2529 /* In case both args are comparisons but with different comparison
2530 code, try to swap the comparison operands of one arg to produce
2531 a match and compare that variant. */
2532 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2533 && COMPARISON_CLASS_P (arg0)
2534 && COMPARISON_CLASS_P (arg1))
2535 {
2536 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2537
2538 if (TREE_CODE (arg0) == swap_code)
2539 return operand_equal_p (TREE_OPERAND (arg0, 0),
2540 TREE_OPERAND (arg1, 1), flags)
2541 && operand_equal_p (TREE_OPERAND (arg0, 1),
2542 TREE_OPERAND (arg1, 0), flags);
2543 }
2544
2545 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2546 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2547 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2548 return 0;
2549
2550 /* This is needed for conversions and for COMPONENT_REF.
2551 Might as well play it safe and always test this. */
2552 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2553 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2554 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2555 return 0;
2556
2557 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2558 We don't care about side effects in that case because the SAVE_EXPR
2559 takes care of that for us. In all other cases, two expressions are
2560 equal if they have no side effects. If we have two identical
2561 expressions with side effects that should be treated the same due
2562 to the only side effects being identical SAVE_EXPR's, that will
2563 be detected in the recursive calls below.
2564 If we are taking an invariant address of two identical objects
2565 they are necessarily equal as well. */
2566 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2567 && (TREE_CODE (arg0) == SAVE_EXPR
2568 || (flags & OEP_CONSTANT_ADDRESS_OF)
2569 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2570 return 1;
2571
2572 /* Next handle constant cases, those for which we can return 1 even
2573 if ONLY_CONST is set. */
2574 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2575 switch (TREE_CODE (arg0))
2576 {
2577 case INTEGER_CST:
2578 return tree_int_cst_equal (arg0, arg1);
2579
2580 case FIXED_CST:
2581 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2582 TREE_FIXED_CST (arg1));
2583
2584 case REAL_CST:
2585 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2586 TREE_REAL_CST (arg1)))
2587 return 1;
2588
2589
2590 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2591 {
2592 /* If we do not distinguish between signed and unsigned zero,
2593 consider them equal. */
2594 if (real_zerop (arg0) && real_zerop (arg1))
2595 return 1;
2596 }
2597 return 0;
2598
2599 case VECTOR_CST:
2600 {
2601 unsigned i;
2602
2603 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2604 return 0;
2605
2606 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2607 {
2608 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2609 VECTOR_CST_ELT (arg1, i), flags))
2610 return 0;
2611 }
2612 return 1;
2613 }
2614
2615 case COMPLEX_CST:
2616 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2617 flags)
2618 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2619 flags));
2620
2621 case STRING_CST:
2622 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2623 && ! memcmp (TREE_STRING_POINTER (arg0),
2624 TREE_STRING_POINTER (arg1),
2625 TREE_STRING_LENGTH (arg0)));
2626
2627 case ADDR_EXPR:
2628 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2629 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2630 ? OEP_CONSTANT_ADDRESS_OF : 0);
2631 default:
2632 break;
2633 }
2634
2635 if (flags & OEP_ONLY_CONST)
2636 return 0;
2637
2638 /* Define macros to test an operand from arg0 and arg1 for equality and a
2639 variant that allows null and views null as being different from any
2640 non-null value. In the latter case, if either is null, then both
2641 must be; otherwise, do the normal comparison. */
2642 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2643 TREE_OPERAND (arg1, N), flags)
2644
2645 #define OP_SAME_WITH_NULL(N) \
2646 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2647 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2648
2649 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2650 {
2651 case tcc_unary:
2652 /* Two conversions are equal only if signedness and modes match. */
2653 switch (TREE_CODE (arg0))
2654 {
2655 CASE_CONVERT:
2656 case FIX_TRUNC_EXPR:
2657 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2658 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2659 return 0;
2660 break;
2661 default:
2662 break;
2663 }
2664
2665 return OP_SAME (0);
2666
2667
2668 case tcc_comparison:
2669 case tcc_binary:
2670 if (OP_SAME (0) && OP_SAME (1))
2671 return 1;
2672
2673 /* For commutative ops, allow the other order. */
2674 return (commutative_tree_code (TREE_CODE (arg0))
2675 && operand_equal_p (TREE_OPERAND (arg0, 0),
2676 TREE_OPERAND (arg1, 1), flags)
2677 && operand_equal_p (TREE_OPERAND (arg0, 1),
2678 TREE_OPERAND (arg1, 0), flags));
2679
2680 case tcc_reference:
2681 /* If either of the pointer (or reference) expressions we are
2682 dereferencing contain a side effect, these cannot be equal,
2683 but their addresses can be. */
2684 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2685 && (TREE_SIDE_EFFECTS (arg0)
2686 || TREE_SIDE_EFFECTS (arg1)))
2687 return 0;
2688
2689 switch (TREE_CODE (arg0))
2690 {
2691 case INDIRECT_REF:
2692 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2693 return OP_SAME (0);
2694
2695 case REALPART_EXPR:
2696 case IMAGPART_EXPR:
2697 return OP_SAME (0);
2698
2699 case TARGET_MEM_REF:
2700 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2701 /* Require equal extra operands and then fall through to MEM_REF
2702 handling of the two common operands. */
2703 if (!OP_SAME_WITH_NULL (2)
2704 || !OP_SAME_WITH_NULL (3)
2705 || !OP_SAME_WITH_NULL (4))
2706 return 0;
2707 /* Fallthru. */
2708 case MEM_REF:
2709 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2710 /* Require equal access sizes, and similar pointer types.
2711 We can have incomplete types for array references of
2712 variable-sized arrays from the Fortran frontend
2713 though. Also verify the types are compatible. */
2714 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2715 || (TYPE_SIZE (TREE_TYPE (arg0))
2716 && TYPE_SIZE (TREE_TYPE (arg1))
2717 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2718 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2719 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2720 && alias_ptr_types_compatible_p
2721 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2722 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2723 && OP_SAME (0) && OP_SAME (1));
2724
2725 case ARRAY_REF:
2726 case ARRAY_RANGE_REF:
2727 /* Operands 2 and 3 may be null.
2728 Compare the array index by value first if it is constant, as we
2729 may have different types but the same value here. */
2730 if (!OP_SAME (0))
2731 return 0;
2732 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2733 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2734 TREE_OPERAND (arg1, 1))
2735 || OP_SAME (1))
2736 && OP_SAME_WITH_NULL (2)
2737 && OP_SAME_WITH_NULL (3));
2738
2739 case COMPONENT_REF:
2740 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2741 may be NULL when we're called to compare MEM_EXPRs. */
2742 if (!OP_SAME_WITH_NULL (0)
2743 || !OP_SAME (1))
2744 return 0;
2745 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2746 return OP_SAME_WITH_NULL (2);
2747
2748 case BIT_FIELD_REF:
2749 if (!OP_SAME (0))
2750 return 0;
2751 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2752 return OP_SAME (1) && OP_SAME (2);
2753
2754 default:
2755 return 0;
2756 }
2757
2758 case tcc_expression:
2759 switch (TREE_CODE (arg0))
2760 {
2761 case ADDR_EXPR:
2762 case TRUTH_NOT_EXPR:
2763 return OP_SAME (0);
2764
2765 case TRUTH_ANDIF_EXPR:
2766 case TRUTH_ORIF_EXPR:
2767 return OP_SAME (0) && OP_SAME (1);
2768
2769 case FMA_EXPR:
2770 case WIDEN_MULT_PLUS_EXPR:
2771 case WIDEN_MULT_MINUS_EXPR:
2772 if (!OP_SAME (2))
2773 return 0;
2774 /* The multiplication operands are commutative. */
2775 /* FALLTHRU */
2776
2777 case TRUTH_AND_EXPR:
2778 case TRUTH_OR_EXPR:
2779 case TRUTH_XOR_EXPR:
2780 if (OP_SAME (0) && OP_SAME (1))
2781 return 1;
2782
2783 /* Otherwise take into account this is a commutative operation. */
2784 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2785 TREE_OPERAND (arg1, 1), flags)
2786 && operand_equal_p (TREE_OPERAND (arg0, 1),
2787 TREE_OPERAND (arg1, 0), flags));
2788
2789 case COND_EXPR:
2790 case VEC_COND_EXPR:
2791 case DOT_PROD_EXPR:
2792 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2793
2794 default:
2795 return 0;
2796 }
2797
2798 case tcc_vl_exp:
2799 switch (TREE_CODE (arg0))
2800 {
2801 case CALL_EXPR:
2802 /* If the CALL_EXPRs call different functions, then they
2803 clearly cannot be equal. */
2804 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2805 flags))
2806 return 0;
2807
2808 {
2809 unsigned int cef = call_expr_flags (arg0);
2810 if (flags & OEP_PURE_SAME)
2811 cef &= ECF_CONST | ECF_PURE;
2812 else
2813 cef &= ECF_CONST;
2814 if (!cef)
2815 return 0;
2816 }
2817
2818 /* Now see if all the arguments are the same. */
2819 {
2820 const_call_expr_arg_iterator iter0, iter1;
2821 const_tree a0, a1;
2822 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2823 a1 = first_const_call_expr_arg (arg1, &iter1);
2824 a0 && a1;
2825 a0 = next_const_call_expr_arg (&iter0),
2826 a1 = next_const_call_expr_arg (&iter1))
2827 if (! operand_equal_p (a0, a1, flags))
2828 return 0;
2829
2830 /* If we get here and both argument lists are exhausted
2831 then the CALL_EXPRs are equal. */
2832 return ! (a0 || a1);
2833 }
2834 default:
2835 return 0;
2836 }
2837
2838 case tcc_declaration:
2839 /* Consider __builtin_sqrt equal to sqrt. */
2840 return (TREE_CODE (arg0) == FUNCTION_DECL
2841 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2842 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2843 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2844
2845 default:
2846 return 0;
2847 }
2848
2849 #undef OP_SAME
2850 #undef OP_SAME_WITH_NULL
2851 }
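
/* Illustrative sketch, with A a hypothetical side-effect-free tree of
   type TYPE: two structurally identical expressions compare equal,
   while an expression containing a call to a non-const function does
   not, since such calls have side effects.

     operand_equal_p (build2 (PLUS_EXPR, type, a, integer_one_node),
                      build2 (PLUS_EXPR, type, a, integer_one_node), 0)
       => 1  */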
2852 \f
2853 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2854 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2855
2856 When in doubt, return 0. */
2857
2858 static int
2859 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2860 {
2861 int unsignedp1, unsignedpo;
2862 tree primarg0, primarg1, primother;
2863 unsigned int correct_width;
2864
2865 if (operand_equal_p (arg0, arg1, 0))
2866 return 1;
2867
2868 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2869 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2870 return 0;
2871
2872 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2873 and see if the inner values are the same. This removes any
2874 signedness comparison, which doesn't matter here. */
2875 primarg0 = arg0, primarg1 = arg1;
2876 STRIP_NOPS (primarg0);
2877 STRIP_NOPS (primarg1);
2878 if (operand_equal_p (primarg0, primarg1, 0))
2879 return 1;
2880
2881 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2882 actual comparison operand, ARG0.
2883
2884 First throw away any conversions to wider types
2885 already present in the operands. */
2886
2887 primarg1 = get_narrower (arg1, &unsignedp1);
2888 primother = get_narrower (other, &unsignedpo);
2889
2890 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2891 if (unsignedp1 == unsignedpo
2892 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2893 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2894 {
2895 tree type = TREE_TYPE (arg0);
2896
2897 /* Make sure shorter operand is extended the right way
2898 to match the longer operand. */
2899 primarg1 = fold_convert (signed_or_unsigned_type_for
2900 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2901
2902 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2903 return 1;
2904 }
2905
2906 return 0;
2907 }
2908 \f
2909 /* See if ARG is an expression that is either a comparison or is performing
2910 arithmetic on comparisons. The comparisons must only be comparing
2911 two different values, which will be stored in *CVAL1 and *CVAL2; if
2912 they are nonzero it means that some operands have already been found.
2913 No variables may be used anywhere else in the expression except in the
2914 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2915 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2916
2917 If this is true, return 1. Otherwise, return zero. */
2918
2919 static int
2920 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2921 {
2922 enum tree_code code = TREE_CODE (arg);
2923 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2924
2925 /* We can handle some of the tcc_expression cases here. */
2926 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2927 tclass = tcc_unary;
2928 else if (tclass == tcc_expression
2929 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2930 || code == COMPOUND_EXPR))
2931 tclass = tcc_binary;
2932
2933 else if (tclass == tcc_expression && code == SAVE_EXPR
2934 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2935 {
2936 /* If we've already found a CVAL1 or CVAL2, this expression is
2937 too complex to handle. */
2938 if (*cval1 || *cval2)
2939 return 0;
2940
2941 tclass = tcc_unary;
2942 *save_p = 1;
2943 }
2944
2945 switch (tclass)
2946 {
2947 case tcc_unary:
2948 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2949
2950 case tcc_binary:
2951 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2952 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2953 cval1, cval2, save_p));
2954
2955 case tcc_constant:
2956 return 1;
2957
2958 case tcc_expression:
2959 if (code == COND_EXPR)
2960 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2961 cval1, cval2, save_p)
2962 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2963 cval1, cval2, save_p)
2964 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2965 cval1, cval2, save_p));
2966 return 0;
2967
2968 case tcc_comparison:
2969 /* First see if we can handle the first operand, then the second. For
2970 the second operand, we know *CVAL1 can't be zero. It must be that
2971 one side of the comparison is each of the values; test for the
2972 case where this isn't true by failing if the two operands
2973 are the same. */
2974
2975 if (operand_equal_p (TREE_OPERAND (arg, 0),
2976 TREE_OPERAND (arg, 1), 0))
2977 return 0;
2978
2979 if (*cval1 == 0)
2980 *cval1 = TREE_OPERAND (arg, 0);
2981 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2982 ;
2983 else if (*cval2 == 0)
2984 *cval2 = TREE_OPERAND (arg, 0);
2985 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2986 ;
2987 else
2988 return 0;
2989
2990 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2991 ;
2992 else if (*cval2 == 0)
2993 *cval2 = TREE_OPERAND (arg, 1);
2994 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2995 ;
2996 else
2997 return 0;
2998
2999 return 1;
3000
3001 default:
3002 return 0;
3003 }
3004 }
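
/* Illustrative sketch, with A and B hypothetical variable trees: the
   expression (A < B) && (A == B) compares only the two values A and B,
   so the predicate succeeds and records them.

     tree cval1 = NULL_TREE, cval2 = NULL_TREE;
     int save_p = 0;
     twoval_comparison_p (expr, &cval1, &cval2, &save_p)
       => 1, with cval1 == a and cval2 == b  */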
3005 \f
3006 /* ARG is a tree that is known to contain just arithmetic operations and
3007 comparisons. Evaluate the operations in the tree substituting NEW0 for
3008 any occurrence of OLD0 as an operand of a comparison and likewise for
3009 NEW1 and OLD1. */
3010
3011 static tree
3012 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3013 tree old1, tree new1)
3014 {
3015 tree type = TREE_TYPE (arg);
3016 enum tree_code code = TREE_CODE (arg);
3017 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3018
3019 /* We can handle some of the tcc_expression cases here. */
3020 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3021 tclass = tcc_unary;
3022 else if (tclass == tcc_expression
3023 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3024 tclass = tcc_binary;
3025
3026 switch (tclass)
3027 {
3028 case tcc_unary:
3029 return fold_build1_loc (loc, code, type,
3030 eval_subst (loc, TREE_OPERAND (arg, 0),
3031 old0, new0, old1, new1));
3032
3033 case tcc_binary:
3034 return fold_build2_loc (loc, code, type,
3035 eval_subst (loc, TREE_OPERAND (arg, 0),
3036 old0, new0, old1, new1),
3037 eval_subst (loc, TREE_OPERAND (arg, 1),
3038 old0, new0, old1, new1));
3039
3040 case tcc_expression:
3041 switch (code)
3042 {
3043 case SAVE_EXPR:
3044 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3045 old1, new1);
3046
3047 case COMPOUND_EXPR:
3048 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3049 old1, new1);
3050
3051 case COND_EXPR:
3052 return fold_build3_loc (loc, code, type,
3053 eval_subst (loc, TREE_OPERAND (arg, 0),
3054 old0, new0, old1, new1),
3055 eval_subst (loc, TREE_OPERAND (arg, 1),
3056 old0, new0, old1, new1),
3057 eval_subst (loc, TREE_OPERAND (arg, 2),
3058 old0, new0, old1, new1));
3059 default:
3060 break;
3061 }
3062 /* Fall through - ??? */
3063
3064 case tcc_comparison:
3065 {
3066 tree arg0 = TREE_OPERAND (arg, 0);
3067 tree arg1 = TREE_OPERAND (arg, 1);
3068
3069 /* We need to check both for exact equality and tree equality. The
3070 former will be true if the operand has a side-effect. In that
3071 case, we know the operand occurred exactly once. */
3072
3073 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3074 arg0 = new0;
3075 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3076 arg0 = new1;
3077
3078 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3079 arg1 = new0;
3080 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3081 arg1 = new1;
3082
3083 return fold_build2_loc (loc, code, type, arg0, arg1);
3084 }
3085
3086 default:
3087 return arg;
3088 }
3089 }
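
/* Illustrative sketch, continuing the example above: substituting
   constants for A and B in (A < B) && (A == B) with OLD0 = a,
   NEW0 = 0, OLD1 = b, NEW1 = 1 rebuilds the tree as
   (0 < 1) && (0 == 1), which the fold_build* calls reduce further.  */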
3090 \f
3091 /* Return a tree for the case when the result of an expression is RESULT
3092 converted to TYPE and OMITTED was previously an operand of the expression
3093 but is now not needed (e.g., we folded OMITTED * 0).
3094
3095 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3096 the conversion of RESULT to TYPE. */
3097
3098 tree
3099 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3100 {
3101 tree t = fold_convert_loc (loc, type, result);
3102
3103 /* If the resulting operand is an empty statement, just return the omitted
3104 statement cast to void. */
3105 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3106 return build1_loc (loc, NOP_EXPR, void_type_node,
3107 fold_ignored_result (omitted));
3108
3109 if (TREE_SIDE_EFFECTS (omitted))
3110 return build2_loc (loc, COMPOUND_EXPR, type,
3111 fold_ignored_result (omitted), t);
3112
3113 return non_lvalue_loc (loc, t);
3114 }
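
/* Illustrative sketch, with CALL a hypothetical call tree that has
   side effects: when folding CALL * 0 to 0, the call must still be
   evaluated, so the result keeps it in a COMPOUND_EXPR.

     omit_one_operand_loc (loc, type, integer_zero_node, call)
       => COMPOUND_EXPR <call, 0>  */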
3115
3116 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3117
3118 static tree
3119 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3120 tree omitted)
3121 {
3122 tree t = fold_convert_loc (loc, type, result);
3123
3124 /* If the resulting operand is an empty statement, just return the omitted
3125 statement cast to void. */
3126 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3127 return build1_loc (loc, NOP_EXPR, void_type_node,
3128 fold_ignored_result (omitted));
3129
3130 if (TREE_SIDE_EFFECTS (omitted))
3131 return build2_loc (loc, COMPOUND_EXPR, type,
3132 fold_ignored_result (omitted), t);
3133
3134 return pedantic_non_lvalue_loc (loc, t);
3135 }
3136
3137 /* Return a tree for the case when the result of an expression is RESULT
3138 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3139 of the expression but are now not needed.
3140
3141 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3142 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3143 evaluated before OMITTED2. Otherwise, if neither has side effects,
3144 just do the conversion of RESULT to TYPE. */
3145
3146 tree
3147 omit_two_operands_loc (location_t loc, tree type, tree result,
3148 tree omitted1, tree omitted2)
3149 {
3150 tree t = fold_convert_loc (loc, type, result);
3151
3152 if (TREE_SIDE_EFFECTS (omitted2))
3153 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3154 if (TREE_SIDE_EFFECTS (omitted1))
3155 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3156
3157 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3158 }
3159
3160 \f
3161 /* Return a simplified tree node for the truth-negation of ARG. This
3162 never alters ARG itself. We assume that ARG is an operation that
3163 returns a truth value (0 or 1).
3164
3165 FIXME: one would think we would fold the result, but it causes
3166 problems with the dominator optimizer. */
3167
3168 static tree
3169 fold_truth_not_expr (location_t loc, tree arg)
3170 {
3171 tree type = TREE_TYPE (arg);
3172 enum tree_code code = TREE_CODE (arg);
3173 location_t loc1, loc2;
3174
3175 /* If this is a comparison, we can simply invert it, except for
3176 floating-point non-equality comparisons, in which case we just
3177 enclose a TRUTH_NOT_EXPR around what we have. */
3178
3179 if (TREE_CODE_CLASS (code) == tcc_comparison)
3180 {
3181 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3182 if (FLOAT_TYPE_P (op_type)
3183 && flag_trapping_math
3184 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3185 && code != NE_EXPR && code != EQ_EXPR)
3186 return NULL_TREE;
3187
3188 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3189 if (code == ERROR_MARK)
3190 return NULL_TREE;
3191
3192 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3193 TREE_OPERAND (arg, 1));
3194 }
3195
3196 switch (code)
3197 {
3198 case INTEGER_CST:
3199 return constant_boolean_node (integer_zerop (arg), type);
3200
3201 case TRUTH_AND_EXPR:
3202 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3203 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3204 return build2_loc (loc, TRUTH_OR_EXPR, type,
3205 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3206 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3207
3208 case TRUTH_OR_EXPR:
3209 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3210 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3211 return build2_loc (loc, TRUTH_AND_EXPR, type,
3212 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3213 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3214
3215 case TRUTH_XOR_EXPR:
3216 /* Here we can invert either operand. We invert the first operand
3217 unless the second operand is a TRUTH_NOT_EXPR in which case our
3218 result is the XOR of the first operand with the inside of the
3219 negation of the second operand. */
3220
3221 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3222 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3223 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3224 else
3225 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3226 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3227 TREE_OPERAND (arg, 1));
3228
3229 case TRUTH_ANDIF_EXPR:
3230 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3231 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3232 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3233 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3234 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3235
3236 case TRUTH_ORIF_EXPR:
3237 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3238 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3239 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3240 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3241 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3242
3243 case TRUTH_NOT_EXPR:
3244 return TREE_OPERAND (arg, 0);
3245
3246 case COND_EXPR:
3247 {
3248 tree arg1 = TREE_OPERAND (arg, 1);
3249 tree arg2 = TREE_OPERAND (arg, 2);
3250
3251 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3252 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3253
3254 /* A COND_EXPR may have a throw as one operand, which
3255 then has void type. Just leave void operands
3256 as they are. */
3257 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3258 VOID_TYPE_P (TREE_TYPE (arg1))
3259 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3260 VOID_TYPE_P (TREE_TYPE (arg2))
3261 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3262 }
3263
3264 case COMPOUND_EXPR:
3265 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3266 return build2_loc (loc, COMPOUND_EXPR, type,
3267 TREE_OPERAND (arg, 0),
3268 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3269
3270 case NON_LVALUE_EXPR:
3271 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3272 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3273
3274 CASE_CONVERT:
3275 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3276 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3277
3278 /* ... fall through ... */
3279
3280 case FLOAT_EXPR:
3281 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3282 return build1_loc (loc, TREE_CODE (arg), type,
3283 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3284
3285 case BIT_AND_EXPR:
3286 if (!integer_onep (TREE_OPERAND (arg, 1)))
3287 return NULL_TREE;
3288 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3289
3290 case SAVE_EXPR:
3291 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3292
3293 case CLEANUP_POINT_EXPR:
3294 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3295 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3296 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3297
3298 default:
3299 return NULL_TREE;
3300 }
3301 }
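
/* Illustrative sketch of two of the cases above: negating a
   conjunction applies De Morgan's law, and an integral comparison is
   simply inverted.

     TRUTH_AND_EXPR <a, b>  ->  TRUTH_OR_EXPR <!a, !b>
     LT_EXPR <x, y>         ->  GE_EXPR <x, y>   (integral operands)  */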
3302
3303 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3304 assume that ARG is an operation that returns a truth value (0 or 1
3305 for scalars, 0 or -1 for vectors). Return the folded expression if
3306 folding is successful. Otherwise, return NULL_TREE. */
3307
3308 static tree
3309 fold_invert_truthvalue (location_t loc, tree arg)
3310 {
3311 tree type = TREE_TYPE (arg);
3312 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3313 ? BIT_NOT_EXPR
3314 : TRUTH_NOT_EXPR,
3315 type, arg);
3316 }
3317
3318 /* Return a simplified tree node for the truth-negation of ARG. This
3319 never alters ARG itself. We assume that ARG is an operation that
3320 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3321
3322 tree
3323 invert_truthvalue_loc (location_t loc, tree arg)
3324 {
3325 if (TREE_CODE (arg) == ERROR_MARK)
3326 return arg;
3327
3328 tree type = TREE_TYPE (arg);
3329 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3330 ? BIT_NOT_EXPR
3331 : TRUTH_NOT_EXPR,
3332 type, arg);
3333 }
3334
3335 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3336 operands are another bit-wise operation with a common input. If so,
3337 distribute the bit operations to save an operation and possibly two if
3338 constants are involved. For example, convert
3339 (A | B) & (A | C) into A | (B & C)
3340 Further simplification will occur if B and C are constants.
3341
3342 If this optimization cannot be done, 0 will be returned. */
3343
3344 static tree
3345 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3346 tree arg0, tree arg1)
3347 {
3348 tree common;
3349 tree left, right;
3350
3351 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3352 || TREE_CODE (arg0) == code
3353 || (TREE_CODE (arg0) != BIT_AND_EXPR
3354 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3355 return 0;
3356
3357 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3358 {
3359 common = TREE_OPERAND (arg0, 0);
3360 left = TREE_OPERAND (arg0, 1);
3361 right = TREE_OPERAND (arg1, 1);
3362 }
3363 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3364 {
3365 common = TREE_OPERAND (arg0, 0);
3366 left = TREE_OPERAND (arg0, 1);
3367 right = TREE_OPERAND (arg1, 0);
3368 }
3369 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3370 {
3371 common = TREE_OPERAND (arg0, 1);
3372 left = TREE_OPERAND (arg0, 0);
3373 right = TREE_OPERAND (arg1, 1);
3374 }
3375 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3376 {
3377 common = TREE_OPERAND (arg0, 1);
3378 left = TREE_OPERAND (arg0, 0);
3379 right = TREE_OPERAND (arg1, 0);
3380 }
3381 else
3382 return 0;
3383
3384 common = fold_convert_loc (loc, type, common);
3385 left = fold_convert_loc (loc, type, left);
3386 right = fold_convert_loc (loc, type, right);
3387 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3388 fold_build2_loc (loc, code, type, left, right));
3389 }
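
/* Illustrative sketch, with X a hypothetical integer tree: when B and
   C are constants, distributing exposes a constant subexpression.

     (X | 3) & (X | 5)  ->  X | (3 & 5)  ->  X | 1  */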
3390
3391 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3392 with code CODE. This optimization is unsafe. */
3393 static tree
3394 distribute_real_division (location_t loc, enum tree_code code, tree type,
3395 tree arg0, tree arg1)
3396 {
3397 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3398 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3399
3400 /* (A / C) +- (B / C) -> (A +- B) / C. */
3401 if (mul0 == mul1
3402 && operand_equal_p (TREE_OPERAND (arg0, 1),
3403 TREE_OPERAND (arg1, 1), 0))
3404 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3405 fold_build2_loc (loc, code, type,
3406 TREE_OPERAND (arg0, 0),
3407 TREE_OPERAND (arg1, 0)),
3408 TREE_OPERAND (arg0, 1));
3409
3410 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3411 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3412 TREE_OPERAND (arg1, 0), 0)
3413 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3414 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3415 {
3416 REAL_VALUE_TYPE r0, r1;
3417 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3418 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3419 if (!mul0)
3420 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3421 if (!mul1)
3422 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3423 real_arithmetic (&r0, code, &r0, &r1);
3424 return fold_build2_loc (loc, MULT_EXPR, type,
3425 TREE_OPERAND (arg0, 0),
3426 build_real (type, r0));
3427 }
3428
3429 return NULL_TREE;
3430 }
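
/* Illustrative sketch, with A and B hypothetical double trees (unsafe
   in general, since the rounding of intermediate results may differ):

     (A / 3.0) + (B / 3.0)  ->  (A + B) / 3.0
     (A / 2.0) + (A / 4.0)  ->  A * (0.5 + 0.25)  ->  A * 0.75  */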
3431 \f
3432 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3433 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3434
3435 static tree
3436 make_bit_field_ref (location_t loc, tree inner, tree type,
3437 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3438 {
3439 tree result, bftype;
3440
3441 if (bitpos == 0)
3442 {
3443 tree size = TYPE_SIZE (TREE_TYPE (inner));
3444 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3445 || POINTER_TYPE_P (TREE_TYPE (inner)))
3446 && tree_fits_shwi_p (size)
3447 && tree_to_shwi (size) == bitsize)
3448 return fold_convert_loc (loc, type, inner);
3449 }
3450
3451 bftype = type;
3452 if (TYPE_PRECISION (bftype) != bitsize
3453 || TYPE_UNSIGNED (bftype) == !unsignedp)
3454 bftype = build_nonstandard_integer_type (bitsize, 0);
3455
3456 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3457 size_int (bitsize), bitsize_int (bitpos));
3458
3459 if (bftype != type)
3460 result = fold_convert_loc (loc, type, result);
3461
3462 return result;
3463 }
3464
3465 /* Optimize a bit-field compare.
3466
3467 There are two cases: First is a compare against a constant and the
3468 second is a comparison of two items where the fields are at the same
3469 bit position relative to the start of a chunk (byte, halfword, word)
3470 large enough to contain it. In these cases we can avoid the shift
3471 implicit in bitfield extractions.
3472
3473 For constants, we emit a compare of the shifted constant with the
3474 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3475 compared. For two fields at the same position, we do the ANDs with the
3476 similar mask and compare the result of the ANDs.
3477
3478 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3479 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3480 are the left and right operands of the comparison, respectively.
3481
3482 If the optimization described above can be done, we return the resulting
3483 tree. Otherwise we return zero. */
3484
3485 static tree
3486 optimize_bit_field_compare (location_t loc, enum tree_code code,
3487 tree compare_type, tree lhs, tree rhs)
3488 {
3489 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3490 tree type = TREE_TYPE (lhs);
3491 tree signed_type, unsigned_type;
3492 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3493 enum machine_mode lmode, rmode, nmode;
3494 int lunsignedp, runsignedp;
3495 int lvolatilep = 0, rvolatilep = 0;
3496 tree linner, rinner = NULL_TREE;
3497 tree mask;
3498 tree offset;
3499
3500 /* Get all the information about the extractions being done. If the bit size
3501 is the same as the size of the underlying object, we aren't doing an
3502 extraction at all and so can do nothing. We also don't want to
3503 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3504 then will no longer be able to replace it. */
3505 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3506 &lunsignedp, &lvolatilep, false);
3507 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3508 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3509 return 0;
3510
3511 if (!const_p)
3512 {
3513 /* If this is not a constant, we can only do something if bit positions,
3514 sizes, and signedness are the same. */
3515 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3516 &runsignedp, &rvolatilep, false);
3517
3518 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3519 || lunsignedp != runsignedp || offset != 0
3520 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3521 return 0;
3522 }
3523
3524 /* See if we can find a mode to refer to this field. We should be able to,
3525 but fail if we can't. */
3526 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3527 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3528 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3529 TYPE_ALIGN (TREE_TYPE (rinner))),
3530 word_mode, false);
3531 if (nmode == VOIDmode)
3532 return 0;
3533
3534 /* Set signed and unsigned types of the precision of this mode for the
3535 shifts below. */
3536 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3537 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3538
3539 /* Compute the bit position and size for the new reference and our offset
3540 within it. If the new reference is the same size as the original, we
3541 won't optimize anything, so return zero. */
3542 nbitsize = GET_MODE_BITSIZE (nmode);
3543 nbitpos = lbitpos & ~ (nbitsize - 1);
3544 lbitpos -= nbitpos;
3545 if (nbitsize == lbitsize)
3546 return 0;
3547
3548 if (BYTES_BIG_ENDIAN)
3549 lbitpos = nbitsize - lbitsize - lbitpos;
3550
3551 /* Make the mask to be used against the extracted field. */
3552 mask = build_int_cst_type (unsigned_type, -1);
3553 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3554 mask = const_binop (RSHIFT_EXPR, mask,
3555 size_int (nbitsize - lbitsize - lbitpos));
3556
3557 if (! const_p)
3558 /* If not comparing with constant, just rework the comparison
3559 and return. */
3560 return fold_build2_loc (loc, code, compare_type,
3561 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3562 make_bit_field_ref (loc, linner,
3563 unsigned_type,
3564 nbitsize, nbitpos,
3565 1),
3566 mask),
3567 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3568 make_bit_field_ref (loc, rinner,
3569 unsigned_type,
3570 nbitsize, nbitpos,
3571 1),
3572 mask));
3573
3574 /* Otherwise, we are handling the constant case. See if the constant is too
3575 big for the field. Warn and return a tree for 0 (false) if so. We do
3576 this not only for its own sake, but to avoid having to test for this
3577 error case below. If we didn't, we might generate wrong code.
3578
3579 For unsigned fields, the constant shifted right by the field length should
3580 be all zero. For signed fields, the high-order bits should agree with
3581 the sign bit. */
3582
3583 if (lunsignedp)
3584 {
3585 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3586 fold_convert_loc (loc,
3587 unsigned_type, rhs),
3588 size_int (lbitsize))))
3589 {
3590 warning (0, "comparison is always %d due to width of bit-field",
3591 code == NE_EXPR);
3592 return constant_boolean_node (code == NE_EXPR, compare_type);
3593 }
3594 }
3595 else
3596 {
3597 tree tem = const_binop (RSHIFT_EXPR,
3598 fold_convert_loc (loc, signed_type, rhs),
3599 size_int (lbitsize - 1));
3600 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3601 {
3602 warning (0, "comparison is always %d due to width of bit-field",
3603 code == NE_EXPR);
3604 return constant_boolean_node (code == NE_EXPR, compare_type);
3605 }
3606 }
3607
3608 /* Single-bit compares should always be against zero. */
3609 if (lbitsize == 1 && ! integer_zerop (rhs))
3610 {
3611 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3612 rhs = build_int_cst (type, 0);
3613 }
3614
3615 /* Make a new bitfield reference, shift the constant over the
3616 appropriate number of bits and mask it with the computed mask
3617 (in case this was a signed field). If we changed it, make a new one. */
3618 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3619
3620 rhs = const_binop (BIT_AND_EXPR,
3621 const_binop (LSHIFT_EXPR,
3622 fold_convert_loc (loc, unsigned_type, rhs),
3623 size_int (lbitpos)),
3624 mask);
3625
3626 lhs = build2_loc (loc, code, compare_type,
3627 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3628 return lhs;
3629 }
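/* An illustrative sketch of the constant case; the exact mask and
   position depend on the target's bit allocation and best mode, so the
   numbers below are only an assumption (little-endian, QImode).  Given

       struct S { unsigned a : 3; unsigned b : 4; } s;

   the test "s.b == 5" becomes roughly

       (BIT_FIELD_REF <s, 8, 0> & 0x78) == 0x28

   i.e. one aligned byte load, an AND with the mask built above, and a
   comparison against the constant shifted into place and masked.  */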
3630 \f
3631 /* Subroutine for fold_truth_andor_1: decode a field reference.
3632
3633    If EXP is a component reference, we return the innermost reference.
3634
3635 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3636 set to the starting bit number.
3637
3638 If the innermost field can be completely contained in a mode-sized
3639 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3640
3641    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3642 otherwise it is not changed.
3643
3644 *PUNSIGNEDP is set to the signedness of the field.
3645
3646 *PMASK is set to the mask used. This is either contained in a
3647 BIT_AND_EXPR or derived from the width of the field.
3648
3649 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3650
3651 Return 0 if this is not a component reference or is one that we can't
3652 do anything with. */
3653
3654 static tree
3655 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3656 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3657 int *punsignedp, int *pvolatilep,
3658 tree *pmask, tree *pand_mask)
3659 {
3660 tree outer_type = 0;
3661 tree and_mask = 0;
3662 tree mask, inner, offset;
3663 tree unsigned_type;
3664 unsigned int precision;
3665
3666 /* All the optimizations using this function assume integer fields.
3667 There are problems with FP fields since the type_for_size call
3668 below can fail for, e.g., XFmode. */
3669 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3670 return 0;
3671
3672 /* We are interested in the bare arrangement of bits, so strip everything
3673 that doesn't affect the machine mode. However, record the type of the
3674 outermost expression if it may matter below. */
3675 if (CONVERT_EXPR_P (exp)
3676 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3677 outer_type = TREE_TYPE (exp);
3678 STRIP_NOPS (exp);
3679
3680 if (TREE_CODE (exp) == BIT_AND_EXPR)
3681 {
3682 and_mask = TREE_OPERAND (exp, 1);
3683 exp = TREE_OPERAND (exp, 0);
3684 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3685 if (TREE_CODE (and_mask) != INTEGER_CST)
3686 return 0;
3687 }
3688
3689 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3690 punsignedp, pvolatilep, false);
3691 if ((inner == exp && and_mask == 0)
3692 || *pbitsize < 0 || offset != 0
3693 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3694 return 0;
3695
3696 /* If the number of bits in the reference is the same as the bitsize of
3697 the outer type, then the outer type gives the signedness. Otherwise
3698 (in case of a small bitfield) the signedness is unchanged. */
3699 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3700 *punsignedp = TYPE_UNSIGNED (outer_type);
3701
3702 /* Compute the mask to access the bitfield. */
3703 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3704 precision = TYPE_PRECISION (unsigned_type);
3705
3706 mask = build_int_cst_type (unsigned_type, -1);
3707
3708 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3709 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3710
3711 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3712 if (and_mask != 0)
3713 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3714 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3715
3716 *pmask = mask;
3717 *pand_mask = and_mask;
3718 return inner;
3719 }
3720
3721 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3722 bit positions. */
3723
3724 static int
3725 all_ones_mask_p (const_tree mask, int size)
3726 {
3727 tree type = TREE_TYPE (mask);
3728 unsigned int precision = TYPE_PRECISION (type);
3729 tree tmask;
3730
3731 tmask = build_int_cst_type (signed_type_for (type), -1);
3732
3733 return
3734 tree_int_cst_equal (mask,
3735 const_binop (RSHIFT_EXPR,
3736 const_binop (LSHIFT_EXPR, tmask,
3737 size_int (precision - size)),
3738 size_int (precision - size)));
3739 }
3740
3741 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3742 represents the sign bit of EXP's type. If EXP represents a sign
3743 or zero extension, also test VAL against the unextended type.
3744 The return value is the (sub)expression whose sign bit is VAL,
3745 or NULL_TREE otherwise. */
3746
3747 static tree
3748 sign_bit_p (tree exp, const_tree val)
3749 {
3750 unsigned HOST_WIDE_INT mask_lo, lo;
3751 HOST_WIDE_INT mask_hi, hi;
3752 int width;
3753 tree t;
3754
3755 /* Tree EXP must have an integral type. */
3756 t = TREE_TYPE (exp);
3757 if (! INTEGRAL_TYPE_P (t))
3758 return NULL_TREE;
3759
3760 /* Tree VAL must be an integer constant. */
3761 if (TREE_CODE (val) != INTEGER_CST
3762 || TREE_OVERFLOW (val))
3763 return NULL_TREE;
3764
3765 width = TYPE_PRECISION (t);
3766 if (width > HOST_BITS_PER_WIDE_INT)
3767 {
3768 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3769 lo = 0;
3770
3771 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3772 mask_lo = -1;
3773 }
3774 else
3775 {
3776 hi = 0;
3777 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3778
3779 mask_hi = 0;
3780 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3781 }
3782
3783 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3784 treat VAL as if it were unsigned. */
3785 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3786 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3787 return exp;
3788
3789 /* Handle extension from a narrower type. */
3790 if (TREE_CODE (exp) == NOP_EXPR
3791 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3792 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3793
3794 return NULL_TREE;
3795 }
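/* For instance, for a 16-bit type the mask is 0xffff and the sign bit
   is 0x8000, so VAL matches exactly when its low 16 bits are 0x8000,
   however VAL happens to be extended beyond the type's precision.  */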
3796
3797 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3798 to be evaluated unconditionally. */
3799
3800 static int
3801 simple_operand_p (const_tree exp)
3802 {
3803 /* Strip any conversions that don't change the machine mode. */
3804 STRIP_NOPS (exp);
3805
3806 return (CONSTANT_CLASS_P (exp)
3807 || TREE_CODE (exp) == SSA_NAME
3808 || (DECL_P (exp)
3809 && ! TREE_ADDRESSABLE (exp)
3810 && ! TREE_THIS_VOLATILE (exp)
3811 && ! DECL_NONLOCAL (exp)
3812 /* Don't regard global variables as simple. They may be
3813 allocated in ways unknown to the compiler (shared memory,
3814 #pragma weak, etc). */
3815 && ! TREE_PUBLIC (exp)
3816 && ! DECL_EXTERNAL (exp)
3817 	  /* Weakrefs are not safe to read, since they can be NULL.
3818 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3819 have DECL_WEAK flag set. */
3820 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3821 /* Loading a static variable is unduly expensive, but global
3822 registers aren't expensive. */
3823 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3824 }
3825
3826 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3827 to be evaluated unconditionally.
3828    In addition to simple_operand_p, we assume that comparisons, conversions,
3829 and logic-not operations are simple, if their operands are simple, too. */
3830
3831 static bool
3832 simple_operand_p_2 (tree exp)
3833 {
3834 enum tree_code code;
3835
3836 if (TREE_SIDE_EFFECTS (exp)
3837 || tree_could_trap_p (exp))
3838 return false;
3839
3840 while (CONVERT_EXPR_P (exp))
3841 exp = TREE_OPERAND (exp, 0);
3842
3843 code = TREE_CODE (exp);
3844
3845 if (TREE_CODE_CLASS (code) == tcc_comparison)
3846 return (simple_operand_p (TREE_OPERAND (exp, 0))
3847 && simple_operand_p (TREE_OPERAND (exp, 1)));
3848
3849 if (code == TRUTH_NOT_EXPR)
3850 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3851
3852 return simple_operand_p (exp);
3853 }
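/* So, for example, "(long) a == b" counts as simple when A and B are
   local non-volatile automatics, while "*p == 0" does not, since the
   load through P could trap.  */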
3854
3855 \f
3856 /* The following functions are subroutines to fold_range_test and allow it to
3857 try to change a logical combination of comparisons into a range test.
3858
3859 For example, both
3860 X == 2 || X == 3 || X == 4 || X == 5
3861 and
3862 X >= 2 && X <= 5
3863 are converted to
3864 (unsigned) (X - 2) <= 3
3865
3866 We describe each set of comparisons as being either inside or outside
3867 a range, using a variable named like IN_P, and then describe the
3868 range with a lower and upper bound. If one of the bounds is omitted,
3869 it represents either the highest or lowest value of the type.
3870
3871 In the comments below, we represent a range by two numbers in brackets
3872 preceded by a "+" to designate being inside that range, or a "-" to
3873 designate being outside that range, so the condition can be inverted by
3874 flipping the prefix. An omitted bound is represented by a "-". For
3875 example, "- [-, 10]" means being outside the range starting at the lowest
3876 possible value and ending at 10, in other words, being greater than 10.
3877 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3878 always false.
3879
3880 We set up things so that the missing bounds are handled in a consistent
3881 manner so neither a missing bound nor "true" and "false" need to be
3882 handled using a special case. */
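/* The single comparison in the example above works because subtracting
   the low bound maps the range [2, 5] onto [0, 3], while any X outside
   the range wraps around, as an unsigned value, to something greater
   than 3; so "(unsigned) (X - 2) <= 3" accepts exactly 2 ... 5.  */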
3883
3884 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3885 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3886 and UPPER1_P are nonzero if the respective argument is an upper bound
3887 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3888 must be specified for a comparison. ARG1 will be converted to ARG0's
3889 type if both are specified. */
3890
3891 static tree
3892 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3893 tree arg1, int upper1_p)
3894 {
3895 tree tem;
3896 int result;
3897 int sgn0, sgn1;
3898
3899 /* If neither arg represents infinity, do the normal operation.
3900 Else, if not a comparison, return infinity. Else handle the special
3901 comparison rules. Note that most of the cases below won't occur, but
3902 are handled for consistency. */
3903
3904 if (arg0 != 0 && arg1 != 0)
3905 {
3906 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3907 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3908 STRIP_NOPS (tem);
3909 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3910 }
3911
3912 if (TREE_CODE_CLASS (code) != tcc_comparison)
3913 return 0;
3914
3915    /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 for an
3916       omitted upper bound, and 0 if the bound is present.  In real maths we
3917       cannot assume open-ended ranges are the same.  But this is computer
3918       arithmetic, where numbers are finite, so we can represent an omitted
3919       bound by a value Z greater in magnitude than any representable number.
3920       This permits us to treat unbounded ranges as equal.  */
3921 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3922 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3923 switch (code)
3924 {
3925 case EQ_EXPR:
3926 result = sgn0 == sgn1;
3927 break;
3928 case NE_EXPR:
3929 result = sgn0 != sgn1;
3930 break;
3931 case LT_EXPR:
3932 result = sgn0 < sgn1;
3933 break;
3934 case LE_EXPR:
3935 result = sgn0 <= sgn1;
3936 break;
3937 case GT_EXPR:
3938 result = sgn0 > sgn1;
3939 break;
3940 case GE_EXPR:
3941 result = sgn0 >= sgn1;
3942 break;
3943 default:
3944 gcc_unreachable ();
3945 }
3946
3947 return constant_boolean_node (result, type);
3948 }
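/* For example, an omitted upper bound (SGN == 1) compares greater than
   any bound that is present (SGN == 0), and two omitted lower bounds
   (SGN == -1) compare equal, which is exactly the Z convention
   described above.  */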
3949 \f
3950 /* Helper routine for make_range. Perform one step for it, return
3951 new expression if the loop should continue or NULL_TREE if it should
3952 stop. */
3953
3954 tree
3955 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3956 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3957 bool *strict_overflow_p)
3958 {
3959 tree arg0_type = TREE_TYPE (arg0);
3960 tree n_low, n_high, low = *p_low, high = *p_high;
3961 int in_p = *p_in_p, n_in_p;
3962
3963 switch (code)
3964 {
3965 case TRUTH_NOT_EXPR:
3966 /* We can only do something if the range is testing for zero. */
3967 if (low == NULL_TREE || high == NULL_TREE
3968 || ! integer_zerop (low) || ! integer_zerop (high))
3969 return NULL_TREE;
3970 *p_in_p = ! in_p;
3971 return arg0;
3972
3973 case EQ_EXPR: case NE_EXPR:
3974 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3975 /* We can only do something if the range is testing for zero
3976 and if the second operand is an integer constant. Note that
3977 	 saying something is "in" the range we make is done by
3978 	 complementing IN_P, since the initial state of being not equal
3979 	 to zero is an "out" range; "out" is leaving IN_P alone.  */
3980 if (low == NULL_TREE || high == NULL_TREE
3981 || ! integer_zerop (low) || ! integer_zerop (high)
3982 || TREE_CODE (arg1) != INTEGER_CST)
3983 return NULL_TREE;
3984
3985 switch (code)
3986 {
3987 case NE_EXPR: /* - [c, c] */
3988 low = high = arg1;
3989 break;
3990 case EQ_EXPR: /* + [c, c] */
3991 in_p = ! in_p, low = high = arg1;
3992 break;
3993 case GT_EXPR: /* - [-, c] */
3994 low = 0, high = arg1;
3995 break;
3996 case GE_EXPR: /* + [c, -] */
3997 in_p = ! in_p, low = arg1, high = 0;
3998 break;
3999 case LT_EXPR: /* - [c, -] */
4000 low = arg1, high = 0;
4001 break;
4002 case LE_EXPR: /* + [-, c] */
4003 in_p = ! in_p, low = 0, high = arg1;
4004 break;
4005 default:
4006 gcc_unreachable ();
4007 }
4008
4009 /* If this is an unsigned comparison, we also know that EXP is
4010 greater than or equal to zero. We base the range tests we make
4011 on that fact, so we record it here so we can parse existing
4012 range tests. We test arg0_type since often the return type
4013 of, e.g. EQ_EXPR, is boolean. */
4014 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4015 {
4016 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4017 in_p, low, high, 1,
4018 build_int_cst (arg0_type, 0),
4019 NULL_TREE))
4020 return NULL_TREE;
4021
4022 in_p = n_in_p, low = n_low, high = n_high;
4023
4024 /* If the high bound is missing, but we have a nonzero low
4025 bound, reverse the range so it goes from zero to the low bound
4026 minus 1. */
4027 if (high == 0 && low && ! integer_zerop (low))
4028 {
4029 in_p = ! in_p;
4030 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4031 integer_one_node, 0);
4032 low = build_int_cst (arg0_type, 0);
4033 }
4034 }
4035
4036 *p_low = low;
4037 *p_high = high;
4038 *p_in_p = in_p;
4039 return arg0;
4040
4041 case NEGATE_EXPR:
4042 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4043 low and high are non-NULL, then normalize will DTRT. */
4044 if (!TYPE_UNSIGNED (arg0_type)
4045 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4046 {
4047 if (low == NULL_TREE)
4048 low = TYPE_MIN_VALUE (arg0_type);
4049 if (high == NULL_TREE)
4050 high = TYPE_MAX_VALUE (arg0_type);
4051 }
4052
4053 /* (-x) IN [a,b] -> x in [-b, -a] */
4054 n_low = range_binop (MINUS_EXPR, exp_type,
4055 build_int_cst (exp_type, 0),
4056 0, high, 1);
4057 n_high = range_binop (MINUS_EXPR, exp_type,
4058 build_int_cst (exp_type, 0),
4059 0, low, 0);
4060 if (n_high != 0 && TREE_OVERFLOW (n_high))
4061 return NULL_TREE;
4062 goto normalize;
4063
4064 case BIT_NOT_EXPR:
4065 /* ~ X -> -X - 1 */
4066 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4067 build_int_cst (exp_type, 1));
4068
4069 case PLUS_EXPR:
4070 case MINUS_EXPR:
4071 if (TREE_CODE (arg1) != INTEGER_CST)
4072 return NULL_TREE;
4073
4074 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4075 move a constant to the other side. */
4076 if (!TYPE_UNSIGNED (arg0_type)
4077 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4078 return NULL_TREE;
4079
4080 /* If EXP is signed, any overflow in the computation is undefined,
4081 so we don't worry about it so long as our computations on
4082 the bounds don't overflow. For unsigned, overflow is defined
4083 and this is exactly the right thing. */
4084 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4085 arg0_type, low, 0, arg1, 0);
4086 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4087 arg0_type, high, 1, arg1, 0);
4088 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4089 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4090 return NULL_TREE;
4091
4092 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4093 *strict_overflow_p = true;
4094
4095 normalize:
4096 /* Check for an unsigned range which has wrapped around the maximum
4097 value thus making n_high < n_low, and normalize it. */
4098 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4099 {
4100 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4101 integer_one_node, 0);
4102 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4103 integer_one_node, 0);
4104
4105 /* If the range is of the form +/- [ x+1, x ], we won't
4106 be able to normalize it. But then, it represents the
4107 whole range or the empty set, so make it
4108 +/- [ -, - ]. */
4109 if (tree_int_cst_equal (n_low, low)
4110 && tree_int_cst_equal (n_high, high))
4111 low = high = 0;
4112 else
4113 in_p = ! in_p;
4114 }
4115 else
4116 low = n_low, high = n_high;
4117
4118 *p_low = low;
4119 *p_high = high;
4120 *p_in_p = in_p;
4121 return arg0;
4122
4123 CASE_CONVERT:
4124 case NON_LVALUE_EXPR:
4125 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4126 return NULL_TREE;
4127
4128 if (! INTEGRAL_TYPE_P (arg0_type)
4129 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4130 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4131 return NULL_TREE;
4132
4133 n_low = low, n_high = high;
4134
4135 if (n_low != 0)
4136 n_low = fold_convert_loc (loc, arg0_type, n_low);
4137
4138 if (n_high != 0)
4139 n_high = fold_convert_loc (loc, arg0_type, n_high);
4140
4141 	/* If we're converting arg0 from an unsigned type to exp's
4142 	 signed type, we will be doing the comparison as unsigned.
4143 The tests above have already verified that LOW and HIGH
4144 are both positive.
4145
4146 So we have to ensure that we will handle large unsigned
4147 values the same way that the current signed bounds treat
4148 negative values. */
4149
4150 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4151 {
4152 tree high_positive;
4153 tree equiv_type;
4154 /* For fixed-point modes, we need to pass the saturating flag
4155 as the 2nd parameter. */
4156 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4157 equiv_type
4158 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4159 TYPE_SATURATING (arg0_type));
4160 else
4161 equiv_type
4162 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4163
4164 /* A range without an upper bound is, naturally, unbounded.
4165 Since convert would have cropped a very large value, use
4166 the max value for the destination type. */
4167 high_positive
4168 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4169 : TYPE_MAX_VALUE (arg0_type);
4170
4171 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4172 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4173 fold_convert_loc (loc, arg0_type,
4174 high_positive),
4175 build_int_cst (arg0_type, 1));
4176
4177 /* If the low bound is specified, "and" the range with the
4178 range for which the original unsigned value will be
4179 positive. */
4180 if (low != 0)
4181 {
4182 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4183 1, fold_convert_loc (loc, arg0_type,
4184 integer_zero_node),
4185 high_positive))
4186 return NULL_TREE;
4187
4188 in_p = (n_in_p == in_p);
4189 }
4190 else
4191 {
4192 /* Otherwise, "or" the range with the range of the input
4193 that will be interpreted as negative. */
4194 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4195 1, fold_convert_loc (loc, arg0_type,
4196 integer_zero_node),
4197 high_positive))
4198 return NULL_TREE;
4199
4200 in_p = (in_p != n_in_p);
4201 }
4202 }
4203
4204 *p_low = n_low;
4205 *p_high = n_high;
4206 *p_in_p = in_p;
4207 return arg0;
4208
4209 default:
4210 return NULL_TREE;
4211 }
4212 }
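/* A worked instance of the PLUS_EXPR case, assuming X is an 8-bit
   unsigned char: for "X + 10 <= 4" the incoming range for X + 10 is
   "+ [0, 4]"; subtracting 10 from both bounds in the unsigned type
   gives "+ [246, 250]" for X itself, precisely the values for which
   X + 10 wraps around into 0 ... 4.  */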
4213
4214 /* Given EXP, a logical expression, set the range it is testing into
4215 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4216 actually being tested. *PLOW and *PHIGH will be made of the same
4217 type as the returned expression. If EXP is not a comparison, we
4218 will most likely not be returning a useful value and range. Set
4219 *STRICT_OVERFLOW_P to true if the return value is only valid
4220 because signed overflow is undefined; otherwise, do not change
4221 *STRICT_OVERFLOW_P. */
4222
4223 tree
4224 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4225 bool *strict_overflow_p)
4226 {
4227 enum tree_code code;
4228 tree arg0, arg1 = NULL_TREE;
4229 tree exp_type, nexp;
4230 int in_p;
4231 tree low, high;
4232 location_t loc = EXPR_LOCATION (exp);
4233
4234 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4235 and see if we can refine the range. Some of the cases below may not
4236 happen, but it doesn't seem worth worrying about this. We "continue"
4237 the outer loop when we've changed something; otherwise we "break"
4238 the switch, which will "break" the while. */
4239
4240 in_p = 0;
4241 low = high = build_int_cst (TREE_TYPE (exp), 0);
4242
4243 while (1)
4244 {
4245 code = TREE_CODE (exp);
4246 exp_type = TREE_TYPE (exp);
4247 arg0 = NULL_TREE;
4248
4249 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4250 {
4251 if (TREE_OPERAND_LENGTH (exp) > 0)
4252 arg0 = TREE_OPERAND (exp, 0);
4253 if (TREE_CODE_CLASS (code) == tcc_binary
4254 || TREE_CODE_CLASS (code) == tcc_comparison
4255 || (TREE_CODE_CLASS (code) == tcc_expression
4256 && TREE_OPERAND_LENGTH (exp) > 1))
4257 arg1 = TREE_OPERAND (exp, 1);
4258 }
4259 if (arg0 == NULL_TREE)
4260 break;
4261
4262 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4263 &high, &in_p, strict_overflow_p);
4264 if (nexp == NULL_TREE)
4265 break;
4266 exp = nexp;
4267 }
4268
4269 /* If EXP is a constant, we can evaluate whether this is true or false. */
4270 if (TREE_CODE (exp) == INTEGER_CST)
4271 {
4272 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4273 exp, 0, low, 0))
4274 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4275 exp, 1, high, 1)));
4276 low = high = 0;
4277 exp = 0;
4278 }
4279
4280 *pin_p = in_p, *plow = low, *phigh = high;
4281 return exp;
4282 }
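/* For example, for EXP == "X > 10" the loop takes a single GT_EXPR
   step and returns X with the range "- [-, 10]", i.e. everything
   greater than 10, in the notation described above.  */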
4283 \f
4284 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4285 type, TYPE, return an expression to test if EXP is in (or out of, depending
4286 on IN_P) the range. Return 0 if the test couldn't be created. */
4287
4288 tree
4289 build_range_check (location_t loc, tree type, tree exp, int in_p,
4290 tree low, tree high)
4291 {
4292 tree etype = TREE_TYPE (exp), value;
4293
4294 #ifdef HAVE_canonicalize_funcptr_for_compare
4295 /* Disable this optimization for function pointer expressions
4296 on targets that require function pointer canonicalization. */
4297 if (HAVE_canonicalize_funcptr_for_compare
4298 && TREE_CODE (etype) == POINTER_TYPE
4299 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4300 return NULL_TREE;
4301 #endif
4302
4303 if (! in_p)
4304 {
4305 value = build_range_check (loc, type, exp, 1, low, high);
4306 if (value != 0)
4307 return invert_truthvalue_loc (loc, value);
4308
4309 return 0;
4310 }
4311
4312 if (low == 0 && high == 0)
4313 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4314
4315 if (low == 0)
4316 return fold_build2_loc (loc, LE_EXPR, type, exp,
4317 fold_convert_loc (loc, etype, high));
4318
4319 if (high == 0)
4320 return fold_build2_loc (loc, GE_EXPR, type, exp,
4321 fold_convert_loc (loc, etype, low));
4322
4323 if (operand_equal_p (low, high, 0))
4324 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4325 fold_convert_loc (loc, etype, low));
4326
4327 if (integer_zerop (low))
4328 {
4329 if (! TYPE_UNSIGNED (etype))
4330 {
4331 etype = unsigned_type_for (etype);
4332 high = fold_convert_loc (loc, etype, high);
4333 exp = fold_convert_loc (loc, etype, exp);
4334 }
4335 return build_range_check (loc, type, exp, 1, 0, high);
4336 }
4337
4338 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4339 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4340 {
4341 unsigned HOST_WIDE_INT lo;
4342 HOST_WIDE_INT hi;
4343 int prec;
4344
4345 prec = TYPE_PRECISION (etype);
4346 if (prec <= HOST_BITS_PER_WIDE_INT)
4347 {
4348 hi = 0;
4349 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4350 }
4351 else
4352 {
4353 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4354 lo = HOST_WIDE_INT_M1U;
4355 }
4356
4357 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4358 {
4359 if (TYPE_UNSIGNED (etype))
4360 {
4361 tree signed_etype = signed_type_for (etype);
4362 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4363 etype
4364 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4365 else
4366 etype = signed_etype;
4367 exp = fold_convert_loc (loc, etype, exp);
4368 }
4369 return fold_build2_loc (loc, GT_EXPR, type, exp,
4370 build_int_cst (etype, 0));
4371 }
4372 }
4373
4374 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4375    This requires wrap-around arithmetic for the type of the expression.
4376    First make sure that arithmetic in this type is valid, then make sure
4377 that it wraps around. */
4378 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4379 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4380 TYPE_UNSIGNED (etype));
4381
4382 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4383 {
4384 tree utype, minv, maxv;
4385
4386 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4387 for the type in question, as we rely on this here. */
4388 utype = unsigned_type_for (etype);
4389 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4390 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4391 integer_one_node, 1);
4392 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4393
4394 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4395 minv, 1, maxv, 1)))
4396 etype = utype;
4397 else
4398 return 0;
4399 }
4400
4401 high = fold_convert_loc (loc, etype, high);
4402 low = fold_convert_loc (loc, etype, low);
4403 exp = fold_convert_loc (loc, etype, exp);
4404
4405 value = const_binop (MINUS_EXPR, high, low);
4406
4407
4408 if (POINTER_TYPE_P (etype))
4409 {
4410 if (value != 0 && !TREE_OVERFLOW (value))
4411 {
4412 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4413 return build_range_check (loc, type,
4414 fold_build_pointer_plus_loc (loc, exp, low),
4415 1, build_int_cst (etype, 0), value);
4416 }
4417 return 0;
4418 }
4419
4420 if (value != 0 && !TREE_OVERFLOW (value))
4421 return build_range_check (loc, type,
4422 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4423 1, build_int_cst (etype, 0), value);
4424
4425 return 0;
4426 }
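/* So a request for "+ [5, 9]" on an unsigned EXP falls through to the
   final case: VALUE becomes 4 and the recursive call yields

       EXP - 5 <= 4

   computed in the unsigned type, the canonical one-comparison form of
   the range test.  */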
4427 \f
4428 /* Return the predecessor of VAL in its type, handling the infinite case. */
4429
4430 static tree
4431 range_predecessor (tree val)
4432 {
4433 tree type = TREE_TYPE (val);
4434
4435 if (INTEGRAL_TYPE_P (type)
4436 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4437 return 0;
4438 else
4439 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4440 }
4441
4442 /* Return the successor of VAL in its type, handling the infinite case. */
4443
4444 static tree
4445 range_successor (tree val)
4446 {
4447 tree type = TREE_TYPE (val);
4448
4449 if (INTEGRAL_TYPE_P (type)
4450 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4451 return 0;
4452 else
4453 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4454 }
4455
4456 /* Given two ranges, see if we can merge them into one. Return 1 if we
4457 can, 0 if we can't. Set the output range into the specified parameters. */
4458
4459 bool
4460 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4461 tree high0, int in1_p, tree low1, tree high1)
4462 {
4463 int no_overlap;
4464 int subset;
4465 int temp;
4466 tree tem;
4467 int in_p;
4468 tree low, high;
4469 int lowequal = ((low0 == 0 && low1 == 0)
4470 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4471 low0, 0, low1, 0)));
4472 int highequal = ((high0 == 0 && high1 == 0)
4473 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4474 high0, 1, high1, 1)));
4475
4476    /* Make range 0 be the range that starts first, or ends last if they
4477       start at the same value.  Swap them if that is not already so.  */
4478 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4479 low0, 0, low1, 0))
4480 || (lowequal
4481 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4482 high1, 1, high0, 1))))
4483 {
4484 temp = in0_p, in0_p = in1_p, in1_p = temp;
4485 tem = low0, low0 = low1, low1 = tem;
4486 tem = high0, high0 = high1, high1 = tem;
4487 }
4488
4489 /* Now flag two cases, whether the ranges are disjoint or whether the
4490 second range is totally subsumed in the first. Note that the tests
4491 below are simplified by the ones above. */
4492 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4493 high0, 1, low1, 0));
4494 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4495 high1, 1, high0, 1));
4496
4497 /* We now have four cases, depending on whether we are including or
4498 excluding the two ranges. */
4499 if (in0_p && in1_p)
4500 {
4501 /* If they don't overlap, the result is false. If the second range
4502 is a subset it is the result. Otherwise, the range is from the start
4503 of the second to the end of the first. */
4504 if (no_overlap)
4505 in_p = 0, low = high = 0;
4506 else if (subset)
4507 in_p = 1, low = low1, high = high1;
4508 else
4509 in_p = 1, low = low1, high = high0;
4510 }
4511
4512 else if (in0_p && ! in1_p)
4513 {
4514 /* If they don't overlap, the result is the first range. If they are
4515 equal, the result is false. If the second range is a subset of the
4516 first, and the ranges begin at the same place, we go from just after
4517 the end of the second range to the end of the first. If the second
4518 range is not a subset of the first, or if it is a subset and both
4519 ranges end at the same place, the range starts at the start of the
4520 first range and ends just before the second range.
4521 Otherwise, we can't describe this as a single range. */
4522 if (no_overlap)
4523 in_p = 1, low = low0, high = high0;
4524 else if (lowequal && highequal)
4525 in_p = 0, low = high = 0;
4526 else if (subset && lowequal)
4527 {
4528 low = range_successor (high1);
4529 high = high0;
4530 in_p = 1;
4531 if (low == 0)
4532 {
4533 /* We are in the weird situation where high0 > high1 but
4534 high1 has no successor. Punt. */
4535 return 0;
4536 }
4537 }
4538 else if (! subset || highequal)
4539 {
4540 low = low0;
4541 high = range_predecessor (low1);
4542 in_p = 1;
4543 if (high == 0)
4544 {
4545 /* low0 < low1 but low1 has no predecessor. Punt. */
4546 return 0;
4547 }
4548 }
4549 else
4550 return 0;
4551 }
4552
4553 else if (! in0_p && in1_p)
4554 {
4555 /* If they don't overlap, the result is the second range. If the second
4556 is a subset of the first, the result is false. Otherwise,
4557 the range starts just after the first range and ends at the
4558 end of the second. */
4559 if (no_overlap)
4560 in_p = 1, low = low1, high = high1;
4561 else if (subset || highequal)
4562 in_p = 0, low = high = 0;
4563 else
4564 {
4565 low = range_successor (high0);
4566 high = high1;
4567 in_p = 1;
4568 if (low == 0)
4569 {
4570 /* high1 > high0 but high0 has no successor. Punt. */
4571 return 0;
4572 }
4573 }
4574 }
4575
4576 else
4577 {
4578 /* The case where we are excluding both ranges. Here the complex case
4579 is if they don't overlap. In that case, the only time we have a
4580 range is if they are adjacent. If the second is a subset of the
4581 first, the result is the first. Otherwise, the range to exclude
4582 starts at the beginning of the first range and ends at the end of the
4583 second. */
4584 if (no_overlap)
4585 {
4586 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4587 range_successor (high0),
4588 1, low1, 0)))
4589 in_p = 0, low = low0, high = high1;
4590 else
4591 {
4592 /* Canonicalize - [min, x] into - [-, x]. */
4593 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4594 switch (TREE_CODE (TREE_TYPE (low0)))
4595 {
4596 case ENUMERAL_TYPE:
4597 if (TYPE_PRECISION (TREE_TYPE (low0))
4598 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4599 break;
4600 /* FALLTHROUGH */
4601 case INTEGER_TYPE:
4602 if (tree_int_cst_equal (low0,
4603 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4604 low0 = 0;
4605 break;
4606 case POINTER_TYPE:
4607 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4608 && integer_zerop (low0))
4609 low0 = 0;
4610 break;
4611 default:
4612 break;
4613 }
4614
4615 /* Canonicalize - [x, max] into - [x, -]. */
4616 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4617 switch (TREE_CODE (TREE_TYPE (high1)))
4618 {
4619 case ENUMERAL_TYPE:
4620 if (TYPE_PRECISION (TREE_TYPE (high1))
4621 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4622 break;
4623 /* FALLTHROUGH */
4624 case INTEGER_TYPE:
4625 if (tree_int_cst_equal (high1,
4626 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4627 high1 = 0;
4628 break;
4629 case POINTER_TYPE:
4630 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4631 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4632 high1, 1,
4633 integer_one_node, 1)))
4634 high1 = 0;
4635 break;
4636 default:
4637 break;
4638 }
4639
4640 /* The ranges might be also adjacent between the maximum and
4641 minimum values of the given type. For
4642 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4643 return + [x + 1, y - 1]. */
4644 if (low0 == 0 && high1 == 0)
4645 {
4646 low = range_successor (high0);
4647 high = range_predecessor (low1);
4648 if (low == 0 || high == 0)
4649 return 0;
4650
4651 in_p = 1;
4652 }
4653 else
4654 return 0;
4655 }
4656 }
4657 else if (subset)
4658 in_p = 0, low = low0, high = high0;
4659 else
4660 in_p = 0, low = low0, high = high1;
4661 }
4662
4663 *pin_p = in_p, *plow = low, *phigh = high;
4664 return 1;
4665 }
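/* For example, merging "+ [-, 5]" (X <= 5) with "+ [2, -]" (X >= 2):
   after the initial ordering step "+ [-, 5]" is range 0; the two
   overlap and neither is a subset of the other, so the in0_p && in1_p
   case produces the range from the start of the second to the end of
   the first, "+ [2, 5]".  */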
4666 \f
4667
4668 /* Subroutine of fold, looking inside expressions of the form
4669 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4670 of the COND_EXPR. This function is being used also to optimize
4671 A op B ? C : A, by reversing the comparison first.
4672
4673 Return a folded expression whose code is not a COND_EXPR
4674 anymore, or NULL_TREE if no folding opportunity is found. */
4675
4676 static tree
4677 fold_cond_expr_with_comparison (location_t loc, tree type,
4678 tree arg0, tree arg1, tree arg2)
4679 {
4680 enum tree_code comp_code = TREE_CODE (arg0);
4681 tree arg00 = TREE_OPERAND (arg0, 0);
4682 tree arg01 = TREE_OPERAND (arg0, 1);
4683 tree arg1_type = TREE_TYPE (arg1);
4684 tree tem;
4685
4686 STRIP_NOPS (arg1);
4687 STRIP_NOPS (arg2);
4688
4689 /* If we have A op 0 ? A : -A, consider applying the following
4690 transformations:
4691
4692 A == 0? A : -A same as -A
4693 A != 0? A : -A same as A
4694 A >= 0? A : -A same as abs (A)
4695 A > 0? A : -A same as abs (A)
4696 A <= 0? A : -A same as -abs (A)
4697 A < 0? A : -A same as -abs (A)
4698
4699 None of these transformations work for modes with signed
4700 zeros. If A is +/-0, the first two transformations will
4701 change the sign of the result (from +0 to -0, or vice
4702 versa). The last four will fix the sign of the result,
4703 even though the original expressions could be positive or
4704 negative, depending on the sign of A.
4705
4706 Note that all these transformations are correct if A is
4707 NaN, since the two alternatives (A and -A) are also NaNs. */
4708 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4709 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4710 ? real_zerop (arg01)
4711 : integer_zerop (arg01))
4712 && ((TREE_CODE (arg2) == NEGATE_EXPR
4713 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4714 /* In the case that A is of the form X-Y, '-A' (arg2) may
4715 have already been folded to Y-X, check for that. */
4716 || (TREE_CODE (arg1) == MINUS_EXPR
4717 && TREE_CODE (arg2) == MINUS_EXPR
4718 && operand_equal_p (TREE_OPERAND (arg1, 0),
4719 TREE_OPERAND (arg2, 1), 0)
4720 && operand_equal_p (TREE_OPERAND (arg1, 1),
4721 TREE_OPERAND (arg2, 0), 0))))
4722 switch (comp_code)
4723 {
4724 case EQ_EXPR:
4725 case UNEQ_EXPR:
4726 tem = fold_convert_loc (loc, arg1_type, arg1);
4727 return pedantic_non_lvalue_loc (loc,
4728 fold_convert_loc (loc, type,
4729 negate_expr (tem)));
4730 case NE_EXPR:
4731 case LTGT_EXPR:
4732 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4733 case UNGE_EXPR:
4734 case UNGT_EXPR:
4735 if (flag_trapping_math)
4736 break;
4737 /* Fall through. */
4738 case GE_EXPR:
4739 case GT_EXPR:
4740 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4741 arg1 = fold_convert_loc (loc, signed_type_for
4742 (TREE_TYPE (arg1)), arg1);
4743 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4744 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4745 case UNLE_EXPR:
4746 case UNLT_EXPR:
4747 if (flag_trapping_math)
4748 break;
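	/* Fall through.  */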
4749 case LE_EXPR:
4750 case LT_EXPR:
4751 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4752 arg1 = fold_convert_loc (loc, signed_type_for
4753 (TREE_TYPE (arg1)), arg1);
4754 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4755 return negate_expr (fold_convert_loc (loc, type, tem));
4756 default:
4757 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4758 break;
4759 }
4760
4761 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4762 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4763 both transformations are correct when A is NaN: A != 0
4764 is then true, and A == 0 is false. */
4765
4766 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4767 && integer_zerop (arg01) && integer_zerop (arg2))
4768 {
4769 if (comp_code == NE_EXPR)
4770 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4771 else if (comp_code == EQ_EXPR)
4772 return build_zero_cst (type);
4773 }
4774
4775 /* Try some transformations of A op B ? A : B.
4776
4777 A == B? A : B same as B
4778 A != B? A : B same as A
4779 A >= B? A : B same as max (A, B)
4780 A > B? A : B same as max (B, A)
4781 A <= B? A : B same as min (A, B)
4782 A < B? A : B same as min (B, A)
4783
4784 As above, these transformations don't work in the presence
4785 of signed zeros. For example, if A and B are zeros of
4786 opposite sign, the first two transformations will change
4787 the sign of the result. In the last four, the original
4788 expressions give different results for (A=+0, B=-0) and
4789 (A=-0, B=+0), but the transformed expressions do not.
4790
4791 The first two transformations are correct if either A or B
4792 is a NaN. In the first transformation, the condition will
4793 be false, and B will indeed be chosen. In the case of the
4794 second transformation, the condition A != B will be true,
4795 and A will be chosen.
4796
4797 The conversions to max() and min() are not correct if B is
4798 a number and A is not. The conditions in the original
4799 expressions will be false, so all four give B. The min()
4800 and max() versions would give a NaN instead. */
4801 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4802 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4803 /* Avoid these transformations if the COND_EXPR may be used
4804 as an lvalue in the C++ front-end. PR c++/19199. */
4805 && (in_gimple_form
4806 || VECTOR_TYPE_P (type)
4807 || (strcmp (lang_hooks.name, "GNU C++") != 0
4808 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4809 || ! maybe_lvalue_p (arg1)
4810 || ! maybe_lvalue_p (arg2)))
4811 {
4812 tree comp_op0 = arg00;
4813 tree comp_op1 = arg01;
4814 tree comp_type = TREE_TYPE (comp_op0);
4815
4816 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4817 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4818 {
4819 comp_type = type;
4820 comp_op0 = arg1;
4821 comp_op1 = arg2;
4822 }
4823
4824 switch (comp_code)
4825 {
4826 case EQ_EXPR:
4827 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4828 case NE_EXPR:
4829 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4830 case LE_EXPR:
4831 case LT_EXPR:
4832 case UNLE_EXPR:
4833 case UNLT_EXPR:
4834 /* In C++ a ?: expression can be an lvalue, so put the
4835 operand which will be used if they are equal first
4836 so that we can convert this back to the
4837 corresponding COND_EXPR. */
4838 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4839 {
4840 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4841 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4842 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4843 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4844 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4845 comp_op1, comp_op0);
4846 return pedantic_non_lvalue_loc (loc,
4847 fold_convert_loc (loc, type, tem));
4848 }
4849 break;
4850 case GE_EXPR:
4851 case GT_EXPR:
4852 case UNGE_EXPR:
4853 case UNGT_EXPR:
4854 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4855 {
4856 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4857 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4858 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4859 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4860 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4861 comp_op1, comp_op0);
4862 return pedantic_non_lvalue_loc (loc,
4863 fold_convert_loc (loc, type, tem));
4864 }
4865 break;
4866 case UNEQ_EXPR:
4867 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4868 return pedantic_non_lvalue_loc (loc,
4869 fold_convert_loc (loc, type, arg2));
4870 break;
4871 case LTGT_EXPR:
4872 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4873 return pedantic_non_lvalue_loc (loc,
4874 fold_convert_loc (loc, type, arg1));
4875 break;
4876 default:
4877 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4878 break;
4879 }
4880 }
4881
4882 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4883 we might still be able to simplify this. For example,
4884 if C1 is one less or one more than C2, this might have started
4885 out as a MIN or MAX and been transformed by this function.
4886 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4887
4888 if (INTEGRAL_TYPE_P (type)
4889 && TREE_CODE (arg01) == INTEGER_CST
4890 && TREE_CODE (arg2) == INTEGER_CST)
4891 switch (comp_code)
4892 {
4893 case EQ_EXPR:
4894 if (TREE_CODE (arg1) == INTEGER_CST)
4895 break;
4896 /* We can replace A with C1 in this case. */
4897 arg1 = fold_convert_loc (loc, type, arg01);
4898 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4899
4900 case LT_EXPR:
4901 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4902 MIN_EXPR, to preserve the signedness of the comparison. */
4903 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4904 OEP_ONLY_CONST)
4905 && operand_equal_p (arg01,
4906 const_binop (PLUS_EXPR, arg2,
4907 build_int_cst (type, 1)),
4908 OEP_ONLY_CONST))
4909 {
4910 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4911 fold_convert_loc (loc, TREE_TYPE (arg00),
4912 arg2));
4913 return pedantic_non_lvalue_loc (loc,
4914 fold_convert_loc (loc, type, tem));
4915 }
4916 break;
4917
4918 case LE_EXPR:
4919 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4920 as above. */
4921 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4922 OEP_ONLY_CONST)
4923 && operand_equal_p (arg01,
4924 const_binop (MINUS_EXPR, arg2,
4925 build_int_cst (type, 1)),
4926 OEP_ONLY_CONST))
4927 {
4928 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4929 fold_convert_loc (loc, TREE_TYPE (arg00),
4930 arg2));
4931 return pedantic_non_lvalue_loc (loc,
4932 fold_convert_loc (loc, type, tem));
4933 }
4934 break;
4935
4936 case GT_EXPR:
4937 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4938 MAX_EXPR, to preserve the signedness of the comparison. */
4939 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4940 OEP_ONLY_CONST)
4941 && operand_equal_p (arg01,
4942 const_binop (MINUS_EXPR, arg2,
4943 build_int_cst (type, 1)),
4944 OEP_ONLY_CONST))
4945 {
4946 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4947 fold_convert_loc (loc, TREE_TYPE (arg00),
4948 arg2));
4949 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4950 }
4951 break;
4952
4953 case GE_EXPR:
4954 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4955 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4956 OEP_ONLY_CONST)
4957 && operand_equal_p (arg01,
4958 const_binop (PLUS_EXPR, arg2,
4959 build_int_cst (type, 1)),
4960 OEP_ONLY_CONST))
4961 {
4962 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4963 fold_convert_loc (loc, TREE_TYPE (arg00),
4964 arg2));
4965 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4966 }
4967 break;
4968 case NE_EXPR:
4969 break;
4970 default:
4971 gcc_unreachable ();
4972 }
4973
4974 return NULL_TREE;
4975 }
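/* A concrete instance of the constant case above: "X < 48 ? X : 47"
   has C1 == C2 + 1, so the LT_EXPR arm rebuilds it as MIN (X, 47).  */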
4976
4977
4978 \f
4979 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4980 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4981 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4982 false) >= 2)
4983 #endif
4984
4985 /* EXP is some logical combination of boolean tests. See if we can
4986 merge it into some range test. Return the new tree if so. */
4987
4988 static tree
4989 fold_range_test (location_t loc, enum tree_code code, tree type,
4990 tree op0, tree op1)
4991 {
4992 int or_op = (code == TRUTH_ORIF_EXPR
4993 || code == TRUTH_OR_EXPR);
4994 int in0_p, in1_p, in_p;
4995 tree low0, low1, low, high0, high1, high;
4996 bool strict_overflow_p = false;
4997 tree tem, lhs, rhs;
4998 const char * const warnmsg = G_("assuming signed overflow does not occur "
4999 "when simplifying range test");
5000
5001 if (!INTEGRAL_TYPE_P (type))
5002 return 0;
5003
5004 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5005 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5006
5007 /* If this is an OR operation, invert both sides; we will invert
5008 again at the end. */
5009 if (or_op)
5010 in0_p = ! in0_p, in1_p = ! in1_p;
5011
5012 /* If both expressions are the same, if we can merge the ranges, and we
5013 can build the range test, return it or it inverted. If one of the
5014 ranges is always true or always false, consider it to be the same
5015 expression as the other. */
5016 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5017 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5018 in1_p, low1, high1)
5019 && 0 != (tem = (build_range_check (loc, type,
5020 lhs != 0 ? lhs
5021 : rhs != 0 ? rhs : integer_zero_node,
5022 in_p, low, high))))
5023 {
5024 if (strict_overflow_p)
5025 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5026 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5027 }
5028
5029   /* On machines where branches are expensive, if this is a
5030 short-circuited branch and the underlying object on both sides
5031 is the same, make a non-short-circuit operation. */
5032 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5033 && lhs != 0 && rhs != 0
5034 && (code == TRUTH_ANDIF_EXPR
5035 || code == TRUTH_ORIF_EXPR)
5036 && operand_equal_p (lhs, rhs, 0))
5037 {
5038 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5039 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5040 which cases we can't do this. */
5041 if (simple_operand_p (lhs))
5042 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5043 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5044 type, op0, op1);
5045
5046 else if (!lang_hooks.decls.global_bindings_p ()
5047 && !CONTAINS_PLACEHOLDER_P (lhs))
5048 {
5049 tree common = save_expr (lhs);
5050
5051 if (0 != (lhs = build_range_check (loc, type, common,
5052 or_op ? ! in0_p : in0_p,
5053 low0, high0))
5054 && (0 != (rhs = build_range_check (loc, type, common,
5055 or_op ? ! in1_p : in1_p,
5056 low1, high1))))
5057 {
5058 if (strict_overflow_p)
5059 fold_overflow_warning (warnmsg,
5060 WARN_STRICT_OVERFLOW_COMPARISON);
5061 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5062 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5063 type, lhs, rhs);
5064 }
5065 }
5066 }
5067
5068 return 0;
5069 }
5070 \f
5071 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5072 bit value. Arrange things so the extra bits will be set to zero if and
5073    only if C is sign-extended to its full width.  If MASK is nonzero,
5074 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5075
5076 static tree
5077 unextend (tree c, int p, int unsignedp, tree mask)
5078 {
5079 tree type = TREE_TYPE (c);
5080 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5081 tree temp;
5082
5083 if (p == modesize || unsignedp)
5084 return c;
5085
5086 /* We work by getting just the sign bit into the low-order bit, then
5087 into the high-order bit, then sign-extend. We then XOR that value
5088 with C. */
5089 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5090 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5091
5092 /* We must use a signed type in order to get an arithmetic right shift.
5093 However, we must also avoid introducing accidental overflows, so that
5094 a subsequent call to integer_zerop will work. Hence we must
5095 do the type conversion here. At this point, the constant is either
5096 zero or one, and the conversion to a signed type can never overflow.
5097 We could get an overflow if this conversion is done anywhere else. */
5098 if (TYPE_UNSIGNED (type))
5099 temp = fold_convert (signed_type_for (type), temp);
5100
5101 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5102 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5103 if (mask != 0)
5104 temp = const_binop (BIT_AND_EXPR, temp,
5105 fold_convert (TREE_TYPE (c), mask));
5106 /* If necessary, convert the type back to match the type of C. */
5107 if (TYPE_UNSIGNED (type))
5108 temp = fold_convert (type, temp);
5109
5110 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5111 }
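/* For example, with an 8-bit type, P == 4 and C == 0xfa (the
   sign-extension of the 4-bit value 0xa): TEMP works out to 0xf0 and
   C ^ TEMP == 0x0a, so the extra bits come out zero.  Had C been the
   zero-extended 0x0a instead, the result would be 0xfa, whose nonzero
   high bits flag that it cannot equal a sign-extended field.  */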
5112 \f
5113 /* For an expression that has the form
5114 (A && B) || ~B
5115 or
5116 (A || B) && ~B,
5117 we can drop one of the inner expressions and simplify to
5118 A || ~B
5119 or
5120 A && ~B
5121 LOC is the location of the resulting expression. OP is the inner
5122 logical operation; the left-hand side in the examples above, while CMPOP
5123 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5124 removing a condition that guards another, as in
5125 (A != NULL && A->...) || A == NULL
5126 which we must not transform. If RHS_ONLY is true, only eliminate the
5127 right-most operand of the inner logical operation. */
5128
5129 static tree
5130 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5131 bool rhs_only)
5132 {
5133 tree type = TREE_TYPE (cmpop);
5134 enum tree_code code = TREE_CODE (cmpop);
5135 enum tree_code truthop_code = TREE_CODE (op);
5136 tree lhs = TREE_OPERAND (op, 0);
5137 tree rhs = TREE_OPERAND (op, 1);
5138 tree orig_lhs = lhs, orig_rhs = rhs;
5139 enum tree_code rhs_code = TREE_CODE (rhs);
5140 enum tree_code lhs_code = TREE_CODE (lhs);
5141 enum tree_code inv_code;
5142
5143 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5144 return NULL_TREE;
5145
5146 if (TREE_CODE_CLASS (code) != tcc_comparison)
5147 return NULL_TREE;
5148
5149 if (rhs_code == truthop_code)
5150 {
5151 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5152 if (newrhs != NULL_TREE)
5153 {
5154 rhs = newrhs;
5155 rhs_code = TREE_CODE (rhs);
5156 }
5157 }
5158 if (lhs_code == truthop_code && !rhs_only)
5159 {
5160 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5161 if (newlhs != NULL_TREE)
5162 {
5163 lhs = newlhs;
5164 lhs_code = TREE_CODE (lhs);
5165 }
5166 }
5167
5168 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5169 if (inv_code == rhs_code
5170 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5171 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5172 return lhs;
5173 if (!rhs_only && inv_code == lhs_code
5174 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5175 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5176 return rhs;
5177 if (rhs != orig_rhs || lhs != orig_lhs)
5178 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5179 lhs, rhs);
5180 return NULL_TREE;
5181 }
5182
5183 /* Find ways of folding logical expressions of LHS and RHS:
5184 Try to merge two comparisons to the same innermost item.
5185 Look for range tests like "ch >= '0' && ch <= '9'".
5186 Look for combinations of simple terms on machines with expensive branches
5187 and evaluate the RHS unconditionally.
5188
5189 For example, if we have p->a == 2 && p->b == 4 and we can make an
5190 object large enough to span both A and B, we can do this with a comparison
5191    against the object ANDed with a mask.
5192
5193 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5194 operations to do this with one comparison.
5195
5196    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5197    function and the one above.
5198
5199 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5200 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5201
5202 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5203 two operands.
5204
5205 We return the simplified tree or 0 if no optimization is possible. */
5206
5207 static tree
5208 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5209 tree lhs, tree rhs)
5210 {
5211 /* If this is the "or" of two comparisons, we can do something if
5212 the comparisons are NE_EXPR. If this is the "and", we can do something
5213 if the comparisons are EQ_EXPR. I.e.,
5214 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5215
5216 WANTED_CODE is this operation code. For single bit fields, we can
5217 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5218 comparison for one-bit fields. */
5219
5220 enum tree_code wanted_code;
5221 enum tree_code lcode, rcode;
5222 tree ll_arg, lr_arg, rl_arg, rr_arg;
5223 tree ll_inner, lr_inner, rl_inner, rr_inner;
5224 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5225 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5226 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5227 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5228 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5229 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5230 enum machine_mode lnmode, rnmode;
5231 tree ll_mask, lr_mask, rl_mask, rr_mask;
5232 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5233 tree l_const, r_const;
5234 tree lntype, rntype, result;
5235 HOST_WIDE_INT first_bit, end_bit;
5236 int volatilep;
5237
5238 /* Start by getting the comparison codes. Fail if anything is volatile.
5239 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5240 it were surrounded with a NE_EXPR. */
5241
5242 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5243 return 0;
5244
5245 lcode = TREE_CODE (lhs);
5246 rcode = TREE_CODE (rhs);
5247
5248 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5249 {
5250 lhs = build2 (NE_EXPR, truth_type, lhs,
5251 build_int_cst (TREE_TYPE (lhs), 0));
5252 lcode = NE_EXPR;
5253 }
5254
5255 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5256 {
5257 rhs = build2 (NE_EXPR, truth_type, rhs,
5258 build_int_cst (TREE_TYPE (rhs), 0));
5259 rcode = NE_EXPR;
5260 }
5261
5262 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5263 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5264 return 0;
5265
5266 ll_arg = TREE_OPERAND (lhs, 0);
5267 lr_arg = TREE_OPERAND (lhs, 1);
5268 rl_arg = TREE_OPERAND (rhs, 0);
5269 rr_arg = TREE_OPERAND (rhs, 1);
5270
5271 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5272 if (simple_operand_p (ll_arg)
5273 && simple_operand_p (lr_arg))
5274 {
5275 if (operand_equal_p (ll_arg, rl_arg, 0)
5276 && operand_equal_p (lr_arg, rr_arg, 0))
5277 {
5278 result = combine_comparisons (loc, code, lcode, rcode,
5279 truth_type, ll_arg, lr_arg);
5280 if (result)
5281 return result;
5282 }
5283 else if (operand_equal_p (ll_arg, rr_arg, 0)
5284 && operand_equal_p (lr_arg, rl_arg, 0))
5285 {
5286 result = combine_comparisons (loc, code, lcode,
5287 swap_tree_comparison (rcode),
5288 truth_type, ll_arg, lr_arg);
5289 if (result)
5290 return result;
5291 }
5292 }
5293
5294 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5295 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5296
5297 /* If the RHS can be evaluated unconditionally and its operands are
5298 simple, it wins to evaluate the RHS unconditionally on machines
5299 with expensive branches. In this case, this isn't a comparison
5300 that can be merged. */
5301
5302 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5303 false) >= 2
5304 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5305 && simple_operand_p (rl_arg)
5306 && simple_operand_p (rr_arg))
5307 {
5308 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5309 if (code == TRUTH_OR_EXPR
5310 && lcode == NE_EXPR && integer_zerop (lr_arg)
5311 && rcode == NE_EXPR && integer_zerop (rr_arg)
5312 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5313 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5314 return build2_loc (loc, NE_EXPR, truth_type,
5315 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5316 ll_arg, rl_arg),
5317 build_int_cst (TREE_TYPE (ll_arg), 0));
5318
5319 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5320 if (code == TRUTH_AND_EXPR
5321 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5322 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5323 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5324 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5325 return build2_loc (loc, EQ_EXPR, truth_type,
5326 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5327 ll_arg, rl_arg),
5328 build_int_cst (TREE_TYPE (ll_arg), 0));
5329 }
5330
5331 /* See if the comparisons can be merged. Then get all the parameters for
5332 each side. */
5333
5334 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5335 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5336 return 0;
5337
5338 volatilep = 0;
5339 ll_inner = decode_field_reference (loc, ll_arg,
5340 &ll_bitsize, &ll_bitpos, &ll_mode,
5341 &ll_unsignedp, &volatilep, &ll_mask,
5342 &ll_and_mask);
5343 lr_inner = decode_field_reference (loc, lr_arg,
5344 &lr_bitsize, &lr_bitpos, &lr_mode,
5345 &lr_unsignedp, &volatilep, &lr_mask,
5346 &lr_and_mask);
5347 rl_inner = decode_field_reference (loc, rl_arg,
5348 &rl_bitsize, &rl_bitpos, &rl_mode,
5349 &rl_unsignedp, &volatilep, &rl_mask,
5350 &rl_and_mask);
5351 rr_inner = decode_field_reference (loc, rr_arg,
5352 &rr_bitsize, &rr_bitpos, &rr_mode,
5353 &rr_unsignedp, &volatilep, &rr_mask,
5354 &rr_and_mask);
5355
5356 /* The inner operation on the lhs of each comparison must be the
5357 same if we are to be able to do anything.
5358 Then see if we have constants. If not, the same must be true for
5359 the rhs's. */
5360 if (volatilep || ll_inner == 0 || rl_inner == 0
5361 || ! operand_equal_p (ll_inner, rl_inner, 0))
5362 return 0;
5363
5364 if (TREE_CODE (lr_arg) == INTEGER_CST
5365 && TREE_CODE (rr_arg) == INTEGER_CST)
5366 l_const = lr_arg, r_const = rr_arg;
5367 else if (lr_inner == 0 || rr_inner == 0
5368 || ! operand_equal_p (lr_inner, rr_inner, 0))
5369 return 0;
5370 else
5371 l_const = r_const = 0;
5372
5373 /* If either comparison code is not correct for our logical operation,
5374 fail. However, we can convert a one-bit comparison against zero into
5375 the opposite comparison against that bit being set in the field. */
5376
5377 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5378 if (lcode != wanted_code)
5379 {
5380 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5381 {
5382 /* Make the left operand unsigned, since we are only interested
5383 in the value of one bit. Otherwise we are doing the wrong
5384 thing below. */
5385 ll_unsignedp = 1;
5386 l_const = ll_mask;
5387 }
5388 else
5389 return 0;
5390 }
5391
5392 /* This is analogous to the code for l_const above. */
5393 if (rcode != wanted_code)
5394 {
5395 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5396 {
5397 rl_unsignedp = 1;
5398 r_const = rl_mask;
5399 }
5400 else
5401 return 0;
5402 }
5403
5404 /* See if we can find a mode that contains both fields being compared on
5405 the left. If we can't, fail. Otherwise, update all constants and masks
5406 to be relative to a field of that size. */
5407 first_bit = MIN (ll_bitpos, rl_bitpos);
5408 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5409 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5410 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5411 volatilep);
5412 if (lnmode == VOIDmode)
5413 return 0;
5414
5415 lnbitsize = GET_MODE_BITSIZE (lnmode);
5416 lnbitpos = first_bit & ~ (lnbitsize - 1);
5417 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5418 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5419
5420 if (BYTES_BIG_ENDIAN)
5421 {
5422 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5423 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5424 }
5425
5426 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5427 size_int (xll_bitpos));
5428 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5429 size_int (xrl_bitpos));
5430
5431 if (l_const)
5432 {
5433 l_const = fold_convert_loc (loc, lntype, l_const);
5434 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5435 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5436 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5437 fold_build1_loc (loc, BIT_NOT_EXPR,
5438 lntype, ll_mask))))
5439 {
5440 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5441
5442 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5443 }
5444 }
5445 if (r_const)
5446 {
5447 r_const = fold_convert_loc (loc, lntype, r_const);
5448 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5449 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5450 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5451 fold_build1_loc (loc, BIT_NOT_EXPR,
5452 lntype, rl_mask))))
5453 {
5454 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5455
5456 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5457 }
5458 }
5459
5460 /* If the right sides are not constant, do the same for them. Also,
5461 disallow this optimization if a size or signedness mismatch occurs
5462 between the left and right sides. */
5463 if (l_const == 0)
5464 {
5465 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5466 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5467 /* Make sure the two fields on the right
5468 correspond to the left without being swapped. */
5469 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5470 return 0;
5471
5472 first_bit = MIN (lr_bitpos, rr_bitpos);
5473 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5474 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5475 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5476 volatilep);
5477 if (rnmode == VOIDmode)
5478 return 0;
5479
5480 rnbitsize = GET_MODE_BITSIZE (rnmode);
5481 rnbitpos = first_bit & ~ (rnbitsize - 1);
5482 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5483 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5484
5485 if (BYTES_BIG_ENDIAN)
5486 {
5487 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5488 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5489 }
5490
5491 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5492 rntype, lr_mask),
5493 size_int (xlr_bitpos));
5494 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5495 rntype, rr_mask),
5496 size_int (xrr_bitpos));
5497
5498 /* Make a mask that corresponds to both fields being compared.
5499 Do this for both items being compared. If the operands are the
5500 same size and the bits being compared are in the same position
5501 then we can do this by masking both and comparing the masked
5502 results. */
5503 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5504 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5505 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5506 {
5507 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5508 ll_unsignedp || rl_unsignedp);
5509 if (! all_ones_mask_p (ll_mask, lnbitsize))
5510 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5511
5512 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5513 lr_unsignedp || rr_unsignedp);
5514 if (! all_ones_mask_p (lr_mask, rnbitsize))
5515 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5516
5517 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5518 }
5519
5520 /* There is still another way we can do something: If both pairs of
5521 fields being compared are adjacent, we may be able to make a wider
5522 field containing them both.
5523
5524 Note that we still must mask the lhs/rhs expressions. Furthermore,
5525 the mask must be shifted to account for the shift done by
5526 make_bit_field_ref. */
5527 if ((ll_bitsize + ll_bitpos == rl_bitpos
5528 && lr_bitsize + lr_bitpos == rr_bitpos)
5529 || (ll_bitpos == rl_bitpos + rl_bitsize
5530 && lr_bitpos == rr_bitpos + rr_bitsize))
5531 {
5532 tree type;
5533
5534 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5535 ll_bitsize + rl_bitsize,
5536 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5537 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5538 lr_bitsize + rr_bitsize,
5539 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5540
5541 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5542 size_int (MIN (xll_bitpos, xrl_bitpos)));
5543 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5544 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5545
5546 /* Convert to the smaller type before masking out unwanted bits. */
5547 type = lntype;
5548 if (lntype != rntype)
5549 {
5550 if (lnbitsize > rnbitsize)
5551 {
5552 lhs = fold_convert_loc (loc, rntype, lhs);
5553 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5554 type = rntype;
5555 }
5556 else if (lnbitsize < rnbitsize)
5557 {
5558 rhs = fold_convert_loc (loc, lntype, rhs);
5559 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5560 type = lntype;
5561 }
5562 }
5563
5564 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5565 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5566
5567 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5568 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5569
5570 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5571 }
5572
5573 return 0;
5574 }
5575
5576 /* Handle the case of comparisons with constants. If there is something in
5577 common between the masks, those bits of the constants must be the same.
5578 If not, the condition is always false. Test for this to avoid generating
5579 incorrect code below. */
5580 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5581 if (! integer_zerop (result)
5582 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5583 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5584 {
5585 if (wanted_code == NE_EXPR)
5586 {
5587 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5588 return constant_boolean_node (true, truth_type);
5589 }
5590 else
5591 {
5592 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5593 return constant_boolean_node (false, truth_type);
5594 }
5595 }
5596
5597 /* Construct the expression we will return. First get the component
5598 reference we will make. Unless the mask is all ones across the width
5599 of that field, perform the mask operation. Then compare with the
5600 merged constant. */
5601 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5602 ll_unsignedp || rl_unsignedp);
5603
5604 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5605 if (! all_ones_mask_p (ll_mask, lnbitsize))
5606 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5607
5608 return build2_loc (loc, wanted_code, truth_type, result,
5609 const_binop (BIT_IOR_EXPR, l_const, r_const));
5610 }
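
/* A worked example of the constant-comparison merge above (a sketch;
   exact values depend on the target's bit-field layout).  Given

       struct s { unsigned int a : 4; unsigned int b : 4; } *p;
       ... p->a == 2 && p->b == 4 ...

   both fields fit in one QImode byte, so each constant is shifted to
   its field's position and IORed into a merged constant; with a
   typical little-endian layout the test becomes a single byte-wide
   comparison against 0x42, and since the merged mask covers the whole
   byte the BIT_AND_EXPR is omitted entirely.  */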
5611 \f
5612 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5613 constant. */
5614
5615 static tree
5616 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5617 tree op0, tree op1)
5618 {
5619 tree arg0 = op0;
5620 enum tree_code op_code;
5621 tree comp_const;
5622 tree minmax_const;
5623 int consts_equal, consts_lt;
5624 tree inner;
5625
5626 STRIP_SIGN_NOPS (arg0);
5627
5628 op_code = TREE_CODE (arg0);
5629 minmax_const = TREE_OPERAND (arg0, 1);
5630 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5631 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5632 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5633 inner = TREE_OPERAND (arg0, 0);
5634
5635 /* If something does not permit us to optimize, return the original tree. */
5636 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5637 || TREE_CODE (comp_const) != INTEGER_CST
5638 || TREE_OVERFLOW (comp_const)
5639 || TREE_CODE (minmax_const) != INTEGER_CST
5640 || TREE_OVERFLOW (minmax_const))
5641 return NULL_TREE;
5642
5643 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5644 and GT_EXPR, doing the rest with recursive calls using logical
5645 simplifications. */
5646 switch (code)
5647 {
5648 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5649 {
5650 tree tem
5651 = optimize_minmax_comparison (loc,
5652 invert_tree_comparison (code, false),
5653 type, op0, op1);
5654 if (tem)
5655 return invert_truthvalue_loc (loc, tem);
5656 return NULL_TREE;
5657 }
5658
5659 case GE_EXPR:
5660 return
5661 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5662 optimize_minmax_comparison
5663 (loc, EQ_EXPR, type, arg0, comp_const),
5664 optimize_minmax_comparison
5665 (loc, GT_EXPR, type, arg0, comp_const));
5666
5667 case EQ_EXPR:
5668 if (op_code == MAX_EXPR && consts_equal)
5669 /* MAX (X, 0) == 0 -> X <= 0 */
5670 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5671
5672 else if (op_code == MAX_EXPR && consts_lt)
5673 /* MAX (X, 0) == 5 -> X == 5 */
5674 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5675
5676 else if (op_code == MAX_EXPR)
5677 /* MAX (X, 0) == -1 -> false */
5678 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5679
5680 else if (consts_equal)
5681 /* MIN (X, 0) == 0 -> X >= 0 */
5682 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5683
5684 else if (consts_lt)
5685 /* MIN (X, 0) == 5 -> false */
5686 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5687
5688 else
5689 /* MIN (X, 0) == -1 -> X == -1 */
5690 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5691
5692 case GT_EXPR:
5693 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5694 /* MAX (X, 0) > 0 -> X > 0
5695 MAX (X, 0) > 5 -> X > 5 */
5696 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5697
5698 else if (op_code == MAX_EXPR)
5699 /* MAX (X, 0) > -1 -> true */
5700 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5701
5702 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5703 /* MIN (X, 0) > 0 -> false
5704 MIN (X, 0) > 5 -> false */
5705 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5706
5707 else
5708 /* MIN (X, 0) > -1 -> X > -1 */
5709 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5710
5711 default:
5712 return NULL_TREE;
5713 }
5714 }
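
/* An illustrative trace of the recursive decomposition above:
   MIN (x, 4) >= 3 has no direct case, so GE_EXPR rewrites it as

       MIN (x, 4) == 3 || MIN (x, 4) > 3

   and the EQ_EXPR and GT_EXPR cases then reduce the two halves to
   x == 3 and x > 3 respectively, so the whole test becomes
   x == 3 || x > 3, which later folding can tighten further.  */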
5715 \f
5716 /* T is an integer expression that is being multiplied, divided, or taken a
5717 modulus (CODE says which and what kind of divide or modulus) by a
5718 constant C. See if we can eliminate that operation by folding it with
5719 other operations already in T. WIDE_TYPE, if non-null, is a type that
5720 should be used for the computation if wider than our type.
5721
5722 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5723 (X * 2) + (Y * 4). We must, however, be assured that either the original
5724 expression would not overflow or that overflow is undefined for the type
5725 in the language in question.
5726
5727 If we return a non-null expression, it is an equivalent form of the
5728 original computation, but need not be in the original type.
5729
5730 We set *STRICT_OVERFLOW_P to true if the return values depends on
5731 signed overflow being undefined. Otherwise we do not change
5732 *STRICT_OVERFLOW_P. */
5733
5734 static tree
5735 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5736 bool *strict_overflow_p)
5737 {
5738 /* To avoid exponential search depth, refuse to allow recursion past
5739 three levels. Beyond that (1) it's highly unlikely that we'll find
5740 something interesting and (2) we've probably processed it before
5741 when we built the inner expression. */
5742
5743 static int depth;
5744 tree ret;
5745
5746 if (depth > 3)
5747 return NULL;
5748
5749 depth++;
5750 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5751 depth--;
5752
5753 return ret;
5754 }
5755
5756 static tree
5757 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5758 bool *strict_overflow_p)
5759 {
5760 tree type = TREE_TYPE (t);
5761 enum tree_code tcode = TREE_CODE (t);
5762 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5763 > GET_MODE_SIZE (TYPE_MODE (type)))
5764 ? wide_type : type);
5765 tree t1, t2;
5766 int same_p = tcode == code;
5767 tree op0 = NULL_TREE, op1 = NULL_TREE;
5768 bool sub_strict_overflow_p;
5769
5770 /* Don't deal with constants of zero here; they confuse the code below. */
5771 if (integer_zerop (c))
5772 return NULL_TREE;
5773
5774 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5775 op0 = TREE_OPERAND (t, 0);
5776
5777 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5778 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5779
5780 /* Note that we need not handle conditional operations here since fold
5781 already handles those cases. So just do arithmetic here. */
5782 switch (tcode)
5783 {
5784 case INTEGER_CST:
5785 /* For a constant, we can always simplify if we are a multiply
5786 or (for divide and modulus) if it is a multiple of our constant. */
5787 if (code == MULT_EXPR
5788 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5789 return const_binop (code, fold_convert (ctype, t),
5790 fold_convert (ctype, c));
5791 break;
5792
5793 CASE_CONVERT: case NON_LVALUE_EXPR:
5794 /* If op0 is an expression ... */
5795 if ((COMPARISON_CLASS_P (op0)
5796 || UNARY_CLASS_P (op0)
5797 || BINARY_CLASS_P (op0)
5798 || VL_EXP_CLASS_P (op0)
5799 || EXPRESSION_CLASS_P (op0))
5800 /* ... and has wrapping overflow, and its type is smaller
5801 than ctype, then we cannot pass through as widening. */
5802 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5803 && (TYPE_PRECISION (ctype)
5804 > TYPE_PRECISION (TREE_TYPE (op0))))
5805 /* ... or this is a truncation (t is narrower than op0),
5806 then we cannot pass through this narrowing. */
5807 || (TYPE_PRECISION (type)
5808 < TYPE_PRECISION (TREE_TYPE (op0)))
5809 /* ... or signedness changes for division or modulus,
5810 then we cannot pass through this conversion. */
5811 || (code != MULT_EXPR
5812 && (TYPE_UNSIGNED (ctype)
5813 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5814 /* ... or has undefined overflow while the converted to
5815 type has not, we cannot do the operation in the inner type
5816 as that would introduce undefined overflow. */
5817 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5818 && !TYPE_OVERFLOW_UNDEFINED (type))))
5819 break;
5820
5821 /* Pass the constant down and see if we can make a simplification. If
5822 we can, replace this expression with the inner simplification for
5823 possible later conversion to our or some other type. */
5824 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5825 && TREE_CODE (t2) == INTEGER_CST
5826 && !TREE_OVERFLOW (t2)
5827 && (0 != (t1 = extract_muldiv (op0, t2, code,
5828 code == MULT_EXPR
5829 ? ctype : NULL_TREE,
5830 strict_overflow_p))))
5831 return t1;
5832 break;
5833
5834 case ABS_EXPR:
5835 /* If widening the type changes it from signed to unsigned, then we
5836 must avoid building ABS_EXPR itself as unsigned. */
5837 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5838 {
5839 tree cstype = (*signed_type_for) (ctype);
5840 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5841 != 0)
5842 {
5843 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5844 return fold_convert (ctype, t1);
5845 }
5846 break;
5847 }
5848 /* If the constant is negative, we cannot simplify this. */
5849 if (tree_int_cst_sgn (c) == -1)
5850 break;
5851 /* FALLTHROUGH */
5852 case NEGATE_EXPR:
5853 /* For division and modulus, type can't be unsigned, as e.g.
5854 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5855 For signed types, even with wrapping overflow, this is fine. */
5856 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5857 break;
5858 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5859 != 0)
5860 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5861 break;
5862
5863 case MIN_EXPR: case MAX_EXPR:
5864 /* If widening the type changes the signedness, then we can't perform
5865 this optimization as that changes the result. */
5866 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5867 break;
5868
5869 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5870 sub_strict_overflow_p = false;
5871 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5872 &sub_strict_overflow_p)) != 0
5873 && (t2 = extract_muldiv (op1, c, code, wide_type,
5874 &sub_strict_overflow_p)) != 0)
5875 {
5876 if (tree_int_cst_sgn (c) < 0)
5877 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5878 if (sub_strict_overflow_p)
5879 *strict_overflow_p = true;
5880 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5881 fold_convert (ctype, t2));
5882 }
5883 break;
5884
5885 case LSHIFT_EXPR: case RSHIFT_EXPR:
5886 /* If the second operand is constant, this is a multiplication
5887 or floor division, by a power of two, so we can treat it that
5888 way unless the multiplier or divisor overflows. Signed
5889 left-shift overflow is implementation-defined rather than
5890 undefined in C90, so do not convert signed left shift into
5891 multiplication. */
5892 if (TREE_CODE (op1) == INTEGER_CST
5893 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5894 /* const_binop may not detect overflow correctly,
5895 so check for it explicitly here. */
5896 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5897 && TREE_INT_CST_HIGH (op1) == 0
5898 && 0 != (t1 = fold_convert (ctype,
5899 const_binop (LSHIFT_EXPR,
5900 size_one_node,
5901 op1)))
5902 && !TREE_OVERFLOW (t1))
5903 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5904 ? MULT_EXPR : FLOOR_DIV_EXPR,
5905 ctype,
5906 fold_convert (ctype, op0),
5907 t1),
5908 c, code, wide_type, strict_overflow_p);
5909 break;
5910
5911 case PLUS_EXPR: case MINUS_EXPR:
5912 /* See if we can eliminate the operation on both sides. If we can, we
5913 can return a new PLUS or MINUS. If we can't, the only remaining
5914 cases where we can do anything are if the second operand is a
5915 constant. */
5916 sub_strict_overflow_p = false;
5917 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5918 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5919 if (t1 != 0 && t2 != 0
5920 && (code == MULT_EXPR
5921 /* If not multiplication, we can only do this if both operands
5922 are divisible by c. */
5923 || (multiple_of_p (ctype, op0, c)
5924 && multiple_of_p (ctype, op1, c))))
5925 {
5926 if (sub_strict_overflow_p)
5927 *strict_overflow_p = true;
5928 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5929 fold_convert (ctype, t2));
5930 }
5931
5932 /* If this was a subtraction, negate OP1 and set it to be an addition.
5933 This simplifies the logic below. */
5934 if (tcode == MINUS_EXPR)
5935 {
5936 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5937 /* If OP1 was not easily negatable, the constant may be OP0. */
5938 if (TREE_CODE (op0) == INTEGER_CST)
5939 {
5940 tree tem = op0;
5941 op0 = op1;
5942 op1 = tem;
5943 tem = t1;
5944 t1 = t2;
5945 t2 = tem;
5946 }
5947 }
5948
5949 if (TREE_CODE (op1) != INTEGER_CST)
5950 break;
5951
5952 /* If either OP1 or C are negative, this optimization is not safe for
5953 some of the division and remainder types while for others we need
5954 to change the code. */
5955 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5956 {
5957 if (code == CEIL_DIV_EXPR)
5958 code = FLOOR_DIV_EXPR;
5959 else if (code == FLOOR_DIV_EXPR)
5960 code = CEIL_DIV_EXPR;
5961 else if (code != MULT_EXPR
5962 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5963 break;
5964 }
5965
5966 /* If it's a multiply or a division/modulus operation of a multiple
5967 of our constant, do the operation and verify it doesn't overflow. */
5968 if (code == MULT_EXPR
5969 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5970 {
5971 op1 = const_binop (code, fold_convert (ctype, op1),
5972 fold_convert (ctype, c));
5973 /* We allow the constant to overflow with wrapping semantics. */
5974 if (op1 == 0
5975 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5976 break;
5977 }
5978 else
5979 break;
5980
5981 /* If we have an unsigned type, we cannot widen the operation since it
5982 will change the result if the original computation overflowed. */
5983 if (TYPE_UNSIGNED (ctype) && ctype != type)
5984 break;
5985
5986 /* If we were able to eliminate our operation from the first side,
5987 apply our operation to the second side and reform the PLUS. */
5988 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5989 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5990
5991 /* The last case is if we are a multiply. In that case, we can
5992 apply the distributive law to commute the multiply and addition
5993 if the multiplication of the constants doesn't overflow
5994 and overflow is defined. With undefined overflow
5995 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5996 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5997 return fold_build2 (tcode, ctype,
5998 fold_build2 (code, ctype,
5999 fold_convert (ctype, op0),
6000 fold_convert (ctype, c)),
6001 op1);
6002
6003 break;
6004
6005 case MULT_EXPR:
6006 /* We have a special case here if we are doing something like
6007 (C * 8) % 4 since we know that's zero. */
6008 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6009 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6010 /* If the multiplication can overflow we cannot optimize this. */
6011 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6012 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6013 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6014 {
6015 *strict_overflow_p = true;
6016 return omit_one_operand (type, integer_zero_node, op0);
6017 }
6018
6019 /* ... fall through ... */
6020
6021 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6022 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6023 /* If we can extract our operation from the LHS, do so and return a
6024 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6025 do something only if the second operand is a constant. */
6026 if (same_p
6027 && (t1 = extract_muldiv (op0, c, code, wide_type,
6028 strict_overflow_p)) != 0)
6029 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6030 fold_convert (ctype, op1));
6031 else if (tcode == MULT_EXPR && code == MULT_EXPR
6032 && (t1 = extract_muldiv (op1, c, code, wide_type,
6033 strict_overflow_p)) != 0)
6034 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6035 fold_convert (ctype, t1));
6036 else if (TREE_CODE (op1) != INTEGER_CST)
6037 return 0;
6038
6039 /* If these are the same operation types, we can associate them
6040 assuming no overflow. */
6041 if (tcode == code)
6042 {
6043 double_int mul;
6044 bool overflow_p;
6045 unsigned prec = TYPE_PRECISION (ctype);
6046 bool uns = TYPE_UNSIGNED (ctype);
6047 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6048 double_int dic = tree_to_double_int (c).ext (prec, uns);
6049 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6050 overflow_p = ((!uns && overflow_p)
6051 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6052 if (!double_int_fits_to_tree_p (ctype, mul)
6053 && ((uns && tcode != MULT_EXPR) || !uns))
6054 overflow_p = 1;
6055 if (!overflow_p)
6056 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6057 double_int_to_tree (ctype, mul));
6058 }
6059
6060 /* If these operations "cancel" each other, we have the main
6061 optimizations of this pass, which occur when either constant is a
6062 multiple of the other, in which case we replace this with a single
6063 operation of either CODE or TCODE.
6064
6065 If we have an unsigned type, we cannot do this since it will change
6066 the result if the original computation overflowed. */
6067 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6068 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6069 || (tcode == MULT_EXPR
6070 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6071 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6072 && code != MULT_EXPR)))
6073 {
6074 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6075 {
6076 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6077 *strict_overflow_p = true;
6078 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6079 fold_convert (ctype,
6080 const_binop (TRUNC_DIV_EXPR,
6081 op1, c)));
6082 }
6083 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6084 {
6085 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6086 *strict_overflow_p = true;
6087 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6088 fold_convert (ctype,
6089 const_binop (TRUNC_DIV_EXPR,
6090 c, op1)));
6091 }
6092 }
6093 break;
6094
6095 default:
6096 break;
6097 }
6098
6099 return 0;
6100 }
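
/* A sketch of how the PLUS_EXPR case composes with the INTEGER_CST
   and MULT_EXPR cases: for ((x * 8) + 20) / 4 both addends are
   multiples of 4, so extract_muldiv recurses into each side and the
   whole expression becomes (x * 2) + 5, assuming the original
   computation cannot overflow or overflow is undefined for the
   type.  */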
6101 \f
6102 /* Return a node which has the indicated constant VALUE (either 0 or
6103 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6104 and is of the indicated TYPE. */
6105
6106 tree
6107 constant_boolean_node (bool value, tree type)
6108 {
6109 if (type == integer_type_node)
6110 return value ? integer_one_node : integer_zero_node;
6111 else if (type == boolean_type_node)
6112 return value ? boolean_true_node : boolean_false_node;
6113 else if (TREE_CODE (type) == VECTOR_TYPE)
6114 return build_vector_from_val (type,
6115 build_int_cst (TREE_TYPE (type),
6116 value ? -1 : 0));
6117 else
6118 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6119 }
6120
6121
6122 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6123 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6124 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6125 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6126 COND is the first argument to CODE; otherwise (as in the example
6127 given here), it is the second argument. TYPE is the type of the
6128 original expression. Return NULL_TREE if no simplification is
6129 possible. */
6130
6131 static tree
6132 fold_binary_op_with_conditional_arg (location_t loc,
6133 enum tree_code code,
6134 tree type, tree op0, tree op1,
6135 tree cond, tree arg, int cond_first_p)
6136 {
6137 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6138 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6139 tree test, true_value, false_value;
6140 tree lhs = NULL_TREE;
6141 tree rhs = NULL_TREE;
6142 enum tree_code cond_code = COND_EXPR;
6143
6144 if (TREE_CODE (cond) == COND_EXPR
6145 || TREE_CODE (cond) == VEC_COND_EXPR)
6146 {
6147 test = TREE_OPERAND (cond, 0);
6148 true_value = TREE_OPERAND (cond, 1);
6149 false_value = TREE_OPERAND (cond, 2);
6150 /* If this operand throws an exception and so has void type, it
6151 does not make sense to try to perform a logical or arithmetic
6152 operation involving it. */
6153 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6154 lhs = true_value;
6155 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6156 rhs = false_value;
6157 }
6158 else
6159 {
6160 tree testtype = TREE_TYPE (cond);
6161 test = cond;
6162 true_value = constant_boolean_node (true, testtype);
6163 false_value = constant_boolean_node (false, testtype);
6164 }
6165
6166 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6167 cond_code = VEC_COND_EXPR;
6168
6169 /* This transformation is only worthwhile if we don't have to wrap ARG
6170 in a SAVE_EXPR and the operation can be simplified without recursing
6171 on at least one of the branches once it's pushed inside the COND_EXPR. */
6172 if (!TREE_CONSTANT (arg)
6173 && (TREE_SIDE_EFFECTS (arg)
6174 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6175 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6176 return NULL_TREE;
6177
6178 arg = fold_convert_loc (loc, arg_type, arg);
6179 if (lhs == 0)
6180 {
6181 true_value = fold_convert_loc (loc, cond_type, true_value);
6182 if (cond_first_p)
6183 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6184 else
6185 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6186 }
6187 if (rhs == 0)
6188 {
6189 false_value = fold_convert_loc (loc, cond_type, false_value);
6190 if (cond_first_p)
6191 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6192 else
6193 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6194 }
6195
6196 /* Check that we have simplified at least one of the branches. */
6197 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6198 return NULL_TREE;
6199
6200 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6201 }
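
/* For instance (a sketch of the comparison form): 10 + (x < y) takes
   the ELSE branch above, builds true_value = 1 and false_value = 0,
   and distributes the addition into

       (x < y) ? 10 + 1 : 10 + 0

   which immediately folds to (x < y) ? 11 : 10; both arms became
   constant, so the transformation is accepted.  */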
6202
6203 \f
6204 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6205
6206 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6207 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6208 ADDEND is the same as X.
6209
6210 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6211 and finite. The problematic cases are when X is zero, and its mode
6212 has signed zeros. In the case of rounding towards -infinity,
6213 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6214 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6215
6216 bool
6217 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6218 {
6219 if (!real_zerop (addend))
6220 return false;
6221
6222 /* Don't allow the fold with -fsignaling-nans. */
6223 if (HONOR_SNANS (TYPE_MODE (type)))
6224 return false;
6225
6226 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6227 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6228 return true;
6229
6230 /* In a vector or complex, we would need to check the sign of all zeros. */
6231 if (TREE_CODE (addend) != REAL_CST)
6232 return false;
6233
6234 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6235 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6236 negate = !negate;
6237
6238 /* The mode has signed zeros, and we have to honor their sign.
6239 In this situation, there is only one case we can return true for.
6240 X - 0 is the same as X unless rounding towards -infinity is
6241 supported. */
6242 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6243 }
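
/* Concretely: x + 0.0 cannot be folded to x when signed zeros are
   honored, because (-0.0) + 0.0 is +0.0; but x - 0.0 (and, after the
   sign flip above, x + -0.0) is safe unless rounding towards
   -infinity makes 0.0 - 0.0 yield -0.0, which is why the final test
   rejects sign-dependent rounding modes.  */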
6244
6245 /* Subroutine of fold() that checks comparisons of built-in math
6246 functions against real constants.
6247
6248 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6249 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6250 is the type of the result and ARG0 and ARG1 are the operands of the
6251 comparison. ARG1 must be a TREE_REAL_CST.
6252
6253 The function returns the constant folded tree if a simplification
6254 can be made, and NULL_TREE otherwise. */
6255
6256 static tree
6257 fold_mathfn_compare (location_t loc,
6258 enum built_in_function fcode, enum tree_code code,
6259 tree type, tree arg0, tree arg1)
6260 {
6261 REAL_VALUE_TYPE c;
6262
6263 if (BUILTIN_SQRT_P (fcode))
6264 {
6265 tree arg = CALL_EXPR_ARG (arg0, 0);
6266 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6267
6268 c = TREE_REAL_CST (arg1);
6269 if (REAL_VALUE_NEGATIVE (c))
6270 {
6271 /* sqrt(x) < y is always false, if y is negative. */
6272 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6273 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6274
6275 /* sqrt(x) > y is always true, if y is negative and we
6276 don't care about NaNs, i.e. negative values of x. */
6277 if (code == NE_EXPR || !HONOR_NANS (mode))
6278 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6279
6280 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6281 return fold_build2_loc (loc, GE_EXPR, type, arg,
6282 build_real (TREE_TYPE (arg), dconst0));
6283 }
6284 else if (code == GT_EXPR || code == GE_EXPR)
6285 {
6286 REAL_VALUE_TYPE c2;
6287
6288 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6289 real_convert (&c2, mode, &c2);
6290
6291 if (REAL_VALUE_ISINF (c2))
6292 {
6293 /* sqrt(x) > y is x == +Inf, when y is very large. */
6294 if (HONOR_INFINITIES (mode))
6295 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6296 build_real (TREE_TYPE (arg), c2));
6297
6298 /* sqrt(x) > y is always false, when y is very large
6299 and we don't care about infinities. */
6300 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6301 }
6302
6303 /* sqrt(x) > c is the same as x > c*c. */
6304 return fold_build2_loc (loc, code, type, arg,
6305 build_real (TREE_TYPE (arg), c2));
6306 }
6307 else if (code == LT_EXPR || code == LE_EXPR)
6308 {
6309 REAL_VALUE_TYPE c2;
6310
6311 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6312 real_convert (&c2, mode, &c2);
6313
6314 if (REAL_VALUE_ISINF (c2))
6315 {
6316 /* sqrt(x) < y is always true, when y is a very large
6317 value and we don't care about NaNs or Infinities. */
6318 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6319 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6320
6321 /* sqrt(x) < y is x != +Inf when y is very large and we
6322 don't care about NaNs. */
6323 if (! HONOR_NANS (mode))
6324 return fold_build2_loc (loc, NE_EXPR, type, arg,
6325 build_real (TREE_TYPE (arg), c2));
6326
6327 /* sqrt(x) < y is x >= 0 when y is very large and we
6328 don't care about Infinities. */
6329 if (! HONOR_INFINITIES (mode))
6330 return fold_build2_loc (loc, GE_EXPR, type, arg,
6331 build_real (TREE_TYPE (arg), dconst0));
6332
6333 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6334 arg = save_expr (arg);
6335 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6336 fold_build2_loc (loc, GE_EXPR, type, arg,
6337 build_real (TREE_TYPE (arg),
6338 dconst0)),
6339 fold_build2_loc (loc, NE_EXPR, type, arg,
6340 build_real (TREE_TYPE (arg),
6341 c2)));
6342 }
6343
6344 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6345 if (! HONOR_NANS (mode))
6346 return fold_build2_loc (loc, code, type, arg,
6347 build_real (TREE_TYPE (arg), c2));
6348
6349 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6350 arg = save_expr (arg);
6351 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6352 fold_build2_loc (loc, GE_EXPR, type, arg,
6353 build_real (TREE_TYPE (arg),
6354 dconst0)),
6355 fold_build2_loc (loc, code, type, arg,
6356 build_real (TREE_TYPE (arg),
6357 c2)));
6358 }
6359 }
6360
6361 return NULL_TREE;
6362 }
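
/* Two sample folds from the sqrt cases above: sqrt (x) > 2.0 becomes
   x > 4.0 by squaring the constant (valid even with NaNs, since both
   comparisons are false for a NaN or negative x), while for negative
   constants sqrt (x) < -1.0 folds straight to false because sqrt
   never returns a negative value.  */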
6363
6364 /* Subroutine of fold() that optimizes comparisons against Infinities,
6365 either +Inf or -Inf.
6366
6367 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6368 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6369 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6370
6371 The function returns the constant folded tree if a simplification
6372 can be made, and NULL_TREE otherwise. */
6373
6374 static tree
6375 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6376 tree arg0, tree arg1)
6377 {
6378 enum machine_mode mode;
6379 REAL_VALUE_TYPE max;
6380 tree temp;
6381 bool neg;
6382
6383 mode = TYPE_MODE (TREE_TYPE (arg0));
6384
6385 /* For negative infinity swap the sense of the comparison. */
6386 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6387 if (neg)
6388 code = swap_tree_comparison (code);
6389
6390 switch (code)
6391 {
6392 case GT_EXPR:
6393 /* x > +Inf is always false, if we ignore sNaNs. */
6394 if (HONOR_SNANS (mode))
6395 return NULL_TREE;
6396 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6397
6398 case LE_EXPR:
6399 /* x <= +Inf is always true, if we don't care about NaNs. */
6400 if (! HONOR_NANS (mode))
6401 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6402
6403 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6404 arg0 = save_expr (arg0);
6405 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6406
6407 case EQ_EXPR:
6408 case GE_EXPR:
6409 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6410 real_maxval (&max, neg, mode);
6411 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6412 arg0, build_real (TREE_TYPE (arg0), max));
6413
6414 case LT_EXPR:
6415 /* x < +Inf is always equal to x <= DBL_MAX. */
6416 real_maxval (&max, neg, mode);
6417 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6418 arg0, build_real (TREE_TYPE (arg0), max));
6419
6420 case NE_EXPR:
6421 /* x != +Inf is always equal to !(x > DBL_MAX). */
6422 real_maxval (&max, neg, mode);
6423 if (! HONOR_NANS (mode))
6424 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6425 arg0, build_real (TREE_TYPE (arg0), max));
6426
6427 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6428 arg0, build_real (TREE_TYPE (arg0), max));
6429 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6430
6431 default:
6432 break;
6433 }
6434
6435 return NULL_TREE;
6436 }
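
/* Sample folds, writing MAX for the largest finite value of the mode
   (DBL_MAX for double): x < +Inf becomes x <= MAX, x >= +Inf becomes
   x > MAX, and x != +Inf becomes x <= MAX once NaNs can be ignored;
   comparisons against -Inf are handled by first swapping the sense
   of the comparison.  */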
6437
6438 /* Subroutine of fold() that optimizes comparisons of a division by
6439 a nonzero integer constant against an integer constant, i.e.
6440 X/C1 op C2.
6441
6442 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6443 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6444 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6445
6446 The function returns the constant folded tree if a simplification
6447 can be made, and NULL_TREE otherwise. */
6448
6449 static tree
6450 fold_div_compare (location_t loc,
6451 enum tree_code code, tree type, tree arg0, tree arg1)
6452 {
6453 tree prod, tmp, hi, lo;
6454 tree arg00 = TREE_OPERAND (arg0, 0);
6455 tree arg01 = TREE_OPERAND (arg0, 1);
6456 double_int val;
6457 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6458 bool neg_overflow;
6459 bool overflow;
6460
6461 /* We have to do this the hard way to detect unsigned overflow.
6462 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6463 val = TREE_INT_CST (arg01)
6464 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6465 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6466 neg_overflow = false;
6467
6468 if (unsigned_p)
6469 {
6470 tmp = int_const_binop (MINUS_EXPR, arg01,
6471 build_int_cst (TREE_TYPE (arg01), 1));
6472 lo = prod;
6473
6474 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6475 val = TREE_INT_CST (prod)
6476 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6477 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6478 -1, overflow | TREE_OVERFLOW (prod));
6479 }
6480 else if (tree_int_cst_sgn (arg01) >= 0)
6481 {
6482 tmp = int_const_binop (MINUS_EXPR, arg01,
6483 build_int_cst (TREE_TYPE (arg01), 1));
6484 switch (tree_int_cst_sgn (arg1))
6485 {
6486 case -1:
6487 neg_overflow = true;
6488 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6489 hi = prod;
6490 break;
6491
6492 case 0:
6493 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6494 hi = tmp;
6495 break;
6496
6497 case 1:
6498 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6499 lo = prod;
6500 break;
6501
6502 default:
6503 gcc_unreachable ();
6504 }
6505 }
6506 else
6507 {
6508 /* A negative divisor reverses the relational operators. */
6509 code = swap_tree_comparison (code);
6510
6511 tmp = int_const_binop (PLUS_EXPR, arg01,
6512 build_int_cst (TREE_TYPE (arg01), 1));
6513 switch (tree_int_cst_sgn (arg1))
6514 {
6515 case -1:
6516 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6517 lo = prod;
6518 break;
6519
6520 case 0:
6521 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6522 lo = tmp;
6523 break;
6524
6525 case 1:
6526 neg_overflow = true;
6527 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6528 hi = prod;
6529 break;
6530
6531 default:
6532 gcc_unreachable ();
6533 }
6534 }
6535
6536 switch (code)
6537 {
6538 case EQ_EXPR:
6539 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6540 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6541 if (TREE_OVERFLOW (hi))
6542 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6543 if (TREE_OVERFLOW (lo))
6544 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6545 return build_range_check (loc, type, arg00, 1, lo, hi);
6546
6547 case NE_EXPR:
6548 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6549 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6550 if (TREE_OVERFLOW (hi))
6551 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6552 if (TREE_OVERFLOW (lo))
6553 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6554 return build_range_check (loc, type, arg00, 0, lo, hi);
6555
6556 case LT_EXPR:
6557 if (TREE_OVERFLOW (lo))
6558 {
6559 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6560 return omit_one_operand_loc (loc, type, tmp, arg00);
6561 }
6562 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6563
6564 case LE_EXPR:
6565 if (TREE_OVERFLOW (hi))
6566 {
6567 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6568 return omit_one_operand_loc (loc, type, tmp, arg00);
6569 }
6570 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6571
6572 case GT_EXPR:
6573 if (TREE_OVERFLOW (hi))
6574 {
6575 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6576 return omit_one_operand_loc (loc, type, tmp, arg00);
6577 }
6578 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6579
6580 case GE_EXPR:
6581 if (TREE_OVERFLOW (lo))
6582 {
6583 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6584 return omit_one_operand_loc (loc, type, tmp, arg00);
6585 }
6586 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6587
6588 default:
6589 break;
6590 }
6591
6592 return NULL_TREE;
6593 }
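
/* A worked instance of the signed, positive-divisor case: for
   x / 4 == 3 we get prod = 12, tmp = 3, lo = 12 and hi = 15, so the
   comparison folds to the range check 12 <= x && x <= 15 (which
   build_range_check typically emits as (unsigned) (x - 12) <= 3).  */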
6594
6595
6596 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6597 equality/inequality test, then return a simplified form of the test
6598 using a sign test. Otherwise return NULL. TYPE is the desired
6599 result type. */
6600
6601 static tree
6602 fold_single_bit_test_into_sign_test (location_t loc,
6603 enum tree_code code, tree arg0, tree arg1,
6604 tree result_type)
6605 {
6606 /* If this is testing a single bit, we can optimize the test. */
6607 if ((code == NE_EXPR || code == EQ_EXPR)
6608 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6609 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6610 {
6611 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6612 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6613 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6614
6615 if (arg00 != NULL_TREE
6616 /* This is only a win if casting to a signed type is cheap,
6617 i.e. when arg00's type is not a partial mode. */
6618 && TYPE_PRECISION (TREE_TYPE (arg00))
6619 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6620 {
6621 tree stype = signed_type_for (TREE_TYPE (arg00));
6622 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6623 result_type,
6624 fold_convert_loc (loc, stype, arg00),
6625 build_int_cst (stype, 0));
6626 }
6627 }
6628
6629 return NULL_TREE;
6630 }
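
/* For example, when A is a 32-bit int, (A & 0x80000000) != 0 tests
   exactly the sign bit, so sign_bit_p succeeds and the test is
   rewritten as A < 0 (and (A & 0x80000000) == 0 as A >= 0).  */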
6631
6632 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6633 equality/inequality test, then return a simplified form of
6634 the test using shifts and logical operations. Otherwise return
6635 NULL. TYPE is the desired result type. */
6636
6637 tree
6638 fold_single_bit_test (location_t loc, enum tree_code code,
6639 tree arg0, tree arg1, tree result_type)
6640 {
6641 /* If this is testing a single bit, we can optimize the test. */
6642 if ((code == NE_EXPR || code == EQ_EXPR)
6643 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6644 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6645 {
6646 tree inner = TREE_OPERAND (arg0, 0);
6647 tree type = TREE_TYPE (arg0);
6648 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6649 enum machine_mode operand_mode = TYPE_MODE (type);
6650 int ops_unsigned;
6651 tree signed_type, unsigned_type, intermediate_type;
6652 tree tem, one;
6653
6654 /* First, see if we can fold the single bit test into a sign-bit
6655 test. */
6656 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6657 result_type);
6658 if (tem)
6659 return tem;
6660
6661 /* Otherwise we have (A & C) != 0 where C is a single bit,
6662 convert that into ((A >> C2) & 1), where C2 = log2(C).
6663 Similarly for (A & C) == 0. */
6664
6665 /* If INNER is a right shift by a constant and it plus BITNUM does
6666 not overflow, adjust BITNUM and INNER. */
6667 if (TREE_CODE (inner) == RSHIFT_EXPR
6668 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6669 && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
6670 && bitnum < TYPE_PRECISION (type)
6671 && (tree_to_uhwi (TREE_OPERAND (inner, 1))
6672 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6673 {
6674 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6675 inner = TREE_OPERAND (inner, 0);
6676 }
6677
6678 /* If we are going to be able to omit the AND below, we must do our
6679 operations as unsigned. If we must use the AND, we have a choice.
6680 Normally unsigned is faster, but for some machines signed is. */
6681 #ifdef LOAD_EXTEND_OP
6682 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6683 && !flag_syntax_only) ? 0 : 1;
6684 #else
6685 ops_unsigned = 1;
6686 #endif
6687
6688 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6689 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6690 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6691 inner = fold_convert_loc (loc, intermediate_type, inner);
6692
6693 if (bitnum != 0)
6694 inner = build2 (RSHIFT_EXPR, intermediate_type,
6695 inner, size_int (bitnum));
6696
6697 one = build_int_cst (intermediate_type, 1);
6698
6699 if (code == EQ_EXPR)
6700 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6701
6702 /* Put the AND last so it can combine with more things. */
6703 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6704
6705 /* Make sure to return the proper type. */
6706 inner = fold_convert_loc (loc, result_type, inner);
6707
6708 return inner;
6709 }
6710 return NULL_TREE;
6711 }
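
/* When the sign-test shortcut does not apply, the shift form kicks
   in: (A & 8) != 0 becomes ((A >> 3) & 1) in a suitably unsigned
   intermediate type, and (A & 8) == 0 additionally XORs in the
   constant 1 before the final AND.  */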
6712
6713 /* Check whether we are allowed to reorder operands arg0 and arg1,
6714 such that the evaluation of arg1 occurs before arg0. */
6715
6716 static bool
6717 reorder_operands_p (const_tree arg0, const_tree arg1)
6718 {
6719 if (! flag_evaluation_order)
6720 return true;
6721 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6722 return true;
6723 return ! TREE_SIDE_EFFECTS (arg0)
6724 && ! TREE_SIDE_EFFECTS (arg1);
6725 }
6726
6727 /* Test whether it is preferable to swap two operands, ARG0 and
6728 ARG1, for example because ARG0 is an integer constant and ARG1
6729 isn't. If REORDER is true, only recommend swapping if we can
6730 evaluate the operands in reverse order. */
6731
6732 bool
6733 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6734 {
6735 STRIP_SIGN_NOPS (arg0);
6736 STRIP_SIGN_NOPS (arg1);
6737
6738 if (TREE_CODE (arg1) == INTEGER_CST)
6739 return 0;
6740 if (TREE_CODE (arg0) == INTEGER_CST)
6741 return 1;
6742
6743 if (TREE_CODE (arg1) == REAL_CST)
6744 return 0;
6745 if (TREE_CODE (arg0) == REAL_CST)
6746 return 1;
6747
6748 if (TREE_CODE (arg1) == FIXED_CST)
6749 return 0;
6750 if (TREE_CODE (arg0) == FIXED_CST)
6751 return 1;
6752
6753 if (TREE_CODE (arg1) == COMPLEX_CST)
6754 return 0;
6755 if (TREE_CODE (arg0) == COMPLEX_CST)
6756 return 1;
6757
6758 if (TREE_CONSTANT (arg1))
6759 return 0;
6760 if (TREE_CONSTANT (arg0))
6761 return 1;
6762
6763 if (optimize_function_for_size_p (cfun))
6764 return 0;
6765
6766 if (reorder && flag_evaluation_order
6767 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6768 return 0;
6769
6770 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6771 for commutative and comparison operators. Ensuring a canonical
6772 form allows the optimizers to find additional redundancies without
6773 having to explicitly check for both orderings. */
6774 if (TREE_CODE (arg0) == SSA_NAME
6775 && TREE_CODE (arg1) == SSA_NAME
6776 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6777 return 1;
6778
6779 /* Put SSA_NAMEs last. */
6780 if (TREE_CODE (arg1) == SSA_NAME)
6781 return 0;
6782 if (TREE_CODE (arg0) == SSA_NAME)
6783 return 1;
6784
6785 /* Put variables last. */
6786 if (DECL_P (arg1))
6787 return 0;
6788 if (DECL_P (arg0))
6789 return 1;
6790
6791 return 0;
6792 }
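
/* The net effect of the ranking above is a canonical operand order
   for commutative and comparison codes: constants always end up as
   the second operand (so 5 + a is rebuilt as a + 5), then SSA_NAMEs
   (two SSA_NAMEs are ordered by ascending version number), then
   DECLs, with more complex expressions kept first.  */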
6793
6794 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6795 ARG0 is extended to a wider type. */
6796
6797 static tree
6798 fold_widened_comparison (location_t loc, enum tree_code code,
6799 tree type, tree arg0, tree arg1)
6800 {
6801 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6802 tree arg1_unw;
6803 tree shorter_type, outer_type;
6804 tree min, max;
6805 bool above, below;
6806
6807 if (arg0_unw == arg0)
6808 return NULL_TREE;
6809 shorter_type = TREE_TYPE (arg0_unw);
6810
6811 #ifdef HAVE_canonicalize_funcptr_for_compare
6812 /* Disable this optimization if we're casting a function pointer
6813 type on targets that require function pointer canonicalization. */
6814 if (HAVE_canonicalize_funcptr_for_compare
6815 && TREE_CODE (shorter_type) == POINTER_TYPE
6816 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6817 return NULL_TREE;
6818 #endif
6819
6820 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6821 return NULL_TREE;
6822
6823 arg1_unw = get_unwidened (arg1, NULL_TREE);
6824
6825 /* If possible, express the comparison in the shorter mode. */
6826 if ((code == EQ_EXPR || code == NE_EXPR
6827 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6828 && (TREE_TYPE (arg1_unw) == shorter_type
6829 || ((TYPE_PRECISION (shorter_type)
6830 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6831 && (TYPE_UNSIGNED (shorter_type)
6832 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6833 || (TREE_CODE (arg1_unw) == INTEGER_CST
6834 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6835 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6836 && int_fits_type_p (arg1_unw, shorter_type))))
6837 return fold_build2_loc (loc, code, type, arg0_unw,
6838 fold_convert_loc (loc, shorter_type, arg1_unw));
6839
6840 if (TREE_CODE (arg1_unw) != INTEGER_CST
6841 || TREE_CODE (shorter_type) != INTEGER_TYPE
6842 || !int_fits_type_p (arg1_unw, shorter_type))
6843 return NULL_TREE;
6844
6845 /* If we are comparing with an integer that does not fit into the range
6846 of the shorter type, the result is known. */
6847 outer_type = TREE_TYPE (arg1_unw);
6848 min = lower_bound_in_type (outer_type, shorter_type);
6849 max = upper_bound_in_type (outer_type, shorter_type);
6850
6851 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6852 max, arg1_unw));
6853 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6854 arg1_unw, min));
6855
6856 switch (code)
6857 {
6858 case EQ_EXPR:
6859 if (above || below)
6860 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6861 break;
6862
6863 case NE_EXPR:
6864 if (above || below)
6865 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6866 break;
6867
6868 case LT_EXPR:
6869 case LE_EXPR:
6870 if (above)
6871 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6872 else if (below)
6873 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6874 break;

6875 case GT_EXPR:
6876 case GE_EXPR:
6877 if (above)
6878 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6879 else if (below)
6880 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6881 break;

6882 default:
6883 break;
6884 }
6885
6886 return NULL_TREE;
6887 }
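
/* Illustration of the above: for unsigned char c, (int) c == 1000 can
   never hold, since 1000 lies outside [0, 255]; the comparison folds
   to constant 0, with any side effects of the operand preserved by
   omit_one_operand_loc.  (int) c == 10, by contrast, is expressed in
   the narrower type as c == 10.  */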
6888
6889 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
6890 conversion of ARG0 changes only its signedness. */
6891
6892 static tree
6893 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6894 tree arg0, tree arg1)
6895 {
6896 tree arg0_inner;
6897 tree inner_type, outer_type;
6898
6899 if (!CONVERT_EXPR_P (arg0))
6900 return NULL_TREE;
6901
6902 outer_type = TREE_TYPE (arg0);
6903 arg0_inner = TREE_OPERAND (arg0, 0);
6904 inner_type = TREE_TYPE (arg0_inner);
6905
6906 #ifdef HAVE_canonicalize_funcptr_for_compare
6907 /* Disable this optimization if we're casting a function pointer
6908 type on targets that require function pointer canonicalization. */
6909 if (HAVE_canonicalize_funcptr_for_compare
6910 && TREE_CODE (inner_type) == POINTER_TYPE
6911 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6912 return NULL_TREE;
6913 #endif
6914
6915 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6916 return NULL_TREE;
6917
6918 if (TREE_CODE (arg1) != INTEGER_CST
6919 && !(CONVERT_EXPR_P (arg1)
6920 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6921 return NULL_TREE;
6922
6923 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6924 && code != NE_EXPR
6925 && code != EQ_EXPR)
6926 return NULL_TREE;
6927
6928 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6929 return NULL_TREE;
6930
6931 if (TREE_CODE (arg1) == INTEGER_CST)
6932 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6933 0, TREE_OVERFLOW (arg1));
6934 else
6935 arg1 = fold_convert_loc (loc, inner_type, arg1);
6936
6937 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6938 }
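
/* E.g. for unsigned int u, the comparison (int) u == 5 drops the
   sign-changing cast and becomes u == 5u: equality is unaffected by
   reinterpreting the sign bit.  An ordered comparison such as
   (int) u < 5 is left alone, because its result does depend on the
   signedness.  */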
6939
6940 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is the
6941 step of the array. Reconstructs s and delta in the case of s *
6942 delta being an integer constant (and thus already folded). ADDR is
6943 the address. OP1 is the multiplicative expression. If the
6944 function succeeds, the new address expression is returned.
6945 Otherwise NULL_TREE is returned. LOC is the location of the
6946 resulting expression. */
6947
6948 static tree
6949 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6950 {
6951 tree s, delta, step;
6952 tree ref = TREE_OPERAND (addr, 0), pref;
6953 tree ret, pos;
6954 tree itype;
6955 bool mdim = false;
6956
6957 /* Strip the nops that might be added when converting op1 to sizetype. */
6958 STRIP_NOPS (op1);
6959
6960 /* Canonicalize op1 into a possibly non-constant delta
6961 and an INTEGER_CST s. */
6962 if (TREE_CODE (op1) == MULT_EXPR)
6963 {
6964 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6965
6966 STRIP_NOPS (arg0);
6967 STRIP_NOPS (arg1);
6968
6969 if (TREE_CODE (arg0) == INTEGER_CST)
6970 {
6971 s = arg0;
6972 delta = arg1;
6973 }
6974 else if (TREE_CODE (arg1) == INTEGER_CST)
6975 {
6976 s = arg1;
6977 delta = arg0;
6978 }
6979 else
6980 return NULL_TREE;
6981 }
6982 else if (TREE_CODE (op1) == INTEGER_CST)
6983 {
6984 delta = op1;
6985 s = NULL_TREE;
6986 }
6987 else
6988 {
6989 /* Treat op1 as delta * 1. */
6990 delta = op1;
6991 s = integer_one_node;
6992 }
6993
6994 /* Handle &x.array the same as we would handle &x.array[0]. */
6995 if (TREE_CODE (ref) == COMPONENT_REF
6996 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6997 {
6998 tree domain;
6999
7000 /* Remember if this was a multi-dimensional array. */
7001 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7002 mdim = true;
7003
7004 domain = TYPE_DOMAIN (TREE_TYPE (ref));
7005 if (! domain)
7006 goto cont;
7007 itype = TREE_TYPE (domain);
7008
7009 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
7010 if (TREE_CODE (step) != INTEGER_CST)
7011 goto cont;
7012
7013 if (s)
7014 {
7015 if (! tree_int_cst_equal (step, s))
7016 goto cont;
7017 }
7018 else
7019 {
7020 /* Check whether delta is a multiple of step. */
7021 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7022 if (! tmp)
7023 goto cont;
7024 delta = tmp;
7025 }
7026
7027 /* Only fold here if we can verify we do not overflow one
7028 dimension of a multi-dimensional array. */
7029 if (mdim)
7030 {
7031 tree tmp;
7032
7033 if (!TYPE_MIN_VALUE (domain)
7034 || !TYPE_MAX_VALUE (domain)
7035 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7036 goto cont;
7037
7038 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7039 fold_convert_loc (loc, itype,
7040 TYPE_MIN_VALUE (domain)),
7041 fold_convert_loc (loc, itype, delta));
7042 if (TREE_CODE (tmp) != INTEGER_CST
7043 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7044 goto cont;
7045 }
7046
7047 /* We found a suitable component reference. */
7048
7049 pref = TREE_OPERAND (addr, 0);
7050 ret = copy_node (pref);
7051 SET_EXPR_LOCATION (ret, loc);
7052
7053 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7054 fold_build2_loc
7055 (loc, PLUS_EXPR, itype,
7056 fold_convert_loc (loc, itype,
7057 TYPE_MIN_VALUE
7058 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7059 fold_convert_loc (loc, itype, delta)),
7060 NULL_TREE, NULL_TREE);
7061 return build_fold_addr_expr_loc (loc, ret);
7062 }
7063
7064 cont:
7065
7066 for (;; ref = TREE_OPERAND (ref, 0))
7067 {
7068 if (TREE_CODE (ref) == ARRAY_REF)
7069 {
7070 tree domain;
7071
7072 /* Remember if this was a multi-dimensional array. */
7073 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7074 mdim = true;
7075
7076 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7077 if (! domain)
7078 continue;
7079 itype = TREE_TYPE (domain);
7080
7081 step = array_ref_element_size (ref);
7082 if (TREE_CODE (step) != INTEGER_CST)
7083 continue;
7084
7085 if (s)
7086 {
7087 if (! tree_int_cst_equal (step, s))
7088 continue;
7089 }
7090 else
7091 {
7092 /* Check whether delta is a multiple of step. */
7093 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7094 if (! tmp)
7095 continue;
7096 delta = tmp;
7097 }
7098
7099 /* Only fold here if we can verify we do not overflow one
7100 dimension of a multi-dimensional array. */
7101 if (mdim)
7102 {
7103 tree tmp;
7104
7105 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7106 || !TYPE_MAX_VALUE (domain)
7107 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7108 continue;
7109
7110 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7111 fold_convert_loc (loc, itype,
7112 TREE_OPERAND (ref, 1)),
7113 fold_convert_loc (loc, itype, delta));
7114 if (!tmp
7115 || TREE_CODE (tmp) != INTEGER_CST
7116 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7117 continue;
7118 }
7119
7120 break;
7121 }
7122 else
7123 mdim = false;
7124
7125 if (!handled_component_p (ref))
7126 return NULL_TREE;
7127 }
7128
7129 /* We found a suitable array reference. So copy everything up to it,
7130 and replace the index. */
7131
7132 pref = TREE_OPERAND (addr, 0);
7133 ret = copy_node (pref);
7134 SET_EXPR_LOCATION (ret, loc);
7135 pos = ret;
7136
7137 while (pref != ref)
7138 {
7139 pref = TREE_OPERAND (pref, 0);
7140 TREE_OPERAND (pos, 0) = copy_node (pref);
7141 pos = TREE_OPERAND (pos, 0);
7142 }
7143
7144 TREE_OPERAND (pos, 1)
7145 = fold_build2_loc (loc, PLUS_EXPR, itype,
7146 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7147 fold_convert_loc (loc, itype, delta));
7148 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7149 }
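
/* A sketch of the effect, assuming 4-byte int elements:

     int a[10];
     ... &a[2] p+ (sizetype) i * 4 ...

   Here s == 4 matches TYPE_SIZE_UNIT of the element type, so the
   offset folds back into the index as &a[2 + i].  A constant offset
   such as &a[2] p+ 8 takes the s == NULL_TREE path instead, which
   divides by the step to recover delta == 2 and yields &a[4].  */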
7150
7151
7152 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7153 means A >= Y && A != MAX, but in this case we know that
7154 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7155
7156 static tree
7157 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7158 {
7159 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7160
7161 if (TREE_CODE (bound) == LT_EXPR)
7162 a = TREE_OPERAND (bound, 0);
7163 else if (TREE_CODE (bound) == GT_EXPR)
7164 a = TREE_OPERAND (bound, 1);
7165 else
7166 return NULL_TREE;
7167
7168 typea = TREE_TYPE (a);
7169 if (!INTEGRAL_TYPE_P (typea)
7170 && !POINTER_TYPE_P (typea))
7171 return NULL_TREE;
7172
7173 if (TREE_CODE (ineq) == LT_EXPR)
7174 {
7175 a1 = TREE_OPERAND (ineq, 1);
7176 y = TREE_OPERAND (ineq, 0);
7177 }
7178 else if (TREE_CODE (ineq) == GT_EXPR)
7179 {
7180 a1 = TREE_OPERAND (ineq, 0);
7181 y = TREE_OPERAND (ineq, 1);
7182 }
7183 else
7184 return NULL_TREE;
7185
7186 if (TREE_TYPE (a1) != typea)
7187 return NULL_TREE;
7188
7189 if (POINTER_TYPE_P (typea))
7190 {
7191 /* Convert the pointers to integers before taking the difference. */
7192 tree ta = fold_convert_loc (loc, ssizetype, a);
7193 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7194 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7195 }
7196 else
7197 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7198
7199 if (!diff || !integer_onep (diff))
7200 return NULL_TREE;
7201
7202 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7203 }
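
/* Concretely: from BOUND a < x and INEQ a + 1 > y, the difference
   (a + 1) - a folds to 1, so a >= y is returned.  The caller conjoins
   it with a < x, which rules out the corner case a == TYPE_MAX where
   a + 1 would wrap.  */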
7204
7205 /* Fold a sum or difference of at least one multiplication.
7206 Returns the folded tree or NULL if no simplification could be made. */
7207
7208 static tree
7209 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7210 tree arg0, tree arg1)
7211 {
7212 tree arg00, arg01, arg10, arg11;
7213 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7214
7215 /* (A * C) +- (B * C) -> (A+-B) * C.
7216 (A * C) +- A -> A * (C+-1).
7217 We are most concerned about the case where C is a constant,
7218 but other combinations show up during loop reduction. Since
7219 it is not difficult, try all four possibilities. */
7220
7221 if (TREE_CODE (arg0) == MULT_EXPR)
7222 {
7223 arg00 = TREE_OPERAND (arg0, 0);
7224 arg01 = TREE_OPERAND (arg0, 1);
7225 }
7226 else if (TREE_CODE (arg0) == INTEGER_CST)
7227 {
7228 arg00 = build_one_cst (type);
7229 arg01 = arg0;
7230 }
7231 else
7232 {
7233 /* We cannot generate constant 1 for fract. */
7234 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7235 return NULL_TREE;
7236 arg00 = arg0;
7237 arg01 = build_one_cst (type);
7238 }
7239 if (TREE_CODE (arg1) == MULT_EXPR)
7240 {
7241 arg10 = TREE_OPERAND (arg1, 0);
7242 arg11 = TREE_OPERAND (arg1, 1);
7243 }
7244 else if (TREE_CODE (arg1) == INTEGER_CST)
7245 {
7246 arg10 = build_one_cst (type);
7247 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7248 the purpose of this canonicalization. */
7249 if (TREE_INT_CST_HIGH (arg1) == -1
7250 && negate_expr_p (arg1)
7251 && code == PLUS_EXPR)
7252 {
7253 arg11 = negate_expr (arg1);
7254 code = MINUS_EXPR;
7255 }
7256 else
7257 arg11 = arg1;
7258 }
7259 else
7260 {
7261 /* We cannot generate constant 1 for fract. */
7262 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7263 return NULL_TREE;
7264 arg10 = arg1;
7265 arg11 = build_one_cst (type);
7266 }
7267 same = NULL_TREE;
7268
7269 if (operand_equal_p (arg01, arg11, 0))
7270 same = arg01, alt0 = arg00, alt1 = arg10;
7271 else if (operand_equal_p (arg00, arg10, 0))
7272 same = arg00, alt0 = arg01, alt1 = arg11;
7273 else if (operand_equal_p (arg00, arg11, 0))
7274 same = arg00, alt0 = arg01, alt1 = arg10;
7275 else if (operand_equal_p (arg01, arg10, 0))
7276 same = arg01, alt0 = arg00, alt1 = arg11;
7277
7278 /* No identical multiplicands; see if we can find a common
7279 power-of-two factor in non-power-of-two multiplies. This
7280 can help in multi-dimensional array access. */
7281 else if (tree_fits_shwi_p (arg01)
7282 && tree_fits_shwi_p (arg11))
7283 {
7284 HOST_WIDE_INT int01, int11, tmp;
7285 bool swap = false;
7286 tree maybe_same;
7287 int01 = tree_to_shwi (arg01);
7288 int11 = tree_to_shwi (arg11);
7289
7290 /* Move min of absolute values to int11. */
7291 if (absu_hwi (int01) < absu_hwi (int11))
7292 {
7293 tmp = int01, int01 = int11, int11 = tmp;
7294 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7295 maybe_same = arg01;
7296 swap = true;
7297 }
7298 else
7299 maybe_same = arg11;
7300
7301 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7302 /* The remainder should not be a constant, otherwise we
7303 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7304 increase the number of multiplications necessary. */
7305 && TREE_CODE (arg10) != INTEGER_CST)
7306 {
7307 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7308 build_int_cst (TREE_TYPE (arg00),
7309 int01 / int11));
7310 alt1 = arg10;
7311 same = maybe_same;
7312 if (swap)
7313 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7314 }
7315 }
7316
7317 if (same)
7318 return fold_build2_loc (loc, MULT_EXPR, type,
7319 fold_build2_loc (loc, code, type,
7320 fold_convert_loc (loc, type, alt0),
7321 fold_convert_loc (loc, type, alt1)),
7322 fold_convert_loc (loc, type, same));
7323
7324 return NULL_TREE;
7325 }
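
/* Examples: x*3 + x is treated as x*3 + x*1 and becomes (3 + 1) * x,
   i.e. x * 4.  Without an identical multiplicand, i*12 + j*4 still
   shares the power-of-two factor 4 and refolds to (i*3 + j) * 4,
   which helps fold multi-dimensional array indexing.  */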
7326
7327 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7328 specified by EXPR into the buffer PTR of length LEN bytes.
7329 Return the number of bytes placed in the buffer, or zero
7330 upon failure. */
7331
7332 static int
7333 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7334 {
7335 tree type = TREE_TYPE (expr);
7336 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7337 int byte, offset, word, words;
7338 unsigned char value;
7339
7340 if (total_bytes > len)
7341 return 0;
7342 words = total_bytes / UNITS_PER_WORD;
7343
7344 for (byte = 0; byte < total_bytes; byte++)
7345 {
7346 int bitpos = byte * BITS_PER_UNIT;
7347 if (bitpos < HOST_BITS_PER_WIDE_INT)
7348 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7349 else
7350 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7351 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7352
7353 if (total_bytes > UNITS_PER_WORD)
7354 {
7355 word = byte / UNITS_PER_WORD;
7356 if (WORDS_BIG_ENDIAN)
7357 word = (words - 1) - word;
7358 offset = word * UNITS_PER_WORD;
7359 if (BYTES_BIG_ENDIAN)
7360 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7361 else
7362 offset += byte % UNITS_PER_WORD;
7363 }
7364 else
7365 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7366 ptr[offset] = value;
7367 }
7368 return total_bytes;
7369 }
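
/* A minimal sketch of the loop above for the common case of a
   little-endian target whose word size is at least TOTAL_BYTES (so
   only the simple offset computation is used) and a constant that
   fits in the low HOST_WIDE_INT:

     for (byte = 0; byte < total_bytes; byte++)
       ptr[byte] = (unsigned char) (TREE_INT_CST_LOW (expr)
                                    >> (byte * BITS_PER_UNIT));

   Encoding the 32-bit constant 0x01020304 this way stores the bytes
   04 03 02 01; a big-endian target mirrors the offset and stores
   01 02 03 04 instead.  */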
7370
7371
7372 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7373 specified by EXPR into the buffer PTR of length LEN bytes.
7374 Return the number of bytes placed in the buffer, or zero
7375 upon failure. */
7376
7377 static int
7378 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7379 {
7380 tree type = TREE_TYPE (expr);
7381 enum machine_mode mode = TYPE_MODE (type);
7382 int total_bytes = GET_MODE_SIZE (mode);
7383 FIXED_VALUE_TYPE value;
7384 tree i_value, i_type;
7385
7386 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7387 return 0;
7388
7389 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7390
7391 if (NULL_TREE == i_type
7392 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7393 return 0;
7394
7395 value = TREE_FIXED_CST (expr);
7396 i_value = double_int_to_tree (i_type, value.data);
7397
7398 return native_encode_int (i_value, ptr, len);
7399 }
7400
7401
7402 /* Subroutine of native_encode_expr. Encode the REAL_CST
7403 specified by EXPR into the buffer PTR of length LEN bytes.
7404 Return the number of bytes placed in the buffer, or zero
7405 upon failure. */
7406
7407 static int
7408 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7409 {
7410 tree type = TREE_TYPE (expr);
7411 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7412 int byte, offset, word, words, bitpos;
7413 unsigned char value;
7414
7415 /* There are always 32 bits in each long, no matter the size of
7416 the host's long. We handle floating point representations with
7417 up to 192 bits. */
7418 long tmp[6];
7419
7420 if (total_bytes > len || total_bytes > 24)
7421 return 0;
7422 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7423
7424 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7425
7426 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7427 bitpos += BITS_PER_UNIT)
7428 {
7429 byte = (bitpos / BITS_PER_UNIT) & 3;
7430 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7431
7432 if (UNITS_PER_WORD < 4)
7433 {
7434 word = byte / UNITS_PER_WORD;
7435 if (WORDS_BIG_ENDIAN)
7436 word = (words - 1) - word;
7437 offset = word * UNITS_PER_WORD;
7438 if (BYTES_BIG_ENDIAN)
7439 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7440 else
7441 offset += byte % UNITS_PER_WORD;
7442 }
7443 else
7444 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7445 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7446 }
7447 return total_bytes;
7448 }
7449
7450 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7451 specified by EXPR into the buffer PTR of length LEN bytes.
7452 Return the number of bytes placed in the buffer, or zero
7453 upon failure. */
7454
7455 static int
7456 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7457 {
7458 int rsize, isize;
7459 tree part;
7460
7461 part = TREE_REALPART (expr);
7462 rsize = native_encode_expr (part, ptr, len);
7463 if (rsize == 0)
7464 return 0;
7465 part = TREE_IMAGPART (expr);
7466 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7467 if (isize != rsize)
7468 return 0;
7469 return rsize + isize;
7470 }
7471
7472
7473 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7474 specified by EXPR into the buffer PTR of length LEN bytes.
7475 Return the number of bytes placed in the buffer, or zero
7476 upon failure. */
7477
7478 static int
7479 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7480 {
7481 unsigned i, count;
7482 int size, offset;
7483 tree itype, elem;
7484
7485 offset = 0;
7486 count = VECTOR_CST_NELTS (expr);
7487 itype = TREE_TYPE (TREE_TYPE (expr));
7488 size = GET_MODE_SIZE (TYPE_MODE (itype));
7489 for (i = 0; i < count; i++)
7490 {
7491 elem = VECTOR_CST_ELT (expr, i);
7492 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7493 return 0;
7494 offset += size;
7495 }
7496 return offset;
7497 }
7498
7499
7500 /* Subroutine of native_encode_expr. Encode the STRING_CST
7501 specified by EXPR into the buffer PTR of length LEN bytes.
7502 Return the number of bytes placed in the buffer, or zero
7503 upon failure. */
7504
7505 static int
7506 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7507 {
7508 tree type = TREE_TYPE (expr);
7509 HOST_WIDE_INT total_bytes;
7510
7511 if (TREE_CODE (type) != ARRAY_TYPE
7512 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7513 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7514 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7515 return 0;
7516 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7517 if (total_bytes > len)
7518 return 0;
7519 if (TREE_STRING_LENGTH (expr) < total_bytes)
7520 {
7521 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7522 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7523 total_bytes - TREE_STRING_LENGTH (expr));
7524 }
7525 else
7526 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7527 return total_bytes;
7528 }
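
/* For instance, char buf[4] = "hi" has TREE_STRING_LENGTH 3 ('h', 'i'
   and the terminating nul) but a 4-byte array type, so the tail is
   zero-filled: the buffer receives 'h', 'i', 0, 0 and 4 is returned.  */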
7529
7530
7531 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7532 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7533 buffer PTR of length LEN bytes. Return the number of bytes
7534 placed in the buffer, or zero upon failure. */
7535
7536 int
7537 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7538 {
7539 switch (TREE_CODE (expr))
7540 {
7541 case INTEGER_CST:
7542 return native_encode_int (expr, ptr, len);
7543
7544 case REAL_CST:
7545 return native_encode_real (expr, ptr, len);
7546
7547 case FIXED_CST:
7548 return native_encode_fixed (expr, ptr, len);
7549
7550 case COMPLEX_CST:
7551 return native_encode_complex (expr, ptr, len);
7552
7553 case VECTOR_CST:
7554 return native_encode_vector (expr, ptr, len);
7555
7556 case STRING_CST:
7557 return native_encode_string (expr, ptr, len);
7558
7559 default:
7560 return 0;
7561 }
7562 }
7563
7564
7565 /* Subroutine of native_interpret_expr. Interpret the contents of
7566 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7567 If the buffer cannot be interpreted, return NULL_TREE. */
7568
7569 static tree
7570 native_interpret_int (tree type, const unsigned char *ptr, int len)
7571 {
7572 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7573 double_int result;
7574
7575 if (total_bytes > len
7576 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7577 return NULL_TREE;
7578
7579 result = double_int::from_buffer (ptr, total_bytes);
7580
7581 return double_int_to_tree (type, result);
7582 }
7583
7584
7585 /* Subroutine of native_interpret_expr. Interpret the contents of
7586 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7587 If the buffer cannot be interpreted, return NULL_TREE. */
7588
7589 static tree
7590 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7591 {
7592 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7593 double_int result;
7594 FIXED_VALUE_TYPE fixed_value;
7595
7596 if (total_bytes > len
7597 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7598 return NULL_TREE;
7599
7600 result = double_int::from_buffer (ptr, total_bytes);
7601 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7602
7603 return build_fixed (type, fixed_value);
7604 }
7605
7606
7607 /* Subroutine of native_interpret_expr. Interpret the contents of
7608 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7609 If the buffer cannot be interpreted, return NULL_TREE. */
7610
7611 static tree
7612 native_interpret_real (tree type, const unsigned char *ptr, int len)
7613 {
7614 enum machine_mode mode = TYPE_MODE (type);
7615 int total_bytes = GET_MODE_SIZE (mode);
7616 int byte, offset, word, words, bitpos;
7617 unsigned char value;
7618 /* There are always 32 bits in each long, no matter the size of
7619 the host's long. We handle floating point representations with
7620 up to 192 bits. */
7621 REAL_VALUE_TYPE r;
7622 long tmp[6];
7623
7625 if (total_bytes > len || total_bytes > 24)
7626 return NULL_TREE;
7627 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7628
7629 memset (tmp, 0, sizeof (tmp));
7630 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7631 bitpos += BITS_PER_UNIT)
7632 {
7633 byte = (bitpos / BITS_PER_UNIT) & 3;
7634 if (UNITS_PER_WORD < 4)
7635 {
7636 word = byte / UNITS_PER_WORD;
7637 if (WORDS_BIG_ENDIAN)
7638 word = (words - 1) - word;
7639 offset = word * UNITS_PER_WORD;
7640 if (BYTES_BIG_ENDIAN)
7641 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7642 else
7643 offset += byte % UNITS_PER_WORD;
7644 }
7645 else
7646 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7647 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7648
7649 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7650 }
7651
7652 real_from_target (&r, tmp, mode);
7653 return build_real (type, r);
7654 }
7655
7656
7657 /* Subroutine of native_interpret_expr. Interpret the contents of
7658 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7659 If the buffer cannot be interpreted, return NULL_TREE. */
7660
7661 static tree
7662 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7663 {
7664 tree etype, rpart, ipart;
7665 int size;
7666
7667 etype = TREE_TYPE (type);
7668 size = GET_MODE_SIZE (TYPE_MODE (etype));
7669 if (size * 2 > len)
7670 return NULL_TREE;
7671 rpart = native_interpret_expr (etype, ptr, size);
7672 if (!rpart)
7673 return NULL_TREE;
7674 ipart = native_interpret_expr (etype, ptr+size, size);
7675 if (!ipart)
7676 return NULL_TREE;
7677 return build_complex (type, rpart, ipart);
7678 }
7679
7680
7681 /* Subroutine of native_interpret_expr. Interpret the contents of
7682 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7683 If the buffer cannot be interpreted, return NULL_TREE. */
7684
7685 static tree
7686 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7687 {
7688 tree etype, elem;
7689 int i, size, count;
7690 tree *elements;
7691
7692 etype = TREE_TYPE (type);
7693 size = GET_MODE_SIZE (TYPE_MODE (etype));
7694 count = TYPE_VECTOR_SUBPARTS (type);
7695 if (size * count > len)
7696 return NULL_TREE;
7697
7698 elements = XALLOCAVEC (tree, count);
7699 for (i = count - 1; i >= 0; i--)
7700 {
7701 elem = native_interpret_expr (etype, ptr+(i*size), size);
7702 if (!elem)
7703 return NULL_TREE;
7704 elements[i] = elem;
7705 }
7706 return build_vector (type, elements);
7707 }
7708
7709
7710 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7711 the buffer PTR of length LEN as a constant of type TYPE. For
7712 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7713 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7714 return NULL_TREE. */
7715
7716 tree
7717 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7718 {
7719 switch (TREE_CODE (type))
7720 {
7721 case INTEGER_TYPE:
7722 case ENUMERAL_TYPE:
7723 case BOOLEAN_TYPE:
7724 case POINTER_TYPE:
7725 case REFERENCE_TYPE:
7726 return native_interpret_int (type, ptr, len);
7727
7728 case REAL_TYPE:
7729 return native_interpret_real (type, ptr, len);
7730
7731 case FIXED_POINT_TYPE:
7732 return native_interpret_fixed (type, ptr, len);
7733
7734 case COMPLEX_TYPE:
7735 return native_interpret_complex (type, ptr, len);
7736
7737 case VECTOR_TYPE:
7738 return native_interpret_vector (type, ptr, len);
7739
7740 default:
7741 return NULL_TREE;
7742 }
7743 }
7744
7745 /* Returns true if we can interpret the contents of a native encoding
7746 as TYPE. */
7747
7748 static bool
7749 can_native_interpret_type_p (tree type)
7750 {
7751 switch (TREE_CODE (type))
7752 {
7753 case INTEGER_TYPE:
7754 case ENUMERAL_TYPE:
7755 case BOOLEAN_TYPE:
7756 case POINTER_TYPE:
7757 case REFERENCE_TYPE:
7758 case FIXED_POINT_TYPE:
7759 case REAL_TYPE:
7760 case COMPLEX_TYPE:
7761 case VECTOR_TYPE:
7762 return true;
7763 default:
7764 return false;
7765 }
7766 }
7767
7768 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7769 TYPE at compile-time. If we're unable to perform the conversion
7770 return NULL_TREE. */
7771
7772 static tree
7773 fold_view_convert_expr (tree type, tree expr)
7774 {
7775 /* We support up to 512-bit values (for V8DFmode). */
7776 unsigned char buffer[64];
7777 int len;
7778
7779 /* Check that the host and target are sane. */
7780 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7781 return NULL_TREE;
7782
7783 len = native_encode_expr (expr, buffer, sizeof (buffer));
7784 if (len == 0)
7785 return NULL_TREE;
7786
7787 return native_interpret_expr (type, buffer, len);
7788 }
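
/* Example: on a little-endian target with 32-bit float and int,
   VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST as the bytes
   00 00 80 3f and reinterprets them as the INTEGER_CST 0x3f800000,
   performing the type pun entirely at compile time.  */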
7789
7790 /* Build an expression for the address of T. Folds away INDIRECT_REF
7791 to avoid confusing the gimplify process. */
7792
7793 tree
7794 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7795 {
7796 /* The size of the object is not relevant when talking about its address. */
7797 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7798 t = TREE_OPERAND (t, 0);
7799
7800 if (TREE_CODE (t) == INDIRECT_REF)
7801 {
7802 t = TREE_OPERAND (t, 0);
7803
7804 if (TREE_TYPE (t) != ptrtype)
7805 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7806 }
7807 else if (TREE_CODE (t) == MEM_REF
7808 && integer_zerop (TREE_OPERAND (t, 1)))
7809 return TREE_OPERAND (t, 0);
7810 else if (TREE_CODE (t) == MEM_REF
7811 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7812 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7813 TREE_OPERAND (t, 0),
7814 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7815 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7816 {
7817 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7818
7819 if (TREE_TYPE (t) != ptrtype)
7820 t = fold_convert_loc (loc, ptrtype, t);
7821 }
7822 else
7823 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7824
7825 return t;
7826 }
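
/* So, for example, &*p folds back to p (modulo a pointer-type NOP),
   the address of MEM_REF [p, 0] is likewise just p, and a MEM_REF
   with a constant base folds to that constant p+ its offset.  */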
7827
7828 /* Build an expression for the address of T. */
7829
7830 tree
7831 build_fold_addr_expr_loc (location_t loc, tree t)
7832 {
7833 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7834
7835 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7836 }
7837
7838 static bool vec_cst_ctor_to_array (tree, tree *);
7839
7840 /* Fold a unary expression of code CODE and type TYPE with operand
7841 OP0. Return the folded expression if folding is successful.
7842 Otherwise, return NULL_TREE. */
7843
7844 tree
7845 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7846 {
7847 tree tem;
7848 tree arg0;
7849 enum tree_code_class kind = TREE_CODE_CLASS (code);
7850
7851 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7852 && TREE_CODE_LENGTH (code) == 1);
7853
7854 arg0 = op0;
7855 if (arg0)
7856 {
7857 if (CONVERT_EXPR_CODE_P (code)
7858 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7859 {
7860 /* Don't use STRIP_NOPS, because signedness of argument type
7861 matters. */
7862 STRIP_SIGN_NOPS (arg0);
7863 }
7864 else
7865 {
7866 /* Strip any conversions that don't change the mode. This
7867 is safe for every expression, except for a comparison
7868 expression because its signedness is derived from its
7869 operands.
7870
7871 Note that this is done as an internal manipulation within
7872 the constant folder, in order to find the simplest
7873 representation of the arguments so that their form can be
7874 studied. In any case, the appropriate type conversions
7875 should be put back in the tree that will get out of the
7876 constant folder. */
7877 STRIP_NOPS (arg0);
7878 }
7879 }
7880
7881 if (TREE_CODE_CLASS (code) == tcc_unary)
7882 {
7883 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7884 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7885 fold_build1_loc (loc, code, type,
7886 fold_convert_loc (loc, TREE_TYPE (op0),
7887 TREE_OPERAND (arg0, 1))));
7888 else if (TREE_CODE (arg0) == COND_EXPR)
7889 {
7890 tree arg01 = TREE_OPERAND (arg0, 1);
7891 tree arg02 = TREE_OPERAND (arg0, 2);
7892 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7893 arg01 = fold_build1_loc (loc, code, type,
7894 fold_convert_loc (loc,
7895 TREE_TYPE (op0), arg01));
7896 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7897 arg02 = fold_build1_loc (loc, code, type,
7898 fold_convert_loc (loc,
7899 TREE_TYPE (op0), arg02));
7900 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7901 arg01, arg02);
7902
7903 /* If this was a conversion, and all we did was to move it
7904 inside the COND_EXPR, bring it back out. But leave it if
7905 it is a conversion from integer to integer and the
7906 result precision is no wider than a word since such a
7907 conversion is cheap and may be optimized away by combine,
7908 while it couldn't if it were outside the COND_EXPR. Then return
7909 so we don't get into an infinite recursion loop taking the
7910 conversion out and then back in. */
7911
7912 if ((CONVERT_EXPR_CODE_P (code)
7913 || code == NON_LVALUE_EXPR)
7914 && TREE_CODE (tem) == COND_EXPR
7915 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7916 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7917 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7918 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7919 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7920 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7921 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7922 && (INTEGRAL_TYPE_P
7923 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7924 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7925 || flag_syntax_only))
7926 tem = build1_loc (loc, code, type,
7927 build3 (COND_EXPR,
7928 TREE_TYPE (TREE_OPERAND
7929 (TREE_OPERAND (tem, 1), 0)),
7930 TREE_OPERAND (tem, 0),
7931 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7932 TREE_OPERAND (TREE_OPERAND (tem, 2),
7933 0)));
7934 return tem;
7935 }
7936 }
7937
7938 switch (code)
7939 {
7940 case PAREN_EXPR:
7941 /* Re-association barriers around constants and other re-association
7942 barriers can be removed. */
7943 if (CONSTANT_CLASS_P (op0)
7944 || TREE_CODE (op0) == PAREN_EXPR)
7945 return fold_convert_loc (loc, type, op0);
7946 return NULL_TREE;
7947
7948 CASE_CONVERT:
7949 case FLOAT_EXPR:
7950 case FIX_TRUNC_EXPR:
7951 if (TREE_TYPE (op0) == type)
7952 return op0;
7953
7954 if (COMPARISON_CLASS_P (op0))
7955 {
7956 /* If we have (type) (a CMP b) and type is an integral type, return
7957 a new expression involving the new type. Canonicalize
7958 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7959 non-integral type.
7960 Do not fold the result as that would not simplify further, also
7961 folding again results in recursions. */
7962 if (TREE_CODE (type) == BOOLEAN_TYPE)
7963 return build2_loc (loc, TREE_CODE (op0), type,
7964 TREE_OPERAND (op0, 0),
7965 TREE_OPERAND (op0, 1));
7966 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7967 && TREE_CODE (type) != VECTOR_TYPE)
7968 return build3_loc (loc, COND_EXPR, type, op0,
7969 constant_boolean_node (true, type),
7970 constant_boolean_node (false, type));
7971 }
7972
7973 /* Handle cases of two conversions in a row. */
7974 if (CONVERT_EXPR_P (op0))
7975 {
7976 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7977 tree inter_type = TREE_TYPE (op0);
7978 int inside_int = INTEGRAL_TYPE_P (inside_type);
7979 int inside_ptr = POINTER_TYPE_P (inside_type);
7980 int inside_float = FLOAT_TYPE_P (inside_type);
7981 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7982 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7983 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7984 int inter_int = INTEGRAL_TYPE_P (inter_type);
7985 int inter_ptr = POINTER_TYPE_P (inter_type);
7986 int inter_float = FLOAT_TYPE_P (inter_type);
7987 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7988 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7989 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7990 int final_int = INTEGRAL_TYPE_P (type);
7991 int final_ptr = POINTER_TYPE_P (type);
7992 int final_float = FLOAT_TYPE_P (type);
7993 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7994 unsigned int final_prec = TYPE_PRECISION (type);
7995 int final_unsignedp = TYPE_UNSIGNED (type);
7996
7997 /* In addition to the cases of two conversions in a row
7998 handled below, if we are converting something to its own
7999 type via an object of identical or wider precision, neither
8000 conversion is needed. */
8001 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8002 && (((inter_int || inter_ptr) && final_int)
8003 || (inter_float && final_float))
8004 && inter_prec >= final_prec)
8005 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8006
8007 /* Likewise, if the intermediate and initial types are either both
8008 float or both integer, we don't need the middle conversion if the
8009 former is wider than the latter and doesn't change the signedness
8010 (for integers). Avoid this if the final type is a pointer since
8011 then we sometimes need the middle conversion. Likewise if the
8012 final type has a precision not equal to the size of its mode. */
8013 if (((inter_int && inside_int)
8014 || (inter_float && inside_float)
8015 || (inter_vec && inside_vec))
8016 && inter_prec >= inside_prec
8017 && (inter_float || inter_vec
8018 || inter_unsignedp == inside_unsignedp)
8019 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8020 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8021 && ! final_ptr
8022 && (! final_vec || inter_prec == inside_prec))
8023 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8024
8025 /* If we have a sign-extension of a zero-extended value, we can
8026 replace that by a single zero-extension. Likewise if the
8027 final conversion does not change precision we can drop the
8028 intermediate conversion. */
8029 if (inside_int && inter_int && final_int
8030 && ((inside_prec < inter_prec && inter_prec < final_prec
8031 && inside_unsignedp && !inter_unsignedp)
8032 || final_prec == inter_prec))
8033 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8034
8035 /* Two conversions in a row are not needed unless:
8036 - some conversion is floating-point (overstrict for now), or
8037 - some conversion is a vector (overstrict for now), or
8038 - the intermediate type is narrower than both initial and
8039 final, or
8040 - the intermediate type and innermost type differ in signedness,
8041 and the outermost type is wider than the intermediate, or
8042 - the initial type is a pointer type and the precisions of the
8043 intermediate and final types differ, or
8044 - the final type is a pointer type and the precisions of the
8045 initial and intermediate types differ. */
8046 if (! inside_float && ! inter_float && ! final_float
8047 && ! inside_vec && ! inter_vec && ! final_vec
8048 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8049 && ! (inside_int && inter_int
8050 && inter_unsignedp != inside_unsignedp
8051 && inter_prec < final_prec)
8052 && ((inter_unsignedp && inter_prec > inside_prec)
8053 == (final_unsignedp && final_prec > inter_prec))
8054 && ! (inside_ptr && inter_prec != final_prec)
8055 && ! (final_ptr && inside_prec != inter_prec)
8056 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8057 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8058 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8059 }
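
	  /* E.g. for int i, (int) (long) i collapses to i: the
	     intermediate widening to long changes no bits and is at
	     least as wide as both end types.  (short) (char) s, on the
	     other hand, keeps the (char) cast, because the
	     intermediate type is narrower than the final one.  */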
8060
8061 /* Handle (T *)&A.B.C for A being of type T and B and C
8062 living at offset zero. This occurs frequently in
8063 C++ upcasting and then accessing the base. */
8064 if (TREE_CODE (op0) == ADDR_EXPR
8065 && POINTER_TYPE_P (type)
8066 && handled_component_p (TREE_OPERAND (op0, 0)))
8067 {
8068 HOST_WIDE_INT bitsize, bitpos;
8069 tree offset;
8070 enum machine_mode mode;
8071 int unsignedp, volatilep;
8072 tree base = TREE_OPERAND (op0, 0);
8073 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8074 &mode, &unsignedp, &volatilep, false);
8075 /* If the reference was to a (constant) zero offset, we can use
8076 the address of the base if it has the same base type
8077 as the result type and the pointer type is unqualified. */
8078 if (! offset && bitpos == 0
8079 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8080 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8081 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8082 return fold_convert_loc (loc, type,
8083 build_fold_addr_expr_loc (loc, base));
8084 }
8085
8086 if (TREE_CODE (op0) == MODIFY_EXPR
8087 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8088 /* Detect assigning a bitfield. */
8089 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8090 && DECL_BIT_FIELD
8091 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8092 {
8093 /* Don't leave an assignment inside a conversion
8094 unless assigning a bitfield. */
8095 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8096 /* First do the assignment, then return converted constant. */
8097 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8098 TREE_NO_WARNING (tem) = 1;
8099 TREE_USED (tem) = 1;
8100 return tem;
8101 }
8102
8103 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8104 constant (if x has signed type, the sign bit cannot be set
8105 in c). This folds extension into the BIT_AND_EXPR.
8106 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8107 very likely don't have maximal range for their precision and this
8108 transformation effectively doesn't preserve non-maximal ranges. */
8109 if (TREE_CODE (type) == INTEGER_TYPE
8110 && TREE_CODE (op0) == BIT_AND_EXPR
8111 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8112 {
8113 tree and_expr = op0;
8114 tree and0 = TREE_OPERAND (and_expr, 0);
8115 tree and1 = TREE_OPERAND (and_expr, 1);
8116 int change = 0;
8117
8118 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8119 || (TYPE_PRECISION (type)
8120 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8121 change = 1;
8122 else if (TYPE_PRECISION (TREE_TYPE (and1))
8123 <= HOST_BITS_PER_WIDE_INT
8124 && tree_fits_uhwi_p (and1))
8125 {
8126 unsigned HOST_WIDE_INT cst;
8127
8128 cst = tree_to_uhwi (and1);
8129 cst &= HOST_WIDE_INT_M1U
8130 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8131 change = (cst == 0);
8132 #ifdef LOAD_EXTEND_OP
8133 if (change
8134 && !flag_syntax_only
8135 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8136 == ZERO_EXTEND))
8137 {
8138 tree uns = unsigned_type_for (TREE_TYPE (and0));
8139 and0 = fold_convert_loc (loc, uns, and0);
8140 and1 = fold_convert_loc (loc, uns, and1);
8141 }
8142 #endif
8143 }
8144 if (change)
8145 {
8146 tem = force_fit_type_double (type, tree_to_double_int (and1),
8147 0, TREE_OVERFLOW (and1));
8148 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8149 fold_convert_loc (loc, type, and0), tem);
8150 }
8151 }
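
      /* For instance, (unsigned short) (x & 0xff) with int x becomes
	 (unsigned short) x & 0xff: truncation and masking commute
	 here, and for a widening cast the fold is applied only when
	 the mask clears the sign bit of the inner type.  */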
8152
8153 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8154 when one of the new casts will fold away. Conservatively we assume
8155 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8156 if (POINTER_TYPE_P (type)
8157 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8158 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8159 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8160 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8161 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8162 {
8163 tree arg00 = TREE_OPERAND (arg0, 0);
8164 tree arg01 = TREE_OPERAND (arg0, 1);
8165
8166 return fold_build_pointer_plus_loc
8167 (loc, fold_convert_loc (loc, type, arg00), arg01);
8168 }
8169
8170 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8171 of the same precision, and X is an integer type not narrower than
8172 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8173 if (INTEGRAL_TYPE_P (type)
8174 && TREE_CODE (op0) == BIT_NOT_EXPR
8175 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8176 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8177 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8178 {
8179 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8180 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8181 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8182 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8183 fold_convert_loc (loc, type, tem));
8184 }
8185
8186 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8187 type of X and Y (integer types only). */
8188 if (INTEGRAL_TYPE_P (type)
8189 && TREE_CODE (op0) == MULT_EXPR
8190 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8191 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8192 {
8193 /* Be careful not to introduce new overflows. */
8194 tree mult_type;
8195 if (TYPE_OVERFLOW_WRAPS (type))
8196 mult_type = type;
8197 else
8198 mult_type = unsigned_type_for (type);
8199
8200 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8201 {
8202 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8203 fold_convert_loc (loc, mult_type,
8204 TREE_OPERAND (op0, 0)),
8205 fold_convert_loc (loc, mult_type,
8206 TREE_OPERAND (op0, 1)));
8207 return fold_convert_loc (loc, type, tem);
8208 }
8209 }
8210
8211 tem = fold_convert_const (code, type, op0);
8212 return tem ? tem : NULL_TREE;
8213
8214 case ADDR_SPACE_CONVERT_EXPR:
8215 if (integer_zerop (arg0))
8216 return fold_convert_const (code, type, arg0);
8217 return NULL_TREE;
8218
8219 case FIXED_CONVERT_EXPR:
8220 tem = fold_convert_const (code, type, arg0);
8221 return tem ? tem : NULL_TREE;
8222
8223 case VIEW_CONVERT_EXPR:
8224 if (TREE_TYPE (op0) == type)
8225 return op0;
8226 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8227 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8228 type, TREE_OPERAND (op0, 0));
8229 if (TREE_CODE (op0) == MEM_REF)
8230 return fold_build2_loc (loc, MEM_REF, type,
8231 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8232
8233 /* For integral conversions with the same precision or pointer
8234 conversions use a NOP_EXPR instead. */
8235 if ((INTEGRAL_TYPE_P (type)
8236 || POINTER_TYPE_P (type))
8237 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8238 || POINTER_TYPE_P (TREE_TYPE (op0)))
8239 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8240 return fold_convert_loc (loc, type, op0);
8241
8242 /* Strip inner integral conversions that do not change the precision. */
8243 if (CONVERT_EXPR_P (op0)
8244 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8245 || POINTER_TYPE_P (TREE_TYPE (op0)))
8246 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8247 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8248 && (TYPE_PRECISION (TREE_TYPE (op0))
8249 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8250 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8251 type, TREE_OPERAND (op0, 0));
8252
8253 return fold_view_convert_expr (type, op0);
8254
8255 case NEGATE_EXPR:
8256 tem = fold_negate_expr (loc, arg0);
8257 if (tem)
8258 return fold_convert_loc (loc, type, tem);
8259 return NULL_TREE;
8260
8261 case ABS_EXPR:
8262 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8263 return fold_abs_const (arg0, type);
8264 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8265 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8266 /* Convert fabs((double)float) into (double)fabsf(float). */
8267 else if (TREE_CODE (arg0) == NOP_EXPR
8268 && TREE_CODE (type) == REAL_TYPE)
8269 {
8270 tree targ0 = strip_float_extensions (arg0);
8271 if (targ0 != arg0)
8272 return fold_convert_loc (loc, type,
8273 fold_build1_loc (loc, ABS_EXPR,
8274 TREE_TYPE (targ0),
8275 targ0));
8276 }
8277 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8278 else if (TREE_CODE (arg0) == ABS_EXPR)
8279 return arg0;
8280 else if (tree_expr_nonnegative_p (arg0))
8281 return arg0;
8282
8283 /* Strip sign ops from argument. */
8284 if (TREE_CODE (type) == REAL_TYPE)
8285 {
8286 tem = fold_strip_sign_ops (arg0);
8287 if (tem)
8288 return fold_build1_loc (loc, ABS_EXPR, type,
8289 fold_convert_loc (loc, type, tem));
8290 }
8291 return NULL_TREE;
8292
8293 case CONJ_EXPR:
8294 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8295 return fold_convert_loc (loc, type, arg0);
8296 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8297 {
8298 tree itype = TREE_TYPE (type);
8299 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8300 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8301 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8302 negate_expr (ipart));
8303 }
8304 if (TREE_CODE (arg0) == COMPLEX_CST)
8305 {
8306 tree itype = TREE_TYPE (type);
8307 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8308 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8309 return build_complex (type, rpart, negate_expr (ipart));
8310 }
8311 if (TREE_CODE (arg0) == CONJ_EXPR)
8312 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8313 return NULL_TREE;
8314
8315 case BIT_NOT_EXPR:
8316 if (TREE_CODE (arg0) == INTEGER_CST)
8317 return fold_not_const (arg0, type);
8318 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8319 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8320 /* Convert ~ (-A) to A - 1. */
8321 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8322 return fold_build2_loc (loc, MINUS_EXPR, type,
8323 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8324 build_int_cst (type, 1));
8325 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8326 else if (INTEGRAL_TYPE_P (type)
8327 && ((TREE_CODE (arg0) == MINUS_EXPR
8328 && integer_onep (TREE_OPERAND (arg0, 1)))
8329 || (TREE_CODE (arg0) == PLUS_EXPR
8330 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8331 return fold_build1_loc (loc, NEGATE_EXPR, type,
8332 fold_convert_loc (loc, type,
8333 TREE_OPERAND (arg0, 0)));
8334 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8335 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8336 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8337 fold_convert_loc (loc, type,
8338 TREE_OPERAND (arg0, 0)))))
8339 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8340 fold_convert_loc (loc, type,
8341 TREE_OPERAND (arg0, 1)));
8342 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8343 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8344 fold_convert_loc (loc, type,
8345 TREE_OPERAND (arg0, 1)))))
8346 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8347 fold_convert_loc (loc, type,
8348 TREE_OPERAND (arg0, 0)), tem);
8349 /* Perform BIT_NOT_EXPR on each element individually. */
8350 else if (TREE_CODE (arg0) == VECTOR_CST)
8351 {
8352 tree *elements;
8353 tree elem;
8354 unsigned count = VECTOR_CST_NELTS (arg0), i;
8355
8356 elements = XALLOCAVEC (tree, count);
8357 for (i = 0; i < count; i++)
8358 {
8359 elem = VECTOR_CST_ELT (arg0, i);
8360 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8361 if (elem == NULL_TREE)
8362 break;
8363 elements[i] = elem;
8364 }
8365 if (i == count)
8366 return build_vector (type, elements);
8367 }
8368 else if (COMPARISON_CLASS_P (arg0)
8369 && (VECTOR_TYPE_P (type)
8370 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8371 {
8372 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8373 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8374 HONOR_NANS (TYPE_MODE (op_type)));
8375 if (subcode != ERROR_MARK)
8376 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8377 TREE_OPERAND (arg0, 1));
8378 }
8379
8381 return NULL_TREE;
8382
8383 case TRUTH_NOT_EXPR:
8384 /* Note that the operand of this must be an int
8385 and its values must be 0 or 1.
8386 ("true" is a fixed value perhaps depending on the language,
8387 but we don't handle values other than 1 correctly yet.) */
8388 tem = fold_truth_not_expr (loc, arg0);
8389 if (!tem)
8390 return NULL_TREE;
8391 return fold_convert_loc (loc, type, tem);
8392
8393 case REALPART_EXPR:
8394 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8395 return fold_convert_loc (loc, type, arg0);
8396 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8397 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8398 TREE_OPERAND (arg0, 1));
8399 if (TREE_CODE (arg0) == COMPLEX_CST)
8400 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8401 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8402 {
8403 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8404 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8405 fold_build1_loc (loc, REALPART_EXPR, itype,
8406 TREE_OPERAND (arg0, 0)),
8407 fold_build1_loc (loc, REALPART_EXPR, itype,
8408 TREE_OPERAND (arg0, 1)));
8409 return fold_convert_loc (loc, type, tem);
8410 }
8411 if (TREE_CODE (arg0) == CONJ_EXPR)
8412 {
8413 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8414 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8415 TREE_OPERAND (arg0, 0));
8416 return fold_convert_loc (loc, type, tem);
8417 }
8418 if (TREE_CODE (arg0) == CALL_EXPR)
8419 {
8420 tree fn = get_callee_fndecl (arg0);
8421 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8422 switch (DECL_FUNCTION_CODE (fn))
8423 {
8424 CASE_FLT_FN (BUILT_IN_CEXPI):
8425 fn = mathfn_built_in (type, BUILT_IN_COS);
8426 if (fn)
8427 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8428 break;
8429
8430 default:
8431 break;
8432 }
8433 }
8434 return NULL_TREE;
8435
8436 case IMAGPART_EXPR:
8437 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8438 return build_zero_cst (type);
8439 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8440 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8441 TREE_OPERAND (arg0, 0));
8442 if (TREE_CODE (arg0) == COMPLEX_CST)
8443 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8444 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8445 {
8446 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8447 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8448 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8449 TREE_OPERAND (arg0, 0)),
8450 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8451 TREE_OPERAND (arg0, 1)));
8452 return fold_convert_loc (loc, type, tem);
8453 }
8454 if (TREE_CODE (arg0) == CONJ_EXPR)
8455 {
8456 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8457 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8458 return fold_convert_loc (loc, type, negate_expr (tem));
8459 }
8460 if (TREE_CODE (arg0) == CALL_EXPR)
8461 {
8462 tree fn = get_callee_fndecl (arg0);
8463 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8464 switch (DECL_FUNCTION_CODE (fn))
8465 {
8466 CASE_FLT_FN (BUILT_IN_CEXPI):
8467 fn = mathfn_built_in (type, BUILT_IN_SIN);
8468 if (fn)
8469 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8470 break;
8471
8472 default:
8473 break;
8474 }
8475 }
8476 return NULL_TREE;
8477
8478 case INDIRECT_REF:
8479 /* Fold *&X to X if X is an lvalue. */
8480 if (TREE_CODE (op0) == ADDR_EXPR)
8481 {
8482 tree op00 = TREE_OPERAND (op0, 0);
8483 if ((TREE_CODE (op00) == VAR_DECL
8484 || TREE_CODE (op00) == PARM_DECL
8485 || TREE_CODE (op00) == RESULT_DECL)
8486 && !TREE_READONLY (op00))
8487 return op00;
8488 }
8489 return NULL_TREE;
8490
8491 case VEC_UNPACK_LO_EXPR:
8492 case VEC_UNPACK_HI_EXPR:
8493 case VEC_UNPACK_FLOAT_LO_EXPR:
8494 case VEC_UNPACK_FLOAT_HI_EXPR:
8495 {
8496 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8497 tree *elts;
8498 enum tree_code subcode;
8499
8500 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8501 if (TREE_CODE (arg0) != VECTOR_CST)
8502 return NULL_TREE;
8503
8504 elts = XALLOCAVEC (tree, nelts * 2);
8505 if (!vec_cst_ctor_to_array (arg0, elts))
8506 return NULL_TREE;
8507
8508 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8509 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8510 elts += nelts;
8511
8512 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8513 subcode = NOP_EXPR;
8514 else
8515 subcode = FLOAT_EXPR;
8516
8517 for (i = 0; i < nelts; i++)
8518 {
8519 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8520 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8521 return NULL_TREE;
8522 }
8523
8524 return build_vector (type, elts);
8525 }
8526
8527 case REDUC_MIN_EXPR:
8528 case REDUC_MAX_EXPR:
8529 case REDUC_PLUS_EXPR:
8530 {
8531 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8532 tree *elts;
8533 enum tree_code subcode;
8534
8535 if (TREE_CODE (op0) != VECTOR_CST)
8536 return NULL_TREE;
8537
8538 elts = XALLOCAVEC (tree, nelts);
8539 if (!vec_cst_ctor_to_array (op0, elts))
8540 return NULL_TREE;
8541
8542 switch (code)
8543 {
8544 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8545 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8546 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8547 default: gcc_unreachable ();
8548 }
8549
8550 for (i = 1; i < nelts; i++)
8551 {
8552 elts[0] = const_binop (subcode, elts[0], elts[i]);
8553 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8554 return NULL_TREE;
8555 elts[i] = build_zero_cst (TREE_TYPE (type));
8556 }
8557
8558 return build_vector (type, elts);
8559 }
8560
8561 default:
8562 return NULL_TREE;
8563 } /* switch (code) */
8564 }
8565
8566
8567 /* If the operation was a conversion do _not_ mark a resulting constant
8568 with TREE_OVERFLOW if the original constant was not. These conversions
8569 have implementation defined behavior and retaining the TREE_OVERFLOW
8570 flag here would confuse later passes such as VRP. */
8571 tree
8572 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8573 tree type, tree op0)
8574 {
8575 tree res = fold_unary_loc (loc, code, type, op0);
8576 if (res
8577 && TREE_CODE (res) == INTEGER_CST
8578 && TREE_CODE (op0) == INTEGER_CST
8579 && CONVERT_EXPR_CODE_P (code))
8580 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8581
8582 return res;
8583 }
8584
8585 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8586 operands OP0 and OP1. LOC is the location of the resulting expression.
8587 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8588 Return the folded expression if folding is successful. Otherwise,
8589 return NULL_TREE. */
8590 static tree
8591 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8592 tree arg0, tree arg1, tree op0, tree op1)
8593 {
8594 tree tem;
8595
8596 /* We only do these simplifications if we are optimizing. */
8597 if (!optimize)
8598 return NULL_TREE;
8599
8600 /* Check for things like (A || B) && (A || C). We can convert this
8601 to A || (B && C). Note that either operator can be any of the four
8602 truth and/or operations and the transformation will still be
8603 valid. Also note that we only care about order for the
8604 ANDIF and ORIF operators. If B contains side effects, this
8605 might change the truth-value of A. */
8606 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8607 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8608 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8609 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8610 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8611 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8612 {
8613 tree a00 = TREE_OPERAND (arg0, 0);
8614 tree a01 = TREE_OPERAND (arg0, 1);
8615 tree a10 = TREE_OPERAND (arg1, 0);
8616 tree a11 = TREE_OPERAND (arg1, 1);
8617 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8618 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8619 && (code == TRUTH_AND_EXPR
8620 || code == TRUTH_OR_EXPR));
8621
8622 if (operand_equal_p (a00, a10, 0))
8623 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8624 fold_build2_loc (loc, code, type, a01, a11));
8625 else if (commutative && operand_equal_p (a00, a11, 0))
8626 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8627 fold_build2_loc (loc, code, type, a01, a10));
8628 else if (commutative && operand_equal_p (a01, a10, 0))
8629 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8630 fold_build2_loc (loc, code, type, a00, a11));
8631
8632 /* This case is tricky because we must either have commutative
8633 operators or else A10 must not have side-effects. */
8634
8635 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8636 && operand_equal_p (a01, a11, 0))
8637 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8638 fold_build2_loc (loc, code, type, a00, a10),
8639 a01);
8640 }
8641
8642 /* See if we can build a range comparison. */
8643 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8644 return tem;
8645
8646 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8647 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8648 {
8649 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8650 if (tem)
8651 return fold_build2_loc (loc, code, type, tem, arg1);
8652 }
8653
8654 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8655 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8656 {
8657 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8658 if (tem)
8659 return fold_build2_loc (loc, code, type, arg0, tem);
8660 }
8661
8662 /* Check for the possibility of merging component references. If our
8663 lhs is another similar operation, try to merge its rhs with our
8664 rhs. Then try to merge our lhs and rhs. */
8665 if (TREE_CODE (arg0) == code
8666 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8667 TREE_OPERAND (arg0, 1), arg1)))
8668 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8669
8670 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8671 return tem;
8672
8673 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8674 && (code == TRUTH_AND_EXPR
8675 || code == TRUTH_ANDIF_EXPR
8676 || code == TRUTH_OR_EXPR
8677 || code == TRUTH_ORIF_EXPR))
8678 {
8679 enum tree_code ncode, icode;
8680
8681 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8682 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8683 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8684
8685 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8686 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8687 We don't want to pack more than two leaves into a non-IF AND/OR
8688 expression.
8689 If the tree code of the left-hand operand isn't an AND/OR-IF code
8690 and isn't equal to IF-CODE, then we don't want to add the right-hand
8691 operand. If the inner right-hand side of the left-hand operand has
8692 side-effects, or isn't simple, then we can't add to it, as
8693 otherwise we might destroy the if-sequence. */
8694 if (TREE_CODE (arg0) == icode
8695 && simple_operand_p_2 (arg1)
8696 /* Needed for sequence points, to handle traps and
8697 side-effects. */
8698 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8699 {
8700 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8701 arg1);
8702 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8703 tem);
8704 }
8705 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8706 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8707 else if (TREE_CODE (arg1) == icode
8708 && simple_operand_p_2 (arg0)
8709 /* Needed for sequence points, to handle traps and
8710 side-effects. */
8711 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8712 {
8713 tem = fold_build2_loc (loc, ncode, type,
8714 arg0, TREE_OPERAND (arg1, 0));
8715 return fold_build2_loc (loc, icode, type, tem,
8716 TREE_OPERAND (arg1, 1));
8717 }
8718 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8719 into (A OR B).
8720 For sequence point consistency, we need to check for traps
8721 and side-effects. */
8722 else if (code == icode && simple_operand_p_2 (arg0)
8723 && simple_operand_p_2 (arg1))
8724 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8725 }
8726
8727 return NULL_TREE;
8728 }
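
/* Illustrative instance of the first transformation in this function:
     (a || b) && (a || c)  ->  a || (b && c)
   so A is evaluated only once. This is done only when B has no
   side-effects, since evaluating B could otherwise change the
   truth-value of A. */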
8729
8730 /* Fold a binary expression of code CODE and type TYPE with operands
8731 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8732 Return the folded expression if folding is successful. Otherwise,
8733 return NULL_TREE. */
8734
8735 static tree
8736 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8737 {
8738 enum tree_code compl_code;
8739
8740 if (code == MIN_EXPR)
8741 compl_code = MAX_EXPR;
8742 else if (code == MAX_EXPR)
8743 compl_code = MIN_EXPR;
8744 else
8745 gcc_unreachable ();
8746
8747 /* MIN (MAX (a, b), b) == b. */
8748 if (TREE_CODE (op0) == compl_code
8749 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8750 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8751
8752 /* MIN (MAX (b, a), b) == b. */
8753 if (TREE_CODE (op0) == compl_code
8754 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8755 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8756 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8757
8758 /* MIN (a, MAX (a, b)) == a. */
8759 if (TREE_CODE (op1) == compl_code
8760 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8761 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8762 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8763
8764 /* MIN (a, MAX (b, a)) == a. */
8765 if (TREE_CODE (op1) == compl_code
8766 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8767 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8768 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8769
8770 return NULL_TREE;
8771 }
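
/* For example, for any signed a and b:
     MIN (MAX (a, b), b)  ->  b     and     MAX (MIN (a, b), b)  ->  b
   since MAX (a, b) >= b makes the outer MIN pick b, and
   MIN (a, b) <= b makes the outer MAX pick b. */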
8772
8773 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8774 by changing CODE to reduce the magnitude of constants involved in
8775 ARG0 of the comparison.
8776 Returns a canonicalized comparison tree if a simplification was
8777 possible, otherwise returns NULL_TREE.
8778 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8779 valid if signed overflow is undefined. */
8780
8781 static tree
8782 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8783 tree arg0, tree arg1,
8784 bool *strict_overflow_p)
8785 {
8786 enum tree_code code0 = TREE_CODE (arg0);
8787 tree t, cst0 = NULL_TREE;
8788 int sgn0;
8789 bool swap = false;
8790
8791 /* Match A +- CST code arg1 and CST code arg1. We can change the
8792 first form only if overflow is undefined. */
8793 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8794 /* In principle pointers also have undefined overflow behavior,
8795 but that causes problems elsewhere. */
8796 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8797 && (code0 == MINUS_EXPR
8798 || code0 == PLUS_EXPR)
8799 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8800 || code0 == INTEGER_CST))
8801 return NULL_TREE;
8802
8803 /* Identify the constant in arg0 and its sign. */
8804 if (code0 == INTEGER_CST)
8805 cst0 = arg0;
8806 else
8807 cst0 = TREE_OPERAND (arg0, 1);
8808 sgn0 = tree_int_cst_sgn (cst0);
8809
8810 /* Overflowed constants and zero will cause problems. */
8811 if (integer_zerop (cst0)
8812 || TREE_OVERFLOW (cst0))
8813 return NULL_TREE;
8814
8815 /* See if we can reduce the magnitude of the constant in
8816 arg0 by changing the comparison code. */
8817 if (code0 == INTEGER_CST)
8818 {
8819 /* CST <= arg1 -> CST-1 < arg1. */
8820 if (code == LE_EXPR && sgn0 == 1)
8821 code = LT_EXPR;
8822 /* -CST < arg1 -> -CST-1 <= arg1. */
8823 else if (code == LT_EXPR && sgn0 == -1)
8824 code = LE_EXPR;
8825 /* CST > arg1 -> CST-1 >= arg1. */
8826 else if (code == GT_EXPR && sgn0 == 1)
8827 code = GE_EXPR;
8828 /* -CST >= arg1 -> -CST-1 > arg1. */
8829 else if (code == GE_EXPR && sgn0 == -1)
8830 code = GT_EXPR;
8831 else
8832 return NULL_TREE;
8833 /* arg1 code' CST' might be more canonical. */
8834 swap = true;
8835 }
8836 else
8837 {
8838 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8839 if (code == LT_EXPR
8840 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8841 code = LE_EXPR;
8842 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8843 else if (code == GT_EXPR
8844 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8845 code = GE_EXPR;
8846 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8847 else if (code == LE_EXPR
8848 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8849 code = LT_EXPR;
8850 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8851 else if (code == GE_EXPR
8852 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8853 code = GT_EXPR;
8854 else
8855 return NULL_TREE;
8856 *strict_overflow_p = true;
8857 }
8858
8859 /* Now build the constant reduced in magnitude. But not if that
8860 would produce one outside of its type's range. */
8861 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8862 && ((sgn0 == 1
8863 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8864 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8865 || (sgn0 == -1
8866 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8867 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8868 /* We cannot swap the comparison here as that would cause us to
8869 endlessly recurse. */
8870 return NULL_TREE;
8871
8872 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8873 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8874 if (code0 != INTEGER_CST)
8875 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8876 t = fold_convert (TREE_TYPE (arg1), t);
8877
8878 /* If swapping might yield a more canonical form, do so. */
8879 if (swap)
8880 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8881 else
8882 return fold_build2_loc (loc, code, type, t, arg1);
8883 }
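
/* A worked example of the magnitude reduction above, assuming signed
   overflow is undefined:
     a - 5 < b   ->   a - 4 <= b
   (LT with a positive constant in a MINUS becomes LE with the constant
   reduced by one); *STRICT_OVERFLOW_P records that the result relies
   on that undefinedness assumption. */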
8884
8885 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8886 overflow further. Try to decrease the magnitude of constants involved
8887 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8888 and put sole constants at the second argument position.
8889 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8890
8891 static tree
8892 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8893 tree arg0, tree arg1)
8894 {
8895 tree t;
8896 bool strict_overflow_p;
8897 const char * const warnmsg = G_("assuming signed overflow does not occur "
8898 "when reducing constant in comparison");
8899
8900 /* Try canonicalization by simplifying arg0. */
8901 strict_overflow_p = false;
8902 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8903 &strict_overflow_p);
8904 if (t)
8905 {
8906 if (strict_overflow_p)
8907 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8908 return t;
8909 }
8910
8911 /* Try canonicalization by simplifying arg1 using the swapped
8912 comparison. */
8913 code = swap_tree_comparison (code);
8914 strict_overflow_p = false;
8915 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8916 &strict_overflow_p);
8917 if (t && strict_overflow_p)
8918 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8919 return t;
8920 }
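
/* E.g. the comparison 3 <= b canonicalizes through the helper to
   b > 2: the sole-constant form turns LE into LT (CST <= arg1
   -> CST-1 < arg1) and then swaps the operands so the constant ends
   up in the second position. */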
8921
8922 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8923 space. This is used to avoid issuing overflow warnings for
8924 expressions like &p->x which cannot wrap. */
8925
8926 static bool
8927 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8928 {
8929 double_int di_offset, total;
8930
8931 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8932 return true;
8933
8934 if (bitpos < 0)
8935 return true;
8936
8937 if (offset == NULL_TREE)
8938 di_offset = double_int_zero;
8939 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8940 return true;
8941 else
8942 di_offset = TREE_INT_CST (offset);
8943
8944 bool overflow;
8945 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8946 total = di_offset.add_with_sign (units, true, &overflow);
8947 if (overflow)
8948 return true;
8949
8950 if (total.high != 0)
8951 return true;
8952
8953 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8954 if (size <= 0)
8955 return true;
8956
8957 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8958 array. */
8959 if (TREE_CODE (base) == ADDR_EXPR)
8960 {
8961 HOST_WIDE_INT base_size;
8962
8963 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8964 if (base_size > 0 && size < base_size)
8965 size = base_size;
8966 }
8967
8968 return total.low > (unsigned HOST_WIDE_INT) size;
8969 }
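
/* Sketch: for struct s { int a, b; } *p, the address &p->b has a
   BITPOS of one int (32 bits on common targets) and no variable
   offset; the resulting 4 bytes do not exceed the 8-byte object size,
   so this returns false and no wraparound warning is issued. */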
8970
8971 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8972 kind INTEGER_CST. This makes sure to properly sign-extend the
8973 constant. */
8974
8975 static HOST_WIDE_INT
8976 size_low_cst (const_tree t)
8977 {
8978 double_int d = tree_to_double_int (t);
8979 return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
8980 }
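
/* E.g. with a 64-bit HOST_WIDE_INT, a sizetype constant with all bits
   set sign-extends at the type's precision to -1, matching its
   interpretation as a signed byte offset rather than a huge unsigned
   value. */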
8981
8982 /* Subroutine of fold_binary. This routine performs all of the
8983 transformations that are common to the equality/inequality
8984 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8985 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8986 fold_binary should call fold_binary. Fold a comparison with
8987 tree code CODE and type TYPE with operands OP0 and OP1. Return
8988 the folded comparison or NULL_TREE. */
8989
8990 static tree
8991 fold_comparison (location_t loc, enum tree_code code, tree type,
8992 tree op0, tree op1)
8993 {
8994 tree arg0, arg1, tem;
8995
8996 arg0 = op0;
8997 arg1 = op1;
8998
8999 STRIP_SIGN_NOPS (arg0);
9000 STRIP_SIGN_NOPS (arg1);
9001
9002 tem = fold_relational_const (code, type, arg0, arg1);
9003 if (tem != NULL_TREE)
9004 return tem;
9005
9006 /* If one arg is a real or integer constant, put it last. */
9007 if (tree_swap_operands_p (arg0, arg1, true))
9008 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9009
9010 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9011 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9012 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9013 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9014 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9015 && (TREE_CODE (arg1) == INTEGER_CST
9016 && !TREE_OVERFLOW (arg1)))
9017 {
9018 tree const1 = TREE_OPERAND (arg0, 1);
9019 tree const2 = arg1;
9020 tree variable = TREE_OPERAND (arg0, 0);
9021 tree lhs;
9022 int lhs_add;
9023 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9024
9025 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9026 TREE_TYPE (arg1), const2, const1);
9027
9028 /* If the constant operation overflowed this can be
9029 simplified as a comparison against INT_MAX/INT_MIN. */
9030 if (TREE_CODE (lhs) == INTEGER_CST
9031 && TREE_OVERFLOW (lhs))
9032 {
9033 int const1_sgn = tree_int_cst_sgn (const1);
9034 enum tree_code code2 = code;
9035
9036 /* Get the sign of the constant on the lhs if the
9037 operation were VARIABLE + CONST1. */
9038 if (TREE_CODE (arg0) == MINUS_EXPR)
9039 const1_sgn = -const1_sgn;
9040
9041 /* The sign of the constant determines if we overflowed
9042 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9043 Canonicalize to the INT_MIN overflow by swapping the comparison
9044 if necessary. */
9045 if (const1_sgn == -1)
9046 code2 = swap_tree_comparison (code);
9047
9048 /* We can now look at the canonicalized case
9049 VARIABLE + 1 CODE2 INT_MIN
9050 and decide on the result. */
9051 if (code2 == LT_EXPR
9052 || code2 == LE_EXPR
9053 || code2 == EQ_EXPR)
9054 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9055 else if (code2 == NE_EXPR
9056 || code2 == GE_EXPR
9057 || code2 == GT_EXPR)
9058 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9059 }
9060
9061 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9062 && (TREE_CODE (lhs) != INTEGER_CST
9063 || !TREE_OVERFLOW (lhs)))
9064 {
9065 if (code != EQ_EXPR && code != NE_EXPR)
9066 fold_overflow_warning ("assuming signed overflow does not occur "
9067 "when changing X +- C1 cmp C2 to "
9068 "X cmp C1 +- C2",
9069 WARN_STRICT_OVERFLOW_COMPARISON);
9070 return fold_build2_loc (loc, code, type, variable, lhs);
9071 }
9072 }
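
/* Concrete case of the overflow branch above, with 32-bit int:
   x + 1 <= INT_MIN computes C2 - C1 = INT_MIN - 1, which overflows;
   CONST1 is positive so no swap is needed, and the LE case folds to
   constant false while still evaluating x for side-effects. */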
9073
9074 /* For comparisons of pointers we can decompose it to a compile time
9075 comparison of the base objects and the offsets into the object.
9076 This requires at least one operand being an ADDR_EXPR or a
9077 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9078 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9079 && (TREE_CODE (arg0) == ADDR_EXPR
9080 || TREE_CODE (arg1) == ADDR_EXPR
9081 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9082 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9083 {
9084 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9085 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9086 enum machine_mode mode;
9087 int volatilep, unsignedp;
9088 bool indirect_base0 = false, indirect_base1 = false;
9089
9090 /* Get base and offset for the access. Strip ADDR_EXPR for
9091 get_inner_reference, but put it back by stripping INDIRECT_REF
9092 off the base object if possible. indirect_baseN will be true
9093 if baseN is not an address but refers to the object itself. */
9094 base0 = arg0;
9095 if (TREE_CODE (arg0) == ADDR_EXPR)
9096 {
9097 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9098 &bitsize, &bitpos0, &offset0, &mode,
9099 &unsignedp, &volatilep, false);
9100 if (TREE_CODE (base0) == INDIRECT_REF)
9101 base0 = TREE_OPERAND (base0, 0);
9102 else
9103 indirect_base0 = true;
9104 }
9105 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9106 {
9107 base0 = TREE_OPERAND (arg0, 0);
9108 STRIP_SIGN_NOPS (base0);
9109 if (TREE_CODE (base0) == ADDR_EXPR)
9110 {
9111 base0 = TREE_OPERAND (base0, 0);
9112 indirect_base0 = true;
9113 }
9114 offset0 = TREE_OPERAND (arg0, 1);
9115 if (tree_fits_shwi_p (offset0))
9116 {
9117 HOST_WIDE_INT off = size_low_cst (offset0);
9118 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9119 * BITS_PER_UNIT)
9120 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9121 {
9122 bitpos0 = off * BITS_PER_UNIT;
9123 offset0 = NULL_TREE;
9124 }
9125 }
9126 }
9127
9128 base1 = arg1;
9129 if (TREE_CODE (arg1) == ADDR_EXPR)
9130 {
9131 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9132 &bitsize, &bitpos1, &offset1, &mode,
9133 &unsignedp, &volatilep, false);
9134 if (TREE_CODE (base1) == INDIRECT_REF)
9135 base1 = TREE_OPERAND (base1, 0);
9136 else
9137 indirect_base1 = true;
9138 }
9139 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9140 {
9141 base1 = TREE_OPERAND (arg1, 0);
9142 STRIP_SIGN_NOPS (base1);
9143 if (TREE_CODE (base1) == ADDR_EXPR)
9144 {
9145 base1 = TREE_OPERAND (base1, 0);
9146 indirect_base1 = true;
9147 }
9148 offset1 = TREE_OPERAND (arg1, 1);
9149 if (tree_fits_shwi_p (offset1))
9150 {
9151 HOST_WIDE_INT off = size_low_cst (offset1);
9152 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9153 * BITS_PER_UNIT)
9154 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9155 {
9156 bitpos1 = off * BITS_PER_UNIT;
9157 offset1 = NULL_TREE;
9158 }
9159 }
9160 }
9161
9162 /* A local variable can never be pointed to by
9163 the default SSA name of an incoming parameter. */
9164 if ((TREE_CODE (arg0) == ADDR_EXPR
9165 && indirect_base0
9166 && TREE_CODE (base0) == VAR_DECL
9167 && auto_var_in_fn_p (base0, current_function_decl)
9168 && !indirect_base1
9169 && TREE_CODE (base1) == SSA_NAME
9170 && SSA_NAME_IS_DEFAULT_DEF (base1)
9171 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9172 || (TREE_CODE (arg1) == ADDR_EXPR
9173 && indirect_base1
9174 && TREE_CODE (base1) == VAR_DECL
9175 && auto_var_in_fn_p (base1, current_function_decl)
9176 && !indirect_base0
9177 && TREE_CODE (base0) == SSA_NAME
9178 && SSA_NAME_IS_DEFAULT_DEF (base0)
9179 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9180 {
9181 if (code == NE_EXPR)
9182 return constant_boolean_node (1, type);
9183 else if (code == EQ_EXPR)
9184 return constant_boolean_node (0, type);
9185 }
9186 /* If we have equivalent bases we might be able to simplify. */
9187 else if (indirect_base0 == indirect_base1
9188 && operand_equal_p (base0, base1, 0))
9189 {
9190 /* We can fold this expression to a constant if the non-constant
9191 offset parts are equal. */
9192 if ((offset0 == offset1
9193 || (offset0 && offset1
9194 && operand_equal_p (offset0, offset1, 0)))
9195 && (code == EQ_EXPR
9196 || code == NE_EXPR
9197 || (indirect_base0 && DECL_P (base0))
9198 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9199
9200 {
9201 if (code != EQ_EXPR
9202 && code != NE_EXPR
9203 && bitpos0 != bitpos1
9204 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9205 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9206 fold_overflow_warning (("assuming pointer wraparound does not "
9207 "occur when comparing P +- C1 with "
9208 "P +- C2"),
9209 WARN_STRICT_OVERFLOW_CONDITIONAL);
9210
9211 switch (code)
9212 {
9213 case EQ_EXPR:
9214 return constant_boolean_node (bitpos0 == bitpos1, type);
9215 case NE_EXPR:
9216 return constant_boolean_node (bitpos0 != bitpos1, type);
9217 case LT_EXPR:
9218 return constant_boolean_node (bitpos0 < bitpos1, type);
9219 case LE_EXPR:
9220 return constant_boolean_node (bitpos0 <= bitpos1, type);
9221 case GE_EXPR:
9222 return constant_boolean_node (bitpos0 >= bitpos1, type);
9223 case GT_EXPR:
9224 return constant_boolean_node (bitpos0 > bitpos1, type);
9225 default:;
9226 }
9227 }
9228 /* We can simplify the comparison to a comparison of the variable
9229 offset parts if the constant offset parts are equal.
9230 Be careful to use signed sizetype here because otherwise we
9231 mess with array offsets in the wrong way. This is possible
9232 because pointer arithmetic is restricted to remain within an
9233 object and overflow on pointer differences is undefined as of
9234 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9235 else if (bitpos0 == bitpos1
9236 && ((code == EQ_EXPR || code == NE_EXPR)
9237 || (indirect_base0 && DECL_P (base0))
9238 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9239 {
9240 /* By converting to signed sizetype we cover middle-end pointer
9241 arithmetic which operates on unsigned pointer types of size
9242 type size and ARRAY_REF offsets which are properly sign or
9243 zero extended from their type in case it is narrower than
9244 sizetype. */
9245 if (offset0 == NULL_TREE)
9246 offset0 = build_int_cst (ssizetype, 0);
9247 else
9248 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9249 if (offset1 == NULL_TREE)
9250 offset1 = build_int_cst (ssizetype, 0);
9251 else
9252 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9253
9254 if (code != EQ_EXPR
9255 && code != NE_EXPR
9256 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9257 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9258 fold_overflow_warning (("assuming pointer wraparound does not "
9259 "occur when comparing P +- C1 with "
9260 "P +- C2"),
9261 WARN_STRICT_OVERFLOW_COMPARISON);
9262
9263 return fold_build2_loc (loc, code, type, offset0, offset1);
9264 }
9265 }
9266 /* For non-equal bases we can simplify if they are addresses
9267 of local binding decls or constants. */
9268 else if (indirect_base0 && indirect_base1
9269 /* We know that !operand_equal_p (base0, base1, 0)
9270 because the if condition was false. But make
9271 sure the two decls are not the same. */
9272 && base0 != base1
9273 && TREE_CODE (arg0) == ADDR_EXPR
9274 && TREE_CODE (arg1) == ADDR_EXPR
9275 && (((TREE_CODE (base0) == VAR_DECL
9276 || TREE_CODE (base0) == PARM_DECL)
9277 && (targetm.binds_local_p (base0)
9278 || CONSTANT_CLASS_P (base1)))
9279 || CONSTANT_CLASS_P (base0))
9280 && (((TREE_CODE (base1) == VAR_DECL
9281 || TREE_CODE (base1) == PARM_DECL)
9282 && (targetm.binds_local_p (base1)
9283 || CONSTANT_CLASS_P (base0)))
9284 || CONSTANT_CLASS_P (base1)))
9285 {
9286 if (code == EQ_EXPR)
9287 return omit_two_operands_loc (loc, type, boolean_false_node,
9288 arg0, arg1);
9289 else if (code == NE_EXPR)
9290 return omit_two_operands_loc (loc, type, boolean_true_node,
9291 arg0, arg1);
9292 }
9293 /* For equal offsets we can simplify to a comparison of the
9294 base addresses. */
9295 else if (bitpos0 == bitpos1
9296 && (indirect_base0
9297 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9298 && (indirect_base1
9299 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9300 && ((offset0 == offset1)
9301 || (offset0 && offset1
9302 && operand_equal_p (offset0, offset1, 0))))
9303 {
9304 if (indirect_base0)
9305 base0 = build_fold_addr_expr_loc (loc, base0);
9306 if (indirect_base1)
9307 base1 = build_fold_addr_expr_loc (loc, base1);
9308 return fold_build2_loc (loc, code, type, base0, base1);
9309 }
9310 }
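
/* Small example of the equal-bases case above: for int a[4], the
   comparison &a[1] < &a[2] decomposes to the common base a with bit
   positions 32 and 64, so the whole expression folds to true at
   compile time. */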
9311
9312 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9313 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9314 the resulting offset is smaller in absolute value than the
9315 original one. */
9316 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9317 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9318 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9319 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9320 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9321 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9322 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9323 {
9324 tree const1 = TREE_OPERAND (arg0, 1);
9325 tree const2 = TREE_OPERAND (arg1, 1);
9326 tree variable1 = TREE_OPERAND (arg0, 0);
9327 tree variable2 = TREE_OPERAND (arg1, 0);
9328 tree cst;
9329 const char * const warnmsg = G_("assuming signed overflow does not "
9330 "occur when combining constants around "
9331 "a comparison");
9332
9333 /* Put the constant on the side where it doesn't overflow and is
9334 of lower absolute value than before. */
9335 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9336 ? MINUS_EXPR : PLUS_EXPR,
9337 const2, const1);
9338 if (!TREE_OVERFLOW (cst)
9339 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9340 {
9341 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9342 return fold_build2_loc (loc, code, type,
9343 variable1,
9344 fold_build2_loc (loc,
9345 TREE_CODE (arg1), TREE_TYPE (arg1),
9346 variable2, cst));
9347 }
9348
9349 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9350 ? MINUS_EXPR : PLUS_EXPR,
9351 const1, const2);
9352 if (!TREE_OVERFLOW (cst)
9353 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9354 {
9355 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9356 return fold_build2_loc (loc, code, type,
9357 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9358 variable1, cst),
9359 variable2);
9360 }
9361 }
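
/* E.g. x + 10 < y + 3 combines the constants as C2 - C1 = -7, which
   passes the magnitude check, so the comparison becomes x < y + -7
   and a single addition remains; the warning machinery records the
   assumption that none of the involved additions wraps. */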
9362
9363 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9364 signed arithmetic case. That form is created by the compiler
9365 often enough for folding it to be of value. One example is in
9366 computing loop trip counts after Operator Strength Reduction. */
9367 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9368 && TREE_CODE (arg0) == MULT_EXPR
9369 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9370 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9371 && integer_zerop (arg1))
9372 {
9373 tree const1 = TREE_OPERAND (arg0, 1);
9374 tree const2 = arg1; /* zero */
9375 tree variable1 = TREE_OPERAND (arg0, 0);
9376 enum tree_code cmp_code = code;
9377
9378 /* Handle unfolded multiplication by zero. */
9379 if (integer_zerop (const1))
9380 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9381
9382 fold_overflow_warning (("assuming signed overflow does not occur when "
9383 "eliminating multiplication in comparison "
9384 "with zero"),
9385 WARN_STRICT_OVERFLOW_COMPARISON);
9386
9387 /* If const1 is negative we swap the sense of the comparison. */
9388 if (tree_int_cst_sgn (const1) < 0)
9389 cmp_code = swap_tree_comparison (cmp_code);
9390
9391 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9392 }
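
/* For example, assuming undefined signed overflow, x * 4 > 0 folds to
   x > 0, while x * -4 > 0 swaps the sense to x < 0; an unfolded zero
   multiplier degenerates to the constant comparison 0 > 0. */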
9393
9394 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9395 if (tem)
9396 return tem;
9397
9398 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9399 {
9400 tree targ0 = strip_float_extensions (arg0);
9401 tree targ1 = strip_float_extensions (arg1);
9402 tree newtype = TREE_TYPE (targ0);
9403
9404 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9405 newtype = TREE_TYPE (targ1);
9406
9407 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9408 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9409 return fold_build2_loc (loc, code, type,
9410 fold_convert_loc (loc, newtype, targ0),
9411 fold_convert_loc (loc, newtype, targ1));
9412
9413 /* (-a) CMP (-b) -> b CMP a */
9414 if (TREE_CODE (arg0) == NEGATE_EXPR
9415 && TREE_CODE (arg1) == NEGATE_EXPR)
9416 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9417 TREE_OPERAND (arg0, 0));
9418
9419 if (TREE_CODE (arg1) == REAL_CST)
9420 {
9421 REAL_VALUE_TYPE cst;
9422 cst = TREE_REAL_CST (arg1);
9423
9424 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9425 if (TREE_CODE (arg0) == NEGATE_EXPR)
9426 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9427 TREE_OPERAND (arg0, 0),
9428 build_real (TREE_TYPE (arg1),
9429 real_value_negate (&cst)));
9430
9431 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9432 /* a CMP (-0) -> a CMP 0 */
9433 if (REAL_VALUE_MINUS_ZERO (cst))
9434 return fold_build2_loc (loc, code, type, arg0,
9435 build_real (TREE_TYPE (arg1), dconst0));
9436
9437 /* x != NaN is always true, other ops are always false. */
9438 if (REAL_VALUE_ISNAN (cst)
9439 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9440 {
9441 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9442 return omit_one_operand_loc (loc, type, tem, arg0);
9443 }
9444
9445 /* Fold comparisons against infinity. */
9446 if (REAL_VALUE_ISINF (cst)
9447 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9448 {
9449 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9450 if (tem != NULL_TREE)
9451 return tem;
9452 }
9453 }
9454
9455 /* If this is a comparison of a real constant with a PLUS_EXPR
9456 or a MINUS_EXPR of a real constant, we can convert it into a
9457 comparison with a revised real constant as long as no overflow
9458 occurs when unsafe_math_optimizations are enabled. */
9459 if (flag_unsafe_math_optimizations
9460 && TREE_CODE (arg1) == REAL_CST
9461 && (TREE_CODE (arg0) == PLUS_EXPR
9462 || TREE_CODE (arg0) == MINUS_EXPR)
9463 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9464 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9465 ? MINUS_EXPR : PLUS_EXPR,
9466 arg1, TREE_OPERAND (arg0, 1)))
9467 && !TREE_OVERFLOW (tem))
9468 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9469
9470 /* Likewise, we can simplify a comparison of a real constant with
9471 a MINUS_EXPR whose first operand is also a real constant, i.e.
9472 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9473 floating-point types only if -fassociative-math is set. */
9474 if (flag_associative_math
9475 && TREE_CODE (arg1) == REAL_CST
9476 && TREE_CODE (arg0) == MINUS_EXPR
9477 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9478 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9479 arg1))
9480 && !TREE_OVERFLOW (tem))
9481 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9482 TREE_OPERAND (arg0, 1), tem);
9483
9484 /* Fold comparisons against built-in math functions. */
9485 if (TREE_CODE (arg1) == REAL_CST
9486 && flag_unsafe_math_optimizations
9487 && ! flag_errno_math)
9488 {
9489 enum built_in_function fcode = builtin_mathfn_code (arg0);
9490
9491 if (fcode != END_BUILTINS)
9492 {
9493 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9494 if (tem != NULL_TREE)
9495 return tem;
9496 }
9497 }
9498 }
9499
9500 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9501 && CONVERT_EXPR_P (arg0))
9502 {
9503 /* If we are widening one operand of an integer comparison,
9504 see if the other operand is similarly being widened. Perhaps we
9505 can do the comparison in the narrower type. */
9506 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9507 if (tem)
9508 return tem;
9509
9510 /* Or if we are changing signedness. */
9511 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9512 if (tem)
9513 return tem;
9514 }
9515
9516 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9517 constant, we can simplify it. */
9518 if (TREE_CODE (arg1) == INTEGER_CST
9519 && (TREE_CODE (arg0) == MIN_EXPR
9520 || TREE_CODE (arg0) == MAX_EXPR)
9521 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9522 {
9523 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9524 if (tem)
9525 return tem;
9526 }
9527
9528 /* Simplify comparison of something with itself. (For IEEE
9529 floating-point, we can only do some of these simplifications.) */
9530 if (operand_equal_p (arg0, arg1, 0))
9531 {
9532 switch (code)
9533 {
9534 case EQ_EXPR:
9535 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9536 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9537 return constant_boolean_node (1, type);
9538 break;
9539
9540 case GE_EXPR:
9541 case LE_EXPR:
9542 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9543 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9544 return constant_boolean_node (1, type);
9545 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9546
9547 case NE_EXPR:
9548 /* For NE, we can only do this simplification if the operands
9549 are integral or we don't honor IEEE floating-point NaNs. */
9550 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9551 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9552 break;
9553 /* ... fall through ... */
9554 case GT_EXPR:
9555 case LT_EXPR:
9556 return constant_boolean_node (0, type);
9557 default:
9558 gcc_unreachable ();
9559 }
9560 }
9561
9562 /* If we are comparing an expression that just has comparisons
9563 of two integer values, arithmetic expressions of those comparisons,
9564 and constants, we can simplify it. There are only three cases
9565 to check: the two values can either be equal, the first can be
9566 greater, or the second can be greater. Fold the expression for
9567 those three values. Since each value must be 0 or 1, we have
9568 eight possibilities, each of which corresponds to the constant 0
9569 or 1 or one of the six possible comparisons.
9570
9571 This handles common cases like (a > b) == 0 but also handles
9572 expressions like ((x > y) - (y > x)) > 0, which supposedly
9573 occur in macroized code. */
9574
9575 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9576 {
9577 tree cval1 = 0, cval2 = 0;
9578 int save_p = 0;
9579
9580 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9581 /* Don't handle degenerate cases here; they should already
9582 have been handled anyway. */
9583 && cval1 != 0 && cval2 != 0
9584 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9585 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9586 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9587 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9588 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9589 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9590 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9591 {
9592 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9593 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9594
9595 /* We can't just pass T to eval_subst in case cval1 or cval2
9596 was the same as ARG1. */
9597
9598 tree high_result
9599 = fold_build2_loc (loc, code, type,
9600 eval_subst (loc, arg0, cval1, maxval,
9601 cval2, minval),
9602 arg1);
9603 tree equal_result
9604 = fold_build2_loc (loc, code, type,
9605 eval_subst (loc, arg0, cval1, maxval,
9606 cval2, maxval),
9607 arg1);
9608 tree low_result
9609 = fold_build2_loc (loc, code, type,
9610 eval_subst (loc, arg0, cval1, minval,
9611 cval2, maxval),
9612 arg1);
9613
9614 /* All three of these results should be 0 or 1. Confirm they are.
9615 Then use those values to select the proper code to use. */
9616
9617 if (TREE_CODE (high_result) == INTEGER_CST
9618 && TREE_CODE (equal_result) == INTEGER_CST
9619 && TREE_CODE (low_result) == INTEGER_CST)
9620 {
9621 /* Make a 3-bit mask with the high-order bit being the
9622 value for `>', the next for '=', and the low for '<'. */
9623 switch ((integer_onep (high_result) * 4)
9624 + (integer_onep (equal_result) * 2)
9625 + integer_onep (low_result))
9626 {
9627 case 0:
9628 /* Always false. */
9629 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9630 case 1:
9631 code = LT_EXPR;
9632 break;
9633 case 2:
9634 code = EQ_EXPR;
9635 break;
9636 case 3:
9637 code = LE_EXPR;
9638 break;
9639 case 4:
9640 code = GT_EXPR;
9641 break;
9642 case 5:
9643 code = NE_EXPR;
9644 break;
9645 case 6:
9646 code = GE_EXPR;
9647 break;
9648 case 7:
9649 /* Always true. */
9650 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9651 }
9652
9653 if (save_p)
9654 {
9655 tem = save_expr (build2 (code, type, cval1, cval2));
9656 SET_EXPR_LOCATION (tem, loc);
9657 return tem;
9658 }
9659 return fold_build2_loc (loc, code, type, cval1, cval2);
9660 }
9661 }
9662 }
9663
9664 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9665 into a single range test. */
9666 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9667 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9668 && TREE_CODE (arg1) == INTEGER_CST
9669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9670 && !integer_zerop (TREE_OPERAND (arg0, 1))
9671 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9672 && !TREE_OVERFLOW (arg1))
9673 {
9674 tem = fold_div_compare (loc, code, type, arg0, arg1);
9675 if (tem != NULL_TREE)
9676 return tem;
9677 }
9678
9679 /* Fold ~X op ~Y as Y op X. */
9680 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9681 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9682 {
9683 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9684 return fold_build2_loc (loc, code, type,
9685 fold_convert_loc (loc, cmp_type,
9686 TREE_OPERAND (arg1, 0)),
9687 TREE_OPERAND (arg0, 0));
9688 }
9689
9690 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9691 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9692 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9693 {
9694 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9695 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9696 TREE_OPERAND (arg0, 0),
9697 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9698 fold_convert_loc (loc, cmp_type, arg1)));
9699 }
9700
9701 return NULL_TREE;
9702 }
9703
9704
9705 /* Subroutine of fold_binary. Optimize complex multiplications of the
9706 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9707 argument EXPR represents the expression "z" of type TYPE. */
9708
9709 static tree
9710 fold_mult_zconjz (location_t loc, tree type, tree expr)
9711 {
9712 tree itype = TREE_TYPE (type);
9713 tree rpart, ipart, tem;
9714
9715 if (TREE_CODE (expr) == COMPLEX_EXPR)
9716 {
9717 rpart = TREE_OPERAND (expr, 0);
9718 ipart = TREE_OPERAND (expr, 1);
9719 }
9720 else if (TREE_CODE (expr) == COMPLEX_CST)
9721 {
9722 rpart = TREE_REALPART (expr);
9723 ipart = TREE_IMAGPART (expr);
9724 }
9725 else
9726 {
9727 expr = save_expr (expr);
9728 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9729 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9730 }
9731
9732 rpart = save_expr (rpart);
9733 ipart = save_expr (ipart);
9734 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9735 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9736 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9737 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9738 build_zero_cst (itype));
9739 }
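
/* Sketch of the result for z = a + b*i:
     z * conj(z)  ->  COMPLEX_EXPR <a*a + b*b, 0>
   with the real and imaginary parts wrapped in SAVE_EXPRs so each is
   evaluated only once. */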
9740
9741
9742 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9743 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9744 guarantees that P and N have the same least significant log2(M) bits.
9745 N is not otherwise constrained. In particular, N is not normalized to
9746 0 <= N < M as is common. In general, the precise value of P is unknown.
9747 M is chosen as large as possible such that constant N can be determined.
9748
9749 Returns M and sets *RESIDUE to N.
9750
9751 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9752 account. This is not always possible due to PR 35705.
9753 */
9754
9755 static unsigned HOST_WIDE_INT
9756 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9757 bool allow_func_align)
9758 {
9759 enum tree_code code;
9760
9761 *residue = 0;
9762
9763 code = TREE_CODE (expr);
9764 if (code == ADDR_EXPR)
9765 {
9766 unsigned int bitalign;
9767 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9768 *residue /= BITS_PER_UNIT;
9769 return bitalign / BITS_PER_UNIT;
9770 }
9771 else if (code == POINTER_PLUS_EXPR)
9772 {
9773 tree op0, op1;
9774 unsigned HOST_WIDE_INT modulus;
9775 enum tree_code inner_code;
9776
9777 op0 = TREE_OPERAND (expr, 0);
9778 STRIP_NOPS (op0);
9779 modulus = get_pointer_modulus_and_residue (op0, residue,
9780 allow_func_align);
9781
9782 op1 = TREE_OPERAND (expr, 1);
9783 STRIP_NOPS (op1);
9784 inner_code = TREE_CODE (op1);
9785 if (inner_code == INTEGER_CST)
9786 {
9787 *residue += TREE_INT_CST_LOW (op1);
9788 return modulus;
9789 }
9790 else if (inner_code == MULT_EXPR)
9791 {
9792 op1 = TREE_OPERAND (op1, 1);
9793 if (TREE_CODE (op1) == INTEGER_CST)
9794 {
9795 unsigned HOST_WIDE_INT align;
9796
9797 /* Compute the greatest power-of-2 divisor of op1. */
9798 align = TREE_INT_CST_LOW (op1);
9799 align &= -align;
9800
9801 /* If align is non-zero and less than modulus, replace
9802 modulus with align. If align is 0, then either op1 is 0
9803 or the greatest power-of-2 divisor of op1 doesn't fit in an
9804 unsigned HOST_WIDE_INT. In either case, no additional
9805 constraint is imposed. */
9806 if (align)
9807 modulus = MIN (modulus, align);
9808
9809 return modulus;
9810 }
9811 }
9812 }
9813
9814 /* If we get here, we were unable to determine anything useful about the
9815 expression. */
9816 return 1;
9817 }
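
/* Illustration: for a 16-byte aligned object buf, the expression
   &buf p+ 20 yields modulus 16 and *RESIDUE 20 (the residue is not
   normalized, so 20 rather than 4); i.e. the pointer is known to be 4
   more than a multiple of 16. A return value of 1 means nothing
   useful was determined. */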
9818
9819 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9820 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9821
9822 static bool
9823 vec_cst_ctor_to_array (tree arg, tree *elts)
9824 {
9825 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9826
9827 if (TREE_CODE (arg) == VECTOR_CST)
9828 {
9829 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9830 elts[i] = VECTOR_CST_ELT (arg, i);
9831 }
9832 else if (TREE_CODE (arg) == CONSTRUCTOR)
9833 {
9834 constructor_elt *elt;
9835
9836 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9837 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9838 return false;
9839 else
9840 elts[i] = elt->value;
9841 }
9842 else
9843 return false;
9844 for (; i < nelts; i++)
9845 elts[i]
9846 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9847 return true;
9848 }
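
/* E.g. a CONSTRUCTOR {1, 2} for a four-element vector fills ELTS with
   {1, 2, 0, 0}: missing trailing elements are zero-filled, while an
   excess or vector-typed element makes the conversion fail. */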
9849
9850 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9851 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9852 NULL_TREE otherwise. */
9853
9854 static tree
9855 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9856 {
9857 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9858 tree *elts;
9859 bool need_ctor = false;
9860
9861 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9862 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9863 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9864 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9865 return NULL_TREE;
9866
9867 elts = XALLOCAVEC (tree, nelts * 3);
9868 if (!vec_cst_ctor_to_array (arg0, elts)
9869 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9870 return NULL_TREE;
9871
9872 for (i = 0; i < nelts; i++)
9873 {
9874 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9875 need_ctor = true;
9876 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9877 }
9878
9879 if (need_ctor)
9880 {
9881 vec<constructor_elt, va_gc> *v;
9882 vec_alloc (v, nelts);
9883 for (i = 0; i < nelts; i++)
9884 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9885 return build_constructor (type, v);
9886 }
9887 else
9888 return build_vector (type, &elts[2 * nelts]);
9889 }
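
/* E.g. selecting the even-indexed elements of two V4SI constants:
     arg0 = {0,1,2,3}, arg1 = {4,5,6,7}, sel = {0,2,4,6}
   produces the VECTOR_CST {0,2,4,6}; a CONSTRUCTOR is built instead
   only when some selected element is not itself constant. */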
9890
9891 /* Try to fold a pointer difference of type TYPE between two address
9892 expressions of array references AREF0 and AREF1 using location LOC.
9893 Return a simplified expression for the difference or NULL_TREE. */
9894
9895 static tree
9896 fold_addr_of_array_ref_difference (location_t loc, tree type,
9897 tree aref0, tree aref1)
9898 {
9899 tree base0 = TREE_OPERAND (aref0, 0);
9900 tree base1 = TREE_OPERAND (aref1, 0);
9901 tree base_offset = build_int_cst (type, 0);
9902
9903 /* If the bases are array references as well, recurse. If the bases
9904 are pointer indirections compute the difference of the pointers.
9905 If the bases are equal, we are set. */
9906 if ((TREE_CODE (base0) == ARRAY_REF
9907 && TREE_CODE (base1) == ARRAY_REF
9908 && (base_offset
9909 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9910 || (INDIRECT_REF_P (base0)
9911 && INDIRECT_REF_P (base1)
9912 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9913 TREE_OPERAND (base0, 0),
9914 TREE_OPERAND (base1, 0))))
9915 || operand_equal_p (base0, base1, 0))
9916 {
9917 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9918 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9919 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9920 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9921 return fold_build2_loc (loc, PLUS_EXPR, type,
9922 base_offset,
9923 fold_build2_loc (loc, MULT_EXPR, type,
9924 diff, esz));
9925 }
9926 return NULL_TREE;
9927 }
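
/* Sketch: for int a[8] on a target with 4-byte int, the difference
   &a[i] - &a[j] simplifies here to 0 + (i - j) * 4, since the two
   ARRAY_REFs share the base a. */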
9928
9929 /* If the real or vector real constant CST of type TYPE has an exact
9930 inverse, return it, otherwise return NULL_TREE. */
9931
9932 static tree
9933 exact_inverse (tree type, tree cst)
9934 {
9935 REAL_VALUE_TYPE r;
9936 tree unit_type, *elts;
9937 enum machine_mode mode;
9938 unsigned vec_nelts, i;
9939
9940 switch (TREE_CODE (cst))
9941 {
9942 case REAL_CST:
9943 r = TREE_REAL_CST (cst);
9944
9945 if (exact_real_inverse (TYPE_MODE (type), &r))
9946 return build_real (type, r);
9947
9948 return NULL_TREE;
9949
9950 case VECTOR_CST:
9951 vec_nelts = VECTOR_CST_NELTS (cst);
9952 elts = XALLOCAVEC (tree, vec_nelts);
9953 unit_type = TREE_TYPE (type);
9954 mode = TYPE_MODE (unit_type);
9955
9956 for (i = 0; i < vec_nelts; i++)
9957 {
9958 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9959 if (!exact_real_inverse (mode, &r))
9960 return NULL_TREE;
9961 elts[i] = build_real (unit_type, r);
9962 }
9963
9964 return build_vector (type, elts);
9965
9966 default:
9967 return NULL_TREE;
9968 }
9969 }
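
/* E.g. the REAL_CST 4.0 has the exact binary inverse 0.25 and is
   folded accordingly, while 3.0 yields NULL_TREE because 1/3 is not
   exactly representable in binary floating point; for a VECTOR_CST
   every element must have an exact inverse. */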
9970
9971 /* Mask out the tz least significant bits of X of type TYPE where
9972 tz is the number of trailing zeroes in Y. */
9973 static double_int
9974 mask_with_tz (tree type, double_int x, double_int y)
9975 {
9976 int tz = y.trailing_zeros ();
9977
9978 if (tz > 0)
9979 {
9980 double_int mask;
9981
9982 mask = ~double_int::mask (tz);
9983 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9984 return mask & x;
9985 }
9986 return x;
9987 }
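
/* Example: if Y is 8 (three trailing zero bits), X = 0b10111 is
   masked to 0b10000; bits of X below the lowest set bit of Y are
   discarded, as they cannot survive an AND with Y. */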
9988
9989 /* Return true when T is an address and is known to be nonzero.
9990 For floating point we further ensure that T is not denormal.
9991 Similar logic is present in nonzero_address in rtlanal.h.
9992
9993 If the return value is based on the assumption that signed overflow
9994 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9995 change *STRICT_OVERFLOW_P. */
9996
9997 static bool
9998 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9999 {
10000 tree type = TREE_TYPE (t);
10001 enum tree_code code;
10002
10003 /* Doing something useful for floating point would need more work. */
10004 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10005 return false;
10006
10007 code = TREE_CODE (t);
10008 switch (TREE_CODE_CLASS (code))
10009 {
10010 case tcc_unary:
10011 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10012 strict_overflow_p);
10013 case tcc_binary:
10014 case tcc_comparison:
10015 return tree_binary_nonzero_warnv_p (code, type,
10016 TREE_OPERAND (t, 0),
10017 TREE_OPERAND (t, 1),
10018 strict_overflow_p);
10019 case tcc_constant:
10020 case tcc_declaration:
10021 case tcc_reference:
10022 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10023
10024 default:
10025 break;
10026 }
10027
10028 switch (code)
10029 {
10030 case TRUTH_NOT_EXPR:
10031 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10032 strict_overflow_p);
10033
10034 case TRUTH_AND_EXPR:
10035 case TRUTH_OR_EXPR:
10036 case TRUTH_XOR_EXPR:
10037 return tree_binary_nonzero_warnv_p (code, type,
10038 TREE_OPERAND (t, 0),
10039 TREE_OPERAND (t, 1),
10040 strict_overflow_p);
10041
10042 case COND_EXPR:
10043 case CONSTRUCTOR:
10044 case OBJ_TYPE_REF:
10045 case ASSERT_EXPR:
10046 case ADDR_EXPR:
10047 case WITH_SIZE_EXPR:
10048 case SSA_NAME:
10049 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10050
10051 case COMPOUND_EXPR:
10052 case MODIFY_EXPR:
10053 case BIND_EXPR:
10054 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10055 strict_overflow_p);
10056
10057 case SAVE_EXPR:
10058 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10059 strict_overflow_p);
10060
10061 case CALL_EXPR:
10062 {
10063 tree fndecl = get_callee_fndecl (t);
10064 if (!fndecl) return false;
10065 if (flag_delete_null_pointer_checks && !flag_check_new
10066 && DECL_IS_OPERATOR_NEW (fndecl)
10067 && !TREE_NOTHROW (fndecl))
10068 return true;
10069 if (flag_delete_null_pointer_checks
10070 && lookup_attribute ("returns_nonnull",
10071 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10072 return true;
10073 return alloca_call_p (t);
10074 }
10075
10076 default:
10077 break;
10078 }
10079 return false;
10080 }
10081
10082 /* Return true when T is an address and is known to be nonzero.
10083 Handle warnings about undefined signed overflow. */
10084
10085 static bool
10086 tree_expr_nonzero_p (tree t)
10087 {
10088 bool ret, strict_overflow_p;
10089
10090 strict_overflow_p = false;
10091 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10092 if (strict_overflow_p)
10093 fold_overflow_warning (("assuming signed overflow does not occur when "
10094 "determining that expression is always "
10095 "non-zero"),
10096 WARN_STRICT_OVERFLOW_MISC);
10097 return ret;
10098 }
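
/* For instance, the address of a declared variable, or a call to a
   throwing operator new under -fdelete-null-pointer-checks, is known
   to be nonzero, so a test like &x == 0 can fold to false; the
   warning notes when the answer relied on undefined signed
   overflow. */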
10099
10100 /* Fold a binary expression of code CODE and type TYPE with operands
10101 OP0 and OP1. LOC is the location of the resulting expression.
10102 Return the folded expression if folding is successful. Otherwise,
10103 return NULL_TREE. */
10104
10105 tree
10106 fold_binary_loc (location_t loc,
10107 enum tree_code code, tree type, tree op0, tree op1)
10108 {
10109 enum tree_code_class kind = TREE_CODE_CLASS (code);
10110 tree arg0, arg1, tem;
10111 tree t1 = NULL_TREE;
10112 bool strict_overflow_p;
10113 unsigned int prec;
10114
10115 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10116 && TREE_CODE_LENGTH (code) == 2
10117 && op0 != NULL_TREE
10118 && op1 != NULL_TREE);
10119
10120 arg0 = op0;
10121 arg1 = op1;
10122
10123 /* Strip any conversions that don't change the mode. This is
10124 safe for every expression, except for a comparison expression
10125 because its signedness is derived from its operands. So, in
10126 the latter case, only strip conversions that don't change the
10127 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10128 preserved.
10129
10130 Note that this is done as an internal manipulation within the
10131 constant folder, in order to find the simplest representation
10132 of the arguments so that their form can be studied. In any
10133 case, the appropriate type conversions should be put back in
10134 the tree that will get out of the constant folder. */
10135
10136 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10137 {
10138 STRIP_SIGN_NOPS (arg0);
10139 STRIP_SIGN_NOPS (arg1);
10140 }
10141 else
10142 {
10143 STRIP_NOPS (arg0);
10144 STRIP_NOPS (arg1);
10145 }
10146
10147 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10148 constant but we can't do arithmetic on them. */
10149 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10150 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10151 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10152 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10153 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10154 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10155 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10156 {
10157 if (kind == tcc_binary)
10158 {
10159 /* Make sure type and arg0 have the same saturating flag. */
10160 gcc_assert (TYPE_SATURATING (type)
10161 == TYPE_SATURATING (TREE_TYPE (arg0)));
10162 tem = const_binop (code, arg0, arg1);
10163 }
10164 else if (kind == tcc_comparison)
10165 tem = fold_relational_const (code, type, arg0, arg1);
10166 else
10167 tem = NULL_TREE;
10168
10169 if (tem != NULL_TREE)
10170 {
10171 if (TREE_TYPE (tem) != type)
10172 tem = fold_convert_loc (loc, type, tem);
10173 return tem;
10174 }
10175 }
10176
10177 /* If this is a commutative operation, and ARG0 is a constant, move it
10178 to ARG1 to reduce the number of tests below. */
10179 if (commutative_tree_code (code)
10180 && tree_swap_operands_p (arg0, arg1, true))
10181 return fold_build2_loc (loc, code, type, op1, op0);
10182
10183 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10184
10185 First check for cases where an arithmetic operation is applied to a
10186 compound, conditional, or comparison operation. Push the arithmetic
10187 operation inside the compound or conditional to see if any folding
10188 can then be done. Convert comparison to conditional for this purpose.
10189 The also optimizes non-constant cases that used to be done in
10190 expand_expr.
10191
10192 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10193 where one of the operands is a comparison and the other is a comparison,
10194 a BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10195 code below would make the expression more complex. Change it to a
10196 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10197 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10198
10199 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10200 || code == EQ_EXPR || code == NE_EXPR)
10201 && TREE_CODE (type) != VECTOR_TYPE
10202 && ((truth_value_p (TREE_CODE (arg0))
10203 && (truth_value_p (TREE_CODE (arg1))
10204 || (TREE_CODE (arg1) == BIT_AND_EXPR
10205 && integer_onep (TREE_OPERAND (arg1, 1)))))
10206 || (truth_value_p (TREE_CODE (arg1))
10207 && (truth_value_p (TREE_CODE (arg0))
10208 || (TREE_CODE (arg0) == BIT_AND_EXPR
10209 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10210 {
10211 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10212 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10213 : TRUTH_XOR_EXPR,
10214 boolean_type_node,
10215 fold_convert_loc (loc, boolean_type_node, arg0),
10216 fold_convert_loc (loc, boolean_type_node, arg1));
10217
10218 if (code == EQ_EXPR)
10219 tem = invert_truthvalue_loc (loc, tem);
10220
10221 return fold_convert_loc (loc, type, tem);
10222 }
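
/* E.g. with int-typed truth operands, (a > b) & (c > d) is rewritten
   as a boolean TRUTH_AND_EXPR of the two comparisons and converted
   back to TYPE; for EQ_EXPR the TRUTH_XOR_EXPR result is inverted. */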
10223
10224 if (TREE_CODE_CLASS (code) == tcc_binary
10225 || TREE_CODE_CLASS (code) == tcc_comparison)
10226 {
10227 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10228 {
10229 tem = fold_build2_loc (loc, code, type,
10230 fold_convert_loc (loc, TREE_TYPE (op0),
10231 TREE_OPERAND (arg0, 1)), op1);
10232 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10233 tem);
10234 }
10235 if (TREE_CODE (arg1) == COMPOUND_EXPR
10236 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10237 {
10238 tem = fold_build2_loc (loc, code, type, op0,
10239 fold_convert_loc (loc, TREE_TYPE (op1),
10240 TREE_OPERAND (arg1, 1)));
10241 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10242 tem);
10243 }
10244
10245 if (TREE_CODE (arg0) == COND_EXPR
10246 || TREE_CODE (arg0) == VEC_COND_EXPR
10247 || COMPARISON_CLASS_P (arg0))
10248 {
10249 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10250 arg0, arg1,
10251 /*cond_first_p=*/1);
10252 if (tem != NULL_TREE)
10253 return tem;
10254 }
10255
10256 if (TREE_CODE (arg1) == COND_EXPR
10257 || TREE_CODE (arg1) == VEC_COND_EXPR
10258 || COMPARISON_CLASS_P (arg1))
10259 {
10260 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10261 arg1, arg0,
10262 /*cond_first_p=*/0);
10263 if (tem != NULL_TREE)
10264 return tem;
10265 }
10266 }
10267
10268 switch (code)
10269 {
10270 case MEM_REF:
10271 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10272 if (TREE_CODE (arg0) == ADDR_EXPR
10273 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10274 {
10275 tree iref = TREE_OPERAND (arg0, 0);
10276 return fold_build2 (MEM_REF, type,
10277 TREE_OPERAND (iref, 0),
10278 int_const_binop (PLUS_EXPR, arg1,
10279 TREE_OPERAND (iref, 1)));
10280 }
10281
10282 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10283 if (TREE_CODE (arg0) == ADDR_EXPR
10284 && handled_component_p (TREE_OPERAND (arg0, 0)))
10285 {
10286 tree base;
10287 HOST_WIDE_INT coffset;
10288 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10289 &coffset);
10290 if (!base)
10291 return NULL_TREE;
10292 return fold_build2 (MEM_REF, type,
10293 build_fold_addr_expr (base),
10294 int_const_binop (PLUS_EXPR, arg1,
10295 size_int (coffset)));
10296 }
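/* Editorial example (illustrative; struct s and field f are hypothetical):
     MEM[&s.f, 4]  ->  MEM[&s, offsetof (struct s, f) + 4]
   provided get_addr_base_and_unit_offset can compute a constant byte
   offset for the component reference.  */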
10297
10298 return NULL_TREE;
10299
10300 case POINTER_PLUS_EXPR:
10301 /* 0 +p index -> (type)index */
10302 if (integer_zerop (arg0))
10303 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10304
10305 /* PTR +p 0 -> PTR */
10306 if (integer_zerop (arg1))
10307 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10308
10309 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10310 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10311 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10312 return fold_convert_loc (loc, type,
10313 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10314 fold_convert_loc (loc, sizetype,
10315 arg1),
10316 fold_convert_loc (loc, sizetype,
10317 arg0)));
10318
10319 /* (PTR +p B) +p A -> PTR +p (B + A) */
10320 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10321 {
10322 tree inner;
10323 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10324 tree arg00 = TREE_OPERAND (arg0, 0);
10325 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10326 arg01, fold_convert_loc (loc, sizetype, arg1));
10327 return fold_convert_loc (loc, type,
10328 fold_build_pointer_plus_loc (loc,
10329 arg00, inner));
10330 }
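/* Editorial example (illustrative): constant offsets accumulate, e.g.
     (p p+ 4) p+ 8  ->  p p+ 12
   with the inner addition performed in sizetype.  */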
10331
10332 /* PTR_CST +p CST -> CST1 (fold the two constants into one). */
10333 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10334 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10335 fold_convert_loc (loc, type, arg1));
10336
10337 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10338 of the array. The loop optimizer sometimes produces this type of
10339 expression. */
10340 if (TREE_CODE (arg0) == ADDR_EXPR)
10341 {
10342 tem = try_move_mult_to_index (loc, arg0,
10343 fold_convert_loc (loc,
10344 ssizetype, arg1));
10345 if (tem)
10346 return fold_convert_loc (loc, type, tem);
10347 }
10348
10349 return NULL_TREE;
10350
10351 case PLUS_EXPR:
10352 /* A + (-B) -> A - B */
10353 if (TREE_CODE (arg1) == NEGATE_EXPR
10354 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10355 return fold_build2_loc (loc, MINUS_EXPR, type,
10356 fold_convert_loc (loc, type, arg0),
10357 fold_convert_loc (loc, type,
10358 TREE_OPERAND (arg1, 0)));
10359 /* (-A) + B -> B - A */
10360 if (TREE_CODE (arg0) == NEGATE_EXPR
10361 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10362 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10363 return fold_build2_loc (loc, MINUS_EXPR, type,
10364 fold_convert_loc (loc, type, arg1),
10365 fold_convert_loc (loc, type,
10366 TREE_OPERAND (arg0, 0)));
10367
10368 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10369 {
10370 /* Convert ~A + 1 to -A. */
10371 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10372 && integer_onep (arg1))
10373 return fold_build1_loc (loc, NEGATE_EXPR, type,
10374 fold_convert_loc (loc, type,
10375 TREE_OPERAND (arg0, 0)));
10376
10377 /* ~X + X is -1. */
10378 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10379 && !TYPE_OVERFLOW_TRAPS (type))
10380 {
10381 tree tem = TREE_OPERAND (arg0, 0);
10382
10383 STRIP_NOPS (tem);
10384 if (operand_equal_p (tem, arg1, 0))
10385 {
10386 t1 = build_all_ones_cst (type);
10387 return omit_one_operand_loc (loc, type, t1, arg1);
10388 }
10389 }
10390
10391 /* X + ~X is -1. */
10392 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10393 && !TYPE_OVERFLOW_TRAPS (type))
10394 {
10395 tree tem = TREE_OPERAND (arg1, 0);
10396
10397 STRIP_NOPS (tem);
10398 if (operand_equal_p (arg0, tem, 0))
10399 {
10400 t1 = build_all_ones_cst (type);
10401 return omit_one_operand_loc (loc, type, t1, arg0);
10402 }
10403 }
10404
10405 /* X + (X / CST) * -CST is X % CST. */
10406 if (TREE_CODE (arg1) == MULT_EXPR
10407 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10408 && operand_equal_p (arg0,
10409 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10410 {
10411 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10412 tree cst1 = TREE_OPERAND (arg1, 1);
10413 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10414 cst1, cst0);
10415 if (sum && integer_zerop (sum))
10416 return fold_convert_loc (loc, type,
10417 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10418 TREE_TYPE (arg0), arg0,
10419 cst0));
10420 }
10421 }
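/* Editorial examples (illustrative, hypothetical constants) of the
   integral folds above:
     ~x + 1            ->  -x
     ~x + x            ->  -1 (all-ones)
     x + (x / 7) * -7  ->  x % 7  */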
10422
10423 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same
10424 or the constant 1. Make sure the type is not saturating and has the signedness of
10425 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10426 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10427 if ((TREE_CODE (arg0) == MULT_EXPR
10428 || TREE_CODE (arg1) == MULT_EXPR)
10429 && !TYPE_SATURATING (type)
10430 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10431 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10432 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10433 {
10434 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10435 if (tem)
10436 return tem;
10437 }
10438
10439 if (! FLOAT_TYPE_P (type))
10440 {
10441 if (integer_zerop (arg1))
10442 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10443
10444 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10445 with a constant, and the two constants have no bits in common,
10446 we should treat this as a BIT_IOR_EXPR since this may produce more
10447 simplifications. */
10448 if (TREE_CODE (arg0) == BIT_AND_EXPR
10449 && TREE_CODE (arg1) == BIT_AND_EXPR
10450 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10451 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10452 && integer_zerop (const_binop (BIT_AND_EXPR,
10453 TREE_OPERAND (arg0, 1),
10454 TREE_OPERAND (arg1, 1))))
10455 {
10456 code = BIT_IOR_EXPR;
10457 goto bit_ior;
10458 }
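/* Editorial example (illustrative, hypothetical masks): 0xf0 and 0x0f
   share no bits, so
     (x & 0xf0) + (y & 0x0f)  ->  (x & 0xf0) | (y & 0x0f)
   and the BIT_IOR_EXPR folds get a chance to apply.  */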
10459
10460 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10461 (plus (plus (mult) (mult)) (foo)) so that we can
10462 take advantage of the factoring cases below. */
10463 if (TYPE_OVERFLOW_WRAPS (type)
10464 && (((TREE_CODE (arg0) == PLUS_EXPR
10465 || TREE_CODE (arg0) == MINUS_EXPR)
10466 && TREE_CODE (arg1) == MULT_EXPR)
10467 || ((TREE_CODE (arg1) == PLUS_EXPR
10468 || TREE_CODE (arg1) == MINUS_EXPR)
10469 && TREE_CODE (arg0) == MULT_EXPR)))
10470 {
10471 tree parg0, parg1, parg, marg;
10472 enum tree_code pcode;
10473
10474 if (TREE_CODE (arg1) == MULT_EXPR)
10475 parg = arg0, marg = arg1;
10476 else
10477 parg = arg1, marg = arg0;
10478 pcode = TREE_CODE (parg);
10479 parg0 = TREE_OPERAND (parg, 0);
10480 parg1 = TREE_OPERAND (parg, 1);
10481 STRIP_NOPS (parg0);
10482 STRIP_NOPS (parg1);
10483
10484 if (TREE_CODE (parg0) == MULT_EXPR
10485 && TREE_CODE (parg1) != MULT_EXPR)
10486 return fold_build2_loc (loc, pcode, type,
10487 fold_build2_loc (loc, PLUS_EXPR, type,
10488 fold_convert_loc (loc, type,
10489 parg0),
10490 fold_convert_loc (loc, type,
10491 marg)),
10492 fold_convert_loc (loc, type, parg1));
10493 if (TREE_CODE (parg0) != MULT_EXPR
10494 && TREE_CODE (parg1) == MULT_EXPR)
10495 return
10496 fold_build2_loc (loc, PLUS_EXPR, type,
10497 fold_convert_loc (loc, type, parg0),
10498 fold_build2_loc (loc, pcode, type,
10499 fold_convert_loc (loc, type, marg),
10500 fold_convert_loc (loc, type,
10501 parg1)));
10502 }
10503 }
10504 else
10505 {
10506 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10507 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10508 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10509
10510 /* Likewise if the operands are reversed. */
10511 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10512 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10513
10514 /* Convert X + -C into X - C. */
10515 if (TREE_CODE (arg1) == REAL_CST
10516 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10517 {
10518 tem = fold_negate_const (arg1, type);
10519 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10520 return fold_build2_loc (loc, MINUS_EXPR, type,
10521 fold_convert_loc (loc, type, arg0),
10522 fold_convert_loc (loc, type, tem));
10523 }
10524
10525 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10526 to __complex__ ( x, y ). This is not the same for SNaNs or
10527 if signed zeros are involved. */
10528 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10529 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10530 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10531 {
10532 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10533 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10534 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10535 bool arg0rz = false, arg0iz = false;
10536 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10537 || (arg0i && (arg0iz = real_zerop (arg0i))))
10538 {
10539 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10540 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10541 if (arg0rz && arg1i && real_zerop (arg1i))
10542 {
10543 tree rp = arg1r ? arg1r
10544 : build1 (REALPART_EXPR, rtype, arg1);
10545 tree ip = arg0i ? arg0i
10546 : build1 (IMAGPART_EXPR, rtype, arg0);
10547 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10548 }
10549 else if (arg0iz && arg1r && real_zerop (arg1r))
10550 {
10551 tree rp = arg0r ? arg0r
10552 : build1 (REALPART_EXPR, rtype, arg0);
10553 tree ip = arg1i ? arg1i
10554 : build1 (IMAGPART_EXPR, rtype, arg1);
10555 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10556 }
10557 }
10558 }
10559
10560 if (flag_unsafe_math_optimizations
10561 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10562 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10563 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10564 return tem;
10565
10566 /* Convert x+x into x*2.0. */
10567 if (operand_equal_p (arg0, arg1, 0)
10568 && SCALAR_FLOAT_TYPE_P (type))
10569 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10570 build_real (type, dconst2));
10571
10572 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10573 We associate floats only if the user has specified
10574 -fassociative-math. */
10575 if (flag_associative_math
10576 && TREE_CODE (arg1) == PLUS_EXPR
10577 && TREE_CODE (arg0) != MULT_EXPR)
10578 {
10579 tree tree10 = TREE_OPERAND (arg1, 0);
10580 tree tree11 = TREE_OPERAND (arg1, 1);
10581 if (TREE_CODE (tree11) == MULT_EXPR
10582 && TREE_CODE (tree10) == MULT_EXPR)
10583 {
10584 tree tree0;
10585 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10586 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10587 }
10588 }
10589 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10590 We associate floats only if the user has specified
10591 -fassociative-math. */
10592 if (flag_associative_math
10593 && TREE_CODE (arg0) == PLUS_EXPR
10594 && TREE_CODE (arg1) != MULT_EXPR)
10595 {
10596 tree tree00 = TREE_OPERAND (arg0, 0);
10597 tree tree01 = TREE_OPERAND (arg0, 1);
10598 if (TREE_CODE (tree01) == MULT_EXPR
10599 && TREE_CODE (tree00) == MULT_EXPR)
10600 {
10601 tree tree0;
10602 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10603 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10604 }
10605 }
10606 }
10607
10608 bit_rotate:
10609 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
10610 is a rotate of A by C1 bits. */
10611 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
10612 is a rotate of A by B bits. */
10613 {
10614 enum tree_code code0, code1;
10615 tree rtype;
10616 code0 = TREE_CODE (arg0);
10617 code1 = TREE_CODE (arg1);
10618 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10619 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10620 && operand_equal_p (TREE_OPERAND (arg0, 0),
10621 TREE_OPERAND (arg1, 0), 0)
10622 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10623 TYPE_UNSIGNED (rtype))
10624 /* Only create rotates in complete modes. Other cases are not
10625 expanded properly. */
10626 && (element_precision (rtype)
10627 == element_precision (TYPE_MODE (rtype))))
10628 {
10629 tree tree01, tree11;
10630 enum tree_code code01, code11;
10631
10632 tree01 = TREE_OPERAND (arg0, 1);
10633 tree11 = TREE_OPERAND (arg1, 1);
10634 STRIP_NOPS (tree01);
10635 STRIP_NOPS (tree11);
10636 code01 = TREE_CODE (tree01);
10637 code11 = TREE_CODE (tree11);
10638 if (code01 == INTEGER_CST
10639 && code11 == INTEGER_CST
10640 && TREE_INT_CST_HIGH (tree01) == 0
10641 && TREE_INT_CST_HIGH (tree11) == 0
10642 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10643 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10644 {
10645 tem = build2_loc (loc, LROTATE_EXPR,
10646 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10647 TREE_OPERAND (arg0, 0),
10648 code0 == LSHIFT_EXPR ? tree01 : tree11);
10649 return fold_convert_loc (loc, type, tem);
10650 }
10651 else if (code11 == MINUS_EXPR)
10652 {
10653 tree tree110, tree111;
10654 tree110 = TREE_OPERAND (tree11, 0);
10655 tree111 = TREE_OPERAND (tree11, 1);
10656 STRIP_NOPS (tree110);
10657 STRIP_NOPS (tree111);
10658 if (TREE_CODE (tree110) == INTEGER_CST
10659 && 0 == compare_tree_int (tree110,
10660 element_precision
10661 (TREE_TYPE (TREE_OPERAND
10662 (arg0, 0))))
10663 && operand_equal_p (tree01, tree111, 0))
10664 return
10665 fold_convert_loc (loc, type,
10666 build2 ((code0 == LSHIFT_EXPR
10667 ? LROTATE_EXPR
10668 : RROTATE_EXPR),
10669 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10670 TREE_OPERAND (arg0, 0), tree01));
10671 }
10672 else if (code01 == MINUS_EXPR)
10673 {
10674 tree tree010, tree011;
10675 tree010 = TREE_OPERAND (tree01, 0);
10676 tree011 = TREE_OPERAND (tree01, 1);
10677 STRIP_NOPS (tree010);
10678 STRIP_NOPS (tree011);
10679 if (TREE_CODE (tree010) == INTEGER_CST
10680 && 0 == compare_tree_int (tree010,
10681 element_precision
10682 (TREE_TYPE (TREE_OPERAND
10683 (arg0, 0))))
10684 && operand_equal_p (tree11, tree011, 0))
10685 return fold_convert_loc
10686 (loc, type,
10687 build2 ((code0 != LSHIFT_EXPR
10688 ? LROTATE_EXPR
10689 : RROTATE_EXPR),
10690 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10691 TREE_OPERAND (arg0, 0), tree11));
10692 }
10693 }
10694 }
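/* Editorial example (illustrative; n is a hypothetical shift count):
   for a 32-bit unsigned x, the recognition above rewrites
     (x << n) + (x >> (32 - n))
   (and the | and ^ forms that jump to bit_rotate) into a single
   LROTATE_EXPR of x by n bits.  */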
10695
10696 associate:
10697 /* In most languages, we can't associate operations on floats through
10698 parentheses. Rather than remember where the parentheses were, we
10699 don't associate floats at all, unless the user has specified
10700 -fassociative-math.
10701 And, we need to make sure type is not saturating. */
10702
10703 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10704 && !TYPE_SATURATING (type))
10705 {
10706 tree var0, con0, lit0, minus_lit0;
10707 tree var1, con1, lit1, minus_lit1;
10708 tree atype = type;
10709 bool ok = true;
10710
10711 /* Split both trees into variables, constants, and literals. Then
10712 associate each group together, the constants with literals,
10713 then the result with variables. This increases the chances of
10714 literals being recombined later and of generating relocatable
10715 expressions for the sum of a constant and literal. */
10716 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10717 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10718 code == MINUS_EXPR);
10719
10720 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10721 if (code == MINUS_EXPR)
10722 code = PLUS_EXPR;
10723
10724 /* With undefined overflow prefer doing association in a type
10725 which wraps on overflow, if that is one of the operand types. */
10726 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10727 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10728 {
10729 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10730 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10731 atype = TREE_TYPE (arg0);
10732 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10733 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10734 atype = TREE_TYPE (arg1);
10735 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10736 }
10737
10738 /* With undefined overflow we can only associate constants with one
10739 variable, and constants whose association doesn't overflow. */
10740 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10741 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10742 {
10743 if (var0 && var1)
10744 {
10745 tree tmp0 = var0;
10746 tree tmp1 = var1;
10747
10748 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10749 tmp0 = TREE_OPERAND (tmp0, 0);
10750 if (CONVERT_EXPR_P (tmp0)
10751 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10752 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10753 <= TYPE_PRECISION (atype)))
10754 tmp0 = TREE_OPERAND (tmp0, 0);
10755 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10756 tmp1 = TREE_OPERAND (tmp1, 0);
10757 if (CONVERT_EXPR_P (tmp1)
10758 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10759 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10760 <= TYPE_PRECISION (atype)))
10761 tmp1 = TREE_OPERAND (tmp1, 0);
10762 /* The only case we can still associate with two variables
10763 is if they are the same, modulo negation and bit-pattern
10764 preserving conversions. */
10765 if (!operand_equal_p (tmp0, tmp1, 0))
10766 ok = false;
10767 }
10768 }
10769
10770 /* Only do something if we found more than two objects. Otherwise,
10771 nothing has changed and we risk infinite recursion. */
10772 if (ok
10773 && (2 < ((var0 != 0) + (var1 != 0)
10774 + (con0 != 0) + (con1 != 0)
10775 + (lit0 != 0) + (lit1 != 0)
10776 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10777 {
10778 bool any_overflows = false;
10779 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10780 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10781 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10782 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10783 var0 = associate_trees (loc, var0, var1, code, atype);
10784 con0 = associate_trees (loc, con0, con1, code, atype);
10785 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10786 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10787 code, atype);
10788
10789 /* Preserve the MINUS_EXPR if the negative part of the literal is
10790 greater than the positive part. Otherwise, the multiplicative
10791 folding code (i.e. extract_muldiv) may be fooled when
10792 unsigned constants are subtracted, as in the following
10793 example: ((X*2 + 4) - 8U)/2. */
10794 if (minus_lit0 && lit0)
10795 {
10796 if (TREE_CODE (lit0) == INTEGER_CST
10797 && TREE_CODE (minus_lit0) == INTEGER_CST
10798 && tree_int_cst_lt (lit0, minus_lit0))
10799 {
10800 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10801 MINUS_EXPR, atype);
10802 lit0 = 0;
10803 }
10804 else
10805 {
10806 lit0 = associate_trees (loc, lit0, minus_lit0,
10807 MINUS_EXPR, atype);
10808 minus_lit0 = 0;
10809 }
10810 }
10811
10812 /* Don't introduce overflows through reassociation. */
10813 if (!any_overflows
10814 && ((lit0 && TREE_OVERFLOW (lit0))
10815 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10816 return NULL_TREE;
10817
10818 if (minus_lit0)
10819 {
10820 if (con0 == 0)
10821 return
10822 fold_convert_loc (loc, type,
10823 associate_trees (loc, var0, minus_lit0,
10824 MINUS_EXPR, atype));
10825 else
10826 {
10827 con0 = associate_trees (loc, con0, minus_lit0,
10828 MINUS_EXPR, atype);
10829 return
10830 fold_convert_loc (loc, type,
10831 associate_trees (loc, var0, con0,
10832 PLUS_EXPR, atype));
10833 }
10834 }
10835
10836 con0 = associate_trees (loc, con0, lit0, code, atype);
10837 return
10838 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10839 code, atype));
10840 }
10841 }
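/* Editorial example (illustrative): splitting into variables, constants
   and literals lets literals recombine, e.g. with wrapping arithmetic
     (x + 1) + (y + 2)  ->  (x + y) + 3.  */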
10842
10843 return NULL_TREE;
10844
10845 case MINUS_EXPR:
10846 /* Pointer simplifications for subtraction, simple reassociations. */
10847 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10848 {
10849 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10850 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10851 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10852 {
10853 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10854 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10855 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10856 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10857 return fold_build2_loc (loc, PLUS_EXPR, type,
10858 fold_build2_loc (loc, MINUS_EXPR, type,
10859 arg00, arg10),
10860 fold_build2_loc (loc, MINUS_EXPR, type,
10861 arg01, arg11));
10862 }
10863 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10864 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10865 {
10866 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10867 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10868 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10869 fold_convert_loc (loc, type, arg1));
10870 if (tmp)
10871 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10872 }
10873 }
10874 /* A - (-B) -> A + B */
10875 if (TREE_CODE (arg1) == NEGATE_EXPR)
10876 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10877 fold_convert_loc (loc, type,
10878 TREE_OPERAND (arg1, 0)));
10879 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10880 if (TREE_CODE (arg0) == NEGATE_EXPR
10881 && negate_expr_p (arg1)
10882 && reorder_operands_p (arg0, arg1))
10883 return fold_build2_loc (loc, MINUS_EXPR, type,
10884 fold_convert_loc (loc, type,
10885 negate_expr (arg1)),
10886 fold_convert_loc (loc, type,
10887 TREE_OPERAND (arg0, 0)));
10888 /* Convert -A - 1 to ~A. */
10889 if (TREE_CODE (type) != COMPLEX_TYPE
10890 && TREE_CODE (arg0) == NEGATE_EXPR
10891 && integer_onep (arg1)
10892 && !TYPE_OVERFLOW_TRAPS (type))
10893 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10894 fold_convert_loc (loc, type,
10895 TREE_OPERAND (arg0, 0)));
10896
10897 /* Convert -1 - A to ~A. */
10898 if (TREE_CODE (type) != COMPLEX_TYPE
10899 && integer_all_onesp (arg0))
10900 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10901
10902
10903 /* X - (X / Y) * Y is X % Y. */
10904 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10905 && TREE_CODE (arg1) == MULT_EXPR
10906 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10907 && operand_equal_p (arg0,
10908 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10909 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10910 TREE_OPERAND (arg1, 1), 0))
10911 return
10912 fold_convert_loc (loc, type,
10913 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10914 arg0, TREE_OPERAND (arg1, 1)));
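/* Editorial example (illustrative, hypothetical constant):
     x - (x / 16) * 16  ->  x % 16.  */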
10915
10916 if (! FLOAT_TYPE_P (type))
10917 {
10918 if (integer_zerop (arg0))
10919 return negate_expr (fold_convert_loc (loc, type, arg1));
10920 if (integer_zerop (arg1))
10921 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10922
10923 /* Fold A - (A & B) into ~B & A. */
10924 if (!TREE_SIDE_EFFECTS (arg0)
10925 && TREE_CODE (arg1) == BIT_AND_EXPR)
10926 {
10927 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10928 {
10929 tree arg10 = fold_convert_loc (loc, type,
10930 TREE_OPERAND (arg1, 0));
10931 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10932 fold_build1_loc (loc, BIT_NOT_EXPR,
10933 type, arg10),
10934 fold_convert_loc (loc, type, arg0));
10935 }
10936 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10937 {
10938 tree arg11 = fold_convert_loc (loc,
10939 type, TREE_OPERAND (arg1, 1));
10940 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10941 fold_build1_loc (loc, BIT_NOT_EXPR,
10942 type, arg11),
10943 fold_convert_loc (loc, type, arg0));
10944 }
10945 }
10946
10947 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10948 any power of 2 minus 1. */
10949 if (TREE_CODE (arg0) == BIT_AND_EXPR
10950 && TREE_CODE (arg1) == BIT_AND_EXPR
10951 && operand_equal_p (TREE_OPERAND (arg0, 0),
10952 TREE_OPERAND (arg1, 0), 0))
10953 {
10954 tree mask0 = TREE_OPERAND (arg0, 1);
10955 tree mask1 = TREE_OPERAND (arg1, 1);
10956 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10957
10958 if (operand_equal_p (tem, mask1, 0))
10959 {
10960 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10961 TREE_OPERAND (arg0, 0), mask1);
10962 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10963 }
10964 }
10965 }
10966
10967 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10968 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10969 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10970
10971 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10972 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10973 (-ARG1 + ARG0) reduces to -ARG1. */
10974 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10975 return negate_expr (fold_convert_loc (loc, type, arg1));
10976
10977 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10978 __complex__ ( x, -y ). This is not the same for SNaNs or if
10979 signed zeros are involved. */
10980 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10981 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10982 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10983 {
10984 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10985 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10986 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10987 bool arg0rz = false, arg0iz = false;
10988 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10989 || (arg0i && (arg0iz = real_zerop (arg0i))))
10990 {
10991 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10992 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10993 if (arg0rz && arg1i && real_zerop (arg1i))
10994 {
10995 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10996 arg1r ? arg1r
10997 : build1 (REALPART_EXPR, rtype, arg1));
10998 tree ip = arg0i ? arg0i
10999 : build1 (IMAGPART_EXPR, rtype, arg0);
11000 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11001 }
11002 else if (arg0iz && arg1r && real_zerop (arg1r))
11003 {
11004 tree rp = arg0r ? arg0r
11005 : build1 (REALPART_EXPR, rtype, arg0);
11006 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11007 arg1i ? arg1i
11008 : build1 (IMAGPART_EXPR, rtype, arg1));
11009 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11010 }
11011 }
11012 }
11013
11014 /* Fold &x - &x. This can happen from &x.foo - &x.
11015 This is unsafe for certain floats even in non-IEEE formats.
11016 In IEEE, it is unsafe because it does the wrong thing for NaNs.
11017 Also note that operand_equal_p is always false if an operand
11018 is volatile. */
11019
11020 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
11021 && operand_equal_p (arg0, arg1, 0))
11022 return build_zero_cst (type);
11023
11024 /* A - B -> A + (-B) if B is easily negatable. */
11025 if (negate_expr_p (arg1)
11026 && ((FLOAT_TYPE_P (type)
11027 /* Avoid this transformation if B is a positive REAL_CST. */
11028 && (TREE_CODE (arg1) != REAL_CST
11029 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
11030 || INTEGRAL_TYPE_P (type)))
11031 return fold_build2_loc (loc, PLUS_EXPR, type,
11032 fold_convert_loc (loc, type, arg0),
11033 fold_convert_loc (loc, type,
11034 negate_expr (arg1)));
11035
11036 /* Try folding difference of addresses. */
11037 {
11038 HOST_WIDE_INT diff;
11039
11040 if ((TREE_CODE (arg0) == ADDR_EXPR
11041 || TREE_CODE (arg1) == ADDR_EXPR)
11042 && ptr_difference_const (arg0, arg1, &diff))
11043 return build_int_cst_type (type, diff);
11044 }
11045
11046 /* Fold &a[i] - &a[j] to i-j. */
11047 if (TREE_CODE (arg0) == ADDR_EXPR
11048 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11049 && TREE_CODE (arg1) == ADDR_EXPR
11050 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11051 {
11052 tree tem = fold_addr_of_array_ref_difference (loc, type,
11053 TREE_OPERAND (arg0, 0),
11054 TREE_OPERAND (arg1, 0));
11055 if (tem)
11056 return tem;
11057 }
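/* Editorial note (illustrative; a, i, j are hypothetical): the helper
   above folds the address difference &a[i] - &a[j] to the index
   difference i - j scaled by the element size.  */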
11058
11059 if (FLOAT_TYPE_P (type)
11060 && flag_unsafe_math_optimizations
11061 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11062 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11063 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11064 return tem;
11065
11066 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same
11067 or the constant 1. Make sure the type is not saturating and has the signedness of
11068 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11069 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11070 if ((TREE_CODE (arg0) == MULT_EXPR
11071 || TREE_CODE (arg1) == MULT_EXPR)
11072 && !TYPE_SATURATING (type)
11073 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11074 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11075 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11076 {
11077 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11078 if (tem)
11079 return tem;
11080 }
11081
11082 goto associate;
11083
11084 case MULT_EXPR:
11085 /* (-A) * (-B) -> A * B */
11086 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11087 return fold_build2_loc (loc, MULT_EXPR, type,
11088 fold_convert_loc (loc, type,
11089 TREE_OPERAND (arg0, 0)),
11090 fold_convert_loc (loc, type,
11091 negate_expr (arg1)));
11092 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11093 return fold_build2_loc (loc, MULT_EXPR, type,
11094 fold_convert_loc (loc, type,
11095 negate_expr (arg0)),
11096 fold_convert_loc (loc, type,
11097 TREE_OPERAND (arg1, 0)));
11098
11099 if (! FLOAT_TYPE_P (type))
11100 {
11101 if (integer_zerop (arg1))
11102 return omit_one_operand_loc (loc, type, arg1, arg0);
11103 if (integer_onep (arg1))
11104 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11105 /* Transform x * -1 into -x. Make sure to do the negation
11106 on the original operand with conversions not stripped
11107 because we can only strip non-sign-changing conversions. */
11108 if (integer_minus_onep (arg1))
11109 return fold_convert_loc (loc, type, negate_expr (op0));
11110 /* Transform x * -C into -x * C if x is easily negatable. */
11111 if (TREE_CODE (arg1) == INTEGER_CST
11112 && tree_int_cst_sgn (arg1) == -1
11113 && negate_expr_p (arg0)
11114 && (tem = negate_expr (arg1)) != arg1
11115 && !TREE_OVERFLOW (tem))
11116 return fold_build2_loc (loc, MULT_EXPR, type,
11117 fold_convert_loc (loc, type,
11118 negate_expr (arg0)),
11119 tem);
11120
11121 /* (a * (1 << b)) is (a << b) */
11122 if (TREE_CODE (arg1) == LSHIFT_EXPR
11123 && integer_onep (TREE_OPERAND (arg1, 0)))
11124 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11125 TREE_OPERAND (arg1, 1));
11126 if (TREE_CODE (arg0) == LSHIFT_EXPR
11127 && integer_onep (TREE_OPERAND (arg0, 0)))
11128 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11129 TREE_OPERAND (arg0, 1));
11130
11131 /* (A + A) * C -> A * 2 * C */
11132 if (TREE_CODE (arg0) == PLUS_EXPR
11133 && TREE_CODE (arg1) == INTEGER_CST
11134 && operand_equal_p (TREE_OPERAND (arg0, 0),
11135 TREE_OPERAND (arg0, 1), 0))
11136 return fold_build2_loc (loc, MULT_EXPR, type,
11137 omit_one_operand_loc (loc, type,
11138 TREE_OPERAND (arg0, 0),
11139 TREE_OPERAND (arg0, 1)),
11140 fold_build2_loc (loc, MULT_EXPR, type,
11141 build_int_cst (type, 2), arg1));
11142
11143 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11144 sign-changing only. */
11145 if (TREE_CODE (arg1) == INTEGER_CST
11146 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11147 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11148 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11149
11150 strict_overflow_p = false;
11151 if (TREE_CODE (arg1) == INTEGER_CST
11152 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11153 &strict_overflow_p)))
11154 {
11155 if (strict_overflow_p)
11156 fold_overflow_warning (("assuming signed overflow does not "
11157 "occur when simplifying "
11158 "multiplication"),
11159 WARN_STRICT_OVERFLOW_MISC);
11160 return fold_convert_loc (loc, type, tem);
11161 }
11162
11163 /* Optimize z * conj(z) for integer complex numbers. */
11164 if (TREE_CODE (arg0) == CONJ_EXPR
11165 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11166 return fold_mult_zconjz (loc, type, arg1);
11167 if (TREE_CODE (arg1) == CONJ_EXPR
11168 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11169 return fold_mult_zconjz (loc, type, arg0);
11170 }
11171 else
11172 {
11173 /* Maybe fold x * 0 to 0. The expressions aren't the same
11174 when x is NaN, since x * 0 is also NaN. Nor are they the
11175 same in modes with signed zeros, since multiplying a
11176 negative value by 0 gives -0, not +0. */
11177 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11178 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11179 && real_zerop (arg1))
11180 return omit_one_operand_loc (loc, type, arg1, arg0);
11181 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11182 Likewise for complex arithmetic with signed zeros. */
11183 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11184 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11185 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11186 && real_onep (arg1))
11187 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11188
11189 /* Transform x * -1.0 into -x. */
11190 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11191 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11192 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11193 && real_minus_onep (arg1))
11194 return fold_convert_loc (loc, type, negate_expr (arg0));
11195
11196 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11197 the result for floating point types due to rounding, so it is applied
11198 only if -fassociative-math was specified. */
11199 if (flag_associative_math
11200 && TREE_CODE (arg0) == RDIV_EXPR
11201 && TREE_CODE (arg1) == REAL_CST
11202 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11203 {
11204 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11205 arg1);
11206 if (tem)
11207 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11208 TREE_OPERAND (arg0, 1));
11209 }
11210
11211 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11212 if (operand_equal_p (arg0, arg1, 0))
11213 {
11214 tree tem = fold_strip_sign_ops (arg0);
11215 if (tem != NULL_TREE)
11216 {
11217 tem = fold_convert_loc (loc, type, tem);
11218 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11219 }
11220 }
11221
11222 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11223 This is not the same for NaNs or if signed zeros are
11224 involved. */
11225 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11226 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11227 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11228 && TREE_CODE (arg1) == COMPLEX_CST
11229 && real_zerop (TREE_REALPART (arg1)))
11230 {
11231 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11232 if (real_onep (TREE_IMAGPART (arg1)))
11233 return
11234 fold_build2_loc (loc, COMPLEX_EXPR, type,
11235 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11236 rtype, arg0)),
11237 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11238 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11239 return
11240 fold_build2_loc (loc, COMPLEX_EXPR, type,
11241 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11242 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11243 rtype, arg0)));
11244 }
11245
11246 /* Optimize z * conj(z) for floating point complex numbers.
11247 Guarded by flag_unsafe_math_optimizations as non-finite
11248 imaginary components don't produce scalar results. */
11249 if (flag_unsafe_math_optimizations
11250 && TREE_CODE (arg0) == CONJ_EXPR
11251 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11252 return fold_mult_zconjz (loc, type, arg1);
11253 if (flag_unsafe_math_optimizations
11254 && TREE_CODE (arg1) == CONJ_EXPR
11255 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11256 return fold_mult_zconjz (loc, type, arg0);
11257
11258 if (flag_unsafe_math_optimizations)
11259 {
11260 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11261 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11262
11263 /* Optimizations of root(...)*root(...). */
11264 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11265 {
11266 tree rootfn, arg;
11267 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11268 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11269
11270 /* Optimize sqrt(x)*sqrt(x) as x. */
11271 if (BUILTIN_SQRT_P (fcode0)
11272 && operand_equal_p (arg00, arg10, 0)
11273 && ! HONOR_SNANS (TYPE_MODE (type)))
11274 return arg00;
11275
11276 /* Optimize root(x)*root(y) as root(x*y). */
11277 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11278 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11279 return build_call_expr_loc (loc, rootfn, 1, arg);
11280 }
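/* Editorial examples (illustrative), valid only under
   -funsafe-math-optimizations:
     sqrt (x) * sqrt (x)  ->  x
     sqrt (x) * sqrt (y)  ->  sqrt (x * y)
     cbrt (x) * cbrt (y)  ->  cbrt (x * y)  */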
11281
11282 /* Optimize expN(x)*expN(y) as expN(x+y). */
11283 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11284 {
11285 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11286 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11287 CALL_EXPR_ARG (arg0, 0),
11288 CALL_EXPR_ARG (arg1, 0));
11289 return build_call_expr_loc (loc, expfn, 1, arg);
11290 }
11291
11292 /* Optimizations of pow(...)*pow(...). */
11293 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11294 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11295 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11296 {
11297 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11298 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11299 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11300 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11301
11302 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11303 if (operand_equal_p (arg01, arg11, 0))
11304 {
11305 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11306 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11307 arg00, arg10);
11308 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11309 }
11310
11311 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11312 if (operand_equal_p (arg00, arg10, 0))
11313 {
11314 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11315 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11316 arg01, arg11);
11317 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11318 }
11319 }
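/* Editorial examples (illustrative) of the two pow folds above:
     pow (x, y) * pow (z, y)  ->  pow (x * z, y)
     pow (x, y) * pow (x, z)  ->  pow (x, y + z)  */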
11320
11321 /* Optimize tan(x)*cos(x) as sin(x). */
11322 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11323 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11324 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11325 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11326 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11327 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11328 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11329 CALL_EXPR_ARG (arg1, 0), 0))
11330 {
11331 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11332
11333 if (sinfn != NULL_TREE)
11334 return build_call_expr_loc (loc, sinfn, 1,
11335 CALL_EXPR_ARG (arg0, 0));
11336 }
11337
11338 /* Optimize x*pow(x,c) as pow(x,c+1). */
11339 if (fcode1 == BUILT_IN_POW
11340 || fcode1 == BUILT_IN_POWF
11341 || fcode1 == BUILT_IN_POWL)
11342 {
11343 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11344 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11345 if (TREE_CODE (arg11) == REAL_CST
11346 && !TREE_OVERFLOW (arg11)
11347 && operand_equal_p (arg0, arg10, 0))
11348 {
11349 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11350 REAL_VALUE_TYPE c;
11351 tree arg;
11352
11353 c = TREE_REAL_CST (arg11);
11354 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11355 arg = build_real (type, c);
11356 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11357 }
11358 }
11359
11360 /* Optimize pow(x,c)*x as pow(x,c+1). */
11361 if (fcode0 == BUILT_IN_POW
11362 || fcode0 == BUILT_IN_POWF
11363 || fcode0 == BUILT_IN_POWL)
11364 {
11365 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11366 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11367 if (TREE_CODE (arg01) == REAL_CST
11368 && !TREE_OVERFLOW (arg01)
11369 && operand_equal_p (arg1, arg00, 0))
11370 {
11371 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11372 REAL_VALUE_TYPE c;
11373 tree arg;
11374
11375 c = TREE_REAL_CST (arg01);
11376 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11377 arg = build_real (type, c);
11378 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11379 }
11380 }
11381
11382 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11383 if (!in_gimple_form
11384 && optimize
11385 && operand_equal_p (arg0, arg1, 0))
11386 {
11387 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11388
11389 if (powfn)
11390 {
11391 tree arg = build_real (type, dconst2);
11392 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11393 }
11394 }
11395 }
11396 }
11397 goto associate;
11398
11399 case BIT_IOR_EXPR:
11400 bit_ior:
11401 if (integer_all_onesp (arg1))
11402 return omit_one_operand_loc (loc, type, arg1, arg0);
11403 if (integer_zerop (arg1))
11404 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11405 if (operand_equal_p (arg0, arg1, 0))
11406 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11407
11408 /* ~X | X is -1. */
11409 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11410 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11411 {
11412 t1 = build_zero_cst (type);
11413 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11414 return omit_one_operand_loc (loc, type, t1, arg1);
11415 }
11416
11417 /* X | ~X is -1. */
11418 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11419 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11420 {
11421 t1 = build_zero_cst (type);
11422 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11423 return omit_one_operand_loc (loc, type, t1, arg0);
11424 }
11425
11426 /* Canonicalize (X & C1) | C2. */
11427 if (TREE_CODE (arg0) == BIT_AND_EXPR
11428 && TREE_CODE (arg1) == INTEGER_CST
11429 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11430 {
11431 double_int c1, c2, c3, msk;
11432 int width = TYPE_PRECISION (type), w;
11433 bool try_simplify = true;
11434
11435 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11436 c2 = tree_to_double_int (arg1);
11437
11438 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11439 if ((c1 & c2) == c1)
11440 return omit_one_operand_loc (loc, type, arg1,
11441 TREE_OPERAND (arg0, 0));
11442
11443 msk = double_int::mask (width);
11444
11445 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11446 if (msk.and_not (c1 | c2).is_zero ())
11447 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11448 TREE_OPERAND (arg0, 0), arg1);
11449
11450 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11451 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11452 mode which allows further optimizations. */
11453 c1 &= msk;
11454 c2 &= msk;
11455 c3 = c1.and_not (c2);
11456 for (w = BITS_PER_UNIT;
11457 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11458 w <<= 1)
11459 {
11460 unsigned HOST_WIDE_INT mask
11461 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11462 if (((c1.low | c2.low) & mask) == mask
11463 && (c1.low & ~mask) == 0 && c1.high == 0)
11464 {
11465 c3 = double_int::from_uhwi (mask);
11466 break;
11467 }
11468 }
11469
11470 /* If X is a tree of the form (Y * K1) & K2, this transformation might
11471 conflict with the (X * CST1) & CST2 optimization in the BIT_AND_EXPR
11472 case and end up in an infinite recursion. */
11473 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11474 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11475 == INTEGER_CST)
11476 {
11477 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11478 double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
11479
11480 try_simplify = (masked != c1);
11481 }
11482
11483 if (try_simplify && c3 != c1)
11484 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11485 fold_build2_loc (loc, BIT_AND_EXPR, type,
11486 TREE_OPERAND (arg0, 0),
11487 double_int_to_tree (type,
11488 c3)),
11489 arg1);
11490 }
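/* Editorial example (illustrative, hypothetical masks): with C1 = 0x0f
   and C2 = 0x0c we have C1 & ~C2 = 0x03, so
     (x & 0x0f) | 0x0c  ->  (x & 0x03) | 0x0c.  */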
11491
11492 /* (X & Y) | Y is (X, Y). */
11493 if (TREE_CODE (arg0) == BIT_AND_EXPR
11494 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11495 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11496 /* (X & Y) | X is (Y, X). */
11497 if (TREE_CODE (arg0) == BIT_AND_EXPR
11498 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11499 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11500 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11501 /* X | (X & Y) is (Y, X). */
11502 if (TREE_CODE (arg1) == BIT_AND_EXPR
11503 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11504 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11505 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11506 /* X | (Y & X) is (Y, X). */
11507 if (TREE_CODE (arg1) == BIT_AND_EXPR
11508 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11509 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11510 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11511
11512 /* (X & ~Y) | (~X & Y) is X ^ Y */
11513 if (TREE_CODE (arg0) == BIT_AND_EXPR
11514 && TREE_CODE (arg1) == BIT_AND_EXPR)
11515 {
11516 tree a0, a1, l0, l1, n0, n1;
11517
11518 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11519 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11520
11521 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11522 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11523
11524 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11525 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11526
11527 if ((operand_equal_p (n0, a0, 0)
11528 && operand_equal_p (n1, a1, 0))
11529 || (operand_equal_p (n0, a1, 0)
11530 && operand_equal_p (n1, a0, 0)))
11531 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11532 }
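/* Editorial example (illustrative): with arg0 = x & ~y and
   arg1 = ~x & y the equality checks above succeed and
     (x & ~y) | (~x & y)  ->  x ^ y.  */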
11533
11534 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11535 if (t1 != NULL_TREE)
11536 return t1;
11537
11538 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11539
11540 This results in more efficient code for machines without a NAND
11541 instruction. Combine will canonicalize to the first form
11542 which will allow use of NAND instructions provided by the
11543 backend if they exist. */
11544 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11545 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11546 {
11547 return
11548 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11549 build2 (BIT_AND_EXPR, type,
11550 fold_convert_loc (loc, type,
11551 TREE_OPERAND (arg0, 0)),
11552 fold_convert_loc (loc, type,
11553 TREE_OPERAND (arg1, 0))));
11554 }
11555
11556 /* See if this can be simplified into a rotate first. If that
11557 is unsuccessful, continue in the association code. */
11558 goto bit_rotate;
11559
11560 case BIT_XOR_EXPR:
11561 if (integer_zerop (arg1))
11562 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11563 if (integer_all_onesp (arg1))
11564 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11565 if (operand_equal_p (arg0, arg1, 0))
11566 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11567
11568 /* ~X ^ X is -1. */
11569 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11570 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11571 {
11572 t1 = build_zero_cst (type);
11573 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11574 return omit_one_operand_loc (loc, type, t1, arg1);
11575 }
11576
11577 /* X ^ ~X is -1. */
11578 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11579 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11580 {
11581 t1 = build_zero_cst (type);
11582 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11583 return omit_one_operand_loc (loc, type, t1, arg0);
11584 }
11585
11586 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11587 with a constant, and the two constants have no bits in common,
11588 we should treat this as a BIT_IOR_EXPR since this may produce more
11589 simplifications. */
11590 if (TREE_CODE (arg0) == BIT_AND_EXPR
11591 && TREE_CODE (arg1) == BIT_AND_EXPR
11592 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11593 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11594 && integer_zerop (const_binop (BIT_AND_EXPR,
11595 TREE_OPERAND (arg0, 1),
11596 TREE_OPERAND (arg1, 1))))
11597 {
11598 code = BIT_IOR_EXPR;
11599 goto bit_ior;
11600 }
11601
11602 /* (X | Y) ^ X -> Y & ~X. */
11603 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11604 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11605 {
11606 tree t2 = TREE_OPERAND (arg0, 1);
11607 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11608 arg1);
11609 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11610 fold_convert_loc (loc, type, t2),
11611 fold_convert_loc (loc, type, t1));
11612 return t1;
11613 }
11614
11615 /* (Y | X) ^ X -> Y & ~X. */
11616 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11617 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11618 {
11619 tree t2 = TREE_OPERAND (arg0, 0);
11620 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11621 arg1);
11622 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11623 fold_convert_loc (loc, type, t2),
11624 fold_convert_loc (loc, type, t1));
11625 return t1;
11626 }
11627
11628 /* X ^ (X | Y) -> Y & ~X. */
11629 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11630 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11631 {
11632 tree t2 = TREE_OPERAND (arg1, 1);
11633 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11634 arg0);
11635 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11636 fold_convert_loc (loc, type, t2),
11637 fold_convert_loc (loc, type, t1));
11638 return t1;
11639 }
11640
11641 /* X ^ (Y | X) -> Y & ~X. */
11642 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11643 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11644 {
11645 tree t2 = TREE_OPERAND (arg1, 0);
11646 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11647 arg0);
11648 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11649 fold_convert_loc (loc, type, t2),
11650 fold_convert_loc (loc, type, t1));
11651 return t1;
11652 }
11653
11654 /* Convert ~X ^ ~Y to X ^ Y. */
11655 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11656 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11657 return fold_build2_loc (loc, code, type,
11658 fold_convert_loc (loc, type,
11659 TREE_OPERAND (arg0, 0)),
11660 fold_convert_loc (loc, type,
11661 TREE_OPERAND (arg1, 0)));
11662
11663 /* Convert ~X ^ C to X ^ ~C. */
11664 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11665 && TREE_CODE (arg1) == INTEGER_CST)
11666 return fold_build2_loc (loc, code, type,
11667 fold_convert_loc (loc, type,
11668 TREE_OPERAND (arg0, 0)),
11669 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11670
11671 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11672 if (TREE_CODE (arg0) == BIT_AND_EXPR
11673 && integer_onep (TREE_OPERAND (arg0, 1))
11674 && integer_onep (arg1))
11675 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11676 build_zero_cst (TREE_TYPE (arg0)));
11677
11678 /* Fold (X & Y) ^ Y as ~X & Y. */
11679 if (TREE_CODE (arg0) == BIT_AND_EXPR
11680 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11681 {
11682 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11683 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11684 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11685 fold_convert_loc (loc, type, arg1));
11686 }
11687 /* Fold (X & Y) ^ X as ~Y & X. */
11688 if (TREE_CODE (arg0) == BIT_AND_EXPR
11689 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11690 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11691 {
11692 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11693 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11694 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11695 fold_convert_loc (loc, type, arg1));
11696 }
11697 /* Fold X ^ (X & Y) as X & ~Y. */
11698 if (TREE_CODE (arg1) == BIT_AND_EXPR
11699 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11700 {
11701 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11702 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11703 fold_convert_loc (loc, type, arg0),
11704 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11705 }
11706 /* Fold X ^ (Y & X) as ~Y & X. */
11707 if (TREE_CODE (arg1) == BIT_AND_EXPR
11708 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11709 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11710 {
11711 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11712 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11713 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11714 fold_convert_loc (loc, type, arg0));
11715 }
11716
11717 /* See if this can be simplified into a rotate first. If that
11718 is unsuccessful, continue in the association code. */
11719 goto bit_rotate;
11720
11721 case BIT_AND_EXPR:
11722 if (integer_all_onesp (arg1))
11723 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11724 if (integer_zerop (arg1))
11725 return omit_one_operand_loc (loc, type, arg1, arg0);
11726 if (operand_equal_p (arg0, arg1, 0))
11727 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11728
11729 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11730 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11731 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11732 || (TREE_CODE (arg0) == EQ_EXPR
11733 && integer_zerop (TREE_OPERAND (arg0, 1))))
11734 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11735 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11736
11737 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11738 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11739 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11740 || (TREE_CODE (arg1) == EQ_EXPR
11741 && integer_zerop (TREE_OPERAND (arg1, 1))))
11742 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11743 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11744
11745 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11746 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11747 && TREE_CODE (arg1) == INTEGER_CST
11748 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11749 {
11750 tree tmp1 = fold_convert_loc (loc, type, arg1);
11751 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11752 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11753 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11754 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11755 return
11756 fold_convert_loc (loc, type,
11757 fold_build2_loc (loc, BIT_IOR_EXPR,
11758 type, tmp2, tmp3));
11759 }
11760
11761 /* (X | Y) & Y is (X, Y). */
11762 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11763 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11764 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11765 /* (X | Y) & X is (Y, X). */
11766 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11767 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11768 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11769 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11770 /* X & (X | Y) is (Y, X). */
11771 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11772 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11773 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11774 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11775 /* X & (Y | X) is (Y, X). */
11776 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11777 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11778 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11779 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11780
11781 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11782 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11783 && integer_onep (TREE_OPERAND (arg0, 1))
11784 && integer_onep (arg1))
11785 {
11786 tree tem2;
11787 tem = TREE_OPERAND (arg0, 0);
11788 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11789 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11790 tem, tem2);
11791 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11792 build_zero_cst (TREE_TYPE (tem)));
11793 }
11794 /* Fold ~X & 1 as (X & 1) == 0. */
11795 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11796 && integer_onep (arg1))
11797 {
11798 tree tem2;
11799 tem = TREE_OPERAND (arg0, 0);
11800 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11801 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11802 tem, tem2);
11803 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11804 build_zero_cst (TREE_TYPE (tem)));
11805 }
11806 /* Fold !X & 1 as X == 0. */
11807 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11808 && integer_onep (arg1))
11809 {
11810 tem = TREE_OPERAND (arg0, 0);
11811 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11812 build_zero_cst (TREE_TYPE (tem)));
11813 }
11814
11815 /* Fold (X ^ Y) & Y as ~X & Y. */
11816 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11817 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11818 {
11819 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11820 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11821 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11822 fold_convert_loc (loc, type, arg1));
11823 }
11824 /* Fold (X ^ Y) & X as ~Y & X. */
11825 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11826 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11827 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11828 {
11829 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11830 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11831 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11832 fold_convert_loc (loc, type, arg1));
11833 }
11834 /* Fold X & (X ^ Y) as X & ~Y. */
11835 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11836 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11837 {
11838 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11839 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11840 fold_convert_loc (loc, type, arg0),
11841 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11842 }
11843 /* Fold X & (Y ^ X) as ~Y & X. */
11844 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11845 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11846 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11847 {
11848 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11849 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11850 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11851 fold_convert_loc (loc, type, arg0));
11852 }
11853
11854 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11855 multiple of 1 << CST. */
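/* E.g. (x * 8) & -4 folds to x * 8, since the product is always a
multiple of 4 and its two low bits are therefore already clear. */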
11856 if (TREE_CODE (arg1) == INTEGER_CST)
11857 {
11858 double_int cst1 = tree_to_double_int (arg1);
11859 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11860 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11861 if ((cst1 & ncst1) == ncst1
11862 && multiple_of_p (type, arg0,
11863 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11864 return fold_convert_loc (loc, type, arg0);
11865 }
11866
11867 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11868 bits from CST2. */
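/* E.g. (x * 4) & 3 folds to 0, and (x * 4) & 0xff becomes
(x * 4) & 0xfc, because the two low bits of the product are
known to be zero. */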
11869 if (TREE_CODE (arg1) == INTEGER_CST
11870 && TREE_CODE (arg0) == MULT_EXPR
11871 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11872 {
11873 double_int masked
11874 = mask_with_tz (type, tree_to_double_int (arg1),
11875 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11876
11877 if (masked.is_zero ())
11878 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11879 arg0, arg1);
11880 else if (masked != tree_to_double_int (arg1))
11881 return fold_build2_loc (loc, code, type, op0,
11882 double_int_to_tree (type, masked));
11883 }
11884
11885 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11886 ((A & N) + B) & M -> (A + B) & M
11887 Similarly if (N & M) == 0,
11888 ((A | N) + B) & M -> (A + B) & M
11889 and for - instead of + (or unary - instead of +)
11890 and/or ^ instead of |.
11891 If B is constant and (B & M) == 0, fold into A & M. */
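/* E.g. with M == 7, ((a & 15) + b) & 7 becomes (a + b) & 7
since 15 & 7 == 7, and ((a | 8) + b) & 7 becomes (a + b) & 7
since 8 & 7 == 0. */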
11892 if (tree_fits_uhwi_p (arg1))
11893 {
11894 unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
11895 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11896 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11897 && (TREE_CODE (arg0) == PLUS_EXPR
11898 || TREE_CODE (arg0) == MINUS_EXPR
11899 || TREE_CODE (arg0) == NEGATE_EXPR)
11900 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11901 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11902 {
11903 tree pmop[2];
11904 int which = 0;
11905 unsigned HOST_WIDE_INT cst0;
11906
11907 /* Now we know that arg0 is (C + D) or (C - D) or
11908 -C and arg1 (M) is == (1LL << cst) - 1.
11909 Store C into PMOP[0] and D into PMOP[1]. */
11910 pmop[0] = TREE_OPERAND (arg0, 0);
11911 pmop[1] = NULL;
11912 if (TREE_CODE (arg0) != NEGATE_EXPR)
11913 {
11914 pmop[1] = TREE_OPERAND (arg0, 1);
11915 which = 1;
11916 }
11917
11918 if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11919 || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11920 & cst1) != cst1)
11921 which = -1;
11922
11923 for (; which >= 0; which--)
11924 switch (TREE_CODE (pmop[which]))
11925 {
11926 case BIT_AND_EXPR:
11927 case BIT_IOR_EXPR:
11928 case BIT_XOR_EXPR:
11929 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11930 != INTEGER_CST)
11931 break;
11932 /* tree_to_[su]hwi not used, because we don't care about
11933 the upper bits. */
11934 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11935 cst0 &= cst1;
11936 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11937 {
11938 if (cst0 != cst1)
11939 break;
11940 }
11941 else if (cst0 != 0)
11942 break;
11943 /* If C or D is of the form (A & N) where
11944 (N & M) == M, or of the form (A | N) or
11945 (A ^ N) where (N & M) == 0, replace it with A. */
11946 pmop[which] = TREE_OPERAND (pmop[which], 0);
11947 break;
11948 case INTEGER_CST:
11949 /* If C or D is a N where (N & M) == 0, it can be
11950 omitted (assumed 0). */
11951 if ((TREE_CODE (arg0) == PLUS_EXPR
11952 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11953 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11954 pmop[which] = NULL;
11955 break;
11956 default:
11957 break;
11958 }
11959
11960 /* Only build anything new if we optimized one or both arguments
11961 above. */
11962 if (pmop[0] != TREE_OPERAND (arg0, 0)
11963 || (TREE_CODE (arg0) != NEGATE_EXPR
11964 && pmop[1] != TREE_OPERAND (arg0, 1)))
11965 {
11966 tree utype = TREE_TYPE (arg0);
11967 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11968 {
11969 /* Perform the operations in a type that has defined
11970 overflow behavior. */
11971 utype = unsigned_type_for (TREE_TYPE (arg0));
11972 if (pmop[0] != NULL)
11973 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11974 if (pmop[1] != NULL)
11975 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11976 }
11977
11978 if (TREE_CODE (arg0) == NEGATE_EXPR)
11979 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11980 else if (TREE_CODE (arg0) == PLUS_EXPR)
11981 {
11982 if (pmop[0] != NULL && pmop[1] != NULL)
11983 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11984 pmop[0], pmop[1]);
11985 else if (pmop[0] != NULL)
11986 tem = pmop[0];
11987 else if (pmop[1] != NULL)
11988 tem = pmop[1];
11989 else
11990 return build_int_cst (type, 0);
11991 }
11992 else if (pmop[0] == NULL)
11993 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11994 else
11995 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11996 pmop[0], pmop[1]);
11997 /* TEM is now the new binary +, - or unary - replacement. */
11998 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11999 fold_convert_loc (loc, utype, arg1));
12000 return fold_convert_loc (loc, type, tem);
12001 }
12002 }
12003 }
12004
12005 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
12006 if (t1 != NULL_TREE)
12007 return t1;
12008 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12009 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12010 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12011 {
12012 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12013
12014 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
12015 && (~TREE_INT_CST_LOW (arg1)
12016 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
12017 return
12018 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12019 }
12020
12021 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
12022
12023 This results in more efficient code for machines without a NOR
12024 instruction. Combine will canonicalize to the first form,
12025 which will allow use of NOR instructions provided by the
12026 backend if they exist. */
12027 if (TREE_CODE (arg0) == BIT_NOT_EXPR
12028 && TREE_CODE (arg1) == BIT_NOT_EXPR)
12029 {
12030 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
12031 build2 (BIT_IOR_EXPR, type,
12032 fold_convert_loc (loc, type,
12033 TREE_OPERAND (arg0, 0)),
12034 fold_convert_loc (loc, type,
12035 TREE_OPERAND (arg1, 0))));
12036 }
12037
12038 /* If arg0 is derived from the address of an object or function, we may
12039 be able to fold this expression using the object or function's
12040 alignment. */
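/* E.g. if ARG0 is the address of an object known to have 8-byte
alignment, ARG0 & 7 folds to the constant 0. */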
12041 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
12042 {
12043 unsigned HOST_WIDE_INT modulus, residue;
12044 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
12045
12046 modulus = get_pointer_modulus_and_residue (arg0, &residue,
12047 integer_onep (arg1));
12048
12049 /* This works because modulus is a power of 2. If this weren't the
12050 case, we'd have to replace it by its greatest power-of-2
12051 divisor: modulus & -modulus. */
12052 if (low < modulus)
12053 return build_int_cst (type, residue & low);
12054 }
12055
12056 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
12057 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
12058 if the new mask might be further optimized. */
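/* E.g. for a 32-bit unsigned X, (X >> 24) & 0xff widens the mask
to 0xffffffff, which subsequently folds away, leaving X >> 24. */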
12059 if ((TREE_CODE (arg0) == LSHIFT_EXPR
12060 || TREE_CODE (arg0) == RSHIFT_EXPR)
12061 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12062 && TREE_CODE (arg1) == INTEGER_CST
12063 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12064 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12065 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12066 < TYPE_PRECISION (TREE_TYPE (arg0))))
12067 {
12068 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12069 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12070 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12071 tree shift_type = TREE_TYPE (arg0);
12072
12073 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12074 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12075 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12076 && TYPE_PRECISION (TREE_TYPE (arg0))
12077 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12078 {
12079 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12080 tree arg00 = TREE_OPERAND (arg0, 0);
12081 /* See if more bits can be proven as zero because of
12082 zero extension. */
12083 if (TREE_CODE (arg00) == NOP_EXPR
12084 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12085 {
12086 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12087 if (TYPE_PRECISION (inner_type)
12088 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12089 && TYPE_PRECISION (inner_type) < prec)
12090 {
12091 prec = TYPE_PRECISION (inner_type);
12092 /* See if we can shorten the right shift. */
12093 if (shiftc < prec)
12094 shift_type = inner_type;
12095 }
12096 }
12097 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12098 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12099 zerobits <<= prec - shiftc;
12100 /* For an arithmetic shift, if the sign bit could be set, zerobits
12101 may actually contain sign bits, so no transformation is
12102 possible unless MASK masks them all away. In that
12103 case the shift needs to be converted into a logical shift. */
12104 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12105 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12106 {
12107 if ((mask & zerobits) == 0)
12108 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12109 else
12110 zerobits = 0;
12111 }
12112 }
12113
12114 /* ((X << 16) & 0xff00) is (X, 0). */
12115 if ((mask & zerobits) == mask)
12116 return omit_one_operand_loc (loc, type,
12117 build_int_cst (type, 0), arg0);
12118
12119 newmask = mask | zerobits;
12120 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12121 {
12122 /* Only do the transformation if NEWMASK is some integer
12123 mode's mask. */
12124 for (prec = BITS_PER_UNIT;
12125 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12126 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12127 break;
12128 if (prec < HOST_BITS_PER_WIDE_INT
12129 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12130 {
12131 tree newmaskt;
12132
12133 if (shift_type != TREE_TYPE (arg0))
12134 {
12135 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12136 fold_convert_loc (loc, shift_type,
12137 TREE_OPERAND (arg0, 0)),
12138 TREE_OPERAND (arg0, 1));
12139 tem = fold_convert_loc (loc, type, tem);
12140 }
12141 else
12142 tem = op0;
12143 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12144 if (!tree_int_cst_equal (newmaskt, arg1))
12145 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12146 }
12147 }
12148 }
12149
12150 goto associate;
12151
12152 case RDIV_EXPR:
12153 /* Don't touch a floating-point divide by zero unless the mode
12154 of the constant can represent infinity. */
12155 if (TREE_CODE (arg1) == REAL_CST
12156 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12157 && real_zerop (arg1))
12158 return NULL_TREE;
12159
12160 /* Optimize A / A to 1.0 if we don't care about
12161 NaNs or Infinities. Skip the transformation
12162 for non-real operands. */
12163 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12164 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12165 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12166 && operand_equal_p (arg0, arg1, 0))
12167 {
12168 tree r = build_real (TREE_TYPE (arg0), dconst1);
12169
12170 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12171 }
12172
12173 /* The complex version of the above A / A optimization. */
12174 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12175 && operand_equal_p (arg0, arg1, 0))
12176 {
12177 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12178 if (! HONOR_NANS (TYPE_MODE (elem_type))
12179 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12180 {
12181 tree r = build_real (elem_type, dconst1);
12182 /* omit_two_operands will call fold_convert for us. */
12183 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12184 }
12185 }
12186
12187 /* (-A) / (-B) -> A / B */
12188 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12189 return fold_build2_loc (loc, RDIV_EXPR, type,
12190 TREE_OPERAND (arg0, 0),
12191 negate_expr (arg1));
12192 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12193 return fold_build2_loc (loc, RDIV_EXPR, type,
12194 negate_expr (arg0),
12195 TREE_OPERAND (arg1, 0));
12196
12197 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12198 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12199 && real_onep (arg1))
12200 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12201
12202 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12203 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12204 && real_minus_onep (arg1))
12205 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12206 negate_expr (arg0)));
12207
12208 /* If ARG1 is a constant, we can convert this to a multiply by the
12209 reciprocal. This does not have the same rounding properties,
12210 so only do this if -freciprocal-math. We can actually
12211 always safely do it if ARG1 is a power of two, but it's hard to
12212 tell if it is or not in a portable manner. */
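/* E.g. x / 5.0 becomes x * 0.2 under -freciprocal-math, while
x / 2.0 becomes x * 0.5 whenever optimizing, because the
reciprocal of 2.0 is exact. */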
12213 if (optimize
12214 && (TREE_CODE (arg1) == REAL_CST
12215 || (TREE_CODE (arg1) == COMPLEX_CST
12216 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12217 || (TREE_CODE (arg1) == VECTOR_CST
12218 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12219 {
12220 if (flag_reciprocal_math
12221 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12222 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12223 /* Find the reciprocal if optimizing and the result is exact.
12224 TODO: Complex reciprocal not implemented. */
12225 if (TREE_CODE (arg1) != COMPLEX_CST)
12226 {
12227 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12228
12229 if (inverse)
12230 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12231 }
12232 }
12233 /* Convert A/B/C to A/(B*C). */
12234 if (flag_reciprocal_math
12235 && TREE_CODE (arg0) == RDIV_EXPR)
12236 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12237 fold_build2_loc (loc, MULT_EXPR, type,
12238 TREE_OPERAND (arg0, 1), arg1));
12239
12240 /* Convert A/(B/C) to (A/B)*C. */
12241 if (flag_reciprocal_math
12242 && TREE_CODE (arg1) == RDIV_EXPR)
12243 return fold_build2_loc (loc, MULT_EXPR, type,
12244 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12245 TREE_OPERAND (arg1, 0)),
12246 TREE_OPERAND (arg1, 1));
12247
12248 /* Convert C1/(X*C2) into (C1/C2)/X. */
12249 if (flag_reciprocal_math
12250 && TREE_CODE (arg1) == MULT_EXPR
12251 && TREE_CODE (arg0) == REAL_CST
12252 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12253 {
12254 tree tem = const_binop (RDIV_EXPR, arg0,
12255 TREE_OPERAND (arg1, 1));
12256 if (tem)
12257 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12258 TREE_OPERAND (arg1, 0));
12259 }
12260
12261 if (flag_unsafe_math_optimizations)
12262 {
12263 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12264 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12265
12266 /* Optimize sin(x)/cos(x) as tan(x). */
12267 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12268 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12269 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12270 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12271 CALL_EXPR_ARG (arg1, 0), 0))
12272 {
12273 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12274
12275 if (tanfn != NULL_TREE)
12276 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12277 }
12278
12279 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12280 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12281 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12282 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12283 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12284 CALL_EXPR_ARG (arg1, 0), 0))
12285 {
12286 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12287
12288 if (tanfn != NULL_TREE)
12289 {
12290 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12291 CALL_EXPR_ARG (arg0, 0));
12292 return fold_build2_loc (loc, RDIV_EXPR, type,
12293 build_real (type, dconst1), tmp);
12294 }
12295 }
12296
12297 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12298 NaNs or Infinities. */
12299 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12300 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12301 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12302 {
12303 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12304 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12305
12306 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12307 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12308 && operand_equal_p (arg00, arg01, 0))
12309 {
12310 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12311
12312 if (cosfn != NULL_TREE)
12313 return build_call_expr_loc (loc, cosfn, 1, arg00);
12314 }
12315 }
12316
12317 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12318 NaNs or Infinities. */
12319 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12320 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12321 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12322 {
12323 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12324 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12325
12326 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12327 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12328 && operand_equal_p (arg00, arg01, 0))
12329 {
12330 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12331
12332 if (cosfn != NULL_TREE)
12333 {
12334 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12335 return fold_build2_loc (loc, RDIV_EXPR, type,
12336 build_real (type, dconst1),
12337 tmp);
12338 }
12339 }
12340 }
12341
12342 /* Optimize pow(x,c)/x as pow(x,c-1). */
12343 if (fcode0 == BUILT_IN_POW
12344 || fcode0 == BUILT_IN_POWF
12345 || fcode0 == BUILT_IN_POWL)
12346 {
12347 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12348 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12349 if (TREE_CODE (arg01) == REAL_CST
12350 && !TREE_OVERFLOW (arg01)
12351 && operand_equal_p (arg1, arg00, 0))
12352 {
12353 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12354 REAL_VALUE_TYPE c;
12355 tree arg;
12356
12357 c = TREE_REAL_CST (arg01);
12358 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12359 arg = build_real (type, c);
12360 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12361 }
12362 }
12363
12364 /* Optimize a/root(b/c) into a*root(c/b). */
12365 if (BUILTIN_ROOT_P (fcode1))
12366 {
12367 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12368
12369 if (TREE_CODE (rootarg) == RDIV_EXPR)
12370 {
12371 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12372 tree b = TREE_OPERAND (rootarg, 0);
12373 tree c = TREE_OPERAND (rootarg, 1);
12374
12375 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12376
12377 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12378 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12379 }
12380 }
12381
12382 /* Optimize x/expN(y) into x*expN(-y). */
12383 if (BUILTIN_EXPONENT_P (fcode1))
12384 {
12385 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12386 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12387 arg1 = build_call_expr_loc (loc,
12388 expfn, 1,
12389 fold_convert_loc (loc, type, arg));
12390 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12391 }
12392
12393 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12394 if (fcode1 == BUILT_IN_POW
12395 || fcode1 == BUILT_IN_POWF
12396 || fcode1 == BUILT_IN_POWL)
12397 {
12398 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12399 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12400 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12401 tree neg11 = fold_convert_loc (loc, type,
12402 negate_expr (arg11));
12403 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12404 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12405 }
12406 }
12407 return NULL_TREE;
12408
12409 case TRUNC_DIV_EXPR:
12410 /* Optimize (X & (-A)) / A where A is a power of 2,
12411 to X >> log2(A) */
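/* E.g. (x & -8) / 8 becomes x >> 3 for signed x. */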
12412 if (TREE_CODE (arg0) == BIT_AND_EXPR
12413 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12414 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12415 {
12416 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12417 arg1, TREE_OPERAND (arg0, 1));
12418 if (sum && integer_zerop (sum))
12419 {
12420 unsigned long pow2;
12421 if (TREE_INT_CST_LOW (arg1))
12422 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12423 else
12424 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12425 + HOST_BITS_PER_WIDE_INT;
12426
12427 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12428 TREE_OPERAND (arg0, 0),
12429 build_int_cst (integer_type_node, pow2));
12430 }
12431 }
12432
12433 /* Fall through */
12434
12435 case FLOOR_DIV_EXPR:
12436 /* Simplify A / (B << N) where A and B are positive and B is
12437 a power of 2, to A >> (N + log2(B)). */
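/* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2). */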
12438 strict_overflow_p = false;
12439 if (TREE_CODE (arg1) == LSHIFT_EXPR
12440 && (TYPE_UNSIGNED (type)
12441 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12442 {
12443 tree sval = TREE_OPERAND (arg1, 0);
12444 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12445 {
12446 tree sh_cnt = TREE_OPERAND (arg1, 1);
12447 unsigned long pow2;
12448
12449 if (TREE_INT_CST_LOW (sval))
12450 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12451 else
12452 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12453 + HOST_BITS_PER_WIDE_INT;
12454
12455 if (strict_overflow_p)
12456 fold_overflow_warning (("assuming signed overflow does not "
12457 "occur when simplifying A / (B << N)"),
12458 WARN_STRICT_OVERFLOW_MISC);
12459
12460 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12461 sh_cnt,
12462 build_int_cst (TREE_TYPE (sh_cnt),
12463 pow2));
12464 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12465 fold_convert_loc (loc, type, arg0), sh_cnt);
12466 }
12467 }
12468
12469 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12470 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12471 if (INTEGRAL_TYPE_P (type)
12472 && TYPE_UNSIGNED (type)
12473 && code == FLOOR_DIV_EXPR)
12474 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12475
12476 /* Fall through */
12477
12478 case ROUND_DIV_EXPR:
12479 case CEIL_DIV_EXPR:
12480 case EXACT_DIV_EXPR:
12481 if (integer_onep (arg1))
12482 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12483 if (integer_zerop (arg1))
12484 return NULL_TREE;
12485 /* X / -1 is -X. */
12486 if (!TYPE_UNSIGNED (type)
12487 && TREE_CODE (arg1) == INTEGER_CST
12488 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12489 && TREE_INT_CST_HIGH (arg1) == -1)
12490 return fold_convert_loc (loc, type, negate_expr (arg0));
12491
12492 /* Convert -A / -B to A / B when the type is signed and overflow is
12493 undefined. */
12494 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12495 && TREE_CODE (arg0) == NEGATE_EXPR
12496 && negate_expr_p (arg1))
12497 {
12498 if (INTEGRAL_TYPE_P (type))
12499 fold_overflow_warning (("assuming signed overflow does not occur "
12500 "when distributing negation across "
12501 "division"),
12502 WARN_STRICT_OVERFLOW_MISC);
12503 return fold_build2_loc (loc, code, type,
12504 fold_convert_loc (loc, type,
12505 TREE_OPERAND (arg0, 0)),
12506 fold_convert_loc (loc, type,
12507 negate_expr (arg1)));
12508 }
12509 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12510 && TREE_CODE (arg1) == NEGATE_EXPR
12511 && negate_expr_p (arg0))
12512 {
12513 if (INTEGRAL_TYPE_P (type))
12514 fold_overflow_warning (("assuming signed overflow does not occur "
12515 "when distributing negation across "
12516 "division"),
12517 WARN_STRICT_OVERFLOW_MISC);
12518 return fold_build2_loc (loc, code, type,
12519 fold_convert_loc (loc, type,
12520 negate_expr (arg0)),
12521 fold_convert_loc (loc, type,
12522 TREE_OPERAND (arg1, 0)));
12523 }
12524
12525 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12526 operation, EXACT_DIV_EXPR.
12527
12528 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12529 At one time others generated faster code, but it's not clear whether
12530 they still do after the last round of changes to the DIV code in expmed.c. */
12531 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12532 && multiple_of_p (type, arg0, arg1))
12533 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12534
12535 strict_overflow_p = false;
12536 if (TREE_CODE (arg1) == INTEGER_CST
12537 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12538 &strict_overflow_p)))
12539 {
12540 if (strict_overflow_p)
12541 fold_overflow_warning (("assuming signed overflow does not occur "
12542 "when simplifying division"),
12543 WARN_STRICT_OVERFLOW_MISC);
12544 return fold_convert_loc (loc, type, tem);
12545 }
12546
12547 return NULL_TREE;
12548
12549 case CEIL_MOD_EXPR:
12550 case FLOOR_MOD_EXPR:
12551 case ROUND_MOD_EXPR:
12552 case TRUNC_MOD_EXPR:
12553 /* X % 1 is always zero, but be sure to preserve any side
12554 effects in X. */
12555 if (integer_onep (arg1))
12556 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12557
12558 /* For X % 0, return the expression unchanged so that we can get the
12559 proper warnings and errors. */
12560 if (integer_zerop (arg1))
12561 return NULL_TREE;
12562
12563 /* 0 % X is always zero, but be sure to preserve any side
12564 effects in X. Place this after checking for X == 0. */
12565 if (integer_zerop (arg0))
12566 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12567
12568 /* X % -1 is zero. */
12569 if (!TYPE_UNSIGNED (type)
12570 && TREE_CODE (arg1) == INTEGER_CST
12571 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12572 && TREE_INT_CST_HIGH (arg1) == -1)
12573 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12574
12575 /* X % -C is the same as X % C. */
12576 if (code == TRUNC_MOD_EXPR
12577 && !TYPE_UNSIGNED (type)
12578 && TREE_CODE (arg1) == INTEGER_CST
12579 && !TREE_OVERFLOW (arg1)
12580 && TREE_INT_CST_HIGH (arg1) < 0
12581 && !TYPE_OVERFLOW_TRAPS (type)
12582 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12583 && !sign_bit_p (arg1, arg1))
12584 return fold_build2_loc (loc, code, type,
12585 fold_convert_loc (loc, type, arg0),
12586 fold_convert_loc (loc, type,
12587 negate_expr (arg1)));
12588
12589 /* X % -Y is the same as X % Y. */
12590 if (code == TRUNC_MOD_EXPR
12591 && !TYPE_UNSIGNED (type)
12592 && TREE_CODE (arg1) == NEGATE_EXPR
12593 && !TYPE_OVERFLOW_TRAPS (type))
12594 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12595 fold_convert_loc (loc, type,
12596 TREE_OPERAND (arg1, 0)));
12597
12598 strict_overflow_p = false;
12599 if (TREE_CODE (arg1) == INTEGER_CST
12600 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12601 &strict_overflow_p)))
12602 {
12603 if (strict_overflow_p)
12604 fold_overflow_warning (("assuming signed overflow does not occur "
12605 "when simplifying modulus"),
12606 WARN_STRICT_OVERFLOW_MISC);
12607 return fold_convert_loc (loc, type, tem);
12608 }
12609
12610 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12611 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12612 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12613 && (TYPE_UNSIGNED (type)
12614 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12615 {
12616 tree c = arg1;
12617 /* Also optimize A % (C << N) where C is a power of 2,
12618 to A & ((C << N) - 1). */
12619 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12620 c = TREE_OPERAND (arg1, 0);
12621
12622 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12623 {
12624 tree mask
12625 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12626 build_int_cst (TREE_TYPE (arg1), 1));
12627 if (strict_overflow_p)
12628 fold_overflow_warning (("assuming signed overflow does not "
12629 "occur when simplifying "
12630 "X % (power of two)"),
12631 WARN_STRICT_OVERFLOW_MISC);
12632 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12633 fold_convert_loc (loc, type, arg0),
12634 fold_convert_loc (loc, type, mask));
12635 }
12636 }
12637
12638 return NULL_TREE;
12639
12640 case LROTATE_EXPR:
12641 case RROTATE_EXPR:
12642 if (integer_all_onesp (arg0))
12643 return omit_one_operand_loc (loc, type, arg0, arg1);
12644 goto shift;
12645
12646 case RSHIFT_EXPR:
12647 /* Optimize -1 >> x for arithmetic right shifts. */
12648 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12649 && tree_expr_nonnegative_p (arg1))
12650 return omit_one_operand_loc (loc, type, arg0, arg1);
12651 /* ... fall through ... */
12652
12653 case LSHIFT_EXPR:
12654 shift:
12655 if (integer_zerop (arg1))
12656 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12657 if (integer_zerop (arg0))
12658 return omit_one_operand_loc (loc, type, arg0, arg1);
12659
12660 /* Prefer vector1 << scalar to vector1 << vector2
12661 if vector2 is uniform. */
12662 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12663 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12664 return fold_build2_loc (loc, code, type, op0, tem);
12665
12666 /* Since a negative shift count is not well-defined,
12667 don't try to compute it in the compiler. */
12668 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12669 return NULL_TREE;
12670
12671 prec = element_precision (type);
12672
12673 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
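/* E.g. (x << 3) << 5 becomes x << 8. If the combined count reaches
the precision, the code below saturates or folds to zero: on a
32-bit type, (x >> 16) >> 20 becomes x >> 31 for signed x and 0
for unsigned x. */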
12674 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12675 && tree_to_uhwi (arg1) < prec
12676 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12677 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12678 {
12679 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12680 + tree_to_uhwi (arg1));
12681
12682 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12683 being well defined. */
12684 if (low >= prec)
12685 {
12686 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12687 low = low % prec;
12688 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12689 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12690 TREE_OPERAND (arg0, 0));
12691 else
12692 low = prec - 1;
12693 }
12694
12695 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12696 build_int_cst (TREE_TYPE (arg1), low));
12697 }
12698
12699 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12700 into x & ((unsigned)-1 >> c) for unsigned types. */
12701 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12702 || (TYPE_UNSIGNED (type)
12703 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12704 && tree_fits_uhwi_p (arg1)
12705 && tree_to_uhwi (arg1) < prec
12706 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12707 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12708 {
12709 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12710 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12711 tree lshift;
12712 tree arg00;
12713
12714 if (low0 == low1)
12715 {
12716 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12717
12718 lshift = build_minus_one_cst (type);
12719 lshift = const_binop (code, lshift, arg1);
12720
12721 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12722 }
12723 }
12724
12725 /* Rewrite an LROTATE_EXPR by a constant into an
12726 RROTATE_EXPR by a new constant. */
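/* E.g. on a 32-bit type, a left-rotate by 8 becomes a
right-rotate by 24. */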
12727 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12728 {
12729 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12730 tem = const_binop (MINUS_EXPR, tem, arg1);
12731 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12732 }
12733
12734 /* If we have a rotate of a bit operation with the rotate count and
12735 the second operand of the bit operation both constant,
12736 permute the two operations. */
12737 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12738 && (TREE_CODE (arg0) == BIT_AND_EXPR
12739 || TREE_CODE (arg0) == BIT_IOR_EXPR
12740 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12741 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12742 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12743 fold_build2_loc (loc, code, type,
12744 TREE_OPERAND (arg0, 0), arg1),
12745 fold_build2_loc (loc, code, type,
12746 TREE_OPERAND (arg0, 1), arg1));
12747
12748 /* Two consecutive rotates adding up to the precision of the
12749 type can be ignored. */
12750 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12751 && TREE_CODE (arg0) == RROTATE_EXPR
12752 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12753 && TREE_INT_CST_HIGH (arg1) == 0
12754 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12755 && ((TREE_INT_CST_LOW (arg1)
12756 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12757 == prec))
12758 return TREE_OPERAND (arg0, 0);
12759
12760 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12761 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12762 if the latter can be further optimized. */
12763 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12764 && TREE_CODE (arg0) == BIT_AND_EXPR
12765 && TREE_CODE (arg1) == INTEGER_CST
12766 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12767 {
12768 tree mask = fold_build2_loc (loc, code, type,
12769 fold_convert_loc (loc, type,
12770 TREE_OPERAND (arg0, 1)),
12771 arg1);
12772 tree shift = fold_build2_loc (loc, code, type,
12773 fold_convert_loc (loc, type,
12774 TREE_OPERAND (arg0, 0)),
12775 arg1);
12776 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12777 if (tem)
12778 return tem;
12779 }
12780
12781 return NULL_TREE;
12782
12783 case MIN_EXPR:
12784 if (operand_equal_p (arg0, arg1, 0))
12785 return omit_one_operand_loc (loc, type, arg0, arg1);
12786 if (INTEGRAL_TYPE_P (type)
12787 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12788 return omit_one_operand_loc (loc, type, arg1, arg0);
12789 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12790 if (tem)
12791 return tem;
12792 goto associate;
12793
12794 case MAX_EXPR:
12795 if (operand_equal_p (arg0, arg1, 0))
12796 return omit_one_operand_loc (loc, type, arg0, arg1);
12797 if (INTEGRAL_TYPE_P (type)
12798 && TYPE_MAX_VALUE (type)
12799 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12800 return omit_one_operand_loc (loc, type, arg1, arg0);
12801 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12802 if (tem)
12803 return tem;
12804 goto associate;
12805
12806 case TRUTH_ANDIF_EXPR:
12807 /* Note that the operands of this must be ints
12808 and their values must be 0 or 1.
12809 ("true" is a fixed value perhaps depending on the language.) */
12810 /* If first arg is constant zero, return it. */
12811 if (integer_zerop (arg0))
12812 return fold_convert_loc (loc, type, arg0);
12813 case TRUTH_AND_EXPR:
12814 /* If either arg is constant true, drop it. */
12815 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12816 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12817 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12818 /* Preserve sequence points. */
12819 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12820 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12821 /* If second arg is constant zero, result is zero, but first arg
12822 must be evaluated. */
12823 if (integer_zerop (arg1))
12824 return omit_one_operand_loc (loc, type, arg1, arg0);
12825 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12826 case will be handled here. */
12827 if (integer_zerop (arg0))
12828 return omit_one_operand_loc (loc, type, arg0, arg1);
12829
12830 /* !X && X is always false. */
12831 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12832 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12833 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12834 /* X && !X is always false. */
12835 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12836 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12837 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12838
12839 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12840 means A >= Y && A != MAX, but in this case we know that
12841 A < X <= MAX. */
12842
12843 if (!TREE_SIDE_EFFECTS (arg0)
12844 && !TREE_SIDE_EFFECTS (arg1))
12845 {
12846 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12847 if (tem && !operand_equal_p (tem, arg0, 0))
12848 return fold_build2_loc (loc, code, type, tem, arg1);
12849
12850 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12851 if (tem && !operand_equal_p (tem, arg1, 0))
12852 return fold_build2_loc (loc, code, type, arg0, tem);
12853 }
12854
12855 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12856 != NULL_TREE)
12857 return tem;
12858
12859 return NULL_TREE;
12860
12861 case TRUTH_ORIF_EXPR:
12862 /* Note that the operands of this must be ints
12863 and their values must be 0 or 1.
12864 ("true" is a fixed value perhaps depending on the language.) */
12865 /* If first arg is constant true, return it. */
12866 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12867 return fold_convert_loc (loc, type, arg0);
12868 case TRUTH_OR_EXPR:
12869 /* If either arg is constant zero, drop it. */
12870 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12871 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12872 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12873 /* Preserve sequence points. */
12874 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12875 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12876 /* If second arg is constant true, result is true, but we must
12877 evaluate first arg. */
12878 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12879 return omit_one_operand_loc (loc, type, arg1, arg0);
12880 /* Likewise for first arg, but note this only occurs here for
12881 TRUTH_OR_EXPR. */
12882 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12883 return omit_one_operand_loc (loc, type, arg0, arg1);
12884
12885 /* !X || X is always true. */
12886 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12887 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12888 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12889 /* X || !X is always true. */
12890 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12891 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12892 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12893
12894 /* (X && !Y) || (!X && Y) is X ^ Y */
12895 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12896 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12897 {
12898 tree a0, a1, l0, l1, n0, n1;
12899
12900 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12901 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12902
12903 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12904 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12905
12906 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12907 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12908
12909 if ((operand_equal_p (n0, a0, 0)
12910 && operand_equal_p (n1, a1, 0))
12911 || (operand_equal_p (n0, a1, 0)
12912 && operand_equal_p (n1, a0, 0)))
12913 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12914 }
12915
12916 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12917 != NULL_TREE)
12918 return tem;
12919
12920 return NULL_TREE;
12921
12922 case TRUTH_XOR_EXPR:
12923 /* If the second arg is constant zero, drop it. */
12924 if (integer_zerop (arg1))
12925 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12926 /* If the second arg is constant true, this is a logical inversion. */
12927 if (integer_onep (arg1))
12928 {
12929 tem = invert_truthvalue_loc (loc, arg0);
12930 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12931 }
12932 /* Identical arguments cancel to zero. */
12933 if (operand_equal_p (arg0, arg1, 0))
12934 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12935
12936 /* !X ^ X is always true. */
12937 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12938 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12939 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12940
12941 /* X ^ !X is always true. */
12942 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12943 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12944 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12945
12946 return NULL_TREE;
12947
12948 case EQ_EXPR:
12949 case NE_EXPR:
12950 STRIP_NOPS (arg0);
12951 STRIP_NOPS (arg1);
12952
12953 tem = fold_comparison (loc, code, type, op0, op1);
12954 if (tem != NULL_TREE)
12955 return tem;
12956
12957 /* bool_var != 0 becomes bool_var. */
12958 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12959 && code == NE_EXPR)
12960 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12961
12962 /* bool_var == 1 becomes bool_var. */
12963 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12964 && code == EQ_EXPR)
12965 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12966
12967 /* bool_var != 1 becomes !bool_var. */
12968 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12969 && code == NE_EXPR)
12970 return fold_convert_loc (loc, type,
12971 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12972 TREE_TYPE (arg0), arg0));
12973
12974 /* bool_var == 0 becomes !bool_var. */
12975 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12976 && code == EQ_EXPR)
12977 return fold_convert_loc (loc, type,
12978 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12979 TREE_TYPE (arg0), arg0));
12980
12981 /* !exp != 0 becomes !exp */
12982 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12983 && code == NE_EXPR)
12984 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12985
12986 /* If this is an equality comparison of the address of two non-weak,
12987 unaliased symbols neither of which are extern (since we do not
12988 have access to attributes for externs), then we know the result. */
12989 if (TREE_CODE (arg0) == ADDR_EXPR
12990 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12991 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12992 && ! lookup_attribute ("alias",
12993 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12994 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12995 && TREE_CODE (arg1) == ADDR_EXPR
12996 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12997 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12998 && ! lookup_attribute ("alias",
12999 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
13000 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
13001 {
13002 /* We know that we're looking at the address of two
13003 non-weak, unaliased, static _DECL nodes.
13004
13005 It is both wasteful and incorrect to call operand_equal_p
13006 to compare the two ADDR_EXPR nodes. It is wasteful in that
13007 all we need to do is test pointer equality for the arguments
13008 to the two ADDR_EXPR nodes. It is incorrect to use
13009 operand_equal_p as that function is NOT equivalent to a
13010 C equality test. It can in fact return false for two
13011 objects which would test as equal using the C equality
13012 operator. */
13013 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
13014 return constant_boolean_node (equal
13015 ? code == EQ_EXPR : code != EQ_EXPR,
13016 type);
13017 }
13018
13019 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
13020 a MINUS_EXPR of a constant, we can convert it into a comparison with
13021 a revised constant as long as no overflow occurs. */
13022 if (TREE_CODE (arg1) == INTEGER_CST
13023 && (TREE_CODE (arg0) == PLUS_EXPR
13024 || TREE_CODE (arg0) == MINUS_EXPR)
13025 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13026 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
13027 ? MINUS_EXPR : PLUS_EXPR,
13028 fold_convert_loc (loc, TREE_TYPE (arg0),
13029 arg1),
13030 TREE_OPERAND (arg0, 1)))
13031 && !TREE_OVERFLOW (tem))
13032 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13033
13034 /* Similarly for a NEGATE_EXPR. */
13035 if (TREE_CODE (arg0) == NEGATE_EXPR
13036 && TREE_CODE (arg1) == INTEGER_CST
13037 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
13038 arg1)))
13039 && TREE_CODE (tem) == INTEGER_CST
13040 && !TREE_OVERFLOW (tem))
13041 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13042
13043 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
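/* E.g. (x ^ 5) == 3 becomes x == 6, since 5 ^ 3 == 6. */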
13044 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13045 && TREE_CODE (arg1) == INTEGER_CST
13046 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13047 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13048 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
13049 fold_convert_loc (loc,
13050 TREE_TYPE (arg0),
13051 arg1),
13052 TREE_OPERAND (arg0, 1)));
13053
13054 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
13055 if ((TREE_CODE (arg0) == PLUS_EXPR
13056 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
13057 || TREE_CODE (arg0) == MINUS_EXPR)
13058 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13059 0)),
13060 arg1, 0)
13061 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13062 || POINTER_TYPE_P (TREE_TYPE (arg0))))
13063 {
13064 tree val = TREE_OPERAND (arg0, 1);
13065 return omit_two_operands_loc (loc, type,
13066 fold_build2_loc (loc, code, type,
13067 val,
13068 build_int_cst (TREE_TYPE (val),
13069 0)),
13070 TREE_OPERAND (arg0, 0), arg1);
13071 }
13072
13073 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
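/* For odd C, C - X == X would require C == 2 * X, which is
impossible because the right-hand side is even; so the comparison
folds to a constant while preserving any side effects of X. */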
13074 if (TREE_CODE (arg0) == MINUS_EXPR
13075 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
13076 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13077 1)),
13078 arg1, 0)
13079 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
13080 {
13081 return omit_two_operands_loc (loc, type,
13082 code == NE_EXPR
13083 ? boolean_true_node : boolean_false_node,
13084 TREE_OPERAND (arg0, 1), arg1);
13085 }
13086
13087 /* If we have X - Y == 0, we can convert that to X == Y and similarly
13088 for !=. Don't do this for ordered comparisons due to overflow. */
13089 if (TREE_CODE (arg0) == MINUS_EXPR
13090 && integer_zerop (arg1))
13091 return fold_build2_loc (loc, code, type,
13092 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
13093
13094 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13095 if (TREE_CODE (arg0) == ABS_EXPR
13096 && (integer_zerop (arg1) || real_zerop (arg1)))
13097 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13098
13099 /* If this is an EQ or NE comparison with zero and ARG0 is
13100 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13101 two operations, but the latter can be done in one less insn
13102 on machines that have only two-operand insns or on which a
13103 constant cannot be the first operand. */
13104 if (TREE_CODE (arg0) == BIT_AND_EXPR
13105 && integer_zerop (arg1))
13106 {
13107 tree arg00 = TREE_OPERAND (arg0, 0);
13108 tree arg01 = TREE_OPERAND (arg0, 1);
13109 if (TREE_CODE (arg00) == LSHIFT_EXPR
13110 && integer_onep (TREE_OPERAND (arg00, 0)))
13111 {
13112 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13113 arg01, TREE_OPERAND (arg00, 1));
13114 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13115 build_int_cst (TREE_TYPE (arg0), 1));
13116 return fold_build2_loc (loc, code, type,
13117 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13118 arg1);
13119 }
13120 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13121 && integer_onep (TREE_OPERAND (arg01, 0)))
13122 {
13123 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13124 arg00, TREE_OPERAND (arg01, 1));
13125 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13126 build_int_cst (TREE_TYPE (arg0), 1));
13127 return fold_build2_loc (loc, code, type,
13128 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13129 arg1);
13130 }
13131 }
13132
13133 /* If this is an NE or EQ comparison of zero against the result of a
13134 signed MOD operation whose second operand is a power of 2, make
13135 the MOD operation unsigned since it is simpler and equivalent. */
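/* E.g. for signed X, X % 4 == 0 becomes (unsigned) X % 4 == 0;
the sign of a nonzero remainder cannot change whether it
compares equal to zero. */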
13136 if (integer_zerop (arg1)
13137 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13138 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13139 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13140 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13141 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13142 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13143 {
13144 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13145 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13146 fold_convert_loc (loc, newtype,
13147 TREE_OPERAND (arg0, 0)),
13148 fold_convert_loc (loc, newtype,
13149 TREE_OPERAND (arg0, 1)));
13150
13151 return fold_build2_loc (loc, code, type, newmod,
13152 fold_convert_loc (loc, newtype, arg1));
13153 }
13154
13155 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13156 C1 is a valid shift constant, and C2 is a power of two, i.e.
13157 a single bit. */
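/* E.g. ((X >> 3) & 4) != 0 becomes (X & 32) != 0, and for a
signed 32-bit X, ((X >> 31) & 2) != 0 becomes X < 0. */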
13158 if (TREE_CODE (arg0) == BIT_AND_EXPR
13159 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13160 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13161 == INTEGER_CST
13162 && integer_pow2p (TREE_OPERAND (arg0, 1))
13163 && integer_zerop (arg1))
13164 {
13165 tree itype = TREE_TYPE (arg0);
13166 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13167 prec = TYPE_PRECISION (itype);
13168
13169 /* Check for a valid shift count. */
13170 if (TREE_INT_CST_HIGH (arg001) == 0
13171 && TREE_INT_CST_LOW (arg001) < prec)
13172 {
13173 tree arg01 = TREE_OPERAND (arg0, 1);
13174 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13175 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13176 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13177 can be rewritten as (X & (C2 << C1)) != 0. */
13178 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13179 {
13180 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13181 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13182 return fold_build2_loc (loc, code, type, tem,
13183 fold_convert_loc (loc, itype, arg1));
13184 }
13185 /* Otherwise, for signed (arithmetic) shifts,
13186 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13187 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13188 else if (!TYPE_UNSIGNED (itype))
13189 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13190 arg000, build_int_cst (itype, 0));
13191 /* Otherwise, for unsigned (logical) shifts,
13192 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13193 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13194 else
13195 return omit_one_operand_loc (loc, type,
13196 code == EQ_EXPR ? integer_one_node
13197 : integer_zero_node,
13198 arg000);
13199 }
13200 }
13201
13202 /* If we have (A & C) == C where C is a power of 2, convert this into
13203 (A & C) != 0. Similarly for NE_EXPR. */
13204 if (TREE_CODE (arg0) == BIT_AND_EXPR
13205 && integer_pow2p (TREE_OPERAND (arg0, 1))
13206 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13207 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13208 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13209 integer_zero_node));
13210
13211 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13212 bit, then fold the expression into A < 0 or A >= 0. */
13213 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13214 if (tem)
13215 return tem;
13216
13217 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13218 Similarly for NE_EXPR. */
13219 if (TREE_CODE (arg0) == BIT_AND_EXPR
13220 && TREE_CODE (arg1) == INTEGER_CST
13221 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13222 {
13223 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13224 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13225 TREE_OPERAND (arg0, 1));
13226 tree dandnotc
13227 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13228 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13229 notc);
13230 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13231 if (integer_nonzerop (dandnotc))
13232 return omit_one_operand_loc (loc, type, rslt, arg0);
13233 }
13234
13235 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13236 Similarly for NE_EXPR. */
13237 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13238 && TREE_CODE (arg1) == INTEGER_CST
13239 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13240 {
13241 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13242 tree candnotd
13243 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13244 TREE_OPERAND (arg0, 1),
13245 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13246 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13247 if (integer_nonzerop (candnotd))
13248 return omit_one_operand_loc (loc, type, rslt, arg0);
13249 }
13250
13251 /* If this is a comparison of a field, we may be able to simplify it. */
13252 if ((TREE_CODE (arg0) == COMPONENT_REF
13253 || TREE_CODE (arg0) == BIT_FIELD_REF)
13254 /* Handle the constant case even without -O
13255 to make sure the warnings are given. */
13256 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13257 {
13258 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13259 if (t1)
13260 return t1;
13261 }
13262
13263 /* Optimize comparisons of strlen vs zero to a compare of the
13264 first character of the string vs zero. To wit,
13265 strlen(ptr) == 0 => *ptr == 0
13266 strlen(ptr) != 0 => *ptr != 0
13267 Other cases should reduce to one of these two (or a constant)
13268 due to the return value of strlen being unsigned. */
13269 if (TREE_CODE (arg0) == CALL_EXPR
13270 && integer_zerop (arg1))
13271 {
13272 tree fndecl = get_callee_fndecl (arg0);
13273
13274 if (fndecl
13275 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13276 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13277 && call_expr_nargs (arg0) == 1
13278 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13279 {
13280 tree iref = build_fold_indirect_ref_loc (loc,
13281 CALL_EXPR_ARG (arg0, 0));
13282 return fold_build2_loc (loc, code, type, iref,
13283 build_int_cst (TREE_TYPE (iref), 0));
13284 }
13285 }
13286
13287 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13288 of X. Similarly fold (X >> C) == 0 into X >= 0. */
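      /* E.g. for a 32-bit int X, (X >> 31) != 0 becomes X < 0 and
	 (X >> 31) == 0 becomes X >= 0.  */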
13289 if (TREE_CODE (arg0) == RSHIFT_EXPR
13290 && integer_zerop (arg1)
13291 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13292 {
13293 tree arg00 = TREE_OPERAND (arg0, 0);
13294 tree arg01 = TREE_OPERAND (arg0, 1);
13295 tree itype = TREE_TYPE (arg00);
13296 if (TREE_INT_CST_HIGH (arg01) == 0
13297 && TREE_INT_CST_LOW (arg01)
13298 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13299 {
13300 if (TYPE_UNSIGNED (itype))
13301 {
13302 itype = signed_type_for (itype);
13303 arg00 = fold_convert_loc (loc, itype, arg00);
13304 }
13305 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13306 type, arg00, build_zero_cst (itype));
13307 }
13308 }
13309
13310 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13311 if (integer_zerop (arg1)
13312 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13313 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13314 TREE_OPERAND (arg0, 1));
13315
13316 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13317 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13318 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13319 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13320 build_zero_cst (TREE_TYPE (arg0)));
13321 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13322 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13323 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13324 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13325 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13326 build_zero_cst (TREE_TYPE (arg0)));
13327
13328 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
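      /* E.g. (X ^ 5) == 3 becomes X == 6, since 5 ^ 3 == 6.  */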
13329 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13330 && TREE_CODE (arg1) == INTEGER_CST
13331 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13332 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13333 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13334 TREE_OPERAND (arg0, 1), arg1));
13335
13336 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13337 (X & C) == 0 when C is a single bit. */
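      /* E.g. (~X & 4) == 0 becomes (X & 4) != 0, since bit 2 of ~X is
	 clear exactly when bit 2 of X is set.  */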
13338 if (TREE_CODE (arg0) == BIT_AND_EXPR
13339 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13340 && integer_zerop (arg1)
13341 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13342 {
13343 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13344 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13345 TREE_OPERAND (arg0, 1));
13346 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13347 type, tem,
13348 fold_convert_loc (loc, TREE_TYPE (arg0),
13349 arg1));
13350 }
13351
13352 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13353 constant C is a power of two, i.e. a single bit. */
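      /* E.g. ((X & 8) ^ 8) == 0 becomes (X & 8) != 0, since (X & 8) ^ 8
	 is zero exactly when X has bit 3 set.  */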
13354 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13355 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13356 && integer_zerop (arg1)
13357 && integer_pow2p (TREE_OPERAND (arg0, 1))
13358 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13359 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13360 {
13361 tree arg00 = TREE_OPERAND (arg0, 0);
13362 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13363 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13364 }
13365
13366 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13367          when C is a power of two, i.e. a single bit.  */
13368 if (TREE_CODE (arg0) == BIT_AND_EXPR
13369 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13370 && integer_zerop (arg1)
13371 && integer_pow2p (TREE_OPERAND (arg0, 1))
13372 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13373 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13374 {
13375 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13376 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13377 arg000, TREE_OPERAND (arg0, 1));
13378 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13379 tem, build_int_cst (TREE_TYPE (tem), 0));
13380 }
13381
13382 if (integer_zerop (arg1)
13383 && tree_expr_nonzero_p (arg0))
13384 {
13385           tree res = constant_boolean_node (code == NE_EXPR, type);
13386 return omit_one_operand_loc (loc, type, res, arg0);
13387 }
13388
13389 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13390 if (TREE_CODE (arg0) == NEGATE_EXPR
13391 && TREE_CODE (arg1) == NEGATE_EXPR)
13392 return fold_build2_loc (loc, code, type,
13393 TREE_OPERAND (arg0, 0),
13394 fold_convert_loc (loc, TREE_TYPE (arg0),
13395 TREE_OPERAND (arg1, 0)));
13396
13397       /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries.  */
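      /* E.g. (X & 7) == (Y & 7) becomes ((X ^ Y) & 7) == 0: the two
	 masked values agree exactly when X and Y agree in the masked bits.  */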
13398 if (TREE_CODE (arg0) == BIT_AND_EXPR
13399 && TREE_CODE (arg1) == BIT_AND_EXPR)
13400 {
13401 tree arg00 = TREE_OPERAND (arg0, 0);
13402 tree arg01 = TREE_OPERAND (arg0, 1);
13403 tree arg10 = TREE_OPERAND (arg1, 0);
13404 tree arg11 = TREE_OPERAND (arg1, 1);
13405 tree itype = TREE_TYPE (arg0);
13406
13407 if (operand_equal_p (arg01, arg11, 0))
13408 return fold_build2_loc (loc, code, type,
13409 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13410 fold_build2_loc (loc,
13411 BIT_XOR_EXPR, itype,
13412 arg00, arg10),
13413 arg01),
13414 build_zero_cst (itype));
13415
13416 if (operand_equal_p (arg01, arg10, 0))
13417 return fold_build2_loc (loc, code, type,
13418 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13419 fold_build2_loc (loc,
13420 BIT_XOR_EXPR, itype,
13421 arg00, arg11),
13422 arg01),
13423 build_zero_cst (itype));
13424
13425 if (operand_equal_p (arg00, arg11, 0))
13426 return fold_build2_loc (loc, code, type,
13427 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13428 fold_build2_loc (loc,
13429 BIT_XOR_EXPR, itype,
13430 arg01, arg10),
13431 arg00),
13432 build_zero_cst (itype));
13433
13434 if (operand_equal_p (arg00, arg10, 0))
13435 return fold_build2_loc (loc, code, type,
13436 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13437 fold_build2_loc (loc,
13438 BIT_XOR_EXPR, itype,
13439 arg01, arg11),
13440 arg00),
13441 build_zero_cst (itype));
13442 }
13443
13444 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13445 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13446 {
13447 tree arg00 = TREE_OPERAND (arg0, 0);
13448 tree arg01 = TREE_OPERAND (arg0, 1);
13449 tree arg10 = TREE_OPERAND (arg1, 0);
13450 tree arg11 = TREE_OPERAND (arg1, 1);
13451 tree itype = TREE_TYPE (arg0);
13452
13453 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13454 operand_equal_p guarantees no side-effects so we don't need
13455 to use omit_one_operand on Z. */
13456 if (operand_equal_p (arg01, arg11, 0))
13457 return fold_build2_loc (loc, code, type, arg00,
13458 fold_convert_loc (loc, TREE_TYPE (arg00),
13459 arg10));
13460 if (operand_equal_p (arg01, arg10, 0))
13461 return fold_build2_loc (loc, code, type, arg00,
13462 fold_convert_loc (loc, TREE_TYPE (arg00),
13463 arg11));
13464 if (operand_equal_p (arg00, arg11, 0))
13465 return fold_build2_loc (loc, code, type, arg01,
13466 fold_convert_loc (loc, TREE_TYPE (arg01),
13467 arg10));
13468 if (operand_equal_p (arg00, arg10, 0))
13469 return fold_build2_loc (loc, code, type, arg01,
13470 fold_convert_loc (loc, TREE_TYPE (arg01),
13471 arg11));
13472
13473 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
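	  /* E.g. (X ^ 5) == (Y ^ 3) becomes (X ^ 6) == Y, folding the two
	     constants into 5 ^ 3 == 6.  */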
13474 if (TREE_CODE (arg01) == INTEGER_CST
13475 && TREE_CODE (arg11) == INTEGER_CST)
13476 {
13477 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13478 fold_convert_loc (loc, itype, arg11));
13479 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13480 return fold_build2_loc (loc, code, type, tem,
13481 fold_convert_loc (loc, itype, arg10));
13482 }
13483 }
13484
13485 /* Attempt to simplify equality/inequality comparisons of complex
13486 values. Only lower the comparison if the result is known or
13487 can be simplified to a single scalar comparison. */
13488 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13489 || TREE_CODE (arg0) == COMPLEX_CST)
13490 && (TREE_CODE (arg1) == COMPLEX_EXPR
13491 || TREE_CODE (arg1) == COMPLEX_CST))
13492 {
13493 tree real0, imag0, real1, imag1;
13494 tree rcond, icond;
13495
13496 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13497 {
13498 real0 = TREE_OPERAND (arg0, 0);
13499 imag0 = TREE_OPERAND (arg0, 1);
13500 }
13501 else
13502 {
13503 real0 = TREE_REALPART (arg0);
13504 imag0 = TREE_IMAGPART (arg0);
13505 }
13506
13507 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13508 {
13509 real1 = TREE_OPERAND (arg1, 0);
13510 imag1 = TREE_OPERAND (arg1, 1);
13511 }
13512 else
13513 {
13514 real1 = TREE_REALPART (arg1);
13515 imag1 = TREE_IMAGPART (arg1);
13516 }
13517
13518 rcond = fold_binary_loc (loc, code, type, real0, real1);
13519 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13520 {
13521 if (integer_zerop (rcond))
13522 {
13523 if (code == EQ_EXPR)
13524 return omit_two_operands_loc (loc, type, boolean_false_node,
13525 imag0, imag1);
13526 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13527 }
13528 else
13529 {
13530 if (code == NE_EXPR)
13531 return omit_two_operands_loc (loc, type, boolean_true_node,
13532 imag0, imag1);
13533 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13534 }
13535 }
13536
13537 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13538 if (icond && TREE_CODE (icond) == INTEGER_CST)
13539 {
13540 if (integer_zerop (icond))
13541 {
13542 if (code == EQ_EXPR)
13543 return omit_two_operands_loc (loc, type, boolean_false_node,
13544 real0, real1);
13545 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13546 }
13547 else
13548 {
13549 if (code == NE_EXPR)
13550 return omit_two_operands_loc (loc, type, boolean_true_node,
13551 real0, real1);
13552 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13553 }
13554 }
13555 }
13556
13557 return NULL_TREE;
13558
13559 case LT_EXPR:
13560 case GT_EXPR:
13561 case LE_EXPR:
13562 case GE_EXPR:
13563 tem = fold_comparison (loc, code, type, op0, op1);
13564 if (tem != NULL_TREE)
13565 return tem;
13566
13567 /* Transform comparisons of the form X +- C CMP X. */
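      /* E.g. for signed X with undefined overflow, X + 1 > X folds to
	 true and X - 1 >= X folds to false.  */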
13568 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13569 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13570 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13571 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13572 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13573 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13574 {
13575 tree arg01 = TREE_OPERAND (arg0, 1);
13576 enum tree_code code0 = TREE_CODE (arg0);
13577 int is_positive;
13578
13579 if (TREE_CODE (arg01) == REAL_CST)
13580 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13581 else
13582 is_positive = tree_int_cst_sgn (arg01);
13583
13584 /* (X - c) > X becomes false. */
13585 if (code == GT_EXPR
13586 && ((code0 == MINUS_EXPR && is_positive >= 0)
13587 || (code0 == PLUS_EXPR && is_positive <= 0)))
13588 {
13589 if (TREE_CODE (arg01) == INTEGER_CST
13590 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13591 fold_overflow_warning (("assuming signed overflow does not "
13592 "occur when assuming that (X - c) > X "
13593 "is always false"),
13594 WARN_STRICT_OVERFLOW_ALL);
13595 return constant_boolean_node (0, type);
13596 }
13597
13598 /* Likewise (X + c) < X becomes false. */
13599 if (code == LT_EXPR
13600 && ((code0 == PLUS_EXPR && is_positive >= 0)
13601 || (code0 == MINUS_EXPR && is_positive <= 0)))
13602 {
13603 if (TREE_CODE (arg01) == INTEGER_CST
13604 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13605 fold_overflow_warning (("assuming signed overflow does not "
13606 "occur when assuming that "
13607 "(X + c) < X is always false"),
13608 WARN_STRICT_OVERFLOW_ALL);
13609 return constant_boolean_node (0, type);
13610 }
13611
13612 /* Convert (X - c) <= X to true. */
13613 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13614 && code == LE_EXPR
13615 && ((code0 == MINUS_EXPR && is_positive >= 0)
13616 || (code0 == PLUS_EXPR && is_positive <= 0)))
13617 {
13618 if (TREE_CODE (arg01) == INTEGER_CST
13619 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13620 fold_overflow_warning (("assuming signed overflow does not "
13621 "occur when assuming that "
13622 "(X - c) <= X is always true"),
13623 WARN_STRICT_OVERFLOW_ALL);
13624 return constant_boolean_node (1, type);
13625 }
13626
13627 /* Convert (X + c) >= X to true. */
13628 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13629 && code == GE_EXPR
13630 && ((code0 == PLUS_EXPR && is_positive >= 0)
13631 || (code0 == MINUS_EXPR && is_positive <= 0)))
13632 {
13633 if (TREE_CODE (arg01) == INTEGER_CST
13634 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13635 fold_overflow_warning (("assuming signed overflow does not "
13636 "occur when assuming that "
13637 "(X + c) >= X is always true"),
13638 WARN_STRICT_OVERFLOW_ALL);
13639 return constant_boolean_node (1, type);
13640 }
13641
13642 if (TREE_CODE (arg01) == INTEGER_CST)
13643 {
13644 /* Convert X + c > X and X - c < X to true for integers. */
13645 if (code == GT_EXPR
13646 && ((code0 == PLUS_EXPR && is_positive > 0)
13647 || (code0 == MINUS_EXPR && is_positive < 0)))
13648 {
13649 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13650 fold_overflow_warning (("assuming signed overflow does "
13651 "not occur when assuming that "
13652 "(X + c) > X is always true"),
13653 WARN_STRICT_OVERFLOW_ALL);
13654 return constant_boolean_node (1, type);
13655 }
13656
13657 if (code == LT_EXPR
13658 && ((code0 == MINUS_EXPR && is_positive > 0)
13659 || (code0 == PLUS_EXPR && is_positive < 0)))
13660 {
13661 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13662 fold_overflow_warning (("assuming signed overflow does "
13663 "not occur when assuming that "
13664 "(X - c) < X is always true"),
13665 WARN_STRICT_OVERFLOW_ALL);
13666 return constant_boolean_node (1, type);
13667 }
13668
13669 /* Convert X + c <= X and X - c >= X to false for integers. */
13670 if (code == LE_EXPR
13671 && ((code0 == PLUS_EXPR && is_positive > 0)
13672 || (code0 == MINUS_EXPR && is_positive < 0)))
13673 {
13674 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13675 fold_overflow_warning (("assuming signed overflow does "
13676 "not occur when assuming that "
13677 "(X + c) <= X is always false"),
13678 WARN_STRICT_OVERFLOW_ALL);
13679 return constant_boolean_node (0, type);
13680 }
13681
13682 if (code == GE_EXPR
13683 && ((code0 == MINUS_EXPR && is_positive > 0)
13684 || (code0 == PLUS_EXPR && is_positive < 0)))
13685 {
13686 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13687 fold_overflow_warning (("assuming signed overflow does "
13688 "not occur when assuming that "
13689 "(X - c) >= X is always false"),
13690 WARN_STRICT_OVERFLOW_ALL);
13691 return constant_boolean_node (0, type);
13692 }
13693 }
13694 }
13695
13696 /* Comparisons with the highest or lowest possible integer of
13697 the specified precision will have known values. */
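      /* E.g. for a 32-bit unsigned X, X > 0xffffffffU folds to false,
	 X <= 0xffffffffU folds to true, and X >= 0xffffffffU becomes
	 X == 0xffffffffU.  */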
13698 {
13699 tree arg1_type = TREE_TYPE (arg1);
13700 unsigned int width = TYPE_PRECISION (arg1_type);
13701
13702 if (TREE_CODE (arg1) == INTEGER_CST
13703 && width <= HOST_BITS_PER_DOUBLE_INT
13704 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13705 {
13706 HOST_WIDE_INT signed_max_hi;
13707 unsigned HOST_WIDE_INT signed_max_lo;
13708 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13709
13710 if (width <= HOST_BITS_PER_WIDE_INT)
13711 {
13712 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13713 - 1;
13714 signed_max_hi = 0;
13715 max_hi = 0;
13716
13717 if (TYPE_UNSIGNED (arg1_type))
13718 {
13719 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13720 min_lo = 0;
13721 min_hi = 0;
13722 }
13723 else
13724 {
13725 max_lo = signed_max_lo;
13726 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13727 min_hi = -1;
13728 }
13729 }
13730 else
13731 {
13732 width -= HOST_BITS_PER_WIDE_INT;
13733 signed_max_lo = -1;
13734 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13735 - 1;
13736 max_lo = -1;
13737 min_lo = 0;
13738
13739 if (TYPE_UNSIGNED (arg1_type))
13740 {
13741 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13742 min_hi = 0;
13743 }
13744 else
13745 {
13746 max_hi = signed_max_hi;
13747 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13748 }
13749 }
13750
13751 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13752 && TREE_INT_CST_LOW (arg1) == max_lo)
13753 switch (code)
13754 {
13755 case GT_EXPR:
13756 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13757
13758 case GE_EXPR:
13759 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13760
13761 case LE_EXPR:
13762 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13763
13764 case LT_EXPR:
13765 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13766
13767 /* The GE_EXPR and LT_EXPR cases above are not normally
13768 reached because of previous transformations. */
13769
13770 default:
13771 break;
13772 }
13773 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13774 == max_hi
13775 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13776 switch (code)
13777 {
13778 case GT_EXPR:
13779 arg1 = const_binop (PLUS_EXPR, arg1,
13780 build_int_cst (TREE_TYPE (arg1), 1));
13781 return fold_build2_loc (loc, EQ_EXPR, type,
13782 fold_convert_loc (loc,
13783 TREE_TYPE (arg1), arg0),
13784 arg1);
13785 case LE_EXPR:
13786 arg1 = const_binop (PLUS_EXPR, arg1,
13787 build_int_cst (TREE_TYPE (arg1), 1));
13788 return fold_build2_loc (loc, NE_EXPR, type,
13789 fold_convert_loc (loc, TREE_TYPE (arg1),
13790 arg0),
13791 arg1);
13792 default:
13793 break;
13794 }
13795 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13796 == min_hi
13797 && TREE_INT_CST_LOW (arg1) == min_lo)
13798 switch (code)
13799 {
13800 case LT_EXPR:
13801 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13802
13803 case LE_EXPR:
13804 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13805
13806 case GE_EXPR:
13807 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13808
13809 case GT_EXPR:
13810 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13811
13812 default:
13813 break;
13814 }
13815 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13816 == min_hi
13817 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13818 switch (code)
13819 {
13820 case GE_EXPR:
13821 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13822 return fold_build2_loc (loc, NE_EXPR, type,
13823 fold_convert_loc (loc,
13824 TREE_TYPE (arg1), arg0),
13825 arg1);
13826 case LT_EXPR:
13827 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13828 return fold_build2_loc (loc, EQ_EXPR, type,
13829 fold_convert_loc (loc, TREE_TYPE (arg1),
13830 arg0),
13831 arg1);
13832 default:
13833 break;
13834 }
13835
13836 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13837 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13838 && TYPE_UNSIGNED (arg1_type)
13839 /* We will flip the signedness of the comparison operator
13840 associated with the mode of arg1, so the sign bit is
13841 specified by this mode. Check that arg1 is the signed
13842 max associated with this sign bit. */
13843 && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13844 /* signed_type does not work on pointer types. */
13845 && INTEGRAL_TYPE_P (arg1_type))
13846 {
13847 /* The following case also applies to X < signed_max+1
13848              and X >= signed_max+1 because of previous transformations.  */
13849 if (code == LE_EXPR || code == GT_EXPR)
13850 {
13851 tree st = signed_type_for (arg1_type);
13852 return fold_build2_loc (loc,
13853 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13854 type, fold_convert_loc (loc, st, arg0),
13855 build_int_cst (st, 0));
13856 }
13857 }
13858 }
13859 }
13860
13861 /* If we are comparing an ABS_EXPR with a constant, we can
13862 convert all the cases into explicit comparisons, but they may
13863 well not be faster than doing the ABS and one comparison.
13864 But ABS (X) <= C is a range comparison, which becomes a subtraction
13865 and a comparison, and is probably faster. */
13866 if (code == LE_EXPR
13867 && TREE_CODE (arg1) == INTEGER_CST
13868 && TREE_CODE (arg0) == ABS_EXPR
13869 && ! TREE_SIDE_EFFECTS (arg0)
13870 && (0 != (tem = negate_expr (arg1)))
13871 && TREE_CODE (tem) == INTEGER_CST
13872 && !TREE_OVERFLOW (tem))
13873 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13874 build2 (GE_EXPR, type,
13875 TREE_OPERAND (arg0, 0), tem),
13876 build2 (LE_EXPR, type,
13877 TREE_OPERAND (arg0, 0), arg1));
13878
13879 /* Convert ABS_EXPR<x> >= 0 to true. */
13880 strict_overflow_p = false;
13881 if (code == GE_EXPR
13882 && (integer_zerop (arg1)
13883 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13884 && real_zerop (arg1)))
13885 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13886 {
13887 if (strict_overflow_p)
13888 fold_overflow_warning (("assuming signed overflow does not occur "
13889 "when simplifying comparison of "
13890 "absolute value and zero"),
13891 WARN_STRICT_OVERFLOW_CONDITIONAL);
13892 return omit_one_operand_loc (loc, type,
13893 constant_boolean_node (true, type),
13894 arg0);
13895 }
13896
13897 /* Convert ABS_EXPR<x> < 0 to false. */
13898 strict_overflow_p = false;
13899 if (code == LT_EXPR
13900 && (integer_zerop (arg1) || real_zerop (arg1))
13901 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13902 {
13903 if (strict_overflow_p)
13904 fold_overflow_warning (("assuming signed overflow does not occur "
13905 "when simplifying comparison of "
13906 "absolute value and zero"),
13907 WARN_STRICT_OVERFLOW_CONDITIONAL);
13908 return omit_one_operand_loc (loc, type,
13909 constant_boolean_node (false, type),
13910 arg0);
13911 }
13912
13913 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13914 and similarly for >= into !=. */
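      /* This holds because an unsigned X is below 1 << Y exactly when
	 all of its bits at positions Y and above are zero.  */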
13915 if ((code == LT_EXPR || code == GE_EXPR)
13916 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13917 && TREE_CODE (arg1) == LSHIFT_EXPR
13918 && integer_onep (TREE_OPERAND (arg1, 0)))
13919 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13920 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13921 TREE_OPERAND (arg1, 1)),
13922 build_zero_cst (TREE_TYPE (arg0)));
13923
13924       /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
13925          otherwise Y might be >= # of bits in X's type and thus e.g.
13926          (unsigned char) (1 << Y) for Y == 15 might be 0.
13927 If the cast is widening, then 1 << Y should have unsigned type,
13928          otherwise if Y is the number of bits in the signed shift type minus 1,
13929          we can't optimize this.  E.g. (unsigned long long) (1 << Y) for
13930          Y == 31 might be 0xffffffff80000000.  */
13931 if ((code == LT_EXPR || code == GE_EXPR)
13932 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13933 && CONVERT_EXPR_P (arg1)
13934 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13935 && (TYPE_PRECISION (TREE_TYPE (arg1))
13936 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13937 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13938 || (TYPE_PRECISION (TREE_TYPE (arg1))
13939 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13940 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13941 {
13942 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13943 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13944 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13945 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13946 build_zero_cst (TREE_TYPE (arg0)));
13947 }
13948
13949 return NULL_TREE;
13950
13951 case UNORDERED_EXPR:
13952 case ORDERED_EXPR:
13953 case UNLT_EXPR:
13954 case UNLE_EXPR:
13955 case UNGT_EXPR:
13956 case UNGE_EXPR:
13957 case UNEQ_EXPR:
13958 case LTGT_EXPR:
13959 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13960 {
13961 t1 = fold_relational_const (code, type, arg0, arg1);
13962 if (t1 != NULL_TREE)
13963 return t1;
13964 }
13965
13966 /* If the first operand is NaN, the result is constant. */
13967 if (TREE_CODE (arg0) == REAL_CST
13968 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13969 && (code != LTGT_EXPR || ! flag_trapping_math))
13970 {
13971 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13972 ? integer_zero_node
13973 : integer_one_node;
13974 return omit_one_operand_loc (loc, type, t1, arg1);
13975 }
13976
13977 /* If the second operand is NaN, the result is constant. */
13978 if (TREE_CODE (arg1) == REAL_CST
13979 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13980 && (code != LTGT_EXPR || ! flag_trapping_math))
13981 {
13982 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13983 ? integer_zero_node
13984 : integer_one_node;
13985 return omit_one_operand_loc (loc, type, t1, arg0);
13986 }
13987
13988 /* Simplify unordered comparison of something with itself. */
13989 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13990 && operand_equal_p (arg0, arg1, 0))
13991 return constant_boolean_node (1, type);
13992
13993 if (code == LTGT_EXPR
13994 && !flag_trapping_math
13995 && operand_equal_p (arg0, arg1, 0))
13996 return constant_boolean_node (0, type);
13997
13998 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13999 {
14000 tree targ0 = strip_float_extensions (arg0);
14001 tree targ1 = strip_float_extensions (arg1);
14002 tree newtype = TREE_TYPE (targ0);
14003
14004 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
14005 newtype = TREE_TYPE (targ1);
14006
14007 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
14008 return fold_build2_loc (loc, code, type,
14009 fold_convert_loc (loc, newtype, targ0),
14010 fold_convert_loc (loc, newtype, targ1));
14011 }
14012
14013 return NULL_TREE;
14014
14015 case COMPOUND_EXPR:
14016 /* When pedantic, a compound expression can be neither an lvalue
14017 nor an integer constant expression. */
14018 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
14019 return NULL_TREE;
14020       /* Don't let (0, 0) be a null pointer constant.  */
14021 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
14022 : fold_convert_loc (loc, type, arg1);
14023 return pedantic_non_lvalue_loc (loc, tem);
14024
14025 case COMPLEX_EXPR:
14026 if ((TREE_CODE (arg0) == REAL_CST
14027 && TREE_CODE (arg1) == REAL_CST)
14028 || (TREE_CODE (arg0) == INTEGER_CST
14029 && TREE_CODE (arg1) == INTEGER_CST))
14030 return build_complex (type, arg0, arg1);
14031 if (TREE_CODE (arg0) == REALPART_EXPR
14032 && TREE_CODE (arg1) == IMAGPART_EXPR
14033 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
14034 && operand_equal_p (TREE_OPERAND (arg0, 0),
14035 TREE_OPERAND (arg1, 0), 0))
14036 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
14037 TREE_OPERAND (arg1, 0));
14038 return NULL_TREE;
14039
14040 case ASSERT_EXPR:
14041 /* An ASSERT_EXPR should never be passed to fold_binary. */
14042 gcc_unreachable ();
14043
14044 case VEC_PACK_TRUNC_EXPR:
14045 case VEC_PACK_FIX_TRUNC_EXPR:
14046 {
14047 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14048 tree *elts;
14049
14050 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
14051 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
14052 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14053 return NULL_TREE;
14054
14055 elts = XALLOCAVEC (tree, nelts);
14056 if (!vec_cst_ctor_to_array (arg0, elts)
14057 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
14058 return NULL_TREE;
14059
14060 for (i = 0; i < nelts; i++)
14061 {
14062 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
14063 ? NOP_EXPR : FIX_TRUNC_EXPR,
14064 TREE_TYPE (type), elts[i]);
14065 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
14066 return NULL_TREE;
14067 }
14068
14069 return build_vector (type, elts);
14070 }
14071
14072 case VEC_WIDEN_MULT_LO_EXPR:
14073 case VEC_WIDEN_MULT_HI_EXPR:
14074 case VEC_WIDEN_MULT_EVEN_EXPR:
14075 case VEC_WIDEN_MULT_ODD_EXPR:
14076 {
14077 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
14078 unsigned int out, ofs, scale;
14079 tree *elts;
14080
14081 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
14082 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
14083 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14084 return NULL_TREE;
14085
14086 elts = XALLOCAVEC (tree, nelts * 4);
14087 if (!vec_cst_ctor_to_array (arg0, elts)
14088 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
14089 return NULL_TREE;
14090
14091 if (code == VEC_WIDEN_MULT_LO_EXPR)
14092 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
14093 else if (code == VEC_WIDEN_MULT_HI_EXPR)
14094 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14095 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14096 scale = 1, ofs = 0;
14097 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14098 scale = 1, ofs = 1;
14099
14100 for (out = 0; out < nelts; out++)
14101 {
14102 unsigned int in1 = (out << scale) + ofs;
14103 unsigned int in2 = in1 + nelts * 2;
14104 tree t1, t2;
14105
14106 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14107 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14108
14109 if (t1 == NULL_TREE || t2 == NULL_TREE)
14110 return NULL_TREE;
14111 elts[out] = const_binop (MULT_EXPR, t1, t2);
14112 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14113 return NULL_TREE;
14114 }
14115
14116 return build_vector (type, elts);
14117 }
14118
14119 default:
14120 return NULL_TREE;
14121 } /* switch (code) */
14122 }
14123
14124 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
14125 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
14126 of GOTO_EXPR. */
14127
14128 static tree
14129 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14130 {
14131 switch (TREE_CODE (*tp))
14132 {
14133 case LABEL_EXPR:
14134 return *tp;
14135
14136 case GOTO_EXPR:
14137 *walk_subtrees = 0;
14138
14139 /* ... fall through ... */
14140
14141 default:
14142 return NULL_TREE;
14143 }
14144 }
14145
14146 /* Return whether the sub-tree ST contains a label which is accessible from
14147 outside the sub-tree. */
14148
14149 static bool
14150 contains_label_p (tree st)
14151 {
14152 return
14153    (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
14154 }
14155
14156 /* Fold a ternary expression of code CODE and type TYPE with operands
14157 OP0, OP1, and OP2. Return the folded expression if folding is
14158 successful. Otherwise, return NULL_TREE. */
14159
14160 tree
14161 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14162 tree op0, tree op1, tree op2)
14163 {
14164 tree tem;
14165 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14166 enum tree_code_class kind = TREE_CODE_CLASS (code);
14167
14168 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14169 && TREE_CODE_LENGTH (code) == 3);
14170
14171 /* Strip any conversions that don't change the mode. This is safe
14172 for every expression, except for a comparison expression because
14173 its signedness is derived from its operands. So, in the latter
14174 case, only strip conversions that don't change the signedness.
14175
14176 Note that this is done as an internal manipulation within the
14177 constant folder, in order to find the simplest representation of
14178      the arguments so that their form can be studied.  In any case,
14179 the appropriate type conversions should be put back in the tree
14180 that will get out of the constant folder. */
14181 if (op0)
14182 {
14183 arg0 = op0;
14184 STRIP_NOPS (arg0);
14185 }
14186
14187 if (op1)
14188 {
14189 arg1 = op1;
14190 STRIP_NOPS (arg1);
14191 }
14192
14193 if (op2)
14194 {
14195 arg2 = op2;
14196 STRIP_NOPS (arg2);
14197 }
14198
14199 switch (code)
14200 {
14201 case COMPONENT_REF:
14202 if (TREE_CODE (arg0) == CONSTRUCTOR
14203 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14204 {
14205 unsigned HOST_WIDE_INT idx;
14206 tree field, value;
14207 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14208 if (field == arg1)
14209 return value;
14210 }
14211 return NULL_TREE;
14212
14213 case COND_EXPR:
14214 case VEC_COND_EXPR:
14215 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14216 so all simple results must be passed through pedantic_non_lvalue. */
14217 if (TREE_CODE (arg0) == INTEGER_CST)
14218 {
14219 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14220 tem = integer_zerop (arg0) ? op2 : op1;
14221 /* Only optimize constant conditions when the selected branch
14222 has the same type as the COND_EXPR. This avoids optimizing
14223 away "c ? x : throw", where the throw has a void type.
14224          Avoid throwing away the operand which contains a label.  */
14225 if ((!TREE_SIDE_EFFECTS (unused_op)
14226 || !contains_label_p (unused_op))
14227 && (! VOID_TYPE_P (TREE_TYPE (tem))
14228 || VOID_TYPE_P (type)))
14229 return pedantic_non_lvalue_loc (loc, tem);
14230 return NULL_TREE;
14231 }
14232 else if (TREE_CODE (arg0) == VECTOR_CST)
14233 {
14234 if (integer_all_onesp (arg0))
14235 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14236 if (integer_zerop (arg0))
14237 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14238
14239 if ((TREE_CODE (arg1) == VECTOR_CST
14240 || TREE_CODE (arg1) == CONSTRUCTOR)
14241 && (TREE_CODE (arg2) == VECTOR_CST
14242 || TREE_CODE (arg2) == CONSTRUCTOR))
14243 {
14244 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14245 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14246 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14247 for (i = 0; i < nelts; i++)
14248 {
14249 tree val = VECTOR_CST_ELT (arg0, i);
14250 if (integer_all_onesp (val))
14251 sel[i] = i;
14252 else if (integer_zerop (val))
14253 sel[i] = nelts + i;
14254 else /* Currently unreachable. */
14255 return NULL_TREE;
14256 }
14257 tree t = fold_vec_perm (type, arg1, arg2, sel);
14258 if (t != NULL_TREE)
14259 return t;
14260 }
14261 }
14262
14263 if (operand_equal_p (arg1, op2, 0))
14264 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14265
14266 /* If we have A op B ? A : C, we may be able to convert this to a
14267 simpler expression, depending on the operation and the values
14268 of B and C. Signed zeros prevent all of these transformations,
14269 for reasons given above each one.
14270
14271 Also try swapping the arguments and inverting the conditional. */
14272 if (COMPARISON_CLASS_P (arg0)
14273 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14274 arg1, TREE_OPERAND (arg0, 1))
14275 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14276 {
14277 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14278 if (tem)
14279 return tem;
14280 }
14281
14282 if (COMPARISON_CLASS_P (arg0)
14283 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14284 op2,
14285 TREE_OPERAND (arg0, 1))
14286 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14287 {
14288 location_t loc0 = expr_location_or (arg0, loc);
14289 tem = fold_invert_truthvalue (loc0, arg0);
14290 if (tem && COMPARISON_CLASS_P (tem))
14291 {
14292 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14293 if (tem)
14294 return tem;
14295 }
14296 }
14297
14298 /* If the second operand is simpler than the third, swap them
14299 since that produces better jump optimization results. */
14300 if (truth_value_p (TREE_CODE (arg0))
14301 && tree_swap_operands_p (op1, op2, false))
14302 {
14303 location_t loc0 = expr_location_or (arg0, loc);
14304 /* See if this can be inverted. If it can't, possibly because
14305 it was a floating-point inequality comparison, don't do
14306 anything. */
14307 tem = fold_invert_truthvalue (loc0, arg0);
14308 if (tem)
14309 return fold_build3_loc (loc, code, type, tem, op2, op1);
14310 }
14311
14312 /* Convert A ? 1 : 0 to simply A. */
14313 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14314 : (integer_onep (op1)
14315 && !VECTOR_TYPE_P (type)))
14316 && integer_zerop (op2)
14317 /* If we try to convert OP0 to our type, the
14318 call to fold will try to move the conversion inside
14319 a COND, which will recurse. In that case, the COND_EXPR
14320 is probably the best choice, so leave it alone. */
14321 && type == TREE_TYPE (arg0))
14322 return pedantic_non_lvalue_loc (loc, arg0);
14323
14324 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14325 over COND_EXPR in cases such as floating point comparisons. */
14326 if (integer_zerop (op1)
14327 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14328 : (integer_onep (op2)
14329 && !VECTOR_TYPE_P (type)))
14330 && truth_value_p (TREE_CODE (arg0)))
14331 return pedantic_non_lvalue_loc (loc,
14332 fold_convert_loc (loc, type,
14333 invert_truthvalue_loc (loc,
14334 arg0)));
14335
14336 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
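      /* E.g. for a 32-bit int A, A < 0 ? 0x80000000 : 0 folds to
	 A & 0x80000000, with the AND carried out in a suitably
	 signed or unsigned type as checked below.  */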
14337 if (TREE_CODE (arg0) == LT_EXPR
14338 && integer_zerop (TREE_OPERAND (arg0, 1))
14339 && integer_zerop (op2)
14340 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14341 {
14342 /* sign_bit_p looks through both zero and sign extensions,
14343 but for this optimization only sign extensions are
14344 usable. */
14345 tree tem2 = TREE_OPERAND (arg0, 0);
14346 while (tem != tem2)
14347 {
14348 if (TREE_CODE (tem2) != NOP_EXPR
14349 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14350 {
14351 tem = NULL_TREE;
14352 break;
14353 }
14354 tem2 = TREE_OPERAND (tem2, 0);
14355 }
14356 /* sign_bit_p only checks ARG1 bits within A's precision.
14357 If <sign bit of A> has wider type than A, bits outside
14358 of A's precision in <sign bit of A> need to be checked.
14359          If they are all 0, this optimization needs to be done
14360          in unsigned A's type; if they are all 1, in signed A's type;
14361          otherwise this can't be done.  */
14362 if (tem
14363 && TYPE_PRECISION (TREE_TYPE (tem))
14364 < TYPE_PRECISION (TREE_TYPE (arg1))
14365 && TYPE_PRECISION (TREE_TYPE (tem))
14366 < TYPE_PRECISION (type))
14367 {
14368 unsigned HOST_WIDE_INT mask_lo;
14369 HOST_WIDE_INT mask_hi;
14370 int inner_width, outer_width;
14371 tree tem_type;
14372
14373 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14374 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14375 if (outer_width > TYPE_PRECISION (type))
14376 outer_width = TYPE_PRECISION (type);
14377
14378 if (outer_width > HOST_BITS_PER_WIDE_INT)
14379 {
14380 mask_hi = (HOST_WIDE_INT_M1U
14381 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14382 mask_lo = -1;
14383 }
14384 else
14385 {
14386 mask_hi = 0;
14387 mask_lo = (HOST_WIDE_INT_M1U
14388 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14389 }
14390 if (inner_width > HOST_BITS_PER_WIDE_INT)
14391 {
14392 mask_hi &= ~(HOST_WIDE_INT_M1U
14393 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14394 mask_lo = 0;
14395 }
14396 else
14397 mask_lo &= ~(HOST_WIDE_INT_M1U
14398 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14399
14400 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14401 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14402 {
14403 tem_type = signed_type_for (TREE_TYPE (tem));
14404 tem = fold_convert_loc (loc, tem_type, tem);
14405 }
14406 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14407 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14408 {
14409 tem_type = unsigned_type_for (TREE_TYPE (tem));
14410 tem = fold_convert_loc (loc, tem_type, tem);
14411 }
14412 else
14413 tem = NULL;
14414 }
14415
14416 if (tem)
14417 return
14418 fold_convert_loc (loc, type,
14419 fold_build2_loc (loc, BIT_AND_EXPR,
14420 TREE_TYPE (tem), tem,
14421 fold_convert_loc (loc,
14422 TREE_TYPE (tem),
14423 arg1)));
14424 }
14425
14426 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14427 already handled above. */
14428 if (TREE_CODE (arg0) == BIT_AND_EXPR
14429 && integer_onep (TREE_OPERAND (arg0, 1))
14430 && integer_zerop (op2)
14431 && integer_pow2p (arg1))
14432 {
14433 tree tem = TREE_OPERAND (arg0, 0);
14434 STRIP_NOPS (tem);
14435 if (TREE_CODE (tem) == RSHIFT_EXPR
14436 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14437 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14438 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14439 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14440 TREE_OPERAND (tem, 0), arg1);
14441 }
14442
14443 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14444 is probably obsolete because the first operand should be a
14445 truth value (that's why we have the two cases above), but let's
14446 leave it in until we can confirm this for all front-ends. */
14447 if (integer_zerop (op2)
14448 && TREE_CODE (arg0) == NE_EXPR
14449 && integer_zerop (TREE_OPERAND (arg0, 1))
14450 && integer_pow2p (arg1)
14451 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14452 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14453 arg1, OEP_ONLY_CONST))
14454 return pedantic_non_lvalue_loc (loc,
14455 fold_convert_loc (loc, type,
14456 TREE_OPERAND (arg0, 0)));
14457
14458 /* Disable the transformations below for vectors, since
14459 fold_binary_op_with_conditional_arg may undo them immediately,
14460 yielding an infinite loop. */
14461 if (code == VEC_COND_EXPR)
14462 return NULL_TREE;
14463
14464 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14465 if (integer_zerop (op2)
14466 && truth_value_p (TREE_CODE (arg0))
14467 && truth_value_p (TREE_CODE (arg1))
14468 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14469 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14470 : TRUTH_ANDIF_EXPR,
14471 type, fold_convert_loc (loc, type, arg0), arg1);
14472
14473 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14474       if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14475 && truth_value_p (TREE_CODE (arg0))
14476 && truth_value_p (TREE_CODE (arg1))
14477 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14478 {
14479 location_t loc0 = expr_location_or (arg0, loc);
14480 /* Only perform transformation if ARG0 is easily inverted. */
14481 tem = fold_invert_truthvalue (loc0, arg0);
14482 if (tem)
14483 return fold_build2_loc (loc, code == VEC_COND_EXPR
14484 ? BIT_IOR_EXPR
14485 : TRUTH_ORIF_EXPR,
14486 type, fold_convert_loc (loc, type, tem),
14487 arg1);
14488 }
14489
14490 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14491 if (integer_zerop (arg1)
14492 && truth_value_p (TREE_CODE (arg0))
14493 && truth_value_p (TREE_CODE (op2))
14494 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14495 {
14496 location_t loc0 = expr_location_or (arg0, loc);
14497 /* Only perform transformation if ARG0 is easily inverted. */
14498 tem = fold_invert_truthvalue (loc0, arg0);
14499 if (tem)
14500 return fold_build2_loc (loc, code == VEC_COND_EXPR
14501 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14502 type, fold_convert_loc (loc, type, tem),
14503 op2);
14504 }
14505
14506 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14507       if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14508 && truth_value_p (TREE_CODE (arg0))
14509 && truth_value_p (TREE_CODE (op2))
14510 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14511 return fold_build2_loc (loc, code == VEC_COND_EXPR
14512 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14513 type, fold_convert_loc (loc, type, arg0), op2);
14514
14515 return NULL_TREE;
14516
14517 case CALL_EXPR:
14518 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14519 of fold_ternary on them. */
14520 gcc_unreachable ();
14521
14522 case BIT_FIELD_REF:
14523 if ((TREE_CODE (arg0) == VECTOR_CST
14524 || (TREE_CODE (arg0) == CONSTRUCTOR
14525 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14526 && (type == TREE_TYPE (TREE_TYPE (arg0))
14527 || (TREE_CODE (type) == VECTOR_TYPE
14528 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14529 {
14530 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14531 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14532 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14533 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14534
14535 if (n != 0
14536 && (idx % width) == 0
14537 && (n % width) == 0
14538 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14539 {
14540 idx = idx / width;
14541 n = n / width;
14542
14543 if (TREE_CODE (arg0) == VECTOR_CST)
14544 {
14545 if (n == 1)
14546 return VECTOR_CST_ELT (arg0, idx);
14547
14548 tree *vals = XALLOCAVEC (tree, n);
14549 for (unsigned i = 0; i < n; ++i)
14550 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14551 return build_vector (type, vals);
14552 }
14553
14554 /* Constructor elements can be subvectors. */
14555 unsigned HOST_WIDE_INT k = 1;
14556 if (CONSTRUCTOR_NELTS (arg0) != 0)
14557 {
14558 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14559 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14560 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14561 }
14562
14563 /* We keep an exact subset of the constructor elements. */
14564 if ((idx % k) == 0 && (n % k) == 0)
14565 {
14566 if (CONSTRUCTOR_NELTS (arg0) == 0)
14567 return build_constructor (type, NULL);
14568 idx /= k;
14569 n /= k;
14570 if (n == 1)
14571 {
14572 if (idx < CONSTRUCTOR_NELTS (arg0))
14573 return CONSTRUCTOR_ELT (arg0, idx)->value;
14574 return build_zero_cst (type);
14575 }
14576
14577 vec<constructor_elt, va_gc> *vals;
14578 vec_alloc (vals, n);
14579 for (unsigned i = 0;
14580 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14581 ++i)
14582 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14583 CONSTRUCTOR_ELT
14584 (arg0, idx + i)->value);
14585 return build_constructor (type, vals);
14586 }
14587 /* The bitfield references a single constructor element. */
14588 else if (idx + n <= (idx / k + 1) * k)
14589 {
14590 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14591 return build_zero_cst (type);
14592 else if (n == k)
14593 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14594 else
14595 return fold_build3_loc (loc, code, type,
14596 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14597 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14598 }
14599 }
14600 }
14601
14602 /* A bit-field-ref that referenced the full argument can be stripped. */
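      /* E.g. BIT_FIELD_REF <X, 32, 0> of a 32-bit integer X is just X
	 converted to the result type.  */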
14603 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14604 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14605 && integer_zerop (op2))
14606 return fold_convert_loc (loc, type, arg0);
14607
14608 /* On constants we can use native encode/interpret to constant
14609 fold (nearly) all BIT_FIELD_REFs. */
14610 if (CONSTANT_CLASS_P (arg0)
14611 && can_native_interpret_type_p (type)
14612 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14613 /* This limitation should not be necessary, we just need to
14614 round this up to mode size. */
14615 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14616 /* Need bit-shifting of the buffer to relax the following. */
14617 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14618 {
14619 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14620 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14621 unsigned HOST_WIDE_INT clen;
14622 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14623 /* ??? We cannot tell native_encode_expr to start at
14624 some random byte only. So limit us to a reasonable amount
14625 of work. */
14626 if (clen <= 4096)
14627 {
14628 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14629 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14630 if (len > 0
14631 && len * BITS_PER_UNIT >= bitpos + bitsize)
14632 {
14633 tree v = native_interpret_expr (type,
14634 b + bitpos / BITS_PER_UNIT,
14635 bitsize / BITS_PER_UNIT);
14636 if (v)
14637 return v;
14638 }
14639 }
14640 }
14641
14642 return NULL_TREE;
14643
14644 case FMA_EXPR:
14645 /* For integers we can decompose the FMA if possible. */
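      /* E.g. FMA_EXPR <2, 3, Z> becomes 6 + Z, and FMA_EXPR <X, Y, 0>
	 becomes X * Y.  */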
14646 if (TREE_CODE (arg0) == INTEGER_CST
14647 && TREE_CODE (arg1) == INTEGER_CST)
14648 return fold_build2_loc (loc, PLUS_EXPR, type,
14649 const_binop (MULT_EXPR, arg0, arg1), arg2);
14650 if (integer_zerop (arg2))
14651 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14652
14653 return fold_fma (loc, type, arg0, arg1, arg2);
14654
14655 case VEC_PERM_EXPR:
14656 if (TREE_CODE (arg2) == VECTOR_CST)
14657 {
14658 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14659 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14660 tree t;
14661 bool need_mask_canon = false;
14662 bool all_in_vec0 = true;
14663 bool all_in_vec1 = true;
14664 bool maybe_identity = true;
14665 bool single_arg = (op0 == op1);
14666 bool changed = false;
14667
14668 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14669 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14670 for (i = 0; i < nelts; i++)
14671 {
14672 tree val = VECTOR_CST_ELT (arg2, i);
14673 if (TREE_CODE (val) != INTEGER_CST)
14674 return NULL_TREE;
14675
14676 sel[i] = TREE_INT_CST_LOW (val) & mask;
14677 if (TREE_INT_CST_HIGH (val)
14678 || ((unsigned HOST_WIDE_INT)
14679 TREE_INT_CST_LOW (val) != sel[i]))
14680 need_mask_canon = true;
14681
14682 if (sel[i] < nelts)
14683 all_in_vec1 = false;
14684 else
14685 all_in_vec0 = false;
14686
14687 if ((sel[i] & (nelts-1)) != i)
14688 maybe_identity = false;
14689 }
14690
14691 if (maybe_identity)
14692 {
14693 if (all_in_vec0)
14694 return op0;
14695 if (all_in_vec1)
14696 return op1;
14697 }
14698
14699 if (all_in_vec0)
14700 op1 = op0;
14701 else if (all_in_vec1)
14702 {
14703 op0 = op1;
14704 for (i = 0; i < nelts; i++)
14705 sel[i] -= nelts;
14706 need_mask_canon = true;
14707 }
14708
14709 if ((TREE_CODE (op0) == VECTOR_CST
14710 || TREE_CODE (op0) == CONSTRUCTOR)
14711 && (TREE_CODE (op1) == VECTOR_CST
14712 || TREE_CODE (op1) == CONSTRUCTOR))
14713 {
14714 t = fold_vec_perm (type, op0, op1, sel);
14715 if (t != NULL_TREE)
14716 return t;
14717 }
14718
14719 if (op0 == op1 && !single_arg)
14720 changed = true;
14721
14722 if (need_mask_canon && arg2 == op2)
14723 {
14724 tree *tsel = XALLOCAVEC (tree, nelts);
14725 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14726 for (i = 0; i < nelts; i++)
14727 tsel[i] = build_int_cst (eltype, sel[i]);
14728 op2 = build_vector (TREE_TYPE (arg2), tsel);
14729 changed = true;
14730 }
14731
14732 if (changed)
14733 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14734 }
14735 return NULL_TREE;
14736
14737 default:
14738 return NULL_TREE;
14739 } /* switch (code) */
14740 }
14741
14742 /* Perform constant folding and related simplification of EXPR.
14743 The related simplifications include x*1 => x, x*0 => 0, etc.,
14744 and application of the associative law.
14745 NOP_EXPR conversions may be removed freely (as long as we
14746 are careful not to change the type of the overall expression).
14747 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14748 but we can constant-fold them if they have constant operands. */
14749
14750 #ifdef ENABLE_FOLD_CHECKING
14751 # define fold(x) fold_1 (x)
14752 static tree fold_1 (tree);
14753 static
14754 #endif
14755 tree
14756 fold (tree expr)
14757 {
14758 const tree t = expr;
14759 enum tree_code code = TREE_CODE (t);
14760 enum tree_code_class kind = TREE_CODE_CLASS (code);
14761 tree tem;
14762 location_t loc = EXPR_LOCATION (expr);
14763
14764 /* Return right away if a constant. */
14765 if (kind == tcc_constant)
14766 return t;
14767
14768 /* CALL_EXPR-like objects with variable numbers of operands are
14769 treated specially. */
14770 if (kind == tcc_vl_exp)
14771 {
14772 if (code == CALL_EXPR)
14773 {
14774 tem = fold_call_expr (loc, expr, false);
14775 return tem ? tem : expr;
14776 }
14777 return expr;
14778 }
14779
14780 if (IS_EXPR_CODE_CLASS (kind))
14781 {
14782 tree type = TREE_TYPE (t);
14783 tree op0, op1, op2;
14784
14785 switch (TREE_CODE_LENGTH (code))
14786 {
14787 case 1:
14788 op0 = TREE_OPERAND (t, 0);
14789 tem = fold_unary_loc (loc, code, type, op0);
14790 return tem ? tem : expr;
14791 case 2:
14792 op0 = TREE_OPERAND (t, 0);
14793 op1 = TREE_OPERAND (t, 1);
14794 tem = fold_binary_loc (loc, code, type, op0, op1);
14795 return tem ? tem : expr;
14796 case 3:
14797 op0 = TREE_OPERAND (t, 0);
14798 op1 = TREE_OPERAND (t, 1);
14799 op2 = TREE_OPERAND (t, 2);
14800 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14801 return tem ? tem : expr;
14802 default:
14803 break;
14804 }
14805 }
14806
14807 switch (code)
14808 {
14809 case ARRAY_REF:
14810 {
14811 tree op0 = TREE_OPERAND (t, 0);
14812 tree op1 = TREE_OPERAND (t, 1);
14813
14814 if (TREE_CODE (op1) == INTEGER_CST
14815 && TREE_CODE (op0) == CONSTRUCTOR
14816 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14817 {
14818 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14819 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14820 unsigned HOST_WIDE_INT begin = 0;
14821
14822 /* Find a matching index by means of a binary search. */
14823 while (begin != end)
14824 {
14825 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14826 tree index = (*elts)[middle].index;
14827
14828 if (TREE_CODE (index) == INTEGER_CST
14829 && tree_int_cst_lt (index, op1))
14830 begin = middle + 1;
14831 else if (TREE_CODE (index) == INTEGER_CST
14832 && tree_int_cst_lt (op1, index))
14833 end = middle;
14834 else if (TREE_CODE (index) == RANGE_EXPR
14835 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14836 begin = middle + 1;
14837 else if (TREE_CODE (index) == RANGE_EXPR
14838 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14839 end = middle;
14840 else
14841 return (*elts)[middle].value;
14842 }
14843 }
14844
14845 return t;
14846 }
14847
14848 /* Return a VECTOR_CST if possible. */
14849 case CONSTRUCTOR:
14850 {
14851 tree type = TREE_TYPE (t);
14852 if (TREE_CODE (type) != VECTOR_TYPE)
14853 return t;
14854
14855 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14856 unsigned HOST_WIDE_INT idx, pos = 0;
14857 tree value;
14858
14859 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14860 {
14861 if (!CONSTANT_CLASS_P (value))
14862 return t;
14863 if (TREE_CODE (value) == VECTOR_CST)
14864 {
14865 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14866 vec[pos++] = VECTOR_CST_ELT (value, i);
14867 }
14868 else
14869 vec[pos++] = value;
14870 }
14871 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14872 vec[pos] = build_zero_cst (TREE_TYPE (type));
14873
14874 return build_vector (type, vec);
14875 }
14876
14877 case CONST_DECL:
14878 return fold (DECL_INITIAL (t));
14879
14880 default:
14881 return t;
14882 } /* switch (code) */
14883 }
14884
14885 #ifdef ENABLE_FOLD_CHECKING
14886 #undef fold
14887
14888 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14889 hash_table <pointer_hash <tree_node> >);
14890 static void fold_check_failed (const_tree, const_tree);
14891 void print_fold_checksum (const_tree);
14892
14893 /* When --enable-checking=fold, compute a digest of expr before
14894    and after the actual fold call to verify that fold did not
14895    accidentally change the original expr.  */
14896
14897 tree
14898 fold (tree expr)
14899 {
14900 tree ret;
14901 struct md5_ctx ctx;
14902 unsigned char checksum_before[16], checksum_after[16];
14903 hash_table <pointer_hash <tree_node> > ht;
14904
14905 ht.create (32);
14906 md5_init_ctx (&ctx);
14907 fold_checksum_tree (expr, &ctx, ht);
14908 md5_finish_ctx (&ctx, checksum_before);
14909 ht.empty ();
14910
14911 ret = fold_1 (expr);
14912
14913 md5_init_ctx (&ctx);
14914 fold_checksum_tree (expr, &ctx, ht);
14915 md5_finish_ctx (&ctx, checksum_after);
14916 ht.dispose ();
14917
14918 if (memcmp (checksum_before, checksum_after, 16))
14919 fold_check_failed (expr, ret);
14920
14921 return ret;
14922 }
14923
14924 void
14925 print_fold_checksum (const_tree expr)
14926 {
14927 struct md5_ctx ctx;
14928 unsigned char checksum[16], cnt;
14929 hash_table <pointer_hash <tree_node> > ht;
14930
14931 ht.create (32);
14932 md5_init_ctx (&ctx);
14933 fold_checksum_tree (expr, &ctx, ht);
14934 md5_finish_ctx (&ctx, checksum);
14935 ht.dispose ();
14936 for (cnt = 0; cnt < 16; ++cnt)
14937 fprintf (stderr, "%02x", checksum[cnt]);
14938 putc ('\n', stderr);
14939 }
14940
14941 static void
14942 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14943 {
14944 internal_error ("fold check: original tree changed by fold");
14945 }
14946
14947 static void
14948 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14949 hash_table <pointer_hash <tree_node> > ht)
14950 {
14951 tree_node **slot;
14952 enum tree_code code;
14953 union tree_node buf;
14954 int i, len;
14955
14956 recursive_label:
14957 if (expr == NULL)
14958 return;
14959 slot = ht.find_slot (expr, INSERT);
14960 if (*slot != NULL)
14961 return;
14962 *slot = CONST_CAST_TREE (expr);
14963 code = TREE_CODE (expr);
14964 if (TREE_CODE_CLASS (code) == tcc_declaration
14965 && DECL_ASSEMBLER_NAME_SET_P (expr))
14966 {
14967 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14968 memcpy ((char *) &buf, expr, tree_size (expr));
14969       SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
14970 expr = (tree) &buf;
14971 }
14972 else if (TREE_CODE_CLASS (code) == tcc_type
14973 && (TYPE_POINTER_TO (expr)
14974 || TYPE_REFERENCE_TO (expr)
14975 || TYPE_CACHED_VALUES_P (expr)
14976 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14977 || TYPE_NEXT_VARIANT (expr)))
14978 {
14979 /* Allow these fields to be modified. */
14980 tree tmp;
14981 memcpy ((char *) &buf, expr, tree_size (expr));
14982 expr = tmp = (tree) &buf;
14983 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14984 TYPE_POINTER_TO (tmp) = NULL;
14985 TYPE_REFERENCE_TO (tmp) = NULL;
14986 TYPE_NEXT_VARIANT (tmp) = NULL;
14987 if (TYPE_CACHED_VALUES_P (tmp))
14988 {
14989 TYPE_CACHED_VALUES_P (tmp) = 0;
14990 TYPE_CACHED_VALUES (tmp) = NULL;
14991 }
14992 }
14993 md5_process_bytes (expr, tree_size (expr), ctx);
14994 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14995 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14996 if (TREE_CODE_CLASS (code) != tcc_type
14997 && TREE_CODE_CLASS (code) != tcc_declaration
14998 && code != TREE_LIST
14999 && code != SSA_NAME
15000 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
15001 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
15002 switch (TREE_CODE_CLASS (code))
15003 {
15004 case tcc_constant:
15005 switch (code)
15006 {
15007 case STRING_CST:
15008 md5_process_bytes (TREE_STRING_POINTER (expr),
15009 TREE_STRING_LENGTH (expr), ctx);
15010 break;
15011 case COMPLEX_CST:
15012 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
15013 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
15014 break;
15015 case VECTOR_CST:
15016 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
15017 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
15018 break;
15019 default:
15020 break;
15021 }
15022 break;
15023 case tcc_exceptional:
15024 switch (code)
15025 {
15026 case TREE_LIST:
15027 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
15028 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
15029 expr = TREE_CHAIN (expr);
15030 goto recursive_label;
15031 break;
15032 case TREE_VEC:
15033 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
15034 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
15035 break;
15036 default:
15037 break;
15038 }
15039 break;
15040 case tcc_expression:
15041 case tcc_reference:
15042 case tcc_comparison:
15043 case tcc_unary:
15044 case tcc_binary:
15045 case tcc_statement:
15046 case tcc_vl_exp:
15047 len = TREE_OPERAND_LENGTH (expr);
15048 for (i = 0; i < len; ++i)
15049 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
15050 break;
15051 case tcc_declaration:
15052 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
15053 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
15054 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
15055 {
15056 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
15057 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
15058 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
15059 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
15060 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
15061 }
15062 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
15063 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
15064
15065 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
15066 {
15067 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
15068 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
15069 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
15070 }
15071 break;
15072 case tcc_type:
15073 if (TREE_CODE (expr) == ENUMERAL_TYPE)
15074 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
15075 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
15076 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
15077 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
15078 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
15079 if (INTEGRAL_TYPE_P (expr)
15080 || SCALAR_FLOAT_TYPE_P (expr))
15081 {
15082 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
15083 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
15084 }
15085 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
15086 if (TREE_CODE (expr) == RECORD_TYPE
15087 || TREE_CODE (expr) == UNION_TYPE
15088 || TREE_CODE (expr) == QUAL_UNION_TYPE)
15089 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
15090 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
15091 break;
15092 default:
15093 break;
15094 }
15095 }
15096
15097 /* Helper function for outputting the checksum of a tree T. When
15098 debugging with gdb, you can "define mynext" to be "next" followed
15099 by "call debug_fold_checksum (op0)", then just trace down till the
15100 outputs differ. */
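/* A hypothetical session (op0 and the digest bytes below are
   illustrative only):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext
     94 203 18 77 ...

   Stepping with "mynext" until two consecutive digests differ
   pinpoints the statement that mutated the tree.  */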
15101
15102 DEBUG_FUNCTION void
15103 debug_fold_checksum (const_tree t)
15104 {
15105 int i;
15106 unsigned char checksum[16];
15107 struct md5_ctx ctx;
15108 hash_table <pointer_hash <tree_node> > ht;
15109 ht.create (32);
15110
15111 md5_init_ctx (&ctx);
15112 fold_checksum_tree (t, &ctx, ht);
15113 md5_finish_ctx (&ctx, checksum);
15114 ht.empty ();
15115
15116 for (i = 0; i < 16; i++)
15117 fprintf (stderr, "%d ", checksum[i]);
15118
15119 fprintf (stderr, "\n");
15120 }
15121
15122 #endif
15123
15124 /* Fold a unary tree expression with code CODE of type TYPE with an
15125 operand OP0. LOC is the location of the resulting expression.
15126 Return a folded expression if successful. Otherwise, return a tree
15127 expression with code CODE of type TYPE with an operand OP0. */
15128
15129 tree
15130 fold_build1_stat_loc (location_t loc,
15131 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15132 {
15133 tree tem;
15134 #ifdef ENABLE_FOLD_CHECKING
15135 unsigned char checksum_before[16], checksum_after[16];
15136 struct md5_ctx ctx;
15137 hash_table <pointer_hash <tree_node> > ht;
15138
15139 ht.create (32);
15140 md5_init_ctx (&ctx);
15141 fold_checksum_tree (op0, &ctx, ht);
15142 md5_finish_ctx (&ctx, checksum_before);
15143 ht.empty ();
15144 #endif
15145
15146 tem = fold_unary_loc (loc, code, type, op0);
15147 if (!tem)
15148 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15149
15150 #ifdef ENABLE_FOLD_CHECKING
15151 md5_init_ctx (&ctx);
15152 fold_checksum_tree (op0, &ctx, ht);
15153 md5_finish_ctx (&ctx, checksum_after);
15154 ht.dispose ();
15155
15156 if (memcmp (checksum_before, checksum_after, 16))
15157 fold_check_failed (op0, tem);
15158 #endif
15159 return tem;
15160 }
15161
15162 /* Fold a binary tree expression with code CODE of type TYPE with
15163 operands OP0 and OP1. LOC is the location of the resulting
15164 expression. Return a folded expression if successful. Otherwise,
15165 return a tree expression with code CODE of type TYPE with operands
15166 OP0 and OP1. */
15167
15168 tree
15169 fold_build2_stat_loc (location_t loc,
15170 enum tree_code code, tree type, tree op0, tree op1
15171 MEM_STAT_DECL)
15172 {
15173 tree tem;
15174 #ifdef ENABLE_FOLD_CHECKING
15175 unsigned char checksum_before_op0[16],
15176 checksum_before_op1[16],
15177 checksum_after_op0[16],
15178 checksum_after_op1[16];
15179 struct md5_ctx ctx;
15180 hash_table <pointer_hash <tree_node> > ht;
15181
15182 ht.create (32);
15183 md5_init_ctx (&ctx);
15184 fold_checksum_tree (op0, &ctx, ht);
15185 md5_finish_ctx (&ctx, checksum_before_op0);
15186 ht.empty ();
15187
15188 md5_init_ctx (&ctx);
15189 fold_checksum_tree (op1, &ctx, ht);
15190 md5_finish_ctx (&ctx, checksum_before_op1);
15191 ht.empty ();
15192 #endif
15193
15194 tem = fold_binary_loc (loc, code, type, op0, op1);
15195 if (!tem)
15196 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15197
15198 #ifdef ENABLE_FOLD_CHECKING
15199 md5_init_ctx (&ctx);
15200 fold_checksum_tree (op0, &ctx, ht);
15201 md5_finish_ctx (&ctx, checksum_after_op0);
15202 ht.empty ();
15203
15204 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15205 fold_check_failed (op0, tem);
15206
15207 md5_init_ctx (&ctx);
15208 fold_checksum_tree (op1, &ctx, ht);
15209 md5_finish_ctx (&ctx, checksum_after_op1);
15210 ht.dispose ();
15211
15212 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15213 fold_check_failed (op1, tem);
15214 #endif
15215 return tem;
15216 }
15217
15218 /* Fold a ternary tree expression with code CODE of type TYPE with
15219 operands OP0, OP1, and OP2. Return a folded expression if
15220 successful. Otherwise, return a tree expression with code CODE of
15221 type TYPE with operands OP0, OP1, and OP2. */
15222
15223 tree
15224 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15225 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15226 {
15227 tree tem;
15228 #ifdef ENABLE_FOLD_CHECKING
15229 unsigned char checksum_before_op0[16],
15230 checksum_before_op1[16],
15231 checksum_before_op2[16],
15232 checksum_after_op0[16],
15233 checksum_after_op1[16],
15234 checksum_after_op2[16];
15235 struct md5_ctx ctx;
15236 hash_table <pointer_hash <tree_node> > ht;
15237
15238 ht.create (32);
15239 md5_init_ctx (&ctx);
15240 fold_checksum_tree (op0, &ctx, ht);
15241 md5_finish_ctx (&ctx, checksum_before_op0);
15242 ht.empty ();
15243
15244 md5_init_ctx (&ctx);
15245 fold_checksum_tree (op1, &ctx, ht);
15246 md5_finish_ctx (&ctx, checksum_before_op1);
15247 ht.empty ();
15248
15249 md5_init_ctx (&ctx);
15250 fold_checksum_tree (op2, &ctx, ht);
15251 md5_finish_ctx (&ctx, checksum_before_op2);
15252 ht.empty ();
15253 #endif
15254
15255 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15256 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15257 if (!tem)
15258 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15259
15260 #ifdef ENABLE_FOLD_CHECKING
15261 md5_init_ctx (&ctx);
15262 fold_checksum_tree (op0, &ctx, ht);
15263 md5_finish_ctx (&ctx, checksum_after_op0);
15264 ht.empty ();
15265
15266 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15267 fold_check_failed (op0, tem);
15268
15269 md5_init_ctx (&ctx);
15270 fold_checksum_tree (op1, &ctx, ht);
15271 md5_finish_ctx (&ctx, checksum_after_op1);
15272 ht.empty ();
15273
15274 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15275 fold_check_failed (op1, tem);
15276
15277 md5_init_ctx (&ctx);
15278 fold_checksum_tree (op2, &ctx, ht);
15279 md5_finish_ctx (&ctx, checksum_after_op2);
15280 ht.dispose ();
15281
15282 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15283 fold_check_failed (op2, tem);
15284 #endif
15285 return tem;
15286 }
15287
15288 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
15289 arguments in ARGARRAY, and a null static chain.
15290 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15291 of type TYPE from the given operands as constructed by build_call_array. */
15292
15293 tree
15294 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15295 int nargs, tree *argarray)
15296 {
15297 tree tem;
15298 #ifdef ENABLE_FOLD_CHECKING
15299 unsigned char checksum_before_fn[16],
15300 checksum_before_arglist[16],
15301 checksum_after_fn[16],
15302 checksum_after_arglist[16];
15303 struct md5_ctx ctx;
15304 hash_table <pointer_hash <tree_node> > ht;
15305 int i;
15306
15307 ht.create (32);
15308 md5_init_ctx (&ctx);
15309 fold_checksum_tree (fn, &ctx, ht);
15310 md5_finish_ctx (&ctx, checksum_before_fn);
15311 ht.empty ();
15312
15313 md5_init_ctx (&ctx);
15314 for (i = 0; i < nargs; i++)
15315 fold_checksum_tree (argarray[i], &ctx, ht);
15316 md5_finish_ctx (&ctx, checksum_before_arglist);
15317 ht.empty ();
15318 #endif
15319
15320 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15321
15322 #ifdef ENABLE_FOLD_CHECKING
15323 md5_init_ctx (&ctx);
15324 fold_checksum_tree (fn, &ctx, ht);
15325 md5_finish_ctx (&ctx, checksum_after_fn);
15326 ht.empty ();
15327
15328 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15329 fold_check_failed (fn, tem);
15330
15331 md5_init_ctx (&ctx);
15332 for (i = 0; i < nargs; i++)
15333 fold_checksum_tree (argarray[i], &ctx, ht);
15334 md5_finish_ctx (&ctx, checksum_after_arglist);
15335 ht.dispose ();
15336
15337 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15338 fold_check_failed (NULL_TREE, tem);
15339 #endif
15340 return tem;
15341 }
15342
15343 /* Perform constant folding and related simplification of initializer
15344 expression EXPR. These behave identically to "fold_buildN" but ignore
15345 potential run-time traps and exceptions that fold must preserve. */
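/* For example, folding the initializer of

     static double d = 1.0 / 3.0;

   evaluates the division at compile time even under -frounding-math
   or -ftrapping-math, since a static initializer cannot trap or
   observe the rounding mode at run time.  (A sketch of the intent;
   the exact flags suppressed are listed in START_FOLD_INIT below.)  */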
15346
15347 #define START_FOLD_INIT \
15348 int saved_signaling_nans = flag_signaling_nans;\
15349 int saved_trapping_math = flag_trapping_math;\
15350 int saved_rounding_math = flag_rounding_math;\
15351 int saved_trapv = flag_trapv;\
15352 int saved_folding_initializer = folding_initializer;\
15353 flag_signaling_nans = 0;\
15354 flag_trapping_math = 0;\
15355 flag_rounding_math = 0;\
15356 flag_trapv = 0;\
15357 folding_initializer = 1;
15358
15359 #define END_FOLD_INIT \
15360 flag_signaling_nans = saved_signaling_nans;\
15361 flag_trapping_math = saved_trapping_math;\
15362 flag_rounding_math = saved_rounding_math;\
15363 flag_trapv = saved_trapv;\
15364 folding_initializer = saved_folding_initializer;
15365
15366 tree
15367 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15368 tree type, tree op)
15369 {
15370 tree result;
15371 START_FOLD_INIT;
15372
15373 result = fold_build1_loc (loc, code, type, op);
15374
15375 END_FOLD_INIT;
15376 return result;
15377 }
15378
15379 tree
15380 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15381 tree type, tree op0, tree op1)
15382 {
15383 tree result;
15384 START_FOLD_INIT;
15385
15386 result = fold_build2_loc (loc, code, type, op0, op1);
15387
15388 END_FOLD_INIT;
15389 return result;
15390 }
15391
15392 tree
15393 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15394 int nargs, tree *argarray)
15395 {
15396 tree result;
15397 START_FOLD_INIT;
15398
15399 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15400
15401 END_FOLD_INIT;
15402 return result;
15403 }
15404
15405 #undef START_FOLD_INIT
15406 #undef END_FOLD_INIT
15407
15408 /* Determine if first argument is a multiple of second argument. Return 0 if
15409 it is not, or if we cannot easily determine it to be.
15410
15411 An example of the sort of thing we care about (at this point; this routine
15412 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15413 fold cases do now) is discovering that
15414
15415 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15416
15417 is a multiple of
15418
15419 SAVE_EXPR (J * 8)
15420
15421 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15422
15423 This code also handles discovering that
15424
15425 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15426
15427 is a multiple of 8 so we don't have to worry about dealing with a
15428 possible remainder.
15429
15430 Note that we *look* inside a SAVE_EXPR only to determine how it was
15431 calculated; it is not safe for fold to do much of anything else with the
15432 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15433 at run time. For example, the latter example above *cannot* be implemented
15434 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15435 evaluation time of the original SAVE_EXPR is not necessarily the same at
15436 the time the new expression is evaluated. The only optimization of this
15437 sort that would be valid is changing
15438
15439 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15440
15441 divided by 8 to
15442
15443 SAVE_EXPR (I) * SAVE_EXPR (J)
15444
15445 (where the same SAVE_EXPR (J) is used in the original and the
15446 transformed version). */
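/* A minimal usage sketch (OFFSET_EXPR is an illustrative name for
   some tree the caller already has):

     tree eight = size_int (8);
     if (multiple_of_p (sizetype, offset_expr, eight))
       ... OFFSET_EXPR is known to be a multiple of 8 ...

   where OFFSET_EXPR might be, e.g., the SAVE_EXPR (I) * SAVE_EXPR (J * 8)
   discussed above.  */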
15447
15448 int
15449 multiple_of_p (tree type, const_tree top, const_tree bottom)
15450 {
15451 if (operand_equal_p (top, bottom, 0))
15452 return 1;
15453
15454 if (TREE_CODE (type) != INTEGER_TYPE)
15455 return 0;
15456
15457 switch (TREE_CODE (top))
15458 {
15459 case BIT_AND_EXPR:
15460 /* A bitwise AND yields a power-of-two multiple: if BOTTOM is a power
15461 of two and either operand of the AND is a multiple of BOTTOM, then
TOP is a multiple of BOTTOM. */
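      /* For instance, N & -8 is a multiple of 8 whatever N is, because
	 the constant -8 is itself a multiple of 8, so the low three
	 bits of the result are clear.  */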
15462 if (!integer_pow2p (bottom))
15463 return 0;
15464 /* FALLTHRU */
15465
15466 case MULT_EXPR:
15467 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15468 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15469
15470 case PLUS_EXPR:
15471 case MINUS_EXPR:
15472 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15473 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15474
15475 case LSHIFT_EXPR:
15476 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15477 {
15478 tree op1, t1;
15479
15480 op1 = TREE_OPERAND (top, 1);
15481 /* const_binop may not detect overflow correctly,
15482 so check for it explicitly here. */
15483 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15484 > TREE_INT_CST_LOW (op1)
15485 && TREE_INT_CST_HIGH (op1) == 0
15486 && 0 != (t1 = fold_convert (type,
15487 const_binop (LSHIFT_EXPR,
15488 size_one_node,
15489 op1)))
15490 && !TREE_OVERFLOW (t1))
15491 return multiple_of_p (type, t1, bottom);
15492 }
15493 return 0;
15494
15495 case NOP_EXPR:
15496 /* Can't handle conversions from non-integral or wider integral type. */
15497 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15498 || (TYPE_PRECISION (type)
15499 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15500 return 0;
15501
15502 /* ... fall through ... */
15503
15504 case SAVE_EXPR:
15505 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15506
15507 case COND_EXPR:
15508 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15509 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15510
15511 case INTEGER_CST:
15512 if (TREE_CODE (bottom) != INTEGER_CST
15513 || integer_zerop (bottom)
15514 || (TYPE_UNSIGNED (type)
15515 && (tree_int_cst_sgn (top) < 0
15516 || tree_int_cst_sgn (bottom) < 0)))
15517 return 0;
15518 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15519 top, bottom));
15520
15521 default:
15522 return 0;
15523 }
15524 }
15525
15526 /* Return true if CODE or TYPE is known to be non-negative. */
15527
15528 static bool
15529 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15530 {
15531 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15532 && truth_value_p (code))
15533 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15534 have a signed:1 type (where the values are -1 and 0). */
15535 return true;
15536 return false;
15537 }
15538
15539 /* Return true if (CODE OP0) is known to be non-negative. If the return
15540 value is based on the assumption that signed overflow is undefined,
15541 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15542 *STRICT_OVERFLOW_P. */
15543
15544 bool
15545 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15546 bool *strict_overflow_p)
15547 {
15548 if (TYPE_UNSIGNED (type))
15549 return true;
15550
15551 switch (code)
15552 {
15553 case ABS_EXPR:
15554 /* We can't return true if flag_wrapv is set, because
15555 ABS_EXPR<INT_MIN> == INT_MIN. */
15556 if (!INTEGRAL_TYPE_P (type))
15557 return true;
15558 if (TYPE_OVERFLOW_UNDEFINED (type))
15559 {
15560 *strict_overflow_p = true;
15561 return true;
15562 }
15563 break;
15564
15565 case NON_LVALUE_EXPR:
15566 case FLOAT_EXPR:
15567 case FIX_TRUNC_EXPR:
15568 return tree_expr_nonnegative_warnv_p (op0,
15569 strict_overflow_p);
15570
15571 case NOP_EXPR:
15572 {
15573 tree inner_type = TREE_TYPE (op0);
15574 tree outer_type = type;
15575
15576 if (TREE_CODE (outer_type) == REAL_TYPE)
15577 {
15578 if (TREE_CODE (inner_type) == REAL_TYPE)
15579 return tree_expr_nonnegative_warnv_p (op0,
15580 strict_overflow_p);
15581 if (INTEGRAL_TYPE_P (inner_type))
15582 {
15583 if (TYPE_UNSIGNED (inner_type))
15584 return true;
15585 return tree_expr_nonnegative_warnv_p (op0,
15586 strict_overflow_p);
15587 }
15588 }
15589 else if (INTEGRAL_TYPE_P (outer_type))
15590 {
15591 if (TREE_CODE (inner_type) == REAL_TYPE)
15592 return tree_expr_nonnegative_warnv_p (op0,
15593 strict_overflow_p);
15594 if (INTEGRAL_TYPE_P (inner_type))
15595 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15596 && TYPE_UNSIGNED (inner_type);
15597 }
15598 }
15599 break;
15600
15601 default:
15602 return tree_simple_nonnegative_warnv_p (code, type);
15603 }
15604
15605 /* We don't know the sign of `t', so be conservative and return false. */
15606 return false;
15607 }
15608
15609 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15610 value is based on the assumption that signed overflow is undefined,
15611 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15612 *STRICT_OVERFLOW_P. */
15613
15614 bool
15615 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15616 tree op1, bool *strict_overflow_p)
15617 {
15618 if (TYPE_UNSIGNED (type))
15619 return true;
15620
15621 switch (code)
15622 {
15623 case POINTER_PLUS_EXPR:
15624 case PLUS_EXPR:
15625 if (FLOAT_TYPE_P (type))
15626 return (tree_expr_nonnegative_warnv_p (op0,
15627 strict_overflow_p)
15628 && tree_expr_nonnegative_warnv_p (op1,
15629 strict_overflow_p));
15630
15631 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15632 both unsigned and at least 2 bits shorter than the result. */
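      /* E.g. (int) (unsigned char) a + (int) (unsigned char) b is at
	 most 255 + 255 = 510, which needs MAX (8, 8) + 1 = 9 bits;
	 9 < 32, so the sum cannot reach the sign bit of a 32-bit int.  */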
15633 if (TREE_CODE (type) == INTEGER_TYPE
15634 && TREE_CODE (op0) == NOP_EXPR
15635 && TREE_CODE (op1) == NOP_EXPR)
15636 {
15637 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15638 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15639 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15640 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15641 {
15642 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15643 TYPE_PRECISION (inner2)) + 1;
15644 return prec < TYPE_PRECISION (type);
15645 }
15646 }
15647 break;
15648
15649 case MULT_EXPR:
15650 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15651 {
15652 /* x * x is always non-negative for floating point x
15653 or without overflow. */
15654 if (operand_equal_p (op0, op1, 0)
15655 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15656 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15657 {
15658 if (TYPE_OVERFLOW_UNDEFINED (type))
15659 *strict_overflow_p = true;
15660 return true;
15661 }
15662 }
15663
15664 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15665 both unsigned and the sum of their precisions is less than that of the result. */
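      /* E.g. (int) (unsigned char) a * (int) (unsigned char) b is at
	 most 255 * 255 = 65025, needing 8 + 8 = 16 bits; 16 < 32, so
	 the product stays non-negative in a 32-bit int.  */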
15666 if (TREE_CODE (type) == INTEGER_TYPE
15667 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15668 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15669 {
15670 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15671 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15672 : TREE_TYPE (op0);
15673 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15674 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15675 : TREE_TYPE (op1);
15676
15677 bool unsigned0 = TYPE_UNSIGNED (inner0);
15678 bool unsigned1 = TYPE_UNSIGNED (inner1);
15679
15680 if (TREE_CODE (op0) == INTEGER_CST)
15681 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15682
15683 if (TREE_CODE (op1) == INTEGER_CST)
15684 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15685
15686 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15687 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15688 {
15689 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15690 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15691 : TYPE_PRECISION (inner0);
15692
15693 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15694 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15695 : TYPE_PRECISION (inner1);
15696
15697 return precision0 + precision1 < TYPE_PRECISION (type);
15698 }
15699 }
15700 return false;
15701
15702 case BIT_AND_EXPR:
15703 case MAX_EXPR:
15704 return (tree_expr_nonnegative_warnv_p (op0,
15705 strict_overflow_p)
15706 || tree_expr_nonnegative_warnv_p (op1,
15707 strict_overflow_p));
15708
15709 case BIT_IOR_EXPR:
15710 case BIT_XOR_EXPR:
15711 case MIN_EXPR:
15712 case RDIV_EXPR:
15713 case TRUNC_DIV_EXPR:
15714 case CEIL_DIV_EXPR:
15715 case FLOOR_DIV_EXPR:
15716 case ROUND_DIV_EXPR:
15717 return (tree_expr_nonnegative_warnv_p (op0,
15718 strict_overflow_p)
15719 && tree_expr_nonnegative_warnv_p (op1,
15720 strict_overflow_p));
15721
15722 case TRUNC_MOD_EXPR:
15723 case CEIL_MOD_EXPR:
15724 case FLOOR_MOD_EXPR:
15725 case ROUND_MOD_EXPR:
15726 return tree_expr_nonnegative_warnv_p (op0,
15727 strict_overflow_p);
15728 default:
15729 return tree_simple_nonnegative_warnv_p (code, type);
15730 }
15731
15732 /* We don't know the sign of `t', so be conservative and return false. */
15733 return false;
15734 }
15735
15736 /* Return true if T is known to be non-negative. If the return
15737 value is based on the assumption that signed overflow is undefined,
15738 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15739 *STRICT_OVERFLOW_P. */
15740
15741 bool
15742 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15743 {
15744 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15745 return true;
15746
15747 switch (TREE_CODE (t))
15748 {
15749 case INTEGER_CST:
15750 return tree_int_cst_sgn (t) >= 0;
15751
15752 case REAL_CST:
15753 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15754
15755 case FIXED_CST:
15756 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15757
15758 case COND_EXPR:
15759 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15760 strict_overflow_p)
15761 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15762 strict_overflow_p));
15763 default:
15764 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15765 TREE_TYPE (t));
15766 }
15767 /* We don't know the sign of `t', so be conservative and return false. */
15768 return false;
15769 }
15770
15771 /* Return true if T is known to be non-negative. If the return
15772 value is based on the assumption that signed overflow is undefined,
15773 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15774 *STRICT_OVERFLOW_P. */
15775
15776 bool
15777 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15778 tree arg0, tree arg1, bool *strict_overflow_p)
15779 {
15780 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15781 switch (DECL_FUNCTION_CODE (fndecl))
15782 {
15783 CASE_FLT_FN (BUILT_IN_ACOS):
15784 CASE_FLT_FN (BUILT_IN_ACOSH):
15785 CASE_FLT_FN (BUILT_IN_CABS):
15786 CASE_FLT_FN (BUILT_IN_COSH):
15787 CASE_FLT_FN (BUILT_IN_ERFC):
15788 CASE_FLT_FN (BUILT_IN_EXP):
15789 CASE_FLT_FN (BUILT_IN_EXP10):
15790 CASE_FLT_FN (BUILT_IN_EXP2):
15791 CASE_FLT_FN (BUILT_IN_FABS):
15792 CASE_FLT_FN (BUILT_IN_FDIM):
15793 CASE_FLT_FN (BUILT_IN_HYPOT):
15794 CASE_FLT_FN (BUILT_IN_POW10):
15795 CASE_INT_FN (BUILT_IN_FFS):
15796 CASE_INT_FN (BUILT_IN_PARITY):
15797 CASE_INT_FN (BUILT_IN_POPCOUNT):
15798 CASE_INT_FN (BUILT_IN_CLZ):
15799 CASE_INT_FN (BUILT_IN_CLRSB):
15800 case BUILT_IN_BSWAP32:
15801 case BUILT_IN_BSWAP64:
15802 /* Always true. */
15803 return true;
15804
15805 CASE_FLT_FN (BUILT_IN_SQRT):
15806 /* sqrt(-0.0) is -0.0. */
15807 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15808 return true;
15809 return tree_expr_nonnegative_warnv_p (arg0,
15810 strict_overflow_p);
15811
15812 CASE_FLT_FN (BUILT_IN_ASINH):
15813 CASE_FLT_FN (BUILT_IN_ATAN):
15814 CASE_FLT_FN (BUILT_IN_ATANH):
15815 CASE_FLT_FN (BUILT_IN_CBRT):
15816 CASE_FLT_FN (BUILT_IN_CEIL):
15817 CASE_FLT_FN (BUILT_IN_ERF):
15818 CASE_FLT_FN (BUILT_IN_EXPM1):
15819 CASE_FLT_FN (BUILT_IN_FLOOR):
15820 CASE_FLT_FN (BUILT_IN_FMOD):
15821 CASE_FLT_FN (BUILT_IN_FREXP):
15822 CASE_FLT_FN (BUILT_IN_ICEIL):
15823 CASE_FLT_FN (BUILT_IN_IFLOOR):
15824 CASE_FLT_FN (BUILT_IN_IRINT):
15825 CASE_FLT_FN (BUILT_IN_IROUND):
15826 CASE_FLT_FN (BUILT_IN_LCEIL):
15827 CASE_FLT_FN (BUILT_IN_LDEXP):
15828 CASE_FLT_FN (BUILT_IN_LFLOOR):
15829 CASE_FLT_FN (BUILT_IN_LLCEIL):
15830 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15831 CASE_FLT_FN (BUILT_IN_LLRINT):
15832 CASE_FLT_FN (BUILT_IN_LLROUND):
15833 CASE_FLT_FN (BUILT_IN_LRINT):
15834 CASE_FLT_FN (BUILT_IN_LROUND):
15835 CASE_FLT_FN (BUILT_IN_MODF):
15836 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15837 CASE_FLT_FN (BUILT_IN_RINT):
15838 CASE_FLT_FN (BUILT_IN_ROUND):
15839 CASE_FLT_FN (BUILT_IN_SCALB):
15840 CASE_FLT_FN (BUILT_IN_SCALBLN):
15841 CASE_FLT_FN (BUILT_IN_SCALBN):
15842 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15843 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15844 CASE_FLT_FN (BUILT_IN_SINH):
15845 CASE_FLT_FN (BUILT_IN_TANH):
15846 CASE_FLT_FN (BUILT_IN_TRUNC):
15847 /* True if the 1st argument is nonnegative. */
15848 return tree_expr_nonnegative_warnv_p (arg0,
15849 strict_overflow_p);
15850
15851 CASE_FLT_FN (BUILT_IN_FMAX):
15852 /* True if the 1st OR the 2nd argument is nonnegative. */
15853 return (tree_expr_nonnegative_warnv_p (arg0,
15854 strict_overflow_p)
15855 || (tree_expr_nonnegative_warnv_p (arg1,
15856 strict_overflow_p)));
15857
15858 CASE_FLT_FN (BUILT_IN_FMIN):
15859 /* True if the 1st AND 2nd arguments are nonnegative. */
15860 return (tree_expr_nonnegative_warnv_p (arg0,
15861 strict_overflow_p)
15862 && (tree_expr_nonnegative_warnv_p (arg1,
15863 strict_overflow_p)));
15864
15865 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15866 /* True if the 2nd argument is nonnegative. */
15867 return tree_expr_nonnegative_warnv_p (arg1,
15868 strict_overflow_p);
15869
15870 CASE_FLT_FN (BUILT_IN_POWI):
15871 /* True if the 1st argument is nonnegative or the second
15872 argument is an even integer. */
15873 if (TREE_CODE (arg1) == INTEGER_CST
15874 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15875 return true;
15876 return tree_expr_nonnegative_warnv_p (arg0,
15877 strict_overflow_p);
15878
15879 CASE_FLT_FN (BUILT_IN_POW):
15880 /* True if the 1st argument is nonnegative or the second
15881 argument is an even integer-valued real. */
15882 if (TREE_CODE (arg1) == REAL_CST)
15883 {
15884 REAL_VALUE_TYPE c;
15885 HOST_WIDE_INT n;
15886
15887 c = TREE_REAL_CST (arg1);
15888 n = real_to_integer (&c);
15889 if ((n & 1) == 0)
15890 {
15891 REAL_VALUE_TYPE cint;
15892 real_from_integer (&cint, VOIDmode, n,
15893 n < 0 ? -1 : 0, 0);
15894 if (real_identical (&c, &cint))
15895 return true;
15896 }
15897 }
15898 return tree_expr_nonnegative_warnv_p (arg0,
15899 strict_overflow_p);
15900
15901 default:
15902 break;
15903 }
15904 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15905 type);
15906 }
15907
15908 /* Return true if T is known to be non-negative. If the return
15909 value is based on the assumption that signed overflow is undefined,
15910 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15911 *STRICT_OVERFLOW_P. */
15912
15913 static bool
15914 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15915 {
15916 enum tree_code code = TREE_CODE (t);
15917 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15918 return true;
15919
15920 switch (code)
15921 {
15922 case TARGET_EXPR:
15923 {
15924 tree temp = TARGET_EXPR_SLOT (t);
15925 t = TARGET_EXPR_INITIAL (t);
15926
15927 /* If the initializer is non-void, then it's a normal expression
15928 that will be assigned to the slot. */
15929 if (!VOID_TYPE_P (t))
15930 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15931
15932 /* Otherwise, the initializer sets the slot in some way. One common
15933 way is an assignment statement at the end of the initializer. */
15934 while (1)
15935 {
15936 if (TREE_CODE (t) == BIND_EXPR)
15937 t = expr_last (BIND_EXPR_BODY (t));
15938 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15939 || TREE_CODE (t) == TRY_CATCH_EXPR)
15940 t = expr_last (TREE_OPERAND (t, 0));
15941 else if (TREE_CODE (t) == STATEMENT_LIST)
15942 t = expr_last (t);
15943 else
15944 break;
15945 }
15946 if (TREE_CODE (t) == MODIFY_EXPR
15947 && TREE_OPERAND (t, 0) == temp)
15948 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15949 strict_overflow_p);
15950
15951 return false;
15952 }
15953
15954 case CALL_EXPR:
15955 {
15956 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15957 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15958
15959 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15960 get_callee_fndecl (t),
15961 arg0,
15962 arg1,
15963 strict_overflow_p);
15964 }
15965 case COMPOUND_EXPR:
15966 case MODIFY_EXPR:
15967 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15968 strict_overflow_p);
15969 case BIND_EXPR:
15970 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15971 strict_overflow_p);
15972 case SAVE_EXPR:
15973 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15974 strict_overflow_p);
15975
15976 default:
15977 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15978 TREE_TYPE (t));
15979 }
15980
15981 /* We don't know the sign of `t', so be conservative and return false. */
15982 return false;
15983 }
15984
15985 /* Return true if T is known to be non-negative. If the return
15986 value is based on the assumption that signed overflow is undefined,
15987 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15988 *STRICT_OVERFLOW_P. */
15989
15990 bool
15991 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15992 {
15993 enum tree_code code;
15994 if (t == error_mark_node)
15995 return false;
15996
15997 code = TREE_CODE (t);
15998 switch (TREE_CODE_CLASS (code))
15999 {
16000 case tcc_binary:
16001 case tcc_comparison:
16002 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
16003 TREE_TYPE (t),
16004 TREE_OPERAND (t, 0),
16005 TREE_OPERAND (t, 1),
16006 strict_overflow_p);
16007
16008 case tcc_unary:
16009 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
16010 TREE_TYPE (t),
16011 TREE_OPERAND (t, 0),
16012 strict_overflow_p);
16013
16014 case tcc_constant:
16015 case tcc_declaration:
16016 case tcc_reference:
16017 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16018
16019 default:
16020 break;
16021 }
16022
16023 switch (code)
16024 {
16025 case TRUTH_AND_EXPR:
16026 case TRUTH_OR_EXPR:
16027 case TRUTH_XOR_EXPR:
16028 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
16029 TREE_TYPE (t),
16030 TREE_OPERAND (t, 0),
16031 TREE_OPERAND (t, 1),
16032 strict_overflow_p);
16033 case TRUTH_NOT_EXPR:
16034 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
16035 TREE_TYPE (t),
16036 TREE_OPERAND (t, 0),
16037 strict_overflow_p);
16038
16039 case COND_EXPR:
16040 case CONSTRUCTOR:
16041 case OBJ_TYPE_REF:
16042 case ASSERT_EXPR:
16043 case ADDR_EXPR:
16044 case WITH_SIZE_EXPR:
16045 case SSA_NAME:
16046 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16047
16048 default:
16049 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
16050 }
16051 }
16052
16053 /* Return true if `t' is known to be non-negative. Handle warnings
16054 about undefined signed overflow. */
16055
16056 bool
16057 tree_expr_nonnegative_p (tree t)
16058 {
16059 bool ret, strict_overflow_p;
16060
16061 strict_overflow_p = false;
16062 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
16063 if (strict_overflow_p)
16064 fold_overflow_warning (("assuming signed overflow does not occur when "
16065 "determining that expression is always "
16066 "non-negative"),
16067 WARN_STRICT_OVERFLOW_MISC);
16068 return ret;
16069 }
16070
16071
16072 /* Return true when (CODE OP0) is an address and is known to be nonzero.
16073 For floating point we further ensure that T is not denormal.
16074 Similar logic is present in nonzero_address_p in rtlanal.c.
16075
16076 If the return value is based on the assumption that signed overflow
16077 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16078 change *STRICT_OVERFLOW_P. */
16079
16080 bool
16081 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
16082 bool *strict_overflow_p)
16083 {
16084 switch (code)
16085 {
16086 case ABS_EXPR:
16087 return tree_expr_nonzero_warnv_p (op0,
16088 strict_overflow_p);
16089
16090 case NOP_EXPR:
16091 {
16092 tree inner_type = TREE_TYPE (op0);
16093 tree outer_type = type;
16094
16095 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
16096 && tree_expr_nonzero_warnv_p (op0,
16097 strict_overflow_p));
16098 }
16099 break;
16100
16101 case NON_LVALUE_EXPR:
16102 return tree_expr_nonzero_warnv_p (op0,
16103 strict_overflow_p);
16104
16105 default:
16106 break;
16107 }
16108
16109 return false;
16110 }
16111
16112 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
16113 For floating point we further ensure that T is not denormal.
16114 Similar logic is present in nonzero_address_p in rtlanal.c.
16115
16116 If the return value is based on the assumption that signed overflow
16117 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16118 change *STRICT_OVERFLOW_P. */
16119
16120 bool
16121 tree_binary_nonzero_warnv_p (enum tree_code code,
16122 tree type,
16123 tree op0,
16124 tree op1, bool *strict_overflow_p)
16125 {
16126 bool sub_strict_overflow_p;
16127 switch (code)
16128 {
16129 case POINTER_PLUS_EXPR:
16130 case PLUS_EXPR:
16131 if (TYPE_OVERFLOW_UNDEFINED (type))
16132 {
16133 /* In the presence of negative values it is hard
16134 to say anything definite. */
16135 sub_strict_overflow_p = false;
16136 if (!tree_expr_nonnegative_warnv_p (op0,
16137 &sub_strict_overflow_p)
16138 || !tree_expr_nonnegative_warnv_p (op1,
16139 &sub_strict_overflow_p))
16140 return false;
16141 /* One of the operands must be positive and the other non-negative. */
16142 /* We don't set *STRICT_OVERFLOW_P here: even if this value
16143 overflows, on a two's-complement machine the sum of two
16144 nonnegative numbers can never be zero. */
16145 return (tree_expr_nonzero_warnv_p (op0,
16146 strict_overflow_p)
16147 || tree_expr_nonzero_warnv_p (op1,
16148 strict_overflow_p));
16149 }
16150 break;
16151
16152 case MULT_EXPR:
16153 if (TYPE_OVERFLOW_UNDEFINED (type))
16154 {
16155 if (tree_expr_nonzero_warnv_p (op0,
16156 strict_overflow_p)
16157 && tree_expr_nonzero_warnv_p (op1,
16158 strict_overflow_p))
16159 {
16160 *strict_overflow_p = true;
16161 return true;
16162 }
16163 }
16164 break;
16165
16166 case MIN_EXPR:
16167 sub_strict_overflow_p = false;
16168 if (tree_expr_nonzero_warnv_p (op0,
16169 &sub_strict_overflow_p)
16170 && tree_expr_nonzero_warnv_p (op1,
16171 &sub_strict_overflow_p))
16172 {
16173 if (sub_strict_overflow_p)
16174 *strict_overflow_p = true;
      /* MIN of two nonzero values is one of them, hence nonzero.  */
      return true;
16175 }
16176 break;
16177
16178 case MAX_EXPR:
16179 sub_strict_overflow_p = false;
16180 if (tree_expr_nonzero_warnv_p (op0,
16181 &sub_strict_overflow_p))
16182 {
16183 if (sub_strict_overflow_p)
16184 *strict_overflow_p = true;
16185
16186 /* When both operands are nonzero, then MAX must be too. */
16187 if (tree_expr_nonzero_warnv_p (op1,
16188 strict_overflow_p))
16189 return true;
16190
16191 /* MAX where operand 0 is positive is positive. */
16192 return tree_expr_nonnegative_warnv_p (op0,
16193 strict_overflow_p);
16194 }
16195 /* MAX where operand 1 is positive is positive. */
16196 else if (tree_expr_nonzero_warnv_p (op1,
16197 &sub_strict_overflow_p)
16198 && tree_expr_nonnegative_warnv_p (op1,
16199 &sub_strict_overflow_p))
16200 {
16201 if (sub_strict_overflow_p)
16202 *strict_overflow_p = true;
16203 return true;
16204 }
16205 break;
16206
16207 case BIT_IOR_EXPR:
16208 return (tree_expr_nonzero_warnv_p (op1,
16209 strict_overflow_p)
16210 || tree_expr_nonzero_warnv_p (op0,
16211 strict_overflow_p));
16212
16213 default:
16214 break;
16215 }
16216
16217 return false;
16218 }
16219
16220 /* Return true when T is an address and is known to be nonzero.
16221 For floating point we further ensure that T is not denormal.
16222 Similar logic is present in nonzero_address_p in rtlanal.c.
16223
16224 If the return value is based on the assumption that signed overflow
16225 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16226 change *STRICT_OVERFLOW_P. */
16227
16228 bool
16229 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16230 {
16231 bool sub_strict_overflow_p;
16232 switch (TREE_CODE (t))
16233 {
16234 case INTEGER_CST:
16235 return !integer_zerop (t);
16236
16237 case ADDR_EXPR:
16238 {
16239 tree base = TREE_OPERAND (t, 0);
16240 if (!DECL_P (base))
16241 base = get_base_address (base);
16242
16243 if (!base)
16244 return false;
16245
16246 /* Weak declarations may link to NULL. Other things may also be NULL,
16247 so protect with -fdelete-null-pointer-checks; but this does not
16248 apply to variables allocated on the stack. */
16249 if (DECL_P (base)
16250 && (flag_delete_null_pointer_checks
16251 || (DECL_CONTEXT (base)
16252 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16253 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16254 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16255
16256 /* Constants are never weak. */
16257 if (CONSTANT_CLASS_P (base))
16258 return true;
16259
16260 return false;
16261 }
16262
16263 case COND_EXPR:
16264 sub_strict_overflow_p = false;
16265 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16266 &sub_strict_overflow_p)
16267 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16268 &sub_strict_overflow_p))
16269 {
16270 if (sub_strict_overflow_p)
16271 *strict_overflow_p = true;
16272 return true;
16273 }
16274 break;
16275
16276 default:
16277 break;
16278 }
16279 return false;
16280 }
16281
16282 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16283 attempt to fold the expression to a constant without modifying TYPE,
16284 OP0 or OP1.
16285
16286 If the expression could be simplified to a constant, then return
16287 the constant. If the expression would not be simplified to a
16288 constant, then return NULL_TREE. */
16289
16290 tree
16291 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16292 {
16293 tree tem = fold_binary (code, type, op0, op1);
16294 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16295 }
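/* A minimal usage sketch; build_int_cst and integer_type_node are the
   usual tree APIs:

     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					  build_int_cst (integer_type_node, 2),
					  build_int_cst (integer_type_node, 3));

   FIVE is the INTEGER_CST 5; had the operands not simplified to a
   constant, the result would be NULL_TREE.  */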
16296
16297 /* Given the components of a unary expression CODE, TYPE and OP0,
16298 attempt to fold the expression to a constant without modifying
16299 TYPE or OP0.
16300
16301 If the expression could be simplified to a constant, then return
16302 the constant. If the expression would not be simplified to a
16303 constant, then return NULL_TREE. */
16304
16305 tree
16306 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16307 {
16308 tree tem = fold_unary (code, type, op0);
16309 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16310 }
16311
16312 /* If EXP represents referencing an element in a constant string
16313 (either via pointer arithmetic or array indexing), return the
16314 tree representing the value accessed, otherwise return NULL. */
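/* For example, the C expression "abc"[1] is an ARRAY_REF into the
   STRING_CST "abc" and folds to the INTEGER_CST 98 ('b').  */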
16315
16316 tree
16317 fold_read_from_constant_string (tree exp)
16318 {
16319 if ((TREE_CODE (exp) == INDIRECT_REF
16320 || TREE_CODE (exp) == ARRAY_REF)
16321 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16322 {
16323 tree exp1 = TREE_OPERAND (exp, 0);
16324 tree index;
16325 tree string;
16326 location_t loc = EXPR_LOCATION (exp);
16327
16328 if (TREE_CODE (exp) == INDIRECT_REF)
16329 string = string_constant (exp1, &index);
16330 else
16331 {
16332 tree low_bound = array_ref_low_bound (exp);
16333 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16334
16335 /* Optimize the special-case of a zero lower bound.
16336
16337 We convert the low_bound to sizetype to avoid some problems
16338 with constant folding. (E.g. suppose the lower bound is 1,
16339 and its mode is QI. Without the conversion, (ARRAY
16340 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16341 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16342 if (! integer_zerop (low_bound))
16343 index = size_diffop_loc (loc, index,
16344 fold_convert_loc (loc, sizetype, low_bound));
16345
16346 string = exp1;
16347 }
16348
16349 if (string
16350 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16351 && TREE_CODE (string) == STRING_CST
16352 && TREE_CODE (index) == INTEGER_CST
16353 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16354 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16355 == MODE_INT)
16356 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16357 return build_int_cst_type (TREE_TYPE (exp),
16358 (TREE_STRING_POINTER (string)
16359 [TREE_INT_CST_LOW (index)]));
16360 }
16361 return NULL;
16362 }
16363
16364 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16365 an integer constant, real, or fixed-point constant.
16366
16367 TYPE is the type of the result. */
16368
16369 static tree
16370 fold_negate_const (tree arg0, tree type)
16371 {
16372 tree t = NULL_TREE;
16373
16374 switch (TREE_CODE (arg0))
16375 {
16376 case INTEGER_CST:
16377 {
16378 double_int val = tree_to_double_int (arg0);
16379 bool overflow;
16380 val = val.neg_with_overflow (&overflow);
16381 t = force_fit_type_double (type, val, 1,
16382 (overflow | TREE_OVERFLOW (arg0))
16383 && !TYPE_UNSIGNED (type));
16384 break;
16385 }
16386
16387 case REAL_CST:
16388 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16389 break;
16390
16391 case FIXED_CST:
16392 {
16393 FIXED_VALUE_TYPE f;
16394 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16395 &(TREE_FIXED_CST (arg0)), NULL,
16396 TYPE_SATURATING (type));
16397 t = build_fixed (type, f);
16398 /* Propagate overflow flags. */
16399 if (overflow_p | TREE_OVERFLOW (arg0))
16400 TREE_OVERFLOW (t) = 1;
16401 break;
16402 }
16403
16404 default:
16405 gcc_unreachable ();
16406 }
16407
16408 return t;
16409 }
16410
16411 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16412 an integer constant or real constant.
16413
16414 TYPE is the type of the result. */
16415
16416 tree
16417 fold_abs_const (tree arg0, tree type)
16418 {
16419 tree t = NULL_TREE;
16420
16421 switch (TREE_CODE (arg0))
16422 {
16423 case INTEGER_CST:
16424 {
16425 double_int val = tree_to_double_int (arg0);
16426
16427 /* If the value is unsigned or non-negative, then the absolute value
16428 is the same as the ordinary value. */
16429 if (TYPE_UNSIGNED (type)
16430 || !val.is_negative ())
16431 t = arg0;
16432
16433 /* If the value is negative, then the absolute value is
16434 its negation. */
16435 else
16436 {
16437 bool overflow;
16438 val = val.neg_with_overflow (&overflow);
16439 t = force_fit_type_double (type, val, -1,
16440 overflow | TREE_OVERFLOW (arg0));
16441 }
16442 }
16443 break;
16444
16445 case REAL_CST:
16446 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16447 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16448 else
16449 t = arg0;
16450 break;
16451
16452 default:
16453 gcc_unreachable ();
16454 }
16455
16456 return t;
16457 }
16458
16459 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16460 constant. TYPE is the type of the result. */
16461
16462 static tree
16463 fold_not_const (const_tree arg0, tree type)
16464 {
16465 double_int val;
16466
16467 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16468
16469 val = ~tree_to_double_int (arg0);
16470 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16471 }
16472
16473 /* Given CODE, a relational operator, the target type, TYPE and two
16474 constant operands OP0 and OP1, return the result of the
16475 relational operation. If the result is not a compile time
16476 constant, then return NULL_TREE. */
16477
16478 static tree
16479 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16480 {
16481 int result, invert;
16482
16483 /* From here on, the only cases we handle are when the result is
16484 known to be a constant. */
16485
16486 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16487 {
16488 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16489 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16490
16491 /* Handle the cases where either operand is a NaN. */
16492 if (real_isnan (c0) || real_isnan (c1))
16493 {
16494 switch (code)
16495 {
16496 case EQ_EXPR:
16497 case ORDERED_EXPR:
16498 result = 0;
16499 break;
16500
16501 case NE_EXPR:
16502 case UNORDERED_EXPR:
16503 case UNLT_EXPR:
16504 case UNLE_EXPR:
16505 case UNGT_EXPR:
16506 case UNGE_EXPR:
16507 case UNEQ_EXPR:
16508 result = 1;
16509 break;
16510
16511 case LT_EXPR:
16512 case LE_EXPR:
16513 case GT_EXPR:
16514 case GE_EXPR:
16515 case LTGT_EXPR:
16516 if (flag_trapping_math)
16517 return NULL_TREE;
16518 result = 0;
16519 break;
16520
16521 default:
16522 gcc_unreachable ();
16523 }
16524
16525 return constant_boolean_node (result, type);
16526 }
16527
16528 return constant_boolean_node (real_compare (code, c0, c1), type);
16529 }
16530
16531 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16532 {
16533 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16534 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16535 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16536 }
16537
16538 /* Handle equality/inequality of complex constants. */
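      /* E.g. (1 + 2i) == (1 + 3i) becomes (1 == 1) && (2 == 3), i.e.
	 false; NE uses || instead.  */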
16539 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16540 {
16541 tree rcond = fold_relational_const (code, type,
16542 TREE_REALPART (op0),
16543 TREE_REALPART (op1));
16544 tree icond = fold_relational_const (code, type,
16545 TREE_IMAGPART (op0),
16546 TREE_IMAGPART (op1));
16547 if (code == EQ_EXPR)
16548 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16549 else if (code == NE_EXPR)
16550 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16551 else
16552 return NULL_TREE;
16553 }
16554
16555 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16556 {
16557 unsigned count = VECTOR_CST_NELTS (op0);
16558 tree *elts = XALLOCAVEC (tree, count);
16559 gcc_assert (VECTOR_CST_NELTS (op1) == count
16560 && TYPE_VECTOR_SUBPARTS (type) == count);
16561
16562 for (unsigned i = 0; i < count; i++)
16563 {
16564 tree elem_type = TREE_TYPE (type);
16565 tree elem0 = VECTOR_CST_ELT (op0, i);
16566 tree elem1 = VECTOR_CST_ELT (op1, i);
16567
16568 tree tem = fold_relational_const (code, elem_type,
16569 elem0, elem1);
16570
16571 if (tem == NULL_TREE)
16572 return NULL_TREE;
16573
16574 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16575 }
16576
16577 return build_vector (type, elts);
16578 }
16579
16580 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16581
16582 To compute GT, swap the arguments and do LT.
16583 To compute GE, do LT and invert the result.
16584 To compute LE, swap the arguments, do LT and invert the result.
16585 To compute NE, do EQ and invert the result.
16586
16587 Therefore, the code below must handle only EQ and LT. */
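/* Worked example: 2 >= 3 is handled as NOT (2 < 3); LT yields 1,
   and inverting gives 0, i.e. false.  */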
16588
16589 if (code == LE_EXPR || code == GT_EXPR)
16590 {
16591 tree tem = op0;
16592 op0 = op1;
16593 op1 = tem;
16594 code = swap_tree_comparison (code);
16595 }
16596
16597 /* Note that it is safe to invert for real values here because we
16598 have already handled the one case where it matters. */
16599
16600 invert = 0;
16601 if (code == NE_EXPR || code == GE_EXPR)
16602 {
16603 invert = 1;
16604 code = invert_tree_comparison (code, false);
16605 }
16606
16607 /* Compute a result for LT or EQ if args permit;
16608 otherwise return NULL_TREE. */
16609 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16610 {
16611 if (code == EQ_EXPR)
16612 result = tree_int_cst_equal (op0, op1);
16613 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16614 result = INT_CST_LT_UNSIGNED (op0, op1);
16615 else
16616 result = INT_CST_LT (op0, op1);
16617 }
16618 else
16619 return NULL_TREE;
16620
16621 if (invert)
16622 result ^= 1;
16623 return constant_boolean_node (result, type);
16624 }
16625
16626 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16627 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16628 itself. */
16629
16630 tree
16631 fold_build_cleanup_point_expr (tree type, tree expr)
16632 {
16633 /* If the expression does not have side effects then we don't have to wrap
16634 it with a cleanup point expression. */
16635 if (!TREE_SIDE_EFFECTS (expr))
16636 return expr;
16637
16638 /* If the expression is a return, check whether the expression inside
16639 the return, or the right-hand side of the modify expression inside
16640 the return, has side effects. If either has none, we don't need to
16641 wrap the expression in a cleanup point expression. Note we don't check
16642 the left-hand side of the modify because it should always be a return decl. */
16643 if (TREE_CODE (expr) == RETURN_EXPR)
16644 {
16645 tree op = TREE_OPERAND (expr, 0);
16646 if (!op || !TREE_SIDE_EFFECTS (op))
16647 return expr;
16648 op = TREE_OPERAND (op, 1);
16649 if (!TREE_SIDE_EFFECTS (op))
16650 return expr;
16651 }
16652
16653 return build1 (CLEANUP_POINT_EXPR, type, expr);
16654 }
16655
16656 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16657 of an indirection through OP0, or NULL_TREE if no simplification is
16658 possible. */
16659
16660 tree
16661 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16662 {
16663 tree sub = op0;
16664 tree subtype;
16665
16666 STRIP_NOPS (sub);
16667 subtype = TREE_TYPE (sub);
16668 if (!POINTER_TYPE_P (subtype))
16669 return NULL_TREE;
16670
16671 if (TREE_CODE (sub) == ADDR_EXPR)
16672 {
16673 tree op = TREE_OPERAND (sub, 0);
16674 tree optype = TREE_TYPE (op);
16675 /* *&CONST_DECL -> to the value of the const decl. */
16676 if (TREE_CODE (op) == CONST_DECL)
16677 return DECL_INITIAL (op);
16678 /* *&p => p; make sure to handle *&"str"[cst] here. */
16679 if (type == optype)
16680 {
16681 tree fop = fold_read_from_constant_string (op);
16682 if (fop)
16683 return fop;
16684 else
16685 return op;
16686 }
16687 /* *(foo *)&fooarray => fooarray[0] */
16688 else if (TREE_CODE (optype) == ARRAY_TYPE
16689 && type == TREE_TYPE (optype)
16690 && (!in_gimple_form
16691 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16692 {
16693 tree type_domain = TYPE_DOMAIN (optype);
16694 tree min_val = size_zero_node;
16695 if (type_domain && TYPE_MIN_VALUE (type_domain))
16696 min_val = TYPE_MIN_VALUE (type_domain);
16697 if (in_gimple_form
16698 && TREE_CODE (min_val) != INTEGER_CST)
16699 return NULL_TREE;
16700 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16701 NULL_TREE, NULL_TREE);
16702 }
16703 /* *(foo *)&complexfoo => __real__ complexfoo */
16704 else if (TREE_CODE (optype) == COMPLEX_TYPE
16705 && type == TREE_TYPE (optype))
16706 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16707 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16708 else if (TREE_CODE (optype) == VECTOR_TYPE
16709 && type == TREE_TYPE (optype))
16710 {
16711 tree part_width = TYPE_SIZE (type);
16712 tree index = bitsize_int (0);
16713 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16714 }
16715 }
16716
16717 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16718 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16719 {
16720 tree op00 = TREE_OPERAND (sub, 0);
16721 tree op01 = TREE_OPERAND (sub, 1);
16722
16723 STRIP_NOPS (op00);
16724 if (TREE_CODE (op00) == ADDR_EXPR)
16725 {
16726 tree op00type;
16727 op00 = TREE_OPERAND (op00, 0);
16728 op00type = TREE_TYPE (op00);
16729
16730 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16731 if (TREE_CODE (op00type) == VECTOR_TYPE
16732 && type == TREE_TYPE (op00type))
16733 {
16734 HOST_WIDE_INT offset = tree_to_shwi (op01);
16735 tree part_width = TYPE_SIZE (type);
16736 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16737 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16738 tree index = bitsize_int (indexi);
16739
16740 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16741 return fold_build3_loc (loc,
16742 BIT_FIELD_REF, type, op00,
16743 part_width, index);
16744
16745 }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
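
/* Illustrative examples (not from the original sources): at the C
   source level, the folds above correspond to rewrites such as

     foo fooarray[4];
     _Complex double complexfoo;

     *(foo *)&fooarray            => fooarray[0]
     ((foo *)&fooarray)[1]        => fooarray[1]
     *(double *)&complexfoo       => __real__ complexfoo
     ((double *)&complexfoo)[1]   => __imag__ complexfoo

   where `foo' stands for a hypothetical element type.  */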

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
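
/* For example (illustrative, not from the original file): with the
   result ignored, "x + f ()" reduces to "f ()" -- the tcc_binary case
   drops the side-effect-free operand and keeps iterating on the one
   with side effects -- and "(f (), y)" with a side-effect-free y
   reduces to "f ()" via the COMPOUND_EXPR case.  */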

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant, because for constants the check costs more than simply
     performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
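
/* A minimal sketch (not part of the original sources) of the
   power-of-two path above in plain C; the helper name
   round_up_pow2_sketch is invented for illustration.  */
#if 0
static inline unsigned HOST_WIDE_INT
round_up_pow2_sketch (unsigned HOST_WIDE_INT value,
                      unsigned HOST_WIDE_INT divisor)
{
  /* DIVISOR must be a power of two: adding DIVISOR - 1 and masking
     with the two's-complement mask -DIVISOR rounds VALUE up to the
     next multiple.  */
  return (value + divisor - 1) & -divisor;
}
#endif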

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant, because for constants the check costs more than simply
     performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
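
/* Likewise a sketch (illustrative only): rounding down to a
   power-of-two DIVISOR needs just the mask, with no addition.  */
#if 0
static inline unsigned HOST_WIDE_INT
round_down_pow2_sketch (unsigned HOST_WIDE_INT value,
                        unsigned HOST_WIDE_INT divisor)
{
  /* DIVISOR must be a power of two.  */
  return value & -divisor;
}
#endif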

/* Returns a pointer to the base of the object addressed by EXP and
   extracts information about the position of the access, storing the
   bit position in *PBITPOS and any variable offset in *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
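
/* E.g. (illustrative) for EXP = &s.f[2], get_inner_reference peels the
   ARRAY_REF and COMPONENT_REF down to the base object s, the constant
   displacement of the access lands in *PBITPOS (in bits), any variable
   part in *POFFSET, and the returned core is &s.  */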

/* Returns true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
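
/* Example (illustrative, not from the original sources): for

     int a[8];
     e1 = &a[5], e2 = &a[2]

   both addresses share the core &a, so *DIFF is set to
   3 * sizeof (int) bytes and the function returns true; for addresses
   with different cores, such as &a[i] and &b[0], operand_equal_p fails
   and the function returns false.  */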

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip the copysign call and return its first argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
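
/* Example (illustrative): in a context where only the magnitude of the
   result matters, such as the argument of fabs, an expression like

     -x * copysign (y, z)

   is simplified here to x * y: the NEGATE_EXPR is stripped, and the
   copysign call is replaced by its first argument, with z retained
   only for its side effects via omit_one_operand_loc.  */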